Dataset columns (name: type, observed range):
- repository_name: stringclasses (316 values)
- func_path_in_repository: stringlengths (6 to 223)
- func_name: stringlengths (1 to 134)
- language: stringclasses (1 value)
- func_code_string: stringlengths (57 to 65.5k)
- func_documentation_string: stringlengths (1 to 46.3k)
- split_name: stringclasses (1 value)
- func_code_url: stringlengths (91 to 315)
- called_functions: listlengths (1 to 156)
- enclosing_scope: stringlengths (2 to 1.48M)
ChristopherRabotin/bungiesearch
bungiesearch/fields.py
django_field_to_index
python
def django_field_to_index(field, **attr):
    '''
    Returns the index field type that would likely be associated with each Django type.
    '''
    dj_type = field.get_internal_type()
    if dj_type in ('DateField', 'DateTimeField'):
        return DateField(**attr)
    elif dj_type in ('BooleanField', 'NullBooleanField'):
        return BooleanField(**attr)
    elif dj_type in ('DecimalField', 'FloatField'):
        return NumberField(coretype='float', **attr)
    elif dj_type in ('PositiveSmallIntegerField', 'SmallIntegerField'):
        return NumberField(coretype='short', **attr)
    elif dj_type in ('IntegerField', 'PositiveIntegerField', 'AutoField'):
        return NumberField(coretype='integer', **attr)
    elif dj_type in ('BigIntegerField'):
        return NumberField(coretype='long', **attr)
    return StringField(**attr)
Returns the index field type that would likely be associated with each Django type.
train
https://github.com/ChristopherRabotin/bungiesearch/blob/13768342bc2698b214eb0003c2d113b6e273c30d/bungiesearch/fields.py#L145-L165
null
from django.template import Context, loader from django.template.defaultfilters import striptags from six import iteritems from elasticsearch_dsl.analysis import Analyzer class AbstractField(object): ''' Represents an elasticsearch index field and values from given objects. Currently does not support binary fields, but those can be created by manually providing a dictionary. Values are extracted using the `model_attr` or `eval_as` attribute. ''' meta_fields = ['_index', '_uid', '_type', '_id'] common_fields = ['index_name', 'store', 'index', 'boost', 'null_value', 'copy_to', 'type', 'fields'] @property def fields(self): try: return self.fields except: raise NotImplementedError('Allowed fields are not defined.') @property def coretype(self): try: return self.coretype except: raise NotImplementedError('Core type is not defined!') @property def defaults(self): ''' Stores default values. ''' try: return self.defaults except: return {} def __init__(self, **args): ''' Performs several checks to ensure that the provided attributes are valid. Will not check their values. ''' if isinstance(self.coretype, list): if 'coretype' not in args: raise KeyError('{} can be represented as one of the following types: {}. Specify which to select as the `coretype` parameter.'.format(unicode(self), ', '.join(self.coretype))) if args['coretype'] not in self.coretype: raise KeyError('Core type {} is not supported by {}.'.format(args['coretype'], unicode(self))) self.type = args.pop('coretype') else: self.type = self.coretype self.model_attr = args.pop('model_attr', None) self.eval_func = args.pop('eval_as', None) self.template_name = args.pop('template', None) for attr, value in iteritems(args): if attr not in self.fields and attr not in AbstractField.common_fields: raise KeyError('Attribute `{}` is not allowed for core type {}.'.format(attr, self.coretype)) setattr(self, attr, value) for attr, value in iteritems(self.defaults): if not hasattr(self, attr): setattr(self, attr, value) def value(self, obj): ''' Computes the value of this field to update the index. :param obj: object instance, as a dictionary or as a model instance. ''' if self.template_name: t = loader.select_template([self.template_name]) return t.render(Context({'object': obj})) if self.eval_func: try: return eval(self.eval_func) except Exception as e: raise type(e)('Could not compute value of {} field (eval_as=`{}`): {}.'.format(unicode(self), self.eval_func, unicode(e))) elif self.model_attr: if isinstance(obj, dict): return obj[self.model_attr] current_obj = getattr(obj, self.model_attr) if callable(current_obj): return current_obj() else: return current_obj else: raise KeyError('{0} gets its value via a model attribute, an eval function, a template, or is prepared in a method ' 'call but none of `model_attr`, `eval_as,` `template,` `prepare_{0}` is provided.'.format(unicode(self))) def json(self): json = {} for attr, val in iteritems(self.__dict__): if attr in ('eval_func', 'model_attr', 'template_name'): continue elif attr in ('analyzer', 'index_analyzer', 'search_analyzer') and isinstance(val, Analyzer): json[attr] = val.to_dict() else: json[attr] = val return json # All the following definitions could probably be done with better polymorphism. 
class StringField(AbstractField): coretype = 'string' fields = ['doc_values', 'term_vector', 'norms', 'index_options', 'analyzer', 'index_analyzer', 'search_analyzer', 'include_in_all', 'ignore_above', 'position_offset_gap', 'fielddata', 'similarity'] defaults = {'analyzer': 'snowball'} def value(self, obj): val = super(StringField, self).value(obj) if val is None: return None return striptags(val) def __unicode__(self): return 'StringField' class NumberField(AbstractField): coretype = ['float', 'double', 'byte', 'short', 'integer', 'long'] fields = ['doc_values', 'precision_step', 'include_in_all', 'ignore_malformed', 'coerce'] def __unicode__(self): return 'NumberField' class DateField(AbstractField): coretype = 'date' fields = ['format', 'doc_values', 'precision_step', 'include_in_all', 'ignore_malformed'] def __unicode__(self): return 'DateField' class BooleanField(AbstractField): coretype = 'boolean' fields = [] # No specific fields. def __unicode__(self): return 'BooleanField' # Correspondence between a Django field and an elasticsearch field.
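The function above is a straight dispatch on Django's get_internal_type() string. One quirk worth flagging: ('BigIntegerField') is a plain string rather than a one-element tuple, so that membership test is really a substring check; it still behaves as intended here because 'IntegerField' is caught by an earlier branch. The sketch below restates the dispatch as a lookup table. It is a self-contained illustration only, does not import bungiesearch or Django, and DJANGO_TO_ES_CORETYPE and coretype_for are made-up names.

# Hypothetical, self-contained restatement of the django_field_to_index dispatch:
# Django internal type -> elasticsearch core type, with a string fallback.
DJANGO_TO_ES_CORETYPE = {
    'DateField': 'date', 'DateTimeField': 'date',
    'BooleanField': 'boolean', 'NullBooleanField': 'boolean',
    'DecimalField': 'float', 'FloatField': 'float',
    'PositiveSmallIntegerField': 'short', 'SmallIntegerField': 'short',
    'IntegerField': 'integer', 'PositiveIntegerField': 'integer', 'AutoField': 'integer',
    'BigIntegerField': 'long',
}

def coretype_for(dj_type):
    # Anything not listed falls back to a string field, as in the original function.
    return DJANGO_TO_ES_CORETYPE.get(dj_type, 'string')

print(coretype_for('DateTimeField'))  # date
print(coretype_for('CharField'))      # string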
ChristopherRabotin/bungiesearch
bungiesearch/fields.py
AbstractField.value
python
def value(self, obj):
    '''
    Computes the value of this field to update the index.
    :param obj: object instance, as a dictionary or as a model instance.
    '''
    if self.template_name:
        t = loader.select_template([self.template_name])
        return t.render(Context({'object': obj}))
    if self.eval_func:
        try:
            return eval(self.eval_func)
        except Exception as e:
            raise type(e)('Could not compute value of {} field (eval_as=`{}`): {}.'.format(unicode(self), self.eval_func, unicode(e)))
    elif self.model_attr:
        if isinstance(obj, dict):
            return obj[self.model_attr]
        current_obj = getattr(obj, self.model_attr)
        if callable(current_obj):
            return current_obj()
        else:
            return current_obj
    else:
        raise KeyError('{0} gets its value via a model attribute, an eval function, a template, or is prepared in a method '
                       'call but none of `model_attr`, `eval_as,` `template,` `prepare_{0}` is provided.'.format(unicode(self)))
Computes the value of this field to update the index. :param obj: object instance, as a dictionary or as a model instance.
train
https://github.com/ChristopherRabotin/bungiesearch/blob/13768342bc2698b214eb0003c2d113b6e273c30d/bungiesearch/fields.py#L67-L94
null
class AbstractField(object): ''' Represents an elasticsearch index field and values from given objects. Currently does not support binary fields, but those can be created by manually providing a dictionary. Values are extracted using the `model_attr` or `eval_as` attribute. ''' meta_fields = ['_index', '_uid', '_type', '_id'] common_fields = ['index_name', 'store', 'index', 'boost', 'null_value', 'copy_to', 'type', 'fields'] @property def fields(self): try: return self.fields except: raise NotImplementedError('Allowed fields are not defined.') @property def coretype(self): try: return self.coretype except: raise NotImplementedError('Core type is not defined!') @property def defaults(self): ''' Stores default values. ''' try: return self.defaults except: return {} def __init__(self, **args): ''' Performs several checks to ensure that the provided attributes are valid. Will not check their values. ''' if isinstance(self.coretype, list): if 'coretype' not in args: raise KeyError('{} can be represented as one of the following types: {}. Specify which to select as the `coretype` parameter.'.format(unicode(self), ', '.join(self.coretype))) if args['coretype'] not in self.coretype: raise KeyError('Core type {} is not supported by {}.'.format(args['coretype'], unicode(self))) self.type = args.pop('coretype') else: self.type = self.coretype self.model_attr = args.pop('model_attr', None) self.eval_func = args.pop('eval_as', None) self.template_name = args.pop('template', None) for attr, value in iteritems(args): if attr not in self.fields and attr not in AbstractField.common_fields: raise KeyError('Attribute `{}` is not allowed for core type {}.'.format(attr, self.coretype)) setattr(self, attr, value) for attr, value in iteritems(self.defaults): if not hasattr(self, attr): setattr(self, attr, value) def json(self): json = {} for attr, val in iteritems(self.__dict__): if attr in ('eval_func', 'model_attr', 'template_name'): continue elif attr in ('analyzer', 'index_analyzer', 'search_analyzer') and isinstance(val, Analyzer): json[attr] = val.to_dict() else: json[attr] = val return json
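As a reading aid for AbstractField.value above: the method resolves a value by trying template, then eval_as, then model_attr, and raises KeyError when none of them is set. The snippet below reproduces only the model_attr branch with stand-in objects so it runs without Django; resolve_model_attr and FakeArticle are hypothetical names, not part of bungiesearch.

# Minimal sketch of the model_attr branch: the object may be a dict (e.g. from a
# .values() queryset) or an instance whose attribute may itself be callable.
def resolve_model_attr(obj, model_attr):
    if isinstance(obj, dict):
        return obj[model_attr]
    current = getattr(obj, model_attr)
    return current() if callable(current) else current

class FakeArticle:
    title = 'Hello'
    def slug(self):
        return 'hello'

print(resolve_model_attr({'title': 'Hello'}, 'title'))  # Hello
print(resolve_model_attr(FakeArticle(), 'slug'))        # hello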
ChristopherRabotin/bungiesearch
bungiesearch/indices.py
ModelIndex.get_mapping
python
def get_mapping(self, meta_fields=True):
    '''
    Returns the mapping for the index as a dictionary.
    :param meta_fields: Also include elasticsearch meta fields in the dictionary.
    :return: a dictionary which can be used to generate the elasticsearch index mapping for this doctype.
    '''
    return {'properties': dict((name, field.json()) for name, field in iteritems(self.fields)
                               if meta_fields or name not in AbstractField.meta_fields)}
Returns the mapping for the index as a dictionary. :param meta_fields: Also include elasticsearch meta fields in the dictionary. :return: a dictionary which can be used to generate the elasticsearch index mapping for this doctype.
train
https://github.com/ChristopherRabotin/bungiesearch/blob/13768342bc2698b214eb0003c2d113b6e273c30d/bungiesearch/indices.py#L71-L78
null
class ModelIndex(object): ''' Introspects a model to generate an indexable mapping and methods to extract objects. Supports custom fields, including Python code, and all elasticsearch field types (apart from binary type). ModelIndex does efficient querying by only fetching from the database fields which are to be indexed. How to create an index? 1. Create a class which inherits from ModelIndex. 2. Define custom indexed fields as class attributes. Values must be instances AbstractField. Important info in 3b. 3. Define a `Meta` subclass, which must contain at least `model` as a class attribute. a. Optional class attributes: `fields`, `excludes` and `additional_fields`. b. If custom indexed field requires model attributes which are not in the difference between `fields` and `excludes`, these must be defined in `additional_fields`. ''' def __init__(self): # Introspect the model, adding/removing fields as needed. # Adds/Excludes should happen only if the fields are not already # defined in `self.fields`. try: _meta = getattr(self, 'Meta') except AttributeError: raise AttributeError('ModelIndex {} does not contain a Meta class.'.format(self.__class__.__name__)) self.model = getattr(_meta, 'model', None) self.fields = {} fields = getattr(_meta, 'fields', []) excludes = getattr(_meta, 'exclude', []) hotfixes = getattr(_meta, 'hotfixes', {}) additional_fields = getattr(_meta, 'additional_fields', []) id_field = getattr(_meta, 'id_field', 'id') self.updated_field = getattr(_meta, 'updated_field', None) self.optimize_queries = getattr(_meta, 'optimize_queries', False) self.is_default = getattr(_meta, 'default', True) self.indexing_query = getattr(_meta, 'indexing_query', None) # Add in fields from the model. self.fields.update(self._get_fields(fields, excludes, hotfixes)) # Elasticsearch uses '_id' to identify items uniquely, so let's duplicate that field. # We're duplicating it in order for devs to still perform searches on `.id` as expected. self.fields_to_fetch = list(set(self.fields.keys()).union(additional_fields)) # Adding or updating the fields which are defined at class level. for cls_attr, obj in iteritems(self.__class__.__dict__): if not isinstance(obj, AbstractField): continue if cls_attr in self.fields: logger.info('Overwriting implicitly defined model field {} ({}) its explicit definition: {}.'.format(cls_attr, text_type(self.fields[cls_attr]), text_type(obj))) self.fields[cls_attr] = obj self.fields['_id'] = self.fields[id_field] def matches_indexing_condition(self, item): ''' Returns True by default to index all documents. ''' return True def get_model(self): return self.model def collect_analysis(self): ''' :return: a dictionary which is used to get the serialized analyzer definition from the analyzer class. ''' analysis = {} for field in self.fields.values(): for analyzer_name in ('analyzer', 'index_analyzer', 'search_analyzer'): if not hasattr(field, analyzer_name): continue analyzer = getattr(field, analyzer_name) if not isinstance(analyzer, Analyzer): continue definition = analyzer.get_analysis_definition() if definition is None: continue for key in definition: analysis.setdefault(key, {}).update(definition[key]) return analysis def serialize_object(self, obj, obj_pk=None): ''' Serializes an object for it to be added to the index. :param obj: Object to be serialized. Optional if obj_pk is passed. :param obj_pk: Object primary key. Superseded by `obj` if available. :return: A dictionary representing the object as defined in the mapping. 
''' if not obj: try: # We're using `filter` followed by `values` in order to only fetch the required fields. obj = self.model.objects.filter(pk=obj_pk).values(*self.fields_to_fetch)[0] except Exception as e: raise ValueError('Could not find object of primary key = {} in model {} (model index class {}). (Original exception: {}.)'.format(obj_pk, self.model, self.__class__.__name__, e)) serialized_object = {} for name, field in iteritems(self.fields): if hasattr(self, "prepare_%s" % name): value = getattr(self, "prepare_%s" % name)(obj) else: value = field.value(obj) serialized_object[name] = value return serialized_object def _get_fields(self, fields, excludes, hotfixes): ''' Given any explicit fields to include and fields to exclude, add additional fields based on the associated model. If the field needs a hotfix, apply it. ''' final_fields = {} fields = fields or [] excludes = excludes or [] for f in self.model._meta.fields: # If the field name is already present, skip if f.name in self.fields: continue # If field is not present in explicit field listing, skip if fields and f.name not in fields: continue # If field is in exclude list, skip if excludes and f.name in excludes: continue # If field is a relation, skip. if getattr(f, 'rel'): continue attr = {'model_attr': f.name} if f.has_default(): attr['null_value'] = f.default if f.name in hotfixes: attr.update(hotfixes[f.name]) final_fields[f.name] = django_field_to_index(f, **attr) return final_fields def __str__(self): return '<{0.__class__.__name__}:{0.model.__name__}>'.format(self)
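For orientation, the dictionary returned by get_mapping has the general shape sketched below. The field names and attribute sets are invented; the exact keys per field depend on which attributes were passed to each AbstractField and on defaults such as StringField's snowball analyzer.

# Illustrative output shape only (assumed field names, not taken from a real index):
mapping = {
    'properties': {
        'title': {'type': 'string', 'analyzer': 'snowball'},
        'id': {'type': 'integer'},
    }
}
# With meta_fields=True, entries such as '_id' (duplicated from id_field in __init__)
# are included as well.
print(sorted(mapping['properties']))  # ['id', 'title']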
ChristopherRabotin/bungiesearch
bungiesearch/indices.py
ModelIndex.collect_analysis
python
def collect_analysis(self):
    '''
    :return: a dictionary which is used to get the serialized analyzer definition from the analyzer class.
    '''
    analysis = {}
    for field in self.fields.values():
        for analyzer_name in ('analyzer', 'index_analyzer', 'search_analyzer'):
            if not hasattr(field, analyzer_name):
                continue
            analyzer = getattr(field, analyzer_name)
            if not isinstance(analyzer, Analyzer):
                continue
            definition = analyzer.get_analysis_definition()
            if definition is None:
                continue
            for key in definition:
                analysis.setdefault(key, {}).update(definition[key])
    return analysis
:return: a dictionary which is used to get the serialized analyzer definition from the analyzer class.
train
https://github.com/ChristopherRabotin/bungiesearch/blob/13768342bc2698b214eb0003c2d113b6e273c30d/bungiesearch/indices.py#L80-L102
null
class ModelIndex(object): ''' Introspects a model to generate an indexable mapping and methods to extract objects. Supports custom fields, including Python code, and all elasticsearch field types (apart from binary type). ModelIndex does efficient querying by only fetching from the database fields which are to be indexed. How to create an index? 1. Create a class which inherits from ModelIndex. 2. Define custom indexed fields as class attributes. Values must be instances AbstractField. Important info in 3b. 3. Define a `Meta` subclass, which must contain at least `model` as a class attribute. a. Optional class attributes: `fields`, `excludes` and `additional_fields`. b. If custom indexed field requires model attributes which are not in the difference between `fields` and `excludes`, these must be defined in `additional_fields`. ''' def __init__(self): # Introspect the model, adding/removing fields as needed. # Adds/Excludes should happen only if the fields are not already # defined in `self.fields`. try: _meta = getattr(self, 'Meta') except AttributeError: raise AttributeError('ModelIndex {} does not contain a Meta class.'.format(self.__class__.__name__)) self.model = getattr(_meta, 'model', None) self.fields = {} fields = getattr(_meta, 'fields', []) excludes = getattr(_meta, 'exclude', []) hotfixes = getattr(_meta, 'hotfixes', {}) additional_fields = getattr(_meta, 'additional_fields', []) id_field = getattr(_meta, 'id_field', 'id') self.updated_field = getattr(_meta, 'updated_field', None) self.optimize_queries = getattr(_meta, 'optimize_queries', False) self.is_default = getattr(_meta, 'default', True) self.indexing_query = getattr(_meta, 'indexing_query', None) # Add in fields from the model. self.fields.update(self._get_fields(fields, excludes, hotfixes)) # Elasticsearch uses '_id' to identify items uniquely, so let's duplicate that field. # We're duplicating it in order for devs to still perform searches on `.id` as expected. self.fields_to_fetch = list(set(self.fields.keys()).union(additional_fields)) # Adding or updating the fields which are defined at class level. for cls_attr, obj in iteritems(self.__class__.__dict__): if not isinstance(obj, AbstractField): continue if cls_attr in self.fields: logger.info('Overwriting implicitly defined model field {} ({}) its explicit definition: {}.'.format(cls_attr, text_type(self.fields[cls_attr]), text_type(obj))) self.fields[cls_attr] = obj self.fields['_id'] = self.fields[id_field] def matches_indexing_condition(self, item): ''' Returns True by default to index all documents. ''' return True def get_model(self): return self.model def get_mapping(self, meta_fields=True): ''' Returns the mapping for the index as a dictionary. :param meta_fields: Also include elasticsearch meta fields in the dictionary. :return: a dictionary which can be used to generate the elasticsearch index mapping for this doctype. ''' return {'properties': dict((name, field.json()) for name, field in iteritems(self.fields) if meta_fields or name not in AbstractField.meta_fields)} def serialize_object(self, obj, obj_pk=None): ''' Serializes an object for it to be added to the index. :param obj: Object to be serialized. Optional if obj_pk is passed. :param obj_pk: Object primary key. Superseded by `obj` if available. :return: A dictionary representing the object as defined in the mapping. ''' if not obj: try: # We're using `filter` followed by `values` in order to only fetch the required fields. 
obj = self.model.objects.filter(pk=obj_pk).values(*self.fields_to_fetch)[0] except Exception as e: raise ValueError('Could not find object of primary key = {} in model {} (model index class {}). (Original exception: {}.)'.format(obj_pk, self.model, self.__class__.__name__, e)) serialized_object = {} for name, field in iteritems(self.fields): if hasattr(self, "prepare_%s" % name): value = getattr(self, "prepare_%s" % name)(obj) else: value = field.value(obj) serialized_object[name] = value return serialized_object def _get_fields(self, fields, excludes, hotfixes): ''' Given any explicit fields to include and fields to exclude, add additional fields based on the associated model. If the field needs a hotfix, apply it. ''' final_fields = {} fields = fields or [] excludes = excludes or [] for f in self.model._meta.fields: # If the field name is already present, skip if f.name in self.fields: continue # If field is not present in explicit field listing, skip if fields and f.name not in fields: continue # If field is in exclude list, skip if excludes and f.name in excludes: continue # If field is a relation, skip. if getattr(f, 'rel'): continue attr = {'model_attr': f.name} if f.has_default(): attr['null_value'] = f.default if f.name in hotfixes: attr.update(hotfixes[f.name]) final_fields[f.name] = django_field_to_index(f, **attr) return final_fields def __str__(self): return '<{0.__class__.__name__}:{0.model.__name__}>'.format(self)
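The core of collect_analysis above is the merge of per-field analysis definitions into a single dictionary keyed by section name. The snippet below isolates just that merge with made-up definition dictionaries; it is a sketch of the folding behaviour, not a call into elasticsearch_dsl.

# Assumed example definitions; real ones come from Analyzer.get_analysis_definition().
definitions = [
    {'analyzer': {'html_snowball': {'type': 'custom'}}},
    {'analyzer': {'raw_lower': {'type': 'custom'}}, 'filter': {'my_stop': {'type': 'stop'}}},
]
analysis = {}
for definition in definitions:
    for key in definition:
        # Later fields update earlier ones section by section.
        analysis.setdefault(key, {}).update(definition[key])
print(analysis)
# {'analyzer': {'html_snowball': ..., 'raw_lower': ...}, 'filter': {'my_stop': ...}}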
ChristopherRabotin/bungiesearch
bungiesearch/indices.py
ModelIndex.serialize_object
python
def serialize_object(self, obj, obj_pk=None):
    '''
    Serializes an object for it to be added to the index.
    :param obj: Object to be serialized. Optional if obj_pk is passed.
    :param obj_pk: Object primary key. Superseded by `obj` if available.
    :return: A dictionary representing the object as defined in the mapping.
    '''
    if not obj:
        try:
            # We're using `filter` followed by `values` in order to only fetch the required fields.
            obj = self.model.objects.filter(pk=obj_pk).values(*self.fields_to_fetch)[0]
        except Exception as e:
            raise ValueError('Could not find object of primary key = {} in model {} (model index class {}). (Original exception: {}.)'.format(obj_pk, self.model, self.__class__.__name__, e))
    serialized_object = {}
    for name, field in iteritems(self.fields):
        if hasattr(self, "prepare_%s" % name):
            value = getattr(self, "prepare_%s" % name)(obj)
        else:
            value = field.value(obj)
        serialized_object[name] = value
    return serialized_object
Serializes an object for it to be added to the index. :param obj: Object to be serialized. Optional if obj_pk is passed. :param obj_pk: Object primary key. Superseded by `obj` if available. :return: A dictionary representing the object as defined in the mapping.
train
https://github.com/ChristopherRabotin/bungiesearch/blob/13768342bc2698b214eb0003c2d113b6e273c30d/bungiesearch/indices.py#L104-L129
null
class ModelIndex(object): ''' Introspects a model to generate an indexable mapping and methods to extract objects. Supports custom fields, including Python code, and all elasticsearch field types (apart from binary type). ModelIndex does efficient querying by only fetching from the database fields which are to be indexed. How to create an index? 1. Create a class which inherits from ModelIndex. 2. Define custom indexed fields as class attributes. Values must be instances AbstractField. Important info in 3b. 3. Define a `Meta` subclass, which must contain at least `model` as a class attribute. a. Optional class attributes: `fields`, `excludes` and `additional_fields`. b. If custom indexed field requires model attributes which are not in the difference between `fields` and `excludes`, these must be defined in `additional_fields`. ''' def __init__(self): # Introspect the model, adding/removing fields as needed. # Adds/Excludes should happen only if the fields are not already # defined in `self.fields`. try: _meta = getattr(self, 'Meta') except AttributeError: raise AttributeError('ModelIndex {} does not contain a Meta class.'.format(self.__class__.__name__)) self.model = getattr(_meta, 'model', None) self.fields = {} fields = getattr(_meta, 'fields', []) excludes = getattr(_meta, 'exclude', []) hotfixes = getattr(_meta, 'hotfixes', {}) additional_fields = getattr(_meta, 'additional_fields', []) id_field = getattr(_meta, 'id_field', 'id') self.updated_field = getattr(_meta, 'updated_field', None) self.optimize_queries = getattr(_meta, 'optimize_queries', False) self.is_default = getattr(_meta, 'default', True) self.indexing_query = getattr(_meta, 'indexing_query', None) # Add in fields from the model. self.fields.update(self._get_fields(fields, excludes, hotfixes)) # Elasticsearch uses '_id' to identify items uniquely, so let's duplicate that field. # We're duplicating it in order for devs to still perform searches on `.id` as expected. self.fields_to_fetch = list(set(self.fields.keys()).union(additional_fields)) # Adding or updating the fields which are defined at class level. for cls_attr, obj in iteritems(self.__class__.__dict__): if not isinstance(obj, AbstractField): continue if cls_attr in self.fields: logger.info('Overwriting implicitly defined model field {} ({}) its explicit definition: {}.'.format(cls_attr, text_type(self.fields[cls_attr]), text_type(obj))) self.fields[cls_attr] = obj self.fields['_id'] = self.fields[id_field] def matches_indexing_condition(self, item): ''' Returns True by default to index all documents. ''' return True def get_model(self): return self.model def get_mapping(self, meta_fields=True): ''' Returns the mapping for the index as a dictionary. :param meta_fields: Also include elasticsearch meta fields in the dictionary. :return: a dictionary which can be used to generate the elasticsearch index mapping for this doctype. ''' return {'properties': dict((name, field.json()) for name, field in iteritems(self.fields) if meta_fields or name not in AbstractField.meta_fields)} def collect_analysis(self): ''' :return: a dictionary which is used to get the serialized analyzer definition from the analyzer class. 
''' analysis = {} for field in self.fields.values(): for analyzer_name in ('analyzer', 'index_analyzer', 'search_analyzer'): if not hasattr(field, analyzer_name): continue analyzer = getattr(field, analyzer_name) if not isinstance(analyzer, Analyzer): continue definition = analyzer.get_analysis_definition() if definition is None: continue for key in definition: analysis.setdefault(key, {}).update(definition[key]) return analysis def _get_fields(self, fields, excludes, hotfixes): ''' Given any explicit fields to include and fields to exclude, add additional fields based on the associated model. If the field needs a hotfix, apply it. ''' final_fields = {} fields = fields or [] excludes = excludes or [] for f in self.model._meta.fields: # If the field name is already present, skip if f.name in self.fields: continue # If field is not present in explicit field listing, skip if fields and f.name not in fields: continue # If field is in exclude list, skip if excludes and f.name in excludes: continue # If field is a relation, skip. if getattr(f, 'rel'): continue attr = {'model_attr': f.name} if f.has_default(): attr['null_value'] = f.default if f.name in hotfixes: attr.update(hotfixes[f.name]) final_fields[f.name] = django_field_to_index(f, **attr) return final_fields def __str__(self): return '<{0.__class__.__name__}:{0.model.__name__}>'.format(self)
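serialize_object above gives per-field prepare_<name> methods precedence over the field's own value() extraction. The stub classes below mirror that precedence rule in isolation; StubField and StubIndex are illustrative stand-ins, not bungiesearch classes.

class StubField:
    def value(self, obj):
        return obj['title']

class StubIndex:
    fields = {'title': StubField()}
    def prepare_title(self, obj):
        # A prepare_<field> hook wins over StubField.value, as in serialize_object.
        return obj['title'].upper()
    def serialize(self, obj):
        out = {}
        for name, field in self.fields.items():
            if hasattr(self, 'prepare_%s' % name):
                out[name] = getattr(self, 'prepare_%s' % name)(obj)
            else:
                out[name] = field.value(obj)
        return out

print(StubIndex().serialize({'title': 'hello'}))  # {'title': 'HELLO'}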
ChristopherRabotin/bungiesearch
bungiesearch/indices.py
ModelIndex._get_fields
python
def _get_fields(self, fields, excludes, hotfixes):
    '''
    Given any explicit fields to include and fields to exclude, add additional fields based on the associated model.
    If the field needs a hotfix, apply it.
    '''
    final_fields = {}
    fields = fields or []
    excludes = excludes or []
    for f in self.model._meta.fields:
        # If the field name is already present, skip
        if f.name in self.fields:
            continue
        # If field is not present in explicit field listing, skip
        if fields and f.name not in fields:
            continue
        # If field is in exclude list, skip
        if excludes and f.name in excludes:
            continue
        # If field is a relation, skip.
        if getattr(f, 'rel'):
            continue
        attr = {'model_attr': f.name}
        if f.has_default():
            attr['null_value'] = f.default
        if f.name in hotfixes:
            attr.update(hotfixes[f.name])
        final_fields[f.name] = django_field_to_index(f, **attr)
    return final_fields
Given any explicit fields to include and fields to exclude, add additional fields based on the associated model. If the field needs a hotfix, apply it.
train
https://github.com/ChristopherRabotin/bungiesearch/blob/13768342bc2698b214eb0003c2d113b6e273c30d/bungiesearch/indices.py#L131-L166
null
class ModelIndex(object): ''' Introspects a model to generate an indexable mapping and methods to extract objects. Supports custom fields, including Python code, and all elasticsearch field types (apart from binary type). ModelIndex does efficient querying by only fetching from the database fields which are to be indexed. How to create an index? 1. Create a class which inherits from ModelIndex. 2. Define custom indexed fields as class attributes. Values must be instances AbstractField. Important info in 3b. 3. Define a `Meta` subclass, which must contain at least `model` as a class attribute. a. Optional class attributes: `fields`, `excludes` and `additional_fields`. b. If custom indexed field requires model attributes which are not in the difference between `fields` and `excludes`, these must be defined in `additional_fields`. ''' def __init__(self): # Introspect the model, adding/removing fields as needed. # Adds/Excludes should happen only if the fields are not already # defined in `self.fields`. try: _meta = getattr(self, 'Meta') except AttributeError: raise AttributeError('ModelIndex {} does not contain a Meta class.'.format(self.__class__.__name__)) self.model = getattr(_meta, 'model', None) self.fields = {} fields = getattr(_meta, 'fields', []) excludes = getattr(_meta, 'exclude', []) hotfixes = getattr(_meta, 'hotfixes', {}) additional_fields = getattr(_meta, 'additional_fields', []) id_field = getattr(_meta, 'id_field', 'id') self.updated_field = getattr(_meta, 'updated_field', None) self.optimize_queries = getattr(_meta, 'optimize_queries', False) self.is_default = getattr(_meta, 'default', True) self.indexing_query = getattr(_meta, 'indexing_query', None) # Add in fields from the model. self.fields.update(self._get_fields(fields, excludes, hotfixes)) # Elasticsearch uses '_id' to identify items uniquely, so let's duplicate that field. # We're duplicating it in order for devs to still perform searches on `.id` as expected. self.fields_to_fetch = list(set(self.fields.keys()).union(additional_fields)) # Adding or updating the fields which are defined at class level. for cls_attr, obj in iteritems(self.__class__.__dict__): if not isinstance(obj, AbstractField): continue if cls_attr in self.fields: logger.info('Overwriting implicitly defined model field {} ({}) its explicit definition: {}.'.format(cls_attr, text_type(self.fields[cls_attr]), text_type(obj))) self.fields[cls_attr] = obj self.fields['_id'] = self.fields[id_field] def matches_indexing_condition(self, item): ''' Returns True by default to index all documents. ''' return True def get_model(self): return self.model def get_mapping(self, meta_fields=True): ''' Returns the mapping for the index as a dictionary. :param meta_fields: Also include elasticsearch meta fields in the dictionary. :return: a dictionary which can be used to generate the elasticsearch index mapping for this doctype. ''' return {'properties': dict((name, field.json()) for name, field in iteritems(self.fields) if meta_fields or name not in AbstractField.meta_fields)} def collect_analysis(self): ''' :return: a dictionary which is used to get the serialized analyzer definition from the analyzer class. 
''' analysis = {} for field in self.fields.values(): for analyzer_name in ('analyzer', 'index_analyzer', 'search_analyzer'): if not hasattr(field, analyzer_name): continue analyzer = getattr(field, analyzer_name) if not isinstance(analyzer, Analyzer): continue definition = analyzer.get_analysis_definition() if definition is None: continue for key in definition: analysis.setdefault(key, {}).update(definition[key]) return analysis def serialize_object(self, obj, obj_pk=None): ''' Serializes an object for it to be added to the index. :param obj: Object to be serialized. Optional if obj_pk is passed. :param obj_pk: Object primary key. Superseded by `obj` if available. :return: A dictionary representing the object as defined in the mapping. ''' if not obj: try: # We're using `filter` followed by `values` in order to only fetch the required fields. obj = self.model.objects.filter(pk=obj_pk).values(*self.fields_to_fetch)[0] except Exception as e: raise ValueError('Could not find object of primary key = {} in model {} (model index class {}). (Original exception: {}.)'.format(obj_pk, self.model, self.__class__.__name__, e)) serialized_object = {} for name, field in iteritems(self.fields): if hasattr(self, "prepare_%s" % name): value = getattr(self, "prepare_%s" % name)(obj) else: value = field.value(obj) serialized_object[name] = value return serialized_object def __str__(self): return '<{0.__class__.__name__}:{0.model.__name__}>'.format(self)
timothycrosley/deprecated.frosted
frosted/reporter.py
Reporter.flake
python
def flake(self, message):
    self.stdout.write(str(message))
    self.stdout.write('\n')
Print an error message to stdout.
train
https://github.com/timothycrosley/deprecated.frosted/blob/61ba7f341fc55676c3580c8c4e52117986cd5e12/frosted/reporter.py#L34-L37
null
class Reporter(namedtuple('Reporter', ('stdout', 'stderr'))):
    """Formats the results of frosted checks and then presents them to the user."""

    def unexpected_error(self, filename, msg):
        """Output an unexpected_error specific to the provided filename."""
        self.stderr.write("%s: %s\n" % (filename, msg))
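Since Reporter is just a namedtuple of two writable streams, flake can be exercised with any file-like objects. The usage sketch below re-declares a minimal Reporter locally, copying the two-line flake body from the record, so it runs without frosted installed.

import io
from collections import namedtuple

class Reporter(namedtuple('Reporter', ('stdout', 'stderr'))):
    def flake(self, message):
        # Same body as the flake() shown above: message to stdout, then a newline.
        self.stdout.write(str(message))
        self.stdout.write('\n')

out = io.StringIO()
Reporter(out, io.StringIO()).flake('example warning')
print(out.getvalue(), end='')  # example warning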
timothycrosley/deprecated.frosted
frosted/checker.py
node_name
python
def node_name(node):
    return hasattr(node, 'id') and node.id or hasattr(node, 'name') and node.name
Convenience function: Returns node.id, or node.name, or None
train
https://github.com/timothycrosley/deprecated.frosted/blob/61ba7f341fc55676c3580c8c4e52117986cd5e12/frosted/checker.py#L39-L43
null
"""frosted/checker.py. The core functionality of frosted lives here. Implements the core checking capability models Bindings and Scopes Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ from __future__ import absolute_import, division, print_function, unicode_literals import builtins import doctest import itertools import os import pkg_resources import sys from pies import ast from pies.overrides import * from frosted import messages PY34_GTE = sys.version_info >= (3, 4) FROSTED_BUILTINS = set(dir(builtins) + ['__file__', '__builtins__', '__debug__', '__name__', 'WindowsError', '__import__'] + os.environ.get('PYFLAKES_BUILTINS', '').split(',')) class Binding(object): """Represents the binding of a value to a name. The checker uses this to keep track of which names have been bound and which names have not. See Assignment for a special type of binding that is checked with stricter rules. """ __slots__ = ('name', 'source', 'used') def __init__(self, name, source): self.name = name self.source = source self.used = False def __str__(self): return self.name def __repr__(self): return '<%s object %r from line %r at 0x%x>' % (self.__class__.__name__, self.name, self.source.lineno, id(self)) class Importation(Binding): """A binding created by an import statement.""" __slots__ = ('fullName', ) def __init__(self, name, source): self.fullName = name name = name.split('.')[0] super(Importation, self).__init__(name, source) class Argument(Binding): """Represents binding a name as an argument.""" __slots__ = () class Definition(Binding): """A binding that defines a function or a class.""" __slots__ = () class Assignment(Binding): """Represents binding a name with an explicit assignment. The checker will raise warnings for any Assignment that isn't used. Also, the checker does not consider assignments in tuple/list unpacking to be Assignments, rather it treats them as simple Bindings. """ __slots__ = () class FunctionDefinition(Definition): __slots__ = ('signature', ) def __init__(self, name, source): super(FunctionDefinition, self).__init__(name, source) self.signature = FunctionSignature(source) class ClassDefinition(Definition): __slots__ = () class ExportBinding(Binding): """A binding created by an __all__ assignment. If the names in the list can be determined statically, they will be treated as names for export and additional checking applied to them. The only __all__ assignment that can be recognized is one which takes the value of a literal list containing literal strings. 
For example: __all__ = ["foo", "bar"] Names which are imported and not otherwise used but appear in the value of __all__ will not have an unused import warning reported for them. """ __slots__ = () def names(self): """Return a list of the names referenced by this binding.""" names = [] if isinstance(self.source, ast.List): for node in self.source.elts: if isinstance(node, ast.Str): names.append(node.s) return names class Scope(dict): importStarred = False # set to True when import * is found def __repr__(self): scope_cls = self.__class__.__name__ return '<%s at 0x%x %s>' % (scope_cls, id(self), dict.__repr__(self)) class ClassScope(Scope): pass class FunctionScope(Scope): """Represents the name scope for a function.""" uses_locals = False always_used = set(['__tracebackhide__', '__traceback_info__', '__traceback_supplement__']) def __init__(self): Scope.__init__(self) self.globals = self.always_used.copy() def unusedAssignments(self): """Return a generator for the assignments which have not been used.""" for name, binding in self.items(): if (not binding.used and name not in self.globals and not self.uses_locals and isinstance(binding, Assignment)): yield name, binding class GeneratorScope(Scope): pass class ModuleScope(Scope): pass class FunctionSignature(object): __slots__ = ('decorated', 'argument_names', 'default_count', 'kw_only_argument_names', 'default_count', 'kw_only_argument_names', 'kw_only_default_count', 'has_var_arg', 'has_kw_arg') def __init__(self, node): self.decorated = bool(any(node.decorator_list)) self.argument_names = ast.argument_names(node) self.default_count = len(node.args.defaults) self.kw_only_argument_names = ast.kw_only_argument_names(node) self.kw_only_default_count = ast.kw_only_default_count(node) self.has_var_arg = node.args.vararg is not None self.has_kw_arg = node.args.kwarg is not None def min_argument_count(self): return len(self.argument_names) - self.default_count def maxArgumentCount(self): return len(self.argument_names) def checkCall(self, call_node, reporter, name): if self.decorated: return filledSlots = set() filledKwOnlySlots = set() for item, arg in enumerate(call_node.args): if item >= len(self.argument_names): if not self.has_var_arg: return reporter.report(messages.TooManyArguments, call_node, name, self.maxArgumentCount()) break filledSlots.add(item) for kw in call_node.keywords: slots = None try: argIndex = self.argument_names.index(kw.arg) slots = filledSlots except ValueError: try: argIndex = self.kw_only_argument_names.index(kw.arg) slots = filledKwOnlySlots except ValueError: if self.has_kw_arg: continue else: return reporter.report(messages.UnexpectedArgument, call_node, name, kw.arg) if argIndex in slots: return reporter.report(messages.MultipleValuesForArgument, call_node, name, kw.arg) slots.add(argIndex) filledSlots.update(range(len(self.argument_names) - self.default_count, len(self.argument_names))) filledKwOnlySlots.update(range(len(self.kw_only_argument_names) - self.kw_only_default_count, len(self.kw_only_argument_names))) if (len(filledSlots) < len(self.argument_names) and not call_node.starargs and not call_node.kwargs): return reporter.report(messages.TooFewArguments, call_node, name, self.min_argument_count()) if (len(filledKwOnlySlots) < len(self.kw_only_argument_names) and not call_node.kwargs): missing_arguments = [repr(arg) for i, arg in enumerate(self.kw_only_argument_names) if i not in filledKwOnlySlots] return reporter.report(messages.NeedKwOnlyArgument, call_node, name, ', '.join(missing_arguments)) class 
Checker(object): """The core of frosted, checks the cleanliness and sanity of Python code.""" node_depth = 0 offset = None trace_tree = False frosted_builtins = FROSTED_BUILTINS def __init__(self, tree, filename='(none)', builtins=None, ignore_lines=(), **settings): self.settings = settings self.ignore_errors = settings.get('ignore_frosted_errors', []) self.ignore_lines = ignore_lines file_specific_ignores = settings.get('ignore_frosted_errors_for_' + (os.path.basename(filename) or ""), None) if file_specific_ignores: self.ignore_errors += file_specific_ignores self._node_handlers = {} self._deferred_functions = [] self._deferred_assignments = [] self.dead_scopes = [] self.messages = [] self.filename = filename if builtins: self.frosted_builtins = self.frosted_builtins.union(builtins) self.scope_stack = [ModuleScope()] self.except_handlers = [()] self.futures_allowed = True self.root = tree self.handle_children(tree) self.run_deferred(self._deferred_functions) self._deferred_functions = None self.run_deferred(self._deferred_assignments) self._deferred_assignments = None del self.scope_stack[1:] self.pop_scope() self.check_dead_scopes() self.check_plugins() def check_plugins(self): """ collect plugins from entry point 'frosted.plugins' and run their check() method, passing the filename """ checkers = {} for ep in pkg_resources.iter_entry_points(group='frosted.plugins'): checkers.update({ep.name: ep.load()}) for plugin_name, plugin in checkers.items(): if self.filename != '(none)': messages = plugin.check(self.filename) for message, loc, args, kwargs in messages: self.report(message, loc, *args, **kwargs) def defer_function(self, callable): """Schedule a function handler to be called just before completion. This is used for handling function bodies, which must be deferred because code later in the file might modify the global scope. When 'callable' is called, the scope at the time this is called will be restored, however it will contain any new bindings added to it. 
""" self._deferred_functions.append((callable, self.scope_stack[:], self.offset)) def defer_assignment(self, callable): """Schedule an assignment handler to be called just after deferred function handlers.""" self._deferred_assignments.append((callable, self.scope_stack[:], self.offset)) def run_deferred(self, deferred): """Run the callables in deferred using their associated scope stack.""" for handler, scope, offset in deferred: self.scope_stack = scope self.offset = offset handler() @property def scope(self): return self.scope_stack[-1] def pop_scope(self): self.dead_scopes.append(self.scope_stack.pop()) def check_dead_scopes(self): """Look at scopes which have been fully examined and report names in them which were imported but unused.""" for scope in self.dead_scopes: export = isinstance(scope.get('__all__'), ExportBinding) if export: all = scope['__all__'].names() # Look for possible mistakes in the export list if not scope.importStarred and os.path.basename(self.filename) != '__init__.py': undefined = set(all) - set(scope) for name in undefined: self.report(messages.UndefinedExport, scope['__all__'].source, name) else: all = [] # Look for imported names that aren't used without checking imports in namespace definition for importation in scope.values(): if isinstance(importation, Importation) and not importation.used and importation.name not in all: self.report(messages.UnusedImport, importation.source, importation.name) def push_scope(self, scope_class=FunctionScope): self.scope_stack.append(scope_class()) def push_function_scope(self): # XXX Deprecated self.push_scope(FunctionScope) def push_class_scope(self): # XXX Deprecated self.push_scope(ClassScope) def report(self, message_class, *args, **kwargs): error_code = message_class.error_code if(not error_code[:2] + "00" in self.ignore_errors and not error_code in self.ignore_errors and not str(message_class.error_number) in self.ignore_errors): kwargs['verbose'] = self.settings.get('verbose') message = message_class(self.filename, *args, **kwargs) if message.lineno not in self.ignore_lines: self.messages.append(message) def has_parent(self, node, kind): while hasattr(node, 'parent'): node = node.parent if isinstance(node, kind): return True def get_common_ancestor(self, lnode, rnode, stop=None): stop = stop or self.root if lnode is rnode: return lnode if stop in (lnode, rnode): return stop if not hasattr(lnode, 'parent') or not hasattr(rnode, 'parent'): return if (lnode.level > rnode.level): return self.get_common_ancestor(lnode.parent, rnode, stop) if (rnode.level > lnode.level): return self.get_common_ancestor(lnode, rnode.parent, stop) return self.get_common_ancestor(lnode.parent, rnode.parent, stop) def descendant_of(self, node, ancestors, stop=None): for ancestor in ancestors: if self.get_common_ancestor(node, ancestor, stop) not in (stop, None): return True return False def on_fork(self, parent, lnode, rnode, items): return (self.descendant_of(lnode, items, parent) ^ self.descendant_of(rnode, items, parent)) def different_forks(self, lnode, rnode): """True, if lnode and rnode are located on different forks of IF/TRY.""" ancestor = self.get_common_ancestor(lnode, rnode) if isinstance(ancestor, ast.If): for fork in (ancestor.body, ancestor.orelse): if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, ast.Try): body = ancestor.body + ancestor.orelse for fork in [body] + [[hdl] for hdl in ancestor.handlers]: if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, 
ast.TryFinally): if self.on_fork(ancestor, lnode, rnode, ancestor.body): return True return False def add_binding(self, node, value, report_redef=True): """Called when a binding is altered. - `node` is the statement responsible for the change - `value` is the optional new value, a Binding instance, associated with the binding; if None, the binding is deleted if it exists. - if `report_redef` is True (default), rebinding while unused will be reported. """ redefinedWhileUnused = False if not isinstance(self.scope, ClassScope): for scope in self.scope_stack[::-1]: existing = scope.get(value.name) if (isinstance(existing, Importation) and not existing.used and (not isinstance(value, Importation) or value.fullName == existing.fullName) and report_redef and not self.different_forks(node, existing.source)): redefinedWhileUnused = True self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) existing = self.scope.get(value.name) if not redefinedWhileUnused and self.has_parent(value.source, ast.ListComp): if (existing and report_redef and not self.has_parent(existing.source, (ast.For, ast.ListComp)) and not self.different_forks(node, existing.source)): self.report(messages.RedefinedInListComp, node, value.name, existing.source) if (isinstance(existing, Definition) and not existing.used and not self.different_forks(node, existing.source)): self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) else: self.scope[value.name] = value def get_node_handler(self, node_class): try: return self._node_handlers[node_class] except KeyError: nodeType = str(node_class.__name__).upper() self._node_handlers[node_class] = handler = getattr(self, nodeType) return handler def iter_visible_scopes(self): outerScopes = itertools.islice(self.scope_stack, len(self.scope_stack) - 1) scopes = [scope for scope in outerScopes if isinstance(scope, (FunctionScope, ModuleScope))] if (isinstance(self.scope, GeneratorScope) and scopes[-1] != self.scope_stack[-2]): scopes.append(self.scope_stack[-2]) scopes.append(self.scope_stack[-1]) return iter(reversed(scopes)) def handle_node_load(self, node): name = node_name(node) if not name: return importStarred = False for scope in self.iter_visible_scopes(): importStarred = importStarred or scope.importStarred try: scope[name].used = (self.scope, node) except KeyError: pass else: return # look in the built-ins if importStarred or name in self.frosted_builtins: return if name == '__path__' and os.path.basename(self.filename) == '__init__.py': # the special name __path__ is valid only in packages return # protected with a NameError handler? 
if 'NameError' not in self.except_handlers[-1]: self.report(messages.UndefinedName, node, name) def handle_node_store(self, node): name = node_name(node) if not name: return # if the name hasn't already been defined in the current scope if isinstance(self.scope, FunctionScope) and name not in self.scope: # for each function or module scope above us for scope in self.scope_stack[:-1]: if not isinstance(scope, (FunctionScope, ModuleScope)): continue # if the name was defined in that scope, and the name has # been accessed already in the current scope, and hasn't # been declared global used = name in scope and scope[name].used if used and used[0] is self.scope and name not in self.scope.globals: # then it's probably a mistake self.report(messages.UndefinedLocal, scope[name].used[1], name, scope[name].source) break parent = getattr(node, 'parent', None) if isinstance(parent, (ast.For, ast.comprehension, ast.Tuple, ast.List)): binding = Binding(name, node) elif (parent is not None and name == '__all__' and isinstance(self.scope, ModuleScope)): binding = ExportBinding(name, parent.value) else: binding = Assignment(name, node) if name in self.scope: binding.used = self.scope[name].used self.add_binding(node, binding) def handle_node_delete(self, node): name = node_name(node) if not name: return if isinstance(self.scope, FunctionScope) and name in self.scope.globals: self.scope.globals.remove(name) else: try: del self.scope[name] except KeyError: self.report(messages.UndefinedName, node, name) def handle_children(self, tree): for node in ast.iter_child_nodes(tree): self.handleNode(node, tree) def is_docstring(self, node): """Determine if the given node is a docstring, as long as it is at the correct place in the node tree.""" return isinstance(node, ast.Str) or (isinstance(node, ast.Expr) and isinstance(node.value, ast.Str)) def docstring(self, node): if isinstance(node, ast.Expr): node = node.value if not isinstance(node, ast.Str): return (None, None) # Computed incorrectly if the docstring has backslash doctest_lineno = node.lineno - node.s.count('\n') - 1 return (node.s, doctest_lineno) def handleNode(self, node, parent): if node is None: return if self.offset and getattr(node, 'lineno', None) is not None: node.lineno += self.offset[0] node.col_offset += self.offset[1] if self.trace_tree: print(' ' * self.node_depth + node.__class__.__name__) if self.futures_allowed and not (isinstance(node, ast.ImportFrom) or self.is_docstring(node)): self.futures_allowed = False self.node_depth += 1 node.level = self.node_depth node.parent = parent try: handler = self.get_node_handler(node.__class__) handler(node) finally: self.node_depth -= 1 if self.trace_tree: print(' ' * self.node_depth + 'end ' + node.__class__.__name__) _get_doctest_examples = doctest.DocTestParser().get_examples def handle_doctests(self, node): try: docstring, node_lineno = self.docstring(node.body[0]) if not docstring: return examples = self._get_doctest_examples(docstring) except (ValueError, IndexError): # e.g. line 6 of the docstring for <string> has inconsistent # leading whitespace: ... 
return node_offset = self.offset or (0, 0) self.push_scope() for example in examples: try: tree = compile(example.source, "<doctest>", "exec", ast.PyCF_ONLY_AST) except SyntaxError: e = sys.exc_info()[1] position = (node_lineno + example.lineno + e.lineno, example.indent + 4 + (e.offset or 0)) self.report(messages.DoctestSyntaxError, node, position) else: self.offset = (node_offset[0] + node_lineno + example.lineno, node_offset[1] + example.indent + 4) self.handle_children(tree) self.offset = node_offset self.pop_scope() def find_return_with_argument(self, node): """Finds and returns a return statment that has an argument. Note that we should use node.returns in Python 3, but this method is never called in Python 3 so we don't bother checking. """ for item in node.body: if isinstance(item, ast.Return) and item.value: return item elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): return_with_argument = self.find_return_with_argument(item) if return_with_argument: return return_with_argument def is_generator(self, node): """Checks whether a function is a generator by looking for a yield statement or expression.""" if not isinstance(node.body, list): # lambdas can not be generators return False for item in node.body: if isinstance(item, (ast.Assign, ast.Expr)): if isinstance(item.value, ast.Yield): return True elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): if self.is_generator(item): return True return False def ignore(self, node): pass # "stmt" type nodes RETURN = DELETE = PRINT = WHILE = IF = WITH = WITHITEM = RAISE = TRYFINALLY = ASSERT = EXEC = EXPR = handle_children CONTINUE = BREAK = PASS = ignore # "expr" type nodes BOOLOP = BINOP = UNARYOP = IFEXP = DICT = SET = YIELD = YIELDFROM = COMPARE = REPR = ATTRIBUTE = SUBSCRIPT = \ LIST = TUPLE = STARRED = NAMECONSTANT = handle_children NUM = STR = BYTES = ELLIPSIS = ignore # "slice" type nodes SLICE = EXTSLICE = INDEX = handle_children # expression contexts are node instances too, though being constants LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore # same for operators AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = BITOR = BITXOR = BITAND = FLOORDIV = INVERT = \ NOT = UADD = USUB = EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = ignore # additional node types COMPREHENSION = KEYWORD = handle_children def GLOBAL(self, node): """Keep track of globals declarations.""" if isinstance(self.scope, FunctionScope): self.scope.globals.update(node.names) NONLOCAL = GLOBAL def LISTCOMP(self, node): # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) def GENERATOREXP(self, node): self.push_scope(GeneratorScope) # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) self.pop_scope() SETCOMP = GENERATOREXP def DICTCOMP(self, node): self.push_scope(GeneratorScope) for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.key, node) self.handleNode(node.value, node) self.pop_scope() def FOR(self, node): """Process bindings for loop variables.""" vars = [] def collectLoopVars(n): if isinstance(n, ast.Name): vars.append(n.id) elif isinstance(n, ast.expr_context): return else: for c in ast.iter_child_nodes(n): collectLoopVars(c) collectLoopVars(node.target) for varn in vars: if (isinstance(self.scope.get(varn), Importation) # unused ones will get an unused import warning and self.scope[varn].used): 
self.report(messages.ImportShadowedByLoopVar, node, varn, self.scope[varn].source) self.handle_children(node) def NAME(self, node): """Handle occurrence of Name (which can be a load/store/delete access.)""" # Locate the name in locals / function / globals scopes. if isinstance(node.ctx, (ast.Load, ast.AugLoad)): self.handle_node_load(node) if (node.id == 'locals' and isinstance(self.scope, FunctionScope) and isinstance(node.parent, ast.Call)): # we are doing locals() call in current scope self.scope.uses_locals = True elif isinstance(node.ctx, (ast.Store, ast.AugStore)): self.handle_node_store(node) elif isinstance(node.ctx, ast.Del): self.handle_node_delete(node) else: # must be a Param context -- this only happens for names in function # arguments, but these aren't dispatched through here raise RuntimeError("Got impossible expression context: %r" % (node.ctx,)) def CALL(self, node): f = node.func if isinstance(f, ast.Name): for scope in self.iter_visible_scopes(): definition = scope.get(f.id) if definition: if isinstance(definition, FunctionDefinition): definition.signature.checkCall(node, self, f.id) break self.handle_children(node) def FUNCTIONDEF(self, node): for deco in node.decorator_list: self.handleNode(deco, node) self.add_binding(node, FunctionDefinition(node.name, node)) self.LAMBDA(node) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) def LAMBDA(self, node): args = [] annotations = [] if PY2: def addArgs(arglist): for arg in arglist: if isinstance(arg, ast.Tuple): addArgs(arg.elts) else: if arg.id in args: self.report(messages.DuplicateArgument, node, arg.id) args.append(arg.id) addArgs(node.args.args) defaults = node.args.defaults else: for arg in node.args.args + node.args.kwonlyargs: annotations.append(arg.annotation) args.append(arg.arg) defaults = node.args.defaults + node.args.kw_defaults # Only for Python3 FunctionDefs is_py3_func = hasattr(node, 'returns') for arg_name in ('vararg', 'kwarg'): wildcard = getattr(node.args, arg_name) if not wildcard: continue args.append(getattr(wildcard, 'arg', wildcard)) if is_py3_func: if PY34_GTE: annotations.append(wildcard.annotation) else: argannotation = arg_name + 'annotation' annotations.append(getattr(node.args, argannotation)) if is_py3_func: annotations.append(node.returns) if PY3: if len(set(args)) < len(args): for (idx, arg) in enumerate(args): if arg in args[:idx]: self.report(messages.DuplicateArgument, node, arg) for child in annotations + defaults: if child: self.handleNode(child, node) def runFunction(): self.push_scope() for name in args: self.add_binding(node, Argument(name, node), report_redef=False) if isinstance(node.body, list): # case for FunctionDefs for stmt in node.body: self.handleNode(stmt, node) else: # case for Lambdas self.handleNode(node.body, node) def checkUnusedAssignments(): """Check to see if any assignments have not been used.""" for name, binding in self.scope.unusedAssignments(): self.report(messages.UnusedVariable, binding.source, name) self.defer_assignment(checkUnusedAssignments) if PY2: def checkReturnWithArgumentInsideGenerator(): """Check to see if there are any return statements with arguments but the function is a generator.""" if self.is_generator(node): stmt = self.find_return_with_argument(node) if stmt is not None: self.report(messages.ReturnWithArgsInsideGenerator, stmt) self.defer_assignment(checkReturnWithArgumentInsideGenerator) self.pop_scope() self.defer_function(runFunction) def CLASSDEF(self, node): """Check names used in 
a class definition, including its decorators, base classes, and the body of its definition. Additionally, add its name to the current scope. """ for deco in node.decorator_list: self.handleNode(deco, node) for baseNode in node.bases: self.handleNode(baseNode, node) if not PY2: for keywordNode in node.keywords: self.handleNode(keywordNode, node) self.push_scope(ClassScope) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) for stmt in node.body: self.handleNode(stmt, node) self.pop_scope() self.add_binding(node, ClassDefinition(node.name, node)) def ASSIGN(self, node): self.handleNode(node.value, node) for target in node.targets: self.handleNode(target, node) def AUGASSIGN(self, node): self.handle_node_load(node.target) self.handleNode(node.value, node) self.handleNode(node.target, node) def IMPORT(self, node): for alias in node.names: name = alias.asname or alias.name importation = Importation(name, node) self.add_binding(node, importation) def IMPORTFROM(self, node): if node.module == '__future__': if not self.futures_allowed: self.report(messages.LateFutureImport, node, [n.name for n in node.names]) else: self.futures_allowed = False for alias in node.names: if alias.name == '*': self.scope.importStarred = True self.report(messages.ImportStarUsed, node, node.module) continue name = alias.asname or alias.name importation = Importation(name, node) if node.module == '__future__': importation.used = (self.scope, node) self.add_binding(node, importation) def TRY(self, node): handler_names = [] # List the exception handlers for handler in node.handlers: if isinstance(handler.type, ast.Tuple): for exc_type in handler.type.elts: handler_names.append(node_name(exc_type)) elif handler.type: handler_names.append(node_name(handler.type)) # Memorize the except handlers and process the body self.except_handlers.append(handler_names) for child in node.body: self.handleNode(child, node) self.except_handlers.pop() # Process the other nodes: "except:", "else:", "finally:" for child in ast.iter_child_nodes(node): if child not in node.body: self.handleNode(child, node) TRYEXCEPT = TRY def EXCEPTHANDLER(self, node): # 3.x: in addition to handling children, we must handle the name of # the exception, which is not a Name node, but a simple string. if node.type is None: self.report(messages.BareExcept, node) if isinstance(node.name, str): self.handle_node_store(node) self.handle_children(node)
timothycrosley/deprecated.frosted
frosted/checker.py
ExportBinding.names
python
def names(self):
    names = []
    if isinstance(self.source, ast.List):
        for node in self.source.elts:
            if isinstance(node, ast.Str):
                names.append(node.s)
    return names
Return a list of the names referenced by this binding.
train
https://github.com/timothycrosley/deprecated.frosted/blob/61ba7f341fc55676c3580c8c4e52117986cd5e12/frosted/checker.py#L128-L135
null
class ExportBinding(Binding):
    """A binding created by an __all__ assignment.

    If the names in the list can be determined statically, they will be
    treated as names for export and additional checking applied to them.

    The only __all__ assignment that can be recognized is one which takes
    the value of a literal list containing literal strings.  For example:

        __all__ = ["foo", "bar"]

    Names which are imported and not otherwise used but appear in the value
    of __all__ will not have an unused import warning reported for them.
    """
    __slots__ = ()
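To make the behaviour of ExportBinding.names() concrete, here is a minimal sketch (not part of the record above): it parses a hypothetical module containing a literal __all__ assignment and reads the exported names back. The import path frosted.checker is an assumption based on the file path of this record, and the example relies on string literals parsing as ast.Str nodes, which holds on the Python versions frosted targeted.

# Sketch: how ExportBinding.names() extracts literal strings from __all__.
# Assumes ExportBinding is importable from frosted.checker (the module above).
import ast

from frosted.checker import ExportBinding

tree = ast.parse('__all__ = ["foo", "bar"]\n')
assign = tree.body[0]                              # the Assign node for __all__
binding = ExportBinding('__all__', assign.value)   # source is the ast.List node
print(binding.names())                             # -> ['foo', 'bar']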
timothycrosley/deprecated.frosted
frosted/checker.py
FunctionScope.unusedAssignments
python
def unusedAssignments(self):
    for name, binding in self.items():
        if (not binding.used and name not in self.globals
                and not self.uses_locals
                and isinstance(binding, Assignment)):
            yield name, binding
Return a generator for the assignments which have not been used.
train
https://github.com/timothycrosley/deprecated.frosted/blob/61ba7f341fc55676c3580c8c4e52117986cd5e12/frosted/checker.py#L159-L165
null
class FunctionScope(Scope):
    """Represents the name scope for a function."""
    uses_locals = False
    always_used = set(['__tracebackhide__', '__traceback_info__',
                       '__traceback_supplement__'])

    def __init__(self):
        Scope.__init__(self)
        self.globals = self.always_used.copy()
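A small illustration of how the generator is consumed (the checker does this in its deferred checkUnusedAssignments handler). This is a hypothetical sketch: the import path frosted.checker and the stand-in source node are assumptions made for illustration.

# Sketch: a binding that is stored but never read is yielded by unusedAssignments().
import ast

from frosted.checker import Assignment, FunctionScope

scope = FunctionScope()
node = ast.parse('x = 1').body[0]          # stand-in source node for the assignment
scope['x'] = Assignment('x', node)         # never marked as used, not declared global
print(list(scope.unusedAssignments()))     # -> [('x', <Assignment instance>)]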
timothycrosley/deprecated.frosted
frosted/checker.py
Checker.check_plugins
python
def check_plugins(self):
    checkers = {}
    for ep in pkg_resources.iter_entry_points(group='frosted.plugins'):
        checkers.update({ep.name: ep.load()})

    for plugin_name, plugin in checkers.items():
        if self.filename != '(none)':
            messages = plugin.check(self.filename)
            for message, loc, args, kwargs in messages:
                self.report(message, loc, *args, **kwargs)
Collect plugins from the 'frosted.plugins' entry point and run their check() method, passing the filename.
train
https://github.com/timothycrosley/deprecated.frosted/blob/61ba7f341fc55676c3580c8c4e52117986cd5e12/frosted/checker.py#L276-L289
null
class Checker(object): """The core of frosted, checks the cleanliness and sanity of Python code.""" node_depth = 0 offset = None trace_tree = False frosted_builtins = FROSTED_BUILTINS def __init__(self, tree, filename='(none)', builtins=None, ignore_lines=(), **settings): self.settings = settings self.ignore_errors = settings.get('ignore_frosted_errors', []) self.ignore_lines = ignore_lines file_specific_ignores = settings.get('ignore_frosted_errors_for_' + (os.path.basename(filename) or ""), None) if file_specific_ignores: self.ignore_errors += file_specific_ignores self._node_handlers = {} self._deferred_functions = [] self._deferred_assignments = [] self.dead_scopes = [] self.messages = [] self.filename = filename if builtins: self.frosted_builtins = self.frosted_builtins.union(builtins) self.scope_stack = [ModuleScope()] self.except_handlers = [()] self.futures_allowed = True self.root = tree self.handle_children(tree) self.run_deferred(self._deferred_functions) self._deferred_functions = None self.run_deferred(self._deferred_assignments) self._deferred_assignments = None del self.scope_stack[1:] self.pop_scope() self.check_dead_scopes() self.check_plugins() def defer_function(self, callable): """Schedule a function handler to be called just before completion. This is used for handling function bodies, which must be deferred because code later in the file might modify the global scope. When 'callable' is called, the scope at the time this is called will be restored, however it will contain any new bindings added to it. """ self._deferred_functions.append((callable, self.scope_stack[:], self.offset)) def defer_assignment(self, callable): """Schedule an assignment handler to be called just after deferred function handlers.""" self._deferred_assignments.append((callable, self.scope_stack[:], self.offset)) def run_deferred(self, deferred): """Run the callables in deferred using their associated scope stack.""" for handler, scope, offset in deferred: self.scope_stack = scope self.offset = offset handler() @property def scope(self): return self.scope_stack[-1] def pop_scope(self): self.dead_scopes.append(self.scope_stack.pop()) def check_dead_scopes(self): """Look at scopes which have been fully examined and report names in them which were imported but unused.""" for scope in self.dead_scopes: export = isinstance(scope.get('__all__'), ExportBinding) if export: all = scope['__all__'].names() # Look for possible mistakes in the export list if not scope.importStarred and os.path.basename(self.filename) != '__init__.py': undefined = set(all) - set(scope) for name in undefined: self.report(messages.UndefinedExport, scope['__all__'].source, name) else: all = [] # Look for imported names that aren't used without checking imports in namespace definition for importation in scope.values(): if isinstance(importation, Importation) and not importation.used and importation.name not in all: self.report(messages.UnusedImport, importation.source, importation.name) def push_scope(self, scope_class=FunctionScope): self.scope_stack.append(scope_class()) def push_function_scope(self): # XXX Deprecated self.push_scope(FunctionScope) def push_class_scope(self): # XXX Deprecated self.push_scope(ClassScope) def report(self, message_class, *args, **kwargs): error_code = message_class.error_code if(not error_code[:2] + "00" in self.ignore_errors and not error_code in self.ignore_errors and not str(message_class.error_number) in self.ignore_errors): kwargs['verbose'] = self.settings.get('verbose') message = 
message_class(self.filename, *args, **kwargs) if message.lineno not in self.ignore_lines: self.messages.append(message) def has_parent(self, node, kind): while hasattr(node, 'parent'): node = node.parent if isinstance(node, kind): return True def get_common_ancestor(self, lnode, rnode, stop=None): stop = stop or self.root if lnode is rnode: return lnode if stop in (lnode, rnode): return stop if not hasattr(lnode, 'parent') or not hasattr(rnode, 'parent'): return if (lnode.level > rnode.level): return self.get_common_ancestor(lnode.parent, rnode, stop) if (rnode.level > lnode.level): return self.get_common_ancestor(lnode, rnode.parent, stop) return self.get_common_ancestor(lnode.parent, rnode.parent, stop) def descendant_of(self, node, ancestors, stop=None): for ancestor in ancestors: if self.get_common_ancestor(node, ancestor, stop) not in (stop, None): return True return False def on_fork(self, parent, lnode, rnode, items): return (self.descendant_of(lnode, items, parent) ^ self.descendant_of(rnode, items, parent)) def different_forks(self, lnode, rnode): """True, if lnode and rnode are located on different forks of IF/TRY.""" ancestor = self.get_common_ancestor(lnode, rnode) if isinstance(ancestor, ast.If): for fork in (ancestor.body, ancestor.orelse): if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, ast.Try): body = ancestor.body + ancestor.orelse for fork in [body] + [[hdl] for hdl in ancestor.handlers]: if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, ast.TryFinally): if self.on_fork(ancestor, lnode, rnode, ancestor.body): return True return False def add_binding(self, node, value, report_redef=True): """Called when a binding is altered. - `node` is the statement responsible for the change - `value` is the optional new value, a Binding instance, associated with the binding; if None, the binding is deleted if it exists. - if `report_redef` is True (default), rebinding while unused will be reported. 
""" redefinedWhileUnused = False if not isinstance(self.scope, ClassScope): for scope in self.scope_stack[::-1]: existing = scope.get(value.name) if (isinstance(existing, Importation) and not existing.used and (not isinstance(value, Importation) or value.fullName == existing.fullName) and report_redef and not self.different_forks(node, existing.source)): redefinedWhileUnused = True self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) existing = self.scope.get(value.name) if not redefinedWhileUnused and self.has_parent(value.source, ast.ListComp): if (existing and report_redef and not self.has_parent(existing.source, (ast.For, ast.ListComp)) and not self.different_forks(node, existing.source)): self.report(messages.RedefinedInListComp, node, value.name, existing.source) if (isinstance(existing, Definition) and not existing.used and not self.different_forks(node, existing.source)): self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) else: self.scope[value.name] = value def get_node_handler(self, node_class): try: return self._node_handlers[node_class] except KeyError: nodeType = str(node_class.__name__).upper() self._node_handlers[node_class] = handler = getattr(self, nodeType) return handler def iter_visible_scopes(self): outerScopes = itertools.islice(self.scope_stack, len(self.scope_stack) - 1) scopes = [scope for scope in outerScopes if isinstance(scope, (FunctionScope, ModuleScope))] if (isinstance(self.scope, GeneratorScope) and scopes[-1] != self.scope_stack[-2]): scopes.append(self.scope_stack[-2]) scopes.append(self.scope_stack[-1]) return iter(reversed(scopes)) def handle_node_load(self, node): name = node_name(node) if not name: return importStarred = False for scope in self.iter_visible_scopes(): importStarred = importStarred or scope.importStarred try: scope[name].used = (self.scope, node) except KeyError: pass else: return # look in the built-ins if importStarred or name in self.frosted_builtins: return if name == '__path__' and os.path.basename(self.filename) == '__init__.py': # the special name __path__ is valid only in packages return # protected with a NameError handler? 
if 'NameError' not in self.except_handlers[-1]: self.report(messages.UndefinedName, node, name) def handle_node_store(self, node): name = node_name(node) if not name: return # if the name hasn't already been defined in the current scope if isinstance(self.scope, FunctionScope) and name not in self.scope: # for each function or module scope above us for scope in self.scope_stack[:-1]: if not isinstance(scope, (FunctionScope, ModuleScope)): continue # if the name was defined in that scope, and the name has # been accessed already in the current scope, and hasn't # been declared global used = name in scope and scope[name].used if used and used[0] is self.scope and name not in self.scope.globals: # then it's probably a mistake self.report(messages.UndefinedLocal, scope[name].used[1], name, scope[name].source) break parent = getattr(node, 'parent', None) if isinstance(parent, (ast.For, ast.comprehension, ast.Tuple, ast.List)): binding = Binding(name, node) elif (parent is not None and name == '__all__' and isinstance(self.scope, ModuleScope)): binding = ExportBinding(name, parent.value) else: binding = Assignment(name, node) if name in self.scope: binding.used = self.scope[name].used self.add_binding(node, binding) def handle_node_delete(self, node): name = node_name(node) if not name: return if isinstance(self.scope, FunctionScope) and name in self.scope.globals: self.scope.globals.remove(name) else: try: del self.scope[name] except KeyError: self.report(messages.UndefinedName, node, name) def handle_children(self, tree): for node in ast.iter_child_nodes(tree): self.handleNode(node, tree) def is_docstring(self, node): """Determine if the given node is a docstring, as long as it is at the correct place in the node tree.""" return isinstance(node, ast.Str) or (isinstance(node, ast.Expr) and isinstance(node.value, ast.Str)) def docstring(self, node): if isinstance(node, ast.Expr): node = node.value if not isinstance(node, ast.Str): return (None, None) # Computed incorrectly if the docstring has backslash doctest_lineno = node.lineno - node.s.count('\n') - 1 return (node.s, doctest_lineno) def handleNode(self, node, parent): if node is None: return if self.offset and getattr(node, 'lineno', None) is not None: node.lineno += self.offset[0] node.col_offset += self.offset[1] if self.trace_tree: print(' ' * self.node_depth + node.__class__.__name__) if self.futures_allowed and not (isinstance(node, ast.ImportFrom) or self.is_docstring(node)): self.futures_allowed = False self.node_depth += 1 node.level = self.node_depth node.parent = parent try: handler = self.get_node_handler(node.__class__) handler(node) finally: self.node_depth -= 1 if self.trace_tree: print(' ' * self.node_depth + 'end ' + node.__class__.__name__) _get_doctest_examples = doctest.DocTestParser().get_examples def handle_doctests(self, node): try: docstring, node_lineno = self.docstring(node.body[0]) if not docstring: return examples = self._get_doctest_examples(docstring) except (ValueError, IndexError): # e.g. line 6 of the docstring for <string> has inconsistent # leading whitespace: ... 
return node_offset = self.offset or (0, 0) self.push_scope() for example in examples: try: tree = compile(example.source, "<doctest>", "exec", ast.PyCF_ONLY_AST) except SyntaxError: e = sys.exc_info()[1] position = (node_lineno + example.lineno + e.lineno, example.indent + 4 + (e.offset or 0)) self.report(messages.DoctestSyntaxError, node, position) else: self.offset = (node_offset[0] + node_lineno + example.lineno, node_offset[1] + example.indent + 4) self.handle_children(tree) self.offset = node_offset self.pop_scope() def find_return_with_argument(self, node): """Finds and returns a return statment that has an argument. Note that we should use node.returns in Python 3, but this method is never called in Python 3 so we don't bother checking. """ for item in node.body: if isinstance(item, ast.Return) and item.value: return item elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): return_with_argument = self.find_return_with_argument(item) if return_with_argument: return return_with_argument def is_generator(self, node): """Checks whether a function is a generator by looking for a yield statement or expression.""" if not isinstance(node.body, list): # lambdas can not be generators return False for item in node.body: if isinstance(item, (ast.Assign, ast.Expr)): if isinstance(item.value, ast.Yield): return True elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): if self.is_generator(item): return True return False def ignore(self, node): pass # "stmt" type nodes RETURN = DELETE = PRINT = WHILE = IF = WITH = WITHITEM = RAISE = TRYFINALLY = ASSERT = EXEC = EXPR = handle_children CONTINUE = BREAK = PASS = ignore # "expr" type nodes BOOLOP = BINOP = UNARYOP = IFEXP = DICT = SET = YIELD = YIELDFROM = COMPARE = REPR = ATTRIBUTE = SUBSCRIPT = \ LIST = TUPLE = STARRED = NAMECONSTANT = handle_children NUM = STR = BYTES = ELLIPSIS = ignore # "slice" type nodes SLICE = EXTSLICE = INDEX = handle_children # expression contexts are node instances too, though being constants LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore # same for operators AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = BITOR = BITXOR = BITAND = FLOORDIV = INVERT = \ NOT = UADD = USUB = EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = ignore # additional node types COMPREHENSION = KEYWORD = handle_children def GLOBAL(self, node): """Keep track of globals declarations.""" if isinstance(self.scope, FunctionScope): self.scope.globals.update(node.names) NONLOCAL = GLOBAL def LISTCOMP(self, node): # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) def GENERATOREXP(self, node): self.push_scope(GeneratorScope) # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) self.pop_scope() SETCOMP = GENERATOREXP def DICTCOMP(self, node): self.push_scope(GeneratorScope) for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.key, node) self.handleNode(node.value, node) self.pop_scope() def FOR(self, node): """Process bindings for loop variables.""" vars = [] def collectLoopVars(n): if isinstance(n, ast.Name): vars.append(n.id) elif isinstance(n, ast.expr_context): return else: for c in ast.iter_child_nodes(n): collectLoopVars(c) collectLoopVars(node.target) for varn in vars: if (isinstance(self.scope.get(varn), Importation) # unused ones will get an unused import warning and self.scope[varn].used): 
self.report(messages.ImportShadowedByLoopVar, node, varn, self.scope[varn].source) self.handle_children(node) def NAME(self, node): """Handle occurrence of Name (which can be a load/store/delete access.)""" # Locate the name in locals / function / globals scopes. if isinstance(node.ctx, (ast.Load, ast.AugLoad)): self.handle_node_load(node) if (node.id == 'locals' and isinstance(self.scope, FunctionScope) and isinstance(node.parent, ast.Call)): # we are doing locals() call in current scope self.scope.uses_locals = True elif isinstance(node.ctx, (ast.Store, ast.AugStore)): self.handle_node_store(node) elif isinstance(node.ctx, ast.Del): self.handle_node_delete(node) else: # must be a Param context -- this only happens for names in function # arguments, but these aren't dispatched through here raise RuntimeError("Got impossible expression context: %r" % (node.ctx,)) def CALL(self, node): f = node.func if isinstance(f, ast.Name): for scope in self.iter_visible_scopes(): definition = scope.get(f.id) if definition: if isinstance(definition, FunctionDefinition): definition.signature.checkCall(node, self, f.id) break self.handle_children(node) def FUNCTIONDEF(self, node): for deco in node.decorator_list: self.handleNode(deco, node) self.add_binding(node, FunctionDefinition(node.name, node)) self.LAMBDA(node) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) def LAMBDA(self, node): args = [] annotations = [] if PY2: def addArgs(arglist): for arg in arglist: if isinstance(arg, ast.Tuple): addArgs(arg.elts) else: if arg.id in args: self.report(messages.DuplicateArgument, node, arg.id) args.append(arg.id) addArgs(node.args.args) defaults = node.args.defaults else: for arg in node.args.args + node.args.kwonlyargs: annotations.append(arg.annotation) args.append(arg.arg) defaults = node.args.defaults + node.args.kw_defaults # Only for Python3 FunctionDefs is_py3_func = hasattr(node, 'returns') for arg_name in ('vararg', 'kwarg'): wildcard = getattr(node.args, arg_name) if not wildcard: continue args.append(getattr(wildcard, 'arg', wildcard)) if is_py3_func: if PY34_GTE: annotations.append(wildcard.annotation) else: argannotation = arg_name + 'annotation' annotations.append(getattr(node.args, argannotation)) if is_py3_func: annotations.append(node.returns) if PY3: if len(set(args)) < len(args): for (idx, arg) in enumerate(args): if arg in args[:idx]: self.report(messages.DuplicateArgument, node, arg) for child in annotations + defaults: if child: self.handleNode(child, node) def runFunction(): self.push_scope() for name in args: self.add_binding(node, Argument(name, node), report_redef=False) if isinstance(node.body, list): # case for FunctionDefs for stmt in node.body: self.handleNode(stmt, node) else: # case for Lambdas self.handleNode(node.body, node) def checkUnusedAssignments(): """Check to see if any assignments have not been used.""" for name, binding in self.scope.unusedAssignments(): self.report(messages.UnusedVariable, binding.source, name) self.defer_assignment(checkUnusedAssignments) if PY2: def checkReturnWithArgumentInsideGenerator(): """Check to see if there are any return statements with arguments but the function is a generator.""" if self.is_generator(node): stmt = self.find_return_with_argument(node) if stmt is not None: self.report(messages.ReturnWithArgsInsideGenerator, stmt) self.defer_assignment(checkReturnWithArgumentInsideGenerator) self.pop_scope() self.defer_function(runFunction) def CLASSDEF(self, node): """Check names used in 
a class definition, including its decorators, base classes, and the body of its definition. Additionally, add its name to the current scope. """ for deco in node.decorator_list: self.handleNode(deco, node) for baseNode in node.bases: self.handleNode(baseNode, node) if not PY2: for keywordNode in node.keywords: self.handleNode(keywordNode, node) self.push_scope(ClassScope) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) for stmt in node.body: self.handleNode(stmt, node) self.pop_scope() self.add_binding(node, ClassDefinition(node.name, node)) def ASSIGN(self, node): self.handleNode(node.value, node) for target in node.targets: self.handleNode(target, node) def AUGASSIGN(self, node): self.handle_node_load(node.target) self.handleNode(node.value, node) self.handleNode(node.target, node) def IMPORT(self, node): for alias in node.names: name = alias.asname or alias.name importation = Importation(name, node) self.add_binding(node, importation) def IMPORTFROM(self, node): if node.module == '__future__': if not self.futures_allowed: self.report(messages.LateFutureImport, node, [n.name for n in node.names]) else: self.futures_allowed = False for alias in node.names: if alias.name == '*': self.scope.importStarred = True self.report(messages.ImportStarUsed, node, node.module) continue name = alias.asname or alias.name importation = Importation(name, node) if node.module == '__future__': importation.used = (self.scope, node) self.add_binding(node, importation) def TRY(self, node): handler_names = [] # List the exception handlers for handler in node.handlers: if isinstance(handler.type, ast.Tuple): for exc_type in handler.type.elts: handler_names.append(node_name(exc_type)) elif handler.type: handler_names.append(node_name(handler.type)) # Memorize the except handlers and process the body self.except_handlers.append(handler_names) for child in node.body: self.handleNode(child, node) self.except_handlers.pop() # Process the other nodes: "except:", "else:", "finally:" for child in ast.iter_child_nodes(node): if child not in node.body: self.handleNode(child, node) TRYEXCEPT = TRY def EXCEPTHANDLER(self, node): # 3.x: in addition to handling children, we must handle the name of # the exception, which is not a Name node, but a simple string. if node.type is None: self.report(messages.BareExcept, node) if isinstance(node.name, str): self.handle_node_store(node) self.handle_children(node)
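check_plugins() only assumes that whatever a 'frosted.plugins' entry point loads exposes a check(filename) callable returning (message_class, loc, args, kwargs) tuples, which it forwards to Checker.report(). The plugin below is purely hypothetical (module name, behaviour, and packaging are assumptions), sketched only to show the shape of that contract; the frosted.messages import path is likewise assumed from the usage in checker.py.

# Hypothetical plugin module, e.g. frosted_example_plugin.py.  check_plugins()
# loads it via ep.load() and calls check(filename) for every file being checked.
import ast

from frosted import messages


def check(filename):
    """Return (message_class, loc, args, kwargs) tuples for Checker.report()."""
    loc = ast.parse('pass').body[0]        # any object carrying a lineno works as loc
    if filename.endswith('_generated.py'):
        # UnusedImport is used here only as an example message class
        return [(messages.UnusedImport, loc, ('example',), {})]
    return []

# In the plugin's setup.py (assumed packaging), the hook would be registered as:
#   entry_points={'frosted.plugins': ['example = frosted_example_plugin']}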
timothycrosley/deprecated.frosted
frosted/checker.py
Checker.defer_function
python
def defer_function(self, callable):
    self._deferred_functions.append((callable, self.scope_stack[:], self.offset))
Schedule a function handler to be called just before completion. This is used for handling function bodies, which must be deferred because code later in the file might modify the global scope. When 'callable' is called, the scope at the time of this call will be restored; however, it will contain any new bindings added to it.
train
https://github.com/timothycrosley/deprecated.frosted/blob/61ba7f341fc55676c3580c8c4e52117986cd5e12/frosted/checker.py#L291-L299
null
class Checker(object): """The core of frosted, checks the cleanliness and sanity of Python code.""" node_depth = 0 offset = None trace_tree = False frosted_builtins = FROSTED_BUILTINS def __init__(self, tree, filename='(none)', builtins=None, ignore_lines=(), **settings): self.settings = settings self.ignore_errors = settings.get('ignore_frosted_errors', []) self.ignore_lines = ignore_lines file_specific_ignores = settings.get('ignore_frosted_errors_for_' + (os.path.basename(filename) or ""), None) if file_specific_ignores: self.ignore_errors += file_specific_ignores self._node_handlers = {} self._deferred_functions = [] self._deferred_assignments = [] self.dead_scopes = [] self.messages = [] self.filename = filename if builtins: self.frosted_builtins = self.frosted_builtins.union(builtins) self.scope_stack = [ModuleScope()] self.except_handlers = [()] self.futures_allowed = True self.root = tree self.handle_children(tree) self.run_deferred(self._deferred_functions) self._deferred_functions = None self.run_deferred(self._deferred_assignments) self._deferred_assignments = None del self.scope_stack[1:] self.pop_scope() self.check_dead_scopes() self.check_plugins() def check_plugins(self): """ collect plugins from entry point 'frosted.plugins' and run their check() method, passing the filename """ checkers = {} for ep in pkg_resources.iter_entry_points(group='frosted.plugins'): checkers.update({ep.name: ep.load()}) for plugin_name, plugin in checkers.items(): if self.filename != '(none)': messages = plugin.check(self.filename) for message, loc, args, kwargs in messages: self.report(message, loc, *args, **kwargs) def defer_assignment(self, callable): """Schedule an assignment handler to be called just after deferred function handlers.""" self._deferred_assignments.append((callable, self.scope_stack[:], self.offset)) def run_deferred(self, deferred): """Run the callables in deferred using their associated scope stack.""" for handler, scope, offset in deferred: self.scope_stack = scope self.offset = offset handler() @property def scope(self): return self.scope_stack[-1] def pop_scope(self): self.dead_scopes.append(self.scope_stack.pop()) def check_dead_scopes(self): """Look at scopes which have been fully examined and report names in them which were imported but unused.""" for scope in self.dead_scopes: export = isinstance(scope.get('__all__'), ExportBinding) if export: all = scope['__all__'].names() # Look for possible mistakes in the export list if not scope.importStarred and os.path.basename(self.filename) != '__init__.py': undefined = set(all) - set(scope) for name in undefined: self.report(messages.UndefinedExport, scope['__all__'].source, name) else: all = [] # Look for imported names that aren't used without checking imports in namespace definition for importation in scope.values(): if isinstance(importation, Importation) and not importation.used and importation.name not in all: self.report(messages.UnusedImport, importation.source, importation.name) def push_scope(self, scope_class=FunctionScope): self.scope_stack.append(scope_class()) def push_function_scope(self): # XXX Deprecated self.push_scope(FunctionScope) def push_class_scope(self): # XXX Deprecated self.push_scope(ClassScope) def report(self, message_class, *args, **kwargs): error_code = message_class.error_code if(not error_code[:2] + "00" in self.ignore_errors and not error_code in self.ignore_errors and not str(message_class.error_number) in self.ignore_errors): kwargs['verbose'] = self.settings.get('verbose') message = 
message_class(self.filename, *args, **kwargs) if message.lineno not in self.ignore_lines: self.messages.append(message) def has_parent(self, node, kind): while hasattr(node, 'parent'): node = node.parent if isinstance(node, kind): return True def get_common_ancestor(self, lnode, rnode, stop=None): stop = stop or self.root if lnode is rnode: return lnode if stop in (lnode, rnode): return stop if not hasattr(lnode, 'parent') or not hasattr(rnode, 'parent'): return if (lnode.level > rnode.level): return self.get_common_ancestor(lnode.parent, rnode, stop) if (rnode.level > lnode.level): return self.get_common_ancestor(lnode, rnode.parent, stop) return self.get_common_ancestor(lnode.parent, rnode.parent, stop) def descendant_of(self, node, ancestors, stop=None): for ancestor in ancestors: if self.get_common_ancestor(node, ancestor, stop) not in (stop, None): return True return False def on_fork(self, parent, lnode, rnode, items): return (self.descendant_of(lnode, items, parent) ^ self.descendant_of(rnode, items, parent)) def different_forks(self, lnode, rnode): """True, if lnode and rnode are located on different forks of IF/TRY.""" ancestor = self.get_common_ancestor(lnode, rnode) if isinstance(ancestor, ast.If): for fork in (ancestor.body, ancestor.orelse): if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, ast.Try): body = ancestor.body + ancestor.orelse for fork in [body] + [[hdl] for hdl in ancestor.handlers]: if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, ast.TryFinally): if self.on_fork(ancestor, lnode, rnode, ancestor.body): return True return False def add_binding(self, node, value, report_redef=True): """Called when a binding is altered. - `node` is the statement responsible for the change - `value` is the optional new value, a Binding instance, associated with the binding; if None, the binding is deleted if it exists. - if `report_redef` is True (default), rebinding while unused will be reported. 
""" redefinedWhileUnused = False if not isinstance(self.scope, ClassScope): for scope in self.scope_stack[::-1]: existing = scope.get(value.name) if (isinstance(existing, Importation) and not existing.used and (not isinstance(value, Importation) or value.fullName == existing.fullName) and report_redef and not self.different_forks(node, existing.source)): redefinedWhileUnused = True self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) existing = self.scope.get(value.name) if not redefinedWhileUnused and self.has_parent(value.source, ast.ListComp): if (existing and report_redef and not self.has_parent(existing.source, (ast.For, ast.ListComp)) and not self.different_forks(node, existing.source)): self.report(messages.RedefinedInListComp, node, value.name, existing.source) if (isinstance(existing, Definition) and not existing.used and not self.different_forks(node, existing.source)): self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) else: self.scope[value.name] = value def get_node_handler(self, node_class): try: return self._node_handlers[node_class] except KeyError: nodeType = str(node_class.__name__).upper() self._node_handlers[node_class] = handler = getattr(self, nodeType) return handler def iter_visible_scopes(self): outerScopes = itertools.islice(self.scope_stack, len(self.scope_stack) - 1) scopes = [scope for scope in outerScopes if isinstance(scope, (FunctionScope, ModuleScope))] if (isinstance(self.scope, GeneratorScope) and scopes[-1] != self.scope_stack[-2]): scopes.append(self.scope_stack[-2]) scopes.append(self.scope_stack[-1]) return iter(reversed(scopes)) def handle_node_load(self, node): name = node_name(node) if not name: return importStarred = False for scope in self.iter_visible_scopes(): importStarred = importStarred or scope.importStarred try: scope[name].used = (self.scope, node) except KeyError: pass else: return # look in the built-ins if importStarred or name in self.frosted_builtins: return if name == '__path__' and os.path.basename(self.filename) == '__init__.py': # the special name __path__ is valid only in packages return # protected with a NameError handler? 
if 'NameError' not in self.except_handlers[-1]: self.report(messages.UndefinedName, node, name) def handle_node_store(self, node): name = node_name(node) if not name: return # if the name hasn't already been defined in the current scope if isinstance(self.scope, FunctionScope) and name not in self.scope: # for each function or module scope above us for scope in self.scope_stack[:-1]: if not isinstance(scope, (FunctionScope, ModuleScope)): continue # if the name was defined in that scope, and the name has # been accessed already in the current scope, and hasn't # been declared global used = name in scope and scope[name].used if used and used[0] is self.scope and name not in self.scope.globals: # then it's probably a mistake self.report(messages.UndefinedLocal, scope[name].used[1], name, scope[name].source) break parent = getattr(node, 'parent', None) if isinstance(parent, (ast.For, ast.comprehension, ast.Tuple, ast.List)): binding = Binding(name, node) elif (parent is not None and name == '__all__' and isinstance(self.scope, ModuleScope)): binding = ExportBinding(name, parent.value) else: binding = Assignment(name, node) if name in self.scope: binding.used = self.scope[name].used self.add_binding(node, binding) def handle_node_delete(self, node): name = node_name(node) if not name: return if isinstance(self.scope, FunctionScope) and name in self.scope.globals: self.scope.globals.remove(name) else: try: del self.scope[name] except KeyError: self.report(messages.UndefinedName, node, name) def handle_children(self, tree): for node in ast.iter_child_nodes(tree): self.handleNode(node, tree) def is_docstring(self, node): """Determine if the given node is a docstring, as long as it is at the correct place in the node tree.""" return isinstance(node, ast.Str) or (isinstance(node, ast.Expr) and isinstance(node.value, ast.Str)) def docstring(self, node): if isinstance(node, ast.Expr): node = node.value if not isinstance(node, ast.Str): return (None, None) # Computed incorrectly if the docstring has backslash doctest_lineno = node.lineno - node.s.count('\n') - 1 return (node.s, doctest_lineno) def handleNode(self, node, parent): if node is None: return if self.offset and getattr(node, 'lineno', None) is not None: node.lineno += self.offset[0] node.col_offset += self.offset[1] if self.trace_tree: print(' ' * self.node_depth + node.__class__.__name__) if self.futures_allowed and not (isinstance(node, ast.ImportFrom) or self.is_docstring(node)): self.futures_allowed = False self.node_depth += 1 node.level = self.node_depth node.parent = parent try: handler = self.get_node_handler(node.__class__) handler(node) finally: self.node_depth -= 1 if self.trace_tree: print(' ' * self.node_depth + 'end ' + node.__class__.__name__) _get_doctest_examples = doctest.DocTestParser().get_examples def handle_doctests(self, node): try: docstring, node_lineno = self.docstring(node.body[0]) if not docstring: return examples = self._get_doctest_examples(docstring) except (ValueError, IndexError): # e.g. line 6 of the docstring for <string> has inconsistent # leading whitespace: ... 
return node_offset = self.offset or (0, 0) self.push_scope() for example in examples: try: tree = compile(example.source, "<doctest>", "exec", ast.PyCF_ONLY_AST) except SyntaxError: e = sys.exc_info()[1] position = (node_lineno + example.lineno + e.lineno, example.indent + 4 + (e.offset or 0)) self.report(messages.DoctestSyntaxError, node, position) else: self.offset = (node_offset[0] + node_lineno + example.lineno, node_offset[1] + example.indent + 4) self.handle_children(tree) self.offset = node_offset self.pop_scope() def find_return_with_argument(self, node): """Finds and returns a return statment that has an argument. Note that we should use node.returns in Python 3, but this method is never called in Python 3 so we don't bother checking. """ for item in node.body: if isinstance(item, ast.Return) and item.value: return item elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): return_with_argument = self.find_return_with_argument(item) if return_with_argument: return return_with_argument def is_generator(self, node): """Checks whether a function is a generator by looking for a yield statement or expression.""" if not isinstance(node.body, list): # lambdas can not be generators return False for item in node.body: if isinstance(item, (ast.Assign, ast.Expr)): if isinstance(item.value, ast.Yield): return True elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): if self.is_generator(item): return True return False def ignore(self, node): pass # "stmt" type nodes RETURN = DELETE = PRINT = WHILE = IF = WITH = WITHITEM = RAISE = TRYFINALLY = ASSERT = EXEC = EXPR = handle_children CONTINUE = BREAK = PASS = ignore # "expr" type nodes BOOLOP = BINOP = UNARYOP = IFEXP = DICT = SET = YIELD = YIELDFROM = COMPARE = REPR = ATTRIBUTE = SUBSCRIPT = \ LIST = TUPLE = STARRED = NAMECONSTANT = handle_children NUM = STR = BYTES = ELLIPSIS = ignore # "slice" type nodes SLICE = EXTSLICE = INDEX = handle_children # expression contexts are node instances too, though being constants LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore # same for operators AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = BITOR = BITXOR = BITAND = FLOORDIV = INVERT = \ NOT = UADD = USUB = EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = ignore # additional node types COMPREHENSION = KEYWORD = handle_children def GLOBAL(self, node): """Keep track of globals declarations.""" if isinstance(self.scope, FunctionScope): self.scope.globals.update(node.names) NONLOCAL = GLOBAL def LISTCOMP(self, node): # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) def GENERATOREXP(self, node): self.push_scope(GeneratorScope) # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) self.pop_scope() SETCOMP = GENERATOREXP def DICTCOMP(self, node): self.push_scope(GeneratorScope) for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.key, node) self.handleNode(node.value, node) self.pop_scope() def FOR(self, node): """Process bindings for loop variables.""" vars = [] def collectLoopVars(n): if isinstance(n, ast.Name): vars.append(n.id) elif isinstance(n, ast.expr_context): return else: for c in ast.iter_child_nodes(n): collectLoopVars(c) collectLoopVars(node.target) for varn in vars: if (isinstance(self.scope.get(varn), Importation) # unused ones will get an unused import warning and self.scope[varn].used): 
self.report(messages.ImportShadowedByLoopVar, node, varn, self.scope[varn].source) self.handle_children(node) def NAME(self, node): """Handle occurrence of Name (which can be a load/store/delete access.)""" # Locate the name in locals / function / globals scopes. if isinstance(node.ctx, (ast.Load, ast.AugLoad)): self.handle_node_load(node) if (node.id == 'locals' and isinstance(self.scope, FunctionScope) and isinstance(node.parent, ast.Call)): # we are doing locals() call in current scope self.scope.uses_locals = True elif isinstance(node.ctx, (ast.Store, ast.AugStore)): self.handle_node_store(node) elif isinstance(node.ctx, ast.Del): self.handle_node_delete(node) else: # must be a Param context -- this only happens for names in function # arguments, but these aren't dispatched through here raise RuntimeError("Got impossible expression context: %r" % (node.ctx,)) def CALL(self, node): f = node.func if isinstance(f, ast.Name): for scope in self.iter_visible_scopes(): definition = scope.get(f.id) if definition: if isinstance(definition, FunctionDefinition): definition.signature.checkCall(node, self, f.id) break self.handle_children(node) def FUNCTIONDEF(self, node): for deco in node.decorator_list: self.handleNode(deco, node) self.add_binding(node, FunctionDefinition(node.name, node)) self.LAMBDA(node) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) def LAMBDA(self, node): args = [] annotations = [] if PY2: def addArgs(arglist): for arg in arglist: if isinstance(arg, ast.Tuple): addArgs(arg.elts) else: if arg.id in args: self.report(messages.DuplicateArgument, node, arg.id) args.append(arg.id) addArgs(node.args.args) defaults = node.args.defaults else: for arg in node.args.args + node.args.kwonlyargs: annotations.append(arg.annotation) args.append(arg.arg) defaults = node.args.defaults + node.args.kw_defaults # Only for Python3 FunctionDefs is_py3_func = hasattr(node, 'returns') for arg_name in ('vararg', 'kwarg'): wildcard = getattr(node.args, arg_name) if not wildcard: continue args.append(getattr(wildcard, 'arg', wildcard)) if is_py3_func: if PY34_GTE: annotations.append(wildcard.annotation) else: argannotation = arg_name + 'annotation' annotations.append(getattr(node.args, argannotation)) if is_py3_func: annotations.append(node.returns) if PY3: if len(set(args)) < len(args): for (idx, arg) in enumerate(args): if arg in args[:idx]: self.report(messages.DuplicateArgument, node, arg) for child in annotations + defaults: if child: self.handleNode(child, node) def runFunction(): self.push_scope() for name in args: self.add_binding(node, Argument(name, node), report_redef=False) if isinstance(node.body, list): # case for FunctionDefs for stmt in node.body: self.handleNode(stmt, node) else: # case for Lambdas self.handleNode(node.body, node) def checkUnusedAssignments(): """Check to see if any assignments have not been used.""" for name, binding in self.scope.unusedAssignments(): self.report(messages.UnusedVariable, binding.source, name) self.defer_assignment(checkUnusedAssignments) if PY2: def checkReturnWithArgumentInsideGenerator(): """Check to see if there are any return statements with arguments but the function is a generator.""" if self.is_generator(node): stmt = self.find_return_with_argument(node) if stmt is not None: self.report(messages.ReturnWithArgsInsideGenerator, stmt) self.defer_assignment(checkReturnWithArgumentInsideGenerator) self.pop_scope() self.defer_function(runFunction) def CLASSDEF(self, node): """Check names used in 
a class definition, including its decorators, base classes, and the body of its definition. Additionally, add its name to the current scope. """ for deco in node.decorator_list: self.handleNode(deco, node) for baseNode in node.bases: self.handleNode(baseNode, node) if not PY2: for keywordNode in node.keywords: self.handleNode(keywordNode, node) self.push_scope(ClassScope) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) for stmt in node.body: self.handleNode(stmt, node) self.pop_scope() self.add_binding(node, ClassDefinition(node.name, node)) def ASSIGN(self, node): self.handleNode(node.value, node) for target in node.targets: self.handleNode(target, node) def AUGASSIGN(self, node): self.handle_node_load(node.target) self.handleNode(node.value, node) self.handleNode(node.target, node) def IMPORT(self, node): for alias in node.names: name = alias.asname or alias.name importation = Importation(name, node) self.add_binding(node, importation) def IMPORTFROM(self, node): if node.module == '__future__': if not self.futures_allowed: self.report(messages.LateFutureImport, node, [n.name for n in node.names]) else: self.futures_allowed = False for alias in node.names: if alias.name == '*': self.scope.importStarred = True self.report(messages.ImportStarUsed, node, node.module) continue name = alias.asname or alias.name importation = Importation(name, node) if node.module == '__future__': importation.used = (self.scope, node) self.add_binding(node, importation) def TRY(self, node): handler_names = [] # List the exception handlers for handler in node.handlers: if isinstance(handler.type, ast.Tuple): for exc_type in handler.type.elts: handler_names.append(node_name(exc_type)) elif handler.type: handler_names.append(node_name(handler.type)) # Memorize the except handlers and process the body self.except_handlers.append(handler_names) for child in node.body: self.handleNode(child, node) self.except_handlers.pop() # Process the other nodes: "except:", "else:", "finally:" for child in ast.iter_child_nodes(node): if child not in node.body: self.handleNode(child, node) TRYEXCEPT = TRY def EXCEPTHANDLER(self, node): # 3.x: in addition to handling children, we must handle the name of # the exception, which is not a Name node, but a simple string. if node.type is None: self.report(messages.BareExcept, node) if isinstance(node.name, str): self.handle_node_store(node) self.handle_children(node)
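The deferral matters because a function body may legitimately reference names bound later in the module: the body handler runs only after the whole module has been walked, against a restored copy of the scope stack. A minimal sketch of that effect follows; the import path frosted.checker is an assumption based on this record's file path.

# Sketch: 'helper' is defined after 'caller', yet no UndefinedName is reported,
# because the deferred body handler runs once the module scope is complete.
import ast

from frosted.checker import Checker

source = (
    "def caller():\n"
    "    return helper()\n"
    "\n"
    "def helper():\n"
    "    return 1\n"
)
checker = Checker(ast.parse(source))
print(checker.messages)                    # -> [] ; no UndefinedName for 'helper'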
timothycrosley/deprecated.frosted
frosted/checker.py
Checker.defer_assignment
python
def defer_assignment(self, callable):
    self._deferred_assignments.append((callable, self.scope_stack[:], self.offset))
Schedule an assignment handler to be called just after deferred function handlers.
train
https://github.com/timothycrosley/deprecated.frosted/blob/61ba7f341fc55676c3580c8c4e52117986cd5e12/frosted/checker.py#L301-L304
null
class Checker(object): """The core of frosted, checks the cleanliness and sanity of Python code.""" node_depth = 0 offset = None trace_tree = False frosted_builtins = FROSTED_BUILTINS def __init__(self, tree, filename='(none)', builtins=None, ignore_lines=(), **settings): self.settings = settings self.ignore_errors = settings.get('ignore_frosted_errors', []) self.ignore_lines = ignore_lines file_specific_ignores = settings.get('ignore_frosted_errors_for_' + (os.path.basename(filename) or ""), None) if file_specific_ignores: self.ignore_errors += file_specific_ignores self._node_handlers = {} self._deferred_functions = [] self._deferred_assignments = [] self.dead_scopes = [] self.messages = [] self.filename = filename if builtins: self.frosted_builtins = self.frosted_builtins.union(builtins) self.scope_stack = [ModuleScope()] self.except_handlers = [()] self.futures_allowed = True self.root = tree self.handle_children(tree) self.run_deferred(self._deferred_functions) self._deferred_functions = None self.run_deferred(self._deferred_assignments) self._deferred_assignments = None del self.scope_stack[1:] self.pop_scope() self.check_dead_scopes() self.check_plugins() def check_plugins(self): """ collect plugins from entry point 'frosted.plugins' and run their check() method, passing the filename """ checkers = {} for ep in pkg_resources.iter_entry_points(group='frosted.plugins'): checkers.update({ep.name: ep.load()}) for plugin_name, plugin in checkers.items(): if self.filename != '(none)': messages = plugin.check(self.filename) for message, loc, args, kwargs in messages: self.report(message, loc, *args, **kwargs) def defer_function(self, callable): """Schedule a function handler to be called just before completion. This is used for handling function bodies, which must be deferred because code later in the file might modify the global scope. When 'callable' is called, the scope at the time this is called will be restored, however it will contain any new bindings added to it. 
""" self._deferred_functions.append((callable, self.scope_stack[:], self.offset)) def run_deferred(self, deferred): """Run the callables in deferred using their associated scope stack.""" for handler, scope, offset in deferred: self.scope_stack = scope self.offset = offset handler() @property def scope(self): return self.scope_stack[-1] def pop_scope(self): self.dead_scopes.append(self.scope_stack.pop()) def check_dead_scopes(self): """Look at scopes which have been fully examined and report names in them which were imported but unused.""" for scope in self.dead_scopes: export = isinstance(scope.get('__all__'), ExportBinding) if export: all = scope['__all__'].names() # Look for possible mistakes in the export list if not scope.importStarred and os.path.basename(self.filename) != '__init__.py': undefined = set(all) - set(scope) for name in undefined: self.report(messages.UndefinedExport, scope['__all__'].source, name) else: all = [] # Look for imported names that aren't used without checking imports in namespace definition for importation in scope.values(): if isinstance(importation, Importation) and not importation.used and importation.name not in all: self.report(messages.UnusedImport, importation.source, importation.name) def push_scope(self, scope_class=FunctionScope): self.scope_stack.append(scope_class()) def push_function_scope(self): # XXX Deprecated self.push_scope(FunctionScope) def push_class_scope(self): # XXX Deprecated self.push_scope(ClassScope) def report(self, message_class, *args, **kwargs): error_code = message_class.error_code if(not error_code[:2] + "00" in self.ignore_errors and not error_code in self.ignore_errors and not str(message_class.error_number) in self.ignore_errors): kwargs['verbose'] = self.settings.get('verbose') message = message_class(self.filename, *args, **kwargs) if message.lineno not in self.ignore_lines: self.messages.append(message) def has_parent(self, node, kind): while hasattr(node, 'parent'): node = node.parent if isinstance(node, kind): return True def get_common_ancestor(self, lnode, rnode, stop=None): stop = stop or self.root if lnode is rnode: return lnode if stop in (lnode, rnode): return stop if not hasattr(lnode, 'parent') or not hasattr(rnode, 'parent'): return if (lnode.level > rnode.level): return self.get_common_ancestor(lnode.parent, rnode, stop) if (rnode.level > lnode.level): return self.get_common_ancestor(lnode, rnode.parent, stop) return self.get_common_ancestor(lnode.parent, rnode.parent, stop) def descendant_of(self, node, ancestors, stop=None): for ancestor in ancestors: if self.get_common_ancestor(node, ancestor, stop) not in (stop, None): return True return False def on_fork(self, parent, lnode, rnode, items): return (self.descendant_of(lnode, items, parent) ^ self.descendant_of(rnode, items, parent)) def different_forks(self, lnode, rnode): """True, if lnode and rnode are located on different forks of IF/TRY.""" ancestor = self.get_common_ancestor(lnode, rnode) if isinstance(ancestor, ast.If): for fork in (ancestor.body, ancestor.orelse): if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, ast.Try): body = ancestor.body + ancestor.orelse for fork in [body] + [[hdl] for hdl in ancestor.handlers]: if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, ast.TryFinally): if self.on_fork(ancestor, lnode, rnode, ancestor.body): return True return False def add_binding(self, node, value, report_redef=True): """Called when a binding is altered. 
- `node` is the statement responsible for the change - `value` is the optional new value, a Binding instance, associated with the binding; if None, the binding is deleted if it exists. - if `report_redef` is True (default), rebinding while unused will be reported. """ redefinedWhileUnused = False if not isinstance(self.scope, ClassScope): for scope in self.scope_stack[::-1]: existing = scope.get(value.name) if (isinstance(existing, Importation) and not existing.used and (not isinstance(value, Importation) or value.fullName == existing.fullName) and report_redef and not self.different_forks(node, existing.source)): redefinedWhileUnused = True self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) existing = self.scope.get(value.name) if not redefinedWhileUnused and self.has_parent(value.source, ast.ListComp): if (existing and report_redef and not self.has_parent(existing.source, (ast.For, ast.ListComp)) and not self.different_forks(node, existing.source)): self.report(messages.RedefinedInListComp, node, value.name, existing.source) if (isinstance(existing, Definition) and not existing.used and not self.different_forks(node, existing.source)): self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) else: self.scope[value.name] = value def get_node_handler(self, node_class): try: return self._node_handlers[node_class] except KeyError: nodeType = str(node_class.__name__).upper() self._node_handlers[node_class] = handler = getattr(self, nodeType) return handler def iter_visible_scopes(self): outerScopes = itertools.islice(self.scope_stack, len(self.scope_stack) - 1) scopes = [scope for scope in outerScopes if isinstance(scope, (FunctionScope, ModuleScope))] if (isinstance(self.scope, GeneratorScope) and scopes[-1] != self.scope_stack[-2]): scopes.append(self.scope_stack[-2]) scopes.append(self.scope_stack[-1]) return iter(reversed(scopes)) def handle_node_load(self, node): name = node_name(node) if not name: return importStarred = False for scope in self.iter_visible_scopes(): importStarred = importStarred or scope.importStarred try: scope[name].used = (self.scope, node) except KeyError: pass else: return # look in the built-ins if importStarred or name in self.frosted_builtins: return if name == '__path__' and os.path.basename(self.filename) == '__init__.py': # the special name __path__ is valid only in packages return # protected with a NameError handler? 
if 'NameError' not in self.except_handlers[-1]: self.report(messages.UndefinedName, node, name) def handle_node_store(self, node): name = node_name(node) if not name: return # if the name hasn't already been defined in the current scope if isinstance(self.scope, FunctionScope) and name not in self.scope: # for each function or module scope above us for scope in self.scope_stack[:-1]: if not isinstance(scope, (FunctionScope, ModuleScope)): continue # if the name was defined in that scope, and the name has # been accessed already in the current scope, and hasn't # been declared global used = name in scope and scope[name].used if used and used[0] is self.scope and name not in self.scope.globals: # then it's probably a mistake self.report(messages.UndefinedLocal, scope[name].used[1], name, scope[name].source) break parent = getattr(node, 'parent', None) if isinstance(parent, (ast.For, ast.comprehension, ast.Tuple, ast.List)): binding = Binding(name, node) elif (parent is not None and name == '__all__' and isinstance(self.scope, ModuleScope)): binding = ExportBinding(name, parent.value) else: binding = Assignment(name, node) if name in self.scope: binding.used = self.scope[name].used self.add_binding(node, binding) def handle_node_delete(self, node): name = node_name(node) if not name: return if isinstance(self.scope, FunctionScope) and name in self.scope.globals: self.scope.globals.remove(name) else: try: del self.scope[name] except KeyError: self.report(messages.UndefinedName, node, name) def handle_children(self, tree): for node in ast.iter_child_nodes(tree): self.handleNode(node, tree) def is_docstring(self, node): """Determine if the given node is a docstring, as long as it is at the correct place in the node tree.""" return isinstance(node, ast.Str) or (isinstance(node, ast.Expr) and isinstance(node.value, ast.Str)) def docstring(self, node): if isinstance(node, ast.Expr): node = node.value if not isinstance(node, ast.Str): return (None, None) # Computed incorrectly if the docstring has backslash doctest_lineno = node.lineno - node.s.count('\n') - 1 return (node.s, doctest_lineno) def handleNode(self, node, parent): if node is None: return if self.offset and getattr(node, 'lineno', None) is not None: node.lineno += self.offset[0] node.col_offset += self.offset[1] if self.trace_tree: print(' ' * self.node_depth + node.__class__.__name__) if self.futures_allowed and not (isinstance(node, ast.ImportFrom) or self.is_docstring(node)): self.futures_allowed = False self.node_depth += 1 node.level = self.node_depth node.parent = parent try: handler = self.get_node_handler(node.__class__) handler(node) finally: self.node_depth -= 1 if self.trace_tree: print(' ' * self.node_depth + 'end ' + node.__class__.__name__) _get_doctest_examples = doctest.DocTestParser().get_examples def handle_doctests(self, node): try: docstring, node_lineno = self.docstring(node.body[0]) if not docstring: return examples = self._get_doctest_examples(docstring) except (ValueError, IndexError): # e.g. line 6 of the docstring for <string> has inconsistent # leading whitespace: ... 
return node_offset = self.offset or (0, 0) self.push_scope() for example in examples: try: tree = compile(example.source, "<doctest>", "exec", ast.PyCF_ONLY_AST) except SyntaxError: e = sys.exc_info()[1] position = (node_lineno + example.lineno + e.lineno, example.indent + 4 + (e.offset or 0)) self.report(messages.DoctestSyntaxError, node, position) else: self.offset = (node_offset[0] + node_lineno + example.lineno, node_offset[1] + example.indent + 4) self.handle_children(tree) self.offset = node_offset self.pop_scope() def find_return_with_argument(self, node): """Finds and returns a return statment that has an argument. Note that we should use node.returns in Python 3, but this method is never called in Python 3 so we don't bother checking. """ for item in node.body: if isinstance(item, ast.Return) and item.value: return item elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): return_with_argument = self.find_return_with_argument(item) if return_with_argument: return return_with_argument def is_generator(self, node): """Checks whether a function is a generator by looking for a yield statement or expression.""" if not isinstance(node.body, list): # lambdas can not be generators return False for item in node.body: if isinstance(item, (ast.Assign, ast.Expr)): if isinstance(item.value, ast.Yield): return True elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): if self.is_generator(item): return True return False def ignore(self, node): pass # "stmt" type nodes RETURN = DELETE = PRINT = WHILE = IF = WITH = WITHITEM = RAISE = TRYFINALLY = ASSERT = EXEC = EXPR = handle_children CONTINUE = BREAK = PASS = ignore # "expr" type nodes BOOLOP = BINOP = UNARYOP = IFEXP = DICT = SET = YIELD = YIELDFROM = COMPARE = REPR = ATTRIBUTE = SUBSCRIPT = \ LIST = TUPLE = STARRED = NAMECONSTANT = handle_children NUM = STR = BYTES = ELLIPSIS = ignore # "slice" type nodes SLICE = EXTSLICE = INDEX = handle_children # expression contexts are node instances too, though being constants LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore # same for operators AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = BITOR = BITXOR = BITAND = FLOORDIV = INVERT = \ NOT = UADD = USUB = EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = ignore # additional node types COMPREHENSION = KEYWORD = handle_children def GLOBAL(self, node): """Keep track of globals declarations.""" if isinstance(self.scope, FunctionScope): self.scope.globals.update(node.names) NONLOCAL = GLOBAL def LISTCOMP(self, node): # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) def GENERATOREXP(self, node): self.push_scope(GeneratorScope) # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) self.pop_scope() SETCOMP = GENERATOREXP def DICTCOMP(self, node): self.push_scope(GeneratorScope) for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.key, node) self.handleNode(node.value, node) self.pop_scope() def FOR(self, node): """Process bindings for loop variables.""" vars = [] def collectLoopVars(n): if isinstance(n, ast.Name): vars.append(n.id) elif isinstance(n, ast.expr_context): return else: for c in ast.iter_child_nodes(n): collectLoopVars(c) collectLoopVars(node.target) for varn in vars: if (isinstance(self.scope.get(varn), Importation) # unused ones will get an unused import warning and self.scope[varn].used): 
self.report(messages.ImportShadowedByLoopVar, node, varn, self.scope[varn].source) self.handle_children(node) def NAME(self, node): """Handle occurrence of Name (which can be a load/store/delete access.)""" # Locate the name in locals / function / globals scopes. if isinstance(node.ctx, (ast.Load, ast.AugLoad)): self.handle_node_load(node) if (node.id == 'locals' and isinstance(self.scope, FunctionScope) and isinstance(node.parent, ast.Call)): # we are doing locals() call in current scope self.scope.uses_locals = True elif isinstance(node.ctx, (ast.Store, ast.AugStore)): self.handle_node_store(node) elif isinstance(node.ctx, ast.Del): self.handle_node_delete(node) else: # must be a Param context -- this only happens for names in function # arguments, but these aren't dispatched through here raise RuntimeError("Got impossible expression context: %r" % (node.ctx,)) def CALL(self, node): f = node.func if isinstance(f, ast.Name): for scope in self.iter_visible_scopes(): definition = scope.get(f.id) if definition: if isinstance(definition, FunctionDefinition): definition.signature.checkCall(node, self, f.id) break self.handle_children(node) def FUNCTIONDEF(self, node): for deco in node.decorator_list: self.handleNode(deco, node) self.add_binding(node, FunctionDefinition(node.name, node)) self.LAMBDA(node) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) def LAMBDA(self, node): args = [] annotations = [] if PY2: def addArgs(arglist): for arg in arglist: if isinstance(arg, ast.Tuple): addArgs(arg.elts) else: if arg.id in args: self.report(messages.DuplicateArgument, node, arg.id) args.append(arg.id) addArgs(node.args.args) defaults = node.args.defaults else: for arg in node.args.args + node.args.kwonlyargs: annotations.append(arg.annotation) args.append(arg.arg) defaults = node.args.defaults + node.args.kw_defaults # Only for Python3 FunctionDefs is_py3_func = hasattr(node, 'returns') for arg_name in ('vararg', 'kwarg'): wildcard = getattr(node.args, arg_name) if not wildcard: continue args.append(getattr(wildcard, 'arg', wildcard)) if is_py3_func: if PY34_GTE: annotations.append(wildcard.annotation) else: argannotation = arg_name + 'annotation' annotations.append(getattr(node.args, argannotation)) if is_py3_func: annotations.append(node.returns) if PY3: if len(set(args)) < len(args): for (idx, arg) in enumerate(args): if arg in args[:idx]: self.report(messages.DuplicateArgument, node, arg) for child in annotations + defaults: if child: self.handleNode(child, node) def runFunction(): self.push_scope() for name in args: self.add_binding(node, Argument(name, node), report_redef=False) if isinstance(node.body, list): # case for FunctionDefs for stmt in node.body: self.handleNode(stmt, node) else: # case for Lambdas self.handleNode(node.body, node) def checkUnusedAssignments(): """Check to see if any assignments have not been used.""" for name, binding in self.scope.unusedAssignments(): self.report(messages.UnusedVariable, binding.source, name) self.defer_assignment(checkUnusedAssignments) if PY2: def checkReturnWithArgumentInsideGenerator(): """Check to see if there are any return statements with arguments but the function is a generator.""" if self.is_generator(node): stmt = self.find_return_with_argument(node) if stmt is not None: self.report(messages.ReturnWithArgsInsideGenerator, stmt) self.defer_assignment(checkReturnWithArgumentInsideGenerator) self.pop_scope() self.defer_function(runFunction) def CLASSDEF(self, node): """Check names used in 
a class definition, including its decorators, base classes, and the body of its definition. Additionally, add its name to the current scope. """ for deco in node.decorator_list: self.handleNode(deco, node) for baseNode in node.bases: self.handleNode(baseNode, node) if not PY2: for keywordNode in node.keywords: self.handleNode(keywordNode, node) self.push_scope(ClassScope) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) for stmt in node.body: self.handleNode(stmt, node) self.pop_scope() self.add_binding(node, ClassDefinition(node.name, node)) def ASSIGN(self, node): self.handleNode(node.value, node) for target in node.targets: self.handleNode(target, node) def AUGASSIGN(self, node): self.handle_node_load(node.target) self.handleNode(node.value, node) self.handleNode(node.target, node) def IMPORT(self, node): for alias in node.names: name = alias.asname or alias.name importation = Importation(name, node) self.add_binding(node, importation) def IMPORTFROM(self, node): if node.module == '__future__': if not self.futures_allowed: self.report(messages.LateFutureImport, node, [n.name for n in node.names]) else: self.futures_allowed = False for alias in node.names: if alias.name == '*': self.scope.importStarred = True self.report(messages.ImportStarUsed, node, node.module) continue name = alias.asname or alias.name importation = Importation(name, node) if node.module == '__future__': importation.used = (self.scope, node) self.add_binding(node, importation) def TRY(self, node): handler_names = [] # List the exception handlers for handler in node.handlers: if isinstance(handler.type, ast.Tuple): for exc_type in handler.type.elts: handler_names.append(node_name(exc_type)) elif handler.type: handler_names.append(node_name(handler.type)) # Memorize the except handlers and process the body self.except_handlers.append(handler_names) for child in node.body: self.handleNode(child, node) self.except_handlers.pop() # Process the other nodes: "except:", "else:", "finally:" for child in ast.iter_child_nodes(node): if child not in node.body: self.handleNode(child, node) TRYEXCEPT = TRY def EXCEPTHANDLER(self, node): # 3.x: in addition to handling children, we must handle the name of # the exception, which is not a Name node, but a simple string. if node.type is None: self.report(messages.BareExcept, node) if isinstance(node.name, str): self.handle_node_store(node) self.handle_children(node)
timothycrosley/deprecated.frosted
frosted/checker.py
Checker.run_deferred
python
def run_deferred(self, deferred):
    for handler, scope, offset in deferred:
        self.scope_stack = scope
        self.offset = offset
        handler()
Run the callables in deferred using their associated scope stack.
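A minimal standalone sketch of the snapshot-and-replay pattern that defer_function/defer_assignment and run_deferred implement. The DeferDemo class and its attribute names are hypothetical and are not part of frosted; they only mirror the (handler, scope, offset) tuples shown in the code above.

class DeferDemo(object):
    def __init__(self):
        self.scope_stack = [{'name': 'module'}]
        self.offset = None
        self._deferred = []

    def defer(self, handler):
        # snapshot the current scope stack and offset alongside the handler,
        # the way defer_function/defer_assignment do
        self._deferred.append((handler, self.scope_stack[:], self.offset))

    def run_deferred(self, deferred):
        # restore each snapshot before invoking its handler
        for handler, scope, offset in deferred:
            self.scope_stack = scope
            self.offset = offset
            handler()

demo = DeferDemo()
demo.scope_stack.append({'name': 'function'})
demo.defer(lambda: print([s['name'] for s in demo.scope_stack]))
demo.scope_stack.pop()              # the function scope is gone by the time deferred code runs
demo.run_deferred(demo._deferred)   # prints ['module', 'function']: the saved stack is restored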
train
https://github.com/timothycrosley/deprecated.frosted/blob/61ba7f341fc55676c3580c8c4e52117986cd5e12/frosted/checker.py#L306-L311
null
class Checker(object): """The core of frosted, checks the cleanliness and sanity of Python code.""" node_depth = 0 offset = None trace_tree = False frosted_builtins = FROSTED_BUILTINS def __init__(self, tree, filename='(none)', builtins=None, ignore_lines=(), **settings): self.settings = settings self.ignore_errors = settings.get('ignore_frosted_errors', []) self.ignore_lines = ignore_lines file_specific_ignores = settings.get('ignore_frosted_errors_for_' + (os.path.basename(filename) or ""), None) if file_specific_ignores: self.ignore_errors += file_specific_ignores self._node_handlers = {} self._deferred_functions = [] self._deferred_assignments = [] self.dead_scopes = [] self.messages = [] self.filename = filename if builtins: self.frosted_builtins = self.frosted_builtins.union(builtins) self.scope_stack = [ModuleScope()] self.except_handlers = [()] self.futures_allowed = True self.root = tree self.handle_children(tree) self.run_deferred(self._deferred_functions) self._deferred_functions = None self.run_deferred(self._deferred_assignments) self._deferred_assignments = None del self.scope_stack[1:] self.pop_scope() self.check_dead_scopes() self.check_plugins() def check_plugins(self): """ collect plugins from entry point 'frosted.plugins' and run their check() method, passing the filename """ checkers = {} for ep in pkg_resources.iter_entry_points(group='frosted.plugins'): checkers.update({ep.name: ep.load()}) for plugin_name, plugin in checkers.items(): if self.filename != '(none)': messages = plugin.check(self.filename) for message, loc, args, kwargs in messages: self.report(message, loc, *args, **kwargs) def defer_function(self, callable): """Schedule a function handler to be called just before completion. This is used for handling function bodies, which must be deferred because code later in the file might modify the global scope. When 'callable' is called, the scope at the time this is called will be restored, however it will contain any new bindings added to it. 
""" self._deferred_functions.append((callable, self.scope_stack[:], self.offset)) def defer_assignment(self, callable): """Schedule an assignment handler to be called just after deferred function handlers.""" self._deferred_assignments.append((callable, self.scope_stack[:], self.offset)) @property def scope(self): return self.scope_stack[-1] def pop_scope(self): self.dead_scopes.append(self.scope_stack.pop()) def check_dead_scopes(self): """Look at scopes which have been fully examined and report names in them which were imported but unused.""" for scope in self.dead_scopes: export = isinstance(scope.get('__all__'), ExportBinding) if export: all = scope['__all__'].names() # Look for possible mistakes in the export list if not scope.importStarred and os.path.basename(self.filename) != '__init__.py': undefined = set(all) - set(scope) for name in undefined: self.report(messages.UndefinedExport, scope['__all__'].source, name) else: all = [] # Look for imported names that aren't used without checking imports in namespace definition for importation in scope.values(): if isinstance(importation, Importation) and not importation.used and importation.name not in all: self.report(messages.UnusedImport, importation.source, importation.name) def push_scope(self, scope_class=FunctionScope): self.scope_stack.append(scope_class()) def push_function_scope(self): # XXX Deprecated self.push_scope(FunctionScope) def push_class_scope(self): # XXX Deprecated self.push_scope(ClassScope) def report(self, message_class, *args, **kwargs): error_code = message_class.error_code if(not error_code[:2] + "00" in self.ignore_errors and not error_code in self.ignore_errors and not str(message_class.error_number) in self.ignore_errors): kwargs['verbose'] = self.settings.get('verbose') message = message_class(self.filename, *args, **kwargs) if message.lineno not in self.ignore_lines: self.messages.append(message) def has_parent(self, node, kind): while hasattr(node, 'parent'): node = node.parent if isinstance(node, kind): return True def get_common_ancestor(self, lnode, rnode, stop=None): stop = stop or self.root if lnode is rnode: return lnode if stop in (lnode, rnode): return stop if not hasattr(lnode, 'parent') or not hasattr(rnode, 'parent'): return if (lnode.level > rnode.level): return self.get_common_ancestor(lnode.parent, rnode, stop) if (rnode.level > lnode.level): return self.get_common_ancestor(lnode, rnode.parent, stop) return self.get_common_ancestor(lnode.parent, rnode.parent, stop) def descendant_of(self, node, ancestors, stop=None): for ancestor in ancestors: if self.get_common_ancestor(node, ancestor, stop) not in (stop, None): return True return False def on_fork(self, parent, lnode, rnode, items): return (self.descendant_of(lnode, items, parent) ^ self.descendant_of(rnode, items, parent)) def different_forks(self, lnode, rnode): """True, if lnode and rnode are located on different forks of IF/TRY.""" ancestor = self.get_common_ancestor(lnode, rnode) if isinstance(ancestor, ast.If): for fork in (ancestor.body, ancestor.orelse): if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, ast.Try): body = ancestor.body + ancestor.orelse for fork in [body] + [[hdl] for hdl in ancestor.handlers]: if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, ast.TryFinally): if self.on_fork(ancestor, lnode, rnode, ancestor.body): return True return False def add_binding(self, node, value, report_redef=True): """Called when a binding is altered. 
- `node` is the statement responsible for the change - `value` is the optional new value, a Binding instance, associated with the binding; if None, the binding is deleted if it exists. - if `report_redef` is True (default), rebinding while unused will be reported. """ redefinedWhileUnused = False if not isinstance(self.scope, ClassScope): for scope in self.scope_stack[::-1]: existing = scope.get(value.name) if (isinstance(existing, Importation) and not existing.used and (not isinstance(value, Importation) or value.fullName == existing.fullName) and report_redef and not self.different_forks(node, existing.source)): redefinedWhileUnused = True self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) existing = self.scope.get(value.name) if not redefinedWhileUnused and self.has_parent(value.source, ast.ListComp): if (existing and report_redef and not self.has_parent(existing.source, (ast.For, ast.ListComp)) and not self.different_forks(node, existing.source)): self.report(messages.RedefinedInListComp, node, value.name, existing.source) if (isinstance(existing, Definition) and not existing.used and not self.different_forks(node, existing.source)): self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) else: self.scope[value.name] = value def get_node_handler(self, node_class): try: return self._node_handlers[node_class] except KeyError: nodeType = str(node_class.__name__).upper() self._node_handlers[node_class] = handler = getattr(self, nodeType) return handler def iter_visible_scopes(self): outerScopes = itertools.islice(self.scope_stack, len(self.scope_stack) - 1) scopes = [scope for scope in outerScopes if isinstance(scope, (FunctionScope, ModuleScope))] if (isinstance(self.scope, GeneratorScope) and scopes[-1] != self.scope_stack[-2]): scopes.append(self.scope_stack[-2]) scopes.append(self.scope_stack[-1]) return iter(reversed(scopes)) def handle_node_load(self, node): name = node_name(node) if not name: return importStarred = False for scope in self.iter_visible_scopes(): importStarred = importStarred or scope.importStarred try: scope[name].used = (self.scope, node) except KeyError: pass else: return # look in the built-ins if importStarred or name in self.frosted_builtins: return if name == '__path__' and os.path.basename(self.filename) == '__init__.py': # the special name __path__ is valid only in packages return # protected with a NameError handler? 
if 'NameError' not in self.except_handlers[-1]: self.report(messages.UndefinedName, node, name) def handle_node_store(self, node): name = node_name(node) if not name: return # if the name hasn't already been defined in the current scope if isinstance(self.scope, FunctionScope) and name not in self.scope: # for each function or module scope above us for scope in self.scope_stack[:-1]: if not isinstance(scope, (FunctionScope, ModuleScope)): continue # if the name was defined in that scope, and the name has # been accessed already in the current scope, and hasn't # been declared global used = name in scope and scope[name].used if used and used[0] is self.scope and name not in self.scope.globals: # then it's probably a mistake self.report(messages.UndefinedLocal, scope[name].used[1], name, scope[name].source) break parent = getattr(node, 'parent', None) if isinstance(parent, (ast.For, ast.comprehension, ast.Tuple, ast.List)): binding = Binding(name, node) elif (parent is not None and name == '__all__' and isinstance(self.scope, ModuleScope)): binding = ExportBinding(name, parent.value) else: binding = Assignment(name, node) if name in self.scope: binding.used = self.scope[name].used self.add_binding(node, binding) def handle_node_delete(self, node): name = node_name(node) if not name: return if isinstance(self.scope, FunctionScope) and name in self.scope.globals: self.scope.globals.remove(name) else: try: del self.scope[name] except KeyError: self.report(messages.UndefinedName, node, name) def handle_children(self, tree): for node in ast.iter_child_nodes(tree): self.handleNode(node, tree) def is_docstring(self, node): """Determine if the given node is a docstring, as long as it is at the correct place in the node tree.""" return isinstance(node, ast.Str) or (isinstance(node, ast.Expr) and isinstance(node.value, ast.Str)) def docstring(self, node): if isinstance(node, ast.Expr): node = node.value if not isinstance(node, ast.Str): return (None, None) # Computed incorrectly if the docstring has backslash doctest_lineno = node.lineno - node.s.count('\n') - 1 return (node.s, doctest_lineno) def handleNode(self, node, parent): if node is None: return if self.offset and getattr(node, 'lineno', None) is not None: node.lineno += self.offset[0] node.col_offset += self.offset[1] if self.trace_tree: print(' ' * self.node_depth + node.__class__.__name__) if self.futures_allowed and not (isinstance(node, ast.ImportFrom) or self.is_docstring(node)): self.futures_allowed = False self.node_depth += 1 node.level = self.node_depth node.parent = parent try: handler = self.get_node_handler(node.__class__) handler(node) finally: self.node_depth -= 1 if self.trace_tree: print(' ' * self.node_depth + 'end ' + node.__class__.__name__) _get_doctest_examples = doctest.DocTestParser().get_examples def handle_doctests(self, node): try: docstring, node_lineno = self.docstring(node.body[0]) if not docstring: return examples = self._get_doctest_examples(docstring) except (ValueError, IndexError): # e.g. line 6 of the docstring for <string> has inconsistent # leading whitespace: ... 
return node_offset = self.offset or (0, 0) self.push_scope() for example in examples: try: tree = compile(example.source, "<doctest>", "exec", ast.PyCF_ONLY_AST) except SyntaxError: e = sys.exc_info()[1] position = (node_lineno + example.lineno + e.lineno, example.indent + 4 + (e.offset or 0)) self.report(messages.DoctestSyntaxError, node, position) else: self.offset = (node_offset[0] + node_lineno + example.lineno, node_offset[1] + example.indent + 4) self.handle_children(tree) self.offset = node_offset self.pop_scope() def find_return_with_argument(self, node): """Finds and returns a return statment that has an argument. Note that we should use node.returns in Python 3, but this method is never called in Python 3 so we don't bother checking. """ for item in node.body: if isinstance(item, ast.Return) and item.value: return item elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): return_with_argument = self.find_return_with_argument(item) if return_with_argument: return return_with_argument def is_generator(self, node): """Checks whether a function is a generator by looking for a yield statement or expression.""" if not isinstance(node.body, list): # lambdas can not be generators return False for item in node.body: if isinstance(item, (ast.Assign, ast.Expr)): if isinstance(item.value, ast.Yield): return True elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): if self.is_generator(item): return True return False def ignore(self, node): pass # "stmt" type nodes RETURN = DELETE = PRINT = WHILE = IF = WITH = WITHITEM = RAISE = TRYFINALLY = ASSERT = EXEC = EXPR = handle_children CONTINUE = BREAK = PASS = ignore # "expr" type nodes BOOLOP = BINOP = UNARYOP = IFEXP = DICT = SET = YIELD = YIELDFROM = COMPARE = REPR = ATTRIBUTE = SUBSCRIPT = \ LIST = TUPLE = STARRED = NAMECONSTANT = handle_children NUM = STR = BYTES = ELLIPSIS = ignore # "slice" type nodes SLICE = EXTSLICE = INDEX = handle_children # expression contexts are node instances too, though being constants LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore # same for operators AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = BITOR = BITXOR = BITAND = FLOORDIV = INVERT = \ NOT = UADD = USUB = EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = ignore # additional node types COMPREHENSION = KEYWORD = handle_children def GLOBAL(self, node): """Keep track of globals declarations.""" if isinstance(self.scope, FunctionScope): self.scope.globals.update(node.names) NONLOCAL = GLOBAL def LISTCOMP(self, node): # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) def GENERATOREXP(self, node): self.push_scope(GeneratorScope) # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) self.pop_scope() SETCOMP = GENERATOREXP def DICTCOMP(self, node): self.push_scope(GeneratorScope) for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.key, node) self.handleNode(node.value, node) self.pop_scope() def FOR(self, node): """Process bindings for loop variables.""" vars = [] def collectLoopVars(n): if isinstance(n, ast.Name): vars.append(n.id) elif isinstance(n, ast.expr_context): return else: for c in ast.iter_child_nodes(n): collectLoopVars(c) collectLoopVars(node.target) for varn in vars: if (isinstance(self.scope.get(varn), Importation) # unused ones will get an unused import warning and self.scope[varn].used): 
self.report(messages.ImportShadowedByLoopVar, node, varn, self.scope[varn].source) self.handle_children(node) def NAME(self, node): """Handle occurrence of Name (which can be a load/store/delete access.)""" # Locate the name in locals / function / globals scopes. if isinstance(node.ctx, (ast.Load, ast.AugLoad)): self.handle_node_load(node) if (node.id == 'locals' and isinstance(self.scope, FunctionScope) and isinstance(node.parent, ast.Call)): # we are doing locals() call in current scope self.scope.uses_locals = True elif isinstance(node.ctx, (ast.Store, ast.AugStore)): self.handle_node_store(node) elif isinstance(node.ctx, ast.Del): self.handle_node_delete(node) else: # must be a Param context -- this only happens for names in function # arguments, but these aren't dispatched through here raise RuntimeError("Got impossible expression context: %r" % (node.ctx,)) def CALL(self, node): f = node.func if isinstance(f, ast.Name): for scope in self.iter_visible_scopes(): definition = scope.get(f.id) if definition: if isinstance(definition, FunctionDefinition): definition.signature.checkCall(node, self, f.id) break self.handle_children(node) def FUNCTIONDEF(self, node): for deco in node.decorator_list: self.handleNode(deco, node) self.add_binding(node, FunctionDefinition(node.name, node)) self.LAMBDA(node) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) def LAMBDA(self, node): args = [] annotations = [] if PY2: def addArgs(arglist): for arg in arglist: if isinstance(arg, ast.Tuple): addArgs(arg.elts) else: if arg.id in args: self.report(messages.DuplicateArgument, node, arg.id) args.append(arg.id) addArgs(node.args.args) defaults = node.args.defaults else: for arg in node.args.args + node.args.kwonlyargs: annotations.append(arg.annotation) args.append(arg.arg) defaults = node.args.defaults + node.args.kw_defaults # Only for Python3 FunctionDefs is_py3_func = hasattr(node, 'returns') for arg_name in ('vararg', 'kwarg'): wildcard = getattr(node.args, arg_name) if not wildcard: continue args.append(getattr(wildcard, 'arg', wildcard)) if is_py3_func: if PY34_GTE: annotations.append(wildcard.annotation) else: argannotation = arg_name + 'annotation' annotations.append(getattr(node.args, argannotation)) if is_py3_func: annotations.append(node.returns) if PY3: if len(set(args)) < len(args): for (idx, arg) in enumerate(args): if arg in args[:idx]: self.report(messages.DuplicateArgument, node, arg) for child in annotations + defaults: if child: self.handleNode(child, node) def runFunction(): self.push_scope() for name in args: self.add_binding(node, Argument(name, node), report_redef=False) if isinstance(node.body, list): # case for FunctionDefs for stmt in node.body: self.handleNode(stmt, node) else: # case for Lambdas self.handleNode(node.body, node) def checkUnusedAssignments(): """Check to see if any assignments have not been used.""" for name, binding in self.scope.unusedAssignments(): self.report(messages.UnusedVariable, binding.source, name) self.defer_assignment(checkUnusedAssignments) if PY2: def checkReturnWithArgumentInsideGenerator(): """Check to see if there are any return statements with arguments but the function is a generator.""" if self.is_generator(node): stmt = self.find_return_with_argument(node) if stmt is not None: self.report(messages.ReturnWithArgsInsideGenerator, stmt) self.defer_assignment(checkReturnWithArgumentInsideGenerator) self.pop_scope() self.defer_function(runFunction) def CLASSDEF(self, node): """Check names used in 
a class definition, including its decorators, base classes, and the body of its definition. Additionally, add its name to the current scope. """ for deco in node.decorator_list: self.handleNode(deco, node) for baseNode in node.bases: self.handleNode(baseNode, node) if not PY2: for keywordNode in node.keywords: self.handleNode(keywordNode, node) self.push_scope(ClassScope) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) for stmt in node.body: self.handleNode(stmt, node) self.pop_scope() self.add_binding(node, ClassDefinition(node.name, node)) def ASSIGN(self, node): self.handleNode(node.value, node) for target in node.targets: self.handleNode(target, node) def AUGASSIGN(self, node): self.handle_node_load(node.target) self.handleNode(node.value, node) self.handleNode(node.target, node) def IMPORT(self, node): for alias in node.names: name = alias.asname or alias.name importation = Importation(name, node) self.add_binding(node, importation) def IMPORTFROM(self, node): if node.module == '__future__': if not self.futures_allowed: self.report(messages.LateFutureImport, node, [n.name for n in node.names]) else: self.futures_allowed = False for alias in node.names: if alias.name == '*': self.scope.importStarred = True self.report(messages.ImportStarUsed, node, node.module) continue name = alias.asname or alias.name importation = Importation(name, node) if node.module == '__future__': importation.used = (self.scope, node) self.add_binding(node, importation) def TRY(self, node): handler_names = [] # List the exception handlers for handler in node.handlers: if isinstance(handler.type, ast.Tuple): for exc_type in handler.type.elts: handler_names.append(node_name(exc_type)) elif handler.type: handler_names.append(node_name(handler.type)) # Memorize the except handlers and process the body self.except_handlers.append(handler_names) for child in node.body: self.handleNode(child, node) self.except_handlers.pop() # Process the other nodes: "except:", "else:", "finally:" for child in ast.iter_child_nodes(node): if child not in node.body: self.handleNode(child, node) TRYEXCEPT = TRY def EXCEPTHANDLER(self, node): # 3.x: in addition to handling children, we must handle the name of # the exception, which is not a Name node, but a simple string. if node.type is None: self.report(messages.BareExcept, node) if isinstance(node.name, str): self.handle_node_store(node) self.handle_children(node)
timothycrosley/deprecated.frosted
frosted/checker.py
Checker.check_dead_scopes
python
def check_dead_scopes(self):
    for scope in self.dead_scopes:
        export = isinstance(scope.get('__all__'), ExportBinding)
        if export:
            all = scope['__all__'].names()
            # Look for possible mistakes in the export list
            if not scope.importStarred and os.path.basename(self.filename) != '__init__.py':
                undefined = set(all) - set(scope)
                for name in undefined:
                    self.report(messages.UndefinedExport, scope['__all__'].source, name)
        else:
            all = []

        # Look for imported names that aren't used without checking imports in namespace definition
        for importation in scope.values():
            if isinstance(importation, Importation) and not importation.used and importation.name not in all:
                self.report(messages.UnusedImport, importation.source, importation.name)
Look at scopes which have been fully examined and report names in them which were imported but unused.
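A hedged usage sketch of the behaviour described above: an import that is never used in a fully examined scope is reported, while a name re-exported through __all__ is not. Importing Checker from frosted.checker and constructing it directly are assumptions inferred from the file path and constructor signature shown in this record, not documented frosted API.

import ast

from frosted.checker import Checker   # import path assumed from frosted/checker.py

tree = ast.parse("import os\nimport sys\n__all__ = ['sys']\n")
checker = Checker(tree)                # filename defaults to '(none)'
for message in checker.messages:
    print(message)                     # expected: an UnusedImport report for 'os' only;
                                       # 'sys' escapes because it is listed in __all__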
train
https://github.com/timothycrosley/deprecated.frosted/blob/61ba7f341fc55676c3580c8c4e52117986cd5e12/frosted/checker.py#L320-L338
null
class Checker(object): """The core of frosted, checks the cleanliness and sanity of Python code.""" node_depth = 0 offset = None trace_tree = False frosted_builtins = FROSTED_BUILTINS def __init__(self, tree, filename='(none)', builtins=None, ignore_lines=(), **settings): self.settings = settings self.ignore_errors = settings.get('ignore_frosted_errors', []) self.ignore_lines = ignore_lines file_specific_ignores = settings.get('ignore_frosted_errors_for_' + (os.path.basename(filename) or ""), None) if file_specific_ignores: self.ignore_errors += file_specific_ignores self._node_handlers = {} self._deferred_functions = [] self._deferred_assignments = [] self.dead_scopes = [] self.messages = [] self.filename = filename if builtins: self.frosted_builtins = self.frosted_builtins.union(builtins) self.scope_stack = [ModuleScope()] self.except_handlers = [()] self.futures_allowed = True self.root = tree self.handle_children(tree) self.run_deferred(self._deferred_functions) self._deferred_functions = None self.run_deferred(self._deferred_assignments) self._deferred_assignments = None del self.scope_stack[1:] self.pop_scope() self.check_dead_scopes() self.check_plugins() def check_plugins(self): """ collect plugins from entry point 'frosted.plugins' and run their check() method, passing the filename """ checkers = {} for ep in pkg_resources.iter_entry_points(group='frosted.plugins'): checkers.update({ep.name: ep.load()}) for plugin_name, plugin in checkers.items(): if self.filename != '(none)': messages = plugin.check(self.filename) for message, loc, args, kwargs in messages: self.report(message, loc, *args, **kwargs) def defer_function(self, callable): """Schedule a function handler to be called just before completion. This is used for handling function bodies, which must be deferred because code later in the file might modify the global scope. When 'callable' is called, the scope at the time this is called will be restored, however it will contain any new bindings added to it. 
""" self._deferred_functions.append((callable, self.scope_stack[:], self.offset)) def defer_assignment(self, callable): """Schedule an assignment handler to be called just after deferred function handlers.""" self._deferred_assignments.append((callable, self.scope_stack[:], self.offset)) def run_deferred(self, deferred): """Run the callables in deferred using their associated scope stack.""" for handler, scope, offset in deferred: self.scope_stack = scope self.offset = offset handler() @property def scope(self): return self.scope_stack[-1] def pop_scope(self): self.dead_scopes.append(self.scope_stack.pop()) def push_scope(self, scope_class=FunctionScope): self.scope_stack.append(scope_class()) def push_function_scope(self): # XXX Deprecated self.push_scope(FunctionScope) def push_class_scope(self): # XXX Deprecated self.push_scope(ClassScope) def report(self, message_class, *args, **kwargs): error_code = message_class.error_code if(not error_code[:2] + "00" in self.ignore_errors and not error_code in self.ignore_errors and not str(message_class.error_number) in self.ignore_errors): kwargs['verbose'] = self.settings.get('verbose') message = message_class(self.filename, *args, **kwargs) if message.lineno not in self.ignore_lines: self.messages.append(message) def has_parent(self, node, kind): while hasattr(node, 'parent'): node = node.parent if isinstance(node, kind): return True def get_common_ancestor(self, lnode, rnode, stop=None): stop = stop or self.root if lnode is rnode: return lnode if stop in (lnode, rnode): return stop if not hasattr(lnode, 'parent') or not hasattr(rnode, 'parent'): return if (lnode.level > rnode.level): return self.get_common_ancestor(lnode.parent, rnode, stop) if (rnode.level > lnode.level): return self.get_common_ancestor(lnode, rnode.parent, stop) return self.get_common_ancestor(lnode.parent, rnode.parent, stop) def descendant_of(self, node, ancestors, stop=None): for ancestor in ancestors: if self.get_common_ancestor(node, ancestor, stop) not in (stop, None): return True return False def on_fork(self, parent, lnode, rnode, items): return (self.descendant_of(lnode, items, parent) ^ self.descendant_of(rnode, items, parent)) def different_forks(self, lnode, rnode): """True, if lnode and rnode are located on different forks of IF/TRY.""" ancestor = self.get_common_ancestor(lnode, rnode) if isinstance(ancestor, ast.If): for fork in (ancestor.body, ancestor.orelse): if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, ast.Try): body = ancestor.body + ancestor.orelse for fork in [body] + [[hdl] for hdl in ancestor.handlers]: if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, ast.TryFinally): if self.on_fork(ancestor, lnode, rnode, ancestor.body): return True return False def add_binding(self, node, value, report_redef=True): """Called when a binding is altered. - `node` is the statement responsible for the change - `value` is the optional new value, a Binding instance, associated with the binding; if None, the binding is deleted if it exists. - if `report_redef` is True (default), rebinding while unused will be reported. 
""" redefinedWhileUnused = False if not isinstance(self.scope, ClassScope): for scope in self.scope_stack[::-1]: existing = scope.get(value.name) if (isinstance(existing, Importation) and not existing.used and (not isinstance(value, Importation) or value.fullName == existing.fullName) and report_redef and not self.different_forks(node, existing.source)): redefinedWhileUnused = True self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) existing = self.scope.get(value.name) if not redefinedWhileUnused and self.has_parent(value.source, ast.ListComp): if (existing and report_redef and not self.has_parent(existing.source, (ast.For, ast.ListComp)) and not self.different_forks(node, existing.source)): self.report(messages.RedefinedInListComp, node, value.name, existing.source) if (isinstance(existing, Definition) and not existing.used and not self.different_forks(node, existing.source)): self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) else: self.scope[value.name] = value def get_node_handler(self, node_class): try: return self._node_handlers[node_class] except KeyError: nodeType = str(node_class.__name__).upper() self._node_handlers[node_class] = handler = getattr(self, nodeType) return handler def iter_visible_scopes(self): outerScopes = itertools.islice(self.scope_stack, len(self.scope_stack) - 1) scopes = [scope for scope in outerScopes if isinstance(scope, (FunctionScope, ModuleScope))] if (isinstance(self.scope, GeneratorScope) and scopes[-1] != self.scope_stack[-2]): scopes.append(self.scope_stack[-2]) scopes.append(self.scope_stack[-1]) return iter(reversed(scopes)) def handle_node_load(self, node): name = node_name(node) if not name: return importStarred = False for scope in self.iter_visible_scopes(): importStarred = importStarred or scope.importStarred try: scope[name].used = (self.scope, node) except KeyError: pass else: return # look in the built-ins if importStarred or name in self.frosted_builtins: return if name == '__path__' and os.path.basename(self.filename) == '__init__.py': # the special name __path__ is valid only in packages return # protected with a NameError handler? 
if 'NameError' not in self.except_handlers[-1]: self.report(messages.UndefinedName, node, name) def handle_node_store(self, node): name = node_name(node) if not name: return # if the name hasn't already been defined in the current scope if isinstance(self.scope, FunctionScope) and name not in self.scope: # for each function or module scope above us for scope in self.scope_stack[:-1]: if not isinstance(scope, (FunctionScope, ModuleScope)): continue # if the name was defined in that scope, and the name has # been accessed already in the current scope, and hasn't # been declared global used = name in scope and scope[name].used if used and used[0] is self.scope and name not in self.scope.globals: # then it's probably a mistake self.report(messages.UndefinedLocal, scope[name].used[1], name, scope[name].source) break parent = getattr(node, 'parent', None) if isinstance(parent, (ast.For, ast.comprehension, ast.Tuple, ast.List)): binding = Binding(name, node) elif (parent is not None and name == '__all__' and isinstance(self.scope, ModuleScope)): binding = ExportBinding(name, parent.value) else: binding = Assignment(name, node) if name in self.scope: binding.used = self.scope[name].used self.add_binding(node, binding) def handle_node_delete(self, node): name = node_name(node) if not name: return if isinstance(self.scope, FunctionScope) and name in self.scope.globals: self.scope.globals.remove(name) else: try: del self.scope[name] except KeyError: self.report(messages.UndefinedName, node, name) def handle_children(self, tree): for node in ast.iter_child_nodes(tree): self.handleNode(node, tree) def is_docstring(self, node): """Determine if the given node is a docstring, as long as it is at the correct place in the node tree.""" return isinstance(node, ast.Str) or (isinstance(node, ast.Expr) and isinstance(node.value, ast.Str)) def docstring(self, node): if isinstance(node, ast.Expr): node = node.value if not isinstance(node, ast.Str): return (None, None) # Computed incorrectly if the docstring has backslash doctest_lineno = node.lineno - node.s.count('\n') - 1 return (node.s, doctest_lineno) def handleNode(self, node, parent): if node is None: return if self.offset and getattr(node, 'lineno', None) is not None: node.lineno += self.offset[0] node.col_offset += self.offset[1] if self.trace_tree: print(' ' * self.node_depth + node.__class__.__name__) if self.futures_allowed and not (isinstance(node, ast.ImportFrom) or self.is_docstring(node)): self.futures_allowed = False self.node_depth += 1 node.level = self.node_depth node.parent = parent try: handler = self.get_node_handler(node.__class__) handler(node) finally: self.node_depth -= 1 if self.trace_tree: print(' ' * self.node_depth + 'end ' + node.__class__.__name__) _get_doctest_examples = doctest.DocTestParser().get_examples def handle_doctests(self, node): try: docstring, node_lineno = self.docstring(node.body[0]) if not docstring: return examples = self._get_doctest_examples(docstring) except (ValueError, IndexError): # e.g. line 6 of the docstring for <string> has inconsistent # leading whitespace: ... 
return node_offset = self.offset or (0, 0) self.push_scope() for example in examples: try: tree = compile(example.source, "<doctest>", "exec", ast.PyCF_ONLY_AST) except SyntaxError: e = sys.exc_info()[1] position = (node_lineno + example.lineno + e.lineno, example.indent + 4 + (e.offset or 0)) self.report(messages.DoctestSyntaxError, node, position) else: self.offset = (node_offset[0] + node_lineno + example.lineno, node_offset[1] + example.indent + 4) self.handle_children(tree) self.offset = node_offset self.pop_scope() def find_return_with_argument(self, node): """Finds and returns a return statment that has an argument. Note that we should use node.returns in Python 3, but this method is never called in Python 3 so we don't bother checking. """ for item in node.body: if isinstance(item, ast.Return) and item.value: return item elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): return_with_argument = self.find_return_with_argument(item) if return_with_argument: return return_with_argument def is_generator(self, node): """Checks whether a function is a generator by looking for a yield statement or expression.""" if not isinstance(node.body, list): # lambdas can not be generators return False for item in node.body: if isinstance(item, (ast.Assign, ast.Expr)): if isinstance(item.value, ast.Yield): return True elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): if self.is_generator(item): return True return False def ignore(self, node): pass # "stmt" type nodes RETURN = DELETE = PRINT = WHILE = IF = WITH = WITHITEM = RAISE = TRYFINALLY = ASSERT = EXEC = EXPR = handle_children CONTINUE = BREAK = PASS = ignore # "expr" type nodes BOOLOP = BINOP = UNARYOP = IFEXP = DICT = SET = YIELD = YIELDFROM = COMPARE = REPR = ATTRIBUTE = SUBSCRIPT = \ LIST = TUPLE = STARRED = NAMECONSTANT = handle_children NUM = STR = BYTES = ELLIPSIS = ignore # "slice" type nodes SLICE = EXTSLICE = INDEX = handle_children # expression contexts are node instances too, though being constants LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore # same for operators AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = BITOR = BITXOR = BITAND = FLOORDIV = INVERT = \ NOT = UADD = USUB = EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = ignore # additional node types COMPREHENSION = KEYWORD = handle_children def GLOBAL(self, node): """Keep track of globals declarations.""" if isinstance(self.scope, FunctionScope): self.scope.globals.update(node.names) NONLOCAL = GLOBAL def LISTCOMP(self, node): # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) def GENERATOREXP(self, node): self.push_scope(GeneratorScope) # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) self.pop_scope() SETCOMP = GENERATOREXP def DICTCOMP(self, node): self.push_scope(GeneratorScope) for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.key, node) self.handleNode(node.value, node) self.pop_scope() def FOR(self, node): """Process bindings for loop variables.""" vars = [] def collectLoopVars(n): if isinstance(n, ast.Name): vars.append(n.id) elif isinstance(n, ast.expr_context): return else: for c in ast.iter_child_nodes(n): collectLoopVars(c) collectLoopVars(node.target) for varn in vars: if (isinstance(self.scope.get(varn), Importation) # unused ones will get an unused import warning and self.scope[varn].used): 
self.report(messages.ImportShadowedByLoopVar, node, varn, self.scope[varn].source) self.handle_children(node) def NAME(self, node): """Handle occurrence of Name (which can be a load/store/delete access.)""" # Locate the name in locals / function / globals scopes. if isinstance(node.ctx, (ast.Load, ast.AugLoad)): self.handle_node_load(node) if (node.id == 'locals' and isinstance(self.scope, FunctionScope) and isinstance(node.parent, ast.Call)): # we are doing locals() call in current scope self.scope.uses_locals = True elif isinstance(node.ctx, (ast.Store, ast.AugStore)): self.handle_node_store(node) elif isinstance(node.ctx, ast.Del): self.handle_node_delete(node) else: # must be a Param context -- this only happens for names in function # arguments, but these aren't dispatched through here raise RuntimeError("Got impossible expression context: %r" % (node.ctx,)) def CALL(self, node): f = node.func if isinstance(f, ast.Name): for scope in self.iter_visible_scopes(): definition = scope.get(f.id) if definition: if isinstance(definition, FunctionDefinition): definition.signature.checkCall(node, self, f.id) break self.handle_children(node) def FUNCTIONDEF(self, node): for deco in node.decorator_list: self.handleNode(deco, node) self.add_binding(node, FunctionDefinition(node.name, node)) self.LAMBDA(node) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) def LAMBDA(self, node): args = [] annotations = [] if PY2: def addArgs(arglist): for arg in arglist: if isinstance(arg, ast.Tuple): addArgs(arg.elts) else: if arg.id in args: self.report(messages.DuplicateArgument, node, arg.id) args.append(arg.id) addArgs(node.args.args) defaults = node.args.defaults else: for arg in node.args.args + node.args.kwonlyargs: annotations.append(arg.annotation) args.append(arg.arg) defaults = node.args.defaults + node.args.kw_defaults # Only for Python3 FunctionDefs is_py3_func = hasattr(node, 'returns') for arg_name in ('vararg', 'kwarg'): wildcard = getattr(node.args, arg_name) if not wildcard: continue args.append(getattr(wildcard, 'arg', wildcard)) if is_py3_func: if PY34_GTE: annotations.append(wildcard.annotation) else: argannotation = arg_name + 'annotation' annotations.append(getattr(node.args, argannotation)) if is_py3_func: annotations.append(node.returns) if PY3: if len(set(args)) < len(args): for (idx, arg) in enumerate(args): if arg in args[:idx]: self.report(messages.DuplicateArgument, node, arg) for child in annotations + defaults: if child: self.handleNode(child, node) def runFunction(): self.push_scope() for name in args: self.add_binding(node, Argument(name, node), report_redef=False) if isinstance(node.body, list): # case for FunctionDefs for stmt in node.body: self.handleNode(stmt, node) else: # case for Lambdas self.handleNode(node.body, node) def checkUnusedAssignments(): """Check to see if any assignments have not been used.""" for name, binding in self.scope.unusedAssignments(): self.report(messages.UnusedVariable, binding.source, name) self.defer_assignment(checkUnusedAssignments) if PY2: def checkReturnWithArgumentInsideGenerator(): """Check to see if there are any return statements with arguments but the function is a generator.""" if self.is_generator(node): stmt = self.find_return_with_argument(node) if stmt is not None: self.report(messages.ReturnWithArgsInsideGenerator, stmt) self.defer_assignment(checkReturnWithArgumentInsideGenerator) self.pop_scope() self.defer_function(runFunction) def CLASSDEF(self, node): """Check names used in 
a class definition, including its decorators, base classes, and the body of its definition. Additionally, add its name to the current scope. """ for deco in node.decorator_list: self.handleNode(deco, node) for baseNode in node.bases: self.handleNode(baseNode, node) if not PY2: for keywordNode in node.keywords: self.handleNode(keywordNode, node) self.push_scope(ClassScope) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) for stmt in node.body: self.handleNode(stmt, node) self.pop_scope() self.add_binding(node, ClassDefinition(node.name, node)) def ASSIGN(self, node): self.handleNode(node.value, node) for target in node.targets: self.handleNode(target, node) def AUGASSIGN(self, node): self.handle_node_load(node.target) self.handleNode(node.value, node) self.handleNode(node.target, node) def IMPORT(self, node): for alias in node.names: name = alias.asname or alias.name importation = Importation(name, node) self.add_binding(node, importation) def IMPORTFROM(self, node): if node.module == '__future__': if not self.futures_allowed: self.report(messages.LateFutureImport, node, [n.name for n in node.names]) else: self.futures_allowed = False for alias in node.names: if alias.name == '*': self.scope.importStarred = True self.report(messages.ImportStarUsed, node, node.module) continue name = alias.asname or alias.name importation = Importation(name, node) if node.module == '__future__': importation.used = (self.scope, node) self.add_binding(node, importation) def TRY(self, node): handler_names = [] # List the exception handlers for handler in node.handlers: if isinstance(handler.type, ast.Tuple): for exc_type in handler.type.elts: handler_names.append(node_name(exc_type)) elif handler.type: handler_names.append(node_name(handler.type)) # Memorize the except handlers and process the body self.except_handlers.append(handler_names) for child in node.body: self.handleNode(child, node) self.except_handlers.pop() # Process the other nodes: "except:", "else:", "finally:" for child in ast.iter_child_nodes(node): if child not in node.body: self.handleNode(child, node) TRYEXCEPT = TRY def EXCEPTHANDLER(self, node): # 3.x: in addition to handling children, we must handle the name of # the exception, which is not a Name node, but a simple string. if node.type is None: self.report(messages.BareExcept, node) if isinstance(node.name, str): self.handle_node_store(node) self.handle_children(node)
timothycrosley/deprecated.frosted
frosted/checker.py
Checker.different_forks
python
def different_forks(self, lnode, rnode):
    ancestor = self.get_common_ancestor(lnode, rnode)
    if isinstance(ancestor, ast.If):
        for fork in (ancestor.body, ancestor.orelse):
            if self.on_fork(ancestor, lnode, rnode, fork):
                return True
    elif isinstance(ancestor, ast.Try):
        body = ancestor.body + ancestor.orelse
        for fork in [body] + [[hdl] for hdl in ancestor.handlers]:
            if self.on_fork(ancestor, lnode, rnode, fork):
                return True
    elif isinstance(ancestor, ast.TryFinally):
        if self.on_fork(ancestor, lnode, rnode, ancestor.body):
            return True
    return False
True if lnode and rnode are located on different forks of an if/try statement.
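A hedged sketch of what "different forks" means in practice: the two imports below rebind the same name, but they sit on opposite branches of one if statement, so the redefinition check that consults different_forks should stay silent. As before, the direct Checker construction and the frosted.checker import path are assumptions based on this record, not documented frosted usage.

import ast

from frosted.checker import Checker   # import path assumed from frosted/checker.py

tree = ast.parse(
    "import sys\n"
    "if sys.version_info[0] == 2:\n"
    "    from urllib2 import urlopen\n"
    "else:\n"
    "    from urllib.request import urlopen\n"
    "print(urlopen)\n"
)
checker = Checker(tree)
print(checker.messages)   # expected: empty -- no RedefinedWhileUnused for 'urlopen',
                          # because the two bindings live on different forks of the if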
train
https://github.com/timothycrosley/deprecated.frosted/blob/61ba7f341fc55676c3580c8c4e52117986cd5e12/frosted/checker.py#L389-L405
null
class Checker(object): """The core of frosted, checks the cleanliness and sanity of Python code.""" node_depth = 0 offset = None trace_tree = False frosted_builtins = FROSTED_BUILTINS def __init__(self, tree, filename='(none)', builtins=None, ignore_lines=(), **settings): self.settings = settings self.ignore_errors = settings.get('ignore_frosted_errors', []) self.ignore_lines = ignore_lines file_specific_ignores = settings.get('ignore_frosted_errors_for_' + (os.path.basename(filename) or ""), None) if file_specific_ignores: self.ignore_errors += file_specific_ignores self._node_handlers = {} self._deferred_functions = [] self._deferred_assignments = [] self.dead_scopes = [] self.messages = [] self.filename = filename if builtins: self.frosted_builtins = self.frosted_builtins.union(builtins) self.scope_stack = [ModuleScope()] self.except_handlers = [()] self.futures_allowed = True self.root = tree self.handle_children(tree) self.run_deferred(self._deferred_functions) self._deferred_functions = None self.run_deferred(self._deferred_assignments) self._deferred_assignments = None del self.scope_stack[1:] self.pop_scope() self.check_dead_scopes() self.check_plugins() def check_plugins(self): """ collect plugins from entry point 'frosted.plugins' and run their check() method, passing the filename """ checkers = {} for ep in pkg_resources.iter_entry_points(group='frosted.plugins'): checkers.update({ep.name: ep.load()}) for plugin_name, plugin in checkers.items(): if self.filename != '(none)': messages = plugin.check(self.filename) for message, loc, args, kwargs in messages: self.report(message, loc, *args, **kwargs) def defer_function(self, callable): """Schedule a function handler to be called just before completion. This is used for handling function bodies, which must be deferred because code later in the file might modify the global scope. When 'callable' is called, the scope at the time this is called will be restored, however it will contain any new bindings added to it. 
""" self._deferred_functions.append((callable, self.scope_stack[:], self.offset)) def defer_assignment(self, callable): """Schedule an assignment handler to be called just after deferred function handlers.""" self._deferred_assignments.append((callable, self.scope_stack[:], self.offset)) def run_deferred(self, deferred): """Run the callables in deferred using their associated scope stack.""" for handler, scope, offset in deferred: self.scope_stack = scope self.offset = offset handler() @property def scope(self): return self.scope_stack[-1] def pop_scope(self): self.dead_scopes.append(self.scope_stack.pop()) def check_dead_scopes(self): """Look at scopes which have been fully examined and report names in them which were imported but unused.""" for scope in self.dead_scopes: export = isinstance(scope.get('__all__'), ExportBinding) if export: all = scope['__all__'].names() # Look for possible mistakes in the export list if not scope.importStarred and os.path.basename(self.filename) != '__init__.py': undefined = set(all) - set(scope) for name in undefined: self.report(messages.UndefinedExport, scope['__all__'].source, name) else: all = [] # Look for imported names that aren't used without checking imports in namespace definition for importation in scope.values(): if isinstance(importation, Importation) and not importation.used and importation.name not in all: self.report(messages.UnusedImport, importation.source, importation.name) def push_scope(self, scope_class=FunctionScope): self.scope_stack.append(scope_class()) def push_function_scope(self): # XXX Deprecated self.push_scope(FunctionScope) def push_class_scope(self): # XXX Deprecated self.push_scope(ClassScope) def report(self, message_class, *args, **kwargs): error_code = message_class.error_code if(not error_code[:2] + "00" in self.ignore_errors and not error_code in self.ignore_errors and not str(message_class.error_number) in self.ignore_errors): kwargs['verbose'] = self.settings.get('verbose') message = message_class(self.filename, *args, **kwargs) if message.lineno not in self.ignore_lines: self.messages.append(message) def has_parent(self, node, kind): while hasattr(node, 'parent'): node = node.parent if isinstance(node, kind): return True def get_common_ancestor(self, lnode, rnode, stop=None): stop = stop or self.root if lnode is rnode: return lnode if stop in (lnode, rnode): return stop if not hasattr(lnode, 'parent') or not hasattr(rnode, 'parent'): return if (lnode.level > rnode.level): return self.get_common_ancestor(lnode.parent, rnode, stop) if (rnode.level > lnode.level): return self.get_common_ancestor(lnode, rnode.parent, stop) return self.get_common_ancestor(lnode.parent, rnode.parent, stop) def descendant_of(self, node, ancestors, stop=None): for ancestor in ancestors: if self.get_common_ancestor(node, ancestor, stop) not in (stop, None): return True return False def on_fork(self, parent, lnode, rnode, items): return (self.descendant_of(lnode, items, parent) ^ self.descendant_of(rnode, items, parent)) def add_binding(self, node, value, report_redef=True): """Called when a binding is altered. - `node` is the statement responsible for the change - `value` is the optional new value, a Binding instance, associated with the binding; if None, the binding is deleted if it exists. - if `report_redef` is True (default), rebinding while unused will be reported. 
""" redefinedWhileUnused = False if not isinstance(self.scope, ClassScope): for scope in self.scope_stack[::-1]: existing = scope.get(value.name) if (isinstance(existing, Importation) and not existing.used and (not isinstance(value, Importation) or value.fullName == existing.fullName) and report_redef and not self.different_forks(node, existing.source)): redefinedWhileUnused = True self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) existing = self.scope.get(value.name) if not redefinedWhileUnused and self.has_parent(value.source, ast.ListComp): if (existing and report_redef and not self.has_parent(existing.source, (ast.For, ast.ListComp)) and not self.different_forks(node, existing.source)): self.report(messages.RedefinedInListComp, node, value.name, existing.source) if (isinstance(existing, Definition) and not existing.used and not self.different_forks(node, existing.source)): self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) else: self.scope[value.name] = value def get_node_handler(self, node_class): try: return self._node_handlers[node_class] except KeyError: nodeType = str(node_class.__name__).upper() self._node_handlers[node_class] = handler = getattr(self, nodeType) return handler def iter_visible_scopes(self): outerScopes = itertools.islice(self.scope_stack, len(self.scope_stack) - 1) scopes = [scope for scope in outerScopes if isinstance(scope, (FunctionScope, ModuleScope))] if (isinstance(self.scope, GeneratorScope) and scopes[-1] != self.scope_stack[-2]): scopes.append(self.scope_stack[-2]) scopes.append(self.scope_stack[-1]) return iter(reversed(scopes)) def handle_node_load(self, node): name = node_name(node) if not name: return importStarred = False for scope in self.iter_visible_scopes(): importStarred = importStarred or scope.importStarred try: scope[name].used = (self.scope, node) except KeyError: pass else: return # look in the built-ins if importStarred or name in self.frosted_builtins: return if name == '__path__' and os.path.basename(self.filename) == '__init__.py': # the special name __path__ is valid only in packages return # protected with a NameError handler? 
if 'NameError' not in self.except_handlers[-1]: self.report(messages.UndefinedName, node, name) def handle_node_store(self, node): name = node_name(node) if not name: return # if the name hasn't already been defined in the current scope if isinstance(self.scope, FunctionScope) and name not in self.scope: # for each function or module scope above us for scope in self.scope_stack[:-1]: if not isinstance(scope, (FunctionScope, ModuleScope)): continue # if the name was defined in that scope, and the name has # been accessed already in the current scope, and hasn't # been declared global used = name in scope and scope[name].used if used and used[0] is self.scope and name not in self.scope.globals: # then it's probably a mistake self.report(messages.UndefinedLocal, scope[name].used[1], name, scope[name].source) break parent = getattr(node, 'parent', None) if isinstance(parent, (ast.For, ast.comprehension, ast.Tuple, ast.List)): binding = Binding(name, node) elif (parent is not None and name == '__all__' and isinstance(self.scope, ModuleScope)): binding = ExportBinding(name, parent.value) else: binding = Assignment(name, node) if name in self.scope: binding.used = self.scope[name].used self.add_binding(node, binding) def handle_node_delete(self, node): name = node_name(node) if not name: return if isinstance(self.scope, FunctionScope) and name in self.scope.globals: self.scope.globals.remove(name) else: try: del self.scope[name] except KeyError: self.report(messages.UndefinedName, node, name) def handle_children(self, tree): for node in ast.iter_child_nodes(tree): self.handleNode(node, tree) def is_docstring(self, node): """Determine if the given node is a docstring, as long as it is at the correct place in the node tree.""" return isinstance(node, ast.Str) or (isinstance(node, ast.Expr) and isinstance(node.value, ast.Str)) def docstring(self, node): if isinstance(node, ast.Expr): node = node.value if not isinstance(node, ast.Str): return (None, None) # Computed incorrectly if the docstring has backslash doctest_lineno = node.lineno - node.s.count('\n') - 1 return (node.s, doctest_lineno) def handleNode(self, node, parent): if node is None: return if self.offset and getattr(node, 'lineno', None) is not None: node.lineno += self.offset[0] node.col_offset += self.offset[1] if self.trace_tree: print(' ' * self.node_depth + node.__class__.__name__) if self.futures_allowed and not (isinstance(node, ast.ImportFrom) or self.is_docstring(node)): self.futures_allowed = False self.node_depth += 1 node.level = self.node_depth node.parent = parent try: handler = self.get_node_handler(node.__class__) handler(node) finally: self.node_depth -= 1 if self.trace_tree: print(' ' * self.node_depth + 'end ' + node.__class__.__name__) _get_doctest_examples = doctest.DocTestParser().get_examples def handle_doctests(self, node): try: docstring, node_lineno = self.docstring(node.body[0]) if not docstring: return examples = self._get_doctest_examples(docstring) except (ValueError, IndexError): # e.g. line 6 of the docstring for <string> has inconsistent # leading whitespace: ... 
return node_offset = self.offset or (0, 0) self.push_scope() for example in examples: try: tree = compile(example.source, "<doctest>", "exec", ast.PyCF_ONLY_AST) except SyntaxError: e = sys.exc_info()[1] position = (node_lineno + example.lineno + e.lineno, example.indent + 4 + (e.offset or 0)) self.report(messages.DoctestSyntaxError, node, position) else: self.offset = (node_offset[0] + node_lineno + example.lineno, node_offset[1] + example.indent + 4) self.handle_children(tree) self.offset = node_offset self.pop_scope() def find_return_with_argument(self, node): """Finds and returns a return statment that has an argument. Note that we should use node.returns in Python 3, but this method is never called in Python 3 so we don't bother checking. """ for item in node.body: if isinstance(item, ast.Return) and item.value: return item elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): return_with_argument = self.find_return_with_argument(item) if return_with_argument: return return_with_argument def is_generator(self, node): """Checks whether a function is a generator by looking for a yield statement or expression.""" if not isinstance(node.body, list): # lambdas can not be generators return False for item in node.body: if isinstance(item, (ast.Assign, ast.Expr)): if isinstance(item.value, ast.Yield): return True elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): if self.is_generator(item): return True return False def ignore(self, node): pass # "stmt" type nodes RETURN = DELETE = PRINT = WHILE = IF = WITH = WITHITEM = RAISE = TRYFINALLY = ASSERT = EXEC = EXPR = handle_children CONTINUE = BREAK = PASS = ignore # "expr" type nodes BOOLOP = BINOP = UNARYOP = IFEXP = DICT = SET = YIELD = YIELDFROM = COMPARE = REPR = ATTRIBUTE = SUBSCRIPT = \ LIST = TUPLE = STARRED = NAMECONSTANT = handle_children NUM = STR = BYTES = ELLIPSIS = ignore # "slice" type nodes SLICE = EXTSLICE = INDEX = handle_children # expression contexts are node instances too, though being constants LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore # same for operators AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = BITOR = BITXOR = BITAND = FLOORDIV = INVERT = \ NOT = UADD = USUB = EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = ignore # additional node types COMPREHENSION = KEYWORD = handle_children def GLOBAL(self, node): """Keep track of globals declarations.""" if isinstance(self.scope, FunctionScope): self.scope.globals.update(node.names) NONLOCAL = GLOBAL def LISTCOMP(self, node): # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) def GENERATOREXP(self, node): self.push_scope(GeneratorScope) # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) self.pop_scope() SETCOMP = GENERATOREXP def DICTCOMP(self, node): self.push_scope(GeneratorScope) for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.key, node) self.handleNode(node.value, node) self.pop_scope() def FOR(self, node): """Process bindings for loop variables.""" vars = [] def collectLoopVars(n): if isinstance(n, ast.Name): vars.append(n.id) elif isinstance(n, ast.expr_context): return else: for c in ast.iter_child_nodes(n): collectLoopVars(c) collectLoopVars(node.target) for varn in vars: if (isinstance(self.scope.get(varn), Importation) # unused ones will get an unused import warning and self.scope[varn].used): 
self.report(messages.ImportShadowedByLoopVar, node, varn, self.scope[varn].source) self.handle_children(node) def NAME(self, node): """Handle occurrence of Name (which can be a load/store/delete access.)""" # Locate the name in locals / function / globals scopes. if isinstance(node.ctx, (ast.Load, ast.AugLoad)): self.handle_node_load(node) if (node.id == 'locals' and isinstance(self.scope, FunctionScope) and isinstance(node.parent, ast.Call)): # we are doing locals() call in current scope self.scope.uses_locals = True elif isinstance(node.ctx, (ast.Store, ast.AugStore)): self.handle_node_store(node) elif isinstance(node.ctx, ast.Del): self.handle_node_delete(node) else: # must be a Param context -- this only happens for names in function # arguments, but these aren't dispatched through here raise RuntimeError("Got impossible expression context: %r" % (node.ctx,)) def CALL(self, node): f = node.func if isinstance(f, ast.Name): for scope in self.iter_visible_scopes(): definition = scope.get(f.id) if definition: if isinstance(definition, FunctionDefinition): definition.signature.checkCall(node, self, f.id) break self.handle_children(node) def FUNCTIONDEF(self, node): for deco in node.decorator_list: self.handleNode(deco, node) self.add_binding(node, FunctionDefinition(node.name, node)) self.LAMBDA(node) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) def LAMBDA(self, node): args = [] annotations = [] if PY2: def addArgs(arglist): for arg in arglist: if isinstance(arg, ast.Tuple): addArgs(arg.elts) else: if arg.id in args: self.report(messages.DuplicateArgument, node, arg.id) args.append(arg.id) addArgs(node.args.args) defaults = node.args.defaults else: for arg in node.args.args + node.args.kwonlyargs: annotations.append(arg.annotation) args.append(arg.arg) defaults = node.args.defaults + node.args.kw_defaults # Only for Python3 FunctionDefs is_py3_func = hasattr(node, 'returns') for arg_name in ('vararg', 'kwarg'): wildcard = getattr(node.args, arg_name) if not wildcard: continue args.append(getattr(wildcard, 'arg', wildcard)) if is_py3_func: if PY34_GTE: annotations.append(wildcard.annotation) else: argannotation = arg_name + 'annotation' annotations.append(getattr(node.args, argannotation)) if is_py3_func: annotations.append(node.returns) if PY3: if len(set(args)) < len(args): for (idx, arg) in enumerate(args): if arg in args[:idx]: self.report(messages.DuplicateArgument, node, arg) for child in annotations + defaults: if child: self.handleNode(child, node) def runFunction(): self.push_scope() for name in args: self.add_binding(node, Argument(name, node), report_redef=False) if isinstance(node.body, list): # case for FunctionDefs for stmt in node.body: self.handleNode(stmt, node) else: # case for Lambdas self.handleNode(node.body, node) def checkUnusedAssignments(): """Check to see if any assignments have not been used.""" for name, binding in self.scope.unusedAssignments(): self.report(messages.UnusedVariable, binding.source, name) self.defer_assignment(checkUnusedAssignments) if PY2: def checkReturnWithArgumentInsideGenerator(): """Check to see if there are any return statements with arguments but the function is a generator.""" if self.is_generator(node): stmt = self.find_return_with_argument(node) if stmt is not None: self.report(messages.ReturnWithArgsInsideGenerator, stmt) self.defer_assignment(checkReturnWithArgumentInsideGenerator) self.pop_scope() self.defer_function(runFunction) def CLASSDEF(self, node): """Check names used in 
a class definition, including its decorators, base classes, and the body of its definition. Additionally, add its name to the current scope. """ for deco in node.decorator_list: self.handleNode(deco, node) for baseNode in node.bases: self.handleNode(baseNode, node) if not PY2: for keywordNode in node.keywords: self.handleNode(keywordNode, node) self.push_scope(ClassScope) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) for stmt in node.body: self.handleNode(stmt, node) self.pop_scope() self.add_binding(node, ClassDefinition(node.name, node)) def ASSIGN(self, node): self.handleNode(node.value, node) for target in node.targets: self.handleNode(target, node) def AUGASSIGN(self, node): self.handle_node_load(node.target) self.handleNode(node.value, node) self.handleNode(node.target, node) def IMPORT(self, node): for alias in node.names: name = alias.asname or alias.name importation = Importation(name, node) self.add_binding(node, importation) def IMPORTFROM(self, node): if node.module == '__future__': if not self.futures_allowed: self.report(messages.LateFutureImport, node, [n.name for n in node.names]) else: self.futures_allowed = False for alias in node.names: if alias.name == '*': self.scope.importStarred = True self.report(messages.ImportStarUsed, node, node.module) continue name = alias.asname or alias.name importation = Importation(name, node) if node.module == '__future__': importation.used = (self.scope, node) self.add_binding(node, importation) def TRY(self, node): handler_names = [] # List the exception handlers for handler in node.handlers: if isinstance(handler.type, ast.Tuple): for exc_type in handler.type.elts: handler_names.append(node_name(exc_type)) elif handler.type: handler_names.append(node_name(handler.type)) # Memorize the except handlers and process the body self.except_handlers.append(handler_names) for child in node.body: self.handleNode(child, node) self.except_handlers.pop() # Process the other nodes: "except:", "else:", "finally:" for child in ast.iter_child_nodes(node): if child not in node.body: self.handleNode(child, node) TRYEXCEPT = TRY def EXCEPTHANDLER(self, node): # 3.x: in addition to handling children, we must handle the name of # the exception, which is not a Name node, but a simple string. if node.type is None: self.report(messages.BareExcept, node) if isinstance(node.name, str): self.handle_node_store(node) self.handle_children(node)
timothycrosley/deprecated.frosted
frosted/checker.py
Checker.add_binding
python
def add_binding(self, node, value, report_redef=True):
    redefinedWhileUnused = False
    if not isinstance(self.scope, ClassScope):
        for scope in self.scope_stack[::-1]:
            existing = scope.get(value.name)
            if (isinstance(existing, Importation)
                    and not existing.used
                    and (not isinstance(value, Importation) or value.fullName == existing.fullName)
                    and report_redef
                    and not self.different_forks(node, existing.source)):
                redefinedWhileUnused = True
                self.report(messages.RedefinedWhileUnused, node, value.name, existing.source)

    existing = self.scope.get(value.name)
    if not redefinedWhileUnused and self.has_parent(value.source, ast.ListComp):
        if (existing and report_redef
                and not self.has_parent(existing.source, (ast.For, ast.ListComp))
                and not self.different_forks(node, existing.source)):
            self.report(messages.RedefinedInListComp, node, value.name, existing.source)

    if (isinstance(existing, Definition)
            and not existing.used
            and not self.different_forks(node, existing.source)):
        self.report(messages.RedefinedWhileUnused, node, value.name, existing.source)
    else:
        self.scope[value.name] = value
Called when a binding is altered.

- `node` is the statement responsible for the change
- `value` is the optional new value, a Binding instance, associated with the binding; if None, the binding is deleted if it exists.
- if `report_redef` is True (default), rebinding while unused will be reported.
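For orientation, the scenario this method reports can be reproduced without frosted at all: the second `import os` below rebinds a name whose first binding was never used, which is the RedefinedWhileUnused case handled above. The duplicate-import scan is a standalone sketch that only approximates the checker's scope machinery; it is not frosted's own API.

import ast

# Minimal stand-in for the redefined-while-unused check on top-level imports.
source = "import os\nimport os\n"

seen = {}
for node in ast.walk(ast.parse(source)):
    if isinstance(node, ast.Import):
        for alias in node.names:
            name = alias.asname or alias.name
            if name in seen:
                print("redefinition of unused %r from line %d" % (name, seen[name]))
            seen[name] = node.lineno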
train
https://github.com/timothycrosley/deprecated.frosted/blob/61ba7f341fc55676c3580c8c4e52117986cd5e12/frosted/checker.py#L407-L445
null
class Checker(object): """The core of frosted, checks the cleanliness and sanity of Python code.""" node_depth = 0 offset = None trace_tree = False frosted_builtins = FROSTED_BUILTINS def __init__(self, tree, filename='(none)', builtins=None, ignore_lines=(), **settings): self.settings = settings self.ignore_errors = settings.get('ignore_frosted_errors', []) self.ignore_lines = ignore_lines file_specific_ignores = settings.get('ignore_frosted_errors_for_' + (os.path.basename(filename) or ""), None) if file_specific_ignores: self.ignore_errors += file_specific_ignores self._node_handlers = {} self._deferred_functions = [] self._deferred_assignments = [] self.dead_scopes = [] self.messages = [] self.filename = filename if builtins: self.frosted_builtins = self.frosted_builtins.union(builtins) self.scope_stack = [ModuleScope()] self.except_handlers = [()] self.futures_allowed = True self.root = tree self.handle_children(tree) self.run_deferred(self._deferred_functions) self._deferred_functions = None self.run_deferred(self._deferred_assignments) self._deferred_assignments = None del self.scope_stack[1:] self.pop_scope() self.check_dead_scopes() self.check_plugins() def check_plugins(self): """ collect plugins from entry point 'frosted.plugins' and run their check() method, passing the filename """ checkers = {} for ep in pkg_resources.iter_entry_points(group='frosted.plugins'): checkers.update({ep.name: ep.load()}) for plugin_name, plugin in checkers.items(): if self.filename != '(none)': messages = plugin.check(self.filename) for message, loc, args, kwargs in messages: self.report(message, loc, *args, **kwargs) def defer_function(self, callable): """Schedule a function handler to be called just before completion. This is used for handling function bodies, which must be deferred because code later in the file might modify the global scope. When 'callable' is called, the scope at the time this is called will be restored, however it will contain any new bindings added to it. 
""" self._deferred_functions.append((callable, self.scope_stack[:], self.offset)) def defer_assignment(self, callable): """Schedule an assignment handler to be called just after deferred function handlers.""" self._deferred_assignments.append((callable, self.scope_stack[:], self.offset)) def run_deferred(self, deferred): """Run the callables in deferred using their associated scope stack.""" for handler, scope, offset in deferred: self.scope_stack = scope self.offset = offset handler() @property def scope(self): return self.scope_stack[-1] def pop_scope(self): self.dead_scopes.append(self.scope_stack.pop()) def check_dead_scopes(self): """Look at scopes which have been fully examined and report names in them which were imported but unused.""" for scope in self.dead_scopes: export = isinstance(scope.get('__all__'), ExportBinding) if export: all = scope['__all__'].names() # Look for possible mistakes in the export list if not scope.importStarred and os.path.basename(self.filename) != '__init__.py': undefined = set(all) - set(scope) for name in undefined: self.report(messages.UndefinedExport, scope['__all__'].source, name) else: all = [] # Look for imported names that aren't used without checking imports in namespace definition for importation in scope.values(): if isinstance(importation, Importation) and not importation.used and importation.name not in all: self.report(messages.UnusedImport, importation.source, importation.name) def push_scope(self, scope_class=FunctionScope): self.scope_stack.append(scope_class()) def push_function_scope(self): # XXX Deprecated self.push_scope(FunctionScope) def push_class_scope(self): # XXX Deprecated self.push_scope(ClassScope) def report(self, message_class, *args, **kwargs): error_code = message_class.error_code if(not error_code[:2] + "00" in self.ignore_errors and not error_code in self.ignore_errors and not str(message_class.error_number) in self.ignore_errors): kwargs['verbose'] = self.settings.get('verbose') message = message_class(self.filename, *args, **kwargs) if message.lineno not in self.ignore_lines: self.messages.append(message) def has_parent(self, node, kind): while hasattr(node, 'parent'): node = node.parent if isinstance(node, kind): return True def get_common_ancestor(self, lnode, rnode, stop=None): stop = stop or self.root if lnode is rnode: return lnode if stop in (lnode, rnode): return stop if not hasattr(lnode, 'parent') or not hasattr(rnode, 'parent'): return if (lnode.level > rnode.level): return self.get_common_ancestor(lnode.parent, rnode, stop) if (rnode.level > lnode.level): return self.get_common_ancestor(lnode, rnode.parent, stop) return self.get_common_ancestor(lnode.parent, rnode.parent, stop) def descendant_of(self, node, ancestors, stop=None): for ancestor in ancestors: if self.get_common_ancestor(node, ancestor, stop) not in (stop, None): return True return False def on_fork(self, parent, lnode, rnode, items): return (self.descendant_of(lnode, items, parent) ^ self.descendant_of(rnode, items, parent)) def different_forks(self, lnode, rnode): """True, if lnode and rnode are located on different forks of IF/TRY.""" ancestor = self.get_common_ancestor(lnode, rnode) if isinstance(ancestor, ast.If): for fork in (ancestor.body, ancestor.orelse): if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, ast.Try): body = ancestor.body + ancestor.orelse for fork in [body] + [[hdl] for hdl in ancestor.handlers]: if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, 
ast.TryFinally): if self.on_fork(ancestor, lnode, rnode, ancestor.body): return True return False def get_node_handler(self, node_class): try: return self._node_handlers[node_class] except KeyError: nodeType = str(node_class.__name__).upper() self._node_handlers[node_class] = handler = getattr(self, nodeType) return handler def iter_visible_scopes(self): outerScopes = itertools.islice(self.scope_stack, len(self.scope_stack) - 1) scopes = [scope for scope in outerScopes if isinstance(scope, (FunctionScope, ModuleScope))] if (isinstance(self.scope, GeneratorScope) and scopes[-1] != self.scope_stack[-2]): scopes.append(self.scope_stack[-2]) scopes.append(self.scope_stack[-1]) return iter(reversed(scopes)) def handle_node_load(self, node): name = node_name(node) if not name: return importStarred = False for scope in self.iter_visible_scopes(): importStarred = importStarred or scope.importStarred try: scope[name].used = (self.scope, node) except KeyError: pass else: return # look in the built-ins if importStarred or name in self.frosted_builtins: return if name == '__path__' and os.path.basename(self.filename) == '__init__.py': # the special name __path__ is valid only in packages return # protected with a NameError handler? if 'NameError' not in self.except_handlers[-1]: self.report(messages.UndefinedName, node, name) def handle_node_store(self, node): name = node_name(node) if not name: return # if the name hasn't already been defined in the current scope if isinstance(self.scope, FunctionScope) and name not in self.scope: # for each function or module scope above us for scope in self.scope_stack[:-1]: if not isinstance(scope, (FunctionScope, ModuleScope)): continue # if the name was defined in that scope, and the name has # been accessed already in the current scope, and hasn't # been declared global used = name in scope and scope[name].used if used and used[0] is self.scope and name not in self.scope.globals: # then it's probably a mistake self.report(messages.UndefinedLocal, scope[name].used[1], name, scope[name].source) break parent = getattr(node, 'parent', None) if isinstance(parent, (ast.For, ast.comprehension, ast.Tuple, ast.List)): binding = Binding(name, node) elif (parent is not None and name == '__all__' and isinstance(self.scope, ModuleScope)): binding = ExportBinding(name, parent.value) else: binding = Assignment(name, node) if name in self.scope: binding.used = self.scope[name].used self.add_binding(node, binding) def handle_node_delete(self, node): name = node_name(node) if not name: return if isinstance(self.scope, FunctionScope) and name in self.scope.globals: self.scope.globals.remove(name) else: try: del self.scope[name] except KeyError: self.report(messages.UndefinedName, node, name) def handle_children(self, tree): for node in ast.iter_child_nodes(tree): self.handleNode(node, tree) def is_docstring(self, node): """Determine if the given node is a docstring, as long as it is at the correct place in the node tree.""" return isinstance(node, ast.Str) or (isinstance(node, ast.Expr) and isinstance(node.value, ast.Str)) def docstring(self, node): if isinstance(node, ast.Expr): node = node.value if not isinstance(node, ast.Str): return (None, None) # Computed incorrectly if the docstring has backslash doctest_lineno = node.lineno - node.s.count('\n') - 1 return (node.s, doctest_lineno) def handleNode(self, node, parent): if node is None: return if self.offset and getattr(node, 'lineno', None) is not None: node.lineno += self.offset[0] node.col_offset += self.offset[1] if 
self.trace_tree: print(' ' * self.node_depth + node.__class__.__name__) if self.futures_allowed and not (isinstance(node, ast.ImportFrom) or self.is_docstring(node)): self.futures_allowed = False self.node_depth += 1 node.level = self.node_depth node.parent = parent try: handler = self.get_node_handler(node.__class__) handler(node) finally: self.node_depth -= 1 if self.trace_tree: print(' ' * self.node_depth + 'end ' + node.__class__.__name__) _get_doctest_examples = doctest.DocTestParser().get_examples def handle_doctests(self, node): try: docstring, node_lineno = self.docstring(node.body[0]) if not docstring: return examples = self._get_doctest_examples(docstring) except (ValueError, IndexError): # e.g. line 6 of the docstring for <string> has inconsistent # leading whitespace: ... return node_offset = self.offset or (0, 0) self.push_scope() for example in examples: try: tree = compile(example.source, "<doctest>", "exec", ast.PyCF_ONLY_AST) except SyntaxError: e = sys.exc_info()[1] position = (node_lineno + example.lineno + e.lineno, example.indent + 4 + (e.offset or 0)) self.report(messages.DoctestSyntaxError, node, position) else: self.offset = (node_offset[0] + node_lineno + example.lineno, node_offset[1] + example.indent + 4) self.handle_children(tree) self.offset = node_offset self.pop_scope() def find_return_with_argument(self, node): """Finds and returns a return statment that has an argument. Note that we should use node.returns in Python 3, but this method is never called in Python 3 so we don't bother checking. """ for item in node.body: if isinstance(item, ast.Return) and item.value: return item elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): return_with_argument = self.find_return_with_argument(item) if return_with_argument: return return_with_argument def is_generator(self, node): """Checks whether a function is a generator by looking for a yield statement or expression.""" if not isinstance(node.body, list): # lambdas can not be generators return False for item in node.body: if isinstance(item, (ast.Assign, ast.Expr)): if isinstance(item.value, ast.Yield): return True elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): if self.is_generator(item): return True return False def ignore(self, node): pass # "stmt" type nodes RETURN = DELETE = PRINT = WHILE = IF = WITH = WITHITEM = RAISE = TRYFINALLY = ASSERT = EXEC = EXPR = handle_children CONTINUE = BREAK = PASS = ignore # "expr" type nodes BOOLOP = BINOP = UNARYOP = IFEXP = DICT = SET = YIELD = YIELDFROM = COMPARE = REPR = ATTRIBUTE = SUBSCRIPT = \ LIST = TUPLE = STARRED = NAMECONSTANT = handle_children NUM = STR = BYTES = ELLIPSIS = ignore # "slice" type nodes SLICE = EXTSLICE = INDEX = handle_children # expression contexts are node instances too, though being constants LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore # same for operators AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = BITOR = BITXOR = BITAND = FLOORDIV = INVERT = \ NOT = UADD = USUB = EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = ignore # additional node types COMPREHENSION = KEYWORD = handle_children def GLOBAL(self, node): """Keep track of globals declarations.""" if isinstance(self.scope, FunctionScope): self.scope.globals.update(node.names) NONLOCAL = GLOBAL def LISTCOMP(self, node): # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) def GENERATOREXP(self, node): self.push_scope(GeneratorScope) # handle 
generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) self.pop_scope() SETCOMP = GENERATOREXP def DICTCOMP(self, node): self.push_scope(GeneratorScope) for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.key, node) self.handleNode(node.value, node) self.pop_scope() def FOR(self, node): """Process bindings for loop variables.""" vars = [] def collectLoopVars(n): if isinstance(n, ast.Name): vars.append(n.id) elif isinstance(n, ast.expr_context): return else: for c in ast.iter_child_nodes(n): collectLoopVars(c) collectLoopVars(node.target) for varn in vars: if (isinstance(self.scope.get(varn), Importation) # unused ones will get an unused import warning and self.scope[varn].used): self.report(messages.ImportShadowedByLoopVar, node, varn, self.scope[varn].source) self.handle_children(node) def NAME(self, node): """Handle occurrence of Name (which can be a load/store/delete access.)""" # Locate the name in locals / function / globals scopes. if isinstance(node.ctx, (ast.Load, ast.AugLoad)): self.handle_node_load(node) if (node.id == 'locals' and isinstance(self.scope, FunctionScope) and isinstance(node.parent, ast.Call)): # we are doing locals() call in current scope self.scope.uses_locals = True elif isinstance(node.ctx, (ast.Store, ast.AugStore)): self.handle_node_store(node) elif isinstance(node.ctx, ast.Del): self.handle_node_delete(node) else: # must be a Param context -- this only happens for names in function # arguments, but these aren't dispatched through here raise RuntimeError("Got impossible expression context: %r" % (node.ctx,)) def CALL(self, node): f = node.func if isinstance(f, ast.Name): for scope in self.iter_visible_scopes(): definition = scope.get(f.id) if definition: if isinstance(definition, FunctionDefinition): definition.signature.checkCall(node, self, f.id) break self.handle_children(node) def FUNCTIONDEF(self, node): for deco in node.decorator_list: self.handleNode(deco, node) self.add_binding(node, FunctionDefinition(node.name, node)) self.LAMBDA(node) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) def LAMBDA(self, node): args = [] annotations = [] if PY2: def addArgs(arglist): for arg in arglist: if isinstance(arg, ast.Tuple): addArgs(arg.elts) else: if arg.id in args: self.report(messages.DuplicateArgument, node, arg.id) args.append(arg.id) addArgs(node.args.args) defaults = node.args.defaults else: for arg in node.args.args + node.args.kwonlyargs: annotations.append(arg.annotation) args.append(arg.arg) defaults = node.args.defaults + node.args.kw_defaults # Only for Python3 FunctionDefs is_py3_func = hasattr(node, 'returns') for arg_name in ('vararg', 'kwarg'): wildcard = getattr(node.args, arg_name) if not wildcard: continue args.append(getattr(wildcard, 'arg', wildcard)) if is_py3_func: if PY34_GTE: annotations.append(wildcard.annotation) else: argannotation = arg_name + 'annotation' annotations.append(getattr(node.args, argannotation)) if is_py3_func: annotations.append(node.returns) if PY3: if len(set(args)) < len(args): for (idx, arg) in enumerate(args): if arg in args[:idx]: self.report(messages.DuplicateArgument, node, arg) for child in annotations + defaults: if child: self.handleNode(child, node) def runFunction(): self.push_scope() for name in args: self.add_binding(node, Argument(name, node), report_redef=False) if isinstance(node.body, list): # case for FunctionDefs for stmt in node.body: self.handleNode(stmt, node) 
else: # case for Lambdas self.handleNode(node.body, node) def checkUnusedAssignments(): """Check to see if any assignments have not been used.""" for name, binding in self.scope.unusedAssignments(): self.report(messages.UnusedVariable, binding.source, name) self.defer_assignment(checkUnusedAssignments) if PY2: def checkReturnWithArgumentInsideGenerator(): """Check to see if there are any return statements with arguments but the function is a generator.""" if self.is_generator(node): stmt = self.find_return_with_argument(node) if stmt is not None: self.report(messages.ReturnWithArgsInsideGenerator, stmt) self.defer_assignment(checkReturnWithArgumentInsideGenerator) self.pop_scope() self.defer_function(runFunction) def CLASSDEF(self, node): """Check names used in a class definition, including its decorators, base classes, and the body of its definition. Additionally, add its name to the current scope. """ for deco in node.decorator_list: self.handleNode(deco, node) for baseNode in node.bases: self.handleNode(baseNode, node) if not PY2: for keywordNode in node.keywords: self.handleNode(keywordNode, node) self.push_scope(ClassScope) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) for stmt in node.body: self.handleNode(stmt, node) self.pop_scope() self.add_binding(node, ClassDefinition(node.name, node)) def ASSIGN(self, node): self.handleNode(node.value, node) for target in node.targets: self.handleNode(target, node) def AUGASSIGN(self, node): self.handle_node_load(node.target) self.handleNode(node.value, node) self.handleNode(node.target, node) def IMPORT(self, node): for alias in node.names: name = alias.asname or alias.name importation = Importation(name, node) self.add_binding(node, importation) def IMPORTFROM(self, node): if node.module == '__future__': if not self.futures_allowed: self.report(messages.LateFutureImport, node, [n.name for n in node.names]) else: self.futures_allowed = False for alias in node.names: if alias.name == '*': self.scope.importStarred = True self.report(messages.ImportStarUsed, node, node.module) continue name = alias.asname or alias.name importation = Importation(name, node) if node.module == '__future__': importation.used = (self.scope, node) self.add_binding(node, importation) def TRY(self, node): handler_names = [] # List the exception handlers for handler in node.handlers: if isinstance(handler.type, ast.Tuple): for exc_type in handler.type.elts: handler_names.append(node_name(exc_type)) elif handler.type: handler_names.append(node_name(handler.type)) # Memorize the except handlers and process the body self.except_handlers.append(handler_names) for child in node.body: self.handleNode(child, node) self.except_handlers.pop() # Process the other nodes: "except:", "else:", "finally:" for child in ast.iter_child_nodes(node): if child not in node.body: self.handleNode(child, node) TRYEXCEPT = TRY def EXCEPTHANDLER(self, node): # 3.x: in addition to handling children, we must handle the name of # the exception, which is not a Name node, but a simple string. if node.type is None: self.report(messages.BareExcept, node) if isinstance(node.name, str): self.handle_node_store(node) self.handle_children(node)
timothycrosley/deprecated.frosted
frosted/checker.py
Checker.is_docstring
python
def is_docstring(self, node):
    return isinstance(node, ast.Str) or (isinstance(node, ast.Expr) and
                                         isinstance(node.value, ast.Str))
Determine if the given node is a docstring, as long as it is at the correct place in the node tree.
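A minimal sketch of the same test applied to a parsed module; `looks_like_docstring` is a hypothetical stand-in that mirrors the intent of this method. frosted itself matches `ast.Str`, which newer interpreters (3.8+) replace with `ast.Constant` for string literals, so the sketch checks the modern node type instead.

import ast

def looks_like_docstring(node):
    # A bare string expression at the top of a module/class/function body.
    if isinstance(node, ast.Expr):
        node = node.value
    return isinstance(node, ast.Constant) and isinstance(node.value, str)

module = ast.parse('"""A module docstring."""\nvalue = 1\n')
print(looks_like_docstring(module.body[0]))  # True
print(looks_like_docstring(module.body[1]))  # False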
train
https://github.com/timothycrosley/deprecated.frosted/blob/61ba7f341fc55676c3580c8c4e52117986cd5e12/frosted/checker.py#L539-L543
null
class Checker(object): """The core of frosted, checks the cleanliness and sanity of Python code.""" node_depth = 0 offset = None trace_tree = False frosted_builtins = FROSTED_BUILTINS def __init__(self, tree, filename='(none)', builtins=None, ignore_lines=(), **settings): self.settings = settings self.ignore_errors = settings.get('ignore_frosted_errors', []) self.ignore_lines = ignore_lines file_specific_ignores = settings.get('ignore_frosted_errors_for_' + (os.path.basename(filename) or ""), None) if file_specific_ignores: self.ignore_errors += file_specific_ignores self._node_handlers = {} self._deferred_functions = [] self._deferred_assignments = [] self.dead_scopes = [] self.messages = [] self.filename = filename if builtins: self.frosted_builtins = self.frosted_builtins.union(builtins) self.scope_stack = [ModuleScope()] self.except_handlers = [()] self.futures_allowed = True self.root = tree self.handle_children(tree) self.run_deferred(self._deferred_functions) self._deferred_functions = None self.run_deferred(self._deferred_assignments) self._deferred_assignments = None del self.scope_stack[1:] self.pop_scope() self.check_dead_scopes() self.check_plugins() def check_plugins(self): """ collect plugins from entry point 'frosted.plugins' and run their check() method, passing the filename """ checkers = {} for ep in pkg_resources.iter_entry_points(group='frosted.plugins'): checkers.update({ep.name: ep.load()}) for plugin_name, plugin in checkers.items(): if self.filename != '(none)': messages = plugin.check(self.filename) for message, loc, args, kwargs in messages: self.report(message, loc, *args, **kwargs) def defer_function(self, callable): """Schedule a function handler to be called just before completion. This is used for handling function bodies, which must be deferred because code later in the file might modify the global scope. When 'callable' is called, the scope at the time this is called will be restored, however it will contain any new bindings added to it. 
""" self._deferred_functions.append((callable, self.scope_stack[:], self.offset)) def defer_assignment(self, callable): """Schedule an assignment handler to be called just after deferred function handlers.""" self._deferred_assignments.append((callable, self.scope_stack[:], self.offset)) def run_deferred(self, deferred): """Run the callables in deferred using their associated scope stack.""" for handler, scope, offset in deferred: self.scope_stack = scope self.offset = offset handler() @property def scope(self): return self.scope_stack[-1] def pop_scope(self): self.dead_scopes.append(self.scope_stack.pop()) def check_dead_scopes(self): """Look at scopes which have been fully examined and report names in them which were imported but unused.""" for scope in self.dead_scopes: export = isinstance(scope.get('__all__'), ExportBinding) if export: all = scope['__all__'].names() # Look for possible mistakes in the export list if not scope.importStarred and os.path.basename(self.filename) != '__init__.py': undefined = set(all) - set(scope) for name in undefined: self.report(messages.UndefinedExport, scope['__all__'].source, name) else: all = [] # Look for imported names that aren't used without checking imports in namespace definition for importation in scope.values(): if isinstance(importation, Importation) and not importation.used and importation.name not in all: self.report(messages.UnusedImport, importation.source, importation.name) def push_scope(self, scope_class=FunctionScope): self.scope_stack.append(scope_class()) def push_function_scope(self): # XXX Deprecated self.push_scope(FunctionScope) def push_class_scope(self): # XXX Deprecated self.push_scope(ClassScope) def report(self, message_class, *args, **kwargs): error_code = message_class.error_code if(not error_code[:2] + "00" in self.ignore_errors and not error_code in self.ignore_errors and not str(message_class.error_number) in self.ignore_errors): kwargs['verbose'] = self.settings.get('verbose') message = message_class(self.filename, *args, **kwargs) if message.lineno not in self.ignore_lines: self.messages.append(message) def has_parent(self, node, kind): while hasattr(node, 'parent'): node = node.parent if isinstance(node, kind): return True def get_common_ancestor(self, lnode, rnode, stop=None): stop = stop or self.root if lnode is rnode: return lnode if stop in (lnode, rnode): return stop if not hasattr(lnode, 'parent') or not hasattr(rnode, 'parent'): return if (lnode.level > rnode.level): return self.get_common_ancestor(lnode.parent, rnode, stop) if (rnode.level > lnode.level): return self.get_common_ancestor(lnode, rnode.parent, stop) return self.get_common_ancestor(lnode.parent, rnode.parent, stop) def descendant_of(self, node, ancestors, stop=None): for ancestor in ancestors: if self.get_common_ancestor(node, ancestor, stop) not in (stop, None): return True return False def on_fork(self, parent, lnode, rnode, items): return (self.descendant_of(lnode, items, parent) ^ self.descendant_of(rnode, items, parent)) def different_forks(self, lnode, rnode): """True, if lnode and rnode are located on different forks of IF/TRY.""" ancestor = self.get_common_ancestor(lnode, rnode) if isinstance(ancestor, ast.If): for fork in (ancestor.body, ancestor.orelse): if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, ast.Try): body = ancestor.body + ancestor.orelse for fork in [body] + [[hdl] for hdl in ancestor.handlers]: if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, 
ast.TryFinally): if self.on_fork(ancestor, lnode, rnode, ancestor.body): return True return False def add_binding(self, node, value, report_redef=True): """Called when a binding is altered. - `node` is the statement responsible for the change - `value` is the optional new value, a Binding instance, associated with the binding; if None, the binding is deleted if it exists. - if `report_redef` is True (default), rebinding while unused will be reported. """ redefinedWhileUnused = False if not isinstance(self.scope, ClassScope): for scope in self.scope_stack[::-1]: existing = scope.get(value.name) if (isinstance(existing, Importation) and not existing.used and (not isinstance(value, Importation) or value.fullName == existing.fullName) and report_redef and not self.different_forks(node, existing.source)): redefinedWhileUnused = True self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) existing = self.scope.get(value.name) if not redefinedWhileUnused and self.has_parent(value.source, ast.ListComp): if (existing and report_redef and not self.has_parent(existing.source, (ast.For, ast.ListComp)) and not self.different_forks(node, existing.source)): self.report(messages.RedefinedInListComp, node, value.name, existing.source) if (isinstance(existing, Definition) and not existing.used and not self.different_forks(node, existing.source)): self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) else: self.scope[value.name] = value def get_node_handler(self, node_class): try: return self._node_handlers[node_class] except KeyError: nodeType = str(node_class.__name__).upper() self._node_handlers[node_class] = handler = getattr(self, nodeType) return handler def iter_visible_scopes(self): outerScopes = itertools.islice(self.scope_stack, len(self.scope_stack) - 1) scopes = [scope for scope in outerScopes if isinstance(scope, (FunctionScope, ModuleScope))] if (isinstance(self.scope, GeneratorScope) and scopes[-1] != self.scope_stack[-2]): scopes.append(self.scope_stack[-2]) scopes.append(self.scope_stack[-1]) return iter(reversed(scopes)) def handle_node_load(self, node): name = node_name(node) if not name: return importStarred = False for scope in self.iter_visible_scopes(): importStarred = importStarred or scope.importStarred try: scope[name].used = (self.scope, node) except KeyError: pass else: return # look in the built-ins if importStarred or name in self.frosted_builtins: return if name == '__path__' and os.path.basename(self.filename) == '__init__.py': # the special name __path__ is valid only in packages return # protected with a NameError handler? 
if 'NameError' not in self.except_handlers[-1]: self.report(messages.UndefinedName, node, name) def handle_node_store(self, node): name = node_name(node) if not name: return # if the name hasn't already been defined in the current scope if isinstance(self.scope, FunctionScope) and name not in self.scope: # for each function or module scope above us for scope in self.scope_stack[:-1]: if not isinstance(scope, (FunctionScope, ModuleScope)): continue # if the name was defined in that scope, and the name has # been accessed already in the current scope, and hasn't # been declared global used = name in scope and scope[name].used if used and used[0] is self.scope and name not in self.scope.globals: # then it's probably a mistake self.report(messages.UndefinedLocal, scope[name].used[1], name, scope[name].source) break parent = getattr(node, 'parent', None) if isinstance(parent, (ast.For, ast.comprehension, ast.Tuple, ast.List)): binding = Binding(name, node) elif (parent is not None and name == '__all__' and isinstance(self.scope, ModuleScope)): binding = ExportBinding(name, parent.value) else: binding = Assignment(name, node) if name in self.scope: binding.used = self.scope[name].used self.add_binding(node, binding) def handle_node_delete(self, node): name = node_name(node) if not name: return if isinstance(self.scope, FunctionScope) and name in self.scope.globals: self.scope.globals.remove(name) else: try: del self.scope[name] except KeyError: self.report(messages.UndefinedName, node, name) def handle_children(self, tree): for node in ast.iter_child_nodes(tree): self.handleNode(node, tree) def docstring(self, node): if isinstance(node, ast.Expr): node = node.value if not isinstance(node, ast.Str): return (None, None) # Computed incorrectly if the docstring has backslash doctest_lineno = node.lineno - node.s.count('\n') - 1 return (node.s, doctest_lineno) def handleNode(self, node, parent): if node is None: return if self.offset and getattr(node, 'lineno', None) is not None: node.lineno += self.offset[0] node.col_offset += self.offset[1] if self.trace_tree: print(' ' * self.node_depth + node.__class__.__name__) if self.futures_allowed and not (isinstance(node, ast.ImportFrom) or self.is_docstring(node)): self.futures_allowed = False self.node_depth += 1 node.level = self.node_depth node.parent = parent try: handler = self.get_node_handler(node.__class__) handler(node) finally: self.node_depth -= 1 if self.trace_tree: print(' ' * self.node_depth + 'end ' + node.__class__.__name__) _get_doctest_examples = doctest.DocTestParser().get_examples def handle_doctests(self, node): try: docstring, node_lineno = self.docstring(node.body[0]) if not docstring: return examples = self._get_doctest_examples(docstring) except (ValueError, IndexError): # e.g. line 6 of the docstring for <string> has inconsistent # leading whitespace: ... return node_offset = self.offset or (0, 0) self.push_scope() for example in examples: try: tree = compile(example.source, "<doctest>", "exec", ast.PyCF_ONLY_AST) except SyntaxError: e = sys.exc_info()[1] position = (node_lineno + example.lineno + e.lineno, example.indent + 4 + (e.offset or 0)) self.report(messages.DoctestSyntaxError, node, position) else: self.offset = (node_offset[0] + node_lineno + example.lineno, node_offset[1] + example.indent + 4) self.handle_children(tree) self.offset = node_offset self.pop_scope() def find_return_with_argument(self, node): """Finds and returns a return statment that has an argument. 
Note that we should use node.returns in Python 3, but this method is never called in Python 3 so we don't bother checking. """ for item in node.body: if isinstance(item, ast.Return) and item.value: return item elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): return_with_argument = self.find_return_with_argument(item) if return_with_argument: return return_with_argument def is_generator(self, node): """Checks whether a function is a generator by looking for a yield statement or expression.""" if not isinstance(node.body, list): # lambdas can not be generators return False for item in node.body: if isinstance(item, (ast.Assign, ast.Expr)): if isinstance(item.value, ast.Yield): return True elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): if self.is_generator(item): return True return False def ignore(self, node): pass # "stmt" type nodes RETURN = DELETE = PRINT = WHILE = IF = WITH = WITHITEM = RAISE = TRYFINALLY = ASSERT = EXEC = EXPR = handle_children CONTINUE = BREAK = PASS = ignore # "expr" type nodes BOOLOP = BINOP = UNARYOP = IFEXP = DICT = SET = YIELD = YIELDFROM = COMPARE = REPR = ATTRIBUTE = SUBSCRIPT = \ LIST = TUPLE = STARRED = NAMECONSTANT = handle_children NUM = STR = BYTES = ELLIPSIS = ignore # "slice" type nodes SLICE = EXTSLICE = INDEX = handle_children # expression contexts are node instances too, though being constants LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore # same for operators AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = BITOR = BITXOR = BITAND = FLOORDIV = INVERT = \ NOT = UADD = USUB = EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = ignore # additional node types COMPREHENSION = KEYWORD = handle_children def GLOBAL(self, node): """Keep track of globals declarations.""" if isinstance(self.scope, FunctionScope): self.scope.globals.update(node.names) NONLOCAL = GLOBAL def LISTCOMP(self, node): # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) def GENERATOREXP(self, node): self.push_scope(GeneratorScope) # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) self.pop_scope() SETCOMP = GENERATOREXP def DICTCOMP(self, node): self.push_scope(GeneratorScope) for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.key, node) self.handleNode(node.value, node) self.pop_scope() def FOR(self, node): """Process bindings for loop variables.""" vars = [] def collectLoopVars(n): if isinstance(n, ast.Name): vars.append(n.id) elif isinstance(n, ast.expr_context): return else: for c in ast.iter_child_nodes(n): collectLoopVars(c) collectLoopVars(node.target) for varn in vars: if (isinstance(self.scope.get(varn), Importation) # unused ones will get an unused import warning and self.scope[varn].used): self.report(messages.ImportShadowedByLoopVar, node, varn, self.scope[varn].source) self.handle_children(node) def NAME(self, node): """Handle occurrence of Name (which can be a load/store/delete access.)""" # Locate the name in locals / function / globals scopes. 
if isinstance(node.ctx, (ast.Load, ast.AugLoad)): self.handle_node_load(node) if (node.id == 'locals' and isinstance(self.scope, FunctionScope) and isinstance(node.parent, ast.Call)): # we are doing locals() call in current scope self.scope.uses_locals = True elif isinstance(node.ctx, (ast.Store, ast.AugStore)): self.handle_node_store(node) elif isinstance(node.ctx, ast.Del): self.handle_node_delete(node) else: # must be a Param context -- this only happens for names in function # arguments, but these aren't dispatched through here raise RuntimeError("Got impossible expression context: %r" % (node.ctx,)) def CALL(self, node): f = node.func if isinstance(f, ast.Name): for scope in self.iter_visible_scopes(): definition = scope.get(f.id) if definition: if isinstance(definition, FunctionDefinition): definition.signature.checkCall(node, self, f.id) break self.handle_children(node) def FUNCTIONDEF(self, node): for deco in node.decorator_list: self.handleNode(deco, node) self.add_binding(node, FunctionDefinition(node.name, node)) self.LAMBDA(node) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) def LAMBDA(self, node): args = [] annotations = [] if PY2: def addArgs(arglist): for arg in arglist: if isinstance(arg, ast.Tuple): addArgs(arg.elts) else: if arg.id in args: self.report(messages.DuplicateArgument, node, arg.id) args.append(arg.id) addArgs(node.args.args) defaults = node.args.defaults else: for arg in node.args.args + node.args.kwonlyargs: annotations.append(arg.annotation) args.append(arg.arg) defaults = node.args.defaults + node.args.kw_defaults # Only for Python3 FunctionDefs is_py3_func = hasattr(node, 'returns') for arg_name in ('vararg', 'kwarg'): wildcard = getattr(node.args, arg_name) if not wildcard: continue args.append(getattr(wildcard, 'arg', wildcard)) if is_py3_func: if PY34_GTE: annotations.append(wildcard.annotation) else: argannotation = arg_name + 'annotation' annotations.append(getattr(node.args, argannotation)) if is_py3_func: annotations.append(node.returns) if PY3: if len(set(args)) < len(args): for (idx, arg) in enumerate(args): if arg in args[:idx]: self.report(messages.DuplicateArgument, node, arg) for child in annotations + defaults: if child: self.handleNode(child, node) def runFunction(): self.push_scope() for name in args: self.add_binding(node, Argument(name, node), report_redef=False) if isinstance(node.body, list): # case for FunctionDefs for stmt in node.body: self.handleNode(stmt, node) else: # case for Lambdas self.handleNode(node.body, node) def checkUnusedAssignments(): """Check to see if any assignments have not been used.""" for name, binding in self.scope.unusedAssignments(): self.report(messages.UnusedVariable, binding.source, name) self.defer_assignment(checkUnusedAssignments) if PY2: def checkReturnWithArgumentInsideGenerator(): """Check to see if there are any return statements with arguments but the function is a generator.""" if self.is_generator(node): stmt = self.find_return_with_argument(node) if stmt is not None: self.report(messages.ReturnWithArgsInsideGenerator, stmt) self.defer_assignment(checkReturnWithArgumentInsideGenerator) self.pop_scope() self.defer_function(runFunction) def CLASSDEF(self, node): """Check names used in a class definition, including its decorators, base classes, and the body of its definition. Additionally, add its name to the current scope. 
""" for deco in node.decorator_list: self.handleNode(deco, node) for baseNode in node.bases: self.handleNode(baseNode, node) if not PY2: for keywordNode in node.keywords: self.handleNode(keywordNode, node) self.push_scope(ClassScope) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) for stmt in node.body: self.handleNode(stmt, node) self.pop_scope() self.add_binding(node, ClassDefinition(node.name, node)) def ASSIGN(self, node): self.handleNode(node.value, node) for target in node.targets: self.handleNode(target, node) def AUGASSIGN(self, node): self.handle_node_load(node.target) self.handleNode(node.value, node) self.handleNode(node.target, node) def IMPORT(self, node): for alias in node.names: name = alias.asname or alias.name importation = Importation(name, node) self.add_binding(node, importation) def IMPORTFROM(self, node): if node.module == '__future__': if not self.futures_allowed: self.report(messages.LateFutureImport, node, [n.name for n in node.names]) else: self.futures_allowed = False for alias in node.names: if alias.name == '*': self.scope.importStarred = True self.report(messages.ImportStarUsed, node, node.module) continue name = alias.asname or alias.name importation = Importation(name, node) if node.module == '__future__': importation.used = (self.scope, node) self.add_binding(node, importation) def TRY(self, node): handler_names = [] # List the exception handlers for handler in node.handlers: if isinstance(handler.type, ast.Tuple): for exc_type in handler.type.elts: handler_names.append(node_name(exc_type)) elif handler.type: handler_names.append(node_name(handler.type)) # Memorize the except handlers and process the body self.except_handlers.append(handler_names) for child in node.body: self.handleNode(child, node) self.except_handlers.pop() # Process the other nodes: "except:", "else:", "finally:" for child in ast.iter_child_nodes(node): if child not in node.body: self.handleNode(child, node) TRYEXCEPT = TRY def EXCEPTHANDLER(self, node): # 3.x: in addition to handling children, we must handle the name of # the exception, which is not a Name node, but a simple string. if node.type is None: self.report(messages.BareExcept, node) if isinstance(node.name, str): self.handle_node_store(node) self.handle_children(node)
timothycrosley/deprecated.frosted
frosted/checker.py
Checker.find_return_with_argument
python
def find_return_with_argument(self, node): for item in node.body: if isinstance(item, ast.Return) and item.value: return item elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): return_with_argument = self.find_return_with_argument(item) if return_with_argument: return return_with_argument
Finds and returns a return statement that has an argument. Note that we should use node.returns in Python 3, but this method is never called in Python 3 so we don't bother checking.
train
https://github.com/timothycrosley/deprecated.frosted/blob/61ba7f341fc55676c3580c8c4e52117986cd5e12/frosted/checker.py#L605-L618
null
class Checker(object): """The core of frosted, checks the cleanliness and sanity of Python code.""" node_depth = 0 offset = None trace_tree = False frosted_builtins = FROSTED_BUILTINS def __init__(self, tree, filename='(none)', builtins=None, ignore_lines=(), **settings): self.settings = settings self.ignore_errors = settings.get('ignore_frosted_errors', []) self.ignore_lines = ignore_lines file_specific_ignores = settings.get('ignore_frosted_errors_for_' + (os.path.basename(filename) or ""), None) if file_specific_ignores: self.ignore_errors += file_specific_ignores self._node_handlers = {} self._deferred_functions = [] self._deferred_assignments = [] self.dead_scopes = [] self.messages = [] self.filename = filename if builtins: self.frosted_builtins = self.frosted_builtins.union(builtins) self.scope_stack = [ModuleScope()] self.except_handlers = [()] self.futures_allowed = True self.root = tree self.handle_children(tree) self.run_deferred(self._deferred_functions) self._deferred_functions = None self.run_deferred(self._deferred_assignments) self._deferred_assignments = None del self.scope_stack[1:] self.pop_scope() self.check_dead_scopes() self.check_plugins() def check_plugins(self): """ collect plugins from entry point 'frosted.plugins' and run their check() method, passing the filename """ checkers = {} for ep in pkg_resources.iter_entry_points(group='frosted.plugins'): checkers.update({ep.name: ep.load()}) for plugin_name, plugin in checkers.items(): if self.filename != '(none)': messages = plugin.check(self.filename) for message, loc, args, kwargs in messages: self.report(message, loc, *args, **kwargs) def defer_function(self, callable): """Schedule a function handler to be called just before completion. This is used for handling function bodies, which must be deferred because code later in the file might modify the global scope. When 'callable' is called, the scope at the time this is called will be restored, however it will contain any new bindings added to it. 
""" self._deferred_functions.append((callable, self.scope_stack[:], self.offset)) def defer_assignment(self, callable): """Schedule an assignment handler to be called just after deferred function handlers.""" self._deferred_assignments.append((callable, self.scope_stack[:], self.offset)) def run_deferred(self, deferred): """Run the callables in deferred using their associated scope stack.""" for handler, scope, offset in deferred: self.scope_stack = scope self.offset = offset handler() @property def scope(self): return self.scope_stack[-1] def pop_scope(self): self.dead_scopes.append(self.scope_stack.pop()) def check_dead_scopes(self): """Look at scopes which have been fully examined and report names in them which were imported but unused.""" for scope in self.dead_scopes: export = isinstance(scope.get('__all__'), ExportBinding) if export: all = scope['__all__'].names() # Look for possible mistakes in the export list if not scope.importStarred and os.path.basename(self.filename) != '__init__.py': undefined = set(all) - set(scope) for name in undefined: self.report(messages.UndefinedExport, scope['__all__'].source, name) else: all = [] # Look for imported names that aren't used without checking imports in namespace definition for importation in scope.values(): if isinstance(importation, Importation) and not importation.used and importation.name not in all: self.report(messages.UnusedImport, importation.source, importation.name) def push_scope(self, scope_class=FunctionScope): self.scope_stack.append(scope_class()) def push_function_scope(self): # XXX Deprecated self.push_scope(FunctionScope) def push_class_scope(self): # XXX Deprecated self.push_scope(ClassScope) def report(self, message_class, *args, **kwargs): error_code = message_class.error_code if(not error_code[:2] + "00" in self.ignore_errors and not error_code in self.ignore_errors and not str(message_class.error_number) in self.ignore_errors): kwargs['verbose'] = self.settings.get('verbose') message = message_class(self.filename, *args, **kwargs) if message.lineno not in self.ignore_lines: self.messages.append(message) def has_parent(self, node, kind): while hasattr(node, 'parent'): node = node.parent if isinstance(node, kind): return True def get_common_ancestor(self, lnode, rnode, stop=None): stop = stop or self.root if lnode is rnode: return lnode if stop in (lnode, rnode): return stop if not hasattr(lnode, 'parent') or not hasattr(rnode, 'parent'): return if (lnode.level > rnode.level): return self.get_common_ancestor(lnode.parent, rnode, stop) if (rnode.level > lnode.level): return self.get_common_ancestor(lnode, rnode.parent, stop) return self.get_common_ancestor(lnode.parent, rnode.parent, stop) def descendant_of(self, node, ancestors, stop=None): for ancestor in ancestors: if self.get_common_ancestor(node, ancestor, stop) not in (stop, None): return True return False def on_fork(self, parent, lnode, rnode, items): return (self.descendant_of(lnode, items, parent) ^ self.descendant_of(rnode, items, parent)) def different_forks(self, lnode, rnode): """True, if lnode and rnode are located on different forks of IF/TRY.""" ancestor = self.get_common_ancestor(lnode, rnode) if isinstance(ancestor, ast.If): for fork in (ancestor.body, ancestor.orelse): if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, ast.Try): body = ancestor.body + ancestor.orelse for fork in [body] + [[hdl] for hdl in ancestor.handlers]: if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, 
ast.TryFinally): if self.on_fork(ancestor, lnode, rnode, ancestor.body): return True return False def add_binding(self, node, value, report_redef=True): """Called when a binding is altered. - `node` is the statement responsible for the change - `value` is the optional new value, a Binding instance, associated with the binding; if None, the binding is deleted if it exists. - if `report_redef` is True (default), rebinding while unused will be reported. """ redefinedWhileUnused = False if not isinstance(self.scope, ClassScope): for scope in self.scope_stack[::-1]: existing = scope.get(value.name) if (isinstance(existing, Importation) and not existing.used and (not isinstance(value, Importation) or value.fullName == existing.fullName) and report_redef and not self.different_forks(node, existing.source)): redefinedWhileUnused = True self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) existing = self.scope.get(value.name) if not redefinedWhileUnused and self.has_parent(value.source, ast.ListComp): if (existing and report_redef and not self.has_parent(existing.source, (ast.For, ast.ListComp)) and not self.different_forks(node, existing.source)): self.report(messages.RedefinedInListComp, node, value.name, existing.source) if (isinstance(existing, Definition) and not existing.used and not self.different_forks(node, existing.source)): self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) else: self.scope[value.name] = value def get_node_handler(self, node_class): try: return self._node_handlers[node_class] except KeyError: nodeType = str(node_class.__name__).upper() self._node_handlers[node_class] = handler = getattr(self, nodeType) return handler def iter_visible_scopes(self): outerScopes = itertools.islice(self.scope_stack, len(self.scope_stack) - 1) scopes = [scope for scope in outerScopes if isinstance(scope, (FunctionScope, ModuleScope))] if (isinstance(self.scope, GeneratorScope) and scopes[-1] != self.scope_stack[-2]): scopes.append(self.scope_stack[-2]) scopes.append(self.scope_stack[-1]) return iter(reversed(scopes)) def handle_node_load(self, node): name = node_name(node) if not name: return importStarred = False for scope in self.iter_visible_scopes(): importStarred = importStarred or scope.importStarred try: scope[name].used = (self.scope, node) except KeyError: pass else: return # look in the built-ins if importStarred or name in self.frosted_builtins: return if name == '__path__' and os.path.basename(self.filename) == '__init__.py': # the special name __path__ is valid only in packages return # protected with a NameError handler? 
if 'NameError' not in self.except_handlers[-1]: self.report(messages.UndefinedName, node, name) def handle_node_store(self, node): name = node_name(node) if not name: return # if the name hasn't already been defined in the current scope if isinstance(self.scope, FunctionScope) and name not in self.scope: # for each function or module scope above us for scope in self.scope_stack[:-1]: if not isinstance(scope, (FunctionScope, ModuleScope)): continue # if the name was defined in that scope, and the name has # been accessed already in the current scope, and hasn't # been declared global used = name in scope and scope[name].used if used and used[0] is self.scope and name not in self.scope.globals: # then it's probably a mistake self.report(messages.UndefinedLocal, scope[name].used[1], name, scope[name].source) break parent = getattr(node, 'parent', None) if isinstance(parent, (ast.For, ast.comprehension, ast.Tuple, ast.List)): binding = Binding(name, node) elif (parent is not None and name == '__all__' and isinstance(self.scope, ModuleScope)): binding = ExportBinding(name, parent.value) else: binding = Assignment(name, node) if name in self.scope: binding.used = self.scope[name].used self.add_binding(node, binding) def handle_node_delete(self, node): name = node_name(node) if not name: return if isinstance(self.scope, FunctionScope) and name in self.scope.globals: self.scope.globals.remove(name) else: try: del self.scope[name] except KeyError: self.report(messages.UndefinedName, node, name) def handle_children(self, tree): for node in ast.iter_child_nodes(tree): self.handleNode(node, tree) def is_docstring(self, node): """Determine if the given node is a docstring, as long as it is at the correct place in the node tree.""" return isinstance(node, ast.Str) or (isinstance(node, ast.Expr) and isinstance(node.value, ast.Str)) def docstring(self, node): if isinstance(node, ast.Expr): node = node.value if not isinstance(node, ast.Str): return (None, None) # Computed incorrectly if the docstring has backslash doctest_lineno = node.lineno - node.s.count('\n') - 1 return (node.s, doctest_lineno) def handleNode(self, node, parent): if node is None: return if self.offset and getattr(node, 'lineno', None) is not None: node.lineno += self.offset[0] node.col_offset += self.offset[1] if self.trace_tree: print(' ' * self.node_depth + node.__class__.__name__) if self.futures_allowed and not (isinstance(node, ast.ImportFrom) or self.is_docstring(node)): self.futures_allowed = False self.node_depth += 1 node.level = self.node_depth node.parent = parent try: handler = self.get_node_handler(node.__class__) handler(node) finally: self.node_depth -= 1 if self.trace_tree: print(' ' * self.node_depth + 'end ' + node.__class__.__name__) _get_doctest_examples = doctest.DocTestParser().get_examples def handle_doctests(self, node): try: docstring, node_lineno = self.docstring(node.body[0]) if not docstring: return examples = self._get_doctest_examples(docstring) except (ValueError, IndexError): # e.g. line 6 of the docstring for <string> has inconsistent # leading whitespace: ... 
return node_offset = self.offset or (0, 0) self.push_scope() for example in examples: try: tree = compile(example.source, "<doctest>", "exec", ast.PyCF_ONLY_AST) except SyntaxError: e = sys.exc_info()[1] position = (node_lineno + example.lineno + e.lineno, example.indent + 4 + (e.offset or 0)) self.report(messages.DoctestSyntaxError, node, position) else: self.offset = (node_offset[0] + node_lineno + example.lineno, node_offset[1] + example.indent + 4) self.handle_children(tree) self.offset = node_offset self.pop_scope() def is_generator(self, node): """Checks whether a function is a generator by looking for a yield statement or expression.""" if not isinstance(node.body, list): # lambdas can not be generators return False for item in node.body: if isinstance(item, (ast.Assign, ast.Expr)): if isinstance(item.value, ast.Yield): return True elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): if self.is_generator(item): return True return False def ignore(self, node): pass # "stmt" type nodes RETURN = DELETE = PRINT = WHILE = IF = WITH = WITHITEM = RAISE = TRYFINALLY = ASSERT = EXEC = EXPR = handle_children CONTINUE = BREAK = PASS = ignore # "expr" type nodes BOOLOP = BINOP = UNARYOP = IFEXP = DICT = SET = YIELD = YIELDFROM = COMPARE = REPR = ATTRIBUTE = SUBSCRIPT = \ LIST = TUPLE = STARRED = NAMECONSTANT = handle_children NUM = STR = BYTES = ELLIPSIS = ignore # "slice" type nodes SLICE = EXTSLICE = INDEX = handle_children # expression contexts are node instances too, though being constants LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore # same for operators AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = BITOR = BITXOR = BITAND = FLOORDIV = INVERT = \ NOT = UADD = USUB = EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = ignore # additional node types COMPREHENSION = KEYWORD = handle_children def GLOBAL(self, node): """Keep track of globals declarations.""" if isinstance(self.scope, FunctionScope): self.scope.globals.update(node.names) NONLOCAL = GLOBAL def LISTCOMP(self, node): # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) def GENERATOREXP(self, node): self.push_scope(GeneratorScope) # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) self.pop_scope() SETCOMP = GENERATOREXP def DICTCOMP(self, node): self.push_scope(GeneratorScope) for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.key, node) self.handleNode(node.value, node) self.pop_scope() def FOR(self, node): """Process bindings for loop variables.""" vars = [] def collectLoopVars(n): if isinstance(n, ast.Name): vars.append(n.id) elif isinstance(n, ast.expr_context): return else: for c in ast.iter_child_nodes(n): collectLoopVars(c) collectLoopVars(node.target) for varn in vars: if (isinstance(self.scope.get(varn), Importation) # unused ones will get an unused import warning and self.scope[varn].used): self.report(messages.ImportShadowedByLoopVar, node, varn, self.scope[varn].source) self.handle_children(node) def NAME(self, node): """Handle occurrence of Name (which can be a load/store/delete access.)""" # Locate the name in locals / function / globals scopes. 
if isinstance(node.ctx, (ast.Load, ast.AugLoad)): self.handle_node_load(node) if (node.id == 'locals' and isinstance(self.scope, FunctionScope) and isinstance(node.parent, ast.Call)): # we are doing locals() call in current scope self.scope.uses_locals = True elif isinstance(node.ctx, (ast.Store, ast.AugStore)): self.handle_node_store(node) elif isinstance(node.ctx, ast.Del): self.handle_node_delete(node) else: # must be a Param context -- this only happens for names in function # arguments, but these aren't dispatched through here raise RuntimeError("Got impossible expression context: %r" % (node.ctx,)) def CALL(self, node): f = node.func if isinstance(f, ast.Name): for scope in self.iter_visible_scopes(): definition = scope.get(f.id) if definition: if isinstance(definition, FunctionDefinition): definition.signature.checkCall(node, self, f.id) break self.handle_children(node) def FUNCTIONDEF(self, node): for deco in node.decorator_list: self.handleNode(deco, node) self.add_binding(node, FunctionDefinition(node.name, node)) self.LAMBDA(node) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) def LAMBDA(self, node): args = [] annotations = [] if PY2: def addArgs(arglist): for arg in arglist: if isinstance(arg, ast.Tuple): addArgs(arg.elts) else: if arg.id in args: self.report(messages.DuplicateArgument, node, arg.id) args.append(arg.id) addArgs(node.args.args) defaults = node.args.defaults else: for arg in node.args.args + node.args.kwonlyargs: annotations.append(arg.annotation) args.append(arg.arg) defaults = node.args.defaults + node.args.kw_defaults # Only for Python3 FunctionDefs is_py3_func = hasattr(node, 'returns') for arg_name in ('vararg', 'kwarg'): wildcard = getattr(node.args, arg_name) if not wildcard: continue args.append(getattr(wildcard, 'arg', wildcard)) if is_py3_func: if PY34_GTE: annotations.append(wildcard.annotation) else: argannotation = arg_name + 'annotation' annotations.append(getattr(node.args, argannotation)) if is_py3_func: annotations.append(node.returns) if PY3: if len(set(args)) < len(args): for (idx, arg) in enumerate(args): if arg in args[:idx]: self.report(messages.DuplicateArgument, node, arg) for child in annotations + defaults: if child: self.handleNode(child, node) def runFunction(): self.push_scope() for name in args: self.add_binding(node, Argument(name, node), report_redef=False) if isinstance(node.body, list): # case for FunctionDefs for stmt in node.body: self.handleNode(stmt, node) else: # case for Lambdas self.handleNode(node.body, node) def checkUnusedAssignments(): """Check to see if any assignments have not been used.""" for name, binding in self.scope.unusedAssignments(): self.report(messages.UnusedVariable, binding.source, name) self.defer_assignment(checkUnusedAssignments) if PY2: def checkReturnWithArgumentInsideGenerator(): """Check to see if there are any return statements with arguments but the function is a generator.""" if self.is_generator(node): stmt = self.find_return_with_argument(node) if stmt is not None: self.report(messages.ReturnWithArgsInsideGenerator, stmt) self.defer_assignment(checkReturnWithArgumentInsideGenerator) self.pop_scope() self.defer_function(runFunction) def CLASSDEF(self, node): """Check names used in a class definition, including its decorators, base classes, and the body of its definition. Additionally, add its name to the current scope. 
""" for deco in node.decorator_list: self.handleNode(deco, node) for baseNode in node.bases: self.handleNode(baseNode, node) if not PY2: for keywordNode in node.keywords: self.handleNode(keywordNode, node) self.push_scope(ClassScope) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) for stmt in node.body: self.handleNode(stmt, node) self.pop_scope() self.add_binding(node, ClassDefinition(node.name, node)) def ASSIGN(self, node): self.handleNode(node.value, node) for target in node.targets: self.handleNode(target, node) def AUGASSIGN(self, node): self.handle_node_load(node.target) self.handleNode(node.value, node) self.handleNode(node.target, node) def IMPORT(self, node): for alias in node.names: name = alias.asname or alias.name importation = Importation(name, node) self.add_binding(node, importation) def IMPORTFROM(self, node): if node.module == '__future__': if not self.futures_allowed: self.report(messages.LateFutureImport, node, [n.name for n in node.names]) else: self.futures_allowed = False for alias in node.names: if alias.name == '*': self.scope.importStarred = True self.report(messages.ImportStarUsed, node, node.module) continue name = alias.asname or alias.name importation = Importation(name, node) if node.module == '__future__': importation.used = (self.scope, node) self.add_binding(node, importation) def TRY(self, node): handler_names = [] # List the exception handlers for handler in node.handlers: if isinstance(handler.type, ast.Tuple): for exc_type in handler.type.elts: handler_names.append(node_name(exc_type)) elif handler.type: handler_names.append(node_name(handler.type)) # Memorize the except handlers and process the body self.except_handlers.append(handler_names) for child in node.body: self.handleNode(child, node) self.except_handlers.pop() # Process the other nodes: "except:", "else:", "finally:" for child in ast.iter_child_nodes(node): if child not in node.body: self.handleNode(child, node) TRYEXCEPT = TRY def EXCEPTHANDLER(self, node): # 3.x: in addition to handling children, we must handle the name of # the exception, which is not a Name node, but a simple string. if node.type is None: self.report(messages.BareExcept, node) if isinstance(node.name, str): self.handle_node_store(node) self.handle_children(node)
timothycrosley/deprecated.frosted
frosted/checker.py
Checker.is_generator
python
def is_generator(self, node): if not isinstance(node.body, list): # lambdas can not be generators return False for item in node.body: if isinstance(item, (ast.Assign, ast.Expr)): if isinstance(item.value, ast.Yield): return True elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): if self.is_generator(item): return True return False
Checks whether a function is a generator by looking for a yield statement or expression.
train
https://github.com/timothycrosley/deprecated.frosted/blob/61ba7f341fc55676c3580c8c4e52117986cd5e12/frosted/checker.py#L620-L633
null
class Checker(object): """The core of frosted, checks the cleanliness and sanity of Python code.""" node_depth = 0 offset = None trace_tree = False frosted_builtins = FROSTED_BUILTINS def __init__(self, tree, filename='(none)', builtins=None, ignore_lines=(), **settings): self.settings = settings self.ignore_errors = settings.get('ignore_frosted_errors', []) self.ignore_lines = ignore_lines file_specific_ignores = settings.get('ignore_frosted_errors_for_' + (os.path.basename(filename) or ""), None) if file_specific_ignores: self.ignore_errors += file_specific_ignores self._node_handlers = {} self._deferred_functions = [] self._deferred_assignments = [] self.dead_scopes = [] self.messages = [] self.filename = filename if builtins: self.frosted_builtins = self.frosted_builtins.union(builtins) self.scope_stack = [ModuleScope()] self.except_handlers = [()] self.futures_allowed = True self.root = tree self.handle_children(tree) self.run_deferred(self._deferred_functions) self._deferred_functions = None self.run_deferred(self._deferred_assignments) self._deferred_assignments = None del self.scope_stack[1:] self.pop_scope() self.check_dead_scopes() self.check_plugins() def check_plugins(self): """ collect plugins from entry point 'frosted.plugins' and run their check() method, passing the filename """ checkers = {} for ep in pkg_resources.iter_entry_points(group='frosted.plugins'): checkers.update({ep.name: ep.load()}) for plugin_name, plugin in checkers.items(): if self.filename != '(none)': messages = plugin.check(self.filename) for message, loc, args, kwargs in messages: self.report(message, loc, *args, **kwargs) def defer_function(self, callable): """Schedule a function handler to be called just before completion. This is used for handling function bodies, which must be deferred because code later in the file might modify the global scope. When 'callable' is called, the scope at the time this is called will be restored, however it will contain any new bindings added to it. 
""" self._deferred_functions.append((callable, self.scope_stack[:], self.offset)) def defer_assignment(self, callable): """Schedule an assignment handler to be called just after deferred function handlers.""" self._deferred_assignments.append((callable, self.scope_stack[:], self.offset)) def run_deferred(self, deferred): """Run the callables in deferred using their associated scope stack.""" for handler, scope, offset in deferred: self.scope_stack = scope self.offset = offset handler() @property def scope(self): return self.scope_stack[-1] def pop_scope(self): self.dead_scopes.append(self.scope_stack.pop()) def check_dead_scopes(self): """Look at scopes which have been fully examined and report names in them which were imported but unused.""" for scope in self.dead_scopes: export = isinstance(scope.get('__all__'), ExportBinding) if export: all = scope['__all__'].names() # Look for possible mistakes in the export list if not scope.importStarred and os.path.basename(self.filename) != '__init__.py': undefined = set(all) - set(scope) for name in undefined: self.report(messages.UndefinedExport, scope['__all__'].source, name) else: all = [] # Look for imported names that aren't used without checking imports in namespace definition for importation in scope.values(): if isinstance(importation, Importation) and not importation.used and importation.name not in all: self.report(messages.UnusedImport, importation.source, importation.name) def push_scope(self, scope_class=FunctionScope): self.scope_stack.append(scope_class()) def push_function_scope(self): # XXX Deprecated self.push_scope(FunctionScope) def push_class_scope(self): # XXX Deprecated self.push_scope(ClassScope) def report(self, message_class, *args, **kwargs): error_code = message_class.error_code if(not error_code[:2] + "00" in self.ignore_errors and not error_code in self.ignore_errors and not str(message_class.error_number) in self.ignore_errors): kwargs['verbose'] = self.settings.get('verbose') message = message_class(self.filename, *args, **kwargs) if message.lineno not in self.ignore_lines: self.messages.append(message) def has_parent(self, node, kind): while hasattr(node, 'parent'): node = node.parent if isinstance(node, kind): return True def get_common_ancestor(self, lnode, rnode, stop=None): stop = stop or self.root if lnode is rnode: return lnode if stop in (lnode, rnode): return stop if not hasattr(lnode, 'parent') or not hasattr(rnode, 'parent'): return if (lnode.level > rnode.level): return self.get_common_ancestor(lnode.parent, rnode, stop) if (rnode.level > lnode.level): return self.get_common_ancestor(lnode, rnode.parent, stop) return self.get_common_ancestor(lnode.parent, rnode.parent, stop) def descendant_of(self, node, ancestors, stop=None): for ancestor in ancestors: if self.get_common_ancestor(node, ancestor, stop) not in (stop, None): return True return False def on_fork(self, parent, lnode, rnode, items): return (self.descendant_of(lnode, items, parent) ^ self.descendant_of(rnode, items, parent)) def different_forks(self, lnode, rnode): """True, if lnode and rnode are located on different forks of IF/TRY.""" ancestor = self.get_common_ancestor(lnode, rnode) if isinstance(ancestor, ast.If): for fork in (ancestor.body, ancestor.orelse): if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, ast.Try): body = ancestor.body + ancestor.orelse for fork in [body] + [[hdl] for hdl in ancestor.handlers]: if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, 
ast.TryFinally): if self.on_fork(ancestor, lnode, rnode, ancestor.body): return True return False def add_binding(self, node, value, report_redef=True): """Called when a binding is altered. - `node` is the statement responsible for the change - `value` is the optional new value, a Binding instance, associated with the binding; if None, the binding is deleted if it exists. - if `report_redef` is True (default), rebinding while unused will be reported. """ redefinedWhileUnused = False if not isinstance(self.scope, ClassScope): for scope in self.scope_stack[::-1]: existing = scope.get(value.name) if (isinstance(existing, Importation) and not existing.used and (not isinstance(value, Importation) or value.fullName == existing.fullName) and report_redef and not self.different_forks(node, existing.source)): redefinedWhileUnused = True self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) existing = self.scope.get(value.name) if not redefinedWhileUnused and self.has_parent(value.source, ast.ListComp): if (existing and report_redef and not self.has_parent(existing.source, (ast.For, ast.ListComp)) and not self.different_forks(node, existing.source)): self.report(messages.RedefinedInListComp, node, value.name, existing.source) if (isinstance(existing, Definition) and not existing.used and not self.different_forks(node, existing.source)): self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) else: self.scope[value.name] = value def get_node_handler(self, node_class): try: return self._node_handlers[node_class] except KeyError: nodeType = str(node_class.__name__).upper() self._node_handlers[node_class] = handler = getattr(self, nodeType) return handler def iter_visible_scopes(self): outerScopes = itertools.islice(self.scope_stack, len(self.scope_stack) - 1) scopes = [scope for scope in outerScopes if isinstance(scope, (FunctionScope, ModuleScope))] if (isinstance(self.scope, GeneratorScope) and scopes[-1] != self.scope_stack[-2]): scopes.append(self.scope_stack[-2]) scopes.append(self.scope_stack[-1]) return iter(reversed(scopes)) def handle_node_load(self, node): name = node_name(node) if not name: return importStarred = False for scope in self.iter_visible_scopes(): importStarred = importStarred or scope.importStarred try: scope[name].used = (self.scope, node) except KeyError: pass else: return # look in the built-ins if importStarred or name in self.frosted_builtins: return if name == '__path__' and os.path.basename(self.filename) == '__init__.py': # the special name __path__ is valid only in packages return # protected with a NameError handler? 
if 'NameError' not in self.except_handlers[-1]: self.report(messages.UndefinedName, node, name) def handle_node_store(self, node): name = node_name(node) if not name: return # if the name hasn't already been defined in the current scope if isinstance(self.scope, FunctionScope) and name not in self.scope: # for each function or module scope above us for scope in self.scope_stack[:-1]: if not isinstance(scope, (FunctionScope, ModuleScope)): continue # if the name was defined in that scope, and the name has # been accessed already in the current scope, and hasn't # been declared global used = name in scope and scope[name].used if used and used[0] is self.scope and name not in self.scope.globals: # then it's probably a mistake self.report(messages.UndefinedLocal, scope[name].used[1], name, scope[name].source) break parent = getattr(node, 'parent', None) if isinstance(parent, (ast.For, ast.comprehension, ast.Tuple, ast.List)): binding = Binding(name, node) elif (parent is not None and name == '__all__' and isinstance(self.scope, ModuleScope)): binding = ExportBinding(name, parent.value) else: binding = Assignment(name, node) if name in self.scope: binding.used = self.scope[name].used self.add_binding(node, binding) def handle_node_delete(self, node): name = node_name(node) if not name: return if isinstance(self.scope, FunctionScope) and name in self.scope.globals: self.scope.globals.remove(name) else: try: del self.scope[name] except KeyError: self.report(messages.UndefinedName, node, name) def handle_children(self, tree): for node in ast.iter_child_nodes(tree): self.handleNode(node, tree) def is_docstring(self, node): """Determine if the given node is a docstring, as long as it is at the correct place in the node tree.""" return isinstance(node, ast.Str) or (isinstance(node, ast.Expr) and isinstance(node.value, ast.Str)) def docstring(self, node): if isinstance(node, ast.Expr): node = node.value if not isinstance(node, ast.Str): return (None, None) # Computed incorrectly if the docstring has backslash doctest_lineno = node.lineno - node.s.count('\n') - 1 return (node.s, doctest_lineno) def handleNode(self, node, parent): if node is None: return if self.offset and getattr(node, 'lineno', None) is not None: node.lineno += self.offset[0] node.col_offset += self.offset[1] if self.trace_tree: print(' ' * self.node_depth + node.__class__.__name__) if self.futures_allowed and not (isinstance(node, ast.ImportFrom) or self.is_docstring(node)): self.futures_allowed = False self.node_depth += 1 node.level = self.node_depth node.parent = parent try: handler = self.get_node_handler(node.__class__) handler(node) finally: self.node_depth -= 1 if self.trace_tree: print(' ' * self.node_depth + 'end ' + node.__class__.__name__) _get_doctest_examples = doctest.DocTestParser().get_examples def handle_doctests(self, node): try: docstring, node_lineno = self.docstring(node.body[0]) if not docstring: return examples = self._get_doctest_examples(docstring) except (ValueError, IndexError): # e.g. line 6 of the docstring for <string> has inconsistent # leading whitespace: ... 
return node_offset = self.offset or (0, 0) self.push_scope() for example in examples: try: tree = compile(example.source, "<doctest>", "exec", ast.PyCF_ONLY_AST) except SyntaxError: e = sys.exc_info()[1] position = (node_lineno + example.lineno + e.lineno, example.indent + 4 + (e.offset or 0)) self.report(messages.DoctestSyntaxError, node, position) else: self.offset = (node_offset[0] + node_lineno + example.lineno, node_offset[1] + example.indent + 4) self.handle_children(tree) self.offset = node_offset self.pop_scope() def find_return_with_argument(self, node): """Finds and returns a return statment that has an argument. Note that we should use node.returns in Python 3, but this method is never called in Python 3 so we don't bother checking. """ for item in node.body: if isinstance(item, ast.Return) and item.value: return item elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): return_with_argument = self.find_return_with_argument(item) if return_with_argument: return return_with_argument def ignore(self, node): pass # "stmt" type nodes RETURN = DELETE = PRINT = WHILE = IF = WITH = WITHITEM = RAISE = TRYFINALLY = ASSERT = EXEC = EXPR = handle_children CONTINUE = BREAK = PASS = ignore # "expr" type nodes BOOLOP = BINOP = UNARYOP = IFEXP = DICT = SET = YIELD = YIELDFROM = COMPARE = REPR = ATTRIBUTE = SUBSCRIPT = \ LIST = TUPLE = STARRED = NAMECONSTANT = handle_children NUM = STR = BYTES = ELLIPSIS = ignore # "slice" type nodes SLICE = EXTSLICE = INDEX = handle_children # expression contexts are node instances too, though being constants LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore # same for operators AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = BITOR = BITXOR = BITAND = FLOORDIV = INVERT = \ NOT = UADD = USUB = EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = ignore # additional node types COMPREHENSION = KEYWORD = handle_children def GLOBAL(self, node): """Keep track of globals declarations.""" if isinstance(self.scope, FunctionScope): self.scope.globals.update(node.names) NONLOCAL = GLOBAL def LISTCOMP(self, node): # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) def GENERATOREXP(self, node): self.push_scope(GeneratorScope) # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) self.pop_scope() SETCOMP = GENERATOREXP def DICTCOMP(self, node): self.push_scope(GeneratorScope) for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.key, node) self.handleNode(node.value, node) self.pop_scope() def FOR(self, node): """Process bindings for loop variables.""" vars = [] def collectLoopVars(n): if isinstance(n, ast.Name): vars.append(n.id) elif isinstance(n, ast.expr_context): return else: for c in ast.iter_child_nodes(n): collectLoopVars(c) collectLoopVars(node.target) for varn in vars: if (isinstance(self.scope.get(varn), Importation) # unused ones will get an unused import warning and self.scope[varn].used): self.report(messages.ImportShadowedByLoopVar, node, varn, self.scope[varn].source) self.handle_children(node) def NAME(self, node): """Handle occurrence of Name (which can be a load/store/delete access.)""" # Locate the name in locals / function / globals scopes. 
if isinstance(node.ctx, (ast.Load, ast.AugLoad)): self.handle_node_load(node) if (node.id == 'locals' and isinstance(self.scope, FunctionScope) and isinstance(node.parent, ast.Call)): # we are doing locals() call in current scope self.scope.uses_locals = True elif isinstance(node.ctx, (ast.Store, ast.AugStore)): self.handle_node_store(node) elif isinstance(node.ctx, ast.Del): self.handle_node_delete(node) else: # must be a Param context -- this only happens for names in function # arguments, but these aren't dispatched through here raise RuntimeError("Got impossible expression context: %r" % (node.ctx,)) def CALL(self, node): f = node.func if isinstance(f, ast.Name): for scope in self.iter_visible_scopes(): definition = scope.get(f.id) if definition: if isinstance(definition, FunctionDefinition): definition.signature.checkCall(node, self, f.id) break self.handle_children(node) def FUNCTIONDEF(self, node): for deco in node.decorator_list: self.handleNode(deco, node) self.add_binding(node, FunctionDefinition(node.name, node)) self.LAMBDA(node) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) def LAMBDA(self, node): args = [] annotations = [] if PY2: def addArgs(arglist): for arg in arglist: if isinstance(arg, ast.Tuple): addArgs(arg.elts) else: if arg.id in args: self.report(messages.DuplicateArgument, node, arg.id) args.append(arg.id) addArgs(node.args.args) defaults = node.args.defaults else: for arg in node.args.args + node.args.kwonlyargs: annotations.append(arg.annotation) args.append(arg.arg) defaults = node.args.defaults + node.args.kw_defaults # Only for Python3 FunctionDefs is_py3_func = hasattr(node, 'returns') for arg_name in ('vararg', 'kwarg'): wildcard = getattr(node.args, arg_name) if not wildcard: continue args.append(getattr(wildcard, 'arg', wildcard)) if is_py3_func: if PY34_GTE: annotations.append(wildcard.annotation) else: argannotation = arg_name + 'annotation' annotations.append(getattr(node.args, argannotation)) if is_py3_func: annotations.append(node.returns) if PY3: if len(set(args)) < len(args): for (idx, arg) in enumerate(args): if arg in args[:idx]: self.report(messages.DuplicateArgument, node, arg) for child in annotations + defaults: if child: self.handleNode(child, node) def runFunction(): self.push_scope() for name in args: self.add_binding(node, Argument(name, node), report_redef=False) if isinstance(node.body, list): # case for FunctionDefs for stmt in node.body: self.handleNode(stmt, node) else: # case for Lambdas self.handleNode(node.body, node) def checkUnusedAssignments(): """Check to see if any assignments have not been used.""" for name, binding in self.scope.unusedAssignments(): self.report(messages.UnusedVariable, binding.source, name) self.defer_assignment(checkUnusedAssignments) if PY2: def checkReturnWithArgumentInsideGenerator(): """Check to see if there are any return statements with arguments but the function is a generator.""" if self.is_generator(node): stmt = self.find_return_with_argument(node) if stmt is not None: self.report(messages.ReturnWithArgsInsideGenerator, stmt) self.defer_assignment(checkReturnWithArgumentInsideGenerator) self.pop_scope() self.defer_function(runFunction) def CLASSDEF(self, node): """Check names used in a class definition, including its decorators, base classes, and the body of its definition. Additionally, add its name to the current scope. 
""" for deco in node.decorator_list: self.handleNode(deco, node) for baseNode in node.bases: self.handleNode(baseNode, node) if not PY2: for keywordNode in node.keywords: self.handleNode(keywordNode, node) self.push_scope(ClassScope) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) for stmt in node.body: self.handleNode(stmt, node) self.pop_scope() self.add_binding(node, ClassDefinition(node.name, node)) def ASSIGN(self, node): self.handleNode(node.value, node) for target in node.targets: self.handleNode(target, node) def AUGASSIGN(self, node): self.handle_node_load(node.target) self.handleNode(node.value, node) self.handleNode(node.target, node) def IMPORT(self, node): for alias in node.names: name = alias.asname or alias.name importation = Importation(name, node) self.add_binding(node, importation) def IMPORTFROM(self, node): if node.module == '__future__': if not self.futures_allowed: self.report(messages.LateFutureImport, node, [n.name for n in node.names]) else: self.futures_allowed = False for alias in node.names: if alias.name == '*': self.scope.importStarred = True self.report(messages.ImportStarUsed, node, node.module) continue name = alias.asname or alias.name importation = Importation(name, node) if node.module == '__future__': importation.used = (self.scope, node) self.add_binding(node, importation) def TRY(self, node): handler_names = [] # List the exception handlers for handler in node.handlers: if isinstance(handler.type, ast.Tuple): for exc_type in handler.type.elts: handler_names.append(node_name(exc_type)) elif handler.type: handler_names.append(node_name(handler.type)) # Memorize the except handlers and process the body self.except_handlers.append(handler_names) for child in node.body: self.handleNode(child, node) self.except_handlers.pop() # Process the other nodes: "except:", "else:", "finally:" for child in ast.iter_child_nodes(node): if child not in node.body: self.handleNode(child, node) TRYEXCEPT = TRY def EXCEPTHANDLER(self, node): # 3.x: in addition to handling children, we must handle the name of # the exception, which is not a Name node, but a simple string. if node.type is None: self.report(messages.BareExcept, node) if isinstance(node.name, str): self.handle_node_store(node) self.handle_children(node)
timothycrosley/deprecated.frosted
frosted/checker.py
Checker.GLOBAL
python
def GLOBAL(self, node): if isinstance(self.scope, FunctionScope): self.scope.globals.update(node.names)
Keep track of global declarations.
train
https://github.com/timothycrosley/deprecated.frosted/blob/61ba7f341fc55676c3580c8c4e52117986cd5e12/frosted/checker.py#L662-L665
null
class Checker(object): """The core of frosted, checks the cleanliness and sanity of Python code.""" node_depth = 0 offset = None trace_tree = False frosted_builtins = FROSTED_BUILTINS def __init__(self, tree, filename='(none)', builtins=None, ignore_lines=(), **settings): self.settings = settings self.ignore_errors = settings.get('ignore_frosted_errors', []) self.ignore_lines = ignore_lines file_specific_ignores = settings.get('ignore_frosted_errors_for_' + (os.path.basename(filename) or ""), None) if file_specific_ignores: self.ignore_errors += file_specific_ignores self._node_handlers = {} self._deferred_functions = [] self._deferred_assignments = [] self.dead_scopes = [] self.messages = [] self.filename = filename if builtins: self.frosted_builtins = self.frosted_builtins.union(builtins) self.scope_stack = [ModuleScope()] self.except_handlers = [()] self.futures_allowed = True self.root = tree self.handle_children(tree) self.run_deferred(self._deferred_functions) self._deferred_functions = None self.run_deferred(self._deferred_assignments) self._deferred_assignments = None del self.scope_stack[1:] self.pop_scope() self.check_dead_scopes() self.check_plugins() def check_plugins(self): """ collect plugins from entry point 'frosted.plugins' and run their check() method, passing the filename """ checkers = {} for ep in pkg_resources.iter_entry_points(group='frosted.plugins'): checkers.update({ep.name: ep.load()}) for plugin_name, plugin in checkers.items(): if self.filename != '(none)': messages = plugin.check(self.filename) for message, loc, args, kwargs in messages: self.report(message, loc, *args, **kwargs) def defer_function(self, callable): """Schedule a function handler to be called just before completion. This is used for handling function bodies, which must be deferred because code later in the file might modify the global scope. When 'callable' is called, the scope at the time this is called will be restored, however it will contain any new bindings added to it. 
""" self._deferred_functions.append((callable, self.scope_stack[:], self.offset)) def defer_assignment(self, callable): """Schedule an assignment handler to be called just after deferred function handlers.""" self._deferred_assignments.append((callable, self.scope_stack[:], self.offset)) def run_deferred(self, deferred): """Run the callables in deferred using their associated scope stack.""" for handler, scope, offset in deferred: self.scope_stack = scope self.offset = offset handler() @property def scope(self): return self.scope_stack[-1] def pop_scope(self): self.dead_scopes.append(self.scope_stack.pop()) def check_dead_scopes(self): """Look at scopes which have been fully examined and report names in them which were imported but unused.""" for scope in self.dead_scopes: export = isinstance(scope.get('__all__'), ExportBinding) if export: all = scope['__all__'].names() # Look for possible mistakes in the export list if not scope.importStarred and os.path.basename(self.filename) != '__init__.py': undefined = set(all) - set(scope) for name in undefined: self.report(messages.UndefinedExport, scope['__all__'].source, name) else: all = [] # Look for imported names that aren't used without checking imports in namespace definition for importation in scope.values(): if isinstance(importation, Importation) and not importation.used and importation.name not in all: self.report(messages.UnusedImport, importation.source, importation.name) def push_scope(self, scope_class=FunctionScope): self.scope_stack.append(scope_class()) def push_function_scope(self): # XXX Deprecated self.push_scope(FunctionScope) def push_class_scope(self): # XXX Deprecated self.push_scope(ClassScope) def report(self, message_class, *args, **kwargs): error_code = message_class.error_code if(not error_code[:2] + "00" in self.ignore_errors and not error_code in self.ignore_errors and not str(message_class.error_number) in self.ignore_errors): kwargs['verbose'] = self.settings.get('verbose') message = message_class(self.filename, *args, **kwargs) if message.lineno not in self.ignore_lines: self.messages.append(message) def has_parent(self, node, kind): while hasattr(node, 'parent'): node = node.parent if isinstance(node, kind): return True def get_common_ancestor(self, lnode, rnode, stop=None): stop = stop or self.root if lnode is rnode: return lnode if stop in (lnode, rnode): return stop if not hasattr(lnode, 'parent') or not hasattr(rnode, 'parent'): return if (lnode.level > rnode.level): return self.get_common_ancestor(lnode.parent, rnode, stop) if (rnode.level > lnode.level): return self.get_common_ancestor(lnode, rnode.parent, stop) return self.get_common_ancestor(lnode.parent, rnode.parent, stop) def descendant_of(self, node, ancestors, stop=None): for ancestor in ancestors: if self.get_common_ancestor(node, ancestor, stop) not in (stop, None): return True return False def on_fork(self, parent, lnode, rnode, items): return (self.descendant_of(lnode, items, parent) ^ self.descendant_of(rnode, items, parent)) def different_forks(self, lnode, rnode): """True, if lnode and rnode are located on different forks of IF/TRY.""" ancestor = self.get_common_ancestor(lnode, rnode) if isinstance(ancestor, ast.If): for fork in (ancestor.body, ancestor.orelse): if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, ast.Try): body = ancestor.body + ancestor.orelse for fork in [body] + [[hdl] for hdl in ancestor.handlers]: if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, 
ast.TryFinally): if self.on_fork(ancestor, lnode, rnode, ancestor.body): return True return False def add_binding(self, node, value, report_redef=True): """Called when a binding is altered. - `node` is the statement responsible for the change - `value` is the optional new value, a Binding instance, associated with the binding; if None, the binding is deleted if it exists. - if `report_redef` is True (default), rebinding while unused will be reported. """ redefinedWhileUnused = False if not isinstance(self.scope, ClassScope): for scope in self.scope_stack[::-1]: existing = scope.get(value.name) if (isinstance(existing, Importation) and not existing.used and (not isinstance(value, Importation) or value.fullName == existing.fullName) and report_redef and not self.different_forks(node, existing.source)): redefinedWhileUnused = True self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) existing = self.scope.get(value.name) if not redefinedWhileUnused and self.has_parent(value.source, ast.ListComp): if (existing and report_redef and not self.has_parent(existing.source, (ast.For, ast.ListComp)) and not self.different_forks(node, existing.source)): self.report(messages.RedefinedInListComp, node, value.name, existing.source) if (isinstance(existing, Definition) and not existing.used and not self.different_forks(node, existing.source)): self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) else: self.scope[value.name] = value def get_node_handler(self, node_class): try: return self._node_handlers[node_class] except KeyError: nodeType = str(node_class.__name__).upper() self._node_handlers[node_class] = handler = getattr(self, nodeType) return handler def iter_visible_scopes(self): outerScopes = itertools.islice(self.scope_stack, len(self.scope_stack) - 1) scopes = [scope for scope in outerScopes if isinstance(scope, (FunctionScope, ModuleScope))] if (isinstance(self.scope, GeneratorScope) and scopes[-1] != self.scope_stack[-2]): scopes.append(self.scope_stack[-2]) scopes.append(self.scope_stack[-1]) return iter(reversed(scopes)) def handle_node_load(self, node): name = node_name(node) if not name: return importStarred = False for scope in self.iter_visible_scopes(): importStarred = importStarred or scope.importStarred try: scope[name].used = (self.scope, node) except KeyError: pass else: return # look in the built-ins if importStarred or name in self.frosted_builtins: return if name == '__path__' and os.path.basename(self.filename) == '__init__.py': # the special name __path__ is valid only in packages return # protected with a NameError handler? 
if 'NameError' not in self.except_handlers[-1]: self.report(messages.UndefinedName, node, name) def handle_node_store(self, node): name = node_name(node) if not name: return # if the name hasn't already been defined in the current scope if isinstance(self.scope, FunctionScope) and name not in self.scope: # for each function or module scope above us for scope in self.scope_stack[:-1]: if not isinstance(scope, (FunctionScope, ModuleScope)): continue # if the name was defined in that scope, and the name has # been accessed already in the current scope, and hasn't # been declared global used = name in scope and scope[name].used if used and used[0] is self.scope and name not in self.scope.globals: # then it's probably a mistake self.report(messages.UndefinedLocal, scope[name].used[1], name, scope[name].source) break parent = getattr(node, 'parent', None) if isinstance(parent, (ast.For, ast.comprehension, ast.Tuple, ast.List)): binding = Binding(name, node) elif (parent is not None and name == '__all__' and isinstance(self.scope, ModuleScope)): binding = ExportBinding(name, parent.value) else: binding = Assignment(name, node) if name in self.scope: binding.used = self.scope[name].used self.add_binding(node, binding) def handle_node_delete(self, node): name = node_name(node) if not name: return if isinstance(self.scope, FunctionScope) and name in self.scope.globals: self.scope.globals.remove(name) else: try: del self.scope[name] except KeyError: self.report(messages.UndefinedName, node, name) def handle_children(self, tree): for node in ast.iter_child_nodes(tree): self.handleNode(node, tree) def is_docstring(self, node): """Determine if the given node is a docstring, as long as it is at the correct place in the node tree.""" return isinstance(node, ast.Str) or (isinstance(node, ast.Expr) and isinstance(node.value, ast.Str)) def docstring(self, node): if isinstance(node, ast.Expr): node = node.value if not isinstance(node, ast.Str): return (None, None) # Computed incorrectly if the docstring has backslash doctest_lineno = node.lineno - node.s.count('\n') - 1 return (node.s, doctest_lineno) def handleNode(self, node, parent): if node is None: return if self.offset and getattr(node, 'lineno', None) is not None: node.lineno += self.offset[0] node.col_offset += self.offset[1] if self.trace_tree: print(' ' * self.node_depth + node.__class__.__name__) if self.futures_allowed and not (isinstance(node, ast.ImportFrom) or self.is_docstring(node)): self.futures_allowed = False self.node_depth += 1 node.level = self.node_depth node.parent = parent try: handler = self.get_node_handler(node.__class__) handler(node) finally: self.node_depth -= 1 if self.trace_tree: print(' ' * self.node_depth + 'end ' + node.__class__.__name__) _get_doctest_examples = doctest.DocTestParser().get_examples def handle_doctests(self, node): try: docstring, node_lineno = self.docstring(node.body[0]) if not docstring: return examples = self._get_doctest_examples(docstring) except (ValueError, IndexError): # e.g. line 6 of the docstring for <string> has inconsistent # leading whitespace: ... 
return node_offset = self.offset or (0, 0) self.push_scope() for example in examples: try: tree = compile(example.source, "<doctest>", "exec", ast.PyCF_ONLY_AST) except SyntaxError: e = sys.exc_info()[1] position = (node_lineno + example.lineno + e.lineno, example.indent + 4 + (e.offset or 0)) self.report(messages.DoctestSyntaxError, node, position) else: self.offset = (node_offset[0] + node_lineno + example.lineno, node_offset[1] + example.indent + 4) self.handle_children(tree) self.offset = node_offset self.pop_scope() def find_return_with_argument(self, node): """Finds and returns a return statment that has an argument. Note that we should use node.returns in Python 3, but this method is never called in Python 3 so we don't bother checking. """ for item in node.body: if isinstance(item, ast.Return) and item.value: return item elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): return_with_argument = self.find_return_with_argument(item) if return_with_argument: return return_with_argument def is_generator(self, node): """Checks whether a function is a generator by looking for a yield statement or expression.""" if not isinstance(node.body, list): # lambdas can not be generators return False for item in node.body: if isinstance(item, (ast.Assign, ast.Expr)): if isinstance(item.value, ast.Yield): return True elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): if self.is_generator(item): return True return False def ignore(self, node): pass # "stmt" type nodes RETURN = DELETE = PRINT = WHILE = IF = WITH = WITHITEM = RAISE = TRYFINALLY = ASSERT = EXEC = EXPR = handle_children CONTINUE = BREAK = PASS = ignore # "expr" type nodes BOOLOP = BINOP = UNARYOP = IFEXP = DICT = SET = YIELD = YIELDFROM = COMPARE = REPR = ATTRIBUTE = SUBSCRIPT = \ LIST = TUPLE = STARRED = NAMECONSTANT = handle_children NUM = STR = BYTES = ELLIPSIS = ignore # "slice" type nodes SLICE = EXTSLICE = INDEX = handle_children # expression contexts are node instances too, though being constants LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore # same for operators AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = BITOR = BITXOR = BITAND = FLOORDIV = INVERT = \ NOT = UADD = USUB = EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = ignore # additional node types COMPREHENSION = KEYWORD = handle_children NONLOCAL = GLOBAL def LISTCOMP(self, node): # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) def GENERATOREXP(self, node): self.push_scope(GeneratorScope) # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) self.pop_scope() SETCOMP = GENERATOREXP def DICTCOMP(self, node): self.push_scope(GeneratorScope) for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.key, node) self.handleNode(node.value, node) self.pop_scope() def FOR(self, node): """Process bindings for loop variables.""" vars = [] def collectLoopVars(n): if isinstance(n, ast.Name): vars.append(n.id) elif isinstance(n, ast.expr_context): return else: for c in ast.iter_child_nodes(n): collectLoopVars(c) collectLoopVars(node.target) for varn in vars: if (isinstance(self.scope.get(varn), Importation) # unused ones will get an unused import warning and self.scope[varn].used): self.report(messages.ImportShadowedByLoopVar, node, varn, self.scope[varn].source) self.handle_children(node) def NAME(self, node): """Handle occurrence of Name (which can 
be a load/store/delete access.)""" # Locate the name in locals / function / globals scopes. if isinstance(node.ctx, (ast.Load, ast.AugLoad)): self.handle_node_load(node) if (node.id == 'locals' and isinstance(self.scope, FunctionScope) and isinstance(node.parent, ast.Call)): # we are doing locals() call in current scope self.scope.uses_locals = True elif isinstance(node.ctx, (ast.Store, ast.AugStore)): self.handle_node_store(node) elif isinstance(node.ctx, ast.Del): self.handle_node_delete(node) else: # must be a Param context -- this only happens for names in function # arguments, but these aren't dispatched through here raise RuntimeError("Got impossible expression context: %r" % (node.ctx,)) def CALL(self, node): f = node.func if isinstance(f, ast.Name): for scope in self.iter_visible_scopes(): definition = scope.get(f.id) if definition: if isinstance(definition, FunctionDefinition): definition.signature.checkCall(node, self, f.id) break self.handle_children(node) def FUNCTIONDEF(self, node): for deco in node.decorator_list: self.handleNode(deco, node) self.add_binding(node, FunctionDefinition(node.name, node)) self.LAMBDA(node) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) def LAMBDA(self, node): args = [] annotations = [] if PY2: def addArgs(arglist): for arg in arglist: if isinstance(arg, ast.Tuple): addArgs(arg.elts) else: if arg.id in args: self.report(messages.DuplicateArgument, node, arg.id) args.append(arg.id) addArgs(node.args.args) defaults = node.args.defaults else: for arg in node.args.args + node.args.kwonlyargs: annotations.append(arg.annotation) args.append(arg.arg) defaults = node.args.defaults + node.args.kw_defaults # Only for Python3 FunctionDefs is_py3_func = hasattr(node, 'returns') for arg_name in ('vararg', 'kwarg'): wildcard = getattr(node.args, arg_name) if not wildcard: continue args.append(getattr(wildcard, 'arg', wildcard)) if is_py3_func: if PY34_GTE: annotations.append(wildcard.annotation) else: argannotation = arg_name + 'annotation' annotations.append(getattr(node.args, argannotation)) if is_py3_func: annotations.append(node.returns) if PY3: if len(set(args)) < len(args): for (idx, arg) in enumerate(args): if arg in args[:idx]: self.report(messages.DuplicateArgument, node, arg) for child in annotations + defaults: if child: self.handleNode(child, node) def runFunction(): self.push_scope() for name in args: self.add_binding(node, Argument(name, node), report_redef=False) if isinstance(node.body, list): # case for FunctionDefs for stmt in node.body: self.handleNode(stmt, node) else: # case for Lambdas self.handleNode(node.body, node) def checkUnusedAssignments(): """Check to see if any assignments have not been used.""" for name, binding in self.scope.unusedAssignments(): self.report(messages.UnusedVariable, binding.source, name) self.defer_assignment(checkUnusedAssignments) if PY2: def checkReturnWithArgumentInsideGenerator(): """Check to see if there are any return statements with arguments but the function is a generator.""" if self.is_generator(node): stmt = self.find_return_with_argument(node) if stmt is not None: self.report(messages.ReturnWithArgsInsideGenerator, stmt) self.defer_assignment(checkReturnWithArgumentInsideGenerator) self.pop_scope() self.defer_function(runFunction) def CLASSDEF(self, node): """Check names used in a class definition, including its decorators, base classes, and the body of its definition. Additionally, add its name to the current scope. 
""" for deco in node.decorator_list: self.handleNode(deco, node) for baseNode in node.bases: self.handleNode(baseNode, node) if not PY2: for keywordNode in node.keywords: self.handleNode(keywordNode, node) self.push_scope(ClassScope) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) for stmt in node.body: self.handleNode(stmt, node) self.pop_scope() self.add_binding(node, ClassDefinition(node.name, node)) def ASSIGN(self, node): self.handleNode(node.value, node) for target in node.targets: self.handleNode(target, node) def AUGASSIGN(self, node): self.handle_node_load(node.target) self.handleNode(node.value, node) self.handleNode(node.target, node) def IMPORT(self, node): for alias in node.names: name = alias.asname or alias.name importation = Importation(name, node) self.add_binding(node, importation) def IMPORTFROM(self, node): if node.module == '__future__': if not self.futures_allowed: self.report(messages.LateFutureImport, node, [n.name for n in node.names]) else: self.futures_allowed = False for alias in node.names: if alias.name == '*': self.scope.importStarred = True self.report(messages.ImportStarUsed, node, node.module) continue name = alias.asname or alias.name importation = Importation(name, node) if node.module == '__future__': importation.used = (self.scope, node) self.add_binding(node, importation) def TRY(self, node): handler_names = [] # List the exception handlers for handler in node.handlers: if isinstance(handler.type, ast.Tuple): for exc_type in handler.type.elts: handler_names.append(node_name(exc_type)) elif handler.type: handler_names.append(node_name(handler.type)) # Memorize the except handlers and process the body self.except_handlers.append(handler_names) for child in node.body: self.handleNode(child, node) self.except_handlers.pop() # Process the other nodes: "except:", "else:", "finally:" for child in ast.iter_child_nodes(node): if child not in node.body: self.handleNode(child, node) TRYEXCEPT = TRY def EXCEPTHANDLER(self, node): # 3.x: in addition to handling children, we must handle the name of # the exception, which is not a Name node, but a simple string. if node.type is None: self.report(messages.BareExcept, node) if isinstance(node.name, str): self.handle_node_store(node) self.handle_children(node)
timothycrosley/deprecated.frosted
frosted/checker.py
Checker.FOR
python
def FOR(self, node):
    vars = []

    def collectLoopVars(n):
        if isinstance(n, ast.Name):
            vars.append(n.id)
        elif isinstance(n, ast.expr_context):
            return
        else:
            for c in ast.iter_child_nodes(n):
                collectLoopVars(c)

    collectLoopVars(node.target)
    for varn in vars:
        if (isinstance(self.scope.get(varn), Importation)
                # unused ones will get an unused import warning
                and self.scope[varn].used):
            self.report(messages.ImportShadowedByLoopVar,
                        node, varn, self.scope[varn].source)

    self.handle_children(node)
Process bindings for loop variables.
train
https://github.com/timothycrosley/deprecated.frosted/blob/61ba7f341fc55676c3580c8c4e52117986cd5e12/frosted/checker.py#L693-L714
[ "def collectLoopVars(n):\n if isinstance(n, ast.Name):\n vars.append(n.id)\n elif isinstance(n, ast.expr_context):\n return\n else:\n for c in ast.iter_child_nodes(n):\n collectLoopVars(c)\n" ]
class Checker(object): """The core of frosted, checks the cleanliness and sanity of Python code.""" node_depth = 0 offset = None trace_tree = False frosted_builtins = FROSTED_BUILTINS def __init__(self, tree, filename='(none)', builtins=None, ignore_lines=(), **settings): self.settings = settings self.ignore_errors = settings.get('ignore_frosted_errors', []) self.ignore_lines = ignore_lines file_specific_ignores = settings.get('ignore_frosted_errors_for_' + (os.path.basename(filename) or ""), None) if file_specific_ignores: self.ignore_errors += file_specific_ignores self._node_handlers = {} self._deferred_functions = [] self._deferred_assignments = [] self.dead_scopes = [] self.messages = [] self.filename = filename if builtins: self.frosted_builtins = self.frosted_builtins.union(builtins) self.scope_stack = [ModuleScope()] self.except_handlers = [()] self.futures_allowed = True self.root = tree self.handle_children(tree) self.run_deferred(self._deferred_functions) self._deferred_functions = None self.run_deferred(self._deferred_assignments) self._deferred_assignments = None del self.scope_stack[1:] self.pop_scope() self.check_dead_scopes() self.check_plugins() def check_plugins(self): """ collect plugins from entry point 'frosted.plugins' and run their check() method, passing the filename """ checkers = {} for ep in pkg_resources.iter_entry_points(group='frosted.plugins'): checkers.update({ep.name: ep.load()}) for plugin_name, plugin in checkers.items(): if self.filename != '(none)': messages = plugin.check(self.filename) for message, loc, args, kwargs in messages: self.report(message, loc, *args, **kwargs) def defer_function(self, callable): """Schedule a function handler to be called just before completion. This is used for handling function bodies, which must be deferred because code later in the file might modify the global scope. When 'callable' is called, the scope at the time this is called will be restored, however it will contain any new bindings added to it. 
""" self._deferred_functions.append((callable, self.scope_stack[:], self.offset)) def defer_assignment(self, callable): """Schedule an assignment handler to be called just after deferred function handlers.""" self._deferred_assignments.append((callable, self.scope_stack[:], self.offset)) def run_deferred(self, deferred): """Run the callables in deferred using their associated scope stack.""" for handler, scope, offset in deferred: self.scope_stack = scope self.offset = offset handler() @property def scope(self): return self.scope_stack[-1] def pop_scope(self): self.dead_scopes.append(self.scope_stack.pop()) def check_dead_scopes(self): """Look at scopes which have been fully examined and report names in them which were imported but unused.""" for scope in self.dead_scopes: export = isinstance(scope.get('__all__'), ExportBinding) if export: all = scope['__all__'].names() # Look for possible mistakes in the export list if not scope.importStarred and os.path.basename(self.filename) != '__init__.py': undefined = set(all) - set(scope) for name in undefined: self.report(messages.UndefinedExport, scope['__all__'].source, name) else: all = [] # Look for imported names that aren't used without checking imports in namespace definition for importation in scope.values(): if isinstance(importation, Importation) and not importation.used and importation.name not in all: self.report(messages.UnusedImport, importation.source, importation.name) def push_scope(self, scope_class=FunctionScope): self.scope_stack.append(scope_class()) def push_function_scope(self): # XXX Deprecated self.push_scope(FunctionScope) def push_class_scope(self): # XXX Deprecated self.push_scope(ClassScope) def report(self, message_class, *args, **kwargs): error_code = message_class.error_code if(not error_code[:2] + "00" in self.ignore_errors and not error_code in self.ignore_errors and not str(message_class.error_number) in self.ignore_errors): kwargs['verbose'] = self.settings.get('verbose') message = message_class(self.filename, *args, **kwargs) if message.lineno not in self.ignore_lines: self.messages.append(message) def has_parent(self, node, kind): while hasattr(node, 'parent'): node = node.parent if isinstance(node, kind): return True def get_common_ancestor(self, lnode, rnode, stop=None): stop = stop or self.root if lnode is rnode: return lnode if stop in (lnode, rnode): return stop if not hasattr(lnode, 'parent') or not hasattr(rnode, 'parent'): return if (lnode.level > rnode.level): return self.get_common_ancestor(lnode.parent, rnode, stop) if (rnode.level > lnode.level): return self.get_common_ancestor(lnode, rnode.parent, stop) return self.get_common_ancestor(lnode.parent, rnode.parent, stop) def descendant_of(self, node, ancestors, stop=None): for ancestor in ancestors: if self.get_common_ancestor(node, ancestor, stop) not in (stop, None): return True return False def on_fork(self, parent, lnode, rnode, items): return (self.descendant_of(lnode, items, parent) ^ self.descendant_of(rnode, items, parent)) def different_forks(self, lnode, rnode): """True, if lnode and rnode are located on different forks of IF/TRY.""" ancestor = self.get_common_ancestor(lnode, rnode) if isinstance(ancestor, ast.If): for fork in (ancestor.body, ancestor.orelse): if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, ast.Try): body = ancestor.body + ancestor.orelse for fork in [body] + [[hdl] for hdl in ancestor.handlers]: if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, 
ast.TryFinally): if self.on_fork(ancestor, lnode, rnode, ancestor.body): return True return False def add_binding(self, node, value, report_redef=True): """Called when a binding is altered. - `node` is the statement responsible for the change - `value` is the optional new value, a Binding instance, associated with the binding; if None, the binding is deleted if it exists. - if `report_redef` is True (default), rebinding while unused will be reported. """ redefinedWhileUnused = False if not isinstance(self.scope, ClassScope): for scope in self.scope_stack[::-1]: existing = scope.get(value.name) if (isinstance(existing, Importation) and not existing.used and (not isinstance(value, Importation) or value.fullName == existing.fullName) and report_redef and not self.different_forks(node, existing.source)): redefinedWhileUnused = True self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) existing = self.scope.get(value.name) if not redefinedWhileUnused and self.has_parent(value.source, ast.ListComp): if (existing and report_redef and not self.has_parent(existing.source, (ast.For, ast.ListComp)) and not self.different_forks(node, existing.source)): self.report(messages.RedefinedInListComp, node, value.name, existing.source) if (isinstance(existing, Definition) and not existing.used and not self.different_forks(node, existing.source)): self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) else: self.scope[value.name] = value def get_node_handler(self, node_class): try: return self._node_handlers[node_class] except KeyError: nodeType = str(node_class.__name__).upper() self._node_handlers[node_class] = handler = getattr(self, nodeType) return handler def iter_visible_scopes(self): outerScopes = itertools.islice(self.scope_stack, len(self.scope_stack) - 1) scopes = [scope for scope in outerScopes if isinstance(scope, (FunctionScope, ModuleScope))] if (isinstance(self.scope, GeneratorScope) and scopes[-1] != self.scope_stack[-2]): scopes.append(self.scope_stack[-2]) scopes.append(self.scope_stack[-1]) return iter(reversed(scopes)) def handle_node_load(self, node): name = node_name(node) if not name: return importStarred = False for scope in self.iter_visible_scopes(): importStarred = importStarred or scope.importStarred try: scope[name].used = (self.scope, node) except KeyError: pass else: return # look in the built-ins if importStarred or name in self.frosted_builtins: return if name == '__path__' and os.path.basename(self.filename) == '__init__.py': # the special name __path__ is valid only in packages return # protected with a NameError handler? 
if 'NameError' not in self.except_handlers[-1]: self.report(messages.UndefinedName, node, name) def handle_node_store(self, node): name = node_name(node) if not name: return # if the name hasn't already been defined in the current scope if isinstance(self.scope, FunctionScope) and name not in self.scope: # for each function or module scope above us for scope in self.scope_stack[:-1]: if not isinstance(scope, (FunctionScope, ModuleScope)): continue # if the name was defined in that scope, and the name has # been accessed already in the current scope, and hasn't # been declared global used = name in scope and scope[name].used if used and used[0] is self.scope and name not in self.scope.globals: # then it's probably a mistake self.report(messages.UndefinedLocal, scope[name].used[1], name, scope[name].source) break parent = getattr(node, 'parent', None) if isinstance(parent, (ast.For, ast.comprehension, ast.Tuple, ast.List)): binding = Binding(name, node) elif (parent is not None and name == '__all__' and isinstance(self.scope, ModuleScope)): binding = ExportBinding(name, parent.value) else: binding = Assignment(name, node) if name in self.scope: binding.used = self.scope[name].used self.add_binding(node, binding) def handle_node_delete(self, node): name = node_name(node) if not name: return if isinstance(self.scope, FunctionScope) and name in self.scope.globals: self.scope.globals.remove(name) else: try: del self.scope[name] except KeyError: self.report(messages.UndefinedName, node, name) def handle_children(self, tree): for node in ast.iter_child_nodes(tree): self.handleNode(node, tree) def is_docstring(self, node): """Determine if the given node is a docstring, as long as it is at the correct place in the node tree.""" return isinstance(node, ast.Str) or (isinstance(node, ast.Expr) and isinstance(node.value, ast.Str)) def docstring(self, node): if isinstance(node, ast.Expr): node = node.value if not isinstance(node, ast.Str): return (None, None) # Computed incorrectly if the docstring has backslash doctest_lineno = node.lineno - node.s.count('\n') - 1 return (node.s, doctest_lineno) def handleNode(self, node, parent): if node is None: return if self.offset and getattr(node, 'lineno', None) is not None: node.lineno += self.offset[0] node.col_offset += self.offset[1] if self.trace_tree: print(' ' * self.node_depth + node.__class__.__name__) if self.futures_allowed and not (isinstance(node, ast.ImportFrom) or self.is_docstring(node)): self.futures_allowed = False self.node_depth += 1 node.level = self.node_depth node.parent = parent try: handler = self.get_node_handler(node.__class__) handler(node) finally: self.node_depth -= 1 if self.trace_tree: print(' ' * self.node_depth + 'end ' + node.__class__.__name__) _get_doctest_examples = doctest.DocTestParser().get_examples def handle_doctests(self, node): try: docstring, node_lineno = self.docstring(node.body[0]) if not docstring: return examples = self._get_doctest_examples(docstring) except (ValueError, IndexError): # e.g. line 6 of the docstring for <string> has inconsistent # leading whitespace: ... 
return node_offset = self.offset or (0, 0) self.push_scope() for example in examples: try: tree = compile(example.source, "<doctest>", "exec", ast.PyCF_ONLY_AST) except SyntaxError: e = sys.exc_info()[1] position = (node_lineno + example.lineno + e.lineno, example.indent + 4 + (e.offset or 0)) self.report(messages.DoctestSyntaxError, node, position) else: self.offset = (node_offset[0] + node_lineno + example.lineno, node_offset[1] + example.indent + 4) self.handle_children(tree) self.offset = node_offset self.pop_scope() def find_return_with_argument(self, node): """Finds and returns a return statment that has an argument. Note that we should use node.returns in Python 3, but this method is never called in Python 3 so we don't bother checking. """ for item in node.body: if isinstance(item, ast.Return) and item.value: return item elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): return_with_argument = self.find_return_with_argument(item) if return_with_argument: return return_with_argument def is_generator(self, node): """Checks whether a function is a generator by looking for a yield statement or expression.""" if not isinstance(node.body, list): # lambdas can not be generators return False for item in node.body: if isinstance(item, (ast.Assign, ast.Expr)): if isinstance(item.value, ast.Yield): return True elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): if self.is_generator(item): return True return False def ignore(self, node): pass # "stmt" type nodes RETURN = DELETE = PRINT = WHILE = IF = WITH = WITHITEM = RAISE = TRYFINALLY = ASSERT = EXEC = EXPR = handle_children CONTINUE = BREAK = PASS = ignore # "expr" type nodes BOOLOP = BINOP = UNARYOP = IFEXP = DICT = SET = YIELD = YIELDFROM = COMPARE = REPR = ATTRIBUTE = SUBSCRIPT = \ LIST = TUPLE = STARRED = NAMECONSTANT = handle_children NUM = STR = BYTES = ELLIPSIS = ignore # "slice" type nodes SLICE = EXTSLICE = INDEX = handle_children # expression contexts are node instances too, though being constants LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore # same for operators AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = BITOR = BITXOR = BITAND = FLOORDIV = INVERT = \ NOT = UADD = USUB = EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = ignore # additional node types COMPREHENSION = KEYWORD = handle_children def GLOBAL(self, node): """Keep track of globals declarations.""" if isinstance(self.scope, FunctionScope): self.scope.globals.update(node.names) NONLOCAL = GLOBAL def LISTCOMP(self, node): # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) def GENERATOREXP(self, node): self.push_scope(GeneratorScope) # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) self.pop_scope() SETCOMP = GENERATOREXP def DICTCOMP(self, node): self.push_scope(GeneratorScope) for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.key, node) self.handleNode(node.value, node) self.pop_scope() def NAME(self, node): """Handle occurrence of Name (which can be a load/store/delete access.)""" # Locate the name in locals / function / globals scopes. 
if isinstance(node.ctx, (ast.Load, ast.AugLoad)): self.handle_node_load(node) if (node.id == 'locals' and isinstance(self.scope, FunctionScope) and isinstance(node.parent, ast.Call)): # we are doing locals() call in current scope self.scope.uses_locals = True elif isinstance(node.ctx, (ast.Store, ast.AugStore)): self.handle_node_store(node) elif isinstance(node.ctx, ast.Del): self.handle_node_delete(node) else: # must be a Param context -- this only happens for names in function # arguments, but these aren't dispatched through here raise RuntimeError("Got impossible expression context: %r" % (node.ctx,)) def CALL(self, node): f = node.func if isinstance(f, ast.Name): for scope in self.iter_visible_scopes(): definition = scope.get(f.id) if definition: if isinstance(definition, FunctionDefinition): definition.signature.checkCall(node, self, f.id) break self.handle_children(node) def FUNCTIONDEF(self, node): for deco in node.decorator_list: self.handleNode(deco, node) self.add_binding(node, FunctionDefinition(node.name, node)) self.LAMBDA(node) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) def LAMBDA(self, node): args = [] annotations = [] if PY2: def addArgs(arglist): for arg in arglist: if isinstance(arg, ast.Tuple): addArgs(arg.elts) else: if arg.id in args: self.report(messages.DuplicateArgument, node, arg.id) args.append(arg.id) addArgs(node.args.args) defaults = node.args.defaults else: for arg in node.args.args + node.args.kwonlyargs: annotations.append(arg.annotation) args.append(arg.arg) defaults = node.args.defaults + node.args.kw_defaults # Only for Python3 FunctionDefs is_py3_func = hasattr(node, 'returns') for arg_name in ('vararg', 'kwarg'): wildcard = getattr(node.args, arg_name) if not wildcard: continue args.append(getattr(wildcard, 'arg', wildcard)) if is_py3_func: if PY34_GTE: annotations.append(wildcard.annotation) else: argannotation = arg_name + 'annotation' annotations.append(getattr(node.args, argannotation)) if is_py3_func: annotations.append(node.returns) if PY3: if len(set(args)) < len(args): for (idx, arg) in enumerate(args): if arg in args[:idx]: self.report(messages.DuplicateArgument, node, arg) for child in annotations + defaults: if child: self.handleNode(child, node) def runFunction(): self.push_scope() for name in args: self.add_binding(node, Argument(name, node), report_redef=False) if isinstance(node.body, list): # case for FunctionDefs for stmt in node.body: self.handleNode(stmt, node) else: # case for Lambdas self.handleNode(node.body, node) def checkUnusedAssignments(): """Check to see if any assignments have not been used.""" for name, binding in self.scope.unusedAssignments(): self.report(messages.UnusedVariable, binding.source, name) self.defer_assignment(checkUnusedAssignments) if PY2: def checkReturnWithArgumentInsideGenerator(): """Check to see if there are any return statements with arguments but the function is a generator.""" if self.is_generator(node): stmt = self.find_return_with_argument(node) if stmt is not None: self.report(messages.ReturnWithArgsInsideGenerator, stmt) self.defer_assignment(checkReturnWithArgumentInsideGenerator) self.pop_scope() self.defer_function(runFunction) def CLASSDEF(self, node): """Check names used in a class definition, including its decorators, base classes, and the body of its definition. Additionally, add its name to the current scope. 
""" for deco in node.decorator_list: self.handleNode(deco, node) for baseNode in node.bases: self.handleNode(baseNode, node) if not PY2: for keywordNode in node.keywords: self.handleNode(keywordNode, node) self.push_scope(ClassScope) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) for stmt in node.body: self.handleNode(stmt, node) self.pop_scope() self.add_binding(node, ClassDefinition(node.name, node)) def ASSIGN(self, node): self.handleNode(node.value, node) for target in node.targets: self.handleNode(target, node) def AUGASSIGN(self, node): self.handle_node_load(node.target) self.handleNode(node.value, node) self.handleNode(node.target, node) def IMPORT(self, node): for alias in node.names: name = alias.asname or alias.name importation = Importation(name, node) self.add_binding(node, importation) def IMPORTFROM(self, node): if node.module == '__future__': if not self.futures_allowed: self.report(messages.LateFutureImport, node, [n.name for n in node.names]) else: self.futures_allowed = False for alias in node.names: if alias.name == '*': self.scope.importStarred = True self.report(messages.ImportStarUsed, node, node.module) continue name = alias.asname or alias.name importation = Importation(name, node) if node.module == '__future__': importation.used = (self.scope, node) self.add_binding(node, importation) def TRY(self, node): handler_names = [] # List the exception handlers for handler in node.handlers: if isinstance(handler.type, ast.Tuple): for exc_type in handler.type.elts: handler_names.append(node_name(exc_type)) elif handler.type: handler_names.append(node_name(handler.type)) # Memorize the except handlers and process the body self.except_handlers.append(handler_names) for child in node.body: self.handleNode(child, node) self.except_handlers.pop() # Process the other nodes: "except:", "else:", "finally:" for child in ast.iter_child_nodes(node): if child not in node.body: self.handleNode(child, node) TRYEXCEPT = TRY def EXCEPTHANDLER(self, node): # 3.x: in addition to handling children, we must handle the name of # the exception, which is not a Name node, but a simple string. if node.type is None: self.report(messages.BareExcept, node) if isinstance(node.name, str): self.handle_node_store(node) self.handle_children(node)
timothycrosley/deprecated.frosted
frosted/checker.py
Checker.NAME
python
def NAME(self, node):
    # Locate the name in locals / function / globals scopes.
    if isinstance(node.ctx, (ast.Load, ast.AugLoad)):
        self.handle_node_load(node)
        if (node.id == 'locals' and isinstance(self.scope, FunctionScope) and
                isinstance(node.parent, ast.Call)):
            # we are doing locals() call in current scope
            self.scope.uses_locals = True
    elif isinstance(node.ctx, (ast.Store, ast.AugStore)):
        self.handle_node_store(node)
    elif isinstance(node.ctx, ast.Del):
        self.handle_node_delete(node)
    else:
        # must be a Param context -- this only happens for names in function
        # arguments, but these aren't dispatched through here
        raise RuntimeError("Got impossible expression context: %r" % (node.ctx,))
Handle occurrence of Name (which can be a load, store, or delete access).
train
https://github.com/timothycrosley/deprecated.frosted/blob/61ba7f341fc55676c3580c8c4e52117986cd5e12/frosted/checker.py#L716-L733
null
class Checker(object): """The core of frosted, checks the cleanliness and sanity of Python code.""" node_depth = 0 offset = None trace_tree = False frosted_builtins = FROSTED_BUILTINS def __init__(self, tree, filename='(none)', builtins=None, ignore_lines=(), **settings): self.settings = settings self.ignore_errors = settings.get('ignore_frosted_errors', []) self.ignore_lines = ignore_lines file_specific_ignores = settings.get('ignore_frosted_errors_for_' + (os.path.basename(filename) or ""), None) if file_specific_ignores: self.ignore_errors += file_specific_ignores self._node_handlers = {} self._deferred_functions = [] self._deferred_assignments = [] self.dead_scopes = [] self.messages = [] self.filename = filename if builtins: self.frosted_builtins = self.frosted_builtins.union(builtins) self.scope_stack = [ModuleScope()] self.except_handlers = [()] self.futures_allowed = True self.root = tree self.handle_children(tree) self.run_deferred(self._deferred_functions) self._deferred_functions = None self.run_deferred(self._deferred_assignments) self._deferred_assignments = None del self.scope_stack[1:] self.pop_scope() self.check_dead_scopes() self.check_plugins() def check_plugins(self): """ collect plugins from entry point 'frosted.plugins' and run their check() method, passing the filename """ checkers = {} for ep in pkg_resources.iter_entry_points(group='frosted.plugins'): checkers.update({ep.name: ep.load()}) for plugin_name, plugin in checkers.items(): if self.filename != '(none)': messages = plugin.check(self.filename) for message, loc, args, kwargs in messages: self.report(message, loc, *args, **kwargs) def defer_function(self, callable): """Schedule a function handler to be called just before completion. This is used for handling function bodies, which must be deferred because code later in the file might modify the global scope. When 'callable' is called, the scope at the time this is called will be restored, however it will contain any new bindings added to it. 
""" self._deferred_functions.append((callable, self.scope_stack[:], self.offset)) def defer_assignment(self, callable): """Schedule an assignment handler to be called just after deferred function handlers.""" self._deferred_assignments.append((callable, self.scope_stack[:], self.offset)) def run_deferred(self, deferred): """Run the callables in deferred using their associated scope stack.""" for handler, scope, offset in deferred: self.scope_stack = scope self.offset = offset handler() @property def scope(self): return self.scope_stack[-1] def pop_scope(self): self.dead_scopes.append(self.scope_stack.pop()) def check_dead_scopes(self): """Look at scopes which have been fully examined and report names in them which were imported but unused.""" for scope in self.dead_scopes: export = isinstance(scope.get('__all__'), ExportBinding) if export: all = scope['__all__'].names() # Look for possible mistakes in the export list if not scope.importStarred and os.path.basename(self.filename) != '__init__.py': undefined = set(all) - set(scope) for name in undefined: self.report(messages.UndefinedExport, scope['__all__'].source, name) else: all = [] # Look for imported names that aren't used without checking imports in namespace definition for importation in scope.values(): if isinstance(importation, Importation) and not importation.used and importation.name not in all: self.report(messages.UnusedImport, importation.source, importation.name) def push_scope(self, scope_class=FunctionScope): self.scope_stack.append(scope_class()) def push_function_scope(self): # XXX Deprecated self.push_scope(FunctionScope) def push_class_scope(self): # XXX Deprecated self.push_scope(ClassScope) def report(self, message_class, *args, **kwargs): error_code = message_class.error_code if(not error_code[:2] + "00" in self.ignore_errors and not error_code in self.ignore_errors and not str(message_class.error_number) in self.ignore_errors): kwargs['verbose'] = self.settings.get('verbose') message = message_class(self.filename, *args, **kwargs) if message.lineno not in self.ignore_lines: self.messages.append(message) def has_parent(self, node, kind): while hasattr(node, 'parent'): node = node.parent if isinstance(node, kind): return True def get_common_ancestor(self, lnode, rnode, stop=None): stop = stop or self.root if lnode is rnode: return lnode if stop in (lnode, rnode): return stop if not hasattr(lnode, 'parent') or not hasattr(rnode, 'parent'): return if (lnode.level > rnode.level): return self.get_common_ancestor(lnode.parent, rnode, stop) if (rnode.level > lnode.level): return self.get_common_ancestor(lnode, rnode.parent, stop) return self.get_common_ancestor(lnode.parent, rnode.parent, stop) def descendant_of(self, node, ancestors, stop=None): for ancestor in ancestors: if self.get_common_ancestor(node, ancestor, stop) not in (stop, None): return True return False def on_fork(self, parent, lnode, rnode, items): return (self.descendant_of(lnode, items, parent) ^ self.descendant_of(rnode, items, parent)) def different_forks(self, lnode, rnode): """True, if lnode and rnode are located on different forks of IF/TRY.""" ancestor = self.get_common_ancestor(lnode, rnode) if isinstance(ancestor, ast.If): for fork in (ancestor.body, ancestor.orelse): if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, ast.Try): body = ancestor.body + ancestor.orelse for fork in [body] + [[hdl] for hdl in ancestor.handlers]: if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, 
ast.TryFinally): if self.on_fork(ancestor, lnode, rnode, ancestor.body): return True return False def add_binding(self, node, value, report_redef=True): """Called when a binding is altered. - `node` is the statement responsible for the change - `value` is the optional new value, a Binding instance, associated with the binding; if None, the binding is deleted if it exists. - if `report_redef` is True (default), rebinding while unused will be reported. """ redefinedWhileUnused = False if not isinstance(self.scope, ClassScope): for scope in self.scope_stack[::-1]: existing = scope.get(value.name) if (isinstance(existing, Importation) and not existing.used and (not isinstance(value, Importation) or value.fullName == existing.fullName) and report_redef and not self.different_forks(node, existing.source)): redefinedWhileUnused = True self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) existing = self.scope.get(value.name) if not redefinedWhileUnused and self.has_parent(value.source, ast.ListComp): if (existing and report_redef and not self.has_parent(existing.source, (ast.For, ast.ListComp)) and not self.different_forks(node, existing.source)): self.report(messages.RedefinedInListComp, node, value.name, existing.source) if (isinstance(existing, Definition) and not existing.used and not self.different_forks(node, existing.source)): self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) else: self.scope[value.name] = value def get_node_handler(self, node_class): try: return self._node_handlers[node_class] except KeyError: nodeType = str(node_class.__name__).upper() self._node_handlers[node_class] = handler = getattr(self, nodeType) return handler def iter_visible_scopes(self): outerScopes = itertools.islice(self.scope_stack, len(self.scope_stack) - 1) scopes = [scope for scope in outerScopes if isinstance(scope, (FunctionScope, ModuleScope))] if (isinstance(self.scope, GeneratorScope) and scopes[-1] != self.scope_stack[-2]): scopes.append(self.scope_stack[-2]) scopes.append(self.scope_stack[-1]) return iter(reversed(scopes)) def handle_node_load(self, node): name = node_name(node) if not name: return importStarred = False for scope in self.iter_visible_scopes(): importStarred = importStarred or scope.importStarred try: scope[name].used = (self.scope, node) except KeyError: pass else: return # look in the built-ins if importStarred or name in self.frosted_builtins: return if name == '__path__' and os.path.basename(self.filename) == '__init__.py': # the special name __path__ is valid only in packages return # protected with a NameError handler? 
if 'NameError' not in self.except_handlers[-1]: self.report(messages.UndefinedName, node, name) def handle_node_store(self, node): name = node_name(node) if not name: return # if the name hasn't already been defined in the current scope if isinstance(self.scope, FunctionScope) and name not in self.scope: # for each function or module scope above us for scope in self.scope_stack[:-1]: if not isinstance(scope, (FunctionScope, ModuleScope)): continue # if the name was defined in that scope, and the name has # been accessed already in the current scope, and hasn't # been declared global used = name in scope and scope[name].used if used and used[0] is self.scope and name not in self.scope.globals: # then it's probably a mistake self.report(messages.UndefinedLocal, scope[name].used[1], name, scope[name].source) break parent = getattr(node, 'parent', None) if isinstance(parent, (ast.For, ast.comprehension, ast.Tuple, ast.List)): binding = Binding(name, node) elif (parent is not None and name == '__all__' and isinstance(self.scope, ModuleScope)): binding = ExportBinding(name, parent.value) else: binding = Assignment(name, node) if name in self.scope: binding.used = self.scope[name].used self.add_binding(node, binding) def handle_node_delete(self, node): name = node_name(node) if not name: return if isinstance(self.scope, FunctionScope) and name in self.scope.globals: self.scope.globals.remove(name) else: try: del self.scope[name] except KeyError: self.report(messages.UndefinedName, node, name) def handle_children(self, tree): for node in ast.iter_child_nodes(tree): self.handleNode(node, tree) def is_docstring(self, node): """Determine if the given node is a docstring, as long as it is at the correct place in the node tree.""" return isinstance(node, ast.Str) or (isinstance(node, ast.Expr) and isinstance(node.value, ast.Str)) def docstring(self, node): if isinstance(node, ast.Expr): node = node.value if not isinstance(node, ast.Str): return (None, None) # Computed incorrectly if the docstring has backslash doctest_lineno = node.lineno - node.s.count('\n') - 1 return (node.s, doctest_lineno) def handleNode(self, node, parent): if node is None: return if self.offset and getattr(node, 'lineno', None) is not None: node.lineno += self.offset[0] node.col_offset += self.offset[1] if self.trace_tree: print(' ' * self.node_depth + node.__class__.__name__) if self.futures_allowed and not (isinstance(node, ast.ImportFrom) or self.is_docstring(node)): self.futures_allowed = False self.node_depth += 1 node.level = self.node_depth node.parent = parent try: handler = self.get_node_handler(node.__class__) handler(node) finally: self.node_depth -= 1 if self.trace_tree: print(' ' * self.node_depth + 'end ' + node.__class__.__name__) _get_doctest_examples = doctest.DocTestParser().get_examples def handle_doctests(self, node): try: docstring, node_lineno = self.docstring(node.body[0]) if not docstring: return examples = self._get_doctest_examples(docstring) except (ValueError, IndexError): # e.g. line 6 of the docstring for <string> has inconsistent # leading whitespace: ... 
return node_offset = self.offset or (0, 0) self.push_scope() for example in examples: try: tree = compile(example.source, "<doctest>", "exec", ast.PyCF_ONLY_AST) except SyntaxError: e = sys.exc_info()[1] position = (node_lineno + example.lineno + e.lineno, example.indent + 4 + (e.offset or 0)) self.report(messages.DoctestSyntaxError, node, position) else: self.offset = (node_offset[0] + node_lineno + example.lineno, node_offset[1] + example.indent + 4) self.handle_children(tree) self.offset = node_offset self.pop_scope() def find_return_with_argument(self, node): """Finds and returns a return statment that has an argument. Note that we should use node.returns in Python 3, but this method is never called in Python 3 so we don't bother checking. """ for item in node.body: if isinstance(item, ast.Return) and item.value: return item elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): return_with_argument = self.find_return_with_argument(item) if return_with_argument: return return_with_argument def is_generator(self, node): """Checks whether a function is a generator by looking for a yield statement or expression.""" if not isinstance(node.body, list): # lambdas can not be generators return False for item in node.body: if isinstance(item, (ast.Assign, ast.Expr)): if isinstance(item.value, ast.Yield): return True elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): if self.is_generator(item): return True return False def ignore(self, node): pass # "stmt" type nodes RETURN = DELETE = PRINT = WHILE = IF = WITH = WITHITEM = RAISE = TRYFINALLY = ASSERT = EXEC = EXPR = handle_children CONTINUE = BREAK = PASS = ignore # "expr" type nodes BOOLOP = BINOP = UNARYOP = IFEXP = DICT = SET = YIELD = YIELDFROM = COMPARE = REPR = ATTRIBUTE = SUBSCRIPT = \ LIST = TUPLE = STARRED = NAMECONSTANT = handle_children NUM = STR = BYTES = ELLIPSIS = ignore # "slice" type nodes SLICE = EXTSLICE = INDEX = handle_children # expression contexts are node instances too, though being constants LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore # same for operators AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = BITOR = BITXOR = BITAND = FLOORDIV = INVERT = \ NOT = UADD = USUB = EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = ignore # additional node types COMPREHENSION = KEYWORD = handle_children def GLOBAL(self, node): """Keep track of globals declarations.""" if isinstance(self.scope, FunctionScope): self.scope.globals.update(node.names) NONLOCAL = GLOBAL def LISTCOMP(self, node): # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) def GENERATOREXP(self, node): self.push_scope(GeneratorScope) # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) self.pop_scope() SETCOMP = GENERATOREXP def DICTCOMP(self, node): self.push_scope(GeneratorScope) for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.key, node) self.handleNode(node.value, node) self.pop_scope() def FOR(self, node): """Process bindings for loop variables.""" vars = [] def collectLoopVars(n): if isinstance(n, ast.Name): vars.append(n.id) elif isinstance(n, ast.expr_context): return else: for c in ast.iter_child_nodes(n): collectLoopVars(c) collectLoopVars(node.target) for varn in vars: if (isinstance(self.scope.get(varn), Importation) # unused ones will get an unused import warning and self.scope[varn].used): 
self.report(messages.ImportShadowedByLoopVar, node, varn, self.scope[varn].source) self.handle_children(node) def CALL(self, node): f = node.func if isinstance(f, ast.Name): for scope in self.iter_visible_scopes(): definition = scope.get(f.id) if definition: if isinstance(definition, FunctionDefinition): definition.signature.checkCall(node, self, f.id) break self.handle_children(node) def FUNCTIONDEF(self, node): for deco in node.decorator_list: self.handleNode(deco, node) self.add_binding(node, FunctionDefinition(node.name, node)) self.LAMBDA(node) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) def LAMBDA(self, node): args = [] annotations = [] if PY2: def addArgs(arglist): for arg in arglist: if isinstance(arg, ast.Tuple): addArgs(arg.elts) else: if arg.id in args: self.report(messages.DuplicateArgument, node, arg.id) args.append(arg.id) addArgs(node.args.args) defaults = node.args.defaults else: for arg in node.args.args + node.args.kwonlyargs: annotations.append(arg.annotation) args.append(arg.arg) defaults = node.args.defaults + node.args.kw_defaults # Only for Python3 FunctionDefs is_py3_func = hasattr(node, 'returns') for arg_name in ('vararg', 'kwarg'): wildcard = getattr(node.args, arg_name) if not wildcard: continue args.append(getattr(wildcard, 'arg', wildcard)) if is_py3_func: if PY34_GTE: annotations.append(wildcard.annotation) else: argannotation = arg_name + 'annotation' annotations.append(getattr(node.args, argannotation)) if is_py3_func: annotations.append(node.returns) if PY3: if len(set(args)) < len(args): for (idx, arg) in enumerate(args): if arg in args[:idx]: self.report(messages.DuplicateArgument, node, arg) for child in annotations + defaults: if child: self.handleNode(child, node) def runFunction(): self.push_scope() for name in args: self.add_binding(node, Argument(name, node), report_redef=False) if isinstance(node.body, list): # case for FunctionDefs for stmt in node.body: self.handleNode(stmt, node) else: # case for Lambdas self.handleNode(node.body, node) def checkUnusedAssignments(): """Check to see if any assignments have not been used.""" for name, binding in self.scope.unusedAssignments(): self.report(messages.UnusedVariable, binding.source, name) self.defer_assignment(checkUnusedAssignments) if PY2: def checkReturnWithArgumentInsideGenerator(): """Check to see if there are any return statements with arguments but the function is a generator.""" if self.is_generator(node): stmt = self.find_return_with_argument(node) if stmt is not None: self.report(messages.ReturnWithArgsInsideGenerator, stmt) self.defer_assignment(checkReturnWithArgumentInsideGenerator) self.pop_scope() self.defer_function(runFunction) def CLASSDEF(self, node): """Check names used in a class definition, including its decorators, base classes, and the body of its definition. Additionally, add its name to the current scope. 
""" for deco in node.decorator_list: self.handleNode(deco, node) for baseNode in node.bases: self.handleNode(baseNode, node) if not PY2: for keywordNode in node.keywords: self.handleNode(keywordNode, node) self.push_scope(ClassScope) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) for stmt in node.body: self.handleNode(stmt, node) self.pop_scope() self.add_binding(node, ClassDefinition(node.name, node)) def ASSIGN(self, node): self.handleNode(node.value, node) for target in node.targets: self.handleNode(target, node) def AUGASSIGN(self, node): self.handle_node_load(node.target) self.handleNode(node.value, node) self.handleNode(node.target, node) def IMPORT(self, node): for alias in node.names: name = alias.asname or alias.name importation = Importation(name, node) self.add_binding(node, importation) def IMPORTFROM(self, node): if node.module == '__future__': if not self.futures_allowed: self.report(messages.LateFutureImport, node, [n.name for n in node.names]) else: self.futures_allowed = False for alias in node.names: if alias.name == '*': self.scope.importStarred = True self.report(messages.ImportStarUsed, node, node.module) continue name = alias.asname or alias.name importation = Importation(name, node) if node.module == '__future__': importation.used = (self.scope, node) self.add_binding(node, importation) def TRY(self, node): handler_names = [] # List the exception handlers for handler in node.handlers: if isinstance(handler.type, ast.Tuple): for exc_type in handler.type.elts: handler_names.append(node_name(exc_type)) elif handler.type: handler_names.append(node_name(handler.type)) # Memorize the except handlers and process the body self.except_handlers.append(handler_names) for child in node.body: self.handleNode(child, node) self.except_handlers.pop() # Process the other nodes: "except:", "else:", "finally:" for child in ast.iter_child_nodes(node): if child not in node.body: self.handleNode(child, node) TRYEXCEPT = TRY def EXCEPTHANDLER(self, node): # 3.x: in addition to handling children, we must handle the name of # the exception, which is not a Name node, but a simple string. if node.type is None: self.report(messages.BareExcept, node) if isinstance(node.name, str): self.handle_node_store(node) self.handle_children(node)
timothycrosley/deprecated.frosted
frosted/checker.py
Checker.CLASSDEF
python
def CLASSDEF(self, node):
    for deco in node.decorator_list:
        self.handleNode(deco, node)
    for baseNode in node.bases:
        self.handleNode(baseNode, node)
    if not PY2:
        for keywordNode in node.keywords:
            self.handleNode(keywordNode, node)
    self.push_scope(ClassScope)
    if self.settings.get('run_doctests', False):
        self.defer_function(lambda: self.handle_doctests(node))
    for stmt in node.body:
        self.handleNode(stmt, node)
    self.pop_scope()
    self.add_binding(node, ClassDefinition(node.name, node))
Check names used in a class definition, including its decorators, base classes, and the body of its definition. Additionally, add its name to the current scope.
train
https://github.com/timothycrosley/deprecated.frosted/blob/61ba7f341fc55676c3580c8c4e52117986cd5e12/frosted/checker.py#L837-L857
null
class Checker(object): """The core of frosted, checks the cleanliness and sanity of Python code.""" node_depth = 0 offset = None trace_tree = False frosted_builtins = FROSTED_BUILTINS def __init__(self, tree, filename='(none)', builtins=None, ignore_lines=(), **settings): self.settings = settings self.ignore_errors = settings.get('ignore_frosted_errors', []) self.ignore_lines = ignore_lines file_specific_ignores = settings.get('ignore_frosted_errors_for_' + (os.path.basename(filename) or ""), None) if file_specific_ignores: self.ignore_errors += file_specific_ignores self._node_handlers = {} self._deferred_functions = [] self._deferred_assignments = [] self.dead_scopes = [] self.messages = [] self.filename = filename if builtins: self.frosted_builtins = self.frosted_builtins.union(builtins) self.scope_stack = [ModuleScope()] self.except_handlers = [()] self.futures_allowed = True self.root = tree self.handle_children(tree) self.run_deferred(self._deferred_functions) self._deferred_functions = None self.run_deferred(self._deferred_assignments) self._deferred_assignments = None del self.scope_stack[1:] self.pop_scope() self.check_dead_scopes() self.check_plugins() def check_plugins(self): """ collect plugins from entry point 'frosted.plugins' and run their check() method, passing the filename """ checkers = {} for ep in pkg_resources.iter_entry_points(group='frosted.plugins'): checkers.update({ep.name: ep.load()}) for plugin_name, plugin in checkers.items(): if self.filename != '(none)': messages = plugin.check(self.filename) for message, loc, args, kwargs in messages: self.report(message, loc, *args, **kwargs) def defer_function(self, callable): """Schedule a function handler to be called just before completion. This is used for handling function bodies, which must be deferred because code later in the file might modify the global scope. When 'callable' is called, the scope at the time this is called will be restored, however it will contain any new bindings added to it. 
""" self._deferred_functions.append((callable, self.scope_stack[:], self.offset)) def defer_assignment(self, callable): """Schedule an assignment handler to be called just after deferred function handlers.""" self._deferred_assignments.append((callable, self.scope_stack[:], self.offset)) def run_deferred(self, deferred): """Run the callables in deferred using their associated scope stack.""" for handler, scope, offset in deferred: self.scope_stack = scope self.offset = offset handler() @property def scope(self): return self.scope_stack[-1] def pop_scope(self): self.dead_scopes.append(self.scope_stack.pop()) def check_dead_scopes(self): """Look at scopes which have been fully examined and report names in them which were imported but unused.""" for scope in self.dead_scopes: export = isinstance(scope.get('__all__'), ExportBinding) if export: all = scope['__all__'].names() # Look for possible mistakes in the export list if not scope.importStarred and os.path.basename(self.filename) != '__init__.py': undefined = set(all) - set(scope) for name in undefined: self.report(messages.UndefinedExport, scope['__all__'].source, name) else: all = [] # Look for imported names that aren't used without checking imports in namespace definition for importation in scope.values(): if isinstance(importation, Importation) and not importation.used and importation.name not in all: self.report(messages.UnusedImport, importation.source, importation.name) def push_scope(self, scope_class=FunctionScope): self.scope_stack.append(scope_class()) def push_function_scope(self): # XXX Deprecated self.push_scope(FunctionScope) def push_class_scope(self): # XXX Deprecated self.push_scope(ClassScope) def report(self, message_class, *args, **kwargs): error_code = message_class.error_code if(not error_code[:2] + "00" in self.ignore_errors and not error_code in self.ignore_errors and not str(message_class.error_number) in self.ignore_errors): kwargs['verbose'] = self.settings.get('verbose') message = message_class(self.filename, *args, **kwargs) if message.lineno not in self.ignore_lines: self.messages.append(message) def has_parent(self, node, kind): while hasattr(node, 'parent'): node = node.parent if isinstance(node, kind): return True def get_common_ancestor(self, lnode, rnode, stop=None): stop = stop or self.root if lnode is rnode: return lnode if stop in (lnode, rnode): return stop if not hasattr(lnode, 'parent') or not hasattr(rnode, 'parent'): return if (lnode.level > rnode.level): return self.get_common_ancestor(lnode.parent, rnode, stop) if (rnode.level > lnode.level): return self.get_common_ancestor(lnode, rnode.parent, stop) return self.get_common_ancestor(lnode.parent, rnode.parent, stop) def descendant_of(self, node, ancestors, stop=None): for ancestor in ancestors: if self.get_common_ancestor(node, ancestor, stop) not in (stop, None): return True return False def on_fork(self, parent, lnode, rnode, items): return (self.descendant_of(lnode, items, parent) ^ self.descendant_of(rnode, items, parent)) def different_forks(self, lnode, rnode): """True, if lnode and rnode are located on different forks of IF/TRY.""" ancestor = self.get_common_ancestor(lnode, rnode) if isinstance(ancestor, ast.If): for fork in (ancestor.body, ancestor.orelse): if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, ast.Try): body = ancestor.body + ancestor.orelse for fork in [body] + [[hdl] for hdl in ancestor.handlers]: if self.on_fork(ancestor, lnode, rnode, fork): return True elif isinstance(ancestor, 
ast.TryFinally): if self.on_fork(ancestor, lnode, rnode, ancestor.body): return True return False def add_binding(self, node, value, report_redef=True): """Called when a binding is altered. - `node` is the statement responsible for the change - `value` is the optional new value, a Binding instance, associated with the binding; if None, the binding is deleted if it exists. - if `report_redef` is True (default), rebinding while unused will be reported. """ redefinedWhileUnused = False if not isinstance(self.scope, ClassScope): for scope in self.scope_stack[::-1]: existing = scope.get(value.name) if (isinstance(existing, Importation) and not existing.used and (not isinstance(value, Importation) or value.fullName == existing.fullName) and report_redef and not self.different_forks(node, existing.source)): redefinedWhileUnused = True self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) existing = self.scope.get(value.name) if not redefinedWhileUnused and self.has_parent(value.source, ast.ListComp): if (existing and report_redef and not self.has_parent(existing.source, (ast.For, ast.ListComp)) and not self.different_forks(node, existing.source)): self.report(messages.RedefinedInListComp, node, value.name, existing.source) if (isinstance(existing, Definition) and not existing.used and not self.different_forks(node, existing.source)): self.report(messages.RedefinedWhileUnused, node, value.name, existing.source) else: self.scope[value.name] = value def get_node_handler(self, node_class): try: return self._node_handlers[node_class] except KeyError: nodeType = str(node_class.__name__).upper() self._node_handlers[node_class] = handler = getattr(self, nodeType) return handler def iter_visible_scopes(self): outerScopes = itertools.islice(self.scope_stack, len(self.scope_stack) - 1) scopes = [scope for scope in outerScopes if isinstance(scope, (FunctionScope, ModuleScope))] if (isinstance(self.scope, GeneratorScope) and scopes[-1] != self.scope_stack[-2]): scopes.append(self.scope_stack[-2]) scopes.append(self.scope_stack[-1]) return iter(reversed(scopes)) def handle_node_load(self, node): name = node_name(node) if not name: return importStarred = False for scope in self.iter_visible_scopes(): importStarred = importStarred or scope.importStarred try: scope[name].used = (self.scope, node) except KeyError: pass else: return # look in the built-ins if importStarred or name in self.frosted_builtins: return if name == '__path__' and os.path.basename(self.filename) == '__init__.py': # the special name __path__ is valid only in packages return # protected with a NameError handler? 
if 'NameError' not in self.except_handlers[-1]: self.report(messages.UndefinedName, node, name) def handle_node_store(self, node): name = node_name(node) if not name: return # if the name hasn't already been defined in the current scope if isinstance(self.scope, FunctionScope) and name not in self.scope: # for each function or module scope above us for scope in self.scope_stack[:-1]: if not isinstance(scope, (FunctionScope, ModuleScope)): continue # if the name was defined in that scope, and the name has # been accessed already in the current scope, and hasn't # been declared global used = name in scope and scope[name].used if used and used[0] is self.scope and name not in self.scope.globals: # then it's probably a mistake self.report(messages.UndefinedLocal, scope[name].used[1], name, scope[name].source) break parent = getattr(node, 'parent', None) if isinstance(parent, (ast.For, ast.comprehension, ast.Tuple, ast.List)): binding = Binding(name, node) elif (parent is not None and name == '__all__' and isinstance(self.scope, ModuleScope)): binding = ExportBinding(name, parent.value) else: binding = Assignment(name, node) if name in self.scope: binding.used = self.scope[name].used self.add_binding(node, binding) def handle_node_delete(self, node): name = node_name(node) if not name: return if isinstance(self.scope, FunctionScope) and name in self.scope.globals: self.scope.globals.remove(name) else: try: del self.scope[name] except KeyError: self.report(messages.UndefinedName, node, name) def handle_children(self, tree): for node in ast.iter_child_nodes(tree): self.handleNode(node, tree) def is_docstring(self, node): """Determine if the given node is a docstring, as long as it is at the correct place in the node tree.""" return isinstance(node, ast.Str) or (isinstance(node, ast.Expr) and isinstance(node.value, ast.Str)) def docstring(self, node): if isinstance(node, ast.Expr): node = node.value if not isinstance(node, ast.Str): return (None, None) # Computed incorrectly if the docstring has backslash doctest_lineno = node.lineno - node.s.count('\n') - 1 return (node.s, doctest_lineno) def handleNode(self, node, parent): if node is None: return if self.offset and getattr(node, 'lineno', None) is not None: node.lineno += self.offset[0] node.col_offset += self.offset[1] if self.trace_tree: print(' ' * self.node_depth + node.__class__.__name__) if self.futures_allowed and not (isinstance(node, ast.ImportFrom) or self.is_docstring(node)): self.futures_allowed = False self.node_depth += 1 node.level = self.node_depth node.parent = parent try: handler = self.get_node_handler(node.__class__) handler(node) finally: self.node_depth -= 1 if self.trace_tree: print(' ' * self.node_depth + 'end ' + node.__class__.__name__) _get_doctest_examples = doctest.DocTestParser().get_examples def handle_doctests(self, node): try: docstring, node_lineno = self.docstring(node.body[0]) if not docstring: return examples = self._get_doctest_examples(docstring) except (ValueError, IndexError): # e.g. line 6 of the docstring for <string> has inconsistent # leading whitespace: ... 
return node_offset = self.offset or (0, 0) self.push_scope() for example in examples: try: tree = compile(example.source, "<doctest>", "exec", ast.PyCF_ONLY_AST) except SyntaxError: e = sys.exc_info()[1] position = (node_lineno + example.lineno + e.lineno, example.indent + 4 + (e.offset or 0)) self.report(messages.DoctestSyntaxError, node, position) else: self.offset = (node_offset[0] + node_lineno + example.lineno, node_offset[1] + example.indent + 4) self.handle_children(tree) self.offset = node_offset self.pop_scope() def find_return_with_argument(self, node): """Finds and returns a return statment that has an argument. Note that we should use node.returns in Python 3, but this method is never called in Python 3 so we don't bother checking. """ for item in node.body: if isinstance(item, ast.Return) and item.value: return item elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): return_with_argument = self.find_return_with_argument(item) if return_with_argument: return return_with_argument def is_generator(self, node): """Checks whether a function is a generator by looking for a yield statement or expression.""" if not isinstance(node.body, list): # lambdas can not be generators return False for item in node.body: if isinstance(item, (ast.Assign, ast.Expr)): if isinstance(item.value, ast.Yield): return True elif not isinstance(item, ast.FunctionDef) and hasattr(item, 'body'): if self.is_generator(item): return True return False def ignore(self, node): pass # "stmt" type nodes RETURN = DELETE = PRINT = WHILE = IF = WITH = WITHITEM = RAISE = TRYFINALLY = ASSERT = EXEC = EXPR = handle_children CONTINUE = BREAK = PASS = ignore # "expr" type nodes BOOLOP = BINOP = UNARYOP = IFEXP = DICT = SET = YIELD = YIELDFROM = COMPARE = REPR = ATTRIBUTE = SUBSCRIPT = \ LIST = TUPLE = STARRED = NAMECONSTANT = handle_children NUM = STR = BYTES = ELLIPSIS = ignore # "slice" type nodes SLICE = EXTSLICE = INDEX = handle_children # expression contexts are node instances too, though being constants LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore # same for operators AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = BITOR = BITXOR = BITAND = FLOORDIV = INVERT = \ NOT = UADD = USUB = EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = ignore # additional node types COMPREHENSION = KEYWORD = handle_children def GLOBAL(self, node): """Keep track of globals declarations.""" if isinstance(self.scope, FunctionScope): self.scope.globals.update(node.names) NONLOCAL = GLOBAL def LISTCOMP(self, node): # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) def GENERATOREXP(self, node): self.push_scope(GeneratorScope) # handle generators before element for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.elt, node) self.pop_scope() SETCOMP = GENERATOREXP def DICTCOMP(self, node): self.push_scope(GeneratorScope) for gen in node.generators: self.handleNode(gen, node) self.handleNode(node.key, node) self.handleNode(node.value, node) self.pop_scope() def FOR(self, node): """Process bindings for loop variables.""" vars = [] def collectLoopVars(n): if isinstance(n, ast.Name): vars.append(n.id) elif isinstance(n, ast.expr_context): return else: for c in ast.iter_child_nodes(n): collectLoopVars(c) collectLoopVars(node.target) for varn in vars: if (isinstance(self.scope.get(varn), Importation) # unused ones will get an unused import warning and self.scope[varn].used): 
self.report(messages.ImportShadowedByLoopVar, node, varn, self.scope[varn].source) self.handle_children(node) def NAME(self, node): """Handle occurrence of Name (which can be a load/store/delete access.)""" # Locate the name in locals / function / globals scopes. if isinstance(node.ctx, (ast.Load, ast.AugLoad)): self.handle_node_load(node) if (node.id == 'locals' and isinstance(self.scope, FunctionScope) and isinstance(node.parent, ast.Call)): # we are doing locals() call in current scope self.scope.uses_locals = True elif isinstance(node.ctx, (ast.Store, ast.AugStore)): self.handle_node_store(node) elif isinstance(node.ctx, ast.Del): self.handle_node_delete(node) else: # must be a Param context -- this only happens for names in function # arguments, but these aren't dispatched through here raise RuntimeError("Got impossible expression context: %r" % (node.ctx,)) def CALL(self, node): f = node.func if isinstance(f, ast.Name): for scope in self.iter_visible_scopes(): definition = scope.get(f.id) if definition: if isinstance(definition, FunctionDefinition): definition.signature.checkCall(node, self, f.id) break self.handle_children(node) def FUNCTIONDEF(self, node): for deco in node.decorator_list: self.handleNode(deco, node) self.add_binding(node, FunctionDefinition(node.name, node)) self.LAMBDA(node) if self.settings.get('run_doctests', False): self.defer_function(lambda: self.handle_doctests(node)) def LAMBDA(self, node): args = [] annotations = [] if PY2: def addArgs(arglist): for arg in arglist: if isinstance(arg, ast.Tuple): addArgs(arg.elts) else: if arg.id in args: self.report(messages.DuplicateArgument, node, arg.id) args.append(arg.id) addArgs(node.args.args) defaults = node.args.defaults else: for arg in node.args.args + node.args.kwonlyargs: annotations.append(arg.annotation) args.append(arg.arg) defaults = node.args.defaults + node.args.kw_defaults # Only for Python3 FunctionDefs is_py3_func = hasattr(node, 'returns') for arg_name in ('vararg', 'kwarg'): wildcard = getattr(node.args, arg_name) if not wildcard: continue args.append(getattr(wildcard, 'arg', wildcard)) if is_py3_func: if PY34_GTE: annotations.append(wildcard.annotation) else: argannotation = arg_name + 'annotation' annotations.append(getattr(node.args, argannotation)) if is_py3_func: annotations.append(node.returns) if PY3: if len(set(args)) < len(args): for (idx, arg) in enumerate(args): if arg in args[:idx]: self.report(messages.DuplicateArgument, node, arg) for child in annotations + defaults: if child: self.handleNode(child, node) def runFunction(): self.push_scope() for name in args: self.add_binding(node, Argument(name, node), report_redef=False) if isinstance(node.body, list): # case for FunctionDefs for stmt in node.body: self.handleNode(stmt, node) else: # case for Lambdas self.handleNode(node.body, node) def checkUnusedAssignments(): """Check to see if any assignments have not been used.""" for name, binding in self.scope.unusedAssignments(): self.report(messages.UnusedVariable, binding.source, name) self.defer_assignment(checkUnusedAssignments) if PY2: def checkReturnWithArgumentInsideGenerator(): """Check to see if there are any return statements with arguments but the function is a generator.""" if self.is_generator(node): stmt = self.find_return_with_argument(node) if stmt is not None: self.report(messages.ReturnWithArgsInsideGenerator, stmt) self.defer_assignment(checkReturnWithArgumentInsideGenerator) self.pop_scope() self.defer_function(runFunction) def ASSIGN(self, node): 
self.handleNode(node.value, node) for target in node.targets: self.handleNode(target, node) def AUGASSIGN(self, node): self.handle_node_load(node.target) self.handleNode(node.value, node) self.handleNode(node.target, node) def IMPORT(self, node): for alias in node.names: name = alias.asname or alias.name importation = Importation(name, node) self.add_binding(node, importation) def IMPORTFROM(self, node): if node.module == '__future__': if not self.futures_allowed: self.report(messages.LateFutureImport, node, [n.name for n in node.names]) else: self.futures_allowed = False for alias in node.names: if alias.name == '*': self.scope.importStarred = True self.report(messages.ImportStarUsed, node, node.module) continue name = alias.asname or alias.name importation = Importation(name, node) if node.module == '__future__': importation.used = (self.scope, node) self.add_binding(node, importation) def TRY(self, node): handler_names = [] # List the exception handlers for handler in node.handlers: if isinstance(handler.type, ast.Tuple): for exc_type in handler.type.elts: handler_names.append(node_name(exc_type)) elif handler.type: handler_names.append(node_name(handler.type)) # Memorize the except handlers and process the body self.except_handlers.append(handler_names) for child in node.body: self.handleNode(child, node) self.except_handlers.pop() # Process the other nodes: "except:", "else:", "finally:" for child in ast.iter_child_nodes(node): if child not in node.body: self.handleNode(child, node) TRYEXCEPT = TRY def EXCEPTHANDLER(self, node): # 3.x: in addition to handling children, we must handle the name of # the exception, which is not a Name node, but a simple string. if node.type is None: self.report(messages.BareExcept, node) if isinstance(node.name, str): self.handle_node_store(node) self.handle_children(node)
timothycrosley/deprecated.frosted
frosted/api.py
check
python
def check(codeString, filename, reporter=modReporter.Default, settings_path=None, **setting_overrides): if not settings_path and filename: settings_path = os.path.dirname(os.path.abspath(filename)) settings_path = settings_path or os.getcwd() active_settings = settings.from_path(settings_path).copy() for key, value in itemsview(setting_overrides): access_key = key.replace('not_', '').lower() if type(active_settings.get(access_key)) in (list, tuple): if key.startswith('not_'): active_settings[access_key] = list(set(active_settings[access_key]).difference(value)) else: active_settings[access_key] = list(set(active_settings[access_key]).union(value)) else: active_settings[key] = value active_settings.update(setting_overrides) if _should_skip(filename, active_settings.get('skip', [])): if active_settings.get('directly_being_checked', None) == 1: reporter.flake(FileSkipped(filename)) return 1 elif active_settings.get('verbose', False): ignore = active_settings.get('ignore_frosted_errors', []) if(not "W200" in ignore and not "W201" in ignore): reporter.flake(FileSkipped(filename, None, verbose=active_settings.get('verbose'))) return 0 # First, compile into an AST and handle syntax errors. try: tree = compile(codeString, filename, "exec", _ast.PyCF_ONLY_AST) except SyntaxError: value = sys.exc_info()[1] msg = value.args[0] (lineno, offset, text) = value.lineno, value.offset, value.text # If there's an encoding problem with the file, the text is None. if text is None: # Avoid using msg, since for the only known case, it contains a # bogus message that claims the encoding the file declared was # unknown. reporter.unexpected_error(filename, 'problem decoding source') else: reporter.flake(PythonSyntaxError(filename, msg, lineno, offset, text, verbose=active_settings.get('verbose'))) return 1 except Exception: reporter.unexpected_error(filename, 'problem decoding source') return 1 # Okay, it's syntactically valid. Now check it. w = checker.Checker(tree, filename, None, ignore_lines=_noqa_lines(codeString), **active_settings) w.messages.sort(key=lambda m: m.lineno) for warning in w.messages: reporter.flake(warning) return len(w.messages)
Check the Python source given by codeString for unfrosted flakes.
train
https://github.com/timothycrosley/deprecated.frosted/blob/61ba7f341fc55676c3580c8c4e52117986cd5e12/frosted/api.py#L62-L118
[ "def _noqa_lines(codeString):\n line_nums = []\n g = tokenize.generate_tokens(StringIO(str(codeString)).readline) # tokenize the string\n for toknum, tokval, begins, _, _ in g:\n lineno = begins[0]\n # not sure what N_TOKENS really means, but in testing, that was what comments were\n # tokenized as\n if toknum == N_TOKENS:\n if _re_noqa.search(tokval):\n line_nums.append(lineno)\n return line_nums\n", "def _should_skip(filename, skip):\n if filename in skip:\n return True\n\n position = os.path.split(filename)\n while position[1]:\n if position[1] in skip:\n return True\n position = os.path.split(position[0])\n" ]
"""frosted/api.py. Defines the api for the command-line frosted utility Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR """ import os import re import sys import tokenize from io import StringIO from token import N_TOKENS from pies.overrides import * import _ast from frosted import reporter as modReporter from frosted import checker, settings from frosted.messages import FileSkipped, PythonSyntaxError __all__ = ['check', 'check_path', 'check_recursive', 'iter_source_code'] _re_noqa = re.compile(r'((frosted)[:=]\s*noqa)|(#\s*noqa)', re.I) def _noqa_lines(codeString): line_nums = [] g = tokenize.generate_tokens(StringIO(str(codeString)).readline) # tokenize the string for toknum, tokval, begins, _, _ in g: lineno = begins[0] # not sure what N_TOKENS really means, but in testing, that was what comments were # tokenized as if toknum == N_TOKENS: if _re_noqa.search(tokval): line_nums.append(lineno) return line_nums def _should_skip(filename, skip): if filename in skip: return True position = os.path.split(filename) while position[1]: if position[1] in skip: return True position = os.path.split(position[0]) def check_path(filename, reporter=modReporter.Default, settings_path=None, **setting_overrides): """Check the given path, printing out any warnings detected.""" try: with open(filename, 'U') as f: codestr = f.read() + '\n' except UnicodeError: reporter.unexpected_error(filename, 'problem decoding source') return 1 except IOError: msg = sys.exc_info()[1] reporter.unexpected_error(filename, msg.args[1]) return 1 return check(codestr, filename, reporter, settings_path, **setting_overrides) def iter_source_code(paths): """Iterate over all Python source files defined in paths.""" for path in paths: if os.path.isdir(path): for dirpath, dirnames, filenames in os.walk(path): for filename in filenames: if filename.endswith('.py'): yield os.path.join(dirpath, filename) else: yield path def check_recursive(paths, reporter=modReporter.Default, settings_path=None, **setting_overrides): """Recursively check all source files defined in paths.""" warnings = 0 for source_path in iter_source_code(paths): warnings += check_path(source_path, reporter, settings_path=None, **setting_overrides) return warnings
timothycrosley/deprecated.frosted
frosted/api.py
check_path
python
def check_path(filename, reporter=modReporter.Default, settings_path=None, **setting_overrides): try: with open(filename, 'U') as f: codestr = f.read() + '\n' except UnicodeError: reporter.unexpected_error(filename, 'problem decoding source') return 1 except IOError: msg = sys.exc_info()[1] reporter.unexpected_error(filename, msg.args[1]) return 1 return check(codestr, filename, reporter, settings_path, **setting_overrides)
Check the given path, printing out any warnings detected.
train
https://github.com/timothycrosley/deprecated.frosted/blob/61ba7f341fc55676c3580c8c4e52117986cd5e12/frosted/api.py#L121-L133
[ "def check(codeString, filename, reporter=modReporter.Default, settings_path=None, **setting_overrides):\n \"\"\"Check the Python source given by codeString for unfrosted flakes.\"\"\"\n\n if not settings_path and filename:\n settings_path = os.path.dirname(os.path.abspath(filename))\n settings_path = settings_path or os.getcwd()\n\n active_settings = settings.from_path(settings_path).copy()\n for key, value in itemsview(setting_overrides):\n access_key = key.replace('not_', '').lower()\n if type(active_settings.get(access_key)) in (list, tuple):\n if key.startswith('not_'):\n active_settings[access_key] = list(set(active_settings[access_key]).difference(value))\n else:\n active_settings[access_key] = list(set(active_settings[access_key]).union(value))\n else:\n active_settings[key] = value\n active_settings.update(setting_overrides)\n\n if _should_skip(filename, active_settings.get('skip', [])):\n if active_settings.get('directly_being_checked', None) == 1:\n reporter.flake(FileSkipped(filename))\n return 1\n elif active_settings.get('verbose', False):\n ignore = active_settings.get('ignore_frosted_errors', [])\n if(not \"W200\" in ignore and not \"W201\" in ignore):\n reporter.flake(FileSkipped(filename, None, verbose=active_settings.get('verbose')))\n return 0\n\n # First, compile into an AST and handle syntax errors.\n try:\n tree = compile(codeString, filename, \"exec\", _ast.PyCF_ONLY_AST)\n except SyntaxError:\n value = sys.exc_info()[1]\n msg = value.args[0]\n\n (lineno, offset, text) = value.lineno, value.offset, value.text\n\n # If there's an encoding problem with the file, the text is None.\n if text is None:\n # Avoid using msg, since for the only known case, it contains a\n # bogus message that claims the encoding the file declared was\n # unknown.\n reporter.unexpected_error(filename, 'problem decoding source')\n else:\n reporter.flake(PythonSyntaxError(filename, msg, lineno, offset, text,\n verbose=active_settings.get('verbose')))\n return 1\n except Exception:\n reporter.unexpected_error(filename, 'problem decoding source')\n return 1\n # Okay, it's syntactically valid. Now check it.\n w = checker.Checker(tree, filename, None, ignore_lines=_noqa_lines(codeString), **active_settings)\n w.messages.sort(key=lambda m: m.lineno)\n for warning in w.messages:\n reporter.flake(warning)\n return len(w.messages)\n" ]
"""frosted/api.py. Defines the api for the command-line frosted utility Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR """ import os import re import sys import tokenize from io import StringIO from token import N_TOKENS from pies.overrides import * import _ast from frosted import reporter as modReporter from frosted import checker, settings from frosted.messages import FileSkipped, PythonSyntaxError __all__ = ['check', 'check_path', 'check_recursive', 'iter_source_code'] _re_noqa = re.compile(r'((frosted)[:=]\s*noqa)|(#\s*noqa)', re.I) def _noqa_lines(codeString): line_nums = [] g = tokenize.generate_tokens(StringIO(str(codeString)).readline) # tokenize the string for toknum, tokval, begins, _, _ in g: lineno = begins[0] # not sure what N_TOKENS really means, but in testing, that was what comments were # tokenized as if toknum == N_TOKENS: if _re_noqa.search(tokval): line_nums.append(lineno) return line_nums def _should_skip(filename, skip): if filename in skip: return True position = os.path.split(filename) while position[1]: if position[1] in skip: return True position = os.path.split(position[0]) def check(codeString, filename, reporter=modReporter.Default, settings_path=None, **setting_overrides): """Check the Python source given by codeString for unfrosted flakes.""" if not settings_path and filename: settings_path = os.path.dirname(os.path.abspath(filename)) settings_path = settings_path or os.getcwd() active_settings = settings.from_path(settings_path).copy() for key, value in itemsview(setting_overrides): access_key = key.replace('not_', '').lower() if type(active_settings.get(access_key)) in (list, tuple): if key.startswith('not_'): active_settings[access_key] = list(set(active_settings[access_key]).difference(value)) else: active_settings[access_key] = list(set(active_settings[access_key]).union(value)) else: active_settings[key] = value active_settings.update(setting_overrides) if _should_skip(filename, active_settings.get('skip', [])): if active_settings.get('directly_being_checked', None) == 1: reporter.flake(FileSkipped(filename)) return 1 elif active_settings.get('verbose', False): ignore = active_settings.get('ignore_frosted_errors', []) if(not "W200" in ignore and not "W201" in ignore): reporter.flake(FileSkipped(filename, None, verbose=active_settings.get('verbose'))) return 0 # First, compile into an AST and handle syntax errors. 
try: tree = compile(codeString, filename, "exec", _ast.PyCF_ONLY_AST) except SyntaxError: value = sys.exc_info()[1] msg = value.args[0] (lineno, offset, text) = value.lineno, value.offset, value.text # If there's an encoding problem with the file, the text is None. if text is None: # Avoid using msg, since for the only known case, it contains a # bogus message that claims the encoding the file declared was # unknown. reporter.unexpected_error(filename, 'problem decoding source') else: reporter.flake(PythonSyntaxError(filename, msg, lineno, offset, text, verbose=active_settings.get('verbose'))) return 1 except Exception: reporter.unexpected_error(filename, 'problem decoding source') return 1 # Okay, it's syntactically valid. Now check it. w = checker.Checker(tree, filename, None, ignore_lines=_noqa_lines(codeString), **active_settings) w.messages.sort(key=lambda m: m.lineno) for warning in w.messages: reporter.flake(warning) return len(w.messages) def iter_source_code(paths): """Iterate over all Python source files defined in paths.""" for path in paths: if os.path.isdir(path): for dirpath, dirnames, filenames in os.walk(path): for filename in filenames: if filename.endswith('.py'): yield os.path.join(dirpath, filename) else: yield path def check_recursive(paths, reporter=modReporter.Default, settings_path=None, **setting_overrides): """Recursively check all source files defined in paths.""" warnings = 0 for source_path in iter_source_code(paths): warnings += check_path(source_path, reporter, settings_path=None, **setting_overrides) return warnings
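A hedged sketch of check_path() from the record above; the path is hypothetical and default settings are assumed.

from frosted.api import check_path

# Reads the file (appending a trailing newline) and delegates to check();
# returns 1 on decode or IO errors, otherwise the warning count from check().
warnings = check_path("some_module.py")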
timothycrosley/deprecated.frosted
frosted/api.py
check_recursive
python
def check_recursive(paths, reporter=modReporter.Default, settings_path=None, **setting_overrides): warnings = 0 for source_path in iter_source_code(paths): warnings += check_path(source_path, reporter, settings_path=None, **setting_overrides) return warnings
Recursively check all source files defined in paths.
train
https://github.com/timothycrosley/deprecated.frosted/blob/61ba7f341fc55676c3580c8c4e52117986cd5e12/frosted/api.py#L148-L153
[ "def check_path(filename, reporter=modReporter.Default, settings_path=None, **setting_overrides):\n \"\"\"Check the given path, printing out any warnings detected.\"\"\"\n try:\n with open(filename, 'U') as f:\n codestr = f.read() + '\\n'\n except UnicodeError:\n reporter.unexpected_error(filename, 'problem decoding source')\n return 1\n except IOError:\n msg = sys.exc_info()[1]\n reporter.unexpected_error(filename, msg.args[1])\n return 1\n return check(codestr, filename, reporter, settings_path, **setting_overrides)\n", "def iter_source_code(paths):\n \"\"\"Iterate over all Python source files defined in paths.\"\"\"\n for path in paths:\n if os.path.isdir(path):\n for dirpath, dirnames, filenames in os.walk(path):\n for filename in filenames:\n if filename.endswith('.py'):\n yield os.path.join(dirpath, filename)\n else:\n yield path\n" ]
"""frosted/api.py. Defines the api for the command-line frosted utility Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR """ import os import re import sys import tokenize from io import StringIO from token import N_TOKENS from pies.overrides import * import _ast from frosted import reporter as modReporter from frosted import checker, settings from frosted.messages import FileSkipped, PythonSyntaxError __all__ = ['check', 'check_path', 'check_recursive', 'iter_source_code'] _re_noqa = re.compile(r'((frosted)[:=]\s*noqa)|(#\s*noqa)', re.I) def _noqa_lines(codeString): line_nums = [] g = tokenize.generate_tokens(StringIO(str(codeString)).readline) # tokenize the string for toknum, tokval, begins, _, _ in g: lineno = begins[0] # not sure what N_TOKENS really means, but in testing, that was what comments were # tokenized as if toknum == N_TOKENS: if _re_noqa.search(tokval): line_nums.append(lineno) return line_nums def _should_skip(filename, skip): if filename in skip: return True position = os.path.split(filename) while position[1]: if position[1] in skip: return True position = os.path.split(position[0]) def check(codeString, filename, reporter=modReporter.Default, settings_path=None, **setting_overrides): """Check the Python source given by codeString for unfrosted flakes.""" if not settings_path and filename: settings_path = os.path.dirname(os.path.abspath(filename)) settings_path = settings_path or os.getcwd() active_settings = settings.from_path(settings_path).copy() for key, value in itemsview(setting_overrides): access_key = key.replace('not_', '').lower() if type(active_settings.get(access_key)) in (list, tuple): if key.startswith('not_'): active_settings[access_key] = list(set(active_settings[access_key]).difference(value)) else: active_settings[access_key] = list(set(active_settings[access_key]).union(value)) else: active_settings[key] = value active_settings.update(setting_overrides) if _should_skip(filename, active_settings.get('skip', [])): if active_settings.get('directly_being_checked', None) == 1: reporter.flake(FileSkipped(filename)) return 1 elif active_settings.get('verbose', False): ignore = active_settings.get('ignore_frosted_errors', []) if(not "W200" in ignore and not "W201" in ignore): reporter.flake(FileSkipped(filename, None, verbose=active_settings.get('verbose'))) return 0 # First, compile into an AST and handle syntax errors. 
try: tree = compile(codeString, filename, "exec", _ast.PyCF_ONLY_AST) except SyntaxError: value = sys.exc_info()[1] msg = value.args[0] (lineno, offset, text) = value.lineno, value.offset, value.text # If there's an encoding problem with the file, the text is None. if text is None: # Avoid using msg, since for the only known case, it contains a # bogus message that claims the encoding the file declared was # unknown. reporter.unexpected_error(filename, 'problem decoding source') else: reporter.flake(PythonSyntaxError(filename, msg, lineno, offset, text, verbose=active_settings.get('verbose'))) return 1 except Exception: reporter.unexpected_error(filename, 'problem decoding source') return 1 # Okay, it's syntactically valid. Now check it. w = checker.Checker(tree, filename, None, ignore_lines=_noqa_lines(codeString), **active_settings) w.messages.sort(key=lambda m: m.lineno) for warning in w.messages: reporter.flake(warning) return len(w.messages) def check_path(filename, reporter=modReporter.Default, settings_path=None, **setting_overrides): """Check the given path, printing out any warnings detected.""" try: with open(filename, 'U') as f: codestr = f.read() + '\n' except UnicodeError: reporter.unexpected_error(filename, 'problem decoding source') return 1 except IOError: msg = sys.exc_info()[1] reporter.unexpected_error(filename, msg.args[1]) return 1 return check(codestr, filename, reporter, settings_path, **setting_overrides) def iter_source_code(paths): """Iterate over all Python source files defined in paths.""" for path in paths: if os.path.isdir(path): for dirpath, dirnames, filenames in os.walk(path): for filename in filenames: if filename.endswith('.py'): yield os.path.join(dirpath, filename) else: yield path
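A short sketch of check_recursive() as defined above; the directory names are illustrative.

from frosted.api import check_recursive

# iter_source_code() walks each path for files ending in '.py'; check_path()
# is run on every match and the per-file warning counts are summed.
total_warnings = check_recursive(["src", "tests"])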
kfdm/gntp
gntp/config.py
mini
python
def mini(description, **kwargs): kwargs['notifierFactory'] = GrowlNotifier gntp.notifier.mini(description, **kwargs)
Single notification function. Simple notification function in one line. Has only one required parameter and attempts to use reasonable defaults for everything else. :param string description: Notification message
Single notification function. Simple notification function in one line. Has only one required parameter and attempts to use reasonable defaults for everything else. :param string description: Notification message
train
https://github.com/kfdm/gntp/blob/772a5f4db3707ea0253691d930bf648d1344913a/gntp/config.py#L62-L70
[ "def mini(description, applicationName='PythonMini', noteType=\"Message\",\n\t\t\ttitle=\"Mini Message\", applicationIcon=None, hostname='localhost',\n\t\t\tpassword=None, port=23053, sticky=False, priority=None,\n\t\t\tcallback=None, notificationIcon=None, identifier=None,\n\t\t\tnotifierFactory=GrowlNotifier):\n\t\"\"\"Single notification function\n\n\tSimple notification function in one line. Has only one required parameter\n\tand attempts to use reasonable defaults for everything else\n\t:param string description: Notification message\n\n\t.. warning::\n\t\t\tFor now, only URL callbacks are supported. In the future, the\n\t\t\tcallback argument will also support a function\n\t\"\"\"\n\ttry:\n\t\tgrowl = notifierFactory(\n\t\t\tapplicationName=applicationName,\n\t\t\tnotifications=[noteType],\n\t\t\tdefaultNotifications=[noteType],\n\t\t\tapplicationIcon=applicationIcon,\n\t\t\thostname=hostname,\n\t\t\tpassword=password,\n\t\t\tport=port,\n\t\t)\n\t\tresult = growl.register()\n\t\tif result is not True:\n\t\t\treturn result\n\n\t\treturn growl.notify(\n\t\t\tnoteType=noteType,\n\t\t\ttitle=title,\n\t\t\tdescription=description,\n\t\t\ticon=notificationIcon,\n\t\t\tsticky=sticky,\n\t\t\tpriority=priority,\n\t\t\tcallback=callback,\n\t\t\tidentifier=identifier,\n\t\t)\n\texcept Exception:\n\t\t# We want the \"mini\" function to be simple and swallow Exceptions\n\t\t# in order to be less invasive\n\t\tlogger.exception(\"Growl error\")\n" ]
# Copyright: 2013 Paul Traylor # These sources are released under the terms of the MIT license: see LICENSE """ The gntp.config module is provided as an extended GrowlNotifier object that takes advantage of the ConfigParser module to allow us to setup some default values (such as hostname, password, and port) in a more global way to be shared among programs using gntp """ import logging import os import gntp.notifier import gntp.shim __all__ = [ 'mini', 'GrowlNotifier' ] logger = logging.getLogger(__name__) class GrowlNotifier(gntp.notifier.GrowlNotifier): """ ConfigParser enhanced GrowlNotifier object For right now, we are only interested in letting users overide certain values from ~/.gntp :: [gntp] hostname = ? password = ? port = ? """ def __init__(self, *args, **kwargs): config = gntp.shim.RawConfigParser({ 'hostname': kwargs.get('hostname', 'localhost'), 'password': kwargs.get('password'), 'port': kwargs.get('port', 23053), }) config.read([os.path.expanduser('~/.gntp')]) # If the file does not exist, then there will be no gntp section defined # and the config.get() lines below will get confused. Since we are not # saving the config, it should be safe to just add it here so the # code below doesn't complain if not config.has_section('gntp'): logger.info('Error reading ~/.gntp config file') config.add_section('gntp') kwargs['password'] = config.get('gntp', 'password') kwargs['hostname'] = config.get('gntp', 'hostname') kwargs['port'] = config.getint('gntp', 'port') super(GrowlNotifier, self).__init__(*args, **kwargs) if __name__ == '__main__': # If we're running this module directly we're likely running it as a test # so extra debugging is useful logging.basicConfig(level=logging.INFO) mini('Testing mini notification')
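A usage sketch mirroring the self-test call at the bottom of gntp/config.py; it assumes a GNTP-capable server (e.g. Growl) is reachable, and the underlying gntp.notifier.mini swallows exceptions rather than raising.

from gntp.config import mini

# The config-aware GrowlNotifier picks up hostname/password/port from ~/.gntp
# when that file exists, then registers and sends a single notification.
mini("Testing mini notification")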
kfdm/gntp
gntp/core.py
parse_gntp
python
def parse_gntp(data, password=None): data = gntp.shim.u(data) match = GNTP_INFO_LINE_SHORT.match(data) if not match: raise errors.ParseError('INVALID_GNTP_INFO') info = match.groupdict() if info['messagetype'] == 'REGISTER': return GNTPRegister(data, password=password) elif info['messagetype'] == 'NOTIFY': return GNTPNotice(data, password=password) elif info['messagetype'] == 'SUBSCRIBE': return GNTPSubscribe(data, password=password) elif info['messagetype'] == '-OK': return GNTPOK(data) elif info['messagetype'] == '-ERROR': return GNTPError(data) raise errors.ParseError('INVALID_GNTP_MESSAGE')
Attempt to parse a message as a GNTP message :param string data: Message to be parsed :param string password: Optional password to be used to verify the message
train
https://github.com/kfdm/gntp/blob/772a5f4db3707ea0253691d930bf648d1344913a/gntp/core.py#L497-L518
[ "def u(s):\n\tif isinstance(s, bytes):\n\t\treturn s.decode('utf8', 'replace')\n\treturn s\n" ]
# Copyright: 2013 Paul Traylor # These sources are released under the terms of the MIT license: see LICENSE import hashlib import re import time import gntp.shim import gntp.errors as errors __all__ = [ 'GNTPRegister', 'GNTPNotice', 'GNTPSubscribe', 'GNTPOK', 'GNTPError', 'parse_gntp', ] #GNTP/<version> <messagetype> <encryptionAlgorithmID>[:<ivValue>][ <keyHashAlgorithmID>:<keyHash>.<salt>] GNTP_INFO_LINE = re.compile( 'GNTP/(?P<version>\d+\.\d+) (?P<messagetype>REGISTER|NOTIFY|SUBSCRIBE|\-OK|\-ERROR)' + ' (?P<encryptionAlgorithmID>[A-Z0-9]+(:(?P<ivValue>[A-F0-9]+))?) ?' + '((?P<keyHashAlgorithmID>[A-Z0-9]+):(?P<keyHash>[A-F0-9]+).(?P<salt>[A-F0-9]+))?\r\n', re.IGNORECASE ) GNTP_INFO_LINE_SHORT = re.compile( 'GNTP/(?P<version>\d+\.\d+) (?P<messagetype>REGISTER|NOTIFY|SUBSCRIBE|\-OK|\-ERROR)', re.IGNORECASE ) GNTP_HEADER = re.compile('([\w-]+):(.+)') GNTP_EOL = gntp.shim.b('\r\n') GNTP_SEP = gntp.shim.b(': ') class _GNTPBuffer(gntp.shim.StringIO): """GNTP Buffer class""" def writeln(self, value=None): if value: self.write(gntp.shim.b(value)) self.write(GNTP_EOL) def writeheader(self, key, value): if not isinstance(value, str): value = str(value) self.write(gntp.shim.b(key)) self.write(GNTP_SEP) self.write(gntp.shim.b(value)) self.write(GNTP_EOL) class _GNTPBase(object): """Base initilization :param string messagetype: GNTP Message type :param string version: GNTP Protocol version :param string encription: Encryption protocol """ def __init__(self, messagetype=None, version='1.0', encryption=None): self.info = { 'version': version, 'messagetype': messagetype, 'encryptionAlgorithmID': encryption } self.hash_algo = { 'MD5': hashlib.md5, 'SHA1': hashlib.sha1, 'SHA256': hashlib.sha256, 'SHA512': hashlib.sha512, } self.headers = {} self.resources = {} # For Python2 we can just return the bytes as is without worry # but on Python3 we want to make sure we return the packet as # a unicode string so that things like logging won't get confused if gntp.shim.PY2: def __str__(self): return self.encode() else: def __str__(self): return gntp.shim.u(self.encode()) def _parse_info(self, data): """Parse the first line of a GNTP message to get security and other info values :param string data: GNTP Message :return dict: Parsed GNTP Info line """ match = GNTP_INFO_LINE.match(data) if not match: raise errors.ParseError('ERROR_PARSING_INFO_LINE') info = match.groupdict() if info['encryptionAlgorithmID'] == 'NONE': info['encryptionAlgorithmID'] = None return info def set_password(self, password, encryptAlgo='MD5'): """Set a password for a GNTP Message :param string password: Null to clear password :param string encryptAlgo: Supports MD5, SHA1, SHA256, SHA512 """ if not password: self.info['encryptionAlgorithmID'] = None self.info['keyHashAlgorithm'] = None return self.password = gntp.shim.b(password) self.encryptAlgo = encryptAlgo.upper() if not self.encryptAlgo in self.hash_algo: raise errors.UnsupportedError('INVALID HASH "%s"' % self.encryptAlgo) hashfunction = self.hash_algo.get(self.encryptAlgo) password = password.encode('utf8') seed = time.ctime().encode('utf8') salt = hashfunction(seed).hexdigest() saltHash = hashfunction(seed).digest() keyBasis = password + saltHash key = hashfunction(keyBasis).digest() keyHash = hashfunction(key).hexdigest() self.info['keyHashAlgorithmID'] = self.encryptAlgo self.info['keyHash'] = keyHash.upper() self.info['salt'] = salt.upper() def _decode_hex(self, value): """Helper function to decode hex string to `proper` hex string :param string value: Human readable hex string 
:return string: Hex string """ result = '' for i in range(0, len(value), 2): tmp = int(value[i:i + 2], 16) result += chr(tmp) return result def _decode_binary(self, rawIdentifier, identifier): rawIdentifier += '\r\n\r\n' dataLength = int(identifier['Length']) pointerStart = self.raw.find(rawIdentifier) + len(rawIdentifier) pointerEnd = pointerStart + dataLength data = self.raw[pointerStart:pointerEnd] if not len(data) == dataLength: raise errors.ParseError('INVALID_DATA_LENGTH Expected: %s Recieved %s' % (dataLength, len(data))) return data def _validate_password(self, password): """Validate GNTP Message against stored password""" self.password = password if password is None: raise errors.AuthError('Missing password') keyHash = self.info.get('keyHash', None) if keyHash is None and self.password is None: return True if keyHash is None: raise errors.AuthError('Invalid keyHash') if self.password is None: raise errors.AuthError('Missing password') keyHashAlgorithmID = self.info.get('keyHashAlgorithmID','MD5') password = self.password.encode('utf8') saltHash = self._decode_hex(self.info['salt']) keyBasis = password + saltHash self.key = self.hash_algo[keyHashAlgorithmID](keyBasis).digest() keyHash = self.hash_algo[keyHashAlgorithmID](self.key).hexdigest() if not keyHash.upper() == self.info['keyHash'].upper(): raise errors.AuthError('Invalid Hash') return True def validate(self): """Verify required headers""" for header in self._requiredHeaders: if not self.headers.get(header, False): raise errors.ParseError('Missing Notification Header: ' + header) def _format_info(self): """Generate info line for GNTP Message :return string: """ info = 'GNTP/%s %s' % ( self.info.get('version'), self.info.get('messagetype'), ) if self.info.get('encryptionAlgorithmID', None): info += ' %s:%s' % ( self.info.get('encryptionAlgorithmID'), self.info.get('ivValue'), ) else: info += ' NONE' if self.info.get('keyHashAlgorithmID', None): info += ' %s:%s.%s' % ( self.info.get('keyHashAlgorithmID'), self.info.get('keyHash'), self.info.get('salt') ) return info def _parse_dict(self, data): """Helper function to parse blocks of GNTP headers into a dictionary :param string data: :return dict: Dictionary of parsed GNTP Headers """ d = {} for line in data.split('\r\n'): match = GNTP_HEADER.match(line) if not match: continue key = match.group(1).strip() val = match.group(2).strip() d[key] = val return d def add_header(self, key, value): self.headers[key] = value def add_resource(self, data): """Add binary resource :param string data: Binary Data """ data = gntp.shim.b(data) identifier = hashlib.md5(data).hexdigest() self.resources[identifier] = data return 'x-growl-resource://%s' % identifier def decode(self, data, password=None): """Decode GNTP Message :param string data: """ self.password = password self.raw = gntp.shim.u(data) parts = self.raw.split('\r\n\r\n') self.info = self._parse_info(self.raw) self.headers = self._parse_dict(parts[0]) def encode(self): """Encode a generic GNTP Message :return string: GNTP Message ready to be sent. 
Returned as a byte string """ buff = _GNTPBuffer() buff.writeln(self._format_info()) #Headers for k, v in self.headers.items(): buff.writeheader(k, v) buff.writeln() #Resources for resource, data in self.resources.items(): buff.writeheader('Identifier', resource) buff.writeheader('Length', len(data)) buff.writeln() buff.write(data) buff.writeln() buff.writeln() return buff.getvalue() class GNTPRegister(_GNTPBase): """Represents a GNTP Registration Command :param string data: (Optional) See decode() :param string password: (Optional) Password to use while encoding/decoding messages """ _requiredHeaders = [ 'Application-Name', 'Notifications-Count' ] _requiredNotificationHeaders = ['Notification-Name'] def __init__(self, data=None, password=None): _GNTPBase.__init__(self, 'REGISTER') self.notifications = [] if data: self.decode(data, password) else: self.set_password(password) self.add_header('Application-Name', 'pygntp') self.add_header('Notifications-Count', 0) def validate(self): '''Validate required headers and validate notification headers''' for header in self._requiredHeaders: if not self.headers.get(header, False): raise errors.ParseError('Missing Registration Header: ' + header) for notice in self.notifications: for header in self._requiredNotificationHeaders: if not notice.get(header, False): raise errors.ParseError('Missing Notification Header: ' + header) def decode(self, data, password): """Decode existing GNTP Registration message :param string data: Message to decode """ self.raw = gntp.shim.u(data) parts = self.raw.split('\r\n\r\n') self.info = self._parse_info(self.raw) self._validate_password(password) self.headers = self._parse_dict(parts[0]) for i, part in enumerate(parts): if i == 0: continue # Skip Header if part.strip() == '': continue notice = self._parse_dict(part) if notice.get('Notification-Name', False): self.notifications.append(notice) elif notice.get('Identifier', False): notice['Data'] = self._decode_binary(part, notice) #open('register.png','wblol').write(notice['Data']) self.resources[notice.get('Identifier')] = notice def add_notification(self, name, enabled=True): """Add new Notification to Registration message :param string name: Notification Name :param boolean enabled: Enable this notification by default """ notice = {} notice['Notification-Name'] = name notice['Notification-Enabled'] = enabled self.notifications.append(notice) self.add_header('Notifications-Count', len(self.notifications)) def encode(self): """Encode a GNTP Registration Message :return string: Encoded GNTP Registration message. 
Returned as a byte string """ buff = _GNTPBuffer() buff.writeln(self._format_info()) #Headers for k, v in self.headers.items(): buff.writeheader(k, v) buff.writeln() #Notifications if len(self.notifications) > 0: for notice in self.notifications: for k, v in notice.items(): buff.writeheader(k, v) buff.writeln() #Resources for resource, data in self.resources.items(): buff.writeheader('Identifier', resource) buff.writeheader('Length', len(data)) buff.writeln() buff.write(data) buff.writeln() buff.writeln() return buff.getvalue() class GNTPNotice(_GNTPBase): """Represents a GNTP Notification Command :param string data: (Optional) See decode() :param string app: (Optional) Set Application-Name :param string name: (Optional) Set Notification-Name :param string title: (Optional) Set Notification Title :param string password: (Optional) Password to use while encoding/decoding messages """ _requiredHeaders = [ 'Application-Name', 'Notification-Name', 'Notification-Title' ] def __init__(self, data=None, app=None, name=None, title=None, password=None): _GNTPBase.__init__(self, 'NOTIFY') if data: self.decode(data, password) else: self.set_password(password) if app: self.add_header('Application-Name', app) if name: self.add_header('Notification-Name', name) if title: self.add_header('Notification-Title', title) def decode(self, data, password): """Decode existing GNTP Notification message :param string data: Message to decode. """ self.raw = gntp.shim.u(data) parts = self.raw.split('\r\n\r\n') self.info = self._parse_info(self.raw) self._validate_password(password) self.headers = self._parse_dict(parts[0]) for i, part in enumerate(parts): if i == 0: continue # Skip Header if part.strip() == '': continue notice = self._parse_dict(part) if notice.get('Identifier', False): notice['Data'] = self._decode_binary(part, notice) #open('notice.png','wblol').write(notice['Data']) self.resources[notice.get('Identifier')] = notice class GNTPSubscribe(_GNTPBase): """Represents a GNTP Subscribe Command :param string data: (Optional) See decode() :param string password: (Optional) Password to use while encoding/decoding messages """ _requiredHeaders = [ 'Subscriber-ID', 'Subscriber-Name', ] def __init__(self, data=None, password=None): _GNTPBase.__init__(self, 'SUBSCRIBE') if data: self.decode(data, password) else: self.set_password(password) class GNTPOK(_GNTPBase): """Represents a GNTP OK Response :param string data: (Optional) See _GNTPResponse.decode() :param string action: (Optional) Set type of action the OK Response is for """ _requiredHeaders = ['Response-Action'] def __init__(self, data=None, action=None): _GNTPBase.__init__(self, '-OK') if data: self.decode(data) if action: self.add_header('Response-Action', action) class GNTPError(_GNTPBase): """Represents a GNTP Error response :param string data: (Optional) See _GNTPResponse.decode() :param string errorcode: (Optional) Error code :param string errordesc: (Optional) Error Description """ _requiredHeaders = ['Error-Code', 'Error-Description'] def __init__(self, data=None, errorcode=None, errordesc=None): _GNTPBase.__init__(self, '-ERROR') if data: self.decode(data) if errorcode: self.add_header('Error-Code', errorcode) self.add_header('Error-Description', errordesc) def error(self): return (self.headers.get('Error-Code', None), self.headers.get('Error-Description', None))
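A hedged sketch of parse_gntp() from the record above, using an -ERROR response because -OK and -ERROR messages skip the password validation that REGISTER, NOTIFY and SUBSCRIBE go through; the header values are illustrative.

from gntp.core import parse_gntp

raw = (
    "GNTP/1.0 -ERROR NONE\r\n"
    "Error-Code: 401\r\n"
    "Error-Description: Not authorised\r\n"
    "\r\n"
)
msg = parse_gntp(raw)   # the info line dispatches to GNTPError
print(msg.error())      # ('401', 'Not authorised')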
kfdm/gntp
gntp/core.py
_GNTPBase._parse_info
python
def _parse_info(self, data): match = GNTP_INFO_LINE.match(data) if not match: raise errors.ParseError('ERROR_PARSING_INFO_LINE') info = match.groupdict() if info['encryptionAlgorithmID'] == 'NONE': info['encryptionAlgorithmID'] = None return info
Parse the first line of a GNTP message to get security and other info values :param string data: GNTP Message :return dict: Parsed GNTP Info line
train
https://github.com/kfdm/gntp/blob/772a5f4db3707ea0253691d930bf648d1344913a/gntp/core.py#L87-L103
null
class _GNTPBase(object): """Base initilization :param string messagetype: GNTP Message type :param string version: GNTP Protocol version :param string encription: Encryption protocol """ def __init__(self, messagetype=None, version='1.0', encryption=None): self.info = { 'version': version, 'messagetype': messagetype, 'encryptionAlgorithmID': encryption } self.hash_algo = { 'MD5': hashlib.md5, 'SHA1': hashlib.sha1, 'SHA256': hashlib.sha256, 'SHA512': hashlib.sha512, } self.headers = {} self.resources = {} # For Python2 we can just return the bytes as is without worry # but on Python3 we want to make sure we return the packet as # a unicode string so that things like logging won't get confused if gntp.shim.PY2: def __str__(self): return self.encode() else: def __str__(self): return gntp.shim.u(self.encode()) def set_password(self, password, encryptAlgo='MD5'): """Set a password for a GNTP Message :param string password: Null to clear password :param string encryptAlgo: Supports MD5, SHA1, SHA256, SHA512 """ if not password: self.info['encryptionAlgorithmID'] = None self.info['keyHashAlgorithm'] = None return self.password = gntp.shim.b(password) self.encryptAlgo = encryptAlgo.upper() if not self.encryptAlgo in self.hash_algo: raise errors.UnsupportedError('INVALID HASH "%s"' % self.encryptAlgo) hashfunction = self.hash_algo.get(self.encryptAlgo) password = password.encode('utf8') seed = time.ctime().encode('utf8') salt = hashfunction(seed).hexdigest() saltHash = hashfunction(seed).digest() keyBasis = password + saltHash key = hashfunction(keyBasis).digest() keyHash = hashfunction(key).hexdigest() self.info['keyHashAlgorithmID'] = self.encryptAlgo self.info['keyHash'] = keyHash.upper() self.info['salt'] = salt.upper() def _decode_hex(self, value): """Helper function to decode hex string to `proper` hex string :param string value: Human readable hex string :return string: Hex string """ result = '' for i in range(0, len(value), 2): tmp = int(value[i:i + 2], 16) result += chr(tmp) return result def _decode_binary(self, rawIdentifier, identifier): rawIdentifier += '\r\n\r\n' dataLength = int(identifier['Length']) pointerStart = self.raw.find(rawIdentifier) + len(rawIdentifier) pointerEnd = pointerStart + dataLength data = self.raw[pointerStart:pointerEnd] if not len(data) == dataLength: raise errors.ParseError('INVALID_DATA_LENGTH Expected: %s Recieved %s' % (dataLength, len(data))) return data def _validate_password(self, password): """Validate GNTP Message against stored password""" self.password = password if password is None: raise errors.AuthError('Missing password') keyHash = self.info.get('keyHash', None) if keyHash is None and self.password is None: return True if keyHash is None: raise errors.AuthError('Invalid keyHash') if self.password is None: raise errors.AuthError('Missing password') keyHashAlgorithmID = self.info.get('keyHashAlgorithmID','MD5') password = self.password.encode('utf8') saltHash = self._decode_hex(self.info['salt']) keyBasis = password + saltHash self.key = self.hash_algo[keyHashAlgorithmID](keyBasis).digest() keyHash = self.hash_algo[keyHashAlgorithmID](self.key).hexdigest() if not keyHash.upper() == self.info['keyHash'].upper(): raise errors.AuthError('Invalid Hash') return True def validate(self): """Verify required headers""" for header in self._requiredHeaders: if not self.headers.get(header, False): raise errors.ParseError('Missing Notification Header: ' + header) def _format_info(self): """Generate info line for GNTP Message :return string: """ info = 'GNTP/%s 
%s' % ( self.info.get('version'), self.info.get('messagetype'), ) if self.info.get('encryptionAlgorithmID', None): info += ' %s:%s' % ( self.info.get('encryptionAlgorithmID'), self.info.get('ivValue'), ) else: info += ' NONE' if self.info.get('keyHashAlgorithmID', None): info += ' %s:%s.%s' % ( self.info.get('keyHashAlgorithmID'), self.info.get('keyHash'), self.info.get('salt') ) return info def _parse_dict(self, data): """Helper function to parse blocks of GNTP headers into a dictionary :param string data: :return dict: Dictionary of parsed GNTP Headers """ d = {} for line in data.split('\r\n'): match = GNTP_HEADER.match(line) if not match: continue key = match.group(1).strip() val = match.group(2).strip() d[key] = val return d def add_header(self, key, value): self.headers[key] = value def add_resource(self, data): """Add binary resource :param string data: Binary Data """ data = gntp.shim.b(data) identifier = hashlib.md5(data).hexdigest() self.resources[identifier] = data return 'x-growl-resource://%s' % identifier def decode(self, data, password=None): """Decode GNTP Message :param string data: """ self.password = password self.raw = gntp.shim.u(data) parts = self.raw.split('\r\n\r\n') self.info = self._parse_info(self.raw) self.headers = self._parse_dict(parts[0]) def encode(self): """Encode a generic GNTP Message :return string: GNTP Message ready to be sent. Returned as a byte string """ buff = _GNTPBuffer() buff.writeln(self._format_info()) #Headers for k, v in self.headers.items(): buff.writeheader(k, v) buff.writeln() #Resources for resource, data in self.resources.items(): buff.writeheader('Identifier', resource) buff.writeheader('Length', len(data)) buff.writeln() buff.write(data) buff.writeln() buff.writeln() return buff.getvalue()
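A small sketch of the info-line handling that _parse_info() performs, exercised here through GNTPOK since its decode path has no password check; note that 'NONE' in the encryption field is normalised to None.

from gntp.core import GNTPOK

ok = GNTPOK("GNTP/1.0 -OK NONE\r\nResponse-Action: NOTIFY\r\n\r\n")
print(ok.info["version"], ok.info["messagetype"])   # 1.0 -OK
print(ok.info["encryptionAlgorithmID"])             # None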
kfdm/gntp
gntp/core.py
_GNTPBase.set_password
python
def set_password(self, password, encryptAlgo='MD5'): if not password: self.info['encryptionAlgorithmID'] = None self.info['keyHashAlgorithm'] = None return self.password = gntp.shim.b(password) self.encryptAlgo = encryptAlgo.upper() if not self.encryptAlgo in self.hash_algo: raise errors.UnsupportedError('INVALID HASH "%s"' % self.encryptAlgo) hashfunction = self.hash_algo.get(self.encryptAlgo) password = password.encode('utf8') seed = time.ctime().encode('utf8') salt = hashfunction(seed).hexdigest() saltHash = hashfunction(seed).digest() keyBasis = password + saltHash key = hashfunction(keyBasis).digest() keyHash = hashfunction(key).hexdigest() self.info['keyHashAlgorithmID'] = self.encryptAlgo self.info['keyHash'] = keyHash.upper() self.info['salt'] = salt.upper()
Set a password for a GNTP Message :param string password: Null to clear password :param string encryptAlgo: Supports MD5, SHA1, SHA256, SHA512
train
https://github.com/kfdm/gntp/blob/772a5f4db3707ea0253691d930bf648d1344913a/gntp/core.py#L105-L134
[ "def b(s):\n\tif isinstance(s, bytes):\n\t\treturn s\n\treturn s.encode('utf8', 'replace')\n" ]
class _GNTPBase(object): """Base initilization :param string messagetype: GNTP Message type :param string version: GNTP Protocol version :param string encription: Encryption protocol """ def __init__(self, messagetype=None, version='1.0', encryption=None): self.info = { 'version': version, 'messagetype': messagetype, 'encryptionAlgorithmID': encryption } self.hash_algo = { 'MD5': hashlib.md5, 'SHA1': hashlib.sha1, 'SHA256': hashlib.sha256, 'SHA512': hashlib.sha512, } self.headers = {} self.resources = {} # For Python2 we can just return the bytes as is without worry # but on Python3 we want to make sure we return the packet as # a unicode string so that things like logging won't get confused if gntp.shim.PY2: def __str__(self): return self.encode() else: def __str__(self): return gntp.shim.u(self.encode()) def _parse_info(self, data): """Parse the first line of a GNTP message to get security and other info values :param string data: GNTP Message :return dict: Parsed GNTP Info line """ match = GNTP_INFO_LINE.match(data) if not match: raise errors.ParseError('ERROR_PARSING_INFO_LINE') info = match.groupdict() if info['encryptionAlgorithmID'] == 'NONE': info['encryptionAlgorithmID'] = None return info def _decode_hex(self, value): """Helper function to decode hex string to `proper` hex string :param string value: Human readable hex string :return string: Hex string """ result = '' for i in range(0, len(value), 2): tmp = int(value[i:i + 2], 16) result += chr(tmp) return result def _decode_binary(self, rawIdentifier, identifier): rawIdentifier += '\r\n\r\n' dataLength = int(identifier['Length']) pointerStart = self.raw.find(rawIdentifier) + len(rawIdentifier) pointerEnd = pointerStart + dataLength data = self.raw[pointerStart:pointerEnd] if not len(data) == dataLength: raise errors.ParseError('INVALID_DATA_LENGTH Expected: %s Recieved %s' % (dataLength, len(data))) return data def _validate_password(self, password): """Validate GNTP Message against stored password""" self.password = password if password is None: raise errors.AuthError('Missing password') keyHash = self.info.get('keyHash', None) if keyHash is None and self.password is None: return True if keyHash is None: raise errors.AuthError('Invalid keyHash') if self.password is None: raise errors.AuthError('Missing password') keyHashAlgorithmID = self.info.get('keyHashAlgorithmID','MD5') password = self.password.encode('utf8') saltHash = self._decode_hex(self.info['salt']) keyBasis = password + saltHash self.key = self.hash_algo[keyHashAlgorithmID](keyBasis).digest() keyHash = self.hash_algo[keyHashAlgorithmID](self.key).hexdigest() if not keyHash.upper() == self.info['keyHash'].upper(): raise errors.AuthError('Invalid Hash') return True def validate(self): """Verify required headers""" for header in self._requiredHeaders: if not self.headers.get(header, False): raise errors.ParseError('Missing Notification Header: ' + header) def _format_info(self): """Generate info line for GNTP Message :return string: """ info = 'GNTP/%s %s' % ( self.info.get('version'), self.info.get('messagetype'), ) if self.info.get('encryptionAlgorithmID', None): info += ' %s:%s' % ( self.info.get('encryptionAlgorithmID'), self.info.get('ivValue'), ) else: info += ' NONE' if self.info.get('keyHashAlgorithmID', None): info += ' %s:%s.%s' % ( self.info.get('keyHashAlgorithmID'), self.info.get('keyHash'), self.info.get('salt') ) return info def _parse_dict(self, data): """Helper function to parse blocks of GNTP headers into a dictionary :param string data: :return dict: 
Dictionary of parsed GNTP Headers """ d = {} for line in data.split('\r\n'): match = GNTP_HEADER.match(line) if not match: continue key = match.group(1).strip() val = match.group(2).strip() d[key] = val return d def add_header(self, key, value): self.headers[key] = value def add_resource(self, data): """Add binary resource :param string data: Binary Data """ data = gntp.shim.b(data) identifier = hashlib.md5(data).hexdigest() self.resources[identifier] = data return 'x-growl-resource://%s' % identifier def decode(self, data, password=None): """Decode GNTP Message :param string data: """ self.password = password self.raw = gntp.shim.u(data) parts = self.raw.split('\r\n\r\n') self.info = self._parse_info(self.raw) self.headers = self._parse_dict(parts[0]) def encode(self): """Encode a generic GNTP Message :return string: GNTP Message ready to be sent. Returned as a byte string """ buff = _GNTPBuffer() buff.writeln(self._format_info()) #Headers for k, v in self.headers.items(): buff.writeheader(k, v) buff.writeln() #Resources for resource, data in self.resources.items(): buff.writeheader('Identifier', resource) buff.writeheader('Length', len(data)) buff.writeln() buff.write(data) buff.writeln() buff.writeln() return buff.getvalue()
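The set_password record above derives the GNTP key material as hash(hash(password + salt)), with the salt itself derived from a hash of the current time. Below is a minimal standalone sketch of the same key-derivation scheme using only hashlib and a random salt; the helper name derive_key_hash is ours for illustration and is not part of the gntp API.

```python
import hashlib
import os

def derive_key_hash(password, algo='MD5'):
    # Map algorithm names to hashlib constructors, mirroring the table in _GNTPBase.
    hash_algo = {'MD5': hashlib.md5, 'SHA1': hashlib.sha1,
                 'SHA256': hashlib.sha256, 'SHA512': hashlib.sha512}
    hashfunction = hash_algo[algo.upper()]
    salt = os.urandom(16)                                  # random salt bytes
    key = hashfunction(password.encode('utf8') + salt).digest()
    key_hash = hashfunction(key).hexdigest()
    # GNTP carries both values uppercased in the info line: <algo>:<keyHash>.<salt>
    return key_hash.upper(), salt.hex().upper()

key_hash, salt_hex = derive_key_hash('secret', 'SHA256')
print('keyHash=%s salt=%s' % (key_hash, salt_hex))
```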
kfdm/gntp
gntp/core.py
_GNTPBase._decode_hex
python
def _decode_hex(self, value):
    result = ''
    for i in range(0, len(value), 2):
        tmp = int(value[i:i + 2], 16)
        result += chr(tmp)
    return result
Helper function to decode hex string to `proper` hex string :param string value: Human readable hex string :return string: Hex string
train
https://github.com/kfdm/gntp/blob/772a5f4db3707ea0253691d930bf648d1344913a/gntp/core.py#L136-L146
null
class _GNTPBase(object): """Base initilization :param string messagetype: GNTP Message type :param string version: GNTP Protocol version :param string encription: Encryption protocol """ def __init__(self, messagetype=None, version='1.0', encryption=None): self.info = { 'version': version, 'messagetype': messagetype, 'encryptionAlgorithmID': encryption } self.hash_algo = { 'MD5': hashlib.md5, 'SHA1': hashlib.sha1, 'SHA256': hashlib.sha256, 'SHA512': hashlib.sha512, } self.headers = {} self.resources = {} # For Python2 we can just return the bytes as is without worry # but on Python3 we want to make sure we return the packet as # a unicode string so that things like logging won't get confused if gntp.shim.PY2: def __str__(self): return self.encode() else: def __str__(self): return gntp.shim.u(self.encode()) def _parse_info(self, data): """Parse the first line of a GNTP message to get security and other info values :param string data: GNTP Message :return dict: Parsed GNTP Info line """ match = GNTP_INFO_LINE.match(data) if not match: raise errors.ParseError('ERROR_PARSING_INFO_LINE') info = match.groupdict() if info['encryptionAlgorithmID'] == 'NONE': info['encryptionAlgorithmID'] = None return info def set_password(self, password, encryptAlgo='MD5'): """Set a password for a GNTP Message :param string password: Null to clear password :param string encryptAlgo: Supports MD5, SHA1, SHA256, SHA512 """ if not password: self.info['encryptionAlgorithmID'] = None self.info['keyHashAlgorithm'] = None return self.password = gntp.shim.b(password) self.encryptAlgo = encryptAlgo.upper() if not self.encryptAlgo in self.hash_algo: raise errors.UnsupportedError('INVALID HASH "%s"' % self.encryptAlgo) hashfunction = self.hash_algo.get(self.encryptAlgo) password = password.encode('utf8') seed = time.ctime().encode('utf8') salt = hashfunction(seed).hexdigest() saltHash = hashfunction(seed).digest() keyBasis = password + saltHash key = hashfunction(keyBasis).digest() keyHash = hashfunction(key).hexdigest() self.info['keyHashAlgorithmID'] = self.encryptAlgo self.info['keyHash'] = keyHash.upper() self.info['salt'] = salt.upper() def _decode_binary(self, rawIdentifier, identifier): rawIdentifier += '\r\n\r\n' dataLength = int(identifier['Length']) pointerStart = self.raw.find(rawIdentifier) + len(rawIdentifier) pointerEnd = pointerStart + dataLength data = self.raw[pointerStart:pointerEnd] if not len(data) == dataLength: raise errors.ParseError('INVALID_DATA_LENGTH Expected: %s Recieved %s' % (dataLength, len(data))) return data def _validate_password(self, password): """Validate GNTP Message against stored password""" self.password = password if password is None: raise errors.AuthError('Missing password') keyHash = self.info.get('keyHash', None) if keyHash is None and self.password is None: return True if keyHash is None: raise errors.AuthError('Invalid keyHash') if self.password is None: raise errors.AuthError('Missing password') keyHashAlgorithmID = self.info.get('keyHashAlgorithmID','MD5') password = self.password.encode('utf8') saltHash = self._decode_hex(self.info['salt']) keyBasis = password + saltHash self.key = self.hash_algo[keyHashAlgorithmID](keyBasis).digest() keyHash = self.hash_algo[keyHashAlgorithmID](self.key).hexdigest() if not keyHash.upper() == self.info['keyHash'].upper(): raise errors.AuthError('Invalid Hash') return True def validate(self): """Verify required headers""" for header in self._requiredHeaders: if not self.headers.get(header, False): raise errors.ParseError('Missing 
Notification Header: ' + header) def _format_info(self): """Generate info line for GNTP Message :return string: """ info = 'GNTP/%s %s' % ( self.info.get('version'), self.info.get('messagetype'), ) if self.info.get('encryptionAlgorithmID', None): info += ' %s:%s' % ( self.info.get('encryptionAlgorithmID'), self.info.get('ivValue'), ) else: info += ' NONE' if self.info.get('keyHashAlgorithmID', None): info += ' %s:%s.%s' % ( self.info.get('keyHashAlgorithmID'), self.info.get('keyHash'), self.info.get('salt') ) return info def _parse_dict(self, data): """Helper function to parse blocks of GNTP headers into a dictionary :param string data: :return dict: Dictionary of parsed GNTP Headers """ d = {} for line in data.split('\r\n'): match = GNTP_HEADER.match(line) if not match: continue key = match.group(1).strip() val = match.group(2).strip() d[key] = val return d def add_header(self, key, value): self.headers[key] = value def add_resource(self, data): """Add binary resource :param string data: Binary Data """ data = gntp.shim.b(data) identifier = hashlib.md5(data).hexdigest() self.resources[identifier] = data return 'x-growl-resource://%s' % identifier def decode(self, data, password=None): """Decode GNTP Message :param string data: """ self.password = password self.raw = gntp.shim.u(data) parts = self.raw.split('\r\n\r\n') self.info = self._parse_info(self.raw) self.headers = self._parse_dict(parts[0]) def encode(self): """Encode a generic GNTP Message :return string: GNTP Message ready to be sent. Returned as a byte string """ buff = _GNTPBuffer() buff.writeln(self._format_info()) #Headers for k, v in self.headers.items(): buff.writeheader(k, v) buff.writeln() #Resources for resource, data in self.resources.items(): buff.writeheader('Identifier', resource) buff.writeheader('Length', len(data)) buff.writeln() buff.write(data) buff.writeln() buff.writeln() return buff.getvalue()
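_decode_hex turns the hex-encoded salt carried in the info line back into raw characters, two hex digits at a time. A small self-contained sketch of the same conversion; bytes.fromhex is the stdlib shortcut when a bytes result is acceptable.

```python
def decode_hex(value):
    # Convert a human-readable hex string (e.g. the salt from the info line)
    # back into the raw characters, two hex digits per character.
    result = ''
    for i in range(0, len(value), 2):
        result += chr(int(value[i:i + 2], 16))
    return result

# Equivalent byte-oriented form: bytes.fromhex('48656C6C6F')
print(decode_hex('48656C6C6F'))  # -> 'Hello'
```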
kfdm/gntp
gntp/core.py
_GNTPBase._validate_password
python
def _validate_password(self, password):
    self.password = password
    if password is None:
        raise errors.AuthError('Missing password')
    keyHash = self.info.get('keyHash', None)
    if keyHash is None and self.password is None:
        return True
    if keyHash is None:
        raise errors.AuthError('Invalid keyHash')
    if self.password is None:
        raise errors.AuthError('Missing password')

    keyHashAlgorithmID = self.info.get('keyHashAlgorithmID','MD5')

    password = self.password.encode('utf8')
    saltHash = self._decode_hex(self.info['salt'])

    keyBasis = password + saltHash
    self.key = self.hash_algo[keyHashAlgorithmID](keyBasis).digest()
    keyHash = self.hash_algo[keyHashAlgorithmID](self.key).hexdigest()

    if not keyHash.upper() == self.info['keyHash'].upper():
        raise errors.AuthError('Invalid Hash')
    return True
Validate GNTP Message against stored password
train
https://github.com/kfdm/gntp/blob/772a5f4db3707ea0253691d930bf648d1344913a/gntp/core.py#L158-L182
null
class _GNTPBase(object): """Base initilization :param string messagetype: GNTP Message type :param string version: GNTP Protocol version :param string encription: Encryption protocol """ def __init__(self, messagetype=None, version='1.0', encryption=None): self.info = { 'version': version, 'messagetype': messagetype, 'encryptionAlgorithmID': encryption } self.hash_algo = { 'MD5': hashlib.md5, 'SHA1': hashlib.sha1, 'SHA256': hashlib.sha256, 'SHA512': hashlib.sha512, } self.headers = {} self.resources = {} # For Python2 we can just return the bytes as is without worry # but on Python3 we want to make sure we return the packet as # a unicode string so that things like logging won't get confused if gntp.shim.PY2: def __str__(self): return self.encode() else: def __str__(self): return gntp.shim.u(self.encode()) def _parse_info(self, data): """Parse the first line of a GNTP message to get security and other info values :param string data: GNTP Message :return dict: Parsed GNTP Info line """ match = GNTP_INFO_LINE.match(data) if not match: raise errors.ParseError('ERROR_PARSING_INFO_LINE') info = match.groupdict() if info['encryptionAlgorithmID'] == 'NONE': info['encryptionAlgorithmID'] = None return info def set_password(self, password, encryptAlgo='MD5'): """Set a password for a GNTP Message :param string password: Null to clear password :param string encryptAlgo: Supports MD5, SHA1, SHA256, SHA512 """ if not password: self.info['encryptionAlgorithmID'] = None self.info['keyHashAlgorithm'] = None return self.password = gntp.shim.b(password) self.encryptAlgo = encryptAlgo.upper() if not self.encryptAlgo in self.hash_algo: raise errors.UnsupportedError('INVALID HASH "%s"' % self.encryptAlgo) hashfunction = self.hash_algo.get(self.encryptAlgo) password = password.encode('utf8') seed = time.ctime().encode('utf8') salt = hashfunction(seed).hexdigest() saltHash = hashfunction(seed).digest() keyBasis = password + saltHash key = hashfunction(keyBasis).digest() keyHash = hashfunction(key).hexdigest() self.info['keyHashAlgorithmID'] = self.encryptAlgo self.info['keyHash'] = keyHash.upper() self.info['salt'] = salt.upper() def _decode_hex(self, value): """Helper function to decode hex string to `proper` hex string :param string value: Human readable hex string :return string: Hex string """ result = '' for i in range(0, len(value), 2): tmp = int(value[i:i + 2], 16) result += chr(tmp) return result def _decode_binary(self, rawIdentifier, identifier): rawIdentifier += '\r\n\r\n' dataLength = int(identifier['Length']) pointerStart = self.raw.find(rawIdentifier) + len(rawIdentifier) pointerEnd = pointerStart + dataLength data = self.raw[pointerStart:pointerEnd] if not len(data) == dataLength: raise errors.ParseError('INVALID_DATA_LENGTH Expected: %s Recieved %s' % (dataLength, len(data))) return data def validate(self): """Verify required headers""" for header in self._requiredHeaders: if not self.headers.get(header, False): raise errors.ParseError('Missing Notification Header: ' + header) def _format_info(self): """Generate info line for GNTP Message :return string: """ info = 'GNTP/%s %s' % ( self.info.get('version'), self.info.get('messagetype'), ) if self.info.get('encryptionAlgorithmID', None): info += ' %s:%s' % ( self.info.get('encryptionAlgorithmID'), self.info.get('ivValue'), ) else: info += ' NONE' if self.info.get('keyHashAlgorithmID', None): info += ' %s:%s.%s' % ( self.info.get('keyHashAlgorithmID'), self.info.get('keyHash'), self.info.get('salt') ) return info def _parse_dict(self, data): 
"""Helper function to parse blocks of GNTP headers into a dictionary :param string data: :return dict: Dictionary of parsed GNTP Headers """ d = {} for line in data.split('\r\n'): match = GNTP_HEADER.match(line) if not match: continue key = match.group(1).strip() val = match.group(2).strip() d[key] = val return d def add_header(self, key, value): self.headers[key] = value def add_resource(self, data): """Add binary resource :param string data: Binary Data """ data = gntp.shim.b(data) identifier = hashlib.md5(data).hexdigest() self.resources[identifier] = data return 'x-growl-resource://%s' % identifier def decode(self, data, password=None): """Decode GNTP Message :param string data: """ self.password = password self.raw = gntp.shim.u(data) parts = self.raw.split('\r\n\r\n') self.info = self._parse_info(self.raw) self.headers = self._parse_dict(parts[0]) def encode(self): """Encode a generic GNTP Message :return string: GNTP Message ready to be sent. Returned as a byte string """ buff = _GNTPBuffer() buff.writeln(self._format_info()) #Headers for k, v in self.headers.items(): buff.writeheader(k, v) buff.writeln() #Resources for resource, data in self.resources.items(): buff.writeheader('Identifier', resource) buff.writeheader('Length', len(data)) buff.writeln() buff.write(data) buff.writeln() buff.writeln() return buff.getvalue()
kfdm/gntp
gntp/core.py
_GNTPBase.validate
python
def validate(self):
    for header in self._requiredHeaders:
        if not self.headers.get(header, False):
            raise errors.ParseError('Missing Notification Header: ' + header)
Verify required headers
train
https://github.com/kfdm/gntp/blob/772a5f4db3707ea0253691d930bf648d1344913a/gntp/core.py#L184-L188
null
class _GNTPBase(object): """Base initilization :param string messagetype: GNTP Message type :param string version: GNTP Protocol version :param string encription: Encryption protocol """ def __init__(self, messagetype=None, version='1.0', encryption=None): self.info = { 'version': version, 'messagetype': messagetype, 'encryptionAlgorithmID': encryption } self.hash_algo = { 'MD5': hashlib.md5, 'SHA1': hashlib.sha1, 'SHA256': hashlib.sha256, 'SHA512': hashlib.sha512, } self.headers = {} self.resources = {} # For Python2 we can just return the bytes as is without worry # but on Python3 we want to make sure we return the packet as # a unicode string so that things like logging won't get confused if gntp.shim.PY2: def __str__(self): return self.encode() else: def __str__(self): return gntp.shim.u(self.encode()) def _parse_info(self, data): """Parse the first line of a GNTP message to get security and other info values :param string data: GNTP Message :return dict: Parsed GNTP Info line """ match = GNTP_INFO_LINE.match(data) if not match: raise errors.ParseError('ERROR_PARSING_INFO_LINE') info = match.groupdict() if info['encryptionAlgorithmID'] == 'NONE': info['encryptionAlgorithmID'] = None return info def set_password(self, password, encryptAlgo='MD5'): """Set a password for a GNTP Message :param string password: Null to clear password :param string encryptAlgo: Supports MD5, SHA1, SHA256, SHA512 """ if not password: self.info['encryptionAlgorithmID'] = None self.info['keyHashAlgorithm'] = None return self.password = gntp.shim.b(password) self.encryptAlgo = encryptAlgo.upper() if not self.encryptAlgo in self.hash_algo: raise errors.UnsupportedError('INVALID HASH "%s"' % self.encryptAlgo) hashfunction = self.hash_algo.get(self.encryptAlgo) password = password.encode('utf8') seed = time.ctime().encode('utf8') salt = hashfunction(seed).hexdigest() saltHash = hashfunction(seed).digest() keyBasis = password + saltHash key = hashfunction(keyBasis).digest() keyHash = hashfunction(key).hexdigest() self.info['keyHashAlgorithmID'] = self.encryptAlgo self.info['keyHash'] = keyHash.upper() self.info['salt'] = salt.upper() def _decode_hex(self, value): """Helper function to decode hex string to `proper` hex string :param string value: Human readable hex string :return string: Hex string """ result = '' for i in range(0, len(value), 2): tmp = int(value[i:i + 2], 16) result += chr(tmp) return result def _decode_binary(self, rawIdentifier, identifier): rawIdentifier += '\r\n\r\n' dataLength = int(identifier['Length']) pointerStart = self.raw.find(rawIdentifier) + len(rawIdentifier) pointerEnd = pointerStart + dataLength data = self.raw[pointerStart:pointerEnd] if not len(data) == dataLength: raise errors.ParseError('INVALID_DATA_LENGTH Expected: %s Recieved %s' % (dataLength, len(data))) return data def _validate_password(self, password): """Validate GNTP Message against stored password""" self.password = password if password is None: raise errors.AuthError('Missing password') keyHash = self.info.get('keyHash', None) if keyHash is None and self.password is None: return True if keyHash is None: raise errors.AuthError('Invalid keyHash') if self.password is None: raise errors.AuthError('Missing password') keyHashAlgorithmID = self.info.get('keyHashAlgorithmID','MD5') password = self.password.encode('utf8') saltHash = self._decode_hex(self.info['salt']) keyBasis = password + saltHash self.key = self.hash_algo[keyHashAlgorithmID](keyBasis).digest() keyHash = 
self.hash_algo[keyHashAlgorithmID](self.key).hexdigest() if not keyHash.upper() == self.info['keyHash'].upper(): raise errors.AuthError('Invalid Hash') return True def _format_info(self): """Generate info line for GNTP Message :return string: """ info = 'GNTP/%s %s' % ( self.info.get('version'), self.info.get('messagetype'), ) if self.info.get('encryptionAlgorithmID', None): info += ' %s:%s' % ( self.info.get('encryptionAlgorithmID'), self.info.get('ivValue'), ) else: info += ' NONE' if self.info.get('keyHashAlgorithmID', None): info += ' %s:%s.%s' % ( self.info.get('keyHashAlgorithmID'), self.info.get('keyHash'), self.info.get('salt') ) return info def _parse_dict(self, data): """Helper function to parse blocks of GNTP headers into a dictionary :param string data: :return dict: Dictionary of parsed GNTP Headers """ d = {} for line in data.split('\r\n'): match = GNTP_HEADER.match(line) if not match: continue key = match.group(1).strip() val = match.group(2).strip() d[key] = val return d def add_header(self, key, value): self.headers[key] = value def add_resource(self, data): """Add binary resource :param string data: Binary Data """ data = gntp.shim.b(data) identifier = hashlib.md5(data).hexdigest() self.resources[identifier] = data return 'x-growl-resource://%s' % identifier def decode(self, data, password=None): """Decode GNTP Message :param string data: """ self.password = password self.raw = gntp.shim.u(data) parts = self.raw.split('\r\n\r\n') self.info = self._parse_info(self.raw) self.headers = self._parse_dict(parts[0]) def encode(self): """Encode a generic GNTP Message :return string: GNTP Message ready to be sent. Returned as a byte string """ buff = _GNTPBuffer() buff.writeln(self._format_info()) #Headers for k, v in self.headers.items(): buff.writeheader(k, v) buff.writeln() #Resources for resource, data in self.resources.items(): buff.writeheader('Identifier', resource) buff.writeheader('Length', len(data)) buff.writeln() buff.write(data) buff.writeln() buff.writeln() return buff.getvalue()
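validate() simply walks _requiredHeaders and rejects the message on the first missing or empty value. A sketch of the same falsy-value check over a plain dict; the header names below are only illustrative, since the class's _requiredHeaders list is defined by the subclasses and is not shown in this record.

```python
# Illustrative header names; real required headers depend on the message type.
REQUIRED = ['Application-Name', 'Notification-Name', 'Notification-Title']

def missing_headers(headers, required=REQUIRED):
    # Collect every header that is absent or empty, using the same
    # falsy test as _GNTPBase.validate().
    return [h for h in required if not headers.get(h, False)]

print(missing_headers({'Application-Name': 'demo'}))
# -> ['Notification-Name', 'Notification-Title']
```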
kfdm/gntp
gntp/core.py
_GNTPBase._format_info
python
def _format_info(self):
    info = 'GNTP/%s %s' % (
        self.info.get('version'),
        self.info.get('messagetype'),
    )
    if self.info.get('encryptionAlgorithmID', None):
        info += ' %s:%s' % (
            self.info.get('encryptionAlgorithmID'),
            self.info.get('ivValue'),
        )
    else:
        info += ' NONE'

    if self.info.get('keyHashAlgorithmID', None):
        info += ' %s:%s.%s' % (
            self.info.get('keyHashAlgorithmID'),
            self.info.get('keyHash'),
            self.info.get('salt')
        )

    return info
Generate info line for GNTP Message :return string:
train
https://github.com/kfdm/gntp/blob/772a5f4db3707ea0253691d930bf648d1344913a/gntp/core.py#L190-L214
null
class _GNTPBase(object): """Base initilization :param string messagetype: GNTP Message type :param string version: GNTP Protocol version :param string encription: Encryption protocol """ def __init__(self, messagetype=None, version='1.0', encryption=None): self.info = { 'version': version, 'messagetype': messagetype, 'encryptionAlgorithmID': encryption } self.hash_algo = { 'MD5': hashlib.md5, 'SHA1': hashlib.sha1, 'SHA256': hashlib.sha256, 'SHA512': hashlib.sha512, } self.headers = {} self.resources = {} # For Python2 we can just return the bytes as is without worry # but on Python3 we want to make sure we return the packet as # a unicode string so that things like logging won't get confused if gntp.shim.PY2: def __str__(self): return self.encode() else: def __str__(self): return gntp.shim.u(self.encode()) def _parse_info(self, data): """Parse the first line of a GNTP message to get security and other info values :param string data: GNTP Message :return dict: Parsed GNTP Info line """ match = GNTP_INFO_LINE.match(data) if not match: raise errors.ParseError('ERROR_PARSING_INFO_LINE') info = match.groupdict() if info['encryptionAlgorithmID'] == 'NONE': info['encryptionAlgorithmID'] = None return info def set_password(self, password, encryptAlgo='MD5'): """Set a password for a GNTP Message :param string password: Null to clear password :param string encryptAlgo: Supports MD5, SHA1, SHA256, SHA512 """ if not password: self.info['encryptionAlgorithmID'] = None self.info['keyHashAlgorithm'] = None return self.password = gntp.shim.b(password) self.encryptAlgo = encryptAlgo.upper() if not self.encryptAlgo in self.hash_algo: raise errors.UnsupportedError('INVALID HASH "%s"' % self.encryptAlgo) hashfunction = self.hash_algo.get(self.encryptAlgo) password = password.encode('utf8') seed = time.ctime().encode('utf8') salt = hashfunction(seed).hexdigest() saltHash = hashfunction(seed).digest() keyBasis = password + saltHash key = hashfunction(keyBasis).digest() keyHash = hashfunction(key).hexdigest() self.info['keyHashAlgorithmID'] = self.encryptAlgo self.info['keyHash'] = keyHash.upper() self.info['salt'] = salt.upper() def _decode_hex(self, value): """Helper function to decode hex string to `proper` hex string :param string value: Human readable hex string :return string: Hex string """ result = '' for i in range(0, len(value), 2): tmp = int(value[i:i + 2], 16) result += chr(tmp) return result def _decode_binary(self, rawIdentifier, identifier): rawIdentifier += '\r\n\r\n' dataLength = int(identifier['Length']) pointerStart = self.raw.find(rawIdentifier) + len(rawIdentifier) pointerEnd = pointerStart + dataLength data = self.raw[pointerStart:pointerEnd] if not len(data) == dataLength: raise errors.ParseError('INVALID_DATA_LENGTH Expected: %s Recieved %s' % (dataLength, len(data))) return data def _validate_password(self, password): """Validate GNTP Message against stored password""" self.password = password if password is None: raise errors.AuthError('Missing password') keyHash = self.info.get('keyHash', None) if keyHash is None and self.password is None: return True if keyHash is None: raise errors.AuthError('Invalid keyHash') if self.password is None: raise errors.AuthError('Missing password') keyHashAlgorithmID = self.info.get('keyHashAlgorithmID','MD5') password = self.password.encode('utf8') saltHash = self._decode_hex(self.info['salt']) keyBasis = password + saltHash self.key = self.hash_algo[keyHashAlgorithmID](keyBasis).digest() keyHash = 
self.hash_algo[keyHashAlgorithmID](self.key).hexdigest() if not keyHash.upper() == self.info['keyHash'].upper(): raise errors.AuthError('Invalid Hash') return True def validate(self): """Verify required headers""" for header in self._requiredHeaders: if not self.headers.get(header, False): raise errors.ParseError('Missing Notification Header: ' + header) def _parse_dict(self, data): """Helper function to parse blocks of GNTP headers into a dictionary :param string data: :return dict: Dictionary of parsed GNTP Headers """ d = {} for line in data.split('\r\n'): match = GNTP_HEADER.match(line) if not match: continue key = match.group(1).strip() val = match.group(2).strip() d[key] = val return d def add_header(self, key, value): self.headers[key] = value def add_resource(self, data): """Add binary resource :param string data: Binary Data """ data = gntp.shim.b(data) identifier = hashlib.md5(data).hexdigest() self.resources[identifier] = data return 'x-growl-resource://%s' % identifier def decode(self, data, password=None): """Decode GNTP Message :param string data: """ self.password = password self.raw = gntp.shim.u(data) parts = self.raw.split('\r\n\r\n') self.info = self._parse_info(self.raw) self.headers = self._parse_dict(parts[0]) def encode(self): """Encode a generic GNTP Message :return string: GNTP Message ready to be sent. Returned as a byte string """ buff = _GNTPBuffer() buff.writeln(self._format_info()) #Headers for k, v in self.headers.items(): buff.writeheader(k, v) buff.writeln() #Resources for resource, data in self.resources.items(): buff.writeheader('Identifier', resource) buff.writeheader('Length', len(data)) buff.writeln() buff.write(data) buff.writeln() buff.writeln() return buff.getvalue()
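_format_info builds the first request line: 'GNTP/<version> <messagetype>' followed by the encryption section and an optional key-hash section. A small sketch that covers only the unencrypted ('NONE') path; format_info is an illustrative stand-in, not the gntp function itself.

```python
def format_info(version='1.0', messagetype='NOTIFY',
                key_hash_algo=None, key_hash=None, salt=None):
    # First request line: GNTP/<version> <messagetype> <encryption>[ <keyinfo>]
    info = 'GNTP/%s %s NONE' % (version, messagetype)
    if key_hash_algo:
        info += ' %s:%s.%s' % (key_hash_algo, key_hash, salt)
    return info

print(format_info())                                            # GNTP/1.0 NOTIFY NONE
print(format_info(key_hash_algo='MD5', key_hash='ABCD', salt='1234'))
```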
kfdm/gntp
gntp/core.py
_GNTPBase._parse_dict
python
def _parse_dict(self, data):
    d = {}
    for line in data.split('\r\n'):
        match = GNTP_HEADER.match(line)
        if not match:
            continue

        key = match.group(1).strip()
        val = match.group(2).strip()
        d[key] = val
    return d
Helper function to parse blocks of GNTP headers into a dictionary :param string data: :return dict: Dictionary of parsed GNTP Headers
train
https://github.com/kfdm/gntp/blob/772a5f4db3707ea0253691d930bf648d1344913a/gntp/core.py#L216-L231
null
class _GNTPBase(object): """Base initilization :param string messagetype: GNTP Message type :param string version: GNTP Protocol version :param string encription: Encryption protocol """ def __init__(self, messagetype=None, version='1.0', encryption=None): self.info = { 'version': version, 'messagetype': messagetype, 'encryptionAlgorithmID': encryption } self.hash_algo = { 'MD5': hashlib.md5, 'SHA1': hashlib.sha1, 'SHA256': hashlib.sha256, 'SHA512': hashlib.sha512, } self.headers = {} self.resources = {} # For Python2 we can just return the bytes as is without worry # but on Python3 we want to make sure we return the packet as # a unicode string so that things like logging won't get confused if gntp.shim.PY2: def __str__(self): return self.encode() else: def __str__(self): return gntp.shim.u(self.encode()) def _parse_info(self, data): """Parse the first line of a GNTP message to get security and other info values :param string data: GNTP Message :return dict: Parsed GNTP Info line """ match = GNTP_INFO_LINE.match(data) if not match: raise errors.ParseError('ERROR_PARSING_INFO_LINE') info = match.groupdict() if info['encryptionAlgorithmID'] == 'NONE': info['encryptionAlgorithmID'] = None return info def set_password(self, password, encryptAlgo='MD5'): """Set a password for a GNTP Message :param string password: Null to clear password :param string encryptAlgo: Supports MD5, SHA1, SHA256, SHA512 """ if not password: self.info['encryptionAlgorithmID'] = None self.info['keyHashAlgorithm'] = None return self.password = gntp.shim.b(password) self.encryptAlgo = encryptAlgo.upper() if not self.encryptAlgo in self.hash_algo: raise errors.UnsupportedError('INVALID HASH "%s"' % self.encryptAlgo) hashfunction = self.hash_algo.get(self.encryptAlgo) password = password.encode('utf8') seed = time.ctime().encode('utf8') salt = hashfunction(seed).hexdigest() saltHash = hashfunction(seed).digest() keyBasis = password + saltHash key = hashfunction(keyBasis).digest() keyHash = hashfunction(key).hexdigest() self.info['keyHashAlgorithmID'] = self.encryptAlgo self.info['keyHash'] = keyHash.upper() self.info['salt'] = salt.upper() def _decode_hex(self, value): """Helper function to decode hex string to `proper` hex string :param string value: Human readable hex string :return string: Hex string """ result = '' for i in range(0, len(value), 2): tmp = int(value[i:i + 2], 16) result += chr(tmp) return result def _decode_binary(self, rawIdentifier, identifier): rawIdentifier += '\r\n\r\n' dataLength = int(identifier['Length']) pointerStart = self.raw.find(rawIdentifier) + len(rawIdentifier) pointerEnd = pointerStart + dataLength data = self.raw[pointerStart:pointerEnd] if not len(data) == dataLength: raise errors.ParseError('INVALID_DATA_LENGTH Expected: %s Recieved %s' % (dataLength, len(data))) return data def _validate_password(self, password): """Validate GNTP Message against stored password""" self.password = password if password is None: raise errors.AuthError('Missing password') keyHash = self.info.get('keyHash', None) if keyHash is None and self.password is None: return True if keyHash is None: raise errors.AuthError('Invalid keyHash') if self.password is None: raise errors.AuthError('Missing password') keyHashAlgorithmID = self.info.get('keyHashAlgorithmID','MD5') password = self.password.encode('utf8') saltHash = self._decode_hex(self.info['salt']) keyBasis = password + saltHash self.key = self.hash_algo[keyHashAlgorithmID](keyBasis).digest() keyHash = 
self.hash_algo[keyHashAlgorithmID](self.key).hexdigest() if not keyHash.upper() == self.info['keyHash'].upper(): raise errors.AuthError('Invalid Hash') return True def validate(self): """Verify required headers""" for header in self._requiredHeaders: if not self.headers.get(header, False): raise errors.ParseError('Missing Notification Header: ' + header) def _format_info(self): """Generate info line for GNTP Message :return string: """ info = 'GNTP/%s %s' % ( self.info.get('version'), self.info.get('messagetype'), ) if self.info.get('encryptionAlgorithmID', None): info += ' %s:%s' % ( self.info.get('encryptionAlgorithmID'), self.info.get('ivValue'), ) else: info += ' NONE' if self.info.get('keyHashAlgorithmID', None): info += ' %s:%s.%s' % ( self.info.get('keyHashAlgorithmID'), self.info.get('keyHash'), self.info.get('salt') ) return info def add_header(self, key, value): self.headers[key] = value def add_resource(self, data): """Add binary resource :param string data: Binary Data """ data = gntp.shim.b(data) identifier = hashlib.md5(data).hexdigest() self.resources[identifier] = data return 'x-growl-resource://%s' % identifier def decode(self, data, password=None): """Decode GNTP Message :param string data: """ self.password = password self.raw = gntp.shim.u(data) parts = self.raw.split('\r\n\r\n') self.info = self._parse_info(self.raw) self.headers = self._parse_dict(parts[0]) def encode(self): """Encode a generic GNTP Message :return string: GNTP Message ready to be sent. Returned as a byte string """ buff = _GNTPBuffer() buff.writeln(self._format_info()) #Headers for k, v in self.headers.items(): buff.writeheader(k, v) buff.writeln() #Resources for resource, data in self.resources.items(): buff.writeheader('Identifier', resource) buff.writeheader('Length', len(data)) buff.writeln() buff.write(data) buff.writeln() buff.writeln() return buff.getvalue()
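_parse_dict splits a header block on CRLF and applies the GNTP_HEADER regex line by line. That regex is defined elsewhere in gntp/core.py and is not shown in this record, so the sketch below uses a plausible stand-in pattern for 'Key: value' lines.

```python
import re

# Stand-in for GNTP_HEADER; the real pattern lives at module level in gntp/core.py.
HEADER = re.compile(r'([\w-]+):(.+)')

def parse_headers(block):
    # Build a dict from one CRLF-separated header block, skipping non-matching lines.
    d = {}
    for line in block.split('\r\n'):
        match = HEADER.match(line)
        if not match:
            continue
        d[match.group(1).strip()] = match.group(2).strip()
    return d

print(parse_headers('Application-Name: demo\r\nNotifications-Count: 1'))
```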
kfdm/gntp
gntp/core.py
_GNTPBase.add_resource
python
def add_resource(self, data):
    data = gntp.shim.b(data)
    identifier = hashlib.md5(data).hexdigest()
    self.resources[identifier] = data
    return 'x-growl-resource://%s' % identifier
Add binary resource :param string data: Binary Data
train
https://github.com/kfdm/gntp/blob/772a5f4db3707ea0253691d930bf648d1344913a/gntp/core.py#L236-L244
[ "def b(s):\n\tif isinstance(s, bytes):\n\t\treturn s\n\treturn s.encode('utf8', 'replace')\n" ]
class _GNTPBase(object): """Base initilization :param string messagetype: GNTP Message type :param string version: GNTP Protocol version :param string encription: Encryption protocol """ def __init__(self, messagetype=None, version='1.0', encryption=None): self.info = { 'version': version, 'messagetype': messagetype, 'encryptionAlgorithmID': encryption } self.hash_algo = { 'MD5': hashlib.md5, 'SHA1': hashlib.sha1, 'SHA256': hashlib.sha256, 'SHA512': hashlib.sha512, } self.headers = {} self.resources = {} # For Python2 we can just return the bytes as is without worry # but on Python3 we want to make sure we return the packet as # a unicode string so that things like logging won't get confused if gntp.shim.PY2: def __str__(self): return self.encode() else: def __str__(self): return gntp.shim.u(self.encode()) def _parse_info(self, data): """Parse the first line of a GNTP message to get security and other info values :param string data: GNTP Message :return dict: Parsed GNTP Info line """ match = GNTP_INFO_LINE.match(data) if not match: raise errors.ParseError('ERROR_PARSING_INFO_LINE') info = match.groupdict() if info['encryptionAlgorithmID'] == 'NONE': info['encryptionAlgorithmID'] = None return info def set_password(self, password, encryptAlgo='MD5'): """Set a password for a GNTP Message :param string password: Null to clear password :param string encryptAlgo: Supports MD5, SHA1, SHA256, SHA512 """ if not password: self.info['encryptionAlgorithmID'] = None self.info['keyHashAlgorithm'] = None return self.password = gntp.shim.b(password) self.encryptAlgo = encryptAlgo.upper() if not self.encryptAlgo in self.hash_algo: raise errors.UnsupportedError('INVALID HASH "%s"' % self.encryptAlgo) hashfunction = self.hash_algo.get(self.encryptAlgo) password = password.encode('utf8') seed = time.ctime().encode('utf8') salt = hashfunction(seed).hexdigest() saltHash = hashfunction(seed).digest() keyBasis = password + saltHash key = hashfunction(keyBasis).digest() keyHash = hashfunction(key).hexdigest() self.info['keyHashAlgorithmID'] = self.encryptAlgo self.info['keyHash'] = keyHash.upper() self.info['salt'] = salt.upper() def _decode_hex(self, value): """Helper function to decode hex string to `proper` hex string :param string value: Human readable hex string :return string: Hex string """ result = '' for i in range(0, len(value), 2): tmp = int(value[i:i + 2], 16) result += chr(tmp) return result def _decode_binary(self, rawIdentifier, identifier): rawIdentifier += '\r\n\r\n' dataLength = int(identifier['Length']) pointerStart = self.raw.find(rawIdentifier) + len(rawIdentifier) pointerEnd = pointerStart + dataLength data = self.raw[pointerStart:pointerEnd] if not len(data) == dataLength: raise errors.ParseError('INVALID_DATA_LENGTH Expected: %s Recieved %s' % (dataLength, len(data))) return data def _validate_password(self, password): """Validate GNTP Message against stored password""" self.password = password if password is None: raise errors.AuthError('Missing password') keyHash = self.info.get('keyHash', None) if keyHash is None and self.password is None: return True if keyHash is None: raise errors.AuthError('Invalid keyHash') if self.password is None: raise errors.AuthError('Missing password') keyHashAlgorithmID = self.info.get('keyHashAlgorithmID','MD5') password = self.password.encode('utf8') saltHash = self._decode_hex(self.info['salt']) keyBasis = password + saltHash self.key = self.hash_algo[keyHashAlgorithmID](keyBasis).digest() keyHash = 
self.hash_algo[keyHashAlgorithmID](self.key).hexdigest() if not keyHash.upper() == self.info['keyHash'].upper(): raise errors.AuthError('Invalid Hash') return True def validate(self): """Verify required headers""" for header in self._requiredHeaders: if not self.headers.get(header, False): raise errors.ParseError('Missing Notification Header: ' + header) def _format_info(self): """Generate info line for GNTP Message :return string: """ info = 'GNTP/%s %s' % ( self.info.get('version'), self.info.get('messagetype'), ) if self.info.get('encryptionAlgorithmID', None): info += ' %s:%s' % ( self.info.get('encryptionAlgorithmID'), self.info.get('ivValue'), ) else: info += ' NONE' if self.info.get('keyHashAlgorithmID', None): info += ' %s:%s.%s' % ( self.info.get('keyHashAlgorithmID'), self.info.get('keyHash'), self.info.get('salt') ) return info def _parse_dict(self, data): """Helper function to parse blocks of GNTP headers into a dictionary :param string data: :return dict: Dictionary of parsed GNTP Headers """ d = {} for line in data.split('\r\n'): match = GNTP_HEADER.match(line) if not match: continue key = match.group(1).strip() val = match.group(2).strip() d[key] = val return d def add_header(self, key, value): self.headers[key] = value def decode(self, data, password=None): """Decode GNTP Message :param string data: """ self.password = password self.raw = gntp.shim.u(data) parts = self.raw.split('\r\n\r\n') self.info = self._parse_info(self.raw) self.headers = self._parse_dict(parts[0]) def encode(self): """Encode a generic GNTP Message :return string: GNTP Message ready to be sent. Returned as a byte string """ buff = _GNTPBuffer() buff.writeln(self._format_info()) #Headers for k, v in self.headers.items(): buff.writeheader(k, v) buff.writeln() #Resources for resource, data in self.resources.items(): buff.writeheader('Identifier', resource) buff.writeheader('Length', len(data)) buff.writeln() buff.write(data) buff.writeln() buff.writeln() return buff.getvalue()
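add_resource stores binary payloads content-addressed by their MD5 digest and returns the x-growl-resource:// pointer that headers use to refer to them. A standalone sketch of the same idea outside the class.

```python
import hashlib

resources = {}

def add_resource(data):
    # Resources are content-addressed: the MD5 hex digest of the payload
    # becomes both the storage key and the pointer placed in headers.
    if isinstance(data, str):
        data = data.encode('utf8')
    identifier = hashlib.md5(data).hexdigest()
    resources[identifier] = data
    return 'x-growl-resource://%s' % identifier

uri = add_resource(b'\x89PNG...')   # e.g. icon bytes (truncated placeholder)
print(uri, len(resources))
```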
kfdm/gntp
gntp/core.py
_GNTPBase.decode
python
def decode(self, data, password=None):
    self.password = password
    self.raw = gntp.shim.u(data)
    parts = self.raw.split('\r\n\r\n')
    self.info = self._parse_info(self.raw)
    self.headers = self._parse_dict(parts[0])
Decode GNTP Message :param string data:
train
https://github.com/kfdm/gntp/blob/772a5f4db3707ea0253691d930bf648d1344913a/gntp/core.py#L246-L255
[ "def u(s):\n\tif isinstance(s, bytes):\n\t\treturn s.decode('utf8', 'replace')\n\treturn s\n", "def _parse_info(self, data):\n\t\"\"\"Parse the first line of a GNTP message to get security and other info values\n\n\t:param string data: GNTP Message\n\t:return dict: Parsed GNTP Info line\n\t\"\"\"\n\n\tmatch = GNTP_INFO_LINE.match(data)\n\n\tif not match:\n\t\traise errors.ParseError('ERROR_PARSING_INFO_LINE')\n\n\tinfo = match.groupdict()\n\tif info['encryptionAlgorithmID'] == 'NONE':\n\t\tinfo['encryptionAlgorithmID'] = None\n\n\treturn info\n", "def _parse_dict(self, data):\n\t\"\"\"Helper function to parse blocks of GNTP headers into a dictionary\n\n\t:param string data:\n\t:return dict: Dictionary of parsed GNTP Headers\n\t\"\"\"\n\td = {}\n\tfor line in data.split('\\r\\n'):\n\t\tmatch = GNTP_HEADER.match(line)\n\t\tif not match:\n\t\t\tcontinue\n\n\t\tkey = match.group(1).strip()\n\t\tval = match.group(2).strip()\n\t\td[key] = val\n\treturn d\n" ]
class _GNTPBase(object): """Base initilization :param string messagetype: GNTP Message type :param string version: GNTP Protocol version :param string encription: Encryption protocol """ def __init__(self, messagetype=None, version='1.0', encryption=None): self.info = { 'version': version, 'messagetype': messagetype, 'encryptionAlgorithmID': encryption } self.hash_algo = { 'MD5': hashlib.md5, 'SHA1': hashlib.sha1, 'SHA256': hashlib.sha256, 'SHA512': hashlib.sha512, } self.headers = {} self.resources = {} # For Python2 we can just return the bytes as is without worry # but on Python3 we want to make sure we return the packet as # a unicode string so that things like logging won't get confused if gntp.shim.PY2: def __str__(self): return self.encode() else: def __str__(self): return gntp.shim.u(self.encode()) def _parse_info(self, data): """Parse the first line of a GNTP message to get security and other info values :param string data: GNTP Message :return dict: Parsed GNTP Info line """ match = GNTP_INFO_LINE.match(data) if not match: raise errors.ParseError('ERROR_PARSING_INFO_LINE') info = match.groupdict() if info['encryptionAlgorithmID'] == 'NONE': info['encryptionAlgorithmID'] = None return info def set_password(self, password, encryptAlgo='MD5'): """Set a password for a GNTP Message :param string password: Null to clear password :param string encryptAlgo: Supports MD5, SHA1, SHA256, SHA512 """ if not password: self.info['encryptionAlgorithmID'] = None self.info['keyHashAlgorithm'] = None return self.password = gntp.shim.b(password) self.encryptAlgo = encryptAlgo.upper() if not self.encryptAlgo in self.hash_algo: raise errors.UnsupportedError('INVALID HASH "%s"' % self.encryptAlgo) hashfunction = self.hash_algo.get(self.encryptAlgo) password = password.encode('utf8') seed = time.ctime().encode('utf8') salt = hashfunction(seed).hexdigest() saltHash = hashfunction(seed).digest() keyBasis = password + saltHash key = hashfunction(keyBasis).digest() keyHash = hashfunction(key).hexdigest() self.info['keyHashAlgorithmID'] = self.encryptAlgo self.info['keyHash'] = keyHash.upper() self.info['salt'] = salt.upper() def _decode_hex(self, value): """Helper function to decode hex string to `proper` hex string :param string value: Human readable hex string :return string: Hex string """ result = '' for i in range(0, len(value), 2): tmp = int(value[i:i + 2], 16) result += chr(tmp) return result def _decode_binary(self, rawIdentifier, identifier): rawIdentifier += '\r\n\r\n' dataLength = int(identifier['Length']) pointerStart = self.raw.find(rawIdentifier) + len(rawIdentifier) pointerEnd = pointerStart + dataLength data = self.raw[pointerStart:pointerEnd] if not len(data) == dataLength: raise errors.ParseError('INVALID_DATA_LENGTH Expected: %s Recieved %s' % (dataLength, len(data))) return data def _validate_password(self, password): """Validate GNTP Message against stored password""" self.password = password if password is None: raise errors.AuthError('Missing password') keyHash = self.info.get('keyHash', None) if keyHash is None and self.password is None: return True if keyHash is None: raise errors.AuthError('Invalid keyHash') if self.password is None: raise errors.AuthError('Missing password') keyHashAlgorithmID = self.info.get('keyHashAlgorithmID','MD5') password = self.password.encode('utf8') saltHash = self._decode_hex(self.info['salt']) keyBasis = password + saltHash self.key = self.hash_algo[keyHashAlgorithmID](keyBasis).digest() keyHash = 
self.hash_algo[keyHashAlgorithmID](self.key).hexdigest() if not keyHash.upper() == self.info['keyHash'].upper(): raise errors.AuthError('Invalid Hash') return True def validate(self): """Verify required headers""" for header in self._requiredHeaders: if not self.headers.get(header, False): raise errors.ParseError('Missing Notification Header: ' + header) def _format_info(self): """Generate info line for GNTP Message :return string: """ info = 'GNTP/%s %s' % ( self.info.get('version'), self.info.get('messagetype'), ) if self.info.get('encryptionAlgorithmID', None): info += ' %s:%s' % ( self.info.get('encryptionAlgorithmID'), self.info.get('ivValue'), ) else: info += ' NONE' if self.info.get('keyHashAlgorithmID', None): info += ' %s:%s.%s' % ( self.info.get('keyHashAlgorithmID'), self.info.get('keyHash'), self.info.get('salt') ) return info def _parse_dict(self, data): """Helper function to parse blocks of GNTP headers into a dictionary :param string data: :return dict: Dictionary of parsed GNTP Headers """ d = {} for line in data.split('\r\n'): match = GNTP_HEADER.match(line) if not match: continue key = match.group(1).strip() val = match.group(2).strip() d[key] = val return d def add_header(self, key, value): self.headers[key] = value def add_resource(self, data): """Add binary resource :param string data: Binary Data """ data = gntp.shim.b(data) identifier = hashlib.md5(data).hexdigest() self.resources[identifier] = data return 'x-growl-resource://%s' % identifier def encode(self): """Encode a generic GNTP Message :return string: GNTP Message ready to be sent. Returned as a byte string """ buff = _GNTPBuffer() buff.writeln(self._format_info()) #Headers for k, v in self.headers.items(): buff.writeheader(k, v) buff.writeln() #Resources for resource, data in self.resources.items(): buff.writeheader('Identifier', resource) buff.writeheader('Length', len(data)) buff.writeln() buff.write(data) buff.writeln() buff.writeln() return buff.getvalue()
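decode() relies on the GNTP framing: blocks separated by a blank CRLF line, with the info line and headers in the first block. A minimal sketch that splits and parses such a message without the gntp classes.

```python
raw = ('GNTP/1.0 NOTIFY NONE\r\n'
       'Application-Name: demo\r\n'
       'Notification-Name: ping\r\n'
       '\r\n')

# The first '\r\n\r\n'-separated block carries the info line plus headers;
# later blocks (if any) carry notifications or binary resources.
first_block = raw.split('\r\n\r\n')[0]
lines = first_block.split('\r\n')
info_line, header_lines = lines[0], lines[1:]
headers = dict(line.split(':', 1) for line in header_lines)
headers = {k.strip(): v.strip() for k, v in headers.items()}
print(info_line, headers)
```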
kfdm/gntp
gntp/core.py
GNTPRegister.validate
python
def validate(self):
    '''Validate required headers and validate notification headers'''
    for header in self._requiredHeaders:
        if not self.headers.get(header, False):
            raise errors.ParseError('Missing Registration Header: ' + header)
    for notice in self.notifications:
        for header in self._requiredNotificationHeaders:
            if not notice.get(header, False):
                raise errors.ParseError('Missing Notification Header: ' + header)
Validate required headers and validate notification headers
train
https://github.com/kfdm/gntp/blob/772a5f4db3707ea0253691d930bf648d1344913a/gntp/core.py#L307-L315
null
class GNTPRegister(_GNTPBase): """Represents a GNTP Registration Command :param string data: (Optional) See decode() :param string password: (Optional) Password to use while encoding/decoding messages """ _requiredHeaders = [ 'Application-Name', 'Notifications-Count' ] _requiredNotificationHeaders = ['Notification-Name'] def __init__(self, data=None, password=None): _GNTPBase.__init__(self, 'REGISTER') self.notifications = [] if data: self.decode(data, password) else: self.set_password(password) self.add_header('Application-Name', 'pygntp') self.add_header('Notifications-Count', 0) def decode(self, data, password): """Decode existing GNTP Registration message :param string data: Message to decode """ self.raw = gntp.shim.u(data) parts = self.raw.split('\r\n\r\n') self.info = self._parse_info(self.raw) self._validate_password(password) self.headers = self._parse_dict(parts[0]) for i, part in enumerate(parts): if i == 0: continue # Skip Header if part.strip() == '': continue notice = self._parse_dict(part) if notice.get('Notification-Name', False): self.notifications.append(notice) elif notice.get('Identifier', False): notice['Data'] = self._decode_binary(part, notice) #open('register.png','wblol').write(notice['Data']) self.resources[notice.get('Identifier')] = notice def add_notification(self, name, enabled=True): """Add new Notification to Registration message :param string name: Notification Name :param boolean enabled: Enable this notification by default """ notice = {} notice['Notification-Name'] = name notice['Notification-Enabled'] = enabled self.notifications.append(notice) self.add_header('Notifications-Count', len(self.notifications)) def encode(self): """Encode a GNTP Registration Message :return string: Encoded GNTP Registration message. Returned as a byte string """ buff = _GNTPBuffer() buff.writeln(self._format_info()) #Headers for k, v in self.headers.items(): buff.writeheader(k, v) buff.writeln() #Notifications if len(self.notifications) > 0: for notice in self.notifications: for k, v in notice.items(): buff.writeheader(k, v) buff.writeln() #Resources for resource, data in self.resources.items(): buff.writeheader('Identifier', resource) buff.writeheader('Length', len(data)) buff.writeln() buff.write(data) buff.writeln() buff.writeln() return buff.getvalue()
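GNTPRegister.validate adds a second pass over the registered notifications on top of the base header check: every notification dict must carry Notification-Name. A standalone sketch of the same two-level validation; validate_register is an illustrative name.

```python
def validate_register(headers, notifications):
    # Mirrors GNTPRegister.validate(): top-level headers first, then one
    # required header per registered notification.
    for header in ('Application-Name', 'Notifications-Count'):
        if not headers.get(header, False):
            raise ValueError('Missing Registration Header: ' + header)
    for notice in notifications:
        if not notice.get('Notification-Name', False):
            raise ValueError('Missing Notification Header: Notification-Name')

validate_register({'Application-Name': 'demo', 'Notifications-Count': 1},
                  [{'Notification-Name': 'ping'}])   # passes silently
```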
kfdm/gntp
gntp/core.py
GNTPRegister.decode
python
def decode(self, data, password):
    self.raw = gntp.shim.u(data)
    parts = self.raw.split('\r\n\r\n')

    self.info = self._parse_info(self.raw)
    self._validate_password(password)
    self.headers = self._parse_dict(parts[0])

    for i, part in enumerate(parts):
        if i == 0:
            continue  # Skip Header
        if part.strip() == '':
            continue
        notice = self._parse_dict(part)
        if notice.get('Notification-Name', False):
            self.notifications.append(notice)
        elif notice.get('Identifier', False):
            notice['Data'] = self._decode_binary(part, notice)
            #open('register.png','wblol').write(notice['Data'])
            self.resources[notice.get('Identifier')] = notice
Decode existing GNTP Registration message :param string data: Message to decode
train
https://github.com/kfdm/gntp/blob/772a5f4db3707ea0253691d930bf648d1344913a/gntp/core.py#L317-L339
[ "def u(s):\n\tif isinstance(s, bytes):\n\t\treturn s.decode('utf8', 'replace')\n\treturn s\n", "def _parse_info(self, data):\n\t\"\"\"Parse the first line of a GNTP message to get security and other info values\n\n\t:param string data: GNTP Message\n\t:return dict: Parsed GNTP Info line\n\t\"\"\"\n\n\tmatch = GNTP_INFO_LINE.match(data)\n\n\tif not match:\n\t\traise errors.ParseError('ERROR_PARSING_INFO_LINE')\n\n\tinfo = match.groupdict()\n\tif info['encryptionAlgorithmID'] == 'NONE':\n\t\tinfo['encryptionAlgorithmID'] = None\n\n\treturn info\n", "def _validate_password(self, password):\n\t\"\"\"Validate GNTP Message against stored password\"\"\"\n\tself.password = password\n\tif password is None:\n\t\traise errors.AuthError('Missing password')\n\tkeyHash = self.info.get('keyHash', None)\n\tif keyHash is None and self.password is None:\n\t\treturn True\n\tif keyHash is None:\n\t\traise errors.AuthError('Invalid keyHash')\n\tif self.password is None:\n\t\traise errors.AuthError('Missing password')\n\n\tkeyHashAlgorithmID = self.info.get('keyHashAlgorithmID','MD5')\n\n\tpassword = self.password.encode('utf8')\n\tsaltHash = self._decode_hex(self.info['salt'])\n\n\tkeyBasis = password + saltHash\n\tself.key = self.hash_algo[keyHashAlgorithmID](keyBasis).digest()\n\tkeyHash = self.hash_algo[keyHashAlgorithmID](self.key).hexdigest()\n\n\tif not keyHash.upper() == self.info['keyHash'].upper():\n\t\traise errors.AuthError('Invalid Hash')\n\treturn True\n", "def _parse_dict(self, data):\n\t\"\"\"Helper function to parse blocks of GNTP headers into a dictionary\n\n\t:param string data:\n\t:return dict: Dictionary of parsed GNTP Headers\n\t\"\"\"\n\td = {}\n\tfor line in data.split('\\r\\n'):\n\t\tmatch = GNTP_HEADER.match(line)\n\t\tif not match:\n\t\t\tcontinue\n\n\t\tkey = match.group(1).strip()\n\t\tval = match.group(2).strip()\n\t\td[key] = val\n\treturn d\n" ]
class GNTPRegister(_GNTPBase): """Represents a GNTP Registration Command :param string data: (Optional) See decode() :param string password: (Optional) Password to use while encoding/decoding messages """ _requiredHeaders = [ 'Application-Name', 'Notifications-Count' ] _requiredNotificationHeaders = ['Notification-Name'] def __init__(self, data=None, password=None): _GNTPBase.__init__(self, 'REGISTER') self.notifications = [] if data: self.decode(data, password) else: self.set_password(password) self.add_header('Application-Name', 'pygntp') self.add_header('Notifications-Count', 0) def validate(self): '''Validate required headers and validate notification headers''' for header in self._requiredHeaders: if not self.headers.get(header, False): raise errors.ParseError('Missing Registration Header: ' + header) for notice in self.notifications: for header in self._requiredNotificationHeaders: if not notice.get(header, False): raise errors.ParseError('Missing Notification Header: ' + header) def add_notification(self, name, enabled=True): """Add new Notification to Registration message :param string name: Notification Name :param boolean enabled: Enable this notification by default """ notice = {} notice['Notification-Name'] = name notice['Notification-Enabled'] = enabled self.notifications.append(notice) self.add_header('Notifications-Count', len(self.notifications)) def encode(self): """Encode a GNTP Registration Message :return string: Encoded GNTP Registration message. Returned as a byte string """ buff = _GNTPBuffer() buff.writeln(self._format_info()) #Headers for k, v in self.headers.items(): buff.writeheader(k, v) buff.writeln() #Notifications if len(self.notifications) > 0: for notice in self.notifications: for k, v in notice.items(): buff.writeheader(k, v) buff.writeln() #Resources for resource, data in self.resources.items(): buff.writeheader('Identifier', resource) buff.writeheader('Length', len(data)) buff.writeln() buff.write(data) buff.writeln() buff.writeln() return buff.getvalue()
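GNTPRegister.decode walks every block after the first and classifies it as a notification (Notification-Name present) or a binary resource (Identifier present). The sketch below covers only the notification path, on a hand-built REGISTER message, without the gntp classes.

```python
raw = ('GNTP/1.0 REGISTER NONE\r\n'
       'Application-Name: demo\r\n'
       'Notifications-Count: 2\r\n'
       '\r\n'
       'Notification-Name: ping\r\n'
       'Notification-Enabled: True\r\n'
       '\r\n'
       'Notification-Name: alert\r\n'
       '\r\n')

blocks = raw.split('\r\n\r\n')
headers, notifications = {}, []
for i, block in enumerate(blocks):
    if not block.strip():
        continue
    parsed = dict(line.split(':', 1) for line in block.splitlines() if ':' in line)
    parsed = {k.strip(): v.strip() for k, v in parsed.items()}
    if i == 0:
        headers = parsed                      # info line has no ':' and is skipped
    elif 'Notification-Name' in parsed:
        notifications.append(parsed)
print(headers['Application-Name'], [n['Notification-Name'] for n in notifications])
# -> demo ['ping', 'alert']
```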
kfdm/gntp
gntp/core.py
GNTPRegister.add_notification
python
def add_notification(self, name, enabled=True):
    notice = {}
    notice['Notification-Name'] = name
    notice['Notification-Enabled'] = enabled

    self.notifications.append(notice)
    self.add_header('Notifications-Count', len(self.notifications))
Add new Notification to Registration message :param string name: Notification Name :param boolean enabled: Enable this notification by default
train
https://github.com/kfdm/gntp/blob/772a5f4db3707ea0253691d930bf648d1344913a/gntp/core.py#L341-L352
[ "def add_header(self, key, value):\n\tself.headers[key] = value\n" ]
class GNTPRegister(_GNTPBase): """Represents a GNTP Registration Command :param string data: (Optional) See decode() :param string password: (Optional) Password to use while encoding/decoding messages """ _requiredHeaders = [ 'Application-Name', 'Notifications-Count' ] _requiredNotificationHeaders = ['Notification-Name'] def __init__(self, data=None, password=None): _GNTPBase.__init__(self, 'REGISTER') self.notifications = [] if data: self.decode(data, password) else: self.set_password(password) self.add_header('Application-Name', 'pygntp') self.add_header('Notifications-Count', 0) def validate(self): '''Validate required headers and validate notification headers''' for header in self._requiredHeaders: if not self.headers.get(header, False): raise errors.ParseError('Missing Registration Header: ' + header) for notice in self.notifications: for header in self._requiredNotificationHeaders: if not notice.get(header, False): raise errors.ParseError('Missing Notification Header: ' + header) def decode(self, data, password): """Decode existing GNTP Registration message :param string data: Message to decode """ self.raw = gntp.shim.u(data) parts = self.raw.split('\r\n\r\n') self.info = self._parse_info(self.raw) self._validate_password(password) self.headers = self._parse_dict(parts[0]) for i, part in enumerate(parts): if i == 0: continue # Skip Header if part.strip() == '': continue notice = self._parse_dict(part) if notice.get('Notification-Name', False): self.notifications.append(notice) elif notice.get('Identifier', False): notice['Data'] = self._decode_binary(part, notice) #open('register.png','wblol').write(notice['Data']) self.resources[notice.get('Identifier')] = notice def encode(self): """Encode a GNTP Registration Message :return string: Encoded GNTP Registration message. Returned as a byte string """ buff = _GNTPBuffer() buff.writeln(self._format_info()) #Headers for k, v in self.headers.items(): buff.writeheader(k, v) buff.writeln() #Notifications if len(self.notifications) > 0: for notice in self.notifications: for k, v in notice.items(): buff.writeheader(k, v) buff.writeln() #Resources for resource, data in self.resources.items(): buff.writeheader('Identifier', resource) buff.writeheader('Length', len(data)) buff.writeln() buff.write(data) buff.writeln() buff.writeln() return buff.getvalue()
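add_notification appends the notification dict and keeps the Notifications-Count header in sync. A short usage sketch, assuming the gntp package is installed and importable as gntp.core.

```python
# Assumes the gntp package is importable; GNTPRegister, add_header and
# add_notification are taken directly from the records above.
from gntp.core import GNTPRegister

register = GNTPRegister(password=None)
register.add_header('Application-Name', 'demo-app')
register.add_notification('New Mail', enabled=True)
register.add_notification('Build Finished', enabled=False)

# Notifications-Count is kept in sync automatically.
print(register.headers['Notifications-Count'])   # -> 2
```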
kfdm/gntp
gntp/core.py
GNTPRegister.encode
python
def encode(self): buff = _GNTPBuffer() buff.writeln(self._format_info()) #Headers for k, v in self.headers.items(): buff.writeheader(k, v) buff.writeln() #Notifications if len(self.notifications) > 0: for notice in self.notifications: for k, v in notice.items(): buff.writeheader(k, v) buff.writeln() #Resources for resource, data in self.resources.items(): buff.writeheader('Identifier', resource) buff.writeheader('Length', len(data)) buff.writeln() buff.write(data) buff.writeln() buff.writeln() return buff.getvalue()
Encode a GNTP Registration Message :return string: Encoded GNTP Registration message. Returned as a byte string
train
https://github.com/kfdm/gntp/blob/772a5f4db3707ea0253691d930bf648d1344913a/gntp/core.py#L354-L385
[ "def writeln(self, value=None):\n\tif value:\n\t\tself.write(gntp.shim.b(value))\n\tself.write(GNTP_EOL)\n", "def writeheader(self, key, value):\n\tif not isinstance(value, str):\n\t\tvalue = str(value)\n\tself.write(gntp.shim.b(key))\n\tself.write(GNTP_SEP)\n\tself.write(gntp.shim.b(value))\n\tself.write(GNTP_EOL)\n", "def _format_info(self):\n\t\"\"\"Generate info line for GNTP Message\n\n\t:return string:\n\t\"\"\"\n\tinfo = 'GNTP/%s %s' % (\n\t\tself.info.get('version'),\n\t\tself.info.get('messagetype'),\n\t)\n\tif self.info.get('encryptionAlgorithmID', None):\n\t\tinfo += ' %s:%s' % (\n\t\t\tself.info.get('encryptionAlgorithmID'),\n\t\t\tself.info.get('ivValue'),\n\t\t)\n\telse:\n\t\tinfo += ' NONE'\n\n\tif self.info.get('keyHashAlgorithmID', None):\n\t\tinfo += ' %s:%s.%s' % (\n\t\t\tself.info.get('keyHashAlgorithmID'),\n\t\t\tself.info.get('keyHash'),\n\t\t\tself.info.get('salt')\n\t\t)\n\n\treturn info\n" ]
class GNTPRegister(_GNTPBase): """Represents a GNTP Registration Command :param string data: (Optional) See decode() :param string password: (Optional) Password to use while encoding/decoding messages """ _requiredHeaders = [ 'Application-Name', 'Notifications-Count' ] _requiredNotificationHeaders = ['Notification-Name'] def __init__(self, data=None, password=None): _GNTPBase.__init__(self, 'REGISTER') self.notifications = [] if data: self.decode(data, password) else: self.set_password(password) self.add_header('Application-Name', 'pygntp') self.add_header('Notifications-Count', 0) def validate(self): '''Validate required headers and validate notification headers''' for header in self._requiredHeaders: if not self.headers.get(header, False): raise errors.ParseError('Missing Registration Header: ' + header) for notice in self.notifications: for header in self._requiredNotificationHeaders: if not notice.get(header, False): raise errors.ParseError('Missing Notification Header: ' + header) def decode(self, data, password): """Decode existing GNTP Registration message :param string data: Message to decode """ self.raw = gntp.shim.u(data) parts = self.raw.split('\r\n\r\n') self.info = self._parse_info(self.raw) self._validate_password(password) self.headers = self._parse_dict(parts[0]) for i, part in enumerate(parts): if i == 0: continue # Skip Header if part.strip() == '': continue notice = self._parse_dict(part) if notice.get('Notification-Name', False): self.notifications.append(notice) elif notice.get('Identifier', False): notice['Data'] = self._decode_binary(part, notice) #open('register.png','wblol').write(notice['Data']) self.resources[notice.get('Identifier')] = notice def add_notification(self, name, enabled=True): """Add new Notification to Registration message :param string name: Notification Name :param boolean enabled: Enable this notification by default """ notice = {} notice['Notification-Name'] = name notice['Notification-Enabled'] = enabled self.notifications.append(notice) self.add_header('Notifications-Count', len(self.notifications))
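A rough illustration of what encode() produces; the exact bytes depend on the protocol version and password settings, so this sketch (assuming no password is set) only checks the CRLF framing:

import gntp.core

msg = gntp.core.GNTPRegister()
msg.add_header('Application-Name', 'MyApp')
msg.add_notification('Build Finished')
wire = msg.encode()                  # byte string, one header per CRLF-terminated line
assert wire.startswith(b'GNTP/')     # the info line from _format_info() comes first
assert wire.endswith(b'\r\n\r\n')    # a blank line terminates the final block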
kfdm/gntp
gntp/notifier.py
mini
python
def mini(description, applicationName='PythonMini', noteType="Message", title="Mini Message", applicationIcon=None, hostname='localhost', password=None, port=23053, sticky=False, priority=None, callback=None, notificationIcon=None, identifier=None, notifierFactory=GrowlNotifier): try: growl = notifierFactory( applicationName=applicationName, notifications=[noteType], defaultNotifications=[noteType], applicationIcon=applicationIcon, hostname=hostname, password=password, port=port, ) result = growl.register() if result is not True: return result return growl.notify( noteType=noteType, title=title, description=description, icon=notificationIcon, sticky=sticky, priority=priority, callback=callback, identifier=identifier, ) except Exception: # We want the "mini" function to be simple and swallow Exceptions # in order to be less invasive logger.exception("Growl error")
Single notification function Simple notification function in one line. Has only one required parameter and attempts to use reasonable defaults for everything else :param string description: Notification message .. warning:: For now, only URL callbacks are supported. In the future, the callback argument will also support a function
train
https://github.com/kfdm/gntp/blob/772a5f4db3707ea0253691d930bf648d1344913a/gntp/notifier.py#L218-L260
[ "def register(self):\n\t\"\"\"Send GNTP Registration\n\n\t.. warning::\n\t\tBefore sending notifications to Growl, you need to have\n\t\tsent a registration message at least once\n\t\"\"\"\n\tlogger.info('Sending registration to %s:%s', self.hostname, self.port)\n\tregister = gntp.core.GNTPRegister()\n\tregister.add_header('Application-Name', self.applicationName)\n\tfor notification in self.notifications:\n\t\tenabled = notification in self.defaultNotifications\n\t\tregister.add_notification(notification, enabled)\n\tif self.applicationIcon:\n\t\tif self._checkIcon(self.applicationIcon):\n\t\t\tregister.add_header('Application-Icon', self.applicationIcon)\n\t\telse:\n\t\t\tresource = register.add_resource(self.applicationIcon)\n\t\t\tregister.add_header('Application-Icon', resource)\n\tif self.password:\n\t\tregister.set_password(self.password, self.passwordHash)\n\tself.add_origin_info(register)\n\tself.register_hook(register)\n\treturn self._send('register', register)\n", "def notify(self, noteType, title, description, icon=None, sticky=False,\n\t\tpriority=None, callback=None, identifier=None, custom={}):\n\t\"\"\"Send a GNTP notifications\n\n\t.. warning::\n\t\tMust have registered with growl beforehand or messages will be ignored\n\n\t:param string noteType: One of the notification names registered earlier\n\t:param string title: Notification title (usually displayed on the notification)\n\t:param string description: The main content of the notification\n\t:param string icon: Icon URL path\n\t:param boolean sticky: Sticky notification\n\t:param integer priority: Message priority level from -2 to 2\n\t:param string callback: URL callback\n\t:param dict custom: Custom attributes. Key names should be prefixed with X-\n\t\taccording to the spec but this is not enforced by this class\n\n\t.. warning::\n\t\tFor now, only URL callbacks are supported. In the future, the\n\t\tcallback argument will also support a function\n\t\"\"\"\n\tlogger.info('Sending notification [%s] to %s:%s', noteType, self.hostname, self.port)\n\tassert noteType in self.notifications\n\tnotice = gntp.core.GNTPNotice()\n\tnotice.add_header('Application-Name', self.applicationName)\n\tnotice.add_header('Notification-Name', noteType)\n\tnotice.add_header('Notification-Title', title)\n\tif self.password:\n\t\tnotice.set_password(self.password, self.passwordHash)\n\tif sticky:\n\t\tnotice.add_header('Notification-Sticky', sticky)\n\tif priority:\n\t\tnotice.add_header('Notification-Priority', priority)\n\tif icon:\n\t\tif self._checkIcon(icon):\n\t\t\tnotice.add_header('Notification-Icon', icon)\n\t\telse:\n\t\t\tresource = notice.add_resource(icon)\n\t\t\tnotice.add_header('Notification-Icon', resource)\n\n\tif description:\n\t\tnotice.add_header('Notification-Text', description)\n\tif callback:\n\t\tnotice.add_header('Notification-Callback-Target', callback)\n\tif identifier:\n\t\tnotice.add_header('Notification-Coalescing-ID', identifier)\n\n\tfor key in custom:\n\t\tnotice.add_header(key, custom[key])\n\n\tself.add_origin_info(notice)\n\tself.notify_hook(notice)\n\n\treturn self._send('notify', notice)\n" ]
# Copyright: 2013 Paul Traylor # These sources are released under the terms of the MIT license: see LICENSE """ The gntp.notifier module is provided as a simple way to send notifications using GNTP .. note:: This class is intended to mostly mirror the older Python bindings such that you should be able to replace instances of the old bindings with this class. `Original Python bindings <http://code.google.com/p/growl/source/browse/Bindings/python/Growl.py>`_ """ import logging import platform import socket import sys from gntp.version import __version__ import gntp.core import gntp.errors as errors import gntp.shim __all__ = [ 'mini', 'GrowlNotifier', ] logger = logging.getLogger(__name__) class GrowlNotifier(object): """Helper class to simplfy sending Growl messages :param string applicationName: Sending application name :param list notification: List of valid notifications :param list defaultNotifications: List of notifications that should be enabled by default :param string applicationIcon: Icon URL :param string hostname: Remote host :param integer port: Remote port """ passwordHash = 'MD5' socketTimeout = 3 def __init__(self, applicationName='Python GNTP', notifications=[], defaultNotifications=None, applicationIcon=None, hostname='localhost', password=None, port=23053): self.applicationName = applicationName self.notifications = list(notifications) if defaultNotifications: self.defaultNotifications = list(defaultNotifications) else: self.defaultNotifications = self.notifications self.applicationIcon = applicationIcon self.password = password self.hostname = hostname self.port = int(port) def _checkIcon(self, data): ''' Check the icon to see if it's valid If it's a simple URL icon, then we return True. If it's a data icon then we return False ''' logger.info('Checking icon') return gntp.shim.u(data)[:4] in ['http', 'file'] def register(self): """Send GNTP Registration .. warning:: Before sending notifications to Growl, you need to have sent a registration message at least once """ logger.info('Sending registration to %s:%s', self.hostname, self.port) register = gntp.core.GNTPRegister() register.add_header('Application-Name', self.applicationName) for notification in self.notifications: enabled = notification in self.defaultNotifications register.add_notification(notification, enabled) if self.applicationIcon: if self._checkIcon(self.applicationIcon): register.add_header('Application-Icon', self.applicationIcon) else: resource = register.add_resource(self.applicationIcon) register.add_header('Application-Icon', resource) if self.password: register.set_password(self.password, self.passwordHash) self.add_origin_info(register) self.register_hook(register) return self._send('register', register) def notify(self, noteType, title, description, icon=None, sticky=False, priority=None, callback=None, identifier=None, custom={}): """Send a GNTP notifications .. warning:: Must have registered with growl beforehand or messages will be ignored :param string noteType: One of the notification names registered earlier :param string title: Notification title (usually displayed on the notification) :param string description: The main content of the notification :param string icon: Icon URL path :param boolean sticky: Sticky notification :param integer priority: Message priority level from -2 to 2 :param string callback: URL callback :param dict custom: Custom attributes. Key names should be prefixed with X- according to the spec but this is not enforced by this class .. 
warning:: For now, only URL callbacks are supported. In the future, the callback argument will also support a function """ logger.info('Sending notification [%s] to %s:%s', noteType, self.hostname, self.port) assert noteType in self.notifications notice = gntp.core.GNTPNotice() notice.add_header('Application-Name', self.applicationName) notice.add_header('Notification-Name', noteType) notice.add_header('Notification-Title', title) if self.password: notice.set_password(self.password, self.passwordHash) if sticky: notice.add_header('Notification-Sticky', sticky) if priority: notice.add_header('Notification-Priority', priority) if icon: if self._checkIcon(icon): notice.add_header('Notification-Icon', icon) else: resource = notice.add_resource(icon) notice.add_header('Notification-Icon', resource) if description: notice.add_header('Notification-Text', description) if callback: notice.add_header('Notification-Callback-Target', callback) if identifier: notice.add_header('Notification-Coalescing-ID', identifier) for key in custom: notice.add_header(key, custom[key]) self.add_origin_info(notice) self.notify_hook(notice) return self._send('notify', notice) def subscribe(self, id, name, port): """Send a Subscribe request to a remote machine""" sub = gntp.core.GNTPSubscribe() sub.add_header('Subscriber-ID', id) sub.add_header('Subscriber-Name', name) sub.add_header('Subscriber-Port', port) if self.password: sub.set_password(self.password, self.passwordHash) self.add_origin_info(sub) self.subscribe_hook(sub) return self._send('subscribe', sub) def add_origin_info(self, packet): """Add optional Origin headers to message""" packet.add_header('Origin-Machine-Name', platform.node()) packet.add_header('Origin-Software-Name', 'gntp.py') packet.add_header('Origin-Software-Version', __version__) packet.add_header('Origin-Platform-Name', platform.system()) packet.add_header('Origin-Platform-Version', platform.platform()) def register_hook(self, packet): pass def notify_hook(self, packet): pass def subscribe_hook(self, packet): pass def _send(self, messagetype, packet): """Send the GNTP Packet""" packet.validate() data = packet.encode() logger.debug('To : %s:%s <%s>\n%s', self.hostname, self.port, packet.__class__, data) s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.settimeout(self.socketTimeout) try: s.connect((self.hostname, self.port)) s.send(data) recv_data = s.recv(1024) while not recv_data.endswith(gntp.shim.b("\r\n\r\n")): recv_data += s.recv(1024) except socket.error: # Python2.5 and Python3 compatibile exception exc = sys.exc_info()[1] raise errors.NetworkError(exc) response = gntp.core.parse_gntp(recv_data) s.close() logger.debug('From : %s:%s <%s>\n%s', self.hostname, self.port, response.__class__, response) if type(response) == gntp.core.GNTPOK: return True logger.error('Invalid response: %s', response.error()) return response.error() if __name__ == '__main__': # If we're running this module directly we're likely running it as a test # so extra debugging is useful logging.basicConfig(level=logging.INFO) mini('Testing mini notification')
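A hedged usage sketch of mini(): one call registers a throwaway application and sends a single message, assuming a Growl/GNTP server is listening on the default localhost:23053 (the application name and text below are placeholders):

import gntp.notifier

# Registers 'BuildBot' with the default "Message" notification type
# and immediately sends one notification; exceptions are swallowed by design.
gntp.notifier.mini('Build finished without errors',
                   applicationName='BuildBot',
                   title='CI status')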
kfdm/gntp
gntp/notifier.py
GrowlNotifier.register
python
def register(self): logger.info('Sending registration to %s:%s', self.hostname, self.port) register = gntp.core.GNTPRegister() register.add_header('Application-Name', self.applicationName) for notification in self.notifications: enabled = notification in self.defaultNotifications register.add_notification(notification, enabled) if self.applicationIcon: if self._checkIcon(self.applicationIcon): register.add_header('Application-Icon', self.applicationIcon) else: resource = register.add_resource(self.applicationIcon) register.add_header('Application-Icon', resource) if self.password: register.set_password(self.password, self.passwordHash) self.add_origin_info(register) self.register_hook(register) return self._send('register', register)
Send GNTP Registration .. warning:: Before sending notifications to Growl, you need to have sent a registration message at least once
train
https://github.com/kfdm/gntp/blob/772a5f4db3707ea0253691d930bf648d1344913a/gntp/notifier.py#L75-L98
[ "def set_password(self, password, encryptAlgo='MD5'):\n\t\"\"\"Set a password for a GNTP Message\n\n\t:param string password: Null to clear password\n\t:param string encryptAlgo: Supports MD5, SHA1, SHA256, SHA512\n\t\"\"\"\n\tif not password:\n\t\tself.info['encryptionAlgorithmID'] = None\n\t\tself.info['keyHashAlgorithm'] = None\n\t\treturn\n\n\tself.password = gntp.shim.b(password)\n\tself.encryptAlgo = encryptAlgo.upper()\n\n\tif not self.encryptAlgo in self.hash_algo:\n\t\traise errors.UnsupportedError('INVALID HASH \"%s\"' % self.encryptAlgo)\n\n\thashfunction = self.hash_algo.get(self.encryptAlgo)\n\n\tpassword = password.encode('utf8')\n\tseed = time.ctime().encode('utf8')\n\tsalt = hashfunction(seed).hexdigest()\n\tsaltHash = hashfunction(seed).digest()\n\tkeyBasis = password + saltHash\n\tkey = hashfunction(keyBasis).digest()\n\tkeyHash = hashfunction(key).hexdigest()\n\n\tself.info['keyHashAlgorithmID'] = self.encryptAlgo\n\tself.info['keyHash'] = keyHash.upper()\n\tself.info['salt'] = salt.upper()\n", "def add_header(self, key, value):\n\tself.headers[key] = value\n", "def add_resource(self, data):\n\t\"\"\"Add binary resource\n\n\t:param string data: Binary Data\n\t\"\"\"\n\tdata = gntp.shim.b(data)\n\tidentifier = hashlib.md5(data).hexdigest()\n\tself.resources[identifier] = data\n\treturn 'x-growl-resource://%s' % identifier\n", "def add_notification(self, name, enabled=True):\n\t\"\"\"Add new Notification to Registration message\n\n\t:param string name: Notification Name\n\t:param boolean enabled: Enable this notification by default\n\t\"\"\"\n\tnotice = {}\n\tnotice['Notification-Name'] = name\n\tnotice['Notification-Enabled'] = enabled\n\n\tself.notifications.append(notice)\n\tself.add_header('Notifications-Count', len(self.notifications))\n", "def _checkIcon(self, data):\n\t'''\n\tCheck the icon to see if it's valid\n\n\tIf it's a simple URL icon, then we return True. If it's a data icon\n\tthen we return False\n\t'''\n\tlogger.info('Checking icon')\n\n\treturn gntp.shim.u(data)[:4] in ['http', 'file']\n", "def add_origin_info(self, packet):\n\t\"\"\"Add optional Origin headers to message\"\"\"\n\tpacket.add_header('Origin-Machine-Name', platform.node())\n\tpacket.add_header('Origin-Software-Name', 'gntp.py')\n\tpacket.add_header('Origin-Software-Version', __version__)\n\tpacket.add_header('Origin-Platform-Name', platform.system())\n\tpacket.add_header('Origin-Platform-Version', platform.platform())\n", "def register_hook(self, packet):\n\tpass\n", "def _send(self, messagetype, packet):\n\t\"\"\"Send the GNTP Packet\"\"\"\n\n\tpacket.validate()\n\tdata = packet.encode()\n\n\tlogger.debug('To : %s:%s <%s>\\n%s', self.hostname, self.port, packet.__class__, data)\n\n\ts = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n\ts.settimeout(self.socketTimeout)\n\ttry:\n\t\ts.connect((self.hostname, self.port))\n\t\ts.send(data)\n\t\trecv_data = s.recv(1024)\n\t\twhile not recv_data.endswith(gntp.shim.b(\"\\r\\n\\r\\n\")):\n\t\t\trecv_data += s.recv(1024)\n\texcept socket.error:\n\t\t# Python2.5 and Python3 compatibile exception\n\t\texc = sys.exc_info()[1]\n\t\traise errors.NetworkError(exc)\n\n\tresponse = gntp.core.parse_gntp(recv_data)\n\ts.close()\n\n\tlogger.debug('From : %s:%s <%s>\\n%s', self.hostname, self.port, response.__class__, response)\n\n\tif type(response) == gntp.core.GNTPOK:\n\t\treturn True\n\tlogger.error('Invalid response: %s', response.error())\n\treturn response.error()\n" ]
class GrowlNotifier(object): """Helper class to simplfy sending Growl messages :param string applicationName: Sending application name :param list notification: List of valid notifications :param list defaultNotifications: List of notifications that should be enabled by default :param string applicationIcon: Icon URL :param string hostname: Remote host :param integer port: Remote port """ passwordHash = 'MD5' socketTimeout = 3 def __init__(self, applicationName='Python GNTP', notifications=[], defaultNotifications=None, applicationIcon=None, hostname='localhost', password=None, port=23053): self.applicationName = applicationName self.notifications = list(notifications) if defaultNotifications: self.defaultNotifications = list(defaultNotifications) else: self.defaultNotifications = self.notifications self.applicationIcon = applicationIcon self.password = password self.hostname = hostname self.port = int(port) def _checkIcon(self, data): ''' Check the icon to see if it's valid If it's a simple URL icon, then we return True. If it's a data icon then we return False ''' logger.info('Checking icon') return gntp.shim.u(data)[:4] in ['http', 'file'] def notify(self, noteType, title, description, icon=None, sticky=False, priority=None, callback=None, identifier=None, custom={}): """Send a GNTP notifications .. warning:: Must have registered with growl beforehand or messages will be ignored :param string noteType: One of the notification names registered earlier :param string title: Notification title (usually displayed on the notification) :param string description: The main content of the notification :param string icon: Icon URL path :param boolean sticky: Sticky notification :param integer priority: Message priority level from -2 to 2 :param string callback: URL callback :param dict custom: Custom attributes. Key names should be prefixed with X- according to the spec but this is not enforced by this class .. warning:: For now, only URL callbacks are supported. 
In the future, the callback argument will also support a function """ logger.info('Sending notification [%s] to %s:%s', noteType, self.hostname, self.port) assert noteType in self.notifications notice = gntp.core.GNTPNotice() notice.add_header('Application-Name', self.applicationName) notice.add_header('Notification-Name', noteType) notice.add_header('Notification-Title', title) if self.password: notice.set_password(self.password, self.passwordHash) if sticky: notice.add_header('Notification-Sticky', sticky) if priority: notice.add_header('Notification-Priority', priority) if icon: if self._checkIcon(icon): notice.add_header('Notification-Icon', icon) else: resource = notice.add_resource(icon) notice.add_header('Notification-Icon', resource) if description: notice.add_header('Notification-Text', description) if callback: notice.add_header('Notification-Callback-Target', callback) if identifier: notice.add_header('Notification-Coalescing-ID', identifier) for key in custom: notice.add_header(key, custom[key]) self.add_origin_info(notice) self.notify_hook(notice) return self._send('notify', notice) def subscribe(self, id, name, port): """Send a Subscribe request to a remote machine""" sub = gntp.core.GNTPSubscribe() sub.add_header('Subscriber-ID', id) sub.add_header('Subscriber-Name', name) sub.add_header('Subscriber-Port', port) if self.password: sub.set_password(self.password, self.passwordHash) self.add_origin_info(sub) self.subscribe_hook(sub) return self._send('subscribe', sub) def add_origin_info(self, packet): """Add optional Origin headers to message""" packet.add_header('Origin-Machine-Name', platform.node()) packet.add_header('Origin-Software-Name', 'gntp.py') packet.add_header('Origin-Software-Version', __version__) packet.add_header('Origin-Platform-Name', platform.system()) packet.add_header('Origin-Platform-Version', platform.platform()) def register_hook(self, packet): pass def notify_hook(self, packet): pass def subscribe_hook(self, packet): pass def _send(self, messagetype, packet): """Send the GNTP Packet""" packet.validate() data = packet.encode() logger.debug('To : %s:%s <%s>\n%s', self.hostname, self.port, packet.__class__, data) s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.settimeout(self.socketTimeout) try: s.connect((self.hostname, self.port)) s.send(data) recv_data = s.recv(1024) while not recv_data.endswith(gntp.shim.b("\r\n\r\n")): recv_data += s.recv(1024) except socket.error: # Python2.5 and Python3 compatibile exception exc = sys.exc_info()[1] raise errors.NetworkError(exc) response = gntp.core.parse_gntp(recv_data) s.close() logger.debug('From : %s:%s <%s>\n%s', self.hostname, self.port, response.__class__, response) if type(response) == gntp.core.GNTPOK: return True logger.error('Invalid response: %s', response.error()) return response.error()
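A sketch of the register-then-notify flow the warning above describes, using only the constructor and method signatures shown in this record (all names are placeholders):

import gntp.notifier

growl = gntp.notifier.GrowlNotifier(
    applicationName='BuildBot',
    notifications=['Build Finished', 'Build Failed'],
    defaultNotifications=['Build Finished'],   # only this one is enabled by default
)
growl.register()                               # must happen before notify()
growl.notify(noteType='Build Finished',
             title='CI status',
             description='All tests passed')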
kfdm/gntp
gntp/notifier.py
GrowlNotifier.notify
python
def notify(self, noteType, title, description, icon=None, sticky=False, priority=None, callback=None, identifier=None, custom={}): logger.info('Sending notification [%s] to %s:%s', noteType, self.hostname, self.port) assert noteType in self.notifications notice = gntp.core.GNTPNotice() notice.add_header('Application-Name', self.applicationName) notice.add_header('Notification-Name', noteType) notice.add_header('Notification-Title', title) if self.password: notice.set_password(self.password, self.passwordHash) if sticky: notice.add_header('Notification-Sticky', sticky) if priority: notice.add_header('Notification-Priority', priority) if icon: if self._checkIcon(icon): notice.add_header('Notification-Icon', icon) else: resource = notice.add_resource(icon) notice.add_header('Notification-Icon', resource) if description: notice.add_header('Notification-Text', description) if callback: notice.add_header('Notification-Callback-Target', callback) if identifier: notice.add_header('Notification-Coalescing-ID', identifier) for key in custom: notice.add_header(key, custom[key]) self.add_origin_info(notice) self.notify_hook(notice) return self._send('notify', notice)
Send a GNTP notification .. warning:: Must have registered with Growl beforehand or messages will be ignored :param string noteType: One of the notification names registered earlier :param string title: Notification title (usually displayed on the notification) :param string description: The main content of the notification :param string icon: Icon URL path :param boolean sticky: Sticky notification :param integer priority: Message priority level from -2 to 2 :param string callback: URL callback :param dict custom: Custom attributes. Key names should be prefixed with X- according to the spec but this is not enforced by this class .. warning:: For now, only URL callbacks are supported. In the future, the callback argument will also support a function
train
https://github.com/kfdm/gntp/blob/772a5f4db3707ea0253691d930bf648d1344913a/gntp/notifier.py#L100-L153
[ "def set_password(self, password, encryptAlgo='MD5'):\n\t\"\"\"Set a password for a GNTP Message\n\n\t:param string password: Null to clear password\n\t:param string encryptAlgo: Supports MD5, SHA1, SHA256, SHA512\n\t\"\"\"\n\tif not password:\n\t\tself.info['encryptionAlgorithmID'] = None\n\t\tself.info['keyHashAlgorithm'] = None\n\t\treturn\n\n\tself.password = gntp.shim.b(password)\n\tself.encryptAlgo = encryptAlgo.upper()\n\n\tif not self.encryptAlgo in self.hash_algo:\n\t\traise errors.UnsupportedError('INVALID HASH \"%s\"' % self.encryptAlgo)\n\n\thashfunction = self.hash_algo.get(self.encryptAlgo)\n\n\tpassword = password.encode('utf8')\n\tseed = time.ctime().encode('utf8')\n\tsalt = hashfunction(seed).hexdigest()\n\tsaltHash = hashfunction(seed).digest()\n\tkeyBasis = password + saltHash\n\tkey = hashfunction(keyBasis).digest()\n\tkeyHash = hashfunction(key).hexdigest()\n\n\tself.info['keyHashAlgorithmID'] = self.encryptAlgo\n\tself.info['keyHash'] = keyHash.upper()\n\tself.info['salt'] = salt.upper()\n", "def add_header(self, key, value):\n\tself.headers[key] = value\n", "def add_resource(self, data):\n\t\"\"\"Add binary resource\n\n\t:param string data: Binary Data\n\t\"\"\"\n\tdata = gntp.shim.b(data)\n\tidentifier = hashlib.md5(data).hexdigest()\n\tself.resources[identifier] = data\n\treturn 'x-growl-resource://%s' % identifier\n", "def _checkIcon(self, data):\n\t'''\n\tCheck the icon to see if it's valid\n\n\tIf it's a simple URL icon, then we return True. If it's a data icon\n\tthen we return False\n\t'''\n\tlogger.info('Checking icon')\n\n\treturn gntp.shim.u(data)[:4] in ['http', 'file']\n", "def add_origin_info(self, packet):\n\t\"\"\"Add optional Origin headers to message\"\"\"\n\tpacket.add_header('Origin-Machine-Name', platform.node())\n\tpacket.add_header('Origin-Software-Name', 'gntp.py')\n\tpacket.add_header('Origin-Software-Version', __version__)\n\tpacket.add_header('Origin-Platform-Name', platform.system())\n\tpacket.add_header('Origin-Platform-Version', platform.platform())\n", "def notify_hook(self, packet):\n\tpass\n", "def _send(self, messagetype, packet):\n\t\"\"\"Send the GNTP Packet\"\"\"\n\n\tpacket.validate()\n\tdata = packet.encode()\n\n\tlogger.debug('To : %s:%s <%s>\\n%s', self.hostname, self.port, packet.__class__, data)\n\n\ts = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n\ts.settimeout(self.socketTimeout)\n\ttry:\n\t\ts.connect((self.hostname, self.port))\n\t\ts.send(data)\n\t\trecv_data = s.recv(1024)\n\t\twhile not recv_data.endswith(gntp.shim.b(\"\\r\\n\\r\\n\")):\n\t\t\trecv_data += s.recv(1024)\n\texcept socket.error:\n\t\t# Python2.5 and Python3 compatibile exception\n\t\texc = sys.exc_info()[1]\n\t\traise errors.NetworkError(exc)\n\n\tresponse = gntp.core.parse_gntp(recv_data)\n\ts.close()\n\n\tlogger.debug('From : %s:%s <%s>\\n%s', self.hostname, self.port, response.__class__, response)\n\n\tif type(response) == gntp.core.GNTPOK:\n\t\treturn True\n\tlogger.error('Invalid response: %s', response.error())\n\treturn response.error()\n" ]
class GrowlNotifier(object): """Helper class to simplfy sending Growl messages :param string applicationName: Sending application name :param list notification: List of valid notifications :param list defaultNotifications: List of notifications that should be enabled by default :param string applicationIcon: Icon URL :param string hostname: Remote host :param integer port: Remote port """ passwordHash = 'MD5' socketTimeout = 3 def __init__(self, applicationName='Python GNTP', notifications=[], defaultNotifications=None, applicationIcon=None, hostname='localhost', password=None, port=23053): self.applicationName = applicationName self.notifications = list(notifications) if defaultNotifications: self.defaultNotifications = list(defaultNotifications) else: self.defaultNotifications = self.notifications self.applicationIcon = applicationIcon self.password = password self.hostname = hostname self.port = int(port) def _checkIcon(self, data): ''' Check the icon to see if it's valid If it's a simple URL icon, then we return True. If it's a data icon then we return False ''' logger.info('Checking icon') return gntp.shim.u(data)[:4] in ['http', 'file'] def register(self): """Send GNTP Registration .. warning:: Before sending notifications to Growl, you need to have sent a registration message at least once """ logger.info('Sending registration to %s:%s', self.hostname, self.port) register = gntp.core.GNTPRegister() register.add_header('Application-Name', self.applicationName) for notification in self.notifications: enabled = notification in self.defaultNotifications register.add_notification(notification, enabled) if self.applicationIcon: if self._checkIcon(self.applicationIcon): register.add_header('Application-Icon', self.applicationIcon) else: resource = register.add_resource(self.applicationIcon) register.add_header('Application-Icon', resource) if self.password: register.set_password(self.password, self.passwordHash) self.add_origin_info(register) self.register_hook(register) return self._send('register', register) def subscribe(self, id, name, port): """Send a Subscribe request to a remote machine""" sub = gntp.core.GNTPSubscribe() sub.add_header('Subscriber-ID', id) sub.add_header('Subscriber-Name', name) sub.add_header('Subscriber-Port', port) if self.password: sub.set_password(self.password, self.passwordHash) self.add_origin_info(sub) self.subscribe_hook(sub) return self._send('subscribe', sub) def add_origin_info(self, packet): """Add optional Origin headers to message""" packet.add_header('Origin-Machine-Name', platform.node()) packet.add_header('Origin-Software-Name', 'gntp.py') packet.add_header('Origin-Software-Version', __version__) packet.add_header('Origin-Platform-Name', platform.system()) packet.add_header('Origin-Platform-Version', platform.platform()) def register_hook(self, packet): pass def notify_hook(self, packet): pass def subscribe_hook(self, packet): pass def _send(self, messagetype, packet): """Send the GNTP Packet""" packet.validate() data = packet.encode() logger.debug('To : %s:%s <%s>\n%s', self.hostname, self.port, packet.__class__, data) s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.settimeout(self.socketTimeout) try: s.connect((self.hostname, self.port)) s.send(data) recv_data = s.recv(1024) while not recv_data.endswith(gntp.shim.b("\r\n\r\n")): recv_data += s.recv(1024) except socket.error: # Python2.5 and Python3 compatibile exception exc = sys.exc_info()[1] raise errors.NetworkError(exc) response = gntp.core.parse_gntp(recv_data) s.close() 
logger.debug('From : %s:%s <%s>\n%s', self.hostname, self.port, response.__class__, response) if type(response) == gntp.core.GNTPOK: return True logger.error('Invalid response: %s', response.error()) return response.error()
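A small sketch of notify() with a priority, a URL callback, and a custom X- header; the URL and header values are invented for illustration:

import gntp.notifier

growl = gntp.notifier.GrowlNotifier(applicationName='BuildBot',
                                    notifications=['Build Failed'])
growl.register()
growl.notify(
    noteType='Build Failed',
    title='CI status',
    description='2 tests failed',
    priority=2,                                   # range is -2 (lowest) to 2 (highest)
    callback='https://ci.example.com/build/42',   # URL callbacks only, per the docstring
    custom={'X-Build-Number': '42'},              # passed through verbatim as a header
)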
kfdm/gntp
gntp/notifier.py
GrowlNotifier.subscribe
python
def subscribe(self, id, name, port): sub = gntp.core.GNTPSubscribe() sub.add_header('Subscriber-ID', id) sub.add_header('Subscriber-Name', name) sub.add_header('Subscriber-Port', port) if self.password: sub.set_password(self.password, self.passwordHash) self.add_origin_info(sub) self.subscribe_hook(sub) return self._send('subscribe', sub)
Send a Subscribe request to a remote machine
train
https://github.com/kfdm/gntp/blob/772a5f4db3707ea0253691d930bf648d1344913a/gntp/notifier.py#L155-L167
[ "def set_password(self, password, encryptAlgo='MD5'):\n\t\"\"\"Set a password for a GNTP Message\n\n\t:param string password: Null to clear password\n\t:param string encryptAlgo: Supports MD5, SHA1, SHA256, SHA512\n\t\"\"\"\n\tif not password:\n\t\tself.info['encryptionAlgorithmID'] = None\n\t\tself.info['keyHashAlgorithm'] = None\n\t\treturn\n\n\tself.password = gntp.shim.b(password)\n\tself.encryptAlgo = encryptAlgo.upper()\n\n\tif not self.encryptAlgo in self.hash_algo:\n\t\traise errors.UnsupportedError('INVALID HASH \"%s\"' % self.encryptAlgo)\n\n\thashfunction = self.hash_algo.get(self.encryptAlgo)\n\n\tpassword = password.encode('utf8')\n\tseed = time.ctime().encode('utf8')\n\tsalt = hashfunction(seed).hexdigest()\n\tsaltHash = hashfunction(seed).digest()\n\tkeyBasis = password + saltHash\n\tkey = hashfunction(keyBasis).digest()\n\tkeyHash = hashfunction(key).hexdigest()\n\n\tself.info['keyHashAlgorithmID'] = self.encryptAlgo\n\tself.info['keyHash'] = keyHash.upper()\n\tself.info['salt'] = salt.upper()\n", "def add_header(self, key, value):\n\tself.headers[key] = value\n", "def add_origin_info(self, packet):\n\t\"\"\"Add optional Origin headers to message\"\"\"\n\tpacket.add_header('Origin-Machine-Name', platform.node())\n\tpacket.add_header('Origin-Software-Name', 'gntp.py')\n\tpacket.add_header('Origin-Software-Version', __version__)\n\tpacket.add_header('Origin-Platform-Name', platform.system())\n\tpacket.add_header('Origin-Platform-Version', platform.platform())\n", "def subscribe_hook(self, packet):\n\tpass\n", "def _send(self, messagetype, packet):\n\t\"\"\"Send the GNTP Packet\"\"\"\n\n\tpacket.validate()\n\tdata = packet.encode()\n\n\tlogger.debug('To : %s:%s <%s>\\n%s', self.hostname, self.port, packet.__class__, data)\n\n\ts = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n\ts.settimeout(self.socketTimeout)\n\ttry:\n\t\ts.connect((self.hostname, self.port))\n\t\ts.send(data)\n\t\trecv_data = s.recv(1024)\n\t\twhile not recv_data.endswith(gntp.shim.b(\"\\r\\n\\r\\n\")):\n\t\t\trecv_data += s.recv(1024)\n\texcept socket.error:\n\t\t# Python2.5 and Python3 compatibile exception\n\t\texc = sys.exc_info()[1]\n\t\traise errors.NetworkError(exc)\n\n\tresponse = gntp.core.parse_gntp(recv_data)\n\ts.close()\n\n\tlogger.debug('From : %s:%s <%s>\\n%s', self.hostname, self.port, response.__class__, response)\n\n\tif type(response) == gntp.core.GNTPOK:\n\t\treturn True\n\tlogger.error('Invalid response: %s', response.error())\n\treturn response.error()\n" ]
class GrowlNotifier(object): """Helper class to simplfy sending Growl messages :param string applicationName: Sending application name :param list notification: List of valid notifications :param list defaultNotifications: List of notifications that should be enabled by default :param string applicationIcon: Icon URL :param string hostname: Remote host :param integer port: Remote port """ passwordHash = 'MD5' socketTimeout = 3 def __init__(self, applicationName='Python GNTP', notifications=[], defaultNotifications=None, applicationIcon=None, hostname='localhost', password=None, port=23053): self.applicationName = applicationName self.notifications = list(notifications) if defaultNotifications: self.defaultNotifications = list(defaultNotifications) else: self.defaultNotifications = self.notifications self.applicationIcon = applicationIcon self.password = password self.hostname = hostname self.port = int(port) def _checkIcon(self, data): ''' Check the icon to see if it's valid If it's a simple URL icon, then we return True. If it's a data icon then we return False ''' logger.info('Checking icon') return gntp.shim.u(data)[:4] in ['http', 'file'] def register(self): """Send GNTP Registration .. warning:: Before sending notifications to Growl, you need to have sent a registration message at least once """ logger.info('Sending registration to %s:%s', self.hostname, self.port) register = gntp.core.GNTPRegister() register.add_header('Application-Name', self.applicationName) for notification in self.notifications: enabled = notification in self.defaultNotifications register.add_notification(notification, enabled) if self.applicationIcon: if self._checkIcon(self.applicationIcon): register.add_header('Application-Icon', self.applicationIcon) else: resource = register.add_resource(self.applicationIcon) register.add_header('Application-Icon', resource) if self.password: register.set_password(self.password, self.passwordHash) self.add_origin_info(register) self.register_hook(register) return self._send('register', register) def notify(self, noteType, title, description, icon=None, sticky=False, priority=None, callback=None, identifier=None, custom={}): """Send a GNTP notifications .. warning:: Must have registered with growl beforehand or messages will be ignored :param string noteType: One of the notification names registered earlier :param string title: Notification title (usually displayed on the notification) :param string description: The main content of the notification :param string icon: Icon URL path :param boolean sticky: Sticky notification :param integer priority: Message priority level from -2 to 2 :param string callback: URL callback :param dict custom: Custom attributes. Key names should be prefixed with X- according to the spec but this is not enforced by this class .. warning:: For now, only URL callbacks are supported. 
In the future, the callback argument will also support a function """ logger.info('Sending notification [%s] to %s:%s', noteType, self.hostname, self.port) assert noteType in self.notifications notice = gntp.core.GNTPNotice() notice.add_header('Application-Name', self.applicationName) notice.add_header('Notification-Name', noteType) notice.add_header('Notification-Title', title) if self.password: notice.set_password(self.password, self.passwordHash) if sticky: notice.add_header('Notification-Sticky', sticky) if priority: notice.add_header('Notification-Priority', priority) if icon: if self._checkIcon(icon): notice.add_header('Notification-Icon', icon) else: resource = notice.add_resource(icon) notice.add_header('Notification-Icon', resource) if description: notice.add_header('Notification-Text', description) if callback: notice.add_header('Notification-Callback-Target', callback) if identifier: notice.add_header('Notification-Coalescing-ID', identifier) for key in custom: notice.add_header(key, custom[key]) self.add_origin_info(notice) self.notify_hook(notice) return self._send('notify', notice) def add_origin_info(self, packet): """Add optional Origin headers to message""" packet.add_header('Origin-Machine-Name', platform.node()) packet.add_header('Origin-Software-Name', 'gntp.py') packet.add_header('Origin-Software-Version', __version__) packet.add_header('Origin-Platform-Name', platform.system()) packet.add_header('Origin-Platform-Version', platform.platform()) def register_hook(self, packet): pass def notify_hook(self, packet): pass def subscribe_hook(self, packet): pass def _send(self, messagetype, packet): """Send the GNTP Packet""" packet.validate() data = packet.encode() logger.debug('To : %s:%s <%s>\n%s', self.hostname, self.port, packet.__class__, data) s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.settimeout(self.socketTimeout) try: s.connect((self.hostname, self.port)) s.send(data) recv_data = s.recv(1024) while not recv_data.endswith(gntp.shim.b("\r\n\r\n")): recv_data += s.recv(1024) except socket.error: # Python2.5 and Python3 compatibile exception exc = sys.exc_info()[1] raise errors.NetworkError(exc) response = gntp.core.parse_gntp(recv_data) s.close() logger.debug('From : %s:%s <%s>\n%s', self.hostname, self.port, response.__class__, response) if type(response) == gntp.core.GNTPOK: return True logger.error('Invalid response: %s', response.error()) return response.error()
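A hedged sketch of subscribe(); the hub hostname, password, and subscriber details are placeholders, and each argument simply becomes the corresponding Subscriber-* header on the SUBSCRIBE message:

import uuid
import gntp.notifier

growl = gntp.notifier.GrowlNotifier(hostname='hub.example.com',
                                    password='secret')
growl.subscribe(id=str(uuid.uuid4()),   # becomes the Subscriber-ID header
                name='my-laptop',       # becomes the Subscriber-Name header
                port=23053)             # becomes the Subscriber-Port header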
kfdm/gntp
gntp/notifier.py
GrowlNotifier.add_origin_info
python
def add_origin_info(self, packet): packet.add_header('Origin-Machine-Name', platform.node()) packet.add_header('Origin-Software-Name', 'gntp.py') packet.add_header('Origin-Software-Version', __version__) packet.add_header('Origin-Platform-Name', platform.system()) packet.add_header('Origin-Platform-Version', platform.platform())
Add optional Origin headers to message
train
https://github.com/kfdm/gntp/blob/772a5f4db3707ea0253691d930bf648d1344913a/gntp/notifier.py#L169-L175
[ "def add_header(self, key, value):\n\tself.headers[key] = value\n" ]
class GrowlNotifier(object): """Helper class to simplfy sending Growl messages :param string applicationName: Sending application name :param list notification: List of valid notifications :param list defaultNotifications: List of notifications that should be enabled by default :param string applicationIcon: Icon URL :param string hostname: Remote host :param integer port: Remote port """ passwordHash = 'MD5' socketTimeout = 3 def __init__(self, applicationName='Python GNTP', notifications=[], defaultNotifications=None, applicationIcon=None, hostname='localhost', password=None, port=23053): self.applicationName = applicationName self.notifications = list(notifications) if defaultNotifications: self.defaultNotifications = list(defaultNotifications) else: self.defaultNotifications = self.notifications self.applicationIcon = applicationIcon self.password = password self.hostname = hostname self.port = int(port) def _checkIcon(self, data): ''' Check the icon to see if it's valid If it's a simple URL icon, then we return True. If it's a data icon then we return False ''' logger.info('Checking icon') return gntp.shim.u(data)[:4] in ['http', 'file'] def register(self): """Send GNTP Registration .. warning:: Before sending notifications to Growl, you need to have sent a registration message at least once """ logger.info('Sending registration to %s:%s', self.hostname, self.port) register = gntp.core.GNTPRegister() register.add_header('Application-Name', self.applicationName) for notification in self.notifications: enabled = notification in self.defaultNotifications register.add_notification(notification, enabled) if self.applicationIcon: if self._checkIcon(self.applicationIcon): register.add_header('Application-Icon', self.applicationIcon) else: resource = register.add_resource(self.applicationIcon) register.add_header('Application-Icon', resource) if self.password: register.set_password(self.password, self.passwordHash) self.add_origin_info(register) self.register_hook(register) return self._send('register', register) def notify(self, noteType, title, description, icon=None, sticky=False, priority=None, callback=None, identifier=None, custom={}): """Send a GNTP notifications .. warning:: Must have registered with growl beforehand or messages will be ignored :param string noteType: One of the notification names registered earlier :param string title: Notification title (usually displayed on the notification) :param string description: The main content of the notification :param string icon: Icon URL path :param boolean sticky: Sticky notification :param integer priority: Message priority level from -2 to 2 :param string callback: URL callback :param dict custom: Custom attributes. Key names should be prefixed with X- according to the spec but this is not enforced by this class .. warning:: For now, only URL callbacks are supported. 
In the future, the callback argument will also support a function """ logger.info('Sending notification [%s] to %s:%s', noteType, self.hostname, self.port) assert noteType in self.notifications notice = gntp.core.GNTPNotice() notice.add_header('Application-Name', self.applicationName) notice.add_header('Notification-Name', noteType) notice.add_header('Notification-Title', title) if self.password: notice.set_password(self.password, self.passwordHash) if sticky: notice.add_header('Notification-Sticky', sticky) if priority: notice.add_header('Notification-Priority', priority) if icon: if self._checkIcon(icon): notice.add_header('Notification-Icon', icon) else: resource = notice.add_resource(icon) notice.add_header('Notification-Icon', resource) if description: notice.add_header('Notification-Text', description) if callback: notice.add_header('Notification-Callback-Target', callback) if identifier: notice.add_header('Notification-Coalescing-ID', identifier) for key in custom: notice.add_header(key, custom[key]) self.add_origin_info(notice) self.notify_hook(notice) return self._send('notify', notice) def subscribe(self, id, name, port): """Send a Subscribe request to a remote machine""" sub = gntp.core.GNTPSubscribe() sub.add_header('Subscriber-ID', id) sub.add_header('Subscriber-Name', name) sub.add_header('Subscriber-Port', port) if self.password: sub.set_password(self.password, self.passwordHash) self.add_origin_info(sub) self.subscribe_hook(sub) return self._send('subscribe', sub) def register_hook(self, packet): pass def notify_hook(self, packet): pass def subscribe_hook(self, packet): pass def _send(self, messagetype, packet): """Send the GNTP Packet""" packet.validate() data = packet.encode() logger.debug('To : %s:%s <%s>\n%s', self.hostname, self.port, packet.__class__, data) s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.settimeout(self.socketTimeout) try: s.connect((self.hostname, self.port)) s.send(data) recv_data = s.recv(1024) while not recv_data.endswith(gntp.shim.b("\r\n\r\n")): recv_data += s.recv(1024) except socket.error: # Python2.5 and Python3 compatibile exception exc = sys.exc_info()[1] raise errors.NetworkError(exc) response = gntp.core.parse_gntp(recv_data) s.close() logger.debug('From : %s:%s <%s>\n%s', self.hostname, self.port, response.__class__, response) if type(response) == gntp.core.GNTPOK: return True logger.error('Invalid response: %s', response.error()) return response.error()
kfdm/gntp
gntp/notifier.py
GrowlNotifier._send
python
def _send(self, messagetype, packet): packet.validate() data = packet.encode() logger.debug('To : %s:%s <%s>\n%s', self.hostname, self.port, packet.__class__, data) s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.settimeout(self.socketTimeout) try: s.connect((self.hostname, self.port)) s.send(data) recv_data = s.recv(1024) while not recv_data.endswith(gntp.shim.b("\r\n\r\n")): recv_data += s.recv(1024) except socket.error: # Python2.5 and Python3 compatible exception exc = sys.exc_info()[1] raise errors.NetworkError(exc) response = gntp.core.parse_gntp(recv_data) s.close() logger.debug('From : %s:%s <%s>\n%s', self.hostname, self.port, response.__class__, response) if type(response) == gntp.core.GNTPOK: return True logger.error('Invalid response: %s', response.error()) return response.error()
Send the GNTP Packet
train
https://github.com/kfdm/gntp/blob/772a5f4db3707ea0253691d930bf648d1344913a/gntp/notifier.py#L186-L215
[ "def b(s):\n\tif isinstance(s, bytes):\n\t\treturn s\n\treturn s.encode('utf8', 'replace')\n", "def validate(self):\n\t\"\"\"Verify required headers\"\"\"\n\tfor header in self._requiredHeaders:\n\t\tif not self.headers.get(header, False):\n\t\t\traise errors.ParseError('Missing Notification Header: ' + header)\n", "def encode(self):\n\t\"\"\"Encode a generic GNTP Message\n\n\t:return string: GNTP Message ready to be sent. Returned as a byte string\n\t\"\"\"\n\n\tbuff = _GNTPBuffer()\n\n\tbuff.writeln(self._format_info())\n\n\t#Headers\n\tfor k, v in self.headers.items():\n\t\tbuff.writeheader(k, v)\n\tbuff.writeln()\n\n\t#Resources\n\tfor resource, data in self.resources.items():\n\t\tbuff.writeheader('Identifier', resource)\n\t\tbuff.writeheader('Length', len(data))\n\t\tbuff.writeln()\n\t\tbuff.write(data)\n\t\tbuff.writeln()\n\t\tbuff.writeln()\n\n\treturn buff.getvalue()\n" ]
class GrowlNotifier(object): """Helper class to simplfy sending Growl messages :param string applicationName: Sending application name :param list notification: List of valid notifications :param list defaultNotifications: List of notifications that should be enabled by default :param string applicationIcon: Icon URL :param string hostname: Remote host :param integer port: Remote port """ passwordHash = 'MD5' socketTimeout = 3 def __init__(self, applicationName='Python GNTP', notifications=[], defaultNotifications=None, applicationIcon=None, hostname='localhost', password=None, port=23053): self.applicationName = applicationName self.notifications = list(notifications) if defaultNotifications: self.defaultNotifications = list(defaultNotifications) else: self.defaultNotifications = self.notifications self.applicationIcon = applicationIcon self.password = password self.hostname = hostname self.port = int(port) def _checkIcon(self, data): ''' Check the icon to see if it's valid If it's a simple URL icon, then we return True. If it's a data icon then we return False ''' logger.info('Checking icon') return gntp.shim.u(data)[:4] in ['http', 'file'] def register(self): """Send GNTP Registration .. warning:: Before sending notifications to Growl, you need to have sent a registration message at least once """ logger.info('Sending registration to %s:%s', self.hostname, self.port) register = gntp.core.GNTPRegister() register.add_header('Application-Name', self.applicationName) for notification in self.notifications: enabled = notification in self.defaultNotifications register.add_notification(notification, enabled) if self.applicationIcon: if self._checkIcon(self.applicationIcon): register.add_header('Application-Icon', self.applicationIcon) else: resource = register.add_resource(self.applicationIcon) register.add_header('Application-Icon', resource) if self.password: register.set_password(self.password, self.passwordHash) self.add_origin_info(register) self.register_hook(register) return self._send('register', register) def notify(self, noteType, title, description, icon=None, sticky=False, priority=None, callback=None, identifier=None, custom={}): """Send a GNTP notifications .. warning:: Must have registered with growl beforehand or messages will be ignored :param string noteType: One of the notification names registered earlier :param string title: Notification title (usually displayed on the notification) :param string description: The main content of the notification :param string icon: Icon URL path :param boolean sticky: Sticky notification :param integer priority: Message priority level from -2 to 2 :param string callback: URL callback :param dict custom: Custom attributes. Key names should be prefixed with X- according to the spec but this is not enforced by this class .. warning:: For now, only URL callbacks are supported. 
In the future, the callback argument will also support a function """ logger.info('Sending notification [%s] to %s:%s', noteType, self.hostname, self.port) assert noteType in self.notifications notice = gntp.core.GNTPNotice() notice.add_header('Application-Name', self.applicationName) notice.add_header('Notification-Name', noteType) notice.add_header('Notification-Title', title) if self.password: notice.set_password(self.password, self.passwordHash) if sticky: notice.add_header('Notification-Sticky', sticky) if priority: notice.add_header('Notification-Priority', priority) if icon: if self._checkIcon(icon): notice.add_header('Notification-Icon', icon) else: resource = notice.add_resource(icon) notice.add_header('Notification-Icon', resource) if description: notice.add_header('Notification-Text', description) if callback: notice.add_header('Notification-Callback-Target', callback) if identifier: notice.add_header('Notification-Coalescing-ID', identifier) for key in custom: notice.add_header(key, custom[key]) self.add_origin_info(notice) self.notify_hook(notice) return self._send('notify', notice) def subscribe(self, id, name, port): """Send a Subscribe request to a remote machine""" sub = gntp.core.GNTPSubscribe() sub.add_header('Subscriber-ID', id) sub.add_header('Subscriber-Name', name) sub.add_header('Subscriber-Port', port) if self.password: sub.set_password(self.password, self.passwordHash) self.add_origin_info(sub) self.subscribe_hook(sub) return self._send('subscribe', sub) def add_origin_info(self, packet): """Add optional Origin headers to message""" packet.add_header('Origin-Machine-Name', platform.node()) packet.add_header('Origin-Software-Name', 'gntp.py') packet.add_header('Origin-Software-Version', __version__) packet.add_header('Origin-Platform-Name', platform.system()) packet.add_header('Origin-Platform-Version', platform.platform()) def register_hook(self, packet): pass def notify_hook(self, packet): pass def subscribe_hook(self, packet): pass
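The return contract of _send() (True for a GNTP OK response, otherwise the value of response.error()) is what callers such as mini() rely on; a minimal sketch of checking it explicitly:

import gntp.notifier

growl = gntp.notifier.GrowlNotifier(applicationName='BuildBot',
                                    notifications=['Build Finished'])
result = growl.register()            # ultimately goes through _send('register', ...)
if result is not True:
    # Anything other than True is the error information from the GNTP response.
    print('Registration was rejected:', result)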
eddieantonio/perfection
perfection/forest.py
ForestGraph.add_edge
python
def add_edge(self, edge): u, v = edge both_exist = u in self.vertices and v in self.vertices # Using `is` because if they belong to the same component, they MUST # share the same set object! if both_exist and self.components[u] is self.components[v]: # Both vertices are part of the same connected component. raise InvariantError('Adding %r would form a cycle' % (edge,)) if u == v: raise InvariantError('Cannot add loop: %r' % (edge,)) # Ensure the vertices exist in the graph. self.add_vertex(u) self.add_vertex(v) # Add the edges to each other. self._vertices[u].add(v) self._vertices[v].add(u) # Add all of the smaller components to the bigger one. smaller_component, bigger_component = self.sort_components(u, v) for vertex in smaller_component: bigger_component.add(vertex) # And with this assignment, say bye-bye to the smaller component. self.components[vertex] = bigger_component
Add edge (u, v) to the graph. Raises InvariantError if adding the edge would form a cycle.
train
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/forest.py#L61-L90
null
class ForestGraph(object): """ An acyclic graph comprising of one or more components. >>> graph = ForestGraph(edges=[(1, 2), (2, 3)]) >>> graph += (3, 4) >>> 4 in graph.vertices True >>> 5 in graph.vertices False >>> print(graph.to_dot()) graph { "1" -- "2"; "2" -- "3"; "3" -- "4"; } >>> set(graph.neighbours(2)) == {1, 3} True >>> set(graph.neighbours(1)) == {2} True >>> set(graph.neighbours(3)) == {2, 4} True """ def __init__(self, vertices=(), edges=()): # Each vertex is associated with a list of its neighbouring vertices. self._vertices = collections.defaultdict(set) # Each edge *may* be associated with an arbitrary value. self._edges = {} # Components is a dictionary of vertex -> to the set of all vertices # that comprise that component. Note that all vertex of the same # component share the exactly the SAME set instance! self.components = {} for edge in edges: self.add_edge(edge) def __iadd__(self, edge): self.add_edge(edge) return self def sort_components(self, u, v): return sorted((self.components[u], self.components[v]), key=len) def add_vertex(self, vertex): # Make a new component for the vertex, if the vertex doesn't exist # yet. self.components.setdefault(vertex, {vertex}) def to_dot(self, *args, **kwargs): return graph_as_dot(self.edges, *args, **kwargs) @property def edges(self): """ Edges of this graph, in canonical order. """ canonical_edges = set() for v1, neighbours in self._vertices.items(): for v2 in neighbours: edge = self.canonical_order((v1, v2)) canonical_edges.add(edge) return canonical_edges @property def vertices(self): """Set of all vertices in the graph.""" return self._vertices.keys() def neighbours(self, vertex): """ Yields all neighbours of the given vertex, in no particular order. """ return self._vertices[vertex] @staticmethod def canonical_order(edge): u, v = edge return edge if u < v else (v, u) def __repr__(self): cls_name = type(self).__name__ args = ', '.join(getattr(self, attr) for attr in ('vertices', 'edges')) return ''.join((cls_name, '(', args, ')'))
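A short sketch of the invariant add_edge enforces, assuming ForestGraph and InvariantError are both importable from perfection.forest:

from perfection.forest import ForestGraph, InvariantError

graph = ForestGraph(edges=[(1, 2), (2, 3)])
graph += (3, 4)               # still a forest, so this is accepted
try:
    graph.add_edge((1, 3))    # 1 and 3 are already connected through 2
except InvariantError:
    print('rejected: (1, 3) would close a cycle')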
eddieantonio/perfection
perfection/forest.py
ForestGraph.edges
python
def edges(self):
    canonical_edges = set()

    for v1, neighbours in self._vertices.items():
        for v2 in neighbours:
            edge = self.canonical_order((v1, v2))
            canonical_edges.add(edge)

    return canonical_edges
Edges of this graph, in canonical order.
train
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/forest.py#L104-L113
[ "def canonical_order(edge):\n u, v = edge\n return edge if u < v else (v, u)\n" ]
class ForestGraph(object): """ An acyclic graph comprising of one or more components. >>> graph = ForestGraph(edges=[(1, 2), (2, 3)]) >>> graph += (3, 4) >>> 4 in graph.vertices True >>> 5 in graph.vertices False >>> print(graph.to_dot()) graph { "1" -- "2"; "2" -- "3"; "3" -- "4"; } >>> set(graph.neighbours(2)) == {1, 3} True >>> set(graph.neighbours(1)) == {2} True >>> set(graph.neighbours(3)) == {2, 4} True """ def __init__(self, vertices=(), edges=()): # Each vertex is associated with a list of its neighbouring vertices. self._vertices = collections.defaultdict(set) # Each edge *may* be associated with an arbitrary value. self._edges = {} # Components is a dictionary of vertex -> to the set of all vertices # that comprise that component. Note that all vertex of the same # component share the exactly the SAME set instance! self.components = {} for edge in edges: self.add_edge(edge) def __iadd__(self, edge): self.add_edge(edge) return self def add_edge(self, edge): """ Add edge (u, v) to the graph. Raises InvariantError if adding the edge would form a cycle. """ u, v = edge both_exist = u in self.vertices and v in self.vertices # Using `is` because if they belong to the same component, they MUST # share the same set object! if both_exist and self.components[u] is self.components[v]: # Both vertices are part of the same connected component. raise InvariantError('Adding %r would form a cycle' % (edge,)) if u == v: raise InvariantError('Cannot add loop: %r' % (edge,)) # Ensure the vertices exist in the graph. self.add_vertex(u) self.add_vertex(v) # Add the edges to each other. self._vertices[u].add(v) self._vertices[v].add(u) # Add all of the smaller components to the bigger one. smaller_component, bigger_component = self.sort_components(u, v) for vertex in smaller_component: bigger_component.add(vertex) # And with this assignment, say bye-bye to the smaller component. self.components[vertex] = bigger_component def sort_components(self, u, v): return sorted((self.components[u], self.components[v]), key=len) def add_vertex(self, vertex): # Make a new component for the vertex, if the vertex doesn't exist # yet. self.components.setdefault(vertex, {vertex}) def to_dot(self, *args, **kwargs): return graph_as_dot(self.edges, *args, **kwargs) @property @property def vertices(self): """Set of all vertices in the graph.""" return self._vertices.keys() def neighbours(self, vertex): """ Yields all neighbours of the given vertex, in no particular order. """ return self._vertices[vertex] @staticmethod def canonical_order(edge): u, v = edge return edge if u < v else (v, u) def __repr__(self): cls_name = type(self).__name__ args = ', '.join(getattr(self, attr) for attr in ('vertices', 'edges')) return ''.join((cls_name, '(', args, ')'))
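The property above relies on `canonical_order` to collapse the two directed entries of each undirected edge into a single tuple. A minimal standalone sketch of that idea, independent of the repository classes:

# Standalone sketch: undirected adjacency stored in both directions,
# deduplicated by always ordering each edge as (min, max).
adjacency = {1: {2}, 2: {1, 3}, 3: {2}}

def canonical_order(edge):
    u, v = edge
    return edge if u < v else (v, u)

edges = {canonical_order((u, v)) for u, nbrs in adjacency.items() for v in nbrs}
print(sorted(edges))  # [(1, 2), (2, 3)] -- each undirected edge appears once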
eddieantonio/perfection
perfection/czech.py
ordered_deduplicate
python
def ordered_deduplicate(sequence):
    seen = set()
    # Micro optimization: each call to seen_add saves an extra attribute
    # lookup in most iterations of the loop.
    seen_add = seen.add
    return tuple(x for x in sequence if not (x in seen or seen_add(x)))
Returns the sequence as a tuple with the duplicates removed, preserving
input order. Any duplicates following the first occurrence are removed.

>>> ordered_deduplicate([1, 2, 3, 1, 32, 1, 2])
(1, 2, 3, 32)

Based on recipe from this StackOverflow post:
http://stackoverflow.com/a/480227
train
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/czech.py#L194-L212
null
#!/usr/bin/env python """ Use the Czech et al. method for generating minimal perfect hashes for strings. """ from __future__ import print_function import random import collections from . import forest from .utils import create_dict_subclass __all__ = ['hash_parameters', 'make_hash', 'make_pickable_hash', 'make_dict'] _info_fields = ('t1', 't2', 'g', 'indices') HashInfo = collections.namedtuple('HashInfo', _info_fields) class CzechHashBuilder(object): """ A helper class that (iteratively) stores all data needed to create a Czech hash. The entire hash generation algorithm occurs in __init__. >>> duplicated_input = 'guacala' >>> unique_len = len(set(duplicated_input)) >>> info = CzechHashBuilder(duplicated_input) >>> hf = info.hash_function >>> [hf(x) for x in 'lacug'] [4, 2, 3, 1, 0] """ def __init__(self, words, minimize=False): # Store the words as an immutable sequence. self.words = ordered_deduplicate(words) # TODO: Index minimization self.indices = list(range(len(words[0]))) # Each of the following steps add fields to `self`: # Mapping step: # - n, t1, t2, f1, f2, graph self.generate_acyclic_graph() # Assignment step: # - g self.assign() # Now hash_info will return the appropriate object. @property def hash_info(self): """ HashInfo tuple for the created hash. """ return HashInfo(*(getattr(self, key) for key in _info_fields)) @property def hash_function(self): """ Returns the hash function proper. Ensures that `self` is not bound to the returned closure. """ assert hasattr(self, 'f1') and hasattr(self, 'f2') # These are not just convenient aliases for the given # attributes; if `self` would creep into the returned closure, # that would ensure that a reference to this big, fat object # would be kept alive; hence, any hash function would carry # around all of the auxiliary state that was created during the # generation of the hash parameters. Omitting `self` ensures # this object has a chance to be garbage collected. f1, f2, g = self.f1, self.f2, self.g def czech_hash(word): v1 = f1(word) v2 = f2(word) return g[v1] + g[v2] return czech_hash # Algorithm steps. def generate_acyclic_graph(self): """ Generates an acyclic graph for the given words. Adds the graph, and a list of edge-word associations to the object. """ # Maximum length of each table, respectively. # Hardcoded n = cm, where c = 3 # There might be a good way to choose an appropriate C, # but [1] suggests the average amount of iterations needed # to generate an acyclic graph is sqrt(3). self.n = 3 * len(self.words) max_tries = len(self.words) ** 2 for trial in range(max_tries): try: self.generate_or_fail() except forest.InvariantError: continue else: # Generated successfully! self.trials_taken = trial + 1 return raise RuntimeError("Could not generate graph in " "{} tries".format(max_tries)) def generate_random_table(self): """ Generates random tables for given word lists. """ table = list(range(0, self.n)) random.shuffle(table) return table def generate_or_fail(self): """ Attempts to generate a random acyclic graph, raising an InvariantError if unable to. """ t1 = self.generate_random_table() t2 = self.generate_random_table() f1 = self.generate_func(t1) f2 = self.generate_func(t2) edges = [(f1(word), f2(word)) for word in self.words] # Try to generate that graph, mack! # Note that failure to generate the graph here should be caught # by the caller. graph = forest.ForestGraph(edges=edges) # Associate each edge with its corresponding word. 
associations = {} for num in range(len(self.words)): edge = edges[num] word = self.words[num] associations[graph.canonical_order(edge)] = (num, word) # Assign all of these to the object. for name in ('t1', 't2', 'f1', 'f2', 'graph', 'associations'): self.__dict__[name] = locals()[name] def generate_func(self, table): """ Generates a random table based mini-hashing function. """ # Ensure that `self` isn't suddenly in the closure... n = self.n def func(word): return sum(x * ord(c) for x, c in zip(table, word)) % n return func def assign(self): # Create an vector of empty assignments. # **g is 1-indexed!** self.g = [None] * (self.n + 1) # Assign all vertices. for vertex in self.graph.vertices: assert isinstance(vertex, int) and vertex <= self.n # This vertex has already been assigned. if self.g[vertex] is not None: continue self.g[vertex] = 0 self.assign_vertex(vertex) def assign_vertex(self, vertex): for neighbour in self.graph.neighbours(vertex): if self.g[neighbour] is not None: # This neighbour has already been assigned. continue # Get the associated edge number edge = self.graph.canonical_order((vertex, neighbour)) num, _word = self.associations[edge] # Assign this vertex such that # h(word) == g(vertex) + g(neighbour) self.g[neighbour] = num - self.g[vertex] self.assign_vertex(neighbour) # API functions ############################################################## def hash_parameters(words, minimize_indices=False): """ Gives hash parameters for the given set of words. >>> info = hash_parameters('sun mon tue wed thu fri sat'.split()) >>> len(info.t1) 21 >>> len(info.t2) 21 >>> len(info.g) # g values are 1-indexed... 22 """ # Ensure that we have an indexable sequence. words = tuple(words) # Delegate to the hash builder. return CzechHashBuilder(words).hash_info def make_hash(words, *args, **kwargs): """ Creates an ordered, minimal perfect hash function for the given sequence of words. >>> hf = make_hash(['sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat']) >>> hf('fri') 5 >>> hf('sun') 0 """ # Use the hash builder proper, because HashInfo is assumed to not # have the precious f1, and f2 attributes. return CzechHashBuilder(words, *args, **kwargs).hash_function class PickableHash: """ Provides a Hash function which can be transmitted using Spark """ def __init__(self, hb): assert isinstance(hb, CzechHashBuilder) self.n = hb.n self.g = hb.g self.t1 = hb.t1 self.t2 = hb.t2 def __mini_hashing(self, word, table): return sum(x * ord(c) for x, c in zip(table, word)) % self.n def czech_hash(self, word): v1 = self.__mini_hashing(word, self.t1) v2 = self.__mini_hashing(word, self.t2) return self.g[v1] + self.g[v2] def make_pickable_hash(words, *args, **kwargs): """ Creates an ordered, minimal perfect hash function for the given sequence of words. >>> hf = make_pickable_hash(['sun', 'mon', 'tue', 'wed', 'thu', ... 'fri', 'sat']) >>> hf('fri') 5 >>> hf('sun') 0 """ return PickableHash(CzechHashBuilder(words, *args, **kwargs)).czech_hash def make_dict(name, words, *args, **kwargs): """ make_dict(name, words, *args, **kwargs) -> mapping subclass Takes a sequence of words (or a pre-built Czech HashInfo) and returns a mapping subclass called `name` (used a dict) that employs the use of the minimal perfect hash. This mapping subclass has guaranteed O(1) worst-case lookups, additions, and deletions, however is slower than dict() in practice. 
>>> months = 'jan feb mar apr may jun jul aug sep oct nov dec'.split() >>> MyDict = make_dict('MyDict', months) >>> d = MyDict(dec=21, feb=None, may='hello') >>> d['jul'] = False >>> d MyDict([('feb', None), ('may', 'hello'), ('jul', False), ('dec', 21)]) >>> del d['may'] >>> del d['apr'] Traceback (most recent call last): ... KeyError: 'apr' >>> len(d) 3 """ info = CzechHashBuilder(words, *args, **kwargs) # Create a docstring that at least describes where the class came from... doc = """ Dictionary-like object that uses minimal perfect hashing, perserving original order. This class was generated by `%s.%s(%r, ...)`. """ % (__name__, make_dict.__name__, name) # Delegate to create_dict. return create_dict_subclass(name, info.hash_function, info.words, doc) def to_hash_info(unknown): if isinstance(unknown, HashInfo) or isinstance(unknown, CzechHashBuilder): # Unknown is a CzechHash. return unknown return HashInfo(unknown) def do_example(): import keyword words = keyword.kwlist hb = CzechHashBuilder(words) print('/*', hb.t1, hb.t2, hb.g, '*/') print(hb.graph.to_dot(edge_labels={ edge: '%d: %s' % assoc for edge, assoc in list(hb.associations.items()) })) if __name__ == '__main__': do_example()
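For context, the same order-preserving deduplication can be expressed with `dict.fromkeys` on Python 3.7+, where dicts preserve insertion order. A small comparison sketch, not part of the repository:

# Both forms keep the first occurrence of each item and drop later repeats.
items = [1, 2, 3, 1, 32, 1, 2]

seen = set()
via_set = tuple(x for x in items if not (x in seen or seen.add(x)))

via_dict = tuple(dict.fromkeys(items))  # relies on insertion-ordered dicts

assert via_set == via_dict == (1, 2, 3, 32)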
eddieantonio/perfection
perfection/czech.py
hash_parameters
python
def hash_parameters(words, minimize_indices=False):
    # Ensure that we have an indexable sequence.
    words = tuple(words)

    # Delegate to the hash builder.
    return CzechHashBuilder(words).hash_info
Gives hash parameters for the given set of words.

>>> info = hash_parameters('sun mon tue wed thu fri sat'.split())
>>> len(info.t1)
21
>>> len(info.t2)
21
>>> len(info.g)  # g values are 1-indexed...
22
train
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/czech.py#L217-L233
null
#!/usr/bin/env python """ Use the Czech et al. method for generating minimal perfect hashes for strings. """ from __future__ import print_function import random import collections from . import forest from .utils import create_dict_subclass __all__ = ['hash_parameters', 'make_hash', 'make_pickable_hash', 'make_dict'] _info_fields = ('t1', 't2', 'g', 'indices') HashInfo = collections.namedtuple('HashInfo', _info_fields) class CzechHashBuilder(object): """ A helper class that (iteratively) stores all data needed to create a Czech hash. The entire hash generation algorithm occurs in __init__. >>> duplicated_input = 'guacala' >>> unique_len = len(set(duplicated_input)) >>> info = CzechHashBuilder(duplicated_input) >>> hf = info.hash_function >>> [hf(x) for x in 'lacug'] [4, 2, 3, 1, 0] """ def __init__(self, words, minimize=False): # Store the words as an immutable sequence. self.words = ordered_deduplicate(words) # TODO: Index minimization self.indices = list(range(len(words[0]))) # Each of the following steps add fields to `self`: # Mapping step: # - n, t1, t2, f1, f2, graph self.generate_acyclic_graph() # Assignment step: # - g self.assign() # Now hash_info will return the appropriate object. @property def hash_info(self): """ HashInfo tuple for the created hash. """ return HashInfo(*(getattr(self, key) for key in _info_fields)) @property def hash_function(self): """ Returns the hash function proper. Ensures that `self` is not bound to the returned closure. """ assert hasattr(self, 'f1') and hasattr(self, 'f2') # These are not just convenient aliases for the given # attributes; if `self` would creep into the returned closure, # that would ensure that a reference to this big, fat object # would be kept alive; hence, any hash function would carry # around all of the auxiliary state that was created during the # generation of the hash parameters. Omitting `self` ensures # this object has a chance to be garbage collected. f1, f2, g = self.f1, self.f2, self.g def czech_hash(word): v1 = f1(word) v2 = f2(word) return g[v1] + g[v2] return czech_hash # Algorithm steps. def generate_acyclic_graph(self): """ Generates an acyclic graph for the given words. Adds the graph, and a list of edge-word associations to the object. """ # Maximum length of each table, respectively. # Hardcoded n = cm, where c = 3 # There might be a good way to choose an appropriate C, # but [1] suggests the average amount of iterations needed # to generate an acyclic graph is sqrt(3). self.n = 3 * len(self.words) max_tries = len(self.words) ** 2 for trial in range(max_tries): try: self.generate_or_fail() except forest.InvariantError: continue else: # Generated successfully! self.trials_taken = trial + 1 return raise RuntimeError("Could not generate graph in " "{} tries".format(max_tries)) def generate_random_table(self): """ Generates random tables for given word lists. """ table = list(range(0, self.n)) random.shuffle(table) return table def generate_or_fail(self): """ Attempts to generate a random acyclic graph, raising an InvariantError if unable to. """ t1 = self.generate_random_table() t2 = self.generate_random_table() f1 = self.generate_func(t1) f2 = self.generate_func(t2) edges = [(f1(word), f2(word)) for word in self.words] # Try to generate that graph, mack! # Note that failure to generate the graph here should be caught # by the caller. graph = forest.ForestGraph(edges=edges) # Associate each edge with its corresponding word. 
associations = {} for num in range(len(self.words)): edge = edges[num] word = self.words[num] associations[graph.canonical_order(edge)] = (num, word) # Assign all of these to the object. for name in ('t1', 't2', 'f1', 'f2', 'graph', 'associations'): self.__dict__[name] = locals()[name] def generate_func(self, table): """ Generates a random table based mini-hashing function. """ # Ensure that `self` isn't suddenly in the closure... n = self.n def func(word): return sum(x * ord(c) for x, c in zip(table, word)) % n return func def assign(self): # Create an vector of empty assignments. # **g is 1-indexed!** self.g = [None] * (self.n + 1) # Assign all vertices. for vertex in self.graph.vertices: assert isinstance(vertex, int) and vertex <= self.n # This vertex has already been assigned. if self.g[vertex] is not None: continue self.g[vertex] = 0 self.assign_vertex(vertex) def assign_vertex(self, vertex): for neighbour in self.graph.neighbours(vertex): if self.g[neighbour] is not None: # This neighbour has already been assigned. continue # Get the associated edge number edge = self.graph.canonical_order((vertex, neighbour)) num, _word = self.associations[edge] # Assign this vertex such that # h(word) == g(vertex) + g(neighbour) self.g[neighbour] = num - self.g[vertex] self.assign_vertex(neighbour) def ordered_deduplicate(sequence): """ Returns the sequence as a tuple with the duplicates removed, preserving input order. Any duplicates following the first occurrence are removed. >>> ordered_deduplicate([1, 2, 3, 1, 32, 1, 2]) (1, 2, 3, 32) Based on recipe from this StackOverflow post: http://stackoverflow.com/a/480227 """ seen = set() # Micro optimization: each call to seen_add saves an extra attribute # lookup in most iterations of the loop. seen_add = seen.add return tuple(x for x in sequence if not (x in seen or seen_add(x))) # API functions ############################################################## def make_hash(words, *args, **kwargs): """ Creates an ordered, minimal perfect hash function for the given sequence of words. >>> hf = make_hash(['sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat']) >>> hf('fri') 5 >>> hf('sun') 0 """ # Use the hash builder proper, because HashInfo is assumed to not # have the precious f1, and f2 attributes. return CzechHashBuilder(words, *args, **kwargs).hash_function class PickableHash: """ Provides a Hash function which can be transmitted using Spark """ def __init__(self, hb): assert isinstance(hb, CzechHashBuilder) self.n = hb.n self.g = hb.g self.t1 = hb.t1 self.t2 = hb.t2 def __mini_hashing(self, word, table): return sum(x * ord(c) for x, c in zip(table, word)) % self.n def czech_hash(self, word): v1 = self.__mini_hashing(word, self.t1) v2 = self.__mini_hashing(word, self.t2) return self.g[v1] + self.g[v2] def make_pickable_hash(words, *args, **kwargs): """ Creates an ordered, minimal perfect hash function for the given sequence of words. >>> hf = make_pickable_hash(['sun', 'mon', 'tue', 'wed', 'thu', ... 'fri', 'sat']) >>> hf('fri') 5 >>> hf('sun') 0 """ return PickableHash(CzechHashBuilder(words, *args, **kwargs)).czech_hash def make_dict(name, words, *args, **kwargs): """ make_dict(name, words, *args, **kwargs) -> mapping subclass Takes a sequence of words (or a pre-built Czech HashInfo) and returns a mapping subclass called `name` (used a dict) that employs the use of the minimal perfect hash. This mapping subclass has guaranteed O(1) worst-case lookups, additions, and deletions, however is slower than dict() in practice. 
>>> months = 'jan feb mar apr may jun jul aug sep oct nov dec'.split() >>> MyDict = make_dict('MyDict', months) >>> d = MyDict(dec=21, feb=None, may='hello') >>> d['jul'] = False >>> d MyDict([('feb', None), ('may', 'hello'), ('jul', False), ('dec', 21)]) >>> del d['may'] >>> del d['apr'] Traceback (most recent call last): ... KeyError: 'apr' >>> len(d) 3 """ info = CzechHashBuilder(words, *args, **kwargs) # Create a docstring that at least describes where the class came from... doc = """ Dictionary-like object that uses minimal perfect hashing, perserving original order. This class was generated by `%s.%s(%r, ...)`. """ % (__name__, make_dict.__name__, name) # Delegate to create_dict. return create_dict_subclass(name, info.hash_function, info.words, doc) def to_hash_info(unknown): if isinstance(unknown, HashInfo) or isinstance(unknown, CzechHashBuilder): # Unknown is a CzechHash. return unknown return HashInfo(unknown) def do_example(): import keyword words = keyword.kwlist hb = CzechHashBuilder(words) print('/*', hb.t1, hb.t2, hb.g, '*/') print(hb.graph.to_dot(edge_labels={ edge: '%d: %s' % assoc for edge, assoc in list(hb.associations.items()) })) if __name__ == '__main__': do_example()
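To make the returned parameters concrete: a word hashes to `g[f1(word)] + g[f2(word)]`, where each `f` is the table-based mini-hash defined by `generate_func` elsewhere in czech.py. A sketch of evaluating the hash by hand from the `HashInfo` tuple (the tables are random, so intermediate values vary from run to run):

from perfection.czech import hash_parameters

days = 'sun mon tue wed thu fri sat'.split()
info = hash_parameters(days)
n = len(info.t1)  # n = 3 * len(words) = 21 here

def mini_hash(word, table):
    # Same formula as CzechHashBuilder.generate_func.
    return sum(x * ord(c) for x, c in zip(table, word)) % n

def h(word):
    return info.g[mini_hash(word, info.t1)] + info.g[mini_hash(word, info.t2)]

# The hash is ordered and minimal: 'sun' -> 0, 'mon' -> 1, ..., 'sat' -> 6.
assert [h(w) for w in days] == list(range(len(days)))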
eddieantonio/perfection
perfection/czech.py
make_pickable_hash
python
def make_pickable_hash(words, *args, **kwargs):
    return PickableHash(CzechHashBuilder(words, *args, **kwargs)).czech_hash
Creates an ordered, minimal perfect hash function for the given sequence
of words.

>>> hf = make_pickable_hash(['sun', 'mon', 'tue', 'wed', 'thu',
...                          'fri', 'sat'])
>>> hf('fri')
5
>>> hf('sun')
0
train
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/czech.py#L272-L284
null
#!/usr/bin/env python """ Use the Czech et al. method for generating minimal perfect hashes for strings. """ from __future__ import print_function import random import collections from . import forest from .utils import create_dict_subclass __all__ = ['hash_parameters', 'make_hash', 'make_pickable_hash', 'make_dict'] _info_fields = ('t1', 't2', 'g', 'indices') HashInfo = collections.namedtuple('HashInfo', _info_fields) class CzechHashBuilder(object): """ A helper class that (iteratively) stores all data needed to create a Czech hash. The entire hash generation algorithm occurs in __init__. >>> duplicated_input = 'guacala' >>> unique_len = len(set(duplicated_input)) >>> info = CzechHashBuilder(duplicated_input) >>> hf = info.hash_function >>> [hf(x) for x in 'lacug'] [4, 2, 3, 1, 0] """ def __init__(self, words, minimize=False): # Store the words as an immutable sequence. self.words = ordered_deduplicate(words) # TODO: Index minimization self.indices = list(range(len(words[0]))) # Each of the following steps add fields to `self`: # Mapping step: # - n, t1, t2, f1, f2, graph self.generate_acyclic_graph() # Assignment step: # - g self.assign() # Now hash_info will return the appropriate object. @property def hash_info(self): """ HashInfo tuple for the created hash. """ return HashInfo(*(getattr(self, key) for key in _info_fields)) @property def hash_function(self): """ Returns the hash function proper. Ensures that `self` is not bound to the returned closure. """ assert hasattr(self, 'f1') and hasattr(self, 'f2') # These are not just convenient aliases for the given # attributes; if `self` would creep into the returned closure, # that would ensure that a reference to this big, fat object # would be kept alive; hence, any hash function would carry # around all of the auxiliary state that was created during the # generation of the hash parameters. Omitting `self` ensures # this object has a chance to be garbage collected. f1, f2, g = self.f1, self.f2, self.g def czech_hash(word): v1 = f1(word) v2 = f2(word) return g[v1] + g[v2] return czech_hash # Algorithm steps. def generate_acyclic_graph(self): """ Generates an acyclic graph for the given words. Adds the graph, and a list of edge-word associations to the object. """ # Maximum length of each table, respectively. # Hardcoded n = cm, where c = 3 # There might be a good way to choose an appropriate C, # but [1] suggests the average amount of iterations needed # to generate an acyclic graph is sqrt(3). self.n = 3 * len(self.words) max_tries = len(self.words) ** 2 for trial in range(max_tries): try: self.generate_or_fail() except forest.InvariantError: continue else: # Generated successfully! self.trials_taken = trial + 1 return raise RuntimeError("Could not generate graph in " "{} tries".format(max_tries)) def generate_random_table(self): """ Generates random tables for given word lists. """ table = list(range(0, self.n)) random.shuffle(table) return table def generate_or_fail(self): """ Attempts to generate a random acyclic graph, raising an InvariantError if unable to. """ t1 = self.generate_random_table() t2 = self.generate_random_table() f1 = self.generate_func(t1) f2 = self.generate_func(t2) edges = [(f1(word), f2(word)) for word in self.words] # Try to generate that graph, mack! # Note that failure to generate the graph here should be caught # by the caller. graph = forest.ForestGraph(edges=edges) # Associate each edge with its corresponding word. 
associations = {} for num in range(len(self.words)): edge = edges[num] word = self.words[num] associations[graph.canonical_order(edge)] = (num, word) # Assign all of these to the object. for name in ('t1', 't2', 'f1', 'f2', 'graph', 'associations'): self.__dict__[name] = locals()[name] def generate_func(self, table): """ Generates a random table based mini-hashing function. """ # Ensure that `self` isn't suddenly in the closure... n = self.n def func(word): return sum(x * ord(c) for x, c in zip(table, word)) % n return func def assign(self): # Create an vector of empty assignments. # **g is 1-indexed!** self.g = [None] * (self.n + 1) # Assign all vertices. for vertex in self.graph.vertices: assert isinstance(vertex, int) and vertex <= self.n # This vertex has already been assigned. if self.g[vertex] is not None: continue self.g[vertex] = 0 self.assign_vertex(vertex) def assign_vertex(self, vertex): for neighbour in self.graph.neighbours(vertex): if self.g[neighbour] is not None: # This neighbour has already been assigned. continue # Get the associated edge number edge = self.graph.canonical_order((vertex, neighbour)) num, _word = self.associations[edge] # Assign this vertex such that # h(word) == g(vertex) + g(neighbour) self.g[neighbour] = num - self.g[vertex] self.assign_vertex(neighbour) def ordered_deduplicate(sequence): """ Returns the sequence as a tuple with the duplicates removed, preserving input order. Any duplicates following the first occurrence are removed. >>> ordered_deduplicate([1, 2, 3, 1, 32, 1, 2]) (1, 2, 3, 32) Based on recipe from this StackOverflow post: http://stackoverflow.com/a/480227 """ seen = set() # Micro optimization: each call to seen_add saves an extra attribute # lookup in most iterations of the loop. seen_add = seen.add return tuple(x for x in sequence if not (x in seen or seen_add(x))) # API functions ############################################################## def hash_parameters(words, minimize_indices=False): """ Gives hash parameters for the given set of words. >>> info = hash_parameters('sun mon tue wed thu fri sat'.split()) >>> len(info.t1) 21 >>> len(info.t2) 21 >>> len(info.g) # g values are 1-indexed... 22 """ # Ensure that we have an indexable sequence. words = tuple(words) # Delegate to the hash builder. return CzechHashBuilder(words).hash_info def make_hash(words, *args, **kwargs): """ Creates an ordered, minimal perfect hash function for the given sequence of words. >>> hf = make_hash(['sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat']) >>> hf('fri') 5 >>> hf('sun') 0 """ # Use the hash builder proper, because HashInfo is assumed to not # have the precious f1, and f2 attributes. return CzechHashBuilder(words, *args, **kwargs).hash_function class PickableHash: """ Provides a Hash function which can be transmitted using Spark """ def __init__(self, hb): assert isinstance(hb, CzechHashBuilder) self.n = hb.n self.g = hb.g self.t1 = hb.t1 self.t2 = hb.t2 def __mini_hashing(self, word, table): return sum(x * ord(c) for x, c in zip(table, word)) % self.n def czech_hash(self, word): v1 = self.__mini_hashing(word, self.t1) v2 = self.__mini_hashing(word, self.t2) return self.g[v1] + self.g[v2] def make_dict(name, words, *args, **kwargs): """ make_dict(name, words, *args, **kwargs) -> mapping subclass Takes a sequence of words (or a pre-built Czech HashInfo) and returns a mapping subclass called `name` (used a dict) that employs the use of the minimal perfect hash. 
This mapping subclass has guaranteed O(1) worst-case lookups, additions, and deletions, however is slower than dict() in practice. >>> months = 'jan feb mar apr may jun jul aug sep oct nov dec'.split() >>> MyDict = make_dict('MyDict', months) >>> d = MyDict(dec=21, feb=None, may='hello') >>> d['jul'] = False >>> d MyDict([('feb', None), ('may', 'hello'), ('jul', False), ('dec', 21)]) >>> del d['may'] >>> del d['apr'] Traceback (most recent call last): ... KeyError: 'apr' >>> len(d) 3 """ info = CzechHashBuilder(words, *args, **kwargs) # Create a docstring that at least describes where the class came from... doc = """ Dictionary-like object that uses minimal perfect hashing, perserving original order. This class was generated by `%s.%s(%r, ...)`. """ % (__name__, make_dict.__name__, name) # Delegate to create_dict. return create_dict_subclass(name, info.hash_function, info.words, doc) def to_hash_info(unknown): if isinstance(unknown, HashInfo) or isinstance(unknown, CzechHashBuilder): # Unknown is a CzechHash. return unknown return HashInfo(unknown) def do_example(): import keyword words = keyword.kwlist hb = CzechHashBuilder(words) print('/*', hb.t1, hb.t2, hb.g, '*/') print(hb.graph.to_dot(edge_labels={ edge: '%d: %s' % assoc for edge, assoc in list(hb.associations.items()) })) if __name__ == '__main__': do_example()
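A short sketch of why the `PickableHash`-backed variant matters: the returned bound method carries only plain lists and an int, so it should survive a pickle round trip (e.g. for shipping to Spark workers, per the class docstring). This is an illustrative usage, not repository code, and it assumes Python 3's pickling of bound methods of module-level classes:

import pickle
from perfection.czech import make_pickable_hash

hf = make_pickable_hash(['sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat'])

# Bound methods of a module-level class pickle by reference on Python 3.5+.
restored = pickle.loads(pickle.dumps(hf))

assert restored('fri') == hf('fri') == 5
assert restored('sun') == 0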
eddieantonio/perfection
perfection/czech.py
make_dict
python
def make_dict(name, words, *args, **kwargs):
    info = CzechHashBuilder(words, *args, **kwargs)

    # Create a docstring that at least describes where the class came from...
    doc = """
    Dictionary-like object that uses minimal perfect hashing, preserving
    original order.

    This class was generated by `%s.%s(%r, ...)`.
    """ % (__name__, make_dict.__name__, name)

    # Delegate to create_dict.
    return create_dict_subclass(name, info.hash_function, info.words, doc)
make_dict(name, words, *args, **kwargs) -> mapping subclass

Takes a sequence of words (or a pre-built Czech HashInfo) and returns a
mapping subclass called `name` (usable as a dict) that uses the minimal
perfect hash.

This mapping subclass has guaranteed O(1) worst-case lookups, additions,
and deletions, but is slower than dict() in practice.

>>> months = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()
>>> MyDict = make_dict('MyDict', months)
>>> d = MyDict(dec=21, feb=None, may='hello')
>>> d['jul'] = False
>>> d
MyDict([('feb', None), ('may', 'hello'), ('jul', False), ('dec', 21)])
>>> del d['may']
>>> del d['apr']
Traceback (most recent call last):
    ...
KeyError: 'apr'
>>> len(d)
3
train
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/czech.py#L287-L322
[ "def create_dict_subclass(name, hash_func, slots, doc):\n \"\"\"\n Creates a dict subclass named name, using the hash_function to index\n hash_length items. Doc should be any additional documentation added to the\n class.\n \"\"\"\n\n hash_length = len(slots)\n\n # Returns array index -- raises a KeyError if the key does not match\n # its slot value.\n def index_or_key_error(key):\n index = hash_func(key)\n # Make sure the key is **exactly** the same.\n if key != slots[index]:\n raise KeyError(key)\n return index\n\n def init(self, *args, **kwargs):\n self._arr = [None] * hash_length\n self._len = 0\n\n # Delegate initialization to update provided by MutableMapping:\n self.update(*args, **kwargs)\n\n def getitem(self, key):\n index = index_or_key_error(key)\n if self._arr[index] is None:\n raise KeyError(key)\n return self._arr[index][1]\n\n def setitem(self, key, value):\n index = index_or_key_error(key)\n self._arr[index] = (key, value)\n\n def delitem(self, key):\n index = index_or_key_error(key)\n if self._arr[index] is None:\n raise KeyError(key)\n self._arr[index] = None\n\n def dict_iter(self):\n return (pair[0] for pair in self._arr if pair is not None)\n\n def dict_len(self):\n # TODO: Make this O(1) using auxiliary state?\n return sum(1 for _ in self)\n\n def dict_repr(self):\n arr_repr = (repr(pair) for pair in self._arr if pair is not None)\n return ''.join((name, '([', ', '.join(arr_repr), '])'))\n\n # Inheriting from MutableMapping gives us a whole whackload of methods for\n # free.\n bases = (collections.MutableMapping,)\n\n return type(name, bases, {\n '__init__': init,\n '__doc__': doc,\n\n '__getitem__': getitem,\n '__setitem__': setitem,\n '__delitem__': delitem,\n '__iter__': dict_iter,\n '__len__': dict_len,\n\n '__repr__': dict_repr,\n })\n" ]
#!/usr/bin/env python """ Use the Czech et al. method for generating minimal perfect hashes for strings. """ from __future__ import print_function import random import collections from . import forest from .utils import create_dict_subclass __all__ = ['hash_parameters', 'make_hash', 'make_pickable_hash', 'make_dict'] _info_fields = ('t1', 't2', 'g', 'indices') HashInfo = collections.namedtuple('HashInfo', _info_fields) class CzechHashBuilder(object): """ A helper class that (iteratively) stores all data needed to create a Czech hash. The entire hash generation algorithm occurs in __init__. >>> duplicated_input = 'guacala' >>> unique_len = len(set(duplicated_input)) >>> info = CzechHashBuilder(duplicated_input) >>> hf = info.hash_function >>> [hf(x) for x in 'lacug'] [4, 2, 3, 1, 0] """ def __init__(self, words, minimize=False): # Store the words as an immutable sequence. self.words = ordered_deduplicate(words) # TODO: Index minimization self.indices = list(range(len(words[0]))) # Each of the following steps add fields to `self`: # Mapping step: # - n, t1, t2, f1, f2, graph self.generate_acyclic_graph() # Assignment step: # - g self.assign() # Now hash_info will return the appropriate object. @property def hash_info(self): """ HashInfo tuple for the created hash. """ return HashInfo(*(getattr(self, key) for key in _info_fields)) @property def hash_function(self): """ Returns the hash function proper. Ensures that `self` is not bound to the returned closure. """ assert hasattr(self, 'f1') and hasattr(self, 'f2') # These are not just convenient aliases for the given # attributes; if `self` would creep into the returned closure, # that would ensure that a reference to this big, fat object # would be kept alive; hence, any hash function would carry # around all of the auxiliary state that was created during the # generation of the hash parameters. Omitting `self` ensures # this object has a chance to be garbage collected. f1, f2, g = self.f1, self.f2, self.g def czech_hash(word): v1 = f1(word) v2 = f2(word) return g[v1] + g[v2] return czech_hash # Algorithm steps. def generate_acyclic_graph(self): """ Generates an acyclic graph for the given words. Adds the graph, and a list of edge-word associations to the object. """ # Maximum length of each table, respectively. # Hardcoded n = cm, where c = 3 # There might be a good way to choose an appropriate C, # but [1] suggests the average amount of iterations needed # to generate an acyclic graph is sqrt(3). self.n = 3 * len(self.words) max_tries = len(self.words) ** 2 for trial in range(max_tries): try: self.generate_or_fail() except forest.InvariantError: continue else: # Generated successfully! self.trials_taken = trial + 1 return raise RuntimeError("Could not generate graph in " "{} tries".format(max_tries)) def generate_random_table(self): """ Generates random tables for given word lists. """ table = list(range(0, self.n)) random.shuffle(table) return table def generate_or_fail(self): """ Attempts to generate a random acyclic graph, raising an InvariantError if unable to. """ t1 = self.generate_random_table() t2 = self.generate_random_table() f1 = self.generate_func(t1) f2 = self.generate_func(t2) edges = [(f1(word), f2(word)) for word in self.words] # Try to generate that graph, mack! # Note that failure to generate the graph here should be caught # by the caller. graph = forest.ForestGraph(edges=edges) # Associate each edge with its corresponding word. 
associations = {} for num in range(len(self.words)): edge = edges[num] word = self.words[num] associations[graph.canonical_order(edge)] = (num, word) # Assign all of these to the object. for name in ('t1', 't2', 'f1', 'f2', 'graph', 'associations'): self.__dict__[name] = locals()[name] def generate_func(self, table): """ Generates a random table based mini-hashing function. """ # Ensure that `self` isn't suddenly in the closure... n = self.n def func(word): return sum(x * ord(c) for x, c in zip(table, word)) % n return func def assign(self): # Create an vector of empty assignments. # **g is 1-indexed!** self.g = [None] * (self.n + 1) # Assign all vertices. for vertex in self.graph.vertices: assert isinstance(vertex, int) and vertex <= self.n # This vertex has already been assigned. if self.g[vertex] is not None: continue self.g[vertex] = 0 self.assign_vertex(vertex) def assign_vertex(self, vertex): for neighbour in self.graph.neighbours(vertex): if self.g[neighbour] is not None: # This neighbour has already been assigned. continue # Get the associated edge number edge = self.graph.canonical_order((vertex, neighbour)) num, _word = self.associations[edge] # Assign this vertex such that # h(word) == g(vertex) + g(neighbour) self.g[neighbour] = num - self.g[vertex] self.assign_vertex(neighbour) def ordered_deduplicate(sequence): """ Returns the sequence as a tuple with the duplicates removed, preserving input order. Any duplicates following the first occurrence are removed. >>> ordered_deduplicate([1, 2, 3, 1, 32, 1, 2]) (1, 2, 3, 32) Based on recipe from this StackOverflow post: http://stackoverflow.com/a/480227 """ seen = set() # Micro optimization: each call to seen_add saves an extra attribute # lookup in most iterations of the loop. seen_add = seen.add return tuple(x for x in sequence if not (x in seen or seen_add(x))) # API functions ############################################################## def hash_parameters(words, minimize_indices=False): """ Gives hash parameters for the given set of words. >>> info = hash_parameters('sun mon tue wed thu fri sat'.split()) >>> len(info.t1) 21 >>> len(info.t2) 21 >>> len(info.g) # g values are 1-indexed... 22 """ # Ensure that we have an indexable sequence. words = tuple(words) # Delegate to the hash builder. return CzechHashBuilder(words).hash_info def make_hash(words, *args, **kwargs): """ Creates an ordered, minimal perfect hash function for the given sequence of words. >>> hf = make_hash(['sun', 'mon', 'tue', 'wed', 'thu', 'fri', 'sat']) >>> hf('fri') 5 >>> hf('sun') 0 """ # Use the hash builder proper, because HashInfo is assumed to not # have the precious f1, and f2 attributes. return CzechHashBuilder(words, *args, **kwargs).hash_function class PickableHash: """ Provides a Hash function which can be transmitted using Spark """ def __init__(self, hb): assert isinstance(hb, CzechHashBuilder) self.n = hb.n self.g = hb.g self.t1 = hb.t1 self.t2 = hb.t2 def __mini_hashing(self, word, table): return sum(x * ord(c) for x, c in zip(table, word)) % self.n def czech_hash(self, word): v1 = self.__mini_hashing(word, self.t1) v2 = self.__mini_hashing(word, self.t2) return self.g[v1] + self.g[v2] def make_pickable_hash(words, *args, **kwargs): """ Creates an ordered, minimal perfect hash function for the given sequence of words. >>> hf = make_pickable_hash(['sun', 'mon', 'tue', 'wed', 'thu', ... 
'fri', 'sat']) >>> hf('fri') 5 >>> hf('sun') 0 """ return PickableHash(CzechHashBuilder(words, *args, **kwargs)).czech_hash def to_hash_info(unknown): if isinstance(unknown, HashInfo) or isinstance(unknown, CzechHashBuilder): # Unknown is a CzechHash. return unknown return HashInfo(unknown) def do_example(): import keyword words = keyword.kwlist hb = CzechHashBuilder(words) print('/*', hb.t1, hb.t2, hb.g, '*/') print(hb.graph.to_dot(edge_labels={ edge: '%d: %s' % assoc for edge, assoc in list(hb.associations.items()) })) if __name__ == '__main__': do_example()
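One property the doctest above only hints at: because the minimal perfect hash is ordered, the generated mapping iterates its keys in original word-list order, not insertion order. A small illustrative sketch, not repository code:

from perfection.czech import make_dict

months = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()
MonthDict = make_dict('MonthDict', months)

d = MonthDict()
d['dec'] = 31
d['jan'] = 31
d['feb'] = 28

# Keys come back in month order, regardless of the order they were set.
assert list(d) == ['jan', 'feb', 'dec']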
eddieantonio/perfection
perfection/czech.py
CzechHashBuilder.hash_function
python
def hash_function(self):
    assert hasattr(self, 'f1') and hasattr(self, 'f2')

    # These are not just convenient aliases for the given
    # attributes; if `self` would creep into the returned closure,
    # that would ensure that a reference to this big, fat object
    # would be kept alive; hence, any hash function would carry
    # around all of the auxiliary state that was created during the
    # generation of the hash parameters. Omitting `self` ensures
    # this object has a chance to be garbage collected.
    f1, f2, g = self.f1, self.f2, self.g

    def czech_hash(word):
        v1 = f1(word)
        v2 = f2(word)
        return g[v1] + g[v2]

    return czech_hash
Returns the hash function proper. Ensures that `self` is not bound to the returned closure.
train
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/czech.py#L62-L83
null
class CzechHashBuilder(object): """ A helper class that (iteratively) stores all data needed to create a Czech hash. The entire hash generation algorithm occurs in __init__. >>> duplicated_input = 'guacala' >>> unique_len = len(set(duplicated_input)) >>> info = CzechHashBuilder(duplicated_input) >>> hf = info.hash_function >>> [hf(x) for x in 'lacug'] [4, 2, 3, 1, 0] """ def __init__(self, words, minimize=False): # Store the words as an immutable sequence. self.words = ordered_deduplicate(words) # TODO: Index minimization self.indices = list(range(len(words[0]))) # Each of the following steps add fields to `self`: # Mapping step: # - n, t1, t2, f1, f2, graph self.generate_acyclic_graph() # Assignment step: # - g self.assign() # Now hash_info will return the appropriate object. @property def hash_info(self): """ HashInfo tuple for the created hash. """ return HashInfo(*(getattr(self, key) for key in _info_fields)) @property # Algorithm steps. def generate_acyclic_graph(self): """ Generates an acyclic graph for the given words. Adds the graph, and a list of edge-word associations to the object. """ # Maximum length of each table, respectively. # Hardcoded n = cm, where c = 3 # There might be a good way to choose an appropriate C, # but [1] suggests the average amount of iterations needed # to generate an acyclic graph is sqrt(3). self.n = 3 * len(self.words) max_tries = len(self.words) ** 2 for trial in range(max_tries): try: self.generate_or_fail() except forest.InvariantError: continue else: # Generated successfully! self.trials_taken = trial + 1 return raise RuntimeError("Could not generate graph in " "{} tries".format(max_tries)) def generate_random_table(self): """ Generates random tables for given word lists. """ table = list(range(0, self.n)) random.shuffle(table) return table def generate_or_fail(self): """ Attempts to generate a random acyclic graph, raising an InvariantError if unable to. """ t1 = self.generate_random_table() t2 = self.generate_random_table() f1 = self.generate_func(t1) f2 = self.generate_func(t2) edges = [(f1(word), f2(word)) for word in self.words] # Try to generate that graph, mack! # Note that failure to generate the graph here should be caught # by the caller. graph = forest.ForestGraph(edges=edges) # Associate each edge with its corresponding word. associations = {} for num in range(len(self.words)): edge = edges[num] word = self.words[num] associations[graph.canonical_order(edge)] = (num, word) # Assign all of these to the object. for name in ('t1', 't2', 'f1', 'f2', 'graph', 'associations'): self.__dict__[name] = locals()[name] def generate_func(self, table): """ Generates a random table based mini-hashing function. """ # Ensure that `self` isn't suddenly in the closure... n = self.n def func(word): return sum(x * ord(c) for x, c in zip(table, word)) % n return func def assign(self): # Create an vector of empty assignments. # **g is 1-indexed!** self.g = [None] * (self.n + 1) # Assign all vertices. for vertex in self.graph.vertices: assert isinstance(vertex, int) and vertex <= self.n # This vertex has already been assigned. if self.g[vertex] is not None: continue self.g[vertex] = 0 self.assign_vertex(vertex) def assign_vertex(self, vertex): for neighbour in self.graph.neighbours(vertex): if self.g[neighbour] is not None: # This neighbour has already been assigned. 
continue # Get the associated edge number edge = self.graph.canonical_order((vertex, neighbour)) num, _word = self.associations[edge] # Assign this vertex such that # h(word) == g(vertex) + g(neighbour) self.g[neighbour] = num - self.g[vertex] self.assign_vertex(neighbour)
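The comment block in the property above is about object lifetime: by binding only `f1`, `f2`, and `g`, the returned closure does not keep the whole builder alive. A hedged sketch of checking that with `weakref`, assuming nothing else holds a reference to the builder:

import gc
import weakref
from perfection.czech import CzechHashBuilder

builder = CzechHashBuilder('sun mon tue wed thu fri sat'.split())
hf = builder.hash_function          # closure over f1, f2, g only
probe = weakref.ref(builder)

del builder
gc.collect()

assert probe() is None              # the builder was collectable...
assert hf('fri') == 5               # ...but the hash function still works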
eddieantonio/perfection
perfection/czech.py
CzechHashBuilder.generate_acyclic_graph
python
def generate_acyclic_graph(self):
    # Maximum length of each table, respectively.
    # Hardcoded n = cm, where c = 3
    # There might be a good way to choose an appropriate C,
    # but [1] suggests the average amount of iterations needed
    # to generate an acyclic graph is sqrt(3).
    self.n = 3 * len(self.words)

    max_tries = len(self.words) ** 2

    for trial in range(max_tries):
        try:
            self.generate_or_fail()
        except forest.InvariantError:
            continue
        else:
            # Generated successfully!
            self.trials_taken = trial + 1
            return

    raise RuntimeError("Could not generate graph in "
                       "{} tries".format(max_tries))
Generates an acyclic graph for the given words. Adds the graph and a list of edge-word associations to the object.
train
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/czech.py#L87-L112
[ "def generate_or_fail(self):\n \"\"\"\n Attempts to generate a random acyclic graph, raising an\n InvariantError if unable to.\n \"\"\"\n\n t1 = self.generate_random_table()\n t2 = self.generate_random_table()\n f1 = self.generate_func(t1)\n f2 = self.generate_func(t2)\n edges = [(f1(word), f2(word)) for word in self.words]\n\n # Try to generate that graph, mack!\n # Note that failure to generate the graph here should be caught\n # by the caller.\n graph = forest.ForestGraph(edges=edges)\n\n # Associate each edge with its corresponding word.\n associations = {}\n for num in range(len(self.words)):\n edge = edges[num]\n word = self.words[num]\n associations[graph.canonical_order(edge)] = (num, word)\n\n # Assign all of these to the object.\n for name in ('t1', 't2', 'f1', 'f2', 'graph', 'associations'):\n self.__dict__[name] = locals()[name]\n" ]
class CzechHashBuilder(object): """ A helper class that (iteratively) stores all data needed to create a Czech hash. The entire hash generation algorithm occurs in __init__. >>> duplicated_input = 'guacala' >>> unique_len = len(set(duplicated_input)) >>> info = CzechHashBuilder(duplicated_input) >>> hf = info.hash_function >>> [hf(x) for x in 'lacug'] [4, 2, 3, 1, 0] """ def __init__(self, words, minimize=False): # Store the words as an immutable sequence. self.words = ordered_deduplicate(words) # TODO: Index minimization self.indices = list(range(len(words[0]))) # Each of the following steps add fields to `self`: # Mapping step: # - n, t1, t2, f1, f2, graph self.generate_acyclic_graph() # Assignment step: # - g self.assign() # Now hash_info will return the appropriate object. @property def hash_info(self): """ HashInfo tuple for the created hash. """ return HashInfo(*(getattr(self, key) for key in _info_fields)) @property def hash_function(self): """ Returns the hash function proper. Ensures that `self` is not bound to the returned closure. """ assert hasattr(self, 'f1') and hasattr(self, 'f2') # These are not just convenient aliases for the given # attributes; if `self` would creep into the returned closure, # that would ensure that a reference to this big, fat object # would be kept alive; hence, any hash function would carry # around all of the auxiliary state that was created during the # generation of the hash parameters. Omitting `self` ensures # this object has a chance to be garbage collected. f1, f2, g = self.f1, self.f2, self.g def czech_hash(word): v1 = f1(word) v2 = f2(word) return g[v1] + g[v2] return czech_hash # Algorithm steps. def generate_random_table(self): """ Generates random tables for given word lists. """ table = list(range(0, self.n)) random.shuffle(table) return table def generate_or_fail(self): """ Attempts to generate a random acyclic graph, raising an InvariantError if unable to. """ t1 = self.generate_random_table() t2 = self.generate_random_table() f1 = self.generate_func(t1) f2 = self.generate_func(t2) edges = [(f1(word), f2(word)) for word in self.words] # Try to generate that graph, mack! # Note that failure to generate the graph here should be caught # by the caller. graph = forest.ForestGraph(edges=edges) # Associate each edge with its corresponding word. associations = {} for num in range(len(self.words)): edge = edges[num] word = self.words[num] associations[graph.canonical_order(edge)] = (num, word) # Assign all of these to the object. for name in ('t1', 't2', 'f1', 'f2', 'graph', 'associations'): self.__dict__[name] = locals()[name] def generate_func(self, table): """ Generates a random table based mini-hashing function. """ # Ensure that `self` isn't suddenly in the closure... n = self.n def func(word): return sum(x * ord(c) for x, c in zip(table, word)) % n return func def assign(self): # Create an vector of empty assignments. # **g is 1-indexed!** self.g = [None] * (self.n + 1) # Assign all vertices. for vertex in self.graph.vertices: assert isinstance(vertex, int) and vertex <= self.n # This vertex has already been assigned. if self.g[vertex] is not None: continue self.g[vertex] = 0 self.assign_vertex(vertex) def assign_vertex(self, vertex): for neighbour in self.graph.neighbours(vertex): if self.g[neighbour] is not None: # This neighbour has already been assigned. 
continue # Get the associated edge number edge = self.graph.canonical_order((vertex, neighbour)) num, _word = self.associations[edge] # Assign this vertex such that # h(word) == g(vertex) + g(neighbour) self.g[neighbour] = num - self.g[vertex] self.assign_vertex(neighbour)
eddieantonio/perfection
perfection/czech.py
CzechHashBuilder.generate_random_table
python
def generate_random_table(self):
    table = list(range(0, self.n))
    random.shuffle(table)
    return table
Generates random tables for given word lists.
train
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/czech.py#L114-L120
null
class CzechHashBuilder(object): """ A helper class that (iteratively) stores all data needed to create a Czech hash. The entire hash generation algorithm occurs in __init__. >>> duplicated_input = 'guacala' >>> unique_len = len(set(duplicated_input)) >>> info = CzechHashBuilder(duplicated_input) >>> hf = info.hash_function >>> [hf(x) for x in 'lacug'] [4, 2, 3, 1, 0] """ def __init__(self, words, minimize=False): # Store the words as an immutable sequence. self.words = ordered_deduplicate(words) # TODO: Index minimization self.indices = list(range(len(words[0]))) # Each of the following steps add fields to `self`: # Mapping step: # - n, t1, t2, f1, f2, graph self.generate_acyclic_graph() # Assignment step: # - g self.assign() # Now hash_info will return the appropriate object. @property def hash_info(self): """ HashInfo tuple for the created hash. """ return HashInfo(*(getattr(self, key) for key in _info_fields)) @property def hash_function(self): """ Returns the hash function proper. Ensures that `self` is not bound to the returned closure. """ assert hasattr(self, 'f1') and hasattr(self, 'f2') # These are not just convenient aliases for the given # attributes; if `self` would creep into the returned closure, # that would ensure that a reference to this big, fat object # would be kept alive; hence, any hash function would carry # around all of the auxiliary state that was created during the # generation of the hash parameters. Omitting `self` ensures # this object has a chance to be garbage collected. f1, f2, g = self.f1, self.f2, self.g def czech_hash(word): v1 = f1(word) v2 = f2(word) return g[v1] + g[v2] return czech_hash # Algorithm steps. def generate_acyclic_graph(self): """ Generates an acyclic graph for the given words. Adds the graph, and a list of edge-word associations to the object. """ # Maximum length of each table, respectively. # Hardcoded n = cm, where c = 3 # There might be a good way to choose an appropriate C, # but [1] suggests the average amount of iterations needed # to generate an acyclic graph is sqrt(3). self.n = 3 * len(self.words) max_tries = len(self.words) ** 2 for trial in range(max_tries): try: self.generate_or_fail() except forest.InvariantError: continue else: # Generated successfully! self.trials_taken = trial + 1 return raise RuntimeError("Could not generate graph in " "{} tries".format(max_tries)) def generate_or_fail(self): """ Attempts to generate a random acyclic graph, raising an InvariantError if unable to. """ t1 = self.generate_random_table() t2 = self.generate_random_table() f1 = self.generate_func(t1) f2 = self.generate_func(t2) edges = [(f1(word), f2(word)) for word in self.words] # Try to generate that graph, mack! # Note that failure to generate the graph here should be caught # by the caller. graph = forest.ForestGraph(edges=edges) # Associate each edge with its corresponding word. associations = {} for num in range(len(self.words)): edge = edges[num] word = self.words[num] associations[graph.canonical_order(edge)] = (num, word) # Assign all of these to the object. for name in ('t1', 't2', 'f1', 'f2', 'graph', 'associations'): self.__dict__[name] = locals()[name] def generate_func(self, table): """ Generates a random table based mini-hashing function. """ # Ensure that `self` isn't suddenly in the closure... n = self.n def func(word): return sum(x * ord(c) for x, c in zip(table, word)) % n return func def assign(self): # Create an vector of empty assignments. # **g is 1-indexed!** self.g = [None] * (self.n + 1) # Assign all vertices. 
for vertex in self.graph.vertices: assert isinstance(vertex, int) and vertex <= self.n # This vertex has already been assigned. if self.g[vertex] is not None: continue self.g[vertex] = 0 self.assign_vertex(vertex) def assign_vertex(self, vertex): for neighbour in self.graph.neighbours(vertex): if self.g[neighbour] is not None: # This neighbour has already been assigned. continue # Get the associated edge number edge = self.graph.canonical_order((vertex, neighbour)) num, _word = self.associations[edge] # Assign this vertex such that # h(word) == g(vertex) + g(neighbour) self.g[neighbour] = num - self.g[vertex] self.assign_vertex(neighbour)
eddieantonio/perfection
perfection/czech.py
CzechHashBuilder.generate_or_fail
python
def generate_or_fail(self):
    t1 = self.generate_random_table()
    t2 = self.generate_random_table()
    f1 = self.generate_func(t1)
    f2 = self.generate_func(t2)
    edges = [(f1(word), f2(word)) for word in self.words]

    # Try to generate that graph, mack!
    # Note that failure to generate the graph here should be caught
    # by the caller.
    graph = forest.ForestGraph(edges=edges)

    # Associate each edge with its corresponding word.
    associations = {}
    for num in range(len(self.words)):
        edge = edges[num]
        word = self.words[num]
        associations[graph.canonical_order(edge)] = (num, word)

    # Assign all of these to the object.
    for name in ('t1', 't2', 'f1', 'f2', 'graph', 'associations'):
        self.__dict__[name] = locals()[name]
Attempts to generate a random acyclic graph, raising an InvariantError if unable to.
train
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/czech.py#L122-L148
[ "def generate_random_table(self):\n \"\"\"\n Generates random tables for given word lists.\n \"\"\"\n table = list(range(0, self.n))\n random.shuffle(table)\n return table\n", "def generate_func(self, table):\n \"\"\"\n Generates a random table based mini-hashing function.\n \"\"\"\n\n # Ensure that `self` isn't suddenly in the closure...\n n = self.n\n\n def func(word):\n return sum(x * ord(c) for x, c in zip(table, word)) % n\n\n return func\n", "def canonical_order(edge):\n u, v = edge\n return edge if u < v else (v, u)\n" ]
class CzechHashBuilder(object): """ A helper class that (iteratively) stores all data needed to create a Czech hash. The entire hash generation algorithm occurs in __init__. >>> duplicated_input = 'guacala' >>> unique_len = len(set(duplicated_input)) >>> info = CzechHashBuilder(duplicated_input) >>> hf = info.hash_function >>> [hf(x) for x in 'lacug'] [4, 2, 3, 1, 0] """ def __init__(self, words, minimize=False): # Store the words as an immutable sequence. self.words = ordered_deduplicate(words) # TODO: Index minimization self.indices = list(range(len(words[0]))) # Each of the following steps add fields to `self`: # Mapping step: # - n, t1, t2, f1, f2, graph self.generate_acyclic_graph() # Assignment step: # - g self.assign() # Now hash_info will return the appropriate object. @property def hash_info(self): """ HashInfo tuple for the created hash. """ return HashInfo(*(getattr(self, key) for key in _info_fields)) @property def hash_function(self): """ Returns the hash function proper. Ensures that `self` is not bound to the returned closure. """ assert hasattr(self, 'f1') and hasattr(self, 'f2') # These are not just convenient aliases for the given # attributes; if `self` would creep into the returned closure, # that would ensure that a reference to this big, fat object # would be kept alive; hence, any hash function would carry # around all of the auxiliary state that was created during the # generation of the hash parameters. Omitting `self` ensures # this object has a chance to be garbage collected. f1, f2, g = self.f1, self.f2, self.g def czech_hash(word): v1 = f1(word) v2 = f2(word) return g[v1] + g[v2] return czech_hash # Algorithm steps. def generate_acyclic_graph(self): """ Generates an acyclic graph for the given words. Adds the graph, and a list of edge-word associations to the object. """ # Maximum length of each table, respectively. # Hardcoded n = cm, where c = 3 # There might be a good way to choose an appropriate C, # but [1] suggests the average amount of iterations needed # to generate an acyclic graph is sqrt(3). self.n = 3 * len(self.words) max_tries = len(self.words) ** 2 for trial in range(max_tries): try: self.generate_or_fail() except forest.InvariantError: continue else: # Generated successfully! self.trials_taken = trial + 1 return raise RuntimeError("Could not generate graph in " "{} tries".format(max_tries)) def generate_random_table(self): """ Generates random tables for given word lists. """ table = list(range(0, self.n)) random.shuffle(table) return table def generate_func(self, table): """ Generates a random table based mini-hashing function. """ # Ensure that `self` isn't suddenly in the closure... n = self.n def func(word): return sum(x * ord(c) for x, c in zip(table, word)) % n return func def assign(self): # Create an vector of empty assignments. # **g is 1-indexed!** self.g = [None] * (self.n + 1) # Assign all vertices. for vertex in self.graph.vertices: assert isinstance(vertex, int) and vertex <= self.n # This vertex has already been assigned. if self.g[vertex] is not None: continue self.g[vertex] = 0 self.assign_vertex(vertex) def assign_vertex(self, vertex): for neighbour in self.graph.neighbours(vertex): if self.g[neighbour] is not None: # This neighbour has already been assigned. continue # Get the associated edge number edge = self.graph.canonical_order((vertex, neighbour)) num, _word = self.associations[edge] # Assign this vertex such that # h(word) == g(vertex) + g(neighbour) self.g[neighbour] = num - self.g[vertex] self.assign_vertex(neighbour)
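To make the mapping step concrete: each word becomes an edge `(f1(word), f2(word))` in a random graph on `n` vertices, and the attempt is rejected whenever those edges form a self-loop or a cycle. A standalone toy sketch of that rejection loop, independent of the repository classes:

import random

def try_mapping(words, n):
    """One mapping attempt: words -> edges; fail on loops or cycles."""
    t1 = random.sample(range(n), n)
    t2 = random.sample(range(n), n)
    f = lambda w, t: sum(x * ord(c) for x, c in zip(t, w)) % n

    parent = list(range(n))             # tiny union-find for cycle detection
    def find(v):
        while parent[v] != v:
            parent[v] = parent[parent[v]]
            v = parent[v]
        return v

    edges = []
    for w in words:
        u, v = f(w, t1), f(w, t2)
        ru, rv = find(u), find(v)
        if u == v or ru == rv:          # self-loop or cycle: reject attempt
            return None
        parent[ru] = rv
        edges.append((u, v))
    return t1, t2, edges

words = 'sun mon tue wed thu fri sat'.split()
attempt = None
while attempt is None:                  # on average only a few tries are needed
    attempt = try_mapping(words, 3 * len(words))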
eddieantonio/perfection
perfection/czech.py
CzechHashBuilder.generate_func
python
def generate_func(self, table):
    # Ensure that `self` isn't suddenly in the closure...
    n = self.n

    def func(word):
        return sum(x * ord(c) for x, c in zip(table, word)) % n

    return func
Generates a random, table-based mini-hashing function.
train
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/czech.py#L150-L161
null
class CzechHashBuilder(object): """ A helper class that (iteratively) stores all data needed to create a Czech hash. The entire hash generation algorithm occurs in __init__. >>> duplicated_input = 'guacala' >>> unique_len = len(set(duplicated_input)) >>> info = CzechHashBuilder(duplicated_input) >>> hf = info.hash_function >>> [hf(x) for x in 'lacug'] [4, 2, 3, 1, 0] """ def __init__(self, words, minimize=False): # Store the words as an immutable sequence. self.words = ordered_deduplicate(words) # TODO: Index minimization self.indices = list(range(len(words[0]))) # Each of the following steps add fields to `self`: # Mapping step: # - n, t1, t2, f1, f2, graph self.generate_acyclic_graph() # Assignment step: # - g self.assign() # Now hash_info will return the appropriate object. @property def hash_info(self): """ HashInfo tuple for the created hash. """ return HashInfo(*(getattr(self, key) for key in _info_fields)) @property def hash_function(self): """ Returns the hash function proper. Ensures that `self` is not bound to the returned closure. """ assert hasattr(self, 'f1') and hasattr(self, 'f2') # These are not just convenient aliases for the given # attributes; if `self` would creep into the returned closure, # that would ensure that a reference to this big, fat object # would be kept alive; hence, any hash function would carry # around all of the auxiliary state that was created during the # generation of the hash parameters. Omitting `self` ensures # this object has a chance to be garbage collected. f1, f2, g = self.f1, self.f2, self.g def czech_hash(word): v1 = f1(word) v2 = f2(word) return g[v1] + g[v2] return czech_hash # Algorithm steps. def generate_acyclic_graph(self): """ Generates an acyclic graph for the given words. Adds the graph, and a list of edge-word associations to the object. """ # Maximum length of each table, respectively. # Hardcoded n = cm, where c = 3 # There might be a good way to choose an appropriate C, # but [1] suggests the average amount of iterations needed # to generate an acyclic graph is sqrt(3). self.n = 3 * len(self.words) max_tries = len(self.words) ** 2 for trial in range(max_tries): try: self.generate_or_fail() except forest.InvariantError: continue else: # Generated successfully! self.trials_taken = trial + 1 return raise RuntimeError("Could not generate graph in " "{} tries".format(max_tries)) def generate_random_table(self): """ Generates random tables for given word lists. """ table = list(range(0, self.n)) random.shuffle(table) return table def generate_or_fail(self): """ Attempts to generate a random acyclic graph, raising an InvariantError if unable to. """ t1 = self.generate_random_table() t2 = self.generate_random_table() f1 = self.generate_func(t1) f2 = self.generate_func(t2) edges = [(f1(word), f2(word)) for word in self.words] # Try to generate that graph, mack! # Note that failure to generate the graph here should be caught # by the caller. graph = forest.ForestGraph(edges=edges) # Associate each edge with its corresponding word. associations = {} for num in range(len(self.words)): edge = edges[num] word = self.words[num] associations[graph.canonical_order(edge)] = (num, word) # Assign all of these to the object. for name in ('t1', 't2', 'f1', 'f2', 'graph', 'associations'): self.__dict__[name] = locals()[name] def assign(self): # Create an vector of empty assignments. # **g is 1-indexed!** self.g = [None] * (self.n + 1) # Assign all vertices. 
for vertex in self.graph.vertices: assert isinstance(vertex, int) and vertex <= self.n # This vertex has already been assigned. if self.g[vertex] is not None: continue self.g[vertex] = 0 self.assign_vertex(vertex) def assign_vertex(self, vertex): for neighbour in self.graph.neighbours(vertex): if self.g[neighbour] is not None: # This neighbour has already been assigned. continue # Get the associated edge number edge = self.graph.canonical_order((vertex, neighbour)) num, _word = self.associations[edge] # Assign this vertex such that # h(word) == g(vertex) + g(neighbour) self.g[neighbour] = num - self.g[vertex] self.assign_vertex(neighbour)
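A minimal standalone sketch of the table-based mini-hash described in the record above; the table, modulus, and word are made-up illustration values, not ones produced by the builder:

# Hypothetical table and modulus, for illustration only.
def make_mini_hash(table, n):
    def func(word):
        # Pair each character with a table entry; sum of products, mod n.
        return sum(x * ord(c) for x, c in zip(table, word)) % n
    return func

h = make_mini_hash([3, 1, 4, 1, 5], n=11)
print(h("cat"))   # a fixed value in range(11); here 858 % 11 == 0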
eddieantonio/perfection
perfection/utils.py
create_dict_subclass
python
def create_dict_subclass(name, hash_func, slots, doc):
    hash_length = len(slots)

    # Returns array index -- raises a KeyError if the key does not match
    # its slot value.
    def index_or_key_error(key):
        index = hash_func(key)
        # Make sure the key is **exactly** the same.
        if key != slots[index]:
            raise KeyError(key)
        return index

    def init(self, *args, **kwargs):
        self._arr = [None] * hash_length
        self._len = 0
        # Delegate initialization to update provided by MutableMapping:
        self.update(*args, **kwargs)

    def getitem(self, key):
        index = index_or_key_error(key)
        if self._arr[index] is None:
            raise KeyError(key)
        return self._arr[index][1]

    def setitem(self, key, value):
        index = index_or_key_error(key)
        self._arr[index] = (key, value)

    def delitem(self, key):
        index = index_or_key_error(key)
        if self._arr[index] is None:
            raise KeyError(key)
        self._arr[index] = None

    def dict_iter(self):
        return (pair[0] for pair in self._arr if pair is not None)

    def dict_len(self):
        # TODO: Make this O(1) using auxiliary state?
        return sum(1 for _ in self)

    def dict_repr(self):
        arr_repr = (repr(pair) for pair in self._arr if pair is not None)
        return ''.join((name, '([', ', '.join(arr_repr), '])'))

    # Inheriting from MutableMapping gives us a whole whackload of methods for
    # free.
    bases = (collections.MutableMapping,)

    return type(name, bases, {
        '__init__': init,
        '__doc__': doc,
        '__getitem__': getitem,
        '__setitem__': setitem,
        '__delitem__': delitem,
        '__iter__': dict_iter,
        '__len__': dict_len,
        '__repr__': dict_repr,
    })
Creates a dict subclass named ``name``, using ``hash_func`` to index the fixed slots given by ``slots``. ``doc`` is any additional documentation added to the class.
train
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/utils.py#L10-L77
null
#!/usr/bin/env python """ Shared utilities for perfect hash tools. """ import collections
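A minimal sketch of the slot-checking idea the generated class relies on, with a stand-in hash function and slot vector (both hypothetical, not produced by make_hash):

# Stand-ins for a real perfect hash and its slot vector.
slots = ('a', 'b', 'c')
hash_func = {'a': 0, 'b': 1, 'c': 2}.get

def index_or_key_error(key):
    index = hash_func(key)
    # A foreign key either hashes to nothing or to a slot holding a
    # different key, so it is rejected with KeyError.
    if index is None or slots[index] != key:
        raise KeyError(key)
    return index

assert index_or_key_error('b') == 1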
eddieantonio/perfection
perfection/getty.py
hash_parameters
python
def hash_parameters(keys, minimize=True, to_int=None):
    # If to_int is not assigned, simply use the identity function.
    if to_int is None:
        to_int = __identity

    key_to_original = {to_int(original): original for original in keys}

    # Create a set of all items to be hashed.
    items = list(key_to_original.keys())

    if minimize:
        offset = 0 - min(items)
        items = frozenset(x + offset for x in items)
    else:
        offset = 0

    # 1. Start with a square array (not stored) that is t units on each side.
    #    Choose a t such that t * t >= max(S)
    t = choose_best_t(items)
    assert t * t > max(items) and t * t >= len(items)

    # 2. Place each key K in the square at location (x,y), where
    #    x = K mod t, y = K / t.
    row_queue = place_items_in_square(items, t)

    # 3. Arrange rows so that they'll fit into one row and generate a
    #    displacement vector.
    final_row, displacement_vector = arrange_rows(row_queue, t)

    # Translate the internal keys to their original items.
    slots = tuple(key_to_original[item - offset] if item is not None else None
                  for item in final_row)

    # Return the parameters
    return HashInfo(
        t=t,
        slots=slots,
        r=displacement_vector,
        offset=offset,
        to_int=to_int if to_int is not __identity else None
    )
Calculates the parameters for a perfect hash. The result is returned
as a HashInfo tuple which has the following fields:

    t
        The "table parameter". This is the minimum side length of the
        table used to create the hash. In practice, t**2 is the maximum
        size of the output hash.

    slots
        The original inputs mapped to a vector. This is the hash
        function.

    r
        The displacement vector. This is the displacement of the given
        row in the result vector. To find a given value, use
        ``x + r[y]``.

    offset
        The amount by which to offset all values (once converted to
        ints).

    to_int
        A function that converts the input to an int (if given).

Keyword parameters:

``minimize``
    Whether or not to offset all integer keys internally by the minimum
    value. This typically results in smaller output.

``to_int``
    A callable that converts the input keys to ints. If not specified,
    all keys should be given as ints.

>>> hash_parameters([1, 5, 7], minimize=False)
HashInfo(t=3, slots=(1, 5, 7), r=(-1, -1, 1), offset=0, to_int=None)
>>> hash_parameters([1, 5, 7])
HashInfo(t=3, slots=(1, 5, 7), r=(0, 0, 2), offset=-1, to_int=None)

>>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34)
>>> phash = hash_parameters(l)
>>> phash.slots
(18, 19, 0, 21, 22, 3, 4, 24, 7, 26, 30, 10, 29, 13, 34, 15)

For some values, the displacement vector will be rather empty:

>>> hash_parameters('Andrea', to_int=ord).r
(1, None, None, None, 0, -3, 4, None)
train
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/getty.py#L20-L107
[ "def choose_best_t(items):\n minimum_allowable = int(math.sqrt(max(items)) + 1)\n if minimum_allowable ** 2 < len(items):\n return len(items)\n else:\n return minimum_allowable\n", "def place_items_in_square(items, t):\n \"\"\"\n Returns a list of rows that are stored as a priority queue to be\n used with heapq functions.\n\n >>> place_items_in_square([1,5,7], 4)\n [(2, 1, [(1, 5), (3, 7)]), (3, 0, [(1, 1)])]\n >>> place_items_in_square([1,5,7], 3)\n [(2, 0, [(1, 1)]), (2, 1, [(2, 5)]), (2, 2, [(1, 7)])]\n \"\"\"\n\n # A minheap (because that's all that heapq supports :/)\n # of the length of each row. Why this is important is because\n # we'll be popping the largest rows when figuring out row displacements.\n # Each item is a tuple of (t - |row|, y, [(xpos_1, item_1), ...]).\n # Until the call to heapq.heapify(), the rows are ordered in\n # increasing row number (y).\n rows = [(t, y, []) for y in range(t)]\n\n for item in items:\n # Calculate the cell the item should fall in.\n x = item % t\n y = item // t\n\n # Push the item to its corresponding row...\n inverse_length, _, row_contents = rows[y]\n heapq.heappush(row_contents, (x, item))\n\n # Ensure the heap key is kept intact.\n rows[y] = inverse_length - 1, y, row_contents\n\n assert all(inv_len == t - len(rows) for inv_len, _, rows in rows)\n\n heapq.heapify(rows)\n\n # Return only rows that are populated.\n return [row for row in rows if row[2]]\n", "def arrange_rows(row_queue, t):\n \"\"\"\n Takes a priority queue as generated by place_items_in_square().\n Arranges the items from its conceptual square to one list. Returns\n both the resultant vector, plus the displacement vector, to be used\n in the final output hash function.\n\n >>> rows = [(2, 1, [(0, 1), (1, 5)]), (3, 3, [(1, 7)])]\n >>> result, displacements = arrange_rows(rows, 4)\n >>> result\n (1, 5, 7)\n >>> displacements\n (None, 0, None, 1)\n\n >>> rows = [(1, 1, [(0, 1), (2, 7)]), (2, 2, [(1, 5)])]\n >>> result, displacements = arrange_rows(rows, 3)\n >>> result\n (1, 5, 7)\n >>> displacements\n (None, 0, 0)\n \"\"\"\n\n # Create a set of all of the unoccupied columns.\n max_columns = t ** 2\n cols = ((x, True) for x in range(max_columns))\n unoccupied_columns = collections.OrderedDict(cols)\n\n # Create the resultant and displacement vectors.\n result = [None] * max_columns\n displacements = [None] * t\n\n while row_queue:\n # Get the next row to place.\n _inverse_length, y, row = heapq.heappop(row_queue)\n\n offset = find_first_fit(unoccupied_columns, row, max_columns)\n # Calculate the offset of the first item.\n first_item_x = row[0][0]\n\n displacements[y] = offset\n for x, item in row:\n actual_x = x + offset\n result[actual_x] = item\n del unoccupied_columns[actual_x]\n\n return tuple(trim_nones_from_right(result)), tuple(displacements)\n" ]
#!/usr/bin/env python # coding: utf-8 from __future__ import print_function, division import math import collections import heapq from .utils import create_dict_subclass __all__ = ['hash_parameters', 'make_hash', 'make_dict'] HashInfo = collections.namedtuple('HashInfo', 't slots r offset to_int') __identity = lambda x: x def choose_best_t(items): minimum_allowable = int(math.sqrt(max(items)) + 1) if minimum_allowable ** 2 < len(items): return len(items) else: return minimum_allowable def place_items_in_square(items, t): """ Returns a list of rows that are stored as a priority queue to be used with heapq functions. >>> place_items_in_square([1,5,7], 4) [(2, 1, [(1, 5), (3, 7)]), (3, 0, [(1, 1)])] >>> place_items_in_square([1,5,7], 3) [(2, 0, [(1, 1)]), (2, 1, [(2, 5)]), (2, 2, [(1, 7)])] """ # A minheap (because that's all that heapq supports :/) # of the length of each row. Why this is important is because # we'll be popping the largest rows when figuring out row displacements. # Each item is a tuple of (t - |row|, y, [(xpos_1, item_1), ...]). # Until the call to heapq.heapify(), the rows are ordered in # increasing row number (y). rows = [(t, y, []) for y in range(t)] for item in items: # Calculate the cell the item should fall in. x = item % t y = item // t # Push the item to its corresponding row... inverse_length, _, row_contents = rows[y] heapq.heappush(row_contents, (x, item)) # Ensure the heap key is kept intact. rows[y] = inverse_length - 1, y, row_contents assert all(inv_len == t - len(rows) for inv_len, _, rows in rows) heapq.heapify(rows) # Return only rows that are populated. return [row for row in rows if row[2]] def arrange_rows(row_queue, t): """ Takes a priority queue as generated by place_items_in_square(). Arranges the items from its conceptual square to one list. Returns both the resultant vector, plus the displacement vector, to be used in the final output hash function. >>> rows = [(2, 1, [(0, 1), (1, 5)]), (3, 3, [(1, 7)])] >>> result, displacements = arrange_rows(rows, 4) >>> result (1, 5, 7) >>> displacements (None, 0, None, 1) >>> rows = [(1, 1, [(0, 1), (2, 7)]), (2, 2, [(1, 5)])] >>> result, displacements = arrange_rows(rows, 3) >>> result (1, 5, 7) >>> displacements (None, 0, 0) """ # Create a set of all of the unoccupied columns. max_columns = t ** 2 cols = ((x, True) for x in range(max_columns)) unoccupied_columns = collections.OrderedDict(cols) # Create the resultant and displacement vectors. result = [None] * max_columns displacements = [None] * t while row_queue: # Get the next row to place. _inverse_length, y, row = heapq.heappop(row_queue) offset = find_first_fit(unoccupied_columns, row, max_columns) # Calculate the offset of the first item. first_item_x = row[0][0] displacements[y] = offset for x, item in row: actual_x = x + offset result[actual_x] = item del unoccupied_columns[actual_x] return tuple(trim_nones_from_right(result)), tuple(displacements) def find_first_fit(unoccupied_columns, row, row_length): """ Finds the first index that the row's items can fit. """ for free_col in unoccupied_columns: # The offset is that such that the first item goes in the free column. 
first_item_x = row[0][0] offset = free_col - first_item_x if check_columns_fit(unoccupied_columns, row, offset, row_length): return offset raise ValueError("Row cannot bossily fit in %r: %r" % (list(unoccupied_columns.keys()), row)) def check_columns_fit(unoccupied_columns, row, offset, row_length): """ Checks if all the occupied columns in the row fit in the indices given by free columns. >>> check_columns_fit({0,1,2,3}, [(0, True), (2, True)], 0, 4) True >>> check_columns_fit({0,2,3}, [(2, True), (3, True)], 0, 4) True >>> check_columns_fit({}, [(2, True), (3, True)], 0, 4) False >>> check_columns_fit({0}, [(2, True)], 2, 4) True >>> check_columns_fit({0}, [(3, True)], 2, 4) False """ for index, item in row: adjusted_index = (index + offset) % row_length # Check if the index is in the appropriate place. if adjusted_index not in unoccupied_columns: return False return True def print_square(row_queue, t): """ Prints a row queue as its conceptual square array. """ occupied_rows = {y: row for _, y, row in row_queue} empty_row = ', '.join('...' for _ in range(t)) for y in range(t): print('|', end=' ') if y not in occupied_rows: print(empty_row, end=' ') else: row = dict(occupied_rows[y]) all_cols = ('%3d' % row[x] if x in row else '...' for x in range(t)) print(', '.join(all_cols), end=' ') print("|") def trim_nones_from_right(xs): """ Returns the list without all the Nones at the right end. >>> trim_nones_from_right([1, 2, None, 4, None, 5, None, None]) [1, 2, None, 4, None, 5] """ # Find the first element that does not contain none. for i, item in enumerate(reversed(xs)): if item is not None: break return xs[:-i] def make_hash(keys, **kwargs): """ Creates a perfect hash function from the given keys. For a description of the keyword arguments see :py:func:`hash_parameters`. >>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34) >>> hf = make_hash(l) >>> hf(19) 1 >>> hash_parameters(l).slots[1] 19 """ params = hash_parameters(keys, **kwargs) t = params.t r = params.r offset = params.offset to_int = params.to_int if params.to_int else __identity def perfect_hash(x): val = to_int(x) + offset x = val % t y = val // t return x + r[y] # Undocumented properties, but used in make_dict()... perfect_hash.length = len(params.slots) perfect_hash.slots = params.slots return perfect_hash def make_dict(name, keys, **kwargs): """ Creates a dictionary-like mapping class that uses perfect hashing. ``name`` is the proper class name of the returned class. See ``hash_parameters()`` for documentation on all arguments after ``name``. >>> MyDict = make_dict('MyDict', '+-<>[],.', to_int=ord) >>> d = MyDict([('+', 1), ('-', 2)]) >>> d[','] = 3 >>> d MyDict([('+', 1), (',', 3), ('-', 2)]) >>> del d['+'] >>> del d['.'] Traceback (most recent call last): ... KeyError: '.' >>> len(d) 2 """ hash_func = make_hash(keys, **kwargs) slots = hash_func.slots # Create a docstring that at least describes where the class came from... doc = """ Dictionary-like object that uses perfect hashing. This class was generated by `%s.%s(%r, ...)`. """ % (__name__, make_dict.__name__, name) return create_dict_subclass(name, hash_func, slots, doc) if __name__ == '__main__': import doctest # Test the module. exit(doctest.testmod(verbose=False).failed)
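A small sketch of how the returned parameters are meant to be used, reusing the values from the first doctest above (t=3, r=(-1, -1, 1), offset=0 for keys 1, 5, 7):

# Evaluate the perfect hash by hand with the first doctest's parameters.
t, r, offset = 3, (-1, -1, 1), 0

def index_of(key):
    val = key + offset
    x, y = val % t, val // t
    return x + r[y]

# Keys 1, 5, 7 land on slots 0, 1, 2, matching slots=(1, 5, 7).
assert [index_of(k) for k in (1, 5, 7)] == [0, 1, 2]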
eddieantonio/perfection
perfection/getty.py
place_items_in_square
python
def place_items_in_square(items, t):
    # A minheap (because that's all that heapq supports :/)
    # of the length of each row. Why this is important is because
    # we'll be popping the largest rows when figuring out row displacements.
    # Each item is a tuple of (t - |row|, y, [(xpos_1, item_1), ...]).
    # Until the call to heapq.heapify(), the rows are ordered in
    # increasing row number (y).
    rows = [(t, y, []) for y in range(t)]

    for item in items:
        # Calculate the cell the item should fall in.
        x = item % t
        y = item // t

        # Push the item to its corresponding row...
        inverse_length, _, row_contents = rows[y]
        heapq.heappush(row_contents, (x, item))

        # Ensure the heap key is kept intact.
        rows[y] = inverse_length - 1, y, row_contents

    assert all(inv_len == t - len(rows) for inv_len, _, rows in rows)

    heapq.heapify(rows)

    # Return only rows that are populated.
    return [row for row in rows if row[2]]
Returns a list of rows that are stored as a priority queue to be
used with heapq functions.

>>> place_items_in_square([1,5,7], 4)
[(2, 1, [(1, 5), (3, 7)]), (3, 0, [(1, 1)])]
>>> place_items_in_square([1,5,7], 3)
[(2, 0, [(1, 1)]), (2, 1, [(2, 5)]), (2, 2, [(1, 7)])]
train
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/getty.py#L118-L154
null
#!/usr/bin/env python # coding: utf-8 from __future__ import print_function, division import math import collections import heapq from .utils import create_dict_subclass __all__ = ['hash_parameters', 'make_hash', 'make_dict'] HashInfo = collections.namedtuple('HashInfo', 't slots r offset to_int') __identity = lambda x: x def hash_parameters(keys, minimize=True, to_int=None): """ Calculates the parameters for a perfect hash. The result is returned as a HashInfo tuple which has the following fields: t The "table parameter". This is the minimum side length of the table used to create the hash. In practice, t**2 is the maximum size of the output hash. slots The original inputs mapped to a vector. This is the hash function. r The displacement vector. This is the displacement of the given row in the result vector. To find a given value, use ``x + r[y]``. offset The amount by which to offset all values (once converted to ints) to_int A function that converts the input to an int (if given). Keyword parameters: ``minimize`` Whether or not offset all integer keys internally by the minimum value. This typically results in smaller output. ``to_int`` A callable that converts the input keys to ints. If not specified, all keys should be given as ints. >>> hash_parameters([1, 5, 7], minimize=False) HashInfo(t=3, slots=(1, 5, 7), r=(-1, -1, 1), offset=0, to_int=None) >>> hash_parameters([1, 5, 7]) HashInfo(t=3, slots=(1, 5, 7), r=(0, 0, 2), offset=-1, to_int=None) >>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34) >>> phash = hash_parameters(l) >>> phash.slots (18, 19, 0, 21, 22, 3, 4, 24, 7, 26, 30, 10, 29, 13, 34, 15) For some values, the displacement vector will be rather empty: >>> hash_parameters('Andrea', to_int=ord).r (1, None, None, None, 0, -3, 4, None) """ # If to_int is not assigned, simply use the identity function. if to_int is None: to_int = __identity key_to_original = {to_int(original): original for original in keys} # Create a set of all items to be hashed. items = list(key_to_original.keys()) if minimize: offset = 0 - min(items) items = frozenset(x + offset for x in items) else: offset = 0 # 1. Start with a square array (not stored) that is t units on each side. # Choose a t such that t * t >= max(S) t = choose_best_t(items) assert t * t > max(items) and t * t >= len(items) # 2. Place each key K in the square at location (x,y), where # x = K mod t, y = K / t. row_queue = place_items_in_square(items, t) # 3. Arrange rows so that they'll fit into one row and generate a # displacement vector. final_row, displacement_vector = arrange_rows(row_queue, t) # Translate the internal keys to their original items. slots = tuple(key_to_original[item - offset] if item is not None else None for item in final_row) # Return the parameters return HashInfo( t=t, slots=slots, r=displacement_vector, offset=offset, to_int=to_int if to_int is not __identity else None ) def choose_best_t(items): minimum_allowable = int(math.sqrt(max(items)) + 1) if minimum_allowable ** 2 < len(items): return len(items) else: return minimum_allowable def arrange_rows(row_queue, t): """ Takes a priority queue as generated by place_items_in_square(). Arranges the items from its conceptual square to one list. Returns both the resultant vector, plus the displacement vector, to be used in the final output hash function. 
>>> rows = [(2, 1, [(0, 1), (1, 5)]), (3, 3, [(1, 7)])] >>> result, displacements = arrange_rows(rows, 4) >>> result (1, 5, 7) >>> displacements (None, 0, None, 1) >>> rows = [(1, 1, [(0, 1), (2, 7)]), (2, 2, [(1, 5)])] >>> result, displacements = arrange_rows(rows, 3) >>> result (1, 5, 7) >>> displacements (None, 0, 0) """ # Create a set of all of the unoccupied columns. max_columns = t ** 2 cols = ((x, True) for x in range(max_columns)) unoccupied_columns = collections.OrderedDict(cols) # Create the resultant and displacement vectors. result = [None] * max_columns displacements = [None] * t while row_queue: # Get the next row to place. _inverse_length, y, row = heapq.heappop(row_queue) offset = find_first_fit(unoccupied_columns, row, max_columns) # Calculate the offset of the first item. first_item_x = row[0][0] displacements[y] = offset for x, item in row: actual_x = x + offset result[actual_x] = item del unoccupied_columns[actual_x] return tuple(trim_nones_from_right(result)), tuple(displacements) def find_first_fit(unoccupied_columns, row, row_length): """ Finds the first index that the row's items can fit. """ for free_col in unoccupied_columns: # The offset is that such that the first item goes in the free column. first_item_x = row[0][0] offset = free_col - first_item_x if check_columns_fit(unoccupied_columns, row, offset, row_length): return offset raise ValueError("Row cannot bossily fit in %r: %r" % (list(unoccupied_columns.keys()), row)) def check_columns_fit(unoccupied_columns, row, offset, row_length): """ Checks if all the occupied columns in the row fit in the indices given by free columns. >>> check_columns_fit({0,1,2,3}, [(0, True), (2, True)], 0, 4) True >>> check_columns_fit({0,2,3}, [(2, True), (3, True)], 0, 4) True >>> check_columns_fit({}, [(2, True), (3, True)], 0, 4) False >>> check_columns_fit({0}, [(2, True)], 2, 4) True >>> check_columns_fit({0}, [(3, True)], 2, 4) False """ for index, item in row: adjusted_index = (index + offset) % row_length # Check if the index is in the appropriate place. if adjusted_index not in unoccupied_columns: return False return True def print_square(row_queue, t): """ Prints a row queue as its conceptual square array. """ occupied_rows = {y: row for _, y, row in row_queue} empty_row = ', '.join('...' for _ in range(t)) for y in range(t): print('|', end=' ') if y not in occupied_rows: print(empty_row, end=' ') else: row = dict(occupied_rows[y]) all_cols = ('%3d' % row[x] if x in row else '...' for x in range(t)) print(', '.join(all_cols), end=' ') print("|") def trim_nones_from_right(xs): """ Returns the list without all the Nones at the right end. >>> trim_nones_from_right([1, 2, None, 4, None, 5, None, None]) [1, 2, None, 4, None, 5] """ # Find the first element that does not contain none. for i, item in enumerate(reversed(xs)): if item is not None: break return xs[:-i] def make_hash(keys, **kwargs): """ Creates a perfect hash function from the given keys. For a description of the keyword arguments see :py:func:`hash_parameters`. >>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34) >>> hf = make_hash(l) >>> hf(19) 1 >>> hash_parameters(l).slots[1] 19 """ params = hash_parameters(keys, **kwargs) t = params.t r = params.r offset = params.offset to_int = params.to_int if params.to_int else __identity def perfect_hash(x): val = to_int(x) + offset x = val % t y = val // t return x + r[y] # Undocumented properties, but used in make_dict()... 
perfect_hash.length = len(params.slots) perfect_hash.slots = params.slots return perfect_hash def make_dict(name, keys, **kwargs): """ Creates a dictionary-like mapping class that uses perfect hashing. ``name`` is the proper class name of the returned class. See ``hash_parameters()`` for documentation on all arguments after ``name``. >>> MyDict = make_dict('MyDict', '+-<>[],.', to_int=ord) >>> d = MyDict([('+', 1), ('-', 2)]) >>> d[','] = 3 >>> d MyDict([('+', 1), (',', 3), ('-', 2)]) >>> del d['+'] >>> del d['.'] Traceback (most recent call last): ... KeyError: '.' >>> len(d) 2 """ hash_func = make_hash(keys, **kwargs) slots = hash_func.slots # Create a docstring that at least describes where the class came from... doc = """ Dictionary-like object that uses perfect hashing. This class was generated by `%s.%s(%r, ...)`. """ % (__name__, make_dict.__name__, name) return create_dict_subclass(name, hash_func, slots, doc) if __name__ == '__main__': import doctest # Test the module. exit(doctest.testmod(verbose=False).failed)
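To make the first place_items_in_square doctest above concrete, the cell of each item is just `item % t` and `item // t`; a small hand-worked sketch with that doctest's values:

# Reproduce the cell placement from the first doctest by hand (t = 4).
items, t = [1, 5, 7], 4
cells = {item: (item % t, item // t) for item in items}
# 1 -> (1, 0), 5 -> (1, 1), 7 -> (3, 1): row 1 holds two items and row 0 one,
# which is why the heap entries read (4 - 2, 1, ...) and (4 - 1, 0, ...).
assert cells == {1: (1, 0), 5: (1, 1), 7: (3, 1)}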
eddieantonio/perfection
perfection/getty.py
arrange_rows
python
def arrange_rows(row_queue, t):
    # Create a set of all of the unoccupied columns.
    max_columns = t ** 2
    cols = ((x, True) for x in range(max_columns))
    unoccupied_columns = collections.OrderedDict(cols)

    # Create the resultant and displacement vectors.
    result = [None] * max_columns
    displacements = [None] * t

    while row_queue:
        # Get the next row to place.
        _inverse_length, y, row = heapq.heappop(row_queue)

        offset = find_first_fit(unoccupied_columns, row, max_columns)
        # Calculate the offset of the first item.
        first_item_x = row[0][0]

        displacements[y] = offset
        for x, item in row:
            actual_x = x + offset
            result[actual_x] = item
            del unoccupied_columns[actual_x]

    return tuple(trim_nones_from_right(result)), tuple(displacements)
Takes a priority queue as generated by place_items_in_square().
Arranges the items from its conceptual square to one list. Returns
both the resultant vector and the displacement vector, to be used
in the final output hash function.

>>> rows = [(2, 1, [(0, 1), (1, 5)]), (3, 3, [(1, 7)])]
>>> result, displacements = arrange_rows(rows, 4)
>>> result
(1, 5, 7)
>>> displacements
(None, 0, None, 1)

>>> rows = [(1, 1, [(0, 1), (2, 7)]), (2, 2, [(1, 5)])]
>>> result, displacements = arrange_rows(rows, 3)
>>> result
(1, 5, 7)
>>> displacements
(None, 0, 0)
train
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/getty.py#L157-L202
[ "def find_first_fit(unoccupied_columns, row, row_length):\n \"\"\"\n Finds the first index that the row's items can fit.\n \"\"\"\n for free_col in unoccupied_columns:\n # The offset is that such that the first item goes in the free column.\n first_item_x = row[0][0]\n offset = free_col - first_item_x\n if check_columns_fit(unoccupied_columns, row, offset, row_length):\n return offset\n\n raise ValueError(\"Row cannot bossily fit in %r: %r\"\n % (list(unoccupied_columns.keys()), row))\n", "def trim_nones_from_right(xs):\n \"\"\"\n Returns the list without all the Nones at the right end.\n\n >>> trim_nones_from_right([1, 2, None, 4, None, 5, None, None])\n [1, 2, None, 4, None, 5]\n\n \"\"\"\n # Find the first element that does not contain none.\n for i, item in enumerate(reversed(xs)):\n if item is not None:\n break\n\n return xs[:-i]\n" ]
#!/usr/bin/env python # coding: utf-8 from __future__ import print_function, division import math import collections import heapq from .utils import create_dict_subclass __all__ = ['hash_parameters', 'make_hash', 'make_dict'] HashInfo = collections.namedtuple('HashInfo', 't slots r offset to_int') __identity = lambda x: x def hash_parameters(keys, minimize=True, to_int=None): """ Calculates the parameters for a perfect hash. The result is returned as a HashInfo tuple which has the following fields: t The "table parameter". This is the minimum side length of the table used to create the hash. In practice, t**2 is the maximum size of the output hash. slots The original inputs mapped to a vector. This is the hash function. r The displacement vector. This is the displacement of the given row in the result vector. To find a given value, use ``x + r[y]``. offset The amount by which to offset all values (once converted to ints) to_int A function that converts the input to an int (if given). Keyword parameters: ``minimize`` Whether or not offset all integer keys internally by the minimum value. This typically results in smaller output. ``to_int`` A callable that converts the input keys to ints. If not specified, all keys should be given as ints. >>> hash_parameters([1, 5, 7], minimize=False) HashInfo(t=3, slots=(1, 5, 7), r=(-1, -1, 1), offset=0, to_int=None) >>> hash_parameters([1, 5, 7]) HashInfo(t=3, slots=(1, 5, 7), r=(0, 0, 2), offset=-1, to_int=None) >>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34) >>> phash = hash_parameters(l) >>> phash.slots (18, 19, 0, 21, 22, 3, 4, 24, 7, 26, 30, 10, 29, 13, 34, 15) For some values, the displacement vector will be rather empty: >>> hash_parameters('Andrea', to_int=ord).r (1, None, None, None, 0, -3, 4, None) """ # If to_int is not assigned, simply use the identity function. if to_int is None: to_int = __identity key_to_original = {to_int(original): original for original in keys} # Create a set of all items to be hashed. items = list(key_to_original.keys()) if minimize: offset = 0 - min(items) items = frozenset(x + offset for x in items) else: offset = 0 # 1. Start with a square array (not stored) that is t units on each side. # Choose a t such that t * t >= max(S) t = choose_best_t(items) assert t * t > max(items) and t * t >= len(items) # 2. Place each key K in the square at location (x,y), where # x = K mod t, y = K / t. row_queue = place_items_in_square(items, t) # 3. Arrange rows so that they'll fit into one row and generate a # displacement vector. final_row, displacement_vector = arrange_rows(row_queue, t) # Translate the internal keys to their original items. slots = tuple(key_to_original[item - offset] if item is not None else None for item in final_row) # Return the parameters return HashInfo( t=t, slots=slots, r=displacement_vector, offset=offset, to_int=to_int if to_int is not __identity else None ) def choose_best_t(items): minimum_allowable = int(math.sqrt(max(items)) + 1) if minimum_allowable ** 2 < len(items): return len(items) else: return minimum_allowable def place_items_in_square(items, t): """ Returns a list of rows that are stored as a priority queue to be used with heapq functions. >>> place_items_in_square([1,5,7], 4) [(2, 1, [(1, 5), (3, 7)]), (3, 0, [(1, 1)])] >>> place_items_in_square([1,5,7], 3) [(2, 0, [(1, 1)]), (2, 1, [(2, 5)]), (2, 2, [(1, 7)])] """ # A minheap (because that's all that heapq supports :/) # of the length of each row. 
Why this is important is because # we'll be popping the largest rows when figuring out row displacements. # Each item is a tuple of (t - |row|, y, [(xpos_1, item_1), ...]). # Until the call to heapq.heapify(), the rows are ordered in # increasing row number (y). rows = [(t, y, []) for y in range(t)] for item in items: # Calculate the cell the item should fall in. x = item % t y = item // t # Push the item to its corresponding row... inverse_length, _, row_contents = rows[y] heapq.heappush(row_contents, (x, item)) # Ensure the heap key is kept intact. rows[y] = inverse_length - 1, y, row_contents assert all(inv_len == t - len(rows) for inv_len, _, rows in rows) heapq.heapify(rows) # Return only rows that are populated. return [row for row in rows if row[2]] def find_first_fit(unoccupied_columns, row, row_length): """ Finds the first index that the row's items can fit. """ for free_col in unoccupied_columns: # The offset is that such that the first item goes in the free column. first_item_x = row[0][0] offset = free_col - first_item_x if check_columns_fit(unoccupied_columns, row, offset, row_length): return offset raise ValueError("Row cannot bossily fit in %r: %r" % (list(unoccupied_columns.keys()), row)) def check_columns_fit(unoccupied_columns, row, offset, row_length): """ Checks if all the occupied columns in the row fit in the indices given by free columns. >>> check_columns_fit({0,1,2,3}, [(0, True), (2, True)], 0, 4) True >>> check_columns_fit({0,2,3}, [(2, True), (3, True)], 0, 4) True >>> check_columns_fit({}, [(2, True), (3, True)], 0, 4) False >>> check_columns_fit({0}, [(2, True)], 2, 4) True >>> check_columns_fit({0}, [(3, True)], 2, 4) False """ for index, item in row: adjusted_index = (index + offset) % row_length # Check if the index is in the appropriate place. if adjusted_index not in unoccupied_columns: return False return True def print_square(row_queue, t): """ Prints a row queue as its conceptual square array. """ occupied_rows = {y: row for _, y, row in row_queue} empty_row = ', '.join('...' for _ in range(t)) for y in range(t): print('|', end=' ') if y not in occupied_rows: print(empty_row, end=' ') else: row = dict(occupied_rows[y]) all_cols = ('%3d' % row[x] if x in row else '...' for x in range(t)) print(', '.join(all_cols), end=' ') print("|") def trim_nones_from_right(xs): """ Returns the list without all the Nones at the right end. >>> trim_nones_from_right([1, 2, None, 4, None, 5, None, None]) [1, 2, None, 4, None, 5] """ # Find the first element that does not contain none. for i, item in enumerate(reversed(xs)): if item is not None: break return xs[:-i] def make_hash(keys, **kwargs): """ Creates a perfect hash function from the given keys. For a description of the keyword arguments see :py:func:`hash_parameters`. >>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34) >>> hf = make_hash(l) >>> hf(19) 1 >>> hash_parameters(l).slots[1] 19 """ params = hash_parameters(keys, **kwargs) t = params.t r = params.r offset = params.offset to_int = params.to_int if params.to_int else __identity def perfect_hash(x): val = to_int(x) + offset x = val % t y = val // t return x + r[y] # Undocumented properties, but used in make_dict()... perfect_hash.length = len(params.slots) perfect_hash.slots = params.slots return perfect_hash def make_dict(name, keys, **kwargs): """ Creates a dictionary-like mapping class that uses perfect hashing. ``name`` is the proper class name of the returned class. 
See ``hash_parameters()`` for documentation on all arguments after ``name``. >>> MyDict = make_dict('MyDict', '+-<>[],.', to_int=ord) >>> d = MyDict([('+', 1), ('-', 2)]) >>> d[','] = 3 >>> d MyDict([('+', 1), (',', 3), ('-', 2)]) >>> del d['+'] >>> del d['.'] Traceback (most recent call last): ... KeyError: '.' >>> len(d) 2 """ hash_func = make_hash(keys, **kwargs) slots = hash_func.slots # Create a docstring that at least describes where the class came from... doc = """ Dictionary-like object that uses perfect hashing. This class was generated by `%s.%s(%r, ...)`. """ % (__name__, make_dict.__name__, name) return create_dict_subclass(name, hash_func, slots, doc) if __name__ == '__main__': import doctest # Test the module. exit(doctest.testmod(verbose=False).failed)
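A quick hand-check of the first arrange_rows doctest above: the displacement vector is what makes `x + r[y]` land each item on its slot (values copied from that doctest):

# t = 4; the doctest rows place item 1 at (x=0, y=1), item 5 at (x=1, y=1)
# and item 7 at (x=1, y=3).
result = (1, 5, 7)
r = (None, 0, None, 1)
placed = [(0, 1, 1), (1, 1, 5), (1, 3, 7)]   # (x, y, item)
for x, y, item in placed:
    assert result[x + r[y]] == item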
eddieantonio/perfection
perfection/getty.py
find_first_fit
python
def find_first_fit(unoccupied_columns, row, row_length):
    for free_col in unoccupied_columns:
        # The offset is such that the first item goes in the free column.
        first_item_x = row[0][0]
        offset = free_col - first_item_x
        if check_columns_fit(unoccupied_columns, row, offset, row_length):
            return offset

    raise ValueError("Row cannot bossily fit in %r: %r"
                     % (list(unoccupied_columns.keys()), row))
Finds the first offset at which the row's items fit into the unoccupied columns.
train
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/getty.py#L205-L217
[ "def check_columns_fit(unoccupied_columns, row, offset, row_length):\n \"\"\"\n Checks if all the occupied columns in the row fit in the indices\n given by free columns.\n\n >>> check_columns_fit({0,1,2,3}, [(0, True), (2, True)], 0, 4)\n True\n >>> check_columns_fit({0,2,3}, [(2, True), (3, True)], 0, 4)\n True\n >>> check_columns_fit({}, [(2, True), (3, True)], 0, 4)\n False\n >>> check_columns_fit({0}, [(2, True)], 2, 4)\n True\n >>> check_columns_fit({0}, [(3, True)], 2, 4)\n False\n\n \"\"\"\n for index, item in row:\n adjusted_index = (index + offset) % row_length\n\n # Check if the index is in the appropriate place.\n if adjusted_index not in unoccupied_columns:\n return False\n\n return True\n" ]
#!/usr/bin/env python # coding: utf-8 from __future__ import print_function, division import math import collections import heapq from .utils import create_dict_subclass __all__ = ['hash_parameters', 'make_hash', 'make_dict'] HashInfo = collections.namedtuple('HashInfo', 't slots r offset to_int') __identity = lambda x: x def hash_parameters(keys, minimize=True, to_int=None): """ Calculates the parameters for a perfect hash. The result is returned as a HashInfo tuple which has the following fields: t The "table parameter". This is the minimum side length of the table used to create the hash. In practice, t**2 is the maximum size of the output hash. slots The original inputs mapped to a vector. This is the hash function. r The displacement vector. This is the displacement of the given row in the result vector. To find a given value, use ``x + r[y]``. offset The amount by which to offset all values (once converted to ints) to_int A function that converts the input to an int (if given). Keyword parameters: ``minimize`` Whether or not offset all integer keys internally by the minimum value. This typically results in smaller output. ``to_int`` A callable that converts the input keys to ints. If not specified, all keys should be given as ints. >>> hash_parameters([1, 5, 7], minimize=False) HashInfo(t=3, slots=(1, 5, 7), r=(-1, -1, 1), offset=0, to_int=None) >>> hash_parameters([1, 5, 7]) HashInfo(t=3, slots=(1, 5, 7), r=(0, 0, 2), offset=-1, to_int=None) >>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34) >>> phash = hash_parameters(l) >>> phash.slots (18, 19, 0, 21, 22, 3, 4, 24, 7, 26, 30, 10, 29, 13, 34, 15) For some values, the displacement vector will be rather empty: >>> hash_parameters('Andrea', to_int=ord).r (1, None, None, None, 0, -3, 4, None) """ # If to_int is not assigned, simply use the identity function. if to_int is None: to_int = __identity key_to_original = {to_int(original): original for original in keys} # Create a set of all items to be hashed. items = list(key_to_original.keys()) if minimize: offset = 0 - min(items) items = frozenset(x + offset for x in items) else: offset = 0 # 1. Start with a square array (not stored) that is t units on each side. # Choose a t such that t * t >= max(S) t = choose_best_t(items) assert t * t > max(items) and t * t >= len(items) # 2. Place each key K in the square at location (x,y), where # x = K mod t, y = K / t. row_queue = place_items_in_square(items, t) # 3. Arrange rows so that they'll fit into one row and generate a # displacement vector. final_row, displacement_vector = arrange_rows(row_queue, t) # Translate the internal keys to their original items. slots = tuple(key_to_original[item - offset] if item is not None else None for item in final_row) # Return the parameters return HashInfo( t=t, slots=slots, r=displacement_vector, offset=offset, to_int=to_int if to_int is not __identity else None ) def choose_best_t(items): minimum_allowable = int(math.sqrt(max(items)) + 1) if minimum_allowable ** 2 < len(items): return len(items) else: return minimum_allowable def place_items_in_square(items, t): """ Returns a list of rows that are stored as a priority queue to be used with heapq functions. >>> place_items_in_square([1,5,7], 4) [(2, 1, [(1, 5), (3, 7)]), (3, 0, [(1, 1)])] >>> place_items_in_square([1,5,7], 3) [(2, 0, [(1, 1)]), (2, 1, [(2, 5)]), (2, 2, [(1, 7)])] """ # A minheap (because that's all that heapq supports :/) # of the length of each row. 
Why this is important is because # we'll be popping the largest rows when figuring out row displacements. # Each item is a tuple of (t - |row|, y, [(xpos_1, item_1), ...]). # Until the call to heapq.heapify(), the rows are ordered in # increasing row number (y). rows = [(t, y, []) for y in range(t)] for item in items: # Calculate the cell the item should fall in. x = item % t y = item // t # Push the item to its corresponding row... inverse_length, _, row_contents = rows[y] heapq.heappush(row_contents, (x, item)) # Ensure the heap key is kept intact. rows[y] = inverse_length - 1, y, row_contents assert all(inv_len == t - len(rows) for inv_len, _, rows in rows) heapq.heapify(rows) # Return only rows that are populated. return [row for row in rows if row[2]] def arrange_rows(row_queue, t): """ Takes a priority queue as generated by place_items_in_square(). Arranges the items from its conceptual square to one list. Returns both the resultant vector, plus the displacement vector, to be used in the final output hash function. >>> rows = [(2, 1, [(0, 1), (1, 5)]), (3, 3, [(1, 7)])] >>> result, displacements = arrange_rows(rows, 4) >>> result (1, 5, 7) >>> displacements (None, 0, None, 1) >>> rows = [(1, 1, [(0, 1), (2, 7)]), (2, 2, [(1, 5)])] >>> result, displacements = arrange_rows(rows, 3) >>> result (1, 5, 7) >>> displacements (None, 0, 0) """ # Create a set of all of the unoccupied columns. max_columns = t ** 2 cols = ((x, True) for x in range(max_columns)) unoccupied_columns = collections.OrderedDict(cols) # Create the resultant and displacement vectors. result = [None] * max_columns displacements = [None] * t while row_queue: # Get the next row to place. _inverse_length, y, row = heapq.heappop(row_queue) offset = find_first_fit(unoccupied_columns, row, max_columns) # Calculate the offset of the first item. first_item_x = row[0][0] displacements[y] = offset for x, item in row: actual_x = x + offset result[actual_x] = item del unoccupied_columns[actual_x] return tuple(trim_nones_from_right(result)), tuple(displacements) def check_columns_fit(unoccupied_columns, row, offset, row_length): """ Checks if all the occupied columns in the row fit in the indices given by free columns. >>> check_columns_fit({0,1,2,3}, [(0, True), (2, True)], 0, 4) True >>> check_columns_fit({0,2,3}, [(2, True), (3, True)], 0, 4) True >>> check_columns_fit({}, [(2, True), (3, True)], 0, 4) False >>> check_columns_fit({0}, [(2, True)], 2, 4) True >>> check_columns_fit({0}, [(3, True)], 2, 4) False """ for index, item in row: adjusted_index = (index + offset) % row_length # Check if the index is in the appropriate place. if adjusted_index not in unoccupied_columns: return False return True def print_square(row_queue, t): """ Prints a row queue as its conceptual square array. """ occupied_rows = {y: row for _, y, row in row_queue} empty_row = ', '.join('...' for _ in range(t)) for y in range(t): print('|', end=' ') if y not in occupied_rows: print(empty_row, end=' ') else: row = dict(occupied_rows[y]) all_cols = ('%3d' % row[x] if x in row else '...' for x in range(t)) print(', '.join(all_cols), end=' ') print("|") def trim_nones_from_right(xs): """ Returns the list without all the Nones at the right end. >>> trim_nones_from_right([1, 2, None, 4, None, 5, None, None]) [1, 2, None, 4, None, 5] """ # Find the first element that does not contain none. for i, item in enumerate(reversed(xs)): if item is not None: break return xs[:-i] def make_hash(keys, **kwargs): """ Creates a perfect hash function from the given keys. 
For a description of the keyword arguments see :py:func:`hash_parameters`. >>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34) >>> hf = make_hash(l) >>> hf(19) 1 >>> hash_parameters(l).slots[1] 19 """ params = hash_parameters(keys, **kwargs) t = params.t r = params.r offset = params.offset to_int = params.to_int if params.to_int else __identity def perfect_hash(x): val = to_int(x) + offset x = val % t y = val // t return x + r[y] # Undocumented properties, but used in make_dict()... perfect_hash.length = len(params.slots) perfect_hash.slots = params.slots return perfect_hash def make_dict(name, keys, **kwargs): """ Creates a dictionary-like mapping class that uses perfect hashing. ``name`` is the proper class name of the returned class. See ``hash_parameters()`` for documentation on all arguments after ``name``. >>> MyDict = make_dict('MyDict', '+-<>[],.', to_int=ord) >>> d = MyDict([('+', 1), ('-', 2)]) >>> d[','] = 3 >>> d MyDict([('+', 1), (',', 3), ('-', 2)]) >>> del d['+'] >>> del d['.'] Traceback (most recent call last): ... KeyError: '.' >>> len(d) 2 """ hash_func = make_hash(keys, **kwargs) slots = hash_func.slots # Create a docstring that at least describes where the class came from... doc = """ Dictionary-like object that uses perfect hashing. This class was generated by `%s.%s(%r, ...)`. """ % (__name__, make_dict.__name__, name) return create_dict_subclass(name, hash_func, slots, doc) if __name__ == '__main__': import doctest # Test the module. exit(doctest.testmod(verbose=False).failed)
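A hand-worked instance of the first-fit search in the record above; the free columns, row positions, and row length are borrowed from the check_columns_fit doctests rather than computed by the library:

# Free columns {0, 2, 3}, a row occupying x = 2 and x = 3, row_length = 4.
free = {0, 2, 3}
row_xs = [2, 3]
row_length = 4
offset = None
for free_col in sorted(free):
    candidate = free_col - row_xs[0]
    if all((x + candidate) % row_length in free for x in row_xs):
        offset = candidate
        break
# Column 0 fails (it would also need the occupied column 1); column 2 fits.
assert offset == 0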
eddieantonio/perfection
perfection/getty.py
check_columns_fit
python
def check_columns_fit(unoccupied_columns, row, offset, row_length):
    for index, item in row:
        adjusted_index = (index + offset) % row_length

        # Check if the index is in the appropriate place.
        if adjusted_index not in unoccupied_columns:
            return False

    return True
Checks if all the occupied columns in the row fit in the indices
given by free columns.

>>> check_columns_fit({0,1,2,3}, [(0, True), (2, True)], 0, 4)
True
>>> check_columns_fit({0,2,3}, [(2, True), (3, True)], 0, 4)
True
>>> check_columns_fit({}, [(2, True), (3, True)], 0, 4)
False
>>> check_columns_fit({0}, [(2, True)], 2, 4)
True
>>> check_columns_fit({0}, [(3, True)], 2, 4)
False
train
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/getty.py#L220-L244
null
#!/usr/bin/env python # coding: utf-8 from __future__ import print_function, division import math import collections import heapq from .utils import create_dict_subclass __all__ = ['hash_parameters', 'make_hash', 'make_dict'] HashInfo = collections.namedtuple('HashInfo', 't slots r offset to_int') __identity = lambda x: x def hash_parameters(keys, minimize=True, to_int=None): """ Calculates the parameters for a perfect hash. The result is returned as a HashInfo tuple which has the following fields: t The "table parameter". This is the minimum side length of the table used to create the hash. In practice, t**2 is the maximum size of the output hash. slots The original inputs mapped to a vector. This is the hash function. r The displacement vector. This is the displacement of the given row in the result vector. To find a given value, use ``x + r[y]``. offset The amount by which to offset all values (once converted to ints) to_int A function that converts the input to an int (if given). Keyword parameters: ``minimize`` Whether or not offset all integer keys internally by the minimum value. This typically results in smaller output. ``to_int`` A callable that converts the input keys to ints. If not specified, all keys should be given as ints. >>> hash_parameters([1, 5, 7], minimize=False) HashInfo(t=3, slots=(1, 5, 7), r=(-1, -1, 1), offset=0, to_int=None) >>> hash_parameters([1, 5, 7]) HashInfo(t=3, slots=(1, 5, 7), r=(0, 0, 2), offset=-1, to_int=None) >>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34) >>> phash = hash_parameters(l) >>> phash.slots (18, 19, 0, 21, 22, 3, 4, 24, 7, 26, 30, 10, 29, 13, 34, 15) For some values, the displacement vector will be rather empty: >>> hash_parameters('Andrea', to_int=ord).r (1, None, None, None, 0, -3, 4, None) """ # If to_int is not assigned, simply use the identity function. if to_int is None: to_int = __identity key_to_original = {to_int(original): original for original in keys} # Create a set of all items to be hashed. items = list(key_to_original.keys()) if minimize: offset = 0 - min(items) items = frozenset(x + offset for x in items) else: offset = 0 # 1. Start with a square array (not stored) that is t units on each side. # Choose a t such that t * t >= max(S) t = choose_best_t(items) assert t * t > max(items) and t * t >= len(items) # 2. Place each key K in the square at location (x,y), where # x = K mod t, y = K / t. row_queue = place_items_in_square(items, t) # 3. Arrange rows so that they'll fit into one row and generate a # displacement vector. final_row, displacement_vector = arrange_rows(row_queue, t) # Translate the internal keys to their original items. slots = tuple(key_to_original[item - offset] if item is not None else None for item in final_row) # Return the parameters return HashInfo( t=t, slots=slots, r=displacement_vector, offset=offset, to_int=to_int if to_int is not __identity else None ) def choose_best_t(items): minimum_allowable = int(math.sqrt(max(items)) + 1) if minimum_allowable ** 2 < len(items): return len(items) else: return minimum_allowable def place_items_in_square(items, t): """ Returns a list of rows that are stored as a priority queue to be used with heapq functions. >>> place_items_in_square([1,5,7], 4) [(2, 1, [(1, 5), (3, 7)]), (3, 0, [(1, 1)])] >>> place_items_in_square([1,5,7], 3) [(2, 0, [(1, 1)]), (2, 1, [(2, 5)]), (2, 2, [(1, 7)])] """ # A minheap (because that's all that heapq supports :/) # of the length of each row. 
Why this is important is because # we'll be popping the largest rows when figuring out row displacements. # Each item is a tuple of (t - |row|, y, [(xpos_1, item_1), ...]). # Until the call to heapq.heapify(), the rows are ordered in # increasing row number (y). rows = [(t, y, []) for y in range(t)] for item in items: # Calculate the cell the item should fall in. x = item % t y = item // t # Push the item to its corresponding row... inverse_length, _, row_contents = rows[y] heapq.heappush(row_contents, (x, item)) # Ensure the heap key is kept intact. rows[y] = inverse_length - 1, y, row_contents assert all(inv_len == t - len(rows) for inv_len, _, rows in rows) heapq.heapify(rows) # Return only rows that are populated. return [row for row in rows if row[2]] def arrange_rows(row_queue, t): """ Takes a priority queue as generated by place_items_in_square(). Arranges the items from its conceptual square to one list. Returns both the resultant vector, plus the displacement vector, to be used in the final output hash function. >>> rows = [(2, 1, [(0, 1), (1, 5)]), (3, 3, [(1, 7)])] >>> result, displacements = arrange_rows(rows, 4) >>> result (1, 5, 7) >>> displacements (None, 0, None, 1) >>> rows = [(1, 1, [(0, 1), (2, 7)]), (2, 2, [(1, 5)])] >>> result, displacements = arrange_rows(rows, 3) >>> result (1, 5, 7) >>> displacements (None, 0, 0) """ # Create a set of all of the unoccupied columns. max_columns = t ** 2 cols = ((x, True) for x in range(max_columns)) unoccupied_columns = collections.OrderedDict(cols) # Create the resultant and displacement vectors. result = [None] * max_columns displacements = [None] * t while row_queue: # Get the next row to place. _inverse_length, y, row = heapq.heappop(row_queue) offset = find_first_fit(unoccupied_columns, row, max_columns) # Calculate the offset of the first item. first_item_x = row[0][0] displacements[y] = offset for x, item in row: actual_x = x + offset result[actual_x] = item del unoccupied_columns[actual_x] return tuple(trim_nones_from_right(result)), tuple(displacements) def find_first_fit(unoccupied_columns, row, row_length): """ Finds the first index that the row's items can fit. """ for free_col in unoccupied_columns: # The offset is that such that the first item goes in the free column. first_item_x = row[0][0] offset = free_col - first_item_x if check_columns_fit(unoccupied_columns, row, offset, row_length): return offset raise ValueError("Row cannot bossily fit in %r: %r" % (list(unoccupied_columns.keys()), row)) def print_square(row_queue, t): """ Prints a row queue as its conceptual square array. """ occupied_rows = {y: row for _, y, row in row_queue} empty_row = ', '.join('...' for _ in range(t)) for y in range(t): print('|', end=' ') if y not in occupied_rows: print(empty_row, end=' ') else: row = dict(occupied_rows[y]) all_cols = ('%3d' % row[x] if x in row else '...' for x in range(t)) print(', '.join(all_cols), end=' ') print("|") def trim_nones_from_right(xs): """ Returns the list without all the Nones at the right end. >>> trim_nones_from_right([1, 2, None, 4, None, 5, None, None]) [1, 2, None, 4, None, 5] """ # Find the first element that does not contain none. for i, item in enumerate(reversed(xs)): if item is not None: break return xs[:-i] def make_hash(keys, **kwargs): """ Creates a perfect hash function from the given keys. For a description of the keyword arguments see :py:func:`hash_parameters`. 
>>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34) >>> hf = make_hash(l) >>> hf(19) 1 >>> hash_parameters(l).slots[1] 19 """ params = hash_parameters(keys, **kwargs) t = params.t r = params.r offset = params.offset to_int = params.to_int if params.to_int else __identity def perfect_hash(x): val = to_int(x) + offset x = val % t y = val // t return x + r[y] # Undocumented properties, but used in make_dict()... perfect_hash.length = len(params.slots) perfect_hash.slots = params.slots return perfect_hash def make_dict(name, keys, **kwargs): """ Creates a dictionary-like mapping class that uses perfect hashing. ``name`` is the proper class name of the returned class. See ``hash_parameters()`` for documentation on all arguments after ``name``. >>> MyDict = make_dict('MyDict', '+-<>[],.', to_int=ord) >>> d = MyDict([('+', 1), ('-', 2)]) >>> d[','] = 3 >>> d MyDict([('+', 1), (',', 3), ('-', 2)]) >>> del d['+'] >>> del d['.'] Traceback (most recent call last): ... KeyError: '.' >>> len(d) 2 """ hash_func = make_hash(keys, **kwargs) slots = hash_func.slots # Create a docstring that at least describes where the class came from... doc = """ Dictionary-like object that uses perfect hashing. This class was generated by `%s.%s(%r, ...)`. """ % (__name__, make_dict.__name__, name) return create_dict_subclass(name, hash_func, slots, doc) if __name__ == '__main__': import doctest # Test the module. exit(doctest.testmod(verbose=False).failed)
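The modular wrap-around in the check above explains the last two check_columns_fit doctests; a two-line sanity check:

# check_columns_fit({0}, [(2, True)], 2, 4) is True because the adjusted
# index wraps onto the free column 0; with x = 3 it lands on occupied column 1.
row_length = 4
assert (2 + 2) % row_length == 0
assert (3 + 2) % row_length == 1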
eddieantonio/perfection
perfection/getty.py
print_square
python
def print_square(row_queue, t):
    occupied_rows = {y: row for _, y, row in row_queue}
    empty_row = ', '.join('...' for _ in range(t))

    for y in range(t):
        print('|', end=' ')
        if y not in occupied_rows:
            print(empty_row, end=' ')
        else:
            row = dict(occupied_rows[y])
            all_cols = ('%3d' % row[x] if x in row else '...'
                        for x in range(t))
            print(', '.join(all_cols), end=' ')
        print("|")
Prints a row queue as its conceptual square array.
train
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/getty.py#L247-L264
null
#!/usr/bin/env python # coding: utf-8 from __future__ import print_function, division import math import collections import heapq from .utils import create_dict_subclass __all__ = ['hash_parameters', 'make_hash', 'make_dict'] HashInfo = collections.namedtuple('HashInfo', 't slots r offset to_int') __identity = lambda x: x def hash_parameters(keys, minimize=True, to_int=None): """ Calculates the parameters for a perfect hash. The result is returned as a HashInfo tuple which has the following fields: t The "table parameter". This is the minimum side length of the table used to create the hash. In practice, t**2 is the maximum size of the output hash. slots The original inputs mapped to a vector. This is the hash function. r The displacement vector. This is the displacement of the given row in the result vector. To find a given value, use ``x + r[y]``. offset The amount by which to offset all values (once converted to ints) to_int A function that converts the input to an int (if given). Keyword parameters: ``minimize`` Whether or not offset all integer keys internally by the minimum value. This typically results in smaller output. ``to_int`` A callable that converts the input keys to ints. If not specified, all keys should be given as ints. >>> hash_parameters([1, 5, 7], minimize=False) HashInfo(t=3, slots=(1, 5, 7), r=(-1, -1, 1), offset=0, to_int=None) >>> hash_parameters([1, 5, 7]) HashInfo(t=3, slots=(1, 5, 7), r=(0, 0, 2), offset=-1, to_int=None) >>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34) >>> phash = hash_parameters(l) >>> phash.slots (18, 19, 0, 21, 22, 3, 4, 24, 7, 26, 30, 10, 29, 13, 34, 15) For some values, the displacement vector will be rather empty: >>> hash_parameters('Andrea', to_int=ord).r (1, None, None, None, 0, -3, 4, None) """ # If to_int is not assigned, simply use the identity function. if to_int is None: to_int = __identity key_to_original = {to_int(original): original for original in keys} # Create a set of all items to be hashed. items = list(key_to_original.keys()) if minimize: offset = 0 - min(items) items = frozenset(x + offset for x in items) else: offset = 0 # 1. Start with a square array (not stored) that is t units on each side. # Choose a t such that t * t >= max(S) t = choose_best_t(items) assert t * t > max(items) and t * t >= len(items) # 2. Place each key K in the square at location (x,y), where # x = K mod t, y = K / t. row_queue = place_items_in_square(items, t) # 3. Arrange rows so that they'll fit into one row and generate a # displacement vector. final_row, displacement_vector = arrange_rows(row_queue, t) # Translate the internal keys to their original items. slots = tuple(key_to_original[item - offset] if item is not None else None for item in final_row) # Return the parameters return HashInfo( t=t, slots=slots, r=displacement_vector, offset=offset, to_int=to_int if to_int is not __identity else None ) def choose_best_t(items): minimum_allowable = int(math.sqrt(max(items)) + 1) if minimum_allowable ** 2 < len(items): return len(items) else: return minimum_allowable def place_items_in_square(items, t): """ Returns a list of rows that are stored as a priority queue to be used with heapq functions. >>> place_items_in_square([1,5,7], 4) [(2, 1, [(1, 5), (3, 7)]), (3, 0, [(1, 1)])] >>> place_items_in_square([1,5,7], 3) [(2, 0, [(1, 1)]), (2, 1, [(2, 5)]), (2, 2, [(1, 7)])] """ # A minheap (because that's all that heapq supports :/) # of the length of each row. 
Why this is important is because # we'll be popping the largest rows when figuring out row displacements. # Each item is a tuple of (t - |row|, y, [(xpos_1, item_1), ...]). # Until the call to heapq.heapify(), the rows are ordered in # increasing row number (y). rows = [(t, y, []) for y in range(t)] for item in items: # Calculate the cell the item should fall in. x = item % t y = item // t # Push the item to its corresponding row... inverse_length, _, row_contents = rows[y] heapq.heappush(row_contents, (x, item)) # Ensure the heap key is kept intact. rows[y] = inverse_length - 1, y, row_contents assert all(inv_len == t - len(rows) for inv_len, _, rows in rows) heapq.heapify(rows) # Return only rows that are populated. return [row for row in rows if row[2]] def arrange_rows(row_queue, t): """ Takes a priority queue as generated by place_items_in_square(). Arranges the items from its conceptual square to one list. Returns both the resultant vector, plus the displacement vector, to be used in the final output hash function. >>> rows = [(2, 1, [(0, 1), (1, 5)]), (3, 3, [(1, 7)])] >>> result, displacements = arrange_rows(rows, 4) >>> result (1, 5, 7) >>> displacements (None, 0, None, 1) >>> rows = [(1, 1, [(0, 1), (2, 7)]), (2, 2, [(1, 5)])] >>> result, displacements = arrange_rows(rows, 3) >>> result (1, 5, 7) >>> displacements (None, 0, 0) """ # Create a set of all of the unoccupied columns. max_columns = t ** 2 cols = ((x, True) for x in range(max_columns)) unoccupied_columns = collections.OrderedDict(cols) # Create the resultant and displacement vectors. result = [None] * max_columns displacements = [None] * t while row_queue: # Get the next row to place. _inverse_length, y, row = heapq.heappop(row_queue) offset = find_first_fit(unoccupied_columns, row, max_columns) # Calculate the offset of the first item. first_item_x = row[0][0] displacements[y] = offset for x, item in row: actual_x = x + offset result[actual_x] = item del unoccupied_columns[actual_x] return tuple(trim_nones_from_right(result)), tuple(displacements) def find_first_fit(unoccupied_columns, row, row_length): """ Finds the first index that the row's items can fit. """ for free_col in unoccupied_columns: # The offset is that such that the first item goes in the free column. first_item_x = row[0][0] offset = free_col - first_item_x if check_columns_fit(unoccupied_columns, row, offset, row_length): return offset raise ValueError("Row cannot bossily fit in %r: %r" % (list(unoccupied_columns.keys()), row)) def check_columns_fit(unoccupied_columns, row, offset, row_length): """ Checks if all the occupied columns in the row fit in the indices given by free columns. >>> check_columns_fit({0,1,2,3}, [(0, True), (2, True)], 0, 4) True >>> check_columns_fit({0,2,3}, [(2, True), (3, True)], 0, 4) True >>> check_columns_fit({}, [(2, True), (3, True)], 0, 4) False >>> check_columns_fit({0}, [(2, True)], 2, 4) True >>> check_columns_fit({0}, [(3, True)], 2, 4) False """ for index, item in row: adjusted_index = (index + offset) % row_length # Check if the index is in the appropriate place. if adjusted_index not in unoccupied_columns: return False return True def trim_nones_from_right(xs): """ Returns the list without all the Nones at the right end. >>> trim_nones_from_right([1, 2, None, 4, None, 5, None, None]) [1, 2, None, 4, None, 5] """ # Find the first element that does not contain none. 
for i, item in enumerate(reversed(xs)): if item is not None: break return xs[:-i] def make_hash(keys, **kwargs): """ Creates a perfect hash function from the given keys. For a description of the keyword arguments see :py:func:`hash_parameters`. >>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34) >>> hf = make_hash(l) >>> hf(19) 1 >>> hash_parameters(l).slots[1] 19 """ params = hash_parameters(keys, **kwargs) t = params.t r = params.r offset = params.offset to_int = params.to_int if params.to_int else __identity def perfect_hash(x): val = to_int(x) + offset x = val % t y = val // t return x + r[y] # Undocumented properties, but used in make_dict()... perfect_hash.length = len(params.slots) perfect_hash.slots = params.slots return perfect_hash def make_dict(name, keys, **kwargs): """ Creates a dictionary-like mapping class that uses perfect hashing. ``name`` is the proper class name of the returned class. See ``hash_parameters()`` for documentation on all arguments after ``name``. >>> MyDict = make_dict('MyDict', '+-<>[],.', to_int=ord) >>> d = MyDict([('+', 1), ('-', 2)]) >>> d[','] = 3 >>> d MyDict([('+', 1), (',', 3), ('-', 2)]) >>> del d['+'] >>> del d['.'] Traceback (most recent call last): ... KeyError: '.' >>> len(d) 2 """ hash_func = make_hash(keys, **kwargs) slots = hash_func.slots # Create a docstring that at least describes where the class came from... doc = """ Dictionary-like object that uses perfect hashing. This class was generated by `%s.%s(%r, ...)`. """ % (__name__, make_dict.__name__, name) return create_dict_subclass(name, hash_func, slots, doc) if __name__ == '__main__': import doctest # Test the module. exit(doctest.testmod(verbose=False).failed)
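A minimal usage sketch for the function above, assuming the package is importable as `perfection` (the expected output is reconstructed from the formatting logic in the code, so treat it as approximate):

from perfection.getty import place_items_in_square, print_square

# Place three keys in a 3x3 conceptual square, then render it.
rows = place_items_in_square([1, 5, 7], 3)
print_square(rows, 3)
# Expected to print something like:
# | ...,   1, ... |
# | ..., ...,   5 |
# | ...,   7, ... |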
eddieantonio/perfection
perfection/getty.py
trim_nones_from_right
python
def trim_nones_from_right(xs):
    # Find the first element that does not contain none.
    for i, item in enumerate(reversed(xs)):
        if item is not None:
            break
    return xs[:-i]
Returns the list without all the Nones at the right end.

>>> trim_nones_from_right([1, 2, None, 4, None, 5, None, None])
[1, 2, None, 4, None, 5]
train
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/getty.py#L267-L280
null
#!/usr/bin/env python # coding: utf-8 from __future__ import print_function, division import math import collections import heapq from .utils import create_dict_subclass __all__ = ['hash_parameters', 'make_hash', 'make_dict'] HashInfo = collections.namedtuple('HashInfo', 't slots r offset to_int') __identity = lambda x: x def hash_parameters(keys, minimize=True, to_int=None): """ Calculates the parameters for a perfect hash. The result is returned as a HashInfo tuple which has the following fields: t The "table parameter". This is the minimum side length of the table used to create the hash. In practice, t**2 is the maximum size of the output hash. slots The original inputs mapped to a vector. This is the hash function. r The displacement vector. This is the displacement of the given row in the result vector. To find a given value, use ``x + r[y]``. offset The amount by which to offset all values (once converted to ints) to_int A function that converts the input to an int (if given). Keyword parameters: ``minimize`` Whether or not offset all integer keys internally by the minimum value. This typically results in smaller output. ``to_int`` A callable that converts the input keys to ints. If not specified, all keys should be given as ints. >>> hash_parameters([1, 5, 7], minimize=False) HashInfo(t=3, slots=(1, 5, 7), r=(-1, -1, 1), offset=0, to_int=None) >>> hash_parameters([1, 5, 7]) HashInfo(t=3, slots=(1, 5, 7), r=(0, 0, 2), offset=-1, to_int=None) >>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34) >>> phash = hash_parameters(l) >>> phash.slots (18, 19, 0, 21, 22, 3, 4, 24, 7, 26, 30, 10, 29, 13, 34, 15) For some values, the displacement vector will be rather empty: >>> hash_parameters('Andrea', to_int=ord).r (1, None, None, None, 0, -3, 4, None) """ # If to_int is not assigned, simply use the identity function. if to_int is None: to_int = __identity key_to_original = {to_int(original): original for original in keys} # Create a set of all items to be hashed. items = list(key_to_original.keys()) if minimize: offset = 0 - min(items) items = frozenset(x + offset for x in items) else: offset = 0 # 1. Start with a square array (not stored) that is t units on each side. # Choose a t such that t * t >= max(S) t = choose_best_t(items) assert t * t > max(items) and t * t >= len(items) # 2. Place each key K in the square at location (x,y), where # x = K mod t, y = K / t. row_queue = place_items_in_square(items, t) # 3. Arrange rows so that they'll fit into one row and generate a # displacement vector. final_row, displacement_vector = arrange_rows(row_queue, t) # Translate the internal keys to their original items. slots = tuple(key_to_original[item - offset] if item is not None else None for item in final_row) # Return the parameters return HashInfo( t=t, slots=slots, r=displacement_vector, offset=offset, to_int=to_int if to_int is not __identity else None ) def choose_best_t(items): minimum_allowable = int(math.sqrt(max(items)) + 1) if minimum_allowable ** 2 < len(items): return len(items) else: return minimum_allowable def place_items_in_square(items, t): """ Returns a list of rows that are stored as a priority queue to be used with heapq functions. >>> place_items_in_square([1,5,7], 4) [(2, 1, [(1, 5), (3, 7)]), (3, 0, [(1, 1)])] >>> place_items_in_square([1,5,7], 3) [(2, 0, [(1, 1)]), (2, 1, [(2, 5)]), (2, 2, [(1, 7)])] """ # A minheap (because that's all that heapq supports :/) # of the length of each row. 
Why this is important is because # we'll be popping the largest rows when figuring out row displacements. # Each item is a tuple of (t - |row|, y, [(xpos_1, item_1), ...]). # Until the call to heapq.heapify(), the rows are ordered in # increasing row number (y). rows = [(t, y, []) for y in range(t)] for item in items: # Calculate the cell the item should fall in. x = item % t y = item // t # Push the item to its corresponding row... inverse_length, _, row_contents = rows[y] heapq.heappush(row_contents, (x, item)) # Ensure the heap key is kept intact. rows[y] = inverse_length - 1, y, row_contents assert all(inv_len == t - len(rows) for inv_len, _, rows in rows) heapq.heapify(rows) # Return only rows that are populated. return [row for row in rows if row[2]] def arrange_rows(row_queue, t): """ Takes a priority queue as generated by place_items_in_square(). Arranges the items from its conceptual square to one list. Returns both the resultant vector, plus the displacement vector, to be used in the final output hash function. >>> rows = [(2, 1, [(0, 1), (1, 5)]), (3, 3, [(1, 7)])] >>> result, displacements = arrange_rows(rows, 4) >>> result (1, 5, 7) >>> displacements (None, 0, None, 1) >>> rows = [(1, 1, [(0, 1), (2, 7)]), (2, 2, [(1, 5)])] >>> result, displacements = arrange_rows(rows, 3) >>> result (1, 5, 7) >>> displacements (None, 0, 0) """ # Create a set of all of the unoccupied columns. max_columns = t ** 2 cols = ((x, True) for x in range(max_columns)) unoccupied_columns = collections.OrderedDict(cols) # Create the resultant and displacement vectors. result = [None] * max_columns displacements = [None] * t while row_queue: # Get the next row to place. _inverse_length, y, row = heapq.heappop(row_queue) offset = find_first_fit(unoccupied_columns, row, max_columns) # Calculate the offset of the first item. first_item_x = row[0][0] displacements[y] = offset for x, item in row: actual_x = x + offset result[actual_x] = item del unoccupied_columns[actual_x] return tuple(trim_nones_from_right(result)), tuple(displacements) def find_first_fit(unoccupied_columns, row, row_length): """ Finds the first index that the row's items can fit. """ for free_col in unoccupied_columns: # The offset is that such that the first item goes in the free column. first_item_x = row[0][0] offset = free_col - first_item_x if check_columns_fit(unoccupied_columns, row, offset, row_length): return offset raise ValueError("Row cannot bossily fit in %r: %r" % (list(unoccupied_columns.keys()), row)) def check_columns_fit(unoccupied_columns, row, offset, row_length): """ Checks if all the occupied columns in the row fit in the indices given by free columns. >>> check_columns_fit({0,1,2,3}, [(0, True), (2, True)], 0, 4) True >>> check_columns_fit({0,2,3}, [(2, True), (3, True)], 0, 4) True >>> check_columns_fit({}, [(2, True), (3, True)], 0, 4) False >>> check_columns_fit({0}, [(2, True)], 2, 4) True >>> check_columns_fit({0}, [(3, True)], 2, 4) False """ for index, item in row: adjusted_index = (index + offset) % row_length # Check if the index is in the appropriate place. if adjusted_index not in unoccupied_columns: return False return True def print_square(row_queue, t): """ Prints a row queue as its conceptual square array. """ occupied_rows = {y: row for _, y, row in row_queue} empty_row = ', '.join('...' for _ in range(t)) for y in range(t): print('|', end=' ') if y not in occupied_rows: print(empty_row, end=' ') else: row = dict(occupied_rows[y]) all_cols = ('%3d' % row[x] if x in row else '...' 
for x in range(t)) print(', '.join(all_cols), end=' ') print("|") def make_hash(keys, **kwargs): """ Creates a perfect hash function from the given keys. For a description of the keyword arguments see :py:func:`hash_parameters`. >>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34) >>> hf = make_hash(l) >>> hf(19) 1 >>> hash_parameters(l).slots[1] 19 """ params = hash_parameters(keys, **kwargs) t = params.t r = params.r offset = params.offset to_int = params.to_int if params.to_int else __identity def perfect_hash(x): val = to_int(x) + offset x = val % t y = val // t return x + r[y] # Undocumented properties, but used in make_dict()... perfect_hash.length = len(params.slots) perfect_hash.slots = params.slots return perfect_hash def make_dict(name, keys, **kwargs): """ Creates a dictionary-like mapping class that uses perfect hashing. ``name`` is the proper class name of the returned class. See ``hash_parameters()`` for documentation on all arguments after ``name``. >>> MyDict = make_dict('MyDict', '+-<>[],.', to_int=ord) >>> d = MyDict([('+', 1), ('-', 2)]) >>> d[','] = 3 >>> d MyDict([('+', 1), (',', 3), ('-', 2)]) >>> del d['+'] >>> del d['.'] Traceback (most recent call last): ... KeyError: '.' >>> len(d) 2 """ hash_func = make_hash(keys, **kwargs) slots = hash_func.slots # Create a docstring that at least describes where the class came from... doc = """ Dictionary-like object that uses perfect hashing. This class was generated by `%s.%s(%r, ...)`. """ % (__name__, make_dict.__name__, name) return create_dict_subclass(name, hash_func, slots, doc) if __name__ == '__main__': import doctest # Test the module. exit(doctest.testmod(verbose=False).failed)
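One caveat worth noting: as written, an input whose last element is not None takes the `xs[:-0]` path and returns an empty list. Whether that case is reachable from arrange_rows() is not established here, but a hypothetical edge-safe variant (illustrative name, not part of the library) would look like this:

def trim_nones_from_right_safe(xs):
    # Walk back from the end past trailing Nones, then slice once.
    end = len(xs)
    while end and xs[end - 1] is None:
        end -= 1
    return xs[:end]

assert trim_nones_from_right_safe([1, 2, None, 4, None, 5, None, None]) == [1, 2, None, 4, None, 5]
assert trim_nones_from_right_safe([1, 2]) == [1, 2]
assert trim_nones_from_right_safe([]) == []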
eddieantonio/perfection
perfection/getty.py
make_hash
python
def make_hash(keys, **kwargs):
    params = hash_parameters(keys, **kwargs)

    t = params.t
    r = params.r
    offset = params.offset
    to_int = params.to_int if params.to_int else __identity

    def perfect_hash(x):
        val = to_int(x) + offset
        x = val % t
        y = val // t
        return x + r[y]

    # Undocumented properties, but used in make_dict()...
    perfect_hash.length = len(params.slots)
    perfect_hash.slots = params.slots
    return perfect_hash
Creates a perfect hash function from the given keys. For a
description of the keyword arguments see :py:func:`hash_parameters`.

>>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34)
>>> hf = make_hash(l)
>>> hf(19)
1
>>> hash_parameters(l).slots[1]
19
train
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/getty.py#L283-L311
[ "def hash_parameters(keys, minimize=True, to_int=None):\n \"\"\"\n Calculates the parameters for a perfect hash. The result is returned\n as a HashInfo tuple which has the following fields:\n\n t\n The \"table parameter\". This is the minimum side length of the\n table used to create the hash. In practice, t**2 is the maximum\n size of the output hash.\n slots\n The original inputs mapped to a vector. This is the hash\n function.\n r\n The displacement vector. This is the displacement of the given\n row in the result vector. To find a given value, use\n ``x + r[y]``.\n offset\n The amount by which to offset all values (once converted to ints)\n to_int\n A function that converts the input to an int (if given).\n\n Keyword parameters:\n\n ``minimize``\n Whether or not offset all integer keys internally by the minimum\n value. This typically results in smaller output.\n ``to_int``\n A callable that converts the input keys to ints. If not\n specified, all keys should be given as ints.\n\n\n >>> hash_parameters([1, 5, 7], minimize=False)\n HashInfo(t=3, slots=(1, 5, 7), r=(-1, -1, 1), offset=0, to_int=None)\n\n >>> hash_parameters([1, 5, 7])\n HashInfo(t=3, slots=(1, 5, 7), r=(0, 0, 2), offset=-1, to_int=None)\n\n >>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34)\n >>> phash = hash_parameters(l)\n >>> phash.slots\n (18, 19, 0, 21, 22, 3, 4, 24, 7, 26, 30, 10, 29, 13, 34, 15)\n\n For some values, the displacement vector will be rather empty:\n\n >>> hash_parameters('Andrea', to_int=ord).r\n (1, None, None, None, 0, -3, 4, None)\n \"\"\"\n\n # If to_int is not assigned, simply use the identity function.\n if to_int is None:\n to_int = __identity\n\n key_to_original = {to_int(original): original for original in keys}\n\n # Create a set of all items to be hashed.\n items = list(key_to_original.keys())\n\n if minimize:\n offset = 0 - min(items)\n items = frozenset(x + offset for x in items)\n else:\n offset = 0\n\n # 1. Start with a square array (not stored) that is t units on each side.\n # Choose a t such that t * t >= max(S)\n t = choose_best_t(items)\n assert t * t > max(items) and t * t >= len(items)\n\n # 2. Place each key K in the square at location (x,y), where\n # x = K mod t, y = K / t.\n row_queue = place_items_in_square(items, t)\n\n # 3. Arrange rows so that they'll fit into one row and generate a\n # displacement vector.\n final_row, displacement_vector = arrange_rows(row_queue, t)\n\n # Translate the internal keys to their original items.\n slots = tuple(key_to_original[item - offset] if item is not None else None\n for item in final_row)\n\n # Return the parameters\n return HashInfo(\n t=t,\n slots=slots,\n r=displacement_vector,\n offset=offset,\n to_int=to_int if to_int is not __identity else None\n )\n" ]
#!/usr/bin/env python # coding: utf-8 from __future__ import print_function, division import math import collections import heapq from .utils import create_dict_subclass __all__ = ['hash_parameters', 'make_hash', 'make_dict'] HashInfo = collections.namedtuple('HashInfo', 't slots r offset to_int') __identity = lambda x: x def hash_parameters(keys, minimize=True, to_int=None): """ Calculates the parameters for a perfect hash. The result is returned as a HashInfo tuple which has the following fields: t The "table parameter". This is the minimum side length of the table used to create the hash. In practice, t**2 is the maximum size of the output hash. slots The original inputs mapped to a vector. This is the hash function. r The displacement vector. This is the displacement of the given row in the result vector. To find a given value, use ``x + r[y]``. offset The amount by which to offset all values (once converted to ints) to_int A function that converts the input to an int (if given). Keyword parameters: ``minimize`` Whether or not offset all integer keys internally by the minimum value. This typically results in smaller output. ``to_int`` A callable that converts the input keys to ints. If not specified, all keys should be given as ints. >>> hash_parameters([1, 5, 7], minimize=False) HashInfo(t=3, slots=(1, 5, 7), r=(-1, -1, 1), offset=0, to_int=None) >>> hash_parameters([1, 5, 7]) HashInfo(t=3, slots=(1, 5, 7), r=(0, 0, 2), offset=-1, to_int=None) >>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34) >>> phash = hash_parameters(l) >>> phash.slots (18, 19, 0, 21, 22, 3, 4, 24, 7, 26, 30, 10, 29, 13, 34, 15) For some values, the displacement vector will be rather empty: >>> hash_parameters('Andrea', to_int=ord).r (1, None, None, None, 0, -3, 4, None) """ # If to_int is not assigned, simply use the identity function. if to_int is None: to_int = __identity key_to_original = {to_int(original): original for original in keys} # Create a set of all items to be hashed. items = list(key_to_original.keys()) if minimize: offset = 0 - min(items) items = frozenset(x + offset for x in items) else: offset = 0 # 1. Start with a square array (not stored) that is t units on each side. # Choose a t such that t * t >= max(S) t = choose_best_t(items) assert t * t > max(items) and t * t >= len(items) # 2. Place each key K in the square at location (x,y), where # x = K mod t, y = K / t. row_queue = place_items_in_square(items, t) # 3. Arrange rows so that they'll fit into one row and generate a # displacement vector. final_row, displacement_vector = arrange_rows(row_queue, t) # Translate the internal keys to their original items. slots = tuple(key_to_original[item - offset] if item is not None else None for item in final_row) # Return the parameters return HashInfo( t=t, slots=slots, r=displacement_vector, offset=offset, to_int=to_int if to_int is not __identity else None ) def choose_best_t(items): minimum_allowable = int(math.sqrt(max(items)) + 1) if minimum_allowable ** 2 < len(items): return len(items) else: return minimum_allowable def place_items_in_square(items, t): """ Returns a list of rows that are stored as a priority queue to be used with heapq functions. >>> place_items_in_square([1,5,7], 4) [(2, 1, [(1, 5), (3, 7)]), (3, 0, [(1, 1)])] >>> place_items_in_square([1,5,7], 3) [(2, 0, [(1, 1)]), (2, 1, [(2, 5)]), (2, 2, [(1, 7)])] """ # A minheap (because that's all that heapq supports :/) # of the length of each row. 
Why this is important is because # we'll be popping the largest rows when figuring out row displacements. # Each item is a tuple of (t - |row|, y, [(xpos_1, item_1), ...]). # Until the call to heapq.heapify(), the rows are ordered in # increasing row number (y). rows = [(t, y, []) for y in range(t)] for item in items: # Calculate the cell the item should fall in. x = item % t y = item // t # Push the item to its corresponding row... inverse_length, _, row_contents = rows[y] heapq.heappush(row_contents, (x, item)) # Ensure the heap key is kept intact. rows[y] = inverse_length - 1, y, row_contents assert all(inv_len == t - len(rows) for inv_len, _, rows in rows) heapq.heapify(rows) # Return only rows that are populated. return [row for row in rows if row[2]] def arrange_rows(row_queue, t): """ Takes a priority queue as generated by place_items_in_square(). Arranges the items from its conceptual square to one list. Returns both the resultant vector, plus the displacement vector, to be used in the final output hash function. >>> rows = [(2, 1, [(0, 1), (1, 5)]), (3, 3, [(1, 7)])] >>> result, displacements = arrange_rows(rows, 4) >>> result (1, 5, 7) >>> displacements (None, 0, None, 1) >>> rows = [(1, 1, [(0, 1), (2, 7)]), (2, 2, [(1, 5)])] >>> result, displacements = arrange_rows(rows, 3) >>> result (1, 5, 7) >>> displacements (None, 0, 0) """ # Create a set of all of the unoccupied columns. max_columns = t ** 2 cols = ((x, True) for x in range(max_columns)) unoccupied_columns = collections.OrderedDict(cols) # Create the resultant and displacement vectors. result = [None] * max_columns displacements = [None] * t while row_queue: # Get the next row to place. _inverse_length, y, row = heapq.heappop(row_queue) offset = find_first_fit(unoccupied_columns, row, max_columns) # Calculate the offset of the first item. first_item_x = row[0][0] displacements[y] = offset for x, item in row: actual_x = x + offset result[actual_x] = item del unoccupied_columns[actual_x] return tuple(trim_nones_from_right(result)), tuple(displacements) def find_first_fit(unoccupied_columns, row, row_length): """ Finds the first index that the row's items can fit. """ for free_col in unoccupied_columns: # The offset is that such that the first item goes in the free column. first_item_x = row[0][0] offset = free_col - first_item_x if check_columns_fit(unoccupied_columns, row, offset, row_length): return offset raise ValueError("Row cannot bossily fit in %r: %r" % (list(unoccupied_columns.keys()), row)) def check_columns_fit(unoccupied_columns, row, offset, row_length): """ Checks if all the occupied columns in the row fit in the indices given by free columns. >>> check_columns_fit({0,1,2,3}, [(0, True), (2, True)], 0, 4) True >>> check_columns_fit({0,2,3}, [(2, True), (3, True)], 0, 4) True >>> check_columns_fit({}, [(2, True), (3, True)], 0, 4) False >>> check_columns_fit({0}, [(2, True)], 2, 4) True >>> check_columns_fit({0}, [(3, True)], 2, 4) False """ for index, item in row: adjusted_index = (index + offset) % row_length # Check if the index is in the appropriate place. if adjusted_index not in unoccupied_columns: return False return True def print_square(row_queue, t): """ Prints a row queue as its conceptual square array. """ occupied_rows = {y: row for _, y, row in row_queue} empty_row = ', '.join('...' for _ in range(t)) for y in range(t): print('|', end=' ') if y not in occupied_rows: print(empty_row, end=' ') else: row = dict(occupied_rows[y]) all_cols = ('%3d' % row[x] if x in row else '...' 
for x in range(t)) print(', '.join(all_cols), end=' ') print("|") def trim_nones_from_right(xs): """ Returns the list without all the Nones at the right end. >>> trim_nones_from_right([1, 2, None, 4, None, 5, None, None]) [1, 2, None, 4, None, 5] """ # Find the first element that does not contain none. for i, item in enumerate(reversed(xs)): if item is not None: break return xs[:-i] def make_dict(name, keys, **kwargs): """ Creates a dictionary-like mapping class that uses perfect hashing. ``name`` is the proper class name of the returned class. See ``hash_parameters()`` for documentation on all arguments after ``name``. >>> MyDict = make_dict('MyDict', '+-<>[],.', to_int=ord) >>> d = MyDict([('+', 1), ('-', 2)]) >>> d[','] = 3 >>> d MyDict([('+', 1), (',', 3), ('-', 2)]) >>> del d['+'] >>> del d['.'] Traceback (most recent call last): ... KeyError: '.' >>> len(d) 2 """ hash_func = make_hash(keys, **kwargs) slots = hash_func.slots # Create a docstring that at least describes where the class came from... doc = """ Dictionary-like object that uses perfect hashing. This class was generated by `%s.%s(%r, ...)`. """ % (__name__, make_dict.__name__, name) return create_dict_subclass(name, hash_func, slots, doc) if __name__ == '__main__': import doctest # Test the module. exit(doctest.testmod(verbose=False).failed)
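A short sketch of the returned hash function in use, mirroring the doctest above (assumes the package is importable as `perfection`):

from perfection.getty import make_hash

keys = (0, 3, 4, 7, 10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34)
hf = make_hash(keys)

# Every key hashes to the slot that stores it, with no collisions.
assert all(hf.slots[hf(k)] == k for k in keys)
assert len({hf(k) for k in keys}) == len(keys)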
eddieantonio/perfection
perfection/getty.py
make_dict
python
def make_dict(name, keys, **kwargs):
    hash_func = make_hash(keys, **kwargs)
    slots = hash_func.slots

    # Create a docstring that at least describes where the class came from...
    doc = """
    Dictionary-like object that uses perfect hashing.

    This class was generated by `%s.%s(%r, ...)`.
    """ % (__name__, make_dict.__name__, name)

    return create_dict_subclass(name, hash_func, slots, doc)
Creates a dictionary-like mapping class that uses perfect hashing.

``name`` is the proper class name of the returned class.

See ``hash_parameters()`` for documentation on all arguments after
``name``.

>>> MyDict = make_dict('MyDict', '+-<>[],.', to_int=ord)
>>> d = MyDict([('+', 1), ('-', 2)])
>>> d[','] = 3
>>> d
MyDict([('+', 1), (',', 3), ('-', 2)])
>>> del d['+']
>>> del d['.']
Traceback (most recent call last):
    ...
KeyError: '.'
>>> len(d)
2
train
https://github.com/eddieantonio/perfection/blob/69b7a06b31a15bd9534c69d4bdcc2e48e8ddfc43/perfection/getty.py#L314-L343
[ "def make_hash(keys, **kwargs):\n \"\"\"\n Creates a perfect hash function from the given keys. For a\n description of the keyword arguments see :py:func:`hash_parameters`.\n\n >>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34)\n >>> hf = make_hash(l)\n >>> hf(19)\n 1\n >>> hash_parameters(l).slots[1]\n 19\n \"\"\"\n params = hash_parameters(keys, **kwargs)\n\n t = params.t\n r = params.r\n offset = params.offset\n to_int = params.to_int if params.to_int else __identity\n\n def perfect_hash(x):\n val = to_int(x) + offset\n x = val % t\n y = val // t\n return x + r[y]\n\n # Undocumented properties, but used in make_dict()...\n perfect_hash.length = len(params.slots)\n perfect_hash.slots = params.slots\n return perfect_hash\n", "def create_dict_subclass(name, hash_func, slots, doc):\n \"\"\"\n Creates a dict subclass named name, using the hash_function to index\n hash_length items. Doc should be any additional documentation added to the\n class.\n \"\"\"\n\n hash_length = len(slots)\n\n # Returns array index -- raises a KeyError if the key does not match\n # its slot value.\n def index_or_key_error(key):\n index = hash_func(key)\n # Make sure the key is **exactly** the same.\n if key != slots[index]:\n raise KeyError(key)\n return index\n\n def init(self, *args, **kwargs):\n self._arr = [None] * hash_length\n self._len = 0\n\n # Delegate initialization to update provided by MutableMapping:\n self.update(*args, **kwargs)\n\n def getitem(self, key):\n index = index_or_key_error(key)\n if self._arr[index] is None:\n raise KeyError(key)\n return self._arr[index][1]\n\n def setitem(self, key, value):\n index = index_or_key_error(key)\n self._arr[index] = (key, value)\n\n def delitem(self, key):\n index = index_or_key_error(key)\n if self._arr[index] is None:\n raise KeyError(key)\n self._arr[index] = None\n\n def dict_iter(self):\n return (pair[0] for pair in self._arr if pair is not None)\n\n def dict_len(self):\n # TODO: Make this O(1) using auxiliary state?\n return sum(1 for _ in self)\n\n def dict_repr(self):\n arr_repr = (repr(pair) for pair in self._arr if pair is not None)\n return ''.join((name, '([', ', '.join(arr_repr), '])'))\n\n # Inheriting from MutableMapping gives us a whole whackload of methods for\n # free.\n bases = (collections.MutableMapping,)\n\n return type(name, bases, {\n '__init__': init,\n '__doc__': doc,\n\n '__getitem__': getitem,\n '__setitem__': setitem,\n '__delitem__': delitem,\n '__iter__': dict_iter,\n '__len__': dict_len,\n\n '__repr__': dict_repr,\n })\n" ]
#!/usr/bin/env python # coding: utf-8 from __future__ import print_function, division import math import collections import heapq from .utils import create_dict_subclass __all__ = ['hash_parameters', 'make_hash', 'make_dict'] HashInfo = collections.namedtuple('HashInfo', 't slots r offset to_int') __identity = lambda x: x def hash_parameters(keys, minimize=True, to_int=None): """ Calculates the parameters for a perfect hash. The result is returned as a HashInfo tuple which has the following fields: t The "table parameter". This is the minimum side length of the table used to create the hash. In practice, t**2 is the maximum size of the output hash. slots The original inputs mapped to a vector. This is the hash function. r The displacement vector. This is the displacement of the given row in the result vector. To find a given value, use ``x + r[y]``. offset The amount by which to offset all values (once converted to ints) to_int A function that converts the input to an int (if given). Keyword parameters: ``minimize`` Whether or not offset all integer keys internally by the minimum value. This typically results in smaller output. ``to_int`` A callable that converts the input keys to ints. If not specified, all keys should be given as ints. >>> hash_parameters([1, 5, 7], minimize=False) HashInfo(t=3, slots=(1, 5, 7), r=(-1, -1, 1), offset=0, to_int=None) >>> hash_parameters([1, 5, 7]) HashInfo(t=3, slots=(1, 5, 7), r=(0, 0, 2), offset=-1, to_int=None) >>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34) >>> phash = hash_parameters(l) >>> phash.slots (18, 19, 0, 21, 22, 3, 4, 24, 7, 26, 30, 10, 29, 13, 34, 15) For some values, the displacement vector will be rather empty: >>> hash_parameters('Andrea', to_int=ord).r (1, None, None, None, 0, -3, 4, None) """ # If to_int is not assigned, simply use the identity function. if to_int is None: to_int = __identity key_to_original = {to_int(original): original for original in keys} # Create a set of all items to be hashed. items = list(key_to_original.keys()) if minimize: offset = 0 - min(items) items = frozenset(x + offset for x in items) else: offset = 0 # 1. Start with a square array (not stored) that is t units on each side. # Choose a t such that t * t >= max(S) t = choose_best_t(items) assert t * t > max(items) and t * t >= len(items) # 2. Place each key K in the square at location (x,y), where # x = K mod t, y = K / t. row_queue = place_items_in_square(items, t) # 3. Arrange rows so that they'll fit into one row and generate a # displacement vector. final_row, displacement_vector = arrange_rows(row_queue, t) # Translate the internal keys to their original items. slots = tuple(key_to_original[item - offset] if item is not None else None for item in final_row) # Return the parameters return HashInfo( t=t, slots=slots, r=displacement_vector, offset=offset, to_int=to_int if to_int is not __identity else None ) def choose_best_t(items): minimum_allowable = int(math.sqrt(max(items)) + 1) if minimum_allowable ** 2 < len(items): return len(items) else: return minimum_allowable def place_items_in_square(items, t): """ Returns a list of rows that are stored as a priority queue to be used with heapq functions. >>> place_items_in_square([1,5,7], 4) [(2, 1, [(1, 5), (3, 7)]), (3, 0, [(1, 1)])] >>> place_items_in_square([1,5,7], 3) [(2, 0, [(1, 1)]), (2, 1, [(2, 5)]), (2, 2, [(1, 7)])] """ # A minheap (because that's all that heapq supports :/) # of the length of each row. 
Why this is important is because # we'll be popping the largest rows when figuring out row displacements. # Each item is a tuple of (t - |row|, y, [(xpos_1, item_1), ...]). # Until the call to heapq.heapify(), the rows are ordered in # increasing row number (y). rows = [(t, y, []) for y in range(t)] for item in items: # Calculate the cell the item should fall in. x = item % t y = item // t # Push the item to its corresponding row... inverse_length, _, row_contents = rows[y] heapq.heappush(row_contents, (x, item)) # Ensure the heap key is kept intact. rows[y] = inverse_length - 1, y, row_contents assert all(inv_len == t - len(rows) for inv_len, _, rows in rows) heapq.heapify(rows) # Return only rows that are populated. return [row for row in rows if row[2]] def arrange_rows(row_queue, t): """ Takes a priority queue as generated by place_items_in_square(). Arranges the items from its conceptual square to one list. Returns both the resultant vector, plus the displacement vector, to be used in the final output hash function. >>> rows = [(2, 1, [(0, 1), (1, 5)]), (3, 3, [(1, 7)])] >>> result, displacements = arrange_rows(rows, 4) >>> result (1, 5, 7) >>> displacements (None, 0, None, 1) >>> rows = [(1, 1, [(0, 1), (2, 7)]), (2, 2, [(1, 5)])] >>> result, displacements = arrange_rows(rows, 3) >>> result (1, 5, 7) >>> displacements (None, 0, 0) """ # Create a set of all of the unoccupied columns. max_columns = t ** 2 cols = ((x, True) for x in range(max_columns)) unoccupied_columns = collections.OrderedDict(cols) # Create the resultant and displacement vectors. result = [None] * max_columns displacements = [None] * t while row_queue: # Get the next row to place. _inverse_length, y, row = heapq.heappop(row_queue) offset = find_first_fit(unoccupied_columns, row, max_columns) # Calculate the offset of the first item. first_item_x = row[0][0] displacements[y] = offset for x, item in row: actual_x = x + offset result[actual_x] = item del unoccupied_columns[actual_x] return tuple(trim_nones_from_right(result)), tuple(displacements) def find_first_fit(unoccupied_columns, row, row_length): """ Finds the first index that the row's items can fit. """ for free_col in unoccupied_columns: # The offset is that such that the first item goes in the free column. first_item_x = row[0][0] offset = free_col - first_item_x if check_columns_fit(unoccupied_columns, row, offset, row_length): return offset raise ValueError("Row cannot bossily fit in %r: %r" % (list(unoccupied_columns.keys()), row)) def check_columns_fit(unoccupied_columns, row, offset, row_length): """ Checks if all the occupied columns in the row fit in the indices given by free columns. >>> check_columns_fit({0,1,2,3}, [(0, True), (2, True)], 0, 4) True >>> check_columns_fit({0,2,3}, [(2, True), (3, True)], 0, 4) True >>> check_columns_fit({}, [(2, True), (3, True)], 0, 4) False >>> check_columns_fit({0}, [(2, True)], 2, 4) True >>> check_columns_fit({0}, [(3, True)], 2, 4) False """ for index, item in row: adjusted_index = (index + offset) % row_length # Check if the index is in the appropriate place. if adjusted_index not in unoccupied_columns: return False return True def print_square(row_queue, t): """ Prints a row queue as its conceptual square array. """ occupied_rows = {y: row for _, y, row in row_queue} empty_row = ', '.join('...' for _ in range(t)) for y in range(t): print('|', end=' ') if y not in occupied_rows: print(empty_row, end=' ') else: row = dict(occupied_rows[y]) all_cols = ('%3d' % row[x] if x in row else '...' 
for x in range(t)) print(', '.join(all_cols), end=' ') print("|") def trim_nones_from_right(xs): """ Returns the list without all the Nones at the right end. >>> trim_nones_from_right([1, 2, None, 4, None, 5, None, None]) [1, 2, None, 4, None, 5] """ # Find the first element that does not contain none. for i, item in enumerate(reversed(xs)): if item is not None: break return xs[:-i] def make_hash(keys, **kwargs): """ Creates a perfect hash function from the given keys. For a description of the keyword arguments see :py:func:`hash_parameters`. >>> l = (0, 3, 4, 7 ,10, 13, 15, 18, 19, 21, 22, 24, 26, 29, 30, 34) >>> hf = make_hash(l) >>> hf(19) 1 >>> hash_parameters(l).slots[1] 19 """ params = hash_parameters(keys, **kwargs) t = params.t r = params.r offset = params.offset to_int = params.to_int if params.to_int else __identity def perfect_hash(x): val = to_int(x) + offset x = val % t y = val // t return x + r[y] # Undocumented properties, but used in make_dict()... perfect_hash.length = len(params.slots) perfect_hash.slots = params.slots return perfect_hash if __name__ == '__main__': import doctest # Test the module. exit(doctest.testmod(verbose=False).failed)
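A condensed usage sketch, following the doctest above (assumes the package is importable as `perfection`):

from perfection.getty import make_dict

MyDict = make_dict('MyDict', '+-<>[],.', to_int=ord)
d = MyDict([('+', 1), ('-', 2)])
d[','] = 3

assert d['+'] == 1 and len(d) == 3
del d['+']
assert len(d) == 2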
python-useful-helpers/threaded
setup.py
_extension
python
def _extension(modpath: str) -> setuptools.Extension:
    return setuptools.Extension(modpath, [modpath.replace(".", "/") + ".py"])
Make setuptools.Extension.
train
https://github.com/python-useful-helpers/threaded/blob/c1aa5a631ab3e2904b915ed6c6a8be03a9673a1a/setup.py#L53-L55
null
# Copyright 2017 - 2019 Alexey Stepanov aka penguinolog # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Wrap in ProcessPool/ThreadPool executors or asyncio.Task.""" # Standard Library import ast import collections import distutils.errors import os.path import shutil import sys from distutils.command import build_ext # External Dependencies import setuptools try: import typing except ImportError: typing = None try: # noinspection PyPackageRequirements from Cython.Build import cythonize except ImportError: cythonize = None with open(os.path.join(os.path.dirname(__file__), "threaded", "__init__.py")) as f: SOURCE = f.read() with open("requirements.txt") as f: REQUIRED = f.read().splitlines() with open("README.rst") as f: LONG_DESCRIPTION = f.read() REQUIRES_OPTIMIZATION = [ setuptools.Extension("threaded.class_decorator", ["threaded/class_decorator.pyx"]), _extension("threaded._base_threaded"), setuptools.Extension("threaded._asynciotask", ["threaded/_asynciotask.pyx"]), setuptools.Extension("threaded._threaded", ["threaded/_threaded.pyx"]), _extension("threaded._threadpooled"), ] if "win32" != sys.platform: REQUIRES_OPTIMIZATION.append(_extension("threaded.__init__")) # noinspection PyCallingNonCallable EXT_MODULES = ( cythonize( REQUIRES_OPTIMIZATION, compiler_directives=dict( always_allow_keywords=True, binding=True, embedsignature=True, overflowcheck=True, language_level=3 ), ) if cythonize is not None else [] ) class BuildFailed(Exception): """For install clear scripts.""" class AllowFailRepair(build_ext.build_ext): """This class allows C extension building to fail and repairs init.""" def run(self): """Run. :raises BuildFailed: cythonize impossible """ try: build_ext.build_ext.run(self) # Copy __init__.py back to repair package. build_dir = os.path.abspath(self.build_lib) root_dir = os.path.abspath(os.path.join(__file__, "..")) target_dir = build_dir if not self.inplace else root_dir src_file = os.path.join("threaded", "__init__.py") src = os.path.join(root_dir, src_file) dst = os.path.join(target_dir, src_file) if src != dst: shutil.copyfile(src, dst) except ( distutils.errors.DistutilsPlatformError, getattr(globals()["__builtins__"], "FileNotFoundError", OSError), ): raise BuildFailed() def build_extension(self, ext): """build_extension. :raises BuildFailed: cythonize impossible """ try: build_ext.build_ext.build_extension(self, ext) except ( distutils.errors.CCompilerError, distutils.errors.DistutilsExecError, distutils.errors.DistutilsPlatformError, ValueError, ): raise BuildFailed() # noinspection PyUnresolvedReferences def get_simple_vars_from_src( src: str ) -> "typing.Dict[str, typing.Union[str, bytes, int, float, complex, list, set, dict, tuple, None]]": """Get simple (string/number/boolean and None) assigned values from source. :param src: Source code :type src: str :returns: OrderedDict with keys, values = variable names, values :rtype: typing.Dict[ str, typing.Union[ str, bytes, int, float, complex, list, set, dict, tuple, None, ] ] Limitations: Only defined from scratch variables. 
Not supported by design: * Imports * Executable code, including string formatting and comprehensions. Examples: >>> string_sample = "a = '1'" >>> get_simple_vars_from_src(string_sample) OrderedDict([('a', '1')]) >>> int_sample = "b = 1" >>> get_simple_vars_from_src(int_sample) OrderedDict([('b', 1)]) >>> list_sample = "c = [u'1', b'1', 1, 1.0, 1j, None]" >>> result = get_simple_vars_from_src(list_sample) >>> result == collections.OrderedDict( ... [('c', [u'1', b'1', 1, 1.0, 1j, None])] ... ) True >>> iterable_sample = "d = ([1], {1: 1}, {1})" >>> get_simple_vars_from_src(iterable_sample) OrderedDict([('d', ([1], {1: 1}, {1}))]) >>> multiple_assign = "e = f = g = 1" >>> get_simple_vars_from_src(multiple_assign) OrderedDict([('e', 1), ('f', 1), ('g', 1)]) """ ast_data = (ast.Str, ast.Num, ast.List, ast.Set, ast.Dict, ast.Tuple, ast.Bytes, ast.NameConstant) tree = ast.parse(src) result = collections.OrderedDict() for node in ast.iter_child_nodes(tree): if not isinstance(node, ast.Assign): # We parse assigns only continue try: if isinstance(node.value, ast_data): value = ast.literal_eval(node.value) else: continue except ValueError: continue for tgt in node.targets: if isinstance(tgt, ast.Name) and isinstance(tgt.ctx, ast.Store): result[tgt.id] = value return result VARIABLES = get_simple_vars_from_src(SOURCE) CLASSIFIERS = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Topic :: Software Development :: Libraries :: Python Modules", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: Implementation :: CPython", ] KEYWORDS = ["pooling", "multithreading", "threading", "asyncio", "development"] setup_args = dict( name="threaded", author=VARIABLES["__author__"], author_email=VARIABLES["__author_email__"], maintainer=", ".join( "{name} <{email}>".format(name=name, email=email) for name, email in VARIABLES["__maintainers__"].items() ), url=VARIABLES["__url__"], license=VARIABLES["__license__"], description=VARIABLES["__description__"], long_description=LONG_DESCRIPTION, classifiers=CLASSIFIERS, keywords=KEYWORDS, python_requires=">=3.6", # While setuptools cannot deal with pre-installed incompatible versions, # setting a lower bound is not harmful - it makes error messages cleaner. DO # NOT set an upper bound on setuptools, as that will lead to uninstallable # situations as progressive releases of projects are done. # Blacklist setuptools 34.0.0-34.3.2 due to https://github.com/pypa/setuptools/issues/951 # Blacklist setuptools 36.2.0 due to https://github.com/pypa/setuptools/issues/1086 setup_requires=[ "setuptools >= 21.0.0,!=24.0.0," "!=34.0.0,!=34.0.1,!=34.0.2,!=34.0.3,!=34.1.0,!=34.1.1,!=34.2.0,!=34.3.0,!=34.3.1,!=34.3.2," "!=36.2.0", "setuptools_scm", ], use_scm_version=True, install_requires=REQUIRED, package_data={"threaded": ["py.typed"]}, ) if cythonize is not None: setup_args["ext_modules"] = EXT_MODULES setup_args["cmdclass"] = dict(build_ext=AllowFailRepair) try: setuptools.setup(**setup_args) except BuildFailed: print("*" * 80 + "\n" "* Build Failed!\n" "* Use clear scripts version.\n" "*" * 80 + "\n") del setup_args["ext_modules"] del setup_args["cmdclass"] setuptools.setup(**setup_args)
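A quick, self-contained check of what the helper builds (the helper is repeated here so the snippet stands alone; the module path is just an example taken from the surrounding setup.py):

import setuptools

def _extension(modpath: str) -> setuptools.Extension:
    return setuptools.Extension(modpath, [modpath.replace(".", "/") + ".py"])

ext = _extension("threaded._threadpooled")
assert ext.name == "threaded._threadpooled"
print(ext.sources)  # expected: ['threaded/_threadpooled.py']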
python-useful-helpers/threaded
setup.py
AllowFailRepair.build_extension
python
def build_extension(self, ext):
    try:
        build_ext.build_ext.build_extension(self, ext)
    except (
        distutils.errors.CCompilerError,
        distutils.errors.DistutilsExecError,
        distutils.errors.DistutilsPlatformError,
        ValueError,
    ):
        raise BuildFailed()
build_extension.

:raises BuildFailed: cythonize impossible
train
https://github.com/python-useful-helpers/threaded/blob/c1aa5a631ab3e2904b915ed6c6a8be03a9673a1a/setup.py#L115-L128
null
class AllowFailRepair(build_ext.build_ext):
    """This class allows C extension building to fail and repairs init."""

    def run(self):
        """Run.

        :raises BuildFailed: cythonize impossible
        """
        try:
            build_ext.build_ext.run(self)

            # Copy __init__.py back to repair package.
            build_dir = os.path.abspath(self.build_lib)
            root_dir = os.path.abspath(os.path.join(__file__, ".."))
            target_dir = build_dir if not self.inplace else root_dir

            src_file = os.path.join("threaded", "__init__.py")
            src = os.path.join(root_dir, src_file)
            dst = os.path.join(target_dir, src_file)

            if src != dst:
                shutil.copyfile(src, dst)
        except (
            distutils.errors.DistutilsPlatformError,
            getattr(globals()["__builtins__"], "FileNotFoundError", OSError),
        ):
            raise BuildFailed()
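For context, the surrounding setup.py pairs this hook with a fallback around setuptools.setup(); the sketch below condenses that pattern using names (setup_args, EXT_MODULES, BuildFailed) defined earlier in the same file, so it is an excerpt rather than a standalone script:

setup_args["ext_modules"] = EXT_MODULES
setup_args["cmdclass"] = dict(build_ext=AllowFailRepair)
try:
    setuptools.setup(**setup_args)
except BuildFailed:
    # Fall back to the pure-Python build when C/Cython compilation fails.
    del setup_args["ext_modules"]
    del setup_args["cmdclass"]
    setuptools.setup(**setup_args)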
python-useful-helpers/threaded
threaded/class_decorator.py
BaseDecorator._func
python
def _func(self) -> typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]]:
    return self.__func
Get wrapped function.

:rtype: typing.Optional[typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]]]
train
https://github.com/python-useful-helpers/threaded/blob/c1aa5a631ab3e2904b915ed6c6a8be03a9673a1a/threaded/class_decorator.py#L89-L94
null
class BaseDecorator(metaclass=abc.ABCMeta): """Base class for decorators. Implements wrapping and __call__, wrapper getter is abstract. Note: wrapper getter is called only on function call, if decorator used without braces. Usage example: >>> class TestDecorator(BaseDecorator): ... def _get_function_wrapper(self, func): ... print('Wrapping: {}'.format(func.__name__)) ... @functools.wraps(func) ... def wrapper(*args, **kwargs): ... print('call_function: {}'.format(func.__name__)) ... return func(*args, **kwargs) ... return wrapper >>> @TestDecorator ... def func_no_init(): ... pass >>> func_no_init() Wrapping: func_no_init call_function: func_no_init >>> isinstance(func_no_init, TestDecorator) True >>> func_no_init._func is func_no_init.__wrapped__ True >>> @TestDecorator() ... def func_init(): ... pass Wrapping: func_init >>> func_init() call_function: func_init >>> isinstance(func_init, TestDecorator) False """ def __init__( self, func: typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] = None, ) -> None: """Decorator. :param func: function to wrap :type func: typing.Optional[typing.Callable] """ # noinspection PyArgumentList super(BaseDecorator, self).__init__() # pylint: disable=assigning-non-slot self.__func: typing.Optional[ typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] ] = func if self.__func is not None: functools.update_wrapper(self, self.__func) # pylint: enable=assigning-non-slot @property # pragma: no cover @abc.abstractmethod def _get_function_wrapper( self, func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] ) -> typing.Callable[..., typing.Any]: """Here should be constructed and returned real decorator. :param func: Wrapped function :type func: typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]] :rtype: typing.Callable """ raise NotImplementedError() # pragma: no cover def __call__( self, *args: typing.Union[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]], typing.Any], **kwargs: typing.Any, ) -> typing.Any: """Main decorator getter.""" l_args: typing.List[typing.Any] = list(args) if self._func: wrapped: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] = self._func else: wrapped = l_args.pop(0) wrapper: typing.Callable[..., typing.Any] = self._get_function_wrapper(wrapped) if self.__func: return wrapper(*l_args, **kwargs) return wrapper @staticmethod def _await_if_required( target: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] ) -> typing.Callable[..., typing.Any]: """Await result if coroutine was returned.""" @functools.wraps(target) def wrapper(*args, **kwargs): # type: (typing.Any, typing.Any) -> typing.Any """Decorator/wrapper.""" result = target(*args, **kwargs) if asyncio.iscoroutine(result): loop = asyncio.new_event_loop() result = loop.run_until_complete(result) loop.close() return result return wrapper def __repr__(self) -> str: """For debug purposes.""" return f"<{self.__class__.__name__}({self.__func!r}) at 0x{id(self):X}>" # pragma: no cover
python-useful-helpers/threaded
threaded/class_decorator.py
BaseDecorator._get_function_wrapper
python
def _get_function_wrapper(
    self, func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]
) -> typing.Callable[..., typing.Any]:
    raise NotImplementedError()
Here should be constructed and returned real decorator.

:param func: Wrapped function
:type func: typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]]
:rtype: typing.Callable
train
https://github.com/python-useful-helpers/threaded/blob/c1aa5a631ab3e2904b915ed6c6a8be03a9673a1a/threaded/class_decorator.py#L97-L106
null
class BaseDecorator(metaclass=abc.ABCMeta): """Base class for decorators. Implements wrapping and __call__, wrapper getter is abstract. Note: wrapper getter is called only on function call, if decorator used without braces. Usage example: >>> class TestDecorator(BaseDecorator): ... def _get_function_wrapper(self, func): ... print('Wrapping: {}'.format(func.__name__)) ... @functools.wraps(func) ... def wrapper(*args, **kwargs): ... print('call_function: {}'.format(func.__name__)) ... return func(*args, **kwargs) ... return wrapper >>> @TestDecorator ... def func_no_init(): ... pass >>> func_no_init() Wrapping: func_no_init call_function: func_no_init >>> isinstance(func_no_init, TestDecorator) True >>> func_no_init._func is func_no_init.__wrapped__ True >>> @TestDecorator() ... def func_init(): ... pass Wrapping: func_init >>> func_init() call_function: func_init >>> isinstance(func_init, TestDecorator) False """ def __init__( self, func: typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] = None, ) -> None: """Decorator. :param func: function to wrap :type func: typing.Optional[typing.Callable] """ # noinspection PyArgumentList super(BaseDecorator, self).__init__() # pylint: disable=assigning-non-slot self.__func: typing.Optional[ typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] ] = func if self.__func is not None: functools.update_wrapper(self, self.__func) # pylint: enable=assigning-non-slot @property def _func(self) -> typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]]: """Get wrapped function. :rtype: typing.Optional[typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]]] """ return self.__func # pragma: no cover @abc.abstractmethod # pragma: no cover def __call__( self, *args: typing.Union[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]], typing.Any], **kwargs: typing.Any, ) -> typing.Any: """Main decorator getter.""" l_args: typing.List[typing.Any] = list(args) if self._func: wrapped: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] = self._func else: wrapped = l_args.pop(0) wrapper: typing.Callable[..., typing.Any] = self._get_function_wrapper(wrapped) if self.__func: return wrapper(*l_args, **kwargs) return wrapper @staticmethod def _await_if_required( target: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] ) -> typing.Callable[..., typing.Any]: """Await result if coroutine was returned.""" @functools.wraps(target) def wrapper(*args, **kwargs): # type: (typing.Any, typing.Any) -> typing.Any """Decorator/wrapper.""" result = target(*args, **kwargs) if asyncio.iscoroutine(result): loop = asyncio.new_event_loop() result = loop.run_until_complete(result) loop.close() return result return wrapper def __repr__(self) -> str: """For debug purposes.""" return f"<{self.__class__.__name__}({self.__func!r}) at 0x{id(self):X}>" # pragma: no cover
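How a concrete subclass fills in this hook, adapted from the TestDecorator example in the BaseDecorator docstring (LogCalls is an illustrative name; the import path assumes the package is installed as `threaded`):

import functools

from threaded.class_decorator import BaseDecorator

class LogCalls(BaseDecorator):
    def _get_function_wrapper(self, func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            print('call_function:', func.__name__)
            return func(*args, **kwargs)
        return wrapper

@LogCalls
def ping():
    return 'pong'

assert ping() == 'pong'  # prints "call_function: ping" before returning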
python-useful-helpers/threaded
threaded/class_decorator.py
BaseDecorator._await_if_required
python
def _await_if_required(
    target: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]
) -> typing.Callable[..., typing.Any]:
    @functools.wraps(target)
    def wrapper(*args, **kwargs):  # type: (typing.Any, typing.Any) -> typing.Any
        """Decorator/wrapper."""
        result = target(*args, **kwargs)
        if asyncio.iscoroutine(result):
            loop = asyncio.new_event_loop()
            result = loop.run_until_complete(result)
            loop.close()
        return result

    return wrapper
Await result if coroutine was returned.
train
https://github.com/python-useful-helpers/threaded/blob/c1aa5a631ab3e2904b915ed6c6a8be03a9673a1a/threaded/class_decorator.py#L127-L142
null
class BaseDecorator(metaclass=abc.ABCMeta): """Base class for decorators. Implements wrapping and __call__, wrapper getter is abstract. Note: wrapper getter is called only on function call, if decorator used without braces. Usage example: >>> class TestDecorator(BaseDecorator): ... def _get_function_wrapper(self, func): ... print('Wrapping: {}'.format(func.__name__)) ... @functools.wraps(func) ... def wrapper(*args, **kwargs): ... print('call_function: {}'.format(func.__name__)) ... return func(*args, **kwargs) ... return wrapper >>> @TestDecorator ... def func_no_init(): ... pass >>> func_no_init() Wrapping: func_no_init call_function: func_no_init >>> isinstance(func_no_init, TestDecorator) True >>> func_no_init._func is func_no_init.__wrapped__ True >>> @TestDecorator() ... def func_init(): ... pass Wrapping: func_init >>> func_init() call_function: func_init >>> isinstance(func_init, TestDecorator) False """ def __init__( self, func: typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] = None, ) -> None: """Decorator. :param func: function to wrap :type func: typing.Optional[typing.Callable] """ # noinspection PyArgumentList super(BaseDecorator, self).__init__() # pylint: disable=assigning-non-slot self.__func: typing.Optional[ typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] ] = func if self.__func is not None: functools.update_wrapper(self, self.__func) # pylint: enable=assigning-non-slot @property def _func(self) -> typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]]: """Get wrapped function. :rtype: typing.Optional[typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]]] """ return self.__func # pragma: no cover @abc.abstractmethod def _get_function_wrapper( self, func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] ) -> typing.Callable[..., typing.Any]: """Here should be constructed and returned real decorator. :param func: Wrapped function :type func: typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]] :rtype: typing.Callable """ raise NotImplementedError() # pragma: no cover def __call__( self, *args: typing.Union[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]], typing.Any], **kwargs: typing.Any, ) -> typing.Any: """Main decorator getter.""" l_args: typing.List[typing.Any] = list(args) if self._func: wrapped: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] = self._func else: wrapped = l_args.pop(0) wrapper: typing.Callable[..., typing.Any] = self._get_function_wrapper(wrapped) if self.__func: return wrapper(*l_args, **kwargs) return wrapper @staticmethod def __repr__(self) -> str: """For debug purposes.""" return f"<{self.__class__.__name__}({self.__func!r}) at 0x{id(self):X}>" # pragma: no cover
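Behaviour sketch (added for illustration, standard library only): the helper above turns a coroutine-returning callable into a synchronous one by running the coroutine on a throwaway event loop. The standalone rewrite below mirrors that logic outside the class.

import asyncio
import functools


def await_if_required(target):
    """Standalone rewrite of the _await_if_required idea, for illustration."""
    @functools.wraps(target)
    def wrapper(*args, **kwargs):
        result = target(*args, **kwargs)
        if asyncio.iscoroutine(result):
            # The wrapped callable handed back a coroutine: drive it to completion.
            loop = asyncio.new_event_loop()
            result = loop.run_until_complete(result)
            loop.close()
        return result
    return wrapper


@await_if_required
async def add(a, b):
    await asyncio.sleep(0)
    return a + b


print(add(2, 3))  # 5 -- the coroutine was awaited inside the wrapper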
python-useful-helpers/threaded
threaded/_threadpooled.py
threadpooled
python
def threadpooled(
    func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]],
    *,
    loop_getter: None = None,
    loop_getter_need_context: bool = False,
) -> typing.Callable[..., "concurrent.futures.Future[typing.Any]"]:
    """Overload: function callable, no loop getter."""
Overload: function callable, no loop getter.
train
https://github.com/python-useful-helpers/threaded/blob/c1aa5a631ab3e2904b915ed6c6a8be03a9673a1a/threaded/_threadpooled.py#L178-L184
null
# Copyright 2017 - 2019 Alexey Stepanov aka penguinolog ## # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ThreadPooled implementation. Asyncio is supported """ __all__ = ("ThreadPooled", "threadpooled") # Standard Library import asyncio import concurrent.futures import functools import typing # Local Implementation from . import _base_threaded class ThreadPooled(_base_threaded.APIPooled): """Post function to ThreadPoolExecutor.""" __slots__ = ("__loop_getter", "__loop_getter_need_context") __executor: typing.Optional["ThreadPoolExecutor"] = None @classmethod def configure(cls: typing.Type["ThreadPooled"], max_workers: typing.Optional[int] = None) -> None: """Pool executor create and configure. :param max_workers: Maximum workers :type max_workers: typing.Optional[int] """ if isinstance(cls.__executor, ThreadPoolExecutor): if cls.__executor.max_workers == max_workers: return cls.__executor.shutdown() cls.__executor = ThreadPoolExecutor(max_workers=max_workers) @classmethod def shutdown(cls: typing.Type["ThreadPooled"]) -> None: """Shutdown executor.""" if cls.__executor is not None: cls.__executor.shutdown() @property def executor(self) -> "ThreadPoolExecutor": """Executor instance. :rtype: ThreadPoolExecutor """ if not isinstance(self.__executor, ThreadPoolExecutor) or self.__executor.is_shutdown: self.configure() return self.__executor # type: ignore def __init__( self, func: typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] = None, *, loop_getter: typing.Optional[ typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] ] = None, loop_getter_need_context: bool = False, ) -> None: """Wrap function in future and return. :param func: function to wrap :type func: typing.Optional[typing.Callable] :param loop_getter: Method to get event loop, if wrap in asyncio task :type loop_getter: typing.Union[ None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] :param loop_getter_need_context: Loop getter requires function context :type loop_getter_need_context: bool """ super(ThreadPooled, self).__init__(func=func) self.__loop_getter: typing.Optional[ typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] ] = loop_getter self.__loop_getter_need_context: bool = loop_getter_need_context @property def loop_getter( self ) -> typing.Optional[typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop]]: """Loop getter. :rtype: typing.Union[None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] """ return self.__loop_getter @property def loop_getter_need_context(self) -> bool: """Loop getter need execution context. 
:rtype: bool """ return self.__loop_getter_need_context def _get_loop(self, *args: typing.Any, **kwargs: typing.Any) -> typing.Optional[asyncio.AbstractEventLoop]: """Get event loop in decorator class.""" if callable(self.loop_getter): if self.loop_getter_need_context: return self.loop_getter(*args, **kwargs) # pylint: disable=not-callable return self.loop_getter() # pylint: disable=not-callable return self.loop_getter def _get_function_wrapper( self, func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] ) -> typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"]: """Here should be constructed and returned real decorator. :param func: Wrapped function :type func: typing.Callable :return: wrapped coroutine or function :rtype: typing.Callable[..., typing.Union[typing.Awaitable, concurrent.futures.Future]] """ prepared = self._await_if_required(func) # noinspection PyMissingOrEmptyDocstring @functools.wraps(prepared) # pylint: disable=missing-docstring def wrapper( *args: typing.Any, **kwargs: typing.Any ) -> typing.Union[ "concurrent.futures.Future[typing.Any]", "typing.Awaitable[typing.Any]", typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"], ]: loop: typing.Optional[asyncio.AbstractEventLoop] = self._get_loop(*args, **kwargs) if loop is None: return self.executor.submit(prepared, *args, **kwargs) return loop.run_in_executor(self.executor, functools.partial(prepared, *args, **kwargs)) return wrapper def __call__( # pylint: disable=useless-super-delegation self, *args: typing.Union[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]], typing.Any], **kwargs: typing.Any, ) -> typing.Union[ "concurrent.futures.Future[typing.Any]", "typing.Awaitable[typing.Any]", typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"], ]: """Callable instance.""" return super(ThreadPooled, self).__call__(*args, **kwargs) # type: ignore def __repr__(self) -> str: # pragma: no cover """For debug purposes.""" return ( f"<{self.__class__.__name__}(" f"{self._func!r}, " f"loop_getter={self.loop_getter!r}, " f"loop_getter_need_context={self.loop_getter_need_context!r}, " f") at 0x{id(self):X}>" ) # pylint: disable=function-redefined, unused-argument @typing.overload @typing.overload # noqa: F811 def threadpooled( func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]], *, loop_getter: typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop], loop_getter_need_context: bool = False, ) -> typing.Callable[..., "asyncio.Task[typing.Any]"]: """Overload: function callable, loop getter available.""" @typing.overload # noqa: F811 def threadpooled( func: None = None, *, loop_getter: typing.Union[None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] = None, loop_getter_need_context: bool = False, ) -> ThreadPooled: """Overload: No function.""" # pylint: enable=unused-argument def threadpooled( # noqa: F811 func: typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] = None, *, loop_getter: typing.Union[None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] = None, loop_getter_need_context: bool = False, ) -> typing.Union[ ThreadPooled, typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"], ]: """Post function to 
ThreadPoolExecutor. :param func: function to wrap :type func: typing.Optional[typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]]] :param loop_getter: Method to get event loop, if wrap in asyncio task :type loop_getter: typing.Union[ None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] :param loop_getter_need_context: Loop getter requires function context :type loop_getter_need_context: bool :return: ThreadPooled instance, if called as function or argumented decorator, else callable wrapper :rtype: typing.Union[ThreadPooled, typing.Callable[..., typing.Union[concurrent.futures.Future, typing.Awaitable]]] """ if func is None: return ThreadPooled(func=func, loop_getter=loop_getter, loop_getter_need_context=loop_getter_need_context) return ThreadPooled( # type: ignore func=None, loop_getter=loop_getter, loop_getter_need_context=loop_getter_need_context )(func) # pylint: enable=function-redefined class ThreadPoolExecutor(concurrent.futures.ThreadPoolExecutor): """Provide readers for protected attributes. Simply extend concurrent.futures.ThreadPoolExecutor. """ __slots__ = () @property def max_workers(self) -> int: """MaxWorkers. :rtype: int """ return self._max_workers # type: ignore @property def is_shutdown(self) -> bool: """Executor shutdown state. :rtype: bool """ return self._shutdown # type: ignore
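Usage sketch for this overload (added; assumes the threaded package re-exports threadpooled at the top level, as its __all__ suggests): without a loop getter every call to the decorated function returns a concurrent.futures.Future.

import threaded


@threaded.threadpooled
def heavy(x: int) -> int:
    return x * x


future = heavy(12)       # concurrent.futures.Future backed by the shared pool
print(future.result())   # 144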
python-useful-helpers/threaded
threaded/_threadpooled.py
threadpooled
python
def threadpooled(
    func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]],
    *,
    loop_getter: typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop],
    loop_getter_need_context: bool = False,
) -> typing.Callable[..., "asyncio.Task[typing.Any]"]:
    """Overload: function callable, loop getter available."""
Overload: function callable, loop getter available.
train
https://github.com/python-useful-helpers/threaded/blob/c1aa5a631ab3e2904b915ed6c6a8be03a9673a1a/threaded/_threadpooled.py#L188-L194
null
# Copyright 2017 - 2019 Alexey Stepanov aka penguinolog ## # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ThreadPooled implementation. Asyncio is supported """ __all__ = ("ThreadPooled", "threadpooled") # Standard Library import asyncio import concurrent.futures import functools import typing # Local Implementation from . import _base_threaded class ThreadPooled(_base_threaded.APIPooled): """Post function to ThreadPoolExecutor.""" __slots__ = ("__loop_getter", "__loop_getter_need_context") __executor: typing.Optional["ThreadPoolExecutor"] = None @classmethod def configure(cls: typing.Type["ThreadPooled"], max_workers: typing.Optional[int] = None) -> None: """Pool executor create and configure. :param max_workers: Maximum workers :type max_workers: typing.Optional[int] """ if isinstance(cls.__executor, ThreadPoolExecutor): if cls.__executor.max_workers == max_workers: return cls.__executor.shutdown() cls.__executor = ThreadPoolExecutor(max_workers=max_workers) @classmethod def shutdown(cls: typing.Type["ThreadPooled"]) -> None: """Shutdown executor.""" if cls.__executor is not None: cls.__executor.shutdown() @property def executor(self) -> "ThreadPoolExecutor": """Executor instance. :rtype: ThreadPoolExecutor """ if not isinstance(self.__executor, ThreadPoolExecutor) or self.__executor.is_shutdown: self.configure() return self.__executor # type: ignore def __init__( self, func: typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] = None, *, loop_getter: typing.Optional[ typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] ] = None, loop_getter_need_context: bool = False, ) -> None: """Wrap function in future and return. :param func: function to wrap :type func: typing.Optional[typing.Callable] :param loop_getter: Method to get event loop, if wrap in asyncio task :type loop_getter: typing.Union[ None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] :param loop_getter_need_context: Loop getter requires function context :type loop_getter_need_context: bool """ super(ThreadPooled, self).__init__(func=func) self.__loop_getter: typing.Optional[ typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] ] = loop_getter self.__loop_getter_need_context: bool = loop_getter_need_context @property def loop_getter( self ) -> typing.Optional[typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop]]: """Loop getter. :rtype: typing.Union[None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] """ return self.__loop_getter @property def loop_getter_need_context(self) -> bool: """Loop getter need execution context. 
:rtype: bool """ return self.__loop_getter_need_context def _get_loop(self, *args: typing.Any, **kwargs: typing.Any) -> typing.Optional[asyncio.AbstractEventLoop]: """Get event loop in decorator class.""" if callable(self.loop_getter): if self.loop_getter_need_context: return self.loop_getter(*args, **kwargs) # pylint: disable=not-callable return self.loop_getter() # pylint: disable=not-callable return self.loop_getter def _get_function_wrapper( self, func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] ) -> typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"]: """Here should be constructed and returned real decorator. :param func: Wrapped function :type func: typing.Callable :return: wrapped coroutine or function :rtype: typing.Callable[..., typing.Union[typing.Awaitable, concurrent.futures.Future]] """ prepared = self._await_if_required(func) # noinspection PyMissingOrEmptyDocstring @functools.wraps(prepared) # pylint: disable=missing-docstring def wrapper( *args: typing.Any, **kwargs: typing.Any ) -> typing.Union[ "concurrent.futures.Future[typing.Any]", "typing.Awaitable[typing.Any]", typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"], ]: loop: typing.Optional[asyncio.AbstractEventLoop] = self._get_loop(*args, **kwargs) if loop is None: return self.executor.submit(prepared, *args, **kwargs) return loop.run_in_executor(self.executor, functools.partial(prepared, *args, **kwargs)) return wrapper def __call__( # pylint: disable=useless-super-delegation self, *args: typing.Union[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]], typing.Any], **kwargs: typing.Any, ) -> typing.Union[ "concurrent.futures.Future[typing.Any]", "typing.Awaitable[typing.Any]", typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"], ]: """Callable instance.""" return super(ThreadPooled, self).__call__(*args, **kwargs) # type: ignore def __repr__(self) -> str: # pragma: no cover """For debug purposes.""" return ( f"<{self.__class__.__name__}(" f"{self._func!r}, " f"loop_getter={self.loop_getter!r}, " f"loop_getter_need_context={self.loop_getter_need_context!r}, " f") at 0x{id(self):X}>" ) # pylint: disable=function-redefined, unused-argument @typing.overload def threadpooled( func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]], *, loop_getter: None = None, loop_getter_need_context: bool = False, ) -> typing.Callable[..., "concurrent.futures.Future[typing.Any]"]: """Overload: function callable, no loop getter.""" @typing.overload # noqa: F811 @typing.overload # noqa: F811 def threadpooled( func: None = None, *, loop_getter: typing.Union[None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] = None, loop_getter_need_context: bool = False, ) -> ThreadPooled: """Overload: No function.""" # pylint: enable=unused-argument def threadpooled( # noqa: F811 func: typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] = None, *, loop_getter: typing.Union[None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] = None, loop_getter_need_context: bool = False, ) -> typing.Union[ ThreadPooled, typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"], ]: """Post function to ThreadPoolExecutor. 
:param func: function to wrap :type func: typing.Optional[typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]]] :param loop_getter: Method to get event loop, if wrap in asyncio task :type loop_getter: typing.Union[ None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] :param loop_getter_need_context: Loop getter requires function context :type loop_getter_need_context: bool :return: ThreadPooled instance, if called as function or argumented decorator, else callable wrapper :rtype: typing.Union[ThreadPooled, typing.Callable[..., typing.Union[concurrent.futures.Future, typing.Awaitable]]] """ if func is None: return ThreadPooled(func=func, loop_getter=loop_getter, loop_getter_need_context=loop_getter_need_context) return ThreadPooled( # type: ignore func=None, loop_getter=loop_getter, loop_getter_need_context=loop_getter_need_context )(func) # pylint: enable=function-redefined class ThreadPoolExecutor(concurrent.futures.ThreadPoolExecutor): """Provide readers for protected attributes. Simply extend concurrent.futures.ThreadPoolExecutor. """ __slots__ = () @property def max_workers(self) -> int: """MaxWorkers. :rtype: int """ return self._max_workers # type: ignore @property def is_shutdown(self) -> bool: """Executor shutdown state. :rtype: bool """ return self._shutdown # type: ignore
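Usage sketch for this overload (added; import path assumed as above): with a loop getter the wrapper submits work through loop.run_in_executor, so the result can be awaited from a coroutine running on that loop.

import asyncio

import threaded

loop = asyncio.new_event_loop()


@threaded.threadpooled(loop_getter=loop)
def blocking(x: int) -> int:
    return x + 1


async def main() -> None:
    print(await blocking(41))  # 42, computed in the thread pool


loop.run_until_complete(main())
loop.close()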
python-useful-helpers/threaded
threaded/_threadpooled.py
threadpooled
python
def threadpooled(
    func: None = None,
    *,
    loop_getter: typing.Union[None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] = None,
    loop_getter_need_context: bool = False,
) -> ThreadPooled:
    """Overload: No function."""
Overload: No function.
train
https://github.com/python-useful-helpers/threaded/blob/c1aa5a631ab3e2904b915ed6c6a8be03a9673a1a/threaded/_threadpooled.py#L198-L204
null
# Copyright 2017 - 2019 Alexey Stepanov aka penguinolog ## # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ThreadPooled implementation. Asyncio is supported """ __all__ = ("ThreadPooled", "threadpooled") # Standard Library import asyncio import concurrent.futures import functools import typing # Local Implementation from . import _base_threaded class ThreadPooled(_base_threaded.APIPooled): """Post function to ThreadPoolExecutor.""" __slots__ = ("__loop_getter", "__loop_getter_need_context") __executor: typing.Optional["ThreadPoolExecutor"] = None @classmethod def configure(cls: typing.Type["ThreadPooled"], max_workers: typing.Optional[int] = None) -> None: """Pool executor create and configure. :param max_workers: Maximum workers :type max_workers: typing.Optional[int] """ if isinstance(cls.__executor, ThreadPoolExecutor): if cls.__executor.max_workers == max_workers: return cls.__executor.shutdown() cls.__executor = ThreadPoolExecutor(max_workers=max_workers) @classmethod def shutdown(cls: typing.Type["ThreadPooled"]) -> None: """Shutdown executor.""" if cls.__executor is not None: cls.__executor.shutdown() @property def executor(self) -> "ThreadPoolExecutor": """Executor instance. :rtype: ThreadPoolExecutor """ if not isinstance(self.__executor, ThreadPoolExecutor) or self.__executor.is_shutdown: self.configure() return self.__executor # type: ignore def __init__( self, func: typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] = None, *, loop_getter: typing.Optional[ typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] ] = None, loop_getter_need_context: bool = False, ) -> None: """Wrap function in future and return. :param func: function to wrap :type func: typing.Optional[typing.Callable] :param loop_getter: Method to get event loop, if wrap in asyncio task :type loop_getter: typing.Union[ None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] :param loop_getter_need_context: Loop getter requires function context :type loop_getter_need_context: bool """ super(ThreadPooled, self).__init__(func=func) self.__loop_getter: typing.Optional[ typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] ] = loop_getter self.__loop_getter_need_context: bool = loop_getter_need_context @property def loop_getter( self ) -> typing.Optional[typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop]]: """Loop getter. :rtype: typing.Union[None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] """ return self.__loop_getter @property def loop_getter_need_context(self) -> bool: """Loop getter need execution context. 
:rtype: bool """ return self.__loop_getter_need_context def _get_loop(self, *args: typing.Any, **kwargs: typing.Any) -> typing.Optional[asyncio.AbstractEventLoop]: """Get event loop in decorator class.""" if callable(self.loop_getter): if self.loop_getter_need_context: return self.loop_getter(*args, **kwargs) # pylint: disable=not-callable return self.loop_getter() # pylint: disable=not-callable return self.loop_getter def _get_function_wrapper( self, func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] ) -> typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"]: """Here should be constructed and returned real decorator. :param func: Wrapped function :type func: typing.Callable :return: wrapped coroutine or function :rtype: typing.Callable[..., typing.Union[typing.Awaitable, concurrent.futures.Future]] """ prepared = self._await_if_required(func) # noinspection PyMissingOrEmptyDocstring @functools.wraps(prepared) # pylint: disable=missing-docstring def wrapper( *args: typing.Any, **kwargs: typing.Any ) -> typing.Union[ "concurrent.futures.Future[typing.Any]", "typing.Awaitable[typing.Any]", typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"], ]: loop: typing.Optional[asyncio.AbstractEventLoop] = self._get_loop(*args, **kwargs) if loop is None: return self.executor.submit(prepared, *args, **kwargs) return loop.run_in_executor(self.executor, functools.partial(prepared, *args, **kwargs)) return wrapper def __call__( # pylint: disable=useless-super-delegation self, *args: typing.Union[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]], typing.Any], **kwargs: typing.Any, ) -> typing.Union[ "concurrent.futures.Future[typing.Any]", "typing.Awaitable[typing.Any]", typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"], ]: """Callable instance.""" return super(ThreadPooled, self).__call__(*args, **kwargs) # type: ignore def __repr__(self) -> str: # pragma: no cover """For debug purposes.""" return ( f"<{self.__class__.__name__}(" f"{self._func!r}, " f"loop_getter={self.loop_getter!r}, " f"loop_getter_need_context={self.loop_getter_need_context!r}, " f") at 0x{id(self):X}>" ) # pylint: disable=function-redefined, unused-argument @typing.overload def threadpooled( func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]], *, loop_getter: None = None, loop_getter_need_context: bool = False, ) -> typing.Callable[..., "concurrent.futures.Future[typing.Any]"]: """Overload: function callable, no loop getter.""" @typing.overload # noqa: F811 def threadpooled( func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]], *, loop_getter: typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop], loop_getter_need_context: bool = False, ) -> typing.Callable[..., "asyncio.Task[typing.Any]"]: """Overload: function callable, loop getter available.""" @typing.overload # noqa: F811 # pylint: enable=unused-argument def threadpooled( # noqa: F811 func: typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] = None, *, loop_getter: typing.Union[None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] = None, loop_getter_need_context: bool = False, ) -> typing.Union[ ThreadPooled, typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], 
typing.Awaitable[typing.Any]]"], ]: """Post function to ThreadPoolExecutor. :param func: function to wrap :type func: typing.Optional[typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]]] :param loop_getter: Method to get event loop, if wrap in asyncio task :type loop_getter: typing.Union[ None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] :param loop_getter_need_context: Loop getter requires function context :type loop_getter_need_context: bool :return: ThreadPooled instance, if called as function or argumented decorator, else callable wrapper :rtype: typing.Union[ThreadPooled, typing.Callable[..., typing.Union[concurrent.futures.Future, typing.Awaitable]]] """ if func is None: return ThreadPooled(func=func, loop_getter=loop_getter, loop_getter_need_context=loop_getter_need_context) return ThreadPooled( # type: ignore func=None, loop_getter=loop_getter, loop_getter_need_context=loop_getter_need_context )(func) # pylint: enable=function-redefined class ThreadPoolExecutor(concurrent.futures.ThreadPoolExecutor): """Provide readers for protected attributes. Simply extend concurrent.futures.ThreadPoolExecutor. """ __slots__ = () @property def max_workers(self) -> int: """MaxWorkers. :rtype: int """ return self._max_workers # type: ignore @property def is_shutdown(self) -> bool: """Executor shutdown state. :rtype: bool """ return self._shutdown # type: ignore
python-useful-helpers/threaded
threaded/_threadpooled.py
threadpooled
python
def threadpooled(  # noqa: F811
    func: typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] = None,
    *,
    loop_getter: typing.Union[None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] = None,
    loop_getter_need_context: bool = False,
) -> typing.Union[
    ThreadPooled,
    typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"],
]:
    if func is None:
        return ThreadPooled(func=func, loop_getter=loop_getter, loop_getter_need_context=loop_getter_need_context)
    return ThreadPooled(  # type: ignore
        func=None, loop_getter=loop_getter, loop_getter_need_context=loop_getter_need_context
    )(func)
Post function to ThreadPoolExecutor.

:param func: function to wrap
:type func: typing.Optional[typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]]]
:param loop_getter: Method to get event loop, if wrap in asyncio task
:type loop_getter: typing.Union[
    None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop
]
:param loop_getter_need_context: Loop getter requires function context
:type loop_getter_need_context: bool
:return: ThreadPooled instance, if called as function or argumented decorator, else callable wrapper
:rtype: typing.Union[ThreadPooled, typing.Callable[..., typing.Union[concurrent.futures.Future, typing.Awaitable]]]
train
https://github.com/python-useful-helpers/threaded/blob/c1aa5a631ab3e2904b915ed6c6a8be03a9673a1a/threaded/_threadpooled.py#L208-L236
null
# Copyright 2017 - 2019 Alexey Stepanov aka penguinolog ## # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ThreadPooled implementation. Asyncio is supported """ __all__ = ("ThreadPooled", "threadpooled") # Standard Library import asyncio import concurrent.futures import functools import typing # Local Implementation from . import _base_threaded class ThreadPooled(_base_threaded.APIPooled): """Post function to ThreadPoolExecutor.""" __slots__ = ("__loop_getter", "__loop_getter_need_context") __executor: typing.Optional["ThreadPoolExecutor"] = None @classmethod def configure(cls: typing.Type["ThreadPooled"], max_workers: typing.Optional[int] = None) -> None: """Pool executor create and configure. :param max_workers: Maximum workers :type max_workers: typing.Optional[int] """ if isinstance(cls.__executor, ThreadPoolExecutor): if cls.__executor.max_workers == max_workers: return cls.__executor.shutdown() cls.__executor = ThreadPoolExecutor(max_workers=max_workers) @classmethod def shutdown(cls: typing.Type["ThreadPooled"]) -> None: """Shutdown executor.""" if cls.__executor is not None: cls.__executor.shutdown() @property def executor(self) -> "ThreadPoolExecutor": """Executor instance. :rtype: ThreadPoolExecutor """ if not isinstance(self.__executor, ThreadPoolExecutor) or self.__executor.is_shutdown: self.configure() return self.__executor # type: ignore def __init__( self, func: typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] = None, *, loop_getter: typing.Optional[ typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] ] = None, loop_getter_need_context: bool = False, ) -> None: """Wrap function in future and return. :param func: function to wrap :type func: typing.Optional[typing.Callable] :param loop_getter: Method to get event loop, if wrap in asyncio task :type loop_getter: typing.Union[ None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] :param loop_getter_need_context: Loop getter requires function context :type loop_getter_need_context: bool """ super(ThreadPooled, self).__init__(func=func) self.__loop_getter: typing.Optional[ typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] ] = loop_getter self.__loop_getter_need_context: bool = loop_getter_need_context @property def loop_getter( self ) -> typing.Optional[typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop]]: """Loop getter. :rtype: typing.Union[None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] """ return self.__loop_getter @property def loop_getter_need_context(self) -> bool: """Loop getter need execution context. 
:rtype: bool """ return self.__loop_getter_need_context def _get_loop(self, *args: typing.Any, **kwargs: typing.Any) -> typing.Optional[asyncio.AbstractEventLoop]: """Get event loop in decorator class.""" if callable(self.loop_getter): if self.loop_getter_need_context: return self.loop_getter(*args, **kwargs) # pylint: disable=not-callable return self.loop_getter() # pylint: disable=not-callable return self.loop_getter def _get_function_wrapper( self, func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] ) -> typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"]: """Here should be constructed and returned real decorator. :param func: Wrapped function :type func: typing.Callable :return: wrapped coroutine or function :rtype: typing.Callable[..., typing.Union[typing.Awaitable, concurrent.futures.Future]] """ prepared = self._await_if_required(func) # noinspection PyMissingOrEmptyDocstring @functools.wraps(prepared) # pylint: disable=missing-docstring def wrapper( *args: typing.Any, **kwargs: typing.Any ) -> typing.Union[ "concurrent.futures.Future[typing.Any]", "typing.Awaitable[typing.Any]", typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"], ]: loop: typing.Optional[asyncio.AbstractEventLoop] = self._get_loop(*args, **kwargs) if loop is None: return self.executor.submit(prepared, *args, **kwargs) return loop.run_in_executor(self.executor, functools.partial(prepared, *args, **kwargs)) return wrapper def __call__( # pylint: disable=useless-super-delegation self, *args: typing.Union[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]], typing.Any], **kwargs: typing.Any, ) -> typing.Union[ "concurrent.futures.Future[typing.Any]", "typing.Awaitable[typing.Any]", typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"], ]: """Callable instance.""" return super(ThreadPooled, self).__call__(*args, **kwargs) # type: ignore def __repr__(self) -> str: # pragma: no cover """For debug purposes.""" return ( f"<{self.__class__.__name__}(" f"{self._func!r}, " f"loop_getter={self.loop_getter!r}, " f"loop_getter_need_context={self.loop_getter_need_context!r}, " f") at 0x{id(self):X}>" ) # pylint: disable=function-redefined, unused-argument @typing.overload def threadpooled( func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]], *, loop_getter: None = None, loop_getter_need_context: bool = False, ) -> typing.Callable[..., "concurrent.futures.Future[typing.Any]"]: """Overload: function callable, no loop getter.""" @typing.overload # noqa: F811 def threadpooled( func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]], *, loop_getter: typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop], loop_getter_need_context: bool = False, ) -> typing.Callable[..., "asyncio.Task[typing.Any]"]: """Overload: function callable, loop getter available.""" @typing.overload # noqa: F811 def threadpooled( func: None = None, *, loop_getter: typing.Union[None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] = None, loop_getter_need_context: bool = False, ) -> ThreadPooled: """Overload: No function.""" # pylint: enable=unused-argument # pylint: enable=function-redefined class ThreadPoolExecutor(concurrent.futures.ThreadPoolExecutor): """Provide readers for protected attributes. 
Simply extend concurrent.futures.ThreadPoolExecutor. """ __slots__ = () @property def max_workers(self) -> int: """MaxWorkers. :rtype: int """ return self._max_workers # type: ignore @property def is_shutdown(self) -> bool: """Executor shutdown state. :rtype: bool """ return self._shutdown # type: ignore
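Sketch of the two branches in the implementation above (added; import path assumed as above): a function argument goes straight through ThreadPooled(...)(func) and yields the wrapper, while func=None yields the ThreadPooled instance for later application.

import concurrent.futures

import threaded


def work() -> int:
    return 7


wrapper = threaded.threadpooled(work)       # func given: wrapper returned immediately
assert isinstance(wrapper(), concurrent.futures.Future)

deco = threaded.threadpooled()              # func is None: ThreadPooled returned
wrapped = deco(work)                        # applied later, producing the same kind of wrapper
print(wrapped().result())                   # 7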
python-useful-helpers/threaded
threaded/_threadpooled.py
ThreadPooled.configure
python
def configure(cls: typing.Type["ThreadPooled"], max_workers: typing.Optional[int] = None) -> None:
    if isinstance(cls.__executor, ThreadPoolExecutor):
        if cls.__executor.max_workers == max_workers:
            return
        cls.__executor.shutdown()

    cls.__executor = ThreadPoolExecutor(max_workers=max_workers)
Pool executor create and configure.

:param max_workers: Maximum workers
:type max_workers: typing.Optional[int]
train
https://github.com/python-useful-helpers/threaded/blob/c1aa5a631ab3e2904b915ed6c6a8be03a9673a1a/threaded/_threadpooled.py#L40-L51
null
class ThreadPooled(_base_threaded.APIPooled): """Post function to ThreadPoolExecutor.""" __slots__ = ("__loop_getter", "__loop_getter_need_context") __executor: typing.Optional["ThreadPoolExecutor"] = None @classmethod @classmethod def shutdown(cls: typing.Type["ThreadPooled"]) -> None: """Shutdown executor.""" if cls.__executor is not None: cls.__executor.shutdown() @property def executor(self) -> "ThreadPoolExecutor": """Executor instance. :rtype: ThreadPoolExecutor """ if not isinstance(self.__executor, ThreadPoolExecutor) or self.__executor.is_shutdown: self.configure() return self.__executor # type: ignore def __init__( self, func: typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] = None, *, loop_getter: typing.Optional[ typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] ] = None, loop_getter_need_context: bool = False, ) -> None: """Wrap function in future and return. :param func: function to wrap :type func: typing.Optional[typing.Callable] :param loop_getter: Method to get event loop, if wrap in asyncio task :type loop_getter: typing.Union[ None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] :param loop_getter_need_context: Loop getter requires function context :type loop_getter_need_context: bool """ super(ThreadPooled, self).__init__(func=func) self.__loop_getter: typing.Optional[ typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] ] = loop_getter self.__loop_getter_need_context: bool = loop_getter_need_context @property def loop_getter( self ) -> typing.Optional[typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop]]: """Loop getter. :rtype: typing.Union[None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] """ return self.__loop_getter @property def loop_getter_need_context(self) -> bool: """Loop getter need execution context. :rtype: bool """ return self.__loop_getter_need_context def _get_loop(self, *args: typing.Any, **kwargs: typing.Any) -> typing.Optional[asyncio.AbstractEventLoop]: """Get event loop in decorator class.""" if callable(self.loop_getter): if self.loop_getter_need_context: return self.loop_getter(*args, **kwargs) # pylint: disable=not-callable return self.loop_getter() # pylint: disable=not-callable return self.loop_getter def _get_function_wrapper( self, func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] ) -> typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"]: """Here should be constructed and returned real decorator. 
:param func: Wrapped function :type func: typing.Callable :return: wrapped coroutine or function :rtype: typing.Callable[..., typing.Union[typing.Awaitable, concurrent.futures.Future]] """ prepared = self._await_if_required(func) # noinspection PyMissingOrEmptyDocstring @functools.wraps(prepared) # pylint: disable=missing-docstring def wrapper( *args: typing.Any, **kwargs: typing.Any ) -> typing.Union[ "concurrent.futures.Future[typing.Any]", "typing.Awaitable[typing.Any]", typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"], ]: loop: typing.Optional[asyncio.AbstractEventLoop] = self._get_loop(*args, **kwargs) if loop is None: return self.executor.submit(prepared, *args, **kwargs) return loop.run_in_executor(self.executor, functools.partial(prepared, *args, **kwargs)) return wrapper def __call__( # pylint: disable=useless-super-delegation self, *args: typing.Union[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]], typing.Any], **kwargs: typing.Any, ) -> typing.Union[ "concurrent.futures.Future[typing.Any]", "typing.Awaitable[typing.Any]", typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"], ]: """Callable instance.""" return super(ThreadPooled, self).__call__(*args, **kwargs) # type: ignore def __repr__(self) -> str: # pragma: no cover """For debug purposes.""" return ( f"<{self.__class__.__name__}(" f"{self._func!r}, " f"loop_getter={self.loop_getter!r}, " f"loop_getter_need_context={self.loop_getter_need_context!r}, " f") at 0x{id(self):X}>" )
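Usage sketch for the class-level configure hook (added; import path assumed as above): the shared executor can be sized before any decorated call runs, and shut down once work is finished.

import threaded

threaded.ThreadPooled.configure(max_workers=4)   # (re)create the shared pool with 4 workers


@threaded.threadpooled
def ping() -> str:
    return "pong"


print(ping().result())            # 'pong', executed by the 4-worker pool

threaded.ThreadPooled.shutdown()  # tear the shared pool down when finished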
python-useful-helpers/threaded
threaded/_threadpooled.py
ThreadPooled.executor
python
def executor(self) -> "ThreadPoolExecutor":
    if not isinstance(self.__executor, ThreadPoolExecutor) or self.__executor.is_shutdown:
        self.configure()
    return self.__executor
Executor instance. :rtype: ThreadPoolExecutor
train
https://github.com/python-useful-helpers/threaded/blob/c1aa5a631ab3e2904b915ed6c6a8be03a9673a1a/threaded/_threadpooled.py#L60-L67
[ "def configure(cls: typing.Type[\"ThreadPooled\"], max_workers: typing.Optional[int] = None) -> None:\n \"\"\"Pool executor create and configure.\n\n :param max_workers: Maximum workers\n :type max_workers: typing.Optional[int]\n \"\"\"\n if isinstance(cls.__executor, ThreadPoolExecutor):\n if cls.__executor.max_workers == max_workers:\n return\n cls.__executor.shutdown()\n\n cls.__executor = ThreadPoolExecutor(max_workers=max_workers)\n" ]
class ThreadPooled(_base_threaded.APIPooled): """Post function to ThreadPoolExecutor.""" __slots__ = ("__loop_getter", "__loop_getter_need_context") __executor: typing.Optional["ThreadPoolExecutor"] = None @classmethod def configure(cls: typing.Type["ThreadPooled"], max_workers: typing.Optional[int] = None) -> None: """Pool executor create and configure. :param max_workers: Maximum workers :type max_workers: typing.Optional[int] """ if isinstance(cls.__executor, ThreadPoolExecutor): if cls.__executor.max_workers == max_workers: return cls.__executor.shutdown() cls.__executor = ThreadPoolExecutor(max_workers=max_workers) @classmethod def shutdown(cls: typing.Type["ThreadPooled"]) -> None: """Shutdown executor.""" if cls.__executor is not None: cls.__executor.shutdown() @property # type: ignore def __init__( self, func: typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] = None, *, loop_getter: typing.Optional[ typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] ] = None, loop_getter_need_context: bool = False, ) -> None: """Wrap function in future and return. :param func: function to wrap :type func: typing.Optional[typing.Callable] :param loop_getter: Method to get event loop, if wrap in asyncio task :type loop_getter: typing.Union[ None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] :param loop_getter_need_context: Loop getter requires function context :type loop_getter_need_context: bool """ super(ThreadPooled, self).__init__(func=func) self.__loop_getter: typing.Optional[ typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] ] = loop_getter self.__loop_getter_need_context: bool = loop_getter_need_context @property def loop_getter( self ) -> typing.Optional[typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop]]: """Loop getter. :rtype: typing.Union[None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] """ return self.__loop_getter @property def loop_getter_need_context(self) -> bool: """Loop getter need execution context. :rtype: bool """ return self.__loop_getter_need_context def _get_loop(self, *args: typing.Any, **kwargs: typing.Any) -> typing.Optional[asyncio.AbstractEventLoop]: """Get event loop in decorator class.""" if callable(self.loop_getter): if self.loop_getter_need_context: return self.loop_getter(*args, **kwargs) # pylint: disable=not-callable return self.loop_getter() # pylint: disable=not-callable return self.loop_getter def _get_function_wrapper( self, func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] ) -> typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"]: """Here should be constructed and returned real decorator. 
:param func: Wrapped function :type func: typing.Callable :return: wrapped coroutine or function :rtype: typing.Callable[..., typing.Union[typing.Awaitable, concurrent.futures.Future]] """ prepared = self._await_if_required(func) # noinspection PyMissingOrEmptyDocstring @functools.wraps(prepared) # pylint: disable=missing-docstring def wrapper( *args: typing.Any, **kwargs: typing.Any ) -> typing.Union[ "concurrent.futures.Future[typing.Any]", "typing.Awaitable[typing.Any]", typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"], ]: loop: typing.Optional[asyncio.AbstractEventLoop] = self._get_loop(*args, **kwargs) if loop is None: return self.executor.submit(prepared, *args, **kwargs) return loop.run_in_executor(self.executor, functools.partial(prepared, *args, **kwargs)) return wrapper def __call__( # pylint: disable=useless-super-delegation self, *args: typing.Union[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]], typing.Any], **kwargs: typing.Any, ) -> typing.Union[ "concurrent.futures.Future[typing.Any]", "typing.Awaitable[typing.Any]", typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"], ]: """Callable instance.""" return super(ThreadPooled, self).__call__(*args, **kwargs) # type: ignore def __repr__(self) -> str: # pragma: no cover """For debug purposes.""" return ( f"<{self.__class__.__name__}(" f"{self._func!r}, " f"loop_getter={self.loop_getter!r}, " f"loop_getter_need_context={self.loop_getter_need_context!r}, " f") at 0x{id(self):X}>" )
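Sketch of the lazy behaviour of the executor property (added; import path assumed as above): if the shared pool was never configured, or has been shut down, reading the property builds a fresh one.

import threaded

pooled = threaded.ThreadPooled()
first = pooled.executor           # created lazily on first access
threaded.ThreadPooled.shutdown()  # the shared pool is now marked as shut down
second = pooled.executor          # is_shutdown is detected, so a new pool is configured
print(first is second)            # False -- a replacement executor was created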
python-useful-helpers/threaded
threaded/_threadpooled.py
ThreadPooled.loop_getter
python
def loop_getter(
    self
) -> typing.Optional[typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop]]:
    return self.__loop_getter
Loop getter.

:rtype: typing.Union[None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop]
train
https://github.com/python-useful-helpers/threaded/blob/c1aa5a631ab3e2904b915ed6c6a8be03a9673a1a/threaded/_threadpooled.py#L98-L105
null
class ThreadPooled(_base_threaded.APIPooled): """Post function to ThreadPoolExecutor.""" __slots__ = ("__loop_getter", "__loop_getter_need_context") __executor: typing.Optional["ThreadPoolExecutor"] = None @classmethod def configure(cls: typing.Type["ThreadPooled"], max_workers: typing.Optional[int] = None) -> None: """Pool executor create and configure. :param max_workers: Maximum workers :type max_workers: typing.Optional[int] """ if isinstance(cls.__executor, ThreadPoolExecutor): if cls.__executor.max_workers == max_workers: return cls.__executor.shutdown() cls.__executor = ThreadPoolExecutor(max_workers=max_workers) @classmethod def shutdown(cls: typing.Type["ThreadPooled"]) -> None: """Shutdown executor.""" if cls.__executor is not None: cls.__executor.shutdown() @property def executor(self) -> "ThreadPoolExecutor": """Executor instance. :rtype: ThreadPoolExecutor """ if not isinstance(self.__executor, ThreadPoolExecutor) or self.__executor.is_shutdown: self.configure() return self.__executor # type: ignore def __init__( self, func: typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] = None, *, loop_getter: typing.Optional[ typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] ] = None, loop_getter_need_context: bool = False, ) -> None: """Wrap function in future and return. :param func: function to wrap :type func: typing.Optional[typing.Callable] :param loop_getter: Method to get event loop, if wrap in asyncio task :type loop_getter: typing.Union[ None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] :param loop_getter_need_context: Loop getter requires function context :type loop_getter_need_context: bool """ super(ThreadPooled, self).__init__(func=func) self.__loop_getter: typing.Optional[ typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] ] = loop_getter self.__loop_getter_need_context: bool = loop_getter_need_context @property @property def loop_getter_need_context(self) -> bool: """Loop getter need execution context. :rtype: bool """ return self.__loop_getter_need_context def _get_loop(self, *args: typing.Any, **kwargs: typing.Any) -> typing.Optional[asyncio.AbstractEventLoop]: """Get event loop in decorator class.""" if callable(self.loop_getter): if self.loop_getter_need_context: return self.loop_getter(*args, **kwargs) # pylint: disable=not-callable return self.loop_getter() # pylint: disable=not-callable return self.loop_getter def _get_function_wrapper( self, func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] ) -> typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"]: """Here should be constructed and returned real decorator. 
:param func: Wrapped function :type func: typing.Callable :return: wrapped coroutine or function :rtype: typing.Callable[..., typing.Union[typing.Awaitable, concurrent.futures.Future]] """ prepared = self._await_if_required(func) # noinspection PyMissingOrEmptyDocstring @functools.wraps(prepared) # pylint: disable=missing-docstring def wrapper( *args: typing.Any, **kwargs: typing.Any ) -> typing.Union[ "concurrent.futures.Future[typing.Any]", "typing.Awaitable[typing.Any]", typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"], ]: loop: typing.Optional[asyncio.AbstractEventLoop] = self._get_loop(*args, **kwargs) if loop is None: return self.executor.submit(prepared, *args, **kwargs) return loop.run_in_executor(self.executor, functools.partial(prepared, *args, **kwargs)) return wrapper def __call__( # pylint: disable=useless-super-delegation self, *args: typing.Union[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]], typing.Any], **kwargs: typing.Any, ) -> typing.Union[ "concurrent.futures.Future[typing.Any]", "typing.Awaitable[typing.Any]", typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"], ]: """Callable instance.""" return super(ThreadPooled, self).__call__(*args, **kwargs) # type: ignore def __repr__(self) -> str: # pragma: no cover """For debug purposes.""" return ( f"<{self.__class__.__name__}(" f"{self._func!r}, " f"loop_getter={self.loop_getter!r}, " f"loop_getter_need_context={self.loop_getter_need_context!r}, " f") at 0x{id(self):X}>" )
python-useful-helpers/threaded
threaded/_threadpooled.py
ThreadPooled._get_loop
python
def _get_loop(self, *args: typing.Any, **kwargs: typing.Any) -> typing.Optional[asyncio.AbstractEventLoop]:
    if callable(self.loop_getter):
        if self.loop_getter_need_context:
            return self.loop_getter(*args, **kwargs)  # pylint: disable=not-callable
        return self.loop_getter()  # pylint: disable=not-callable
    return self.loop_getter
Get event loop in decorator class.
train
https://github.com/python-useful-helpers/threaded/blob/c1aa5a631ab3e2904b915ed6c6a8be03a9673a1a/threaded/_threadpooled.py#L115-L121
null
class ThreadPooled(_base_threaded.APIPooled): """Post function to ThreadPoolExecutor.""" __slots__ = ("__loop_getter", "__loop_getter_need_context") __executor: typing.Optional["ThreadPoolExecutor"] = None @classmethod def configure(cls: typing.Type["ThreadPooled"], max_workers: typing.Optional[int] = None) -> None: """Pool executor create and configure. :param max_workers: Maximum workers :type max_workers: typing.Optional[int] """ if isinstance(cls.__executor, ThreadPoolExecutor): if cls.__executor.max_workers == max_workers: return cls.__executor.shutdown() cls.__executor = ThreadPoolExecutor(max_workers=max_workers) @classmethod def shutdown(cls: typing.Type["ThreadPooled"]) -> None: """Shutdown executor.""" if cls.__executor is not None: cls.__executor.shutdown() @property def executor(self) -> "ThreadPoolExecutor": """Executor instance. :rtype: ThreadPoolExecutor """ if not isinstance(self.__executor, ThreadPoolExecutor) or self.__executor.is_shutdown: self.configure() return self.__executor # type: ignore def __init__( self, func: typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] = None, *, loop_getter: typing.Optional[ typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] ] = None, loop_getter_need_context: bool = False, ) -> None: """Wrap function in future and return. :param func: function to wrap :type func: typing.Optional[typing.Callable] :param loop_getter: Method to get event loop, if wrap in asyncio task :type loop_getter: typing.Union[ None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] :param loop_getter_need_context: Loop getter requires function context :type loop_getter_need_context: bool """ super(ThreadPooled, self).__init__(func=func) self.__loop_getter: typing.Optional[ typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] ] = loop_getter self.__loop_getter_need_context: bool = loop_getter_need_context @property def loop_getter( self ) -> typing.Optional[typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop]]: """Loop getter. :rtype: typing.Union[None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] """ return self.__loop_getter @property def loop_getter_need_context(self) -> bool: """Loop getter need execution context. :rtype: bool """ return self.__loop_getter_need_context def _get_function_wrapper( self, func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] ) -> typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"]: """Here should be constructed and returned real decorator. 
:param func: Wrapped function :type func: typing.Callable :return: wrapped coroutine or function :rtype: typing.Callable[..., typing.Union[typing.Awaitable, concurrent.futures.Future]] """ prepared = self._await_if_required(func) # noinspection PyMissingOrEmptyDocstring @functools.wraps(prepared) # pylint: disable=missing-docstring def wrapper( *args: typing.Any, **kwargs: typing.Any ) -> typing.Union[ "concurrent.futures.Future[typing.Any]", "typing.Awaitable[typing.Any]", typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"], ]: loop: typing.Optional[asyncio.AbstractEventLoop] = self._get_loop(*args, **kwargs) if loop is None: return self.executor.submit(prepared, *args, **kwargs) return loop.run_in_executor(self.executor, functools.partial(prepared, *args, **kwargs)) return wrapper def __call__( # pylint: disable=useless-super-delegation self, *args: typing.Union[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]], typing.Any], **kwargs: typing.Any, ) -> typing.Union[ "concurrent.futures.Future[typing.Any]", "typing.Awaitable[typing.Any]", typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"], ]: """Callable instance.""" return super(ThreadPooled, self).__call__(*args, **kwargs) # type: ignore def __repr__(self) -> str: # pragma: no cover """For debug purposes.""" return ( f"<{self.__class__.__name__}(" f"{self._func!r}, " f"loop_getter={self.loop_getter!r}, " f"loop_getter_need_context={self.loop_getter_need_context!r}, " f") at 0x{id(self):X}>" )
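As a standalone illustration of the loop-resolution rule documented above, here is a minimal sketch; resolve_loop is a hypothetical helper that mirrors ThreadPooled._get_loop outside the decorator class: the getter may be a context-aware callable, a zero-argument callable, a loop instance, or None.

import asyncio
import typing


def resolve_loop(
    loop_getter: typing.Union[None, asyncio.AbstractEventLoop, typing.Callable[..., asyncio.AbstractEventLoop]],
    need_context: bool,
    *args: typing.Any,
    **kwargs: typing.Any,
) -> typing.Optional[asyncio.AbstractEventLoop]:
    """Hypothetical helper mirroring ThreadPooled._get_loop: resolve an event loop or None."""
    if callable(loop_getter):
        if need_context:
            # The getter receives the wrapped call's own arguments.
            return loop_getter(*args, **kwargs)
        return loop_getter()
    # Either an AbstractEventLoop instance or None (run purely in the thread pool).
    return loop_getter


loop = asyncio.new_event_loop()
assert resolve_loop(loop, False) is loop                   # plain loop instance
assert resolve_loop(lambda: loop, False) is loop           # zero-argument getter
assert resolve_loop(lambda obj: obj, True, loop) is loop   # context-aware getter
assert resolve_loop(None, False) is None                   # no loop: caller gets an executor Future
loop.close()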
python-useful-helpers/threaded
threaded/_threadpooled.py
ThreadPooled._get_function_wrapper
python
def _get_function_wrapper( self, func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] ) -> typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"]: prepared = self._await_if_required(func) # noinspection PyMissingOrEmptyDocstring @functools.wraps(prepared) # pylint: disable=missing-docstring def wrapper( *args: typing.Any, **kwargs: typing.Any ) -> typing.Union[ "concurrent.futures.Future[typing.Any]", "typing.Awaitable[typing.Any]", typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"], ]: loop: typing.Optional[asyncio.AbstractEventLoop] = self._get_loop(*args, **kwargs) if loop is None: return self.executor.submit(prepared, *args, **kwargs) return loop.run_in_executor(self.executor, functools.partial(prepared, *args, **kwargs)) return wrapper
Construct and return the real decorator here. :param func: Wrapped function :type func: typing.Callable :return: wrapped coroutine or function :rtype: typing.Callable[..., typing.Union[typing.Awaitable, concurrent.futures.Future]]
train
https://github.com/python-useful-helpers/threaded/blob/c1aa5a631ab3e2904b915ed6c6a8be03a9673a1a/threaded/_threadpooled.py#L123-L151
[ "def _await_if_required(\n target: typing.Callable[..., typing.Union[\"typing.Awaitable[typing.Any]\", typing.Any]]\n) -> typing.Callable[..., typing.Any]:\n \"\"\"Await result if coroutine was returned.\"\"\"\n\n @functools.wraps(target)\n def wrapper(*args, **kwargs): # type: (typing.Any, typing.Any) -> typing.Any\n \"\"\"Decorator/wrapper.\"\"\"\n result = target(*args, **kwargs)\n if asyncio.iscoroutine(result):\n loop = asyncio.new_event_loop()\n result = loop.run_until_complete(result)\n loop.close()\n return result\n\n return wrapper\n" ]
class ThreadPooled(_base_threaded.APIPooled): """Post function to ThreadPoolExecutor.""" __slots__ = ("__loop_getter", "__loop_getter_need_context") __executor: typing.Optional["ThreadPoolExecutor"] = None @classmethod def configure(cls: typing.Type["ThreadPooled"], max_workers: typing.Optional[int] = None) -> None: """Pool executor create and configure. :param max_workers: Maximum workers :type max_workers: typing.Optional[int] """ if isinstance(cls.__executor, ThreadPoolExecutor): if cls.__executor.max_workers == max_workers: return cls.__executor.shutdown() cls.__executor = ThreadPoolExecutor(max_workers=max_workers) @classmethod def shutdown(cls: typing.Type["ThreadPooled"]) -> None: """Shutdown executor.""" if cls.__executor is not None: cls.__executor.shutdown() @property def executor(self) -> "ThreadPoolExecutor": """Executor instance. :rtype: ThreadPoolExecutor """ if not isinstance(self.__executor, ThreadPoolExecutor) or self.__executor.is_shutdown: self.configure() return self.__executor # type: ignore def __init__( self, func: typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] = None, *, loop_getter: typing.Optional[ typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] ] = None, loop_getter_need_context: bool = False, ) -> None: """Wrap function in future and return. :param func: function to wrap :type func: typing.Optional[typing.Callable] :param loop_getter: Method to get event loop, if wrap in asyncio task :type loop_getter: typing.Union[ None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] :param loop_getter_need_context: Loop getter requires function context :type loop_getter_need_context: bool """ super(ThreadPooled, self).__init__(func=func) self.__loop_getter: typing.Optional[ typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] ] = loop_getter self.__loop_getter_need_context: bool = loop_getter_need_context @property def loop_getter( self ) -> typing.Optional[typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop]]: """Loop getter. :rtype: typing.Union[None, typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] """ return self.__loop_getter @property def loop_getter_need_context(self) -> bool: """Loop getter need execution context. 
:rtype: bool """ return self.__loop_getter_need_context def _get_loop(self, *args: typing.Any, **kwargs: typing.Any) -> typing.Optional[asyncio.AbstractEventLoop]: """Get event loop in decorator class.""" if callable(self.loop_getter): if self.loop_getter_need_context: return self.loop_getter(*args, **kwargs) # pylint: disable=not-callable return self.loop_getter() # pylint: disable=not-callable return self.loop_getter def __call__( # pylint: disable=useless-super-delegation self, *args: typing.Union[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]], typing.Any], **kwargs: typing.Any, ) -> typing.Union[ "concurrent.futures.Future[typing.Any]", "typing.Awaitable[typing.Any]", typing.Callable[..., "typing.Union[concurrent.futures.Future[typing.Any], typing.Awaitable[typing.Any]]"], ]: """Callable instance.""" return super(ThreadPooled, self).__call__(*args, **kwargs) # type: ignore def __repr__(self) -> str: # pragma: no cover """For debug purposes.""" return ( f"<{self.__class__.__name__}(" f"{self._func!r}, " f"loop_getter={self.loop_getter!r}, " f"loop_getter_need_context={self.loop_getter_need_context!r}, " f") at 0x{id(self):X}>" )
python-useful-helpers/threaded
threaded/_asynciotask.py
asynciotask
python
def asynciotask( func: None = None, *, loop_getter: typing.Union[ typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] = asyncio.get_event_loop, loop_getter_need_context: bool = False, ) -> AsyncIOTask: """Overload: no function."""
Overload: no function.
train
https://github.com/python-useful-helpers/threaded/blob/c1aa5a631ab3e2904b915ed6c6a8be03a9673a1a/threaded/_asynciotask.py#L123-L131
null
# Copyright 2017 - 2019 Alexey Stepanov aka penguinolog ## # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """AsyncIOTask implementation.""" __all__ = ("AsyncIOTask", "asynciotask") # Standard Library import asyncio import functools import typing # Local Implementation from . import class_decorator class AsyncIOTask(class_decorator.BaseDecorator): """Wrap to asyncio.Task.""" __slots__ = ("__loop_getter", "__loop_getter_need_context") def __init__( self, func: typing.Optional[typing.Callable[..., "typing.Awaitable[typing.Any]"]] = None, *, loop_getter: typing.Union[ typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] = asyncio.get_event_loop, loop_getter_need_context: bool = False, ) -> None: """Wrap function in future and return. :param func: Function to wrap :type func: typing.Optional[typing.Callable[..., typing.Awaitable]] :param loop_getter: Method to get event loop, if wrap in asyncio task :type loop_getter: typing.Union[ typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] :param loop_getter_need_context: Loop getter requires function context :type loop_getter_need_context: bool """ super(AsyncIOTask, self).__init__(func=func) self.__loop_getter: typing.Union[ typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] = loop_getter self.__loop_getter_need_context: bool = loop_getter_need_context @property def loop_getter(self) -> typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop]: """Loop getter. :rtype: typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] """ return self.__loop_getter @property def loop_getter_need_context(self) -> bool: """Loop getter need execution context. :rtype: bool """ return self.__loop_getter_need_context def get_loop(self, *args, **kwargs): # type: (typing.Any, typing.Any) -> asyncio.AbstractEventLoop """Get event loop in decorator class.""" if callable(self.loop_getter): if self.loop_getter_need_context: return self.loop_getter(*args, **kwargs) # pylint: disable=not-callable return self.loop_getter() # pylint: disable=not-callable return self.loop_getter def _get_function_wrapper( self, func: typing.Callable[..., "typing.Awaitable[typing.Any]"] ) -> typing.Callable[..., "asyncio.Task[typing.Any]"]: """Here should be constructed and returned real decorator. 
:param func: Wrapped function :type func: typing.Callable[..., typing.Awaitable] :return: wrapper, which will produce asyncio.Task on call with function called inside it :rtype: typing.Callable[..., asyncio.Task] """ # noinspection PyMissingOrEmptyDocstring @functools.wraps(func) # pylint: disable=missing-docstring def wrapper(*args, **kwargs): # type: (typing.Any, typing.Any) -> asyncio.Task[typing.Any] loop = self.get_loop(*args, **kwargs) return loop.create_task(func(*args, **kwargs)) return wrapper def __call__( # pylint: disable=useless-super-delegation self, *args: typing.Union[typing.Callable[..., "typing.Awaitable[typing.Any]"], typing.Any], **kwargs: typing.Any, ) -> typing.Union["asyncio.Task[typing.Any]", typing.Callable[..., "asyncio.Task[typing.Any]"]]: """Callable instance.""" return super(AsyncIOTask, self).__call__(*args, **kwargs) # type: ignore def __repr__(self) -> str: """For debug purposes.""" return ( f"<{self.__class__.__name__}(" f"{self._func!r}, " f"loop_getter={self.loop_getter!r}, " f"loop_getter_need_context={self.loop_getter_need_context!r}, " f") at 0x{id(self):X}>" ) # pragma: no cover # pylint: disable=function-redefined, unused-argument @typing.overload @typing.overload # noqa: F811 def asynciotask( func: typing.Callable[..., "typing.Awaitable[typing.Any]"], *, loop_getter: typing.Union[ typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] = asyncio.get_event_loop, loop_getter_need_context: bool = False, ) -> typing.Callable[..., "asyncio.Task[typing.Any]"]: """Overload: provided function.""" # pylint: enable=unused-argument def asynciotask( # noqa: F811 func: typing.Optional[typing.Callable[..., "typing.Awaitable[typing.Any]"]] = None, *, loop_getter: typing.Union[ typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] = asyncio.get_event_loop, loop_getter_need_context: bool = False, ) -> typing.Union[AsyncIOTask, typing.Callable[..., "asyncio.Task[typing.Any]"]]: """Wrap function in future and return. :param func: Function to wrap :type func: typing.Optional[typing.Callable[..., typing.Awaitable]] :param loop_getter: Method to get event loop, if wrap in asyncio task :type loop_getter: typing.Union[ typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] :param loop_getter_need_context: Loop getter requires function context :type loop_getter_need_context: bool :return: AsyncIOTask instance, if called as function or argumented decorator, else callable wrapper :rtype: typing.Union[AsyncIOTask, typing.Callable[..., asyncio.Task]] """ if func is None: return AsyncIOTask(func=func, loop_getter=loop_getter, loop_getter_need_context=loop_getter_need_context) return AsyncIOTask( # type: ignore func=None, loop_getter=loop_getter, loop_getter_need_context=loop_getter_need_context )(func) # pylint: enable=function-redefined
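The asynciotask declaration above is a typing.overload stub; the runtime behaviour lives in the single implementation that follows these stubs. A minimal, self-contained illustration of the overload-plus-"noqa: F811" pattern on a toy function (the stubs only inform type checkers):

import typing


@typing.overload
def describe(value: int) -> str: ...


@typing.overload  # noqa: F811
def describe(value: None) -> str: ...


def describe(value: typing.Optional[int]) -> str:  # noqa: F811
    """Single runtime implementation behind the overload declarations."""
    if value is None:
        return "nothing"
    return f"int: {value}"


assert describe(3) == "int: 3"
assert describe(None) == "nothing"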
python-useful-helpers/threaded
threaded/_asynciotask.py
asynciotask
python
def asynciotask( # noqa: F811 func: typing.Optional[typing.Callable[..., "typing.Awaitable[typing.Any]"]] = None, *, loop_getter: typing.Union[ typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] = asyncio.get_event_loop, loop_getter_need_context: bool = False, ) -> typing.Union[AsyncIOTask, typing.Callable[..., "asyncio.Task[typing.Any]"]]: if func is None: return AsyncIOTask(func=func, loop_getter=loop_getter, loop_getter_need_context=loop_getter_need_context) return AsyncIOTask( # type: ignore func=None, loop_getter=loop_getter, loop_getter_need_context=loop_getter_need_context )(func)
Wrap function in future and return. :param func: Function to wrap :type func: typing.Optional[typing.Callable[..., typing.Awaitable]] :param loop_getter: Method to get event loop, if wrap in asyncio task :type loop_getter: typing.Union[ typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] :param loop_getter_need_context: Loop getter requires function context :type loop_getter_need_context: bool :return: AsyncIOTask instance, if called as function or argumented decorator, else callable wrapper :rtype: typing.Union[AsyncIOTask, typing.Callable[..., asyncio.Task]]
train
https://github.com/python-useful-helpers/threaded/blob/c1aa5a631ab3e2904b915ed6c6a8be03a9673a1a/threaded/_asynciotask.py#L147-L173
null
# Copyright 2017 - 2019 Alexey Stepanov aka penguinolog ## # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """AsyncIOTask implementation.""" __all__ = ("AsyncIOTask", "asynciotask") # Standard Library import asyncio import functools import typing # Local Implementation from . import class_decorator class AsyncIOTask(class_decorator.BaseDecorator): """Wrap to asyncio.Task.""" __slots__ = ("__loop_getter", "__loop_getter_need_context") def __init__( self, func: typing.Optional[typing.Callable[..., "typing.Awaitable[typing.Any]"]] = None, *, loop_getter: typing.Union[ typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] = asyncio.get_event_loop, loop_getter_need_context: bool = False, ) -> None: """Wrap function in future and return. :param func: Function to wrap :type func: typing.Optional[typing.Callable[..., typing.Awaitable]] :param loop_getter: Method to get event loop, if wrap in asyncio task :type loop_getter: typing.Union[ typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] :param loop_getter_need_context: Loop getter requires function context :type loop_getter_need_context: bool """ super(AsyncIOTask, self).__init__(func=func) self.__loop_getter: typing.Union[ typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] = loop_getter self.__loop_getter_need_context: bool = loop_getter_need_context @property def loop_getter(self) -> typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop]: """Loop getter. :rtype: typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] """ return self.__loop_getter @property def loop_getter_need_context(self) -> bool: """Loop getter need execution context. :rtype: bool """ return self.__loop_getter_need_context def get_loop(self, *args, **kwargs): # type: (typing.Any, typing.Any) -> asyncio.AbstractEventLoop """Get event loop in decorator class.""" if callable(self.loop_getter): if self.loop_getter_need_context: return self.loop_getter(*args, **kwargs) # pylint: disable=not-callable return self.loop_getter() # pylint: disable=not-callable return self.loop_getter def _get_function_wrapper( self, func: typing.Callable[..., "typing.Awaitable[typing.Any]"] ) -> typing.Callable[..., "asyncio.Task[typing.Any]"]: """Here should be constructed and returned real decorator. 
:param func: Wrapped function :type func: typing.Callable[..., typing.Awaitable] :return: wrapper, which will produce asyncio.Task on call with function called inside it :rtype: typing.Callable[..., asyncio.Task] """ # noinspection PyMissingOrEmptyDocstring @functools.wraps(func) # pylint: disable=missing-docstring def wrapper(*args, **kwargs): # type: (typing.Any, typing.Any) -> asyncio.Task[typing.Any] loop = self.get_loop(*args, **kwargs) return loop.create_task(func(*args, **kwargs)) return wrapper def __call__( # pylint: disable=useless-super-delegation self, *args: typing.Union[typing.Callable[..., "typing.Awaitable[typing.Any]"], typing.Any], **kwargs: typing.Any, ) -> typing.Union["asyncio.Task[typing.Any]", typing.Callable[..., "asyncio.Task[typing.Any]"]]: """Callable instance.""" return super(AsyncIOTask, self).__call__(*args, **kwargs) # type: ignore def __repr__(self) -> str: """For debug purposes.""" return ( f"<{self.__class__.__name__}(" f"{self._func!r}, " f"loop_getter={self.loop_getter!r}, " f"loop_getter_need_context={self.loop_getter_need_context!r}, " f") at 0x{id(self):X}>" ) # pragma: no cover # pylint: disable=function-redefined, unused-argument @typing.overload def asynciotask( func: None = None, *, loop_getter: typing.Union[ typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] = asyncio.get_event_loop, loop_getter_need_context: bool = False, ) -> AsyncIOTask: """Overload: no function.""" @typing.overload # noqa: F811 def asynciotask( func: typing.Callable[..., "typing.Awaitable[typing.Any]"], *, loop_getter: typing.Union[ typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] = asyncio.get_event_loop, loop_getter_need_context: bool = False, ) -> typing.Callable[..., "asyncio.Task[typing.Any]"]: """Overload: provided function.""" # pylint: enable=unused-argument # pylint: enable=function-redefined
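A hedged usage sketch of the asynciotask decorator (assuming it is re-exported by the threaded package): decorating a coroutine function makes every call return an asyncio.Task scheduled on the loop produced by the default loop_getter, asyncio.get_event_loop.

import asyncio

import threaded  # assumed to re-export asynciotask


@threaded.asynciotask
async def fetch(value: int) -> int:
    await asyncio.sleep(0)
    return value * 2


async def main() -> None:
    task = fetch(21)                 # loop.create_task(...) under the hood
    assert isinstance(task, asyncio.Task)
    assert await task == 42


asyncio.run(main())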
python-useful-helpers/threaded
threaded/_asynciotask.py
AsyncIOTask.get_loop
python
def get_loop(self, *args, **kwargs): # type: (typing.Any, typing.Any) -> asyncio.AbstractEventLoop if callable(self.loop_getter): if self.loop_getter_need_context: return self.loop_getter(*args, **kwargs) # pylint: disable=not-callable return self.loop_getter() # pylint: disable=not-callable return self.loop_getter
Get event loop in decorator class.
train
https://github.com/python-useful-helpers/threaded/blob/c1aa5a631ab3e2904b915ed6c6a8be03a9673a1a/threaded/_asynciotask.py#L76-L82
null
class AsyncIOTask(class_decorator.BaseDecorator): """Wrap to asyncio.Task.""" __slots__ = ("__loop_getter", "__loop_getter_need_context") def __init__( self, func: typing.Optional[typing.Callable[..., "typing.Awaitable[typing.Any]"]] = None, *, loop_getter: typing.Union[ typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] = asyncio.get_event_loop, loop_getter_need_context: bool = False, ) -> None: """Wrap function in future and return. :param func: Function to wrap :type func: typing.Optional[typing.Callable[..., typing.Awaitable]] :param loop_getter: Method to get event loop, if wrap in asyncio task :type loop_getter: typing.Union[ typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] :param loop_getter_need_context: Loop getter requires function context :type loop_getter_need_context: bool """ super(AsyncIOTask, self).__init__(func=func) self.__loop_getter: typing.Union[ typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] = loop_getter self.__loop_getter_need_context: bool = loop_getter_need_context @property def loop_getter(self) -> typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop]: """Loop getter. :rtype: typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] """ return self.__loop_getter @property def loop_getter_need_context(self) -> bool: """Loop getter need execution context. :rtype: bool """ return self.__loop_getter_need_context def _get_function_wrapper( self, func: typing.Callable[..., "typing.Awaitable[typing.Any]"] ) -> typing.Callable[..., "asyncio.Task[typing.Any]"]: """Here should be constructed and returned real decorator. :param func: Wrapped function :type func: typing.Callable[..., typing.Awaitable] :return: wrapper, which will produce asyncio.Task on call with function called inside it :rtype: typing.Callable[..., asyncio.Task] """ # noinspection PyMissingOrEmptyDocstring @functools.wraps(func) # pylint: disable=missing-docstring def wrapper(*args, **kwargs): # type: (typing.Any, typing.Any) -> asyncio.Task[typing.Any] loop = self.get_loop(*args, **kwargs) return loop.create_task(func(*args, **kwargs)) return wrapper def __call__( # pylint: disable=useless-super-delegation self, *args: typing.Union[typing.Callable[..., "typing.Awaitable[typing.Any]"], typing.Any], **kwargs: typing.Any, ) -> typing.Union["asyncio.Task[typing.Any]", typing.Callable[..., "asyncio.Task[typing.Any]"]]: """Callable instance.""" return super(AsyncIOTask, self).__call__(*args, **kwargs) # type: ignore def __repr__(self) -> str: """For debug purposes.""" return ( f"<{self.__class__.__name__}(" f"{self._func!r}, " f"loop_getter={self.loop_getter!r}, " f"loop_getter_need_context={self.loop_getter_need_context!r}, " f") at 0x{id(self):X}>" ) # pragma: no cover
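The loop_getter_need_context flag handled by get_loop lets the getter inspect the wrapped call's arguments. A hedged sketch (assuming AsyncIOTask is re-exported by the threaded package; loop_from_first_arg is a hypothetical getter): the target loop is simply passed as the first call argument.

import asyncio

import threaded  # assumed to re-export AsyncIOTask


def loop_from_first_arg(loop: asyncio.AbstractEventLoop, *args: object, **kwargs: object) -> asyncio.AbstractEventLoop:
    """Hypothetical getter: the target loop arrives as the wrapped call's first argument."""
    return loop


@threaded.AsyncIOTask(loop_getter=loop_from_first_arg, loop_getter_need_context=True)
async def ping(loop: asyncio.AbstractEventLoop, value: str) -> str:
    return value


async def main() -> None:
    task = ping(asyncio.get_running_loop(), "pong")
    assert await task == "pong"


asyncio.run(main())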
python-useful-helpers/threaded
threaded/_asynciotask.py
AsyncIOTask._get_function_wrapper
python
def _get_function_wrapper( self, func: typing.Callable[..., "typing.Awaitable[typing.Any]"] ) -> typing.Callable[..., "asyncio.Task[typing.Any]"]: # noinspection PyMissingOrEmptyDocstring @functools.wraps(func) # pylint: disable=missing-docstring def wrapper(*args, **kwargs): # type: (typing.Any, typing.Any) -> asyncio.Task[typing.Any] loop = self.get_loop(*args, **kwargs) return loop.create_task(func(*args, **kwargs)) return wrapper
Construct and return the real decorator here. :param func: Wrapped function :type func: typing.Callable[..., typing.Awaitable] :return: wrapper, which will produce asyncio.Task on call with function called inside it :rtype: typing.Callable[..., asyncio.Task]
train
https://github.com/python-useful-helpers/threaded/blob/c1aa5a631ab3e2904b915ed6c6a8be03a9673a1a/threaded/_asynciotask.py#L84-L100
null
class AsyncIOTask(class_decorator.BaseDecorator): """Wrap to asyncio.Task.""" __slots__ = ("__loop_getter", "__loop_getter_need_context") def __init__( self, func: typing.Optional[typing.Callable[..., "typing.Awaitable[typing.Any]"]] = None, *, loop_getter: typing.Union[ typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] = asyncio.get_event_loop, loop_getter_need_context: bool = False, ) -> None: """Wrap function in future and return. :param func: Function to wrap :type func: typing.Optional[typing.Callable[..., typing.Awaitable]] :param loop_getter: Method to get event loop, if wrap in asyncio task :type loop_getter: typing.Union[ typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] :param loop_getter_need_context: Loop getter requires function context :type loop_getter_need_context: bool """ super(AsyncIOTask, self).__init__(func=func) self.__loop_getter: typing.Union[ typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop ] = loop_getter self.__loop_getter_need_context: bool = loop_getter_need_context @property def loop_getter(self) -> typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop]: """Loop getter. :rtype: typing.Union[typing.Callable[..., asyncio.AbstractEventLoop], asyncio.AbstractEventLoop] """ return self.__loop_getter @property def loop_getter_need_context(self) -> bool: """Loop getter need execution context. :rtype: bool """ return self.__loop_getter_need_context def get_loop(self, *args, **kwargs): # type: (typing.Any, typing.Any) -> asyncio.AbstractEventLoop """Get event loop in decorator class.""" if callable(self.loop_getter): if self.loop_getter_need_context: return self.loop_getter(*args, **kwargs) # pylint: disable=not-callable return self.loop_getter() # pylint: disable=not-callable return self.loop_getter def __call__( # pylint: disable=useless-super-delegation self, *args: typing.Union[typing.Callable[..., "typing.Awaitable[typing.Any]"], typing.Any], **kwargs: typing.Any, ) -> typing.Union["asyncio.Task[typing.Any]", typing.Callable[..., "asyncio.Task[typing.Any]"]]: """Callable instance.""" return super(AsyncIOTask, self).__call__(*args, **kwargs) # type: ignore def __repr__(self) -> str: """For debug purposes.""" return ( f"<{self.__class__.__name__}(" f"{self._func!r}, " f"loop_getter={self.loop_getter!r}, " f"loop_getter_need_context={self.loop_getter_need_context!r}, " f") at 0x{id(self):X}>" ) # pragma: no cover
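To make the wrapper's mechanics concrete, here is a minimal standalone sketch; to_task is a hypothetical helper that mirrors AsyncIOTask._get_function_wrapper with asyncio.get_event_loop hard-wired in place of self.get_loop, and it shows that functools.wraps keeps the original metadata.

import asyncio
import functools
import typing


def to_task(func: typing.Callable[..., typing.Awaitable[typing.Any]]) -> typing.Callable[..., "asyncio.Task[typing.Any]"]:
    """Hypothetical helper mirroring the wrapper: schedule the coroutine as a Task."""

    @functools.wraps(func)
    def wrapper(*args: typing.Any, **kwargs: typing.Any) -> "asyncio.Task[typing.Any]":
        loop = asyncio.get_event_loop()          # stand-in for self.get_loop(*args, **kwargs)
        return loop.create_task(func(*args, **kwargs))

    return wrapper


@to_task
async def greet(name: str) -> str:
    return f"hello, {name}"


async def main() -> None:
    assert await greet("world") == "hello, world"
    assert greet.__name__ == "greet"             # functools.wraps preserves metadata


asyncio.run(main())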
python-useful-helpers/threaded
threaded/_threaded.py
threaded
python
def threaded( name: typing.Callable[..., typing.Any], daemon: bool = False, started: bool = False ) -> typing.Callable[..., threading.Thread]: """Overload: Call decorator without arguments."""
Overload: Call decorator without arguments.
train
https://github.com/python-useful-helpers/threaded/blob/c1aa5a631ab3e2904b915ed6c6a8be03a9673a1a/threaded/_threaded.py#L130-L133
null
# Copyright 2017 - 2019 Alexey Stepanov aka penguinolog ## # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Threaded implementation. Asyncio is supported """ __all__ = ("Threaded", "threaded") # Standard Library import functools import threading import typing # Local Implementation from . import class_decorator class Threaded(class_decorator.BaseDecorator): """Run function in separate thread.""" __slots__ = ("__name", "__daemon", "__started") def __init__( self, name: typing.Optional[ typing.Union[str, typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] ] = None, daemon: bool = False, started: bool = False, ) -> None: """Run function in separate thread. :param name: New thread name. If callable: use as wrapped function. If none: use wrapped function name. :type name: typing.Optional[typing.Union[str, typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]]]] :param daemon: Daemonize thread. :type daemon: bool :param started: Return started thread :type started: bool """ # pylint: disable=assigning-non-slot self.__daemon: bool = daemon self.__started: bool = started if callable(name): func: typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] = name self.__name: typing.Optional[str] = "Threaded: " + getattr(name, "__name__", str(hash(name))) else: func, self.__name = None, name super(Threaded, self).__init__(func=func) # pylint: enable=assigning-non-slot @property def name(self) -> typing.Optional[str]: """Thread name. :rtype: typing.Optional[str] """ return self.__name @property def daemon(self) -> bool: """Start thread as daemon. :rtype: bool """ return self.__daemon @property def started(self) -> bool: """Return started thread. :rtype: bool """ return self.__started def __repr__(self) -> str: # pragma: no cover """For debug purposes.""" return f"{self.__class__.__name__}(name={self.name!r}, daemon={self.daemon!r}, started={self.started!r}, )" def _get_function_wrapper( self, func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] ) -> typing.Callable[..., threading.Thread]: """Here should be constructed and returned real decorator. 
:param func: Wrapped function :type func: typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]] :return: wrapped function :rtype: typing.Callable[..., threading.Thread] """ prepared: typing.Callable[..., typing.Any] = self._await_if_required(func) name: typing.Optional[str] = self.name if name is None: name = "Threaded: " + getattr(func, "__name__", str(hash(func))) # noinspection PyMissingOrEmptyDocstring @functools.wraps(prepared) # pylint: disable=missing-docstring def wrapper(*args, **kwargs): # type: (typing.Any, typing.Any) -> threading.Thread thread = threading.Thread(target=prepared, name=name, args=args, kwargs=kwargs, daemon=self.daemon) if self.started: thread.start() return thread return wrapper def __call__( # pylint: disable=useless-super-delegation self, *args: typing.Union[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]], typing.Any], **kwargs: typing.Any, ) -> typing.Union[threading.Thread, typing.Callable[..., threading.Thread]]: """Executable instance.""" return super(Threaded, self).__call__(*args, **kwargs) # type: ignore # pylint: disable=function-redefined, unused-argument @typing.overload @typing.overload # noqa: F811 def threaded(name: typing.Optional[str] = None, daemon: bool = False, started: bool = False) -> Threaded: """Overload: Name is not callable.""" # pylint: enable=unused-argument def threaded( # noqa: F811 name: typing.Optional[typing.Union[str, typing.Callable[..., typing.Any]]] = None, daemon: bool = False, started: bool = False, ) -> typing.Union[Threaded, typing.Callable[..., threading.Thread]]: """Run function in separate thread. :param name: New thread name. If callable: use as wrapped function. If none: use wrapped function name. :type name: typing.Union[None, str, typing.Callable] :param daemon: Daemonize thread. :type daemon: bool :param started: Return started thread :type started: bool :return: Threaded instance, if called as function or argumented decorator, else callable wraper :rtype: typing.Union[Threaded, typing.Callable[..., threading.Thread]] """ if callable(name): func, name = (name, "Threaded: " + getattr(name, "__name__", str(hash(name)))) return Threaded(name=name, daemon=daemon, started=started)(func) # type: ignore return Threaded(name=name, daemon=daemon, started=started) # pylint: enable=function-redefined
python-useful-helpers/threaded
threaded/_threaded.py
threaded
python
def threaded( # noqa: F811 name: typing.Optional[typing.Union[str, typing.Callable[..., typing.Any]]] = None, daemon: bool = False, started: bool = False, ) -> typing.Union[Threaded, typing.Callable[..., threading.Thread]]: if callable(name): func, name = (name, "Threaded: " + getattr(name, "__name__", str(hash(name)))) return Threaded(name=name, daemon=daemon, started=started)(func) # type: ignore return Threaded(name=name, daemon=daemon, started=started)
Run function in separate thread. :param name: New thread name. If callable: use as wrapped function. If none: use wrapped function name. :type name: typing.Union[None, str, typing.Callable] :param daemon: Daemonize thread. :type daemon: bool :param started: Return started thread :type started: bool :return: Threaded instance, if called as function or argumented decorator, else callable wrapper :rtype: typing.Union[Threaded, typing.Callable[..., threading.Thread]]
train
https://github.com/python-useful-helpers/threaded/blob/c1aa5a631ab3e2904b915ed6c6a8be03a9673a1a/threaded/_threaded.py#L142-L163
null
# Copyright 2017 - 2019 Alexey Stepanov aka penguinolog ## # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Threaded implementation. Asyncio is supported """ __all__ = ("Threaded", "threaded") # Standard Library import functools import threading import typing # Local Implementation from . import class_decorator class Threaded(class_decorator.BaseDecorator): """Run function in separate thread.""" __slots__ = ("__name", "__daemon", "__started") def __init__( self, name: typing.Optional[ typing.Union[str, typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] ] = None, daemon: bool = False, started: bool = False, ) -> None: """Run function in separate thread. :param name: New thread name. If callable: use as wrapped function. If none: use wrapped function name. :type name: typing.Optional[typing.Union[str, typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]]]] :param daemon: Daemonize thread. :type daemon: bool :param started: Return started thread :type started: bool """ # pylint: disable=assigning-non-slot self.__daemon: bool = daemon self.__started: bool = started if callable(name): func: typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] = name self.__name: typing.Optional[str] = "Threaded: " + getattr(name, "__name__", str(hash(name))) else: func, self.__name = None, name super(Threaded, self).__init__(func=func) # pylint: enable=assigning-non-slot @property def name(self) -> typing.Optional[str]: """Thread name. :rtype: typing.Optional[str] """ return self.__name @property def daemon(self) -> bool: """Start thread as daemon. :rtype: bool """ return self.__daemon @property def started(self) -> bool: """Return started thread. :rtype: bool """ return self.__started def __repr__(self) -> str: # pragma: no cover """For debug purposes.""" return f"{self.__class__.__name__}(name={self.name!r}, daemon={self.daemon!r}, started={self.started!r}, )" def _get_function_wrapper( self, func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] ) -> typing.Callable[..., threading.Thread]: """Here should be constructed and returned real decorator. 
:param func: Wrapped function :type func: typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]] :return: wrapped function :rtype: typing.Callable[..., threading.Thread] """ prepared: typing.Callable[..., typing.Any] = self._await_if_required(func) name: typing.Optional[str] = self.name if name is None: name = "Threaded: " + getattr(func, "__name__", str(hash(func))) # noinspection PyMissingOrEmptyDocstring @functools.wraps(prepared) # pylint: disable=missing-docstring def wrapper(*args, **kwargs): # type: (typing.Any, typing.Any) -> threading.Thread thread = threading.Thread(target=prepared, name=name, args=args, kwargs=kwargs, daemon=self.daemon) if self.started: thread.start() return thread return wrapper def __call__( # pylint: disable=useless-super-delegation self, *args: typing.Union[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]], typing.Any], **kwargs: typing.Any, ) -> typing.Union[threading.Thread, typing.Callable[..., threading.Thread]]: """Executable instance.""" return super(Threaded, self).__call__(*args, **kwargs) # type: ignore # pylint: disable=function-redefined, unused-argument @typing.overload def threaded( name: typing.Callable[..., typing.Any], daemon: bool = False, started: bool = False ) -> typing.Callable[..., threading.Thread]: """Overload: Call decorator without arguments.""" @typing.overload # noqa: F811 def threaded(name: typing.Optional[str] = None, daemon: bool = False, started: bool = False) -> Threaded: """Overload: Name is not callable.""" # pylint: enable=unused-argument # pylint: enable=function-redefined
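A hedged usage sketch of the threaded() helper (assuming it is re-exported by the threaded package): bare decoration returns an unstarted threading.Thread per call, while the argumented form can name, daemonize, and start the thread immediately.

import threading

import threaded  # assumed to re-export the threaded() helper

results = []


@threaded.threaded
def collect(value: int) -> None:
    results.append(value)


thread = collect(1)                     # started=False by default
assert isinstance(thread, threading.Thread)
thread.start()
thread.join()
assert results == [1]


@threaded.threaded(name="worker", daemon=True, started=True)
def collect_started(value: int) -> None:
    results.append(value)


collect_started(2).join()               # already started; just wait for it
assert results == [1, 2]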
python-useful-helpers/threaded
threaded/_threaded.py
Threaded._get_function_wrapper
python
def _get_function_wrapper( self, func: typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]] ) -> typing.Callable[..., threading.Thread]: prepared: typing.Callable[..., typing.Any] = self._await_if_required(func) name: typing.Optional[str] = self.name if name is None: name = "Threaded: " + getattr(func, "__name__", str(hash(func))) # noinspection PyMissingOrEmptyDocstring @functools.wraps(prepared) # pylint: disable=missing-docstring def wrapper(*args, **kwargs): # type: (typing.Any, typing.Any) -> threading.Thread thread = threading.Thread(target=prepared, name=name, args=args, kwargs=kwargs, daemon=self.daemon) if self.started: thread.start() return thread return wrapper
Construct and return the real decorator here. :param func: Wrapped function :type func: typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]] :return: wrapped function :rtype: typing.Callable[..., threading.Thread]
train
https://github.com/python-useful-helpers/threaded/blob/c1aa5a631ab3e2904b915ed6c6a8be03a9673a1a/threaded/_threaded.py#L94-L117
[ "def _await_if_required(\n target: typing.Callable[..., typing.Union[\"typing.Awaitable[typing.Any]\", typing.Any]]\n) -> typing.Callable[..., typing.Any]:\n \"\"\"Await result if coroutine was returned.\"\"\"\n\n @functools.wraps(target)\n def wrapper(*args, **kwargs): # type: (typing.Any, typing.Any) -> typing.Any\n \"\"\"Decorator/wrapper.\"\"\"\n result = target(*args, **kwargs)\n if asyncio.iscoroutine(result):\n loop = asyncio.new_event_loop()\n result = loop.run_until_complete(result)\n loop.close()\n return result\n\n return wrapper\n" ]
class Threaded(class_decorator.BaseDecorator): """Run function in separate thread.""" __slots__ = ("__name", "__daemon", "__started") def __init__( self, name: typing.Optional[ typing.Union[str, typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] ] = None, daemon: bool = False, started: bool = False, ) -> None: """Run function in separate thread. :param name: New thread name. If callable: use as wrapped function. If none: use wrapped function name. :type name: typing.Optional[typing.Union[str, typing.Callable[..., typing.Union[typing.Awaitable, typing.Any]]]] :param daemon: Daemonize thread. :type daemon: bool :param started: Return started thread :type started: bool """ # pylint: disable=assigning-non-slot self.__daemon: bool = daemon self.__started: bool = started if callable(name): func: typing.Optional[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]]] = name self.__name: typing.Optional[str] = "Threaded: " + getattr(name, "__name__", str(hash(name))) else: func, self.__name = None, name super(Threaded, self).__init__(func=func) # pylint: enable=assigning-non-slot @property def name(self) -> typing.Optional[str]: """Thread name. :rtype: typing.Optional[str] """ return self.__name @property def daemon(self) -> bool: """Start thread as daemon. :rtype: bool """ return self.__daemon @property def started(self) -> bool: """Return started thread. :rtype: bool """ return self.__started def __repr__(self) -> str: # pragma: no cover """For debug purposes.""" return f"{self.__class__.__name__}(name={self.name!r}, daemon={self.daemon!r}, started={self.started!r}, )" def __call__( # pylint: disable=useless-super-delegation self, *args: typing.Union[typing.Callable[..., typing.Union["typing.Awaitable[typing.Any]", typing.Any]], typing.Any], **kwargs: typing.Any, ) -> typing.Union[threading.Thread, typing.Callable[..., threading.Thread]]: """Executable instance.""" return super(Threaded, self).__call__(*args, **kwargs) # type: ignore
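A small check of the default naming behaviour in the wrapper above (hedged: assumes Threaded is re-exported by the threaded package): when no name is given, the thread is called "Threaded: " plus the wrapped function's __name__. Coroutine functions are also accepted, because _await_if_required runs the returned coroutine to completion on a fresh event loop inside the thread.

import threaded  # assumed to re-export Threaded


@threaded.Threaded
def noop() -> None:
    pass


thread = noop()
assert thread.name == "Threaded: noop"   # default name derived from func.__name__
assert thread.daemon is False            # daemon defaults to False
thread.start()
thread.join()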
GoogleCloudPlatform/python-repo-tools
gcp_devrel/tools/pylint.py
read_config
python
def read_config(contents): file_obj = io.StringIO(contents) config = six.moves.configparser.ConfigParser() config.readfp(file_obj) return config
Reads pylintrc config into native ConfigParser object. Args: contents (str): The contents of the file containing the INI config. Returns: ConfigParser.ConfigParser: The parsed configuration.
train
https://github.com/GoogleCloudPlatform/python-repo-tools/blob/87422ba91814529848a2b8bf8be4294283a3e041/gcp_devrel/tools/pylint.py#L113-L125
null
# Copyright 2016 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This script runs Pylint on the specified source. Before running Pylint, it generates a Pylint configuration on the fly based on programmatic defaults. """ from __future__ import print_function import collections import copy import imp import io import os import subprocess import sys import six DEFAULT_LIBRARY_RC_ADDITIONS = { 'MESSAGES CONTROL': { 'disable': [ 'I', 'import-error', 'no-member', 'protected-access', 'redefined-variable-type', 'similarities', 'no-else-return', 'ungrouped-imports' ], }, } DEFAULT_LIBRARY_RC_REPLACEMENTS = { 'MASTER': { 'ignore': ['CVS', '.git', '.cache', '.tox', '.nox'], 'load-plugins': 'pylint.extensions.check_docs', }, 'REPORTS': { 'reports': 'no', }, 'BASIC': { 'method-rgx': '[a-z_][a-z0-9_]{2,40}$', 'function-rgx': '[a-z_][a-z0-9_]{2,40}$', }, 'TYPECHECK': { 'ignored-modules': ['six', 'google.protobuf'], }, 'DESIGN': { 'min-public-methods': '0', 'max-args': '10', 'max-attributes': '15', }, } DEFAULT_TEST_RC_ADDITIONS = copy.deepcopy(DEFAULT_LIBRARY_RC_ADDITIONS) DEFAULT_TEST_RC_ADDITIONS['MESSAGES CONTROL']['disable'].extend([ 'missing-docstring', 'no-self-use', 'redefined-outer-name', 'unused-argument', 'no-name-in-module', ]) DEFAULT_TEST_RC_REPLACEMENTS = copy.deepcopy(DEFAULT_LIBRARY_RC_REPLACEMENTS) DEFAULT_TEST_RC_REPLACEMENTS.setdefault('BASIC', {}) DEFAULT_TEST_RC_REPLACEMENTS['BASIC'].update({ 'good-names': ['i', 'j', 'k', 'ex', 'Run', '_', 'fh', 'pytestmark'], 'method-rgx': '[a-z_][a-z0-9_]{2,80}$', 'function-rgx': '[a-z_][a-z0-9_]{2,80}$', }) _ERROR_TEMPLATE = 'Pylint failed on {} with status {:d}.' _LINT_FILESET_MSG = ( 'Keyword arguments rc_filename and description are both ' 'required. No other keyword arguments are allowed.') _MISSING_OPTION_ADDITION = 'Expected to be adding to existing option {!r}.' _MISSING_OPTION_REPLACE = 'Expected to be replacing existing option {!r}.' def get_default_config(): """Get the default Pylint configuration. .. note:: The output of this function varies based on the current version of Pylint installed. Returns: str: The default Pylint configuration. """ # Swallow STDERR if it says # "No config file found, using default configuration" result = subprocess.check_output(['pylint', '--generate-rcfile'], stderr=subprocess.PIPE) # On Python 3, this returns bytes (from STDOUT), so we # convert to a string. return result.decode('utf-8') def load_local_config(filename): """Loads the pylint.config.py file. Args: filename (str): The python file containing the local configuration. Returns: module: The loaded Python module. """ if not filename: return imp.new_module('local_pylint_config') module = imp.load_source('local_pylint_config', filename) return module Config = collections.namedtuple('Config', [ 'library_additions', 'library_replacements', 'test_additions', 'test_replacements']) def determine_final_config(config_module): """Determines the final additions and replacements. Combines the config module with the defaults. 
Args: config_module: The loaded local configuration module. Returns: Config: the final configuration. """ config = Config( DEFAULT_LIBRARY_RC_ADDITIONS, DEFAULT_LIBRARY_RC_REPLACEMENTS, DEFAULT_TEST_RC_ADDITIONS, DEFAULT_TEST_RC_REPLACEMENTS) for field in config._fields: if hasattr(config_module, field): config = config._replace(**{field: getattr(config_module, field)}) return config def _transform_opt(opt_val): """Transform a config option value to a string. If already a string, do nothing. If an iterable, then combine into a string by joining on ",". Args: opt_val (Union[str, list]): A config option's value. Returns: str: The option value converted to a string. """ if isinstance(opt_val, (list, tuple)): return ','.join(opt_val) else: return opt_val def lint_fileset(*dirnames, **kwargs): """Lints a group of files using a given rcfile. Keyword arguments are * ``rc_filename`` (``str``): The name of the Pylint config RC file. * ``description`` (``str``): A description of the files and configuration currently being run. Args: dirnames (tuple): Directories to run Pylint in. kwargs: The keyword arguments. The only keyword arguments are ``rc_filename`` and ``description`` and both are required. Raises: KeyError: If the wrong keyword arguments are used. """ try: rc_filename = kwargs['rc_filename'] description = kwargs['description'] if len(kwargs) != 2: raise KeyError except KeyError: raise KeyError(_LINT_FILESET_MSG) pylint_shell_command = ['pylint', '--rcfile', rc_filename] pylint_shell_command.extend(dirnames) status_code = subprocess.call(pylint_shell_command) if status_code != 0: error_message = _ERROR_TEMPLATE.format(description, status_code) print(error_message, file=sys.stderr) sys.exit(status_code) def make_rc(base_cfg, target_filename, additions=None, replacements=None): """Combines a base rc and additions into single file. Args: base_cfg (ConfigParser.ConfigParser): The configuration we are merging into. target_filename (str): The filename where the new configuration will be saved. additions (dict): (Optional) The values added to the configuration. replacements (dict): (Optional) The wholesale replacements for the new configuration. Raises: KeyError: if one of the additions or replacements does not already exist in the current config. """ # Set-up the mutable default values. if additions is None: additions = {} if replacements is None: replacements = {} # Create fresh config, which must extend the base one. new_cfg = six.moves.configparser.ConfigParser() # pylint: disable=protected-access new_cfg._sections = copy.deepcopy(base_cfg._sections) new_sections = new_cfg._sections # pylint: enable=protected-access for section, opts in additions.items(): curr_section = new_sections.setdefault( section, collections.OrderedDict()) for opt, opt_val in opts.items(): curr_val = curr_section.get(opt) if curr_val is None: msg = _MISSING_OPTION_ADDITION.format(opt) raise KeyError(msg) curr_val = curr_val.rstrip(',') opt_val = _transform_opt(opt_val) curr_section[opt] = '%s, %s' % (curr_val, opt_val) for section, opts in replacements.items(): curr_section = new_sections.setdefault( section, collections.OrderedDict()) for opt, opt_val in opts.items(): curr_val = curr_section.get(opt) if curr_val is None: # NOTE: This doesn't need to fail, because some options # are present in one version of pylint and not present # in another. For example ``[BASIC].method-rgx`` is # ``(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$`` in # ``1.7.5`` but in ``1.8.0`` the default config has # ``#method-rgx=`` (i.e. 
it is commented out in the # ``[BASIC]`` section). msg = _MISSING_OPTION_REPLACE.format(opt) print(msg, file=sys.stderr) opt_val = _transform_opt(opt_val) curr_section[opt] = '%s' % (opt_val,) with open(target_filename, 'w') as file_obj: new_cfg.write(file_obj) def run_command(args): """Script entry point. Lints both sets of files.""" library_rc = 'pylintrc' test_rc = 'pylintrc.test' if os.path.exists(library_rc): os.remove(library_rc) if os.path.exists(test_rc): os.remove(test_rc) default_config = read_config(get_default_config()) user_config = load_local_config(args.config) configuration = determine_final_config(user_config) make_rc(default_config, library_rc, additions=configuration.library_additions, replacements=configuration.library_replacements) make_rc(default_config, test_rc, additions=configuration.test_additions, replacements=configuration.test_replacements) lint_fileset(*args.library_filesets, rc_filename=library_rc, description='Library') lint_fileset(*args.test_filesets, rc_filename=test_rc, description='Test') def register_commands(subparsers): parser = subparsers.add_parser( 'run-pylint', help=__doc__) parser.set_defaults(func=run_command) parser.add_argument('--config') parser.add_argument('--library-filesets', nargs='+', default=[]) parser.add_argument('--test-filesets', nargs='+', default=[])
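A hedged, Python 3-only sketch of what read_config does with rcfile-style text: the original goes through six and the legacy readfp() name for Python 2 compatibility, while this sketch uses the modern equivalent, read_file().

import configparser
import io

sample = """\
[MESSAGES CONTROL]
disable = I,import-error

[REPORTS]
reports = no
"""

config = configparser.ConfigParser()
config.read_file(io.StringIO(sample))    # modern spelling of the readfp() call above

assert config.get("MESSAGES CONTROL", "disable") == "I,import-error"
assert config.get("REPORTS", "reports") == "no"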
GoogleCloudPlatform/python-repo-tools
gcp_devrel/tools/pylint.py
load_local_config
python
def load_local_config(filename): if not filename: return imp.new_module('local_pylint_config') module = imp.load_source('local_pylint_config', filename) return module
Loads the pylint.config.py file. Args: filename (str): The python file containing the local configuration. Returns: module: The loaded Python module.
train
https://github.com/GoogleCloudPlatform/python-repo-tools/blob/87422ba91814529848a2b8bf8be4294283a3e041/gcp_devrel/tools/pylint.py#L128-L140
null
# Copyright 2016 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This script runs Pylint on the specified source. Before running Pylint, it generates a Pylint configuration on the fly based on programmatic defaults. """ from __future__ import print_function import collections import copy import imp import io import os import subprocess import sys import six DEFAULT_LIBRARY_RC_ADDITIONS = { 'MESSAGES CONTROL': { 'disable': [ 'I', 'import-error', 'no-member', 'protected-access', 'redefined-variable-type', 'similarities', 'no-else-return', 'ungrouped-imports' ], }, } DEFAULT_LIBRARY_RC_REPLACEMENTS = { 'MASTER': { 'ignore': ['CVS', '.git', '.cache', '.tox', '.nox'], 'load-plugins': 'pylint.extensions.check_docs', }, 'REPORTS': { 'reports': 'no', }, 'BASIC': { 'method-rgx': '[a-z_][a-z0-9_]{2,40}$', 'function-rgx': '[a-z_][a-z0-9_]{2,40}$', }, 'TYPECHECK': { 'ignored-modules': ['six', 'google.protobuf'], }, 'DESIGN': { 'min-public-methods': '0', 'max-args': '10', 'max-attributes': '15', }, } DEFAULT_TEST_RC_ADDITIONS = copy.deepcopy(DEFAULT_LIBRARY_RC_ADDITIONS) DEFAULT_TEST_RC_ADDITIONS['MESSAGES CONTROL']['disable'].extend([ 'missing-docstring', 'no-self-use', 'redefined-outer-name', 'unused-argument', 'no-name-in-module', ]) DEFAULT_TEST_RC_REPLACEMENTS = copy.deepcopy(DEFAULT_LIBRARY_RC_REPLACEMENTS) DEFAULT_TEST_RC_REPLACEMENTS.setdefault('BASIC', {}) DEFAULT_TEST_RC_REPLACEMENTS['BASIC'].update({ 'good-names': ['i', 'j', 'k', 'ex', 'Run', '_', 'fh', 'pytestmark'], 'method-rgx': '[a-z_][a-z0-9_]{2,80}$', 'function-rgx': '[a-z_][a-z0-9_]{2,80}$', }) _ERROR_TEMPLATE = 'Pylint failed on {} with status {:d}.' _LINT_FILESET_MSG = ( 'Keyword arguments rc_filename and description are both ' 'required. No other keyword arguments are allowed.') _MISSING_OPTION_ADDITION = 'Expected to be adding to existing option {!r}.' _MISSING_OPTION_REPLACE = 'Expected to be replacing existing option {!r}.' def get_default_config(): """Get the default Pylint configuration. .. note:: The output of this function varies based on the current version of Pylint installed. Returns: str: The default Pylint configuration. """ # Swallow STDERR if it says # "No config file found, using default configuration" result = subprocess.check_output(['pylint', '--generate-rcfile'], stderr=subprocess.PIPE) # On Python 3, this returns bytes (from STDOUT), so we # convert to a string. return result.decode('utf-8') def read_config(contents): """Reads pylintrc config into native ConfigParser object. Args: contents (str): The contents of the file containing the INI config. Returns: ConfigParser.ConfigParser: The parsed configuration. """ file_obj = io.StringIO(contents) config = six.moves.configparser.ConfigParser() config.readfp(file_obj) return config Config = collections.namedtuple('Config', [ 'library_additions', 'library_replacements', 'test_additions', 'test_replacements']) def determine_final_config(config_module): """Determines the final additions and replacements. Combines the config module with the defaults. 
Args: config_module: The loaded local configuration module. Returns: Config: the final configuration. """ config = Config( DEFAULT_LIBRARY_RC_ADDITIONS, DEFAULT_LIBRARY_RC_REPLACEMENTS, DEFAULT_TEST_RC_ADDITIONS, DEFAULT_TEST_RC_REPLACEMENTS) for field in config._fields: if hasattr(config_module, field): config = config._replace(**{field: getattr(config_module, field)}) return config def _transform_opt(opt_val): """Transform a config option value to a string. If already a string, do nothing. If an iterable, then combine into a string by joining on ",". Args: opt_val (Union[str, list]): A config option's value. Returns: str: The option value converted to a string. """ if isinstance(opt_val, (list, tuple)): return ','.join(opt_val) else: return opt_val def lint_fileset(*dirnames, **kwargs): """Lints a group of files using a given rcfile. Keyword arguments are * ``rc_filename`` (``str``): The name of the Pylint config RC file. * ``description`` (``str``): A description of the files and configuration currently being run. Args: dirnames (tuple): Directories to run Pylint in. kwargs: The keyword arguments. The only keyword arguments are ``rc_filename`` and ``description`` and both are required. Raises: KeyError: If the wrong keyword arguments are used. """ try: rc_filename = kwargs['rc_filename'] description = kwargs['description'] if len(kwargs) != 2: raise KeyError except KeyError: raise KeyError(_LINT_FILESET_MSG) pylint_shell_command = ['pylint', '--rcfile', rc_filename] pylint_shell_command.extend(dirnames) status_code = subprocess.call(pylint_shell_command) if status_code != 0: error_message = _ERROR_TEMPLATE.format(description, status_code) print(error_message, file=sys.stderr) sys.exit(status_code) def make_rc(base_cfg, target_filename, additions=None, replacements=None): """Combines a base rc and additions into single file. Args: base_cfg (ConfigParser.ConfigParser): The configuration we are merging into. target_filename (str): The filename where the new configuration will be saved. additions (dict): (Optional) The values added to the configuration. replacements (dict): (Optional) The wholesale replacements for the new configuration. Raises: KeyError: if one of the additions or replacements does not already exist in the current config. """ # Set-up the mutable default values. if additions is None: additions = {} if replacements is None: replacements = {} # Create fresh config, which must extend the base one. new_cfg = six.moves.configparser.ConfigParser() # pylint: disable=protected-access new_cfg._sections = copy.deepcopy(base_cfg._sections) new_sections = new_cfg._sections # pylint: enable=protected-access for section, opts in additions.items(): curr_section = new_sections.setdefault( section, collections.OrderedDict()) for opt, opt_val in opts.items(): curr_val = curr_section.get(opt) if curr_val is None: msg = _MISSING_OPTION_ADDITION.format(opt) raise KeyError(msg) curr_val = curr_val.rstrip(',') opt_val = _transform_opt(opt_val) curr_section[opt] = '%s, %s' % (curr_val, opt_val) for section, opts in replacements.items(): curr_section = new_sections.setdefault( section, collections.OrderedDict()) for opt, opt_val in opts.items(): curr_val = curr_section.get(opt) if curr_val is None: # NOTE: This doesn't need to fail, because some options # are present in one version of pylint and not present # in another. For example ``[BASIC].method-rgx`` is # ``(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$`` in # ``1.7.5`` but in ``1.8.0`` the default config has # ``#method-rgx=`` (i.e. 
it is commented out in the # ``[BASIC]`` section). msg = _MISSING_OPTION_REPLACE.format(opt) print(msg, file=sys.stderr) opt_val = _transform_opt(opt_val) curr_section[opt] = '%s' % (opt_val,) with open(target_filename, 'w') as file_obj: new_cfg.write(file_obj) def run_command(args): """Script entry point. Lints both sets of files.""" library_rc = 'pylintrc' test_rc = 'pylintrc.test' if os.path.exists(library_rc): os.remove(library_rc) if os.path.exists(test_rc): os.remove(test_rc) default_config = read_config(get_default_config()) user_config = load_local_config(args.config) configuration = determine_final_config(user_config) make_rc(default_config, library_rc, additions=configuration.library_additions, replacements=configuration.library_replacements) make_rc(default_config, test_rc, additions=configuration.test_additions, replacements=configuration.test_replacements) lint_fileset(*args.library_filesets, rc_filename=library_rc, description='Library') lint_fileset(*args.test_filesets, rc_filename=test_rc, description='Test') def register_commands(subparsers): parser = subparsers.add_parser( 'run-pylint', help=__doc__) parser.set_defaults(func=run_command) parser.add_argument('--config') parser.add_argument('--library-filesets', nargs='+', default=[]) parser.add_argument('--test-filesets', nargs='+', default=[])
GoogleCloudPlatform/python-repo-tools
gcp_devrel/tools/pylint.py
determine_final_config
python
def determine_final_config(config_module): config = Config( DEFAULT_LIBRARY_RC_ADDITIONS, DEFAULT_LIBRARY_RC_REPLACEMENTS, DEFAULT_TEST_RC_ADDITIONS, DEFAULT_TEST_RC_REPLACEMENTS) for field in config._fields: if hasattr(config_module, field): config = config._replace(**{field: getattr(config_module, field)}) return config
Determines the final additions and replacements. Combines the config module with the defaults. Args: config_module: The loaded local configuration module. Returns: Config: the final configuration.
train
https://github.com/GoogleCloudPlatform/python-repo-tools/blob/87422ba91814529848a2b8bf8be4294283a3e041/gcp_devrel/tools/pylint.py#L148-L167
null
# Copyright 2016 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This script runs Pylint on the specified source. Before running Pylint, it generates a Pylint configuration on the fly based on programmatic defaults. """ from __future__ import print_function import collections import copy import imp import io import os import subprocess import sys import six DEFAULT_LIBRARY_RC_ADDITIONS = { 'MESSAGES CONTROL': { 'disable': [ 'I', 'import-error', 'no-member', 'protected-access', 'redefined-variable-type', 'similarities', 'no-else-return', 'ungrouped-imports' ], }, } DEFAULT_LIBRARY_RC_REPLACEMENTS = { 'MASTER': { 'ignore': ['CVS', '.git', '.cache', '.tox', '.nox'], 'load-plugins': 'pylint.extensions.check_docs', }, 'REPORTS': { 'reports': 'no', }, 'BASIC': { 'method-rgx': '[a-z_][a-z0-9_]{2,40}$', 'function-rgx': '[a-z_][a-z0-9_]{2,40}$', }, 'TYPECHECK': { 'ignored-modules': ['six', 'google.protobuf'], }, 'DESIGN': { 'min-public-methods': '0', 'max-args': '10', 'max-attributes': '15', }, } DEFAULT_TEST_RC_ADDITIONS = copy.deepcopy(DEFAULT_LIBRARY_RC_ADDITIONS) DEFAULT_TEST_RC_ADDITIONS['MESSAGES CONTROL']['disable'].extend([ 'missing-docstring', 'no-self-use', 'redefined-outer-name', 'unused-argument', 'no-name-in-module', ]) DEFAULT_TEST_RC_REPLACEMENTS = copy.deepcopy(DEFAULT_LIBRARY_RC_REPLACEMENTS) DEFAULT_TEST_RC_REPLACEMENTS.setdefault('BASIC', {}) DEFAULT_TEST_RC_REPLACEMENTS['BASIC'].update({ 'good-names': ['i', 'j', 'k', 'ex', 'Run', '_', 'fh', 'pytestmark'], 'method-rgx': '[a-z_][a-z0-9_]{2,80}$', 'function-rgx': '[a-z_][a-z0-9_]{2,80}$', }) _ERROR_TEMPLATE = 'Pylint failed on {} with status {:d}.' _LINT_FILESET_MSG = ( 'Keyword arguments rc_filename and description are both ' 'required. No other keyword arguments are allowed.') _MISSING_OPTION_ADDITION = 'Expected to be adding to existing option {!r}.' _MISSING_OPTION_REPLACE = 'Expected to be replacing existing option {!r}.' def get_default_config(): """Get the default Pylint configuration. .. note:: The output of this function varies based on the current version of Pylint installed. Returns: str: The default Pylint configuration. """ # Swallow STDERR if it says # "No config file found, using default configuration" result = subprocess.check_output(['pylint', '--generate-rcfile'], stderr=subprocess.PIPE) # On Python 3, this returns bytes (from STDOUT), so we # convert to a string. return result.decode('utf-8') def read_config(contents): """Reads pylintrc config into native ConfigParser object. Args: contents (str): The contents of the file containing the INI config. Returns: ConfigParser.ConfigParser: The parsed configuration. """ file_obj = io.StringIO(contents) config = six.moves.configparser.ConfigParser() config.readfp(file_obj) return config def load_local_config(filename): """Loads the pylint.config.py file. Args: filename (str): The python file containing the local configuration. Returns: module: The loaded Python module. 
""" if not filename: return imp.new_module('local_pylint_config') module = imp.load_source('local_pylint_config', filename) return module Config = collections.namedtuple('Config', [ 'library_additions', 'library_replacements', 'test_additions', 'test_replacements']) def _transform_opt(opt_val): """Transform a config option value to a string. If already a string, do nothing. If an iterable, then combine into a string by joining on ",". Args: opt_val (Union[str, list]): A config option's value. Returns: str: The option value converted to a string. """ if isinstance(opt_val, (list, tuple)): return ','.join(opt_val) else: return opt_val def lint_fileset(*dirnames, **kwargs): """Lints a group of files using a given rcfile. Keyword arguments are * ``rc_filename`` (``str``): The name of the Pylint config RC file. * ``description`` (``str``): A description of the files and configuration currently being run. Args: dirnames (tuple): Directories to run Pylint in. kwargs: The keyword arguments. The only keyword arguments are ``rc_filename`` and ``description`` and both are required. Raises: KeyError: If the wrong keyword arguments are used. """ try: rc_filename = kwargs['rc_filename'] description = kwargs['description'] if len(kwargs) != 2: raise KeyError except KeyError: raise KeyError(_LINT_FILESET_MSG) pylint_shell_command = ['pylint', '--rcfile', rc_filename] pylint_shell_command.extend(dirnames) status_code = subprocess.call(pylint_shell_command) if status_code != 0: error_message = _ERROR_TEMPLATE.format(description, status_code) print(error_message, file=sys.stderr) sys.exit(status_code) def make_rc(base_cfg, target_filename, additions=None, replacements=None): """Combines a base rc and additions into single file. Args: base_cfg (ConfigParser.ConfigParser): The configuration we are merging into. target_filename (str): The filename where the new configuration will be saved. additions (dict): (Optional) The values added to the configuration. replacements (dict): (Optional) The wholesale replacements for the new configuration. Raises: KeyError: if one of the additions or replacements does not already exist in the current config. """ # Set-up the mutable default values. if additions is None: additions = {} if replacements is None: replacements = {} # Create fresh config, which must extend the base one. new_cfg = six.moves.configparser.ConfigParser() # pylint: disable=protected-access new_cfg._sections = copy.deepcopy(base_cfg._sections) new_sections = new_cfg._sections # pylint: enable=protected-access for section, opts in additions.items(): curr_section = new_sections.setdefault( section, collections.OrderedDict()) for opt, opt_val in opts.items(): curr_val = curr_section.get(opt) if curr_val is None: msg = _MISSING_OPTION_ADDITION.format(opt) raise KeyError(msg) curr_val = curr_val.rstrip(',') opt_val = _transform_opt(opt_val) curr_section[opt] = '%s, %s' % (curr_val, opt_val) for section, opts in replacements.items(): curr_section = new_sections.setdefault( section, collections.OrderedDict()) for opt, opt_val in opts.items(): curr_val = curr_section.get(opt) if curr_val is None: # NOTE: This doesn't need to fail, because some options # are present in one version of pylint and not present # in another. For example ``[BASIC].method-rgx`` is # ``(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$`` in # ``1.7.5`` but in ``1.8.0`` the default config has # ``#method-rgx=`` (i.e. it is commented out in the # ``[BASIC]`` section). 
msg = _MISSING_OPTION_REPLACE.format(opt) print(msg, file=sys.stderr) opt_val = _transform_opt(opt_val) curr_section[opt] = '%s' % (opt_val,) with open(target_filename, 'w') as file_obj: new_cfg.write(file_obj) def run_command(args): """Script entry point. Lints both sets of files.""" library_rc = 'pylintrc' test_rc = 'pylintrc.test' if os.path.exists(library_rc): os.remove(library_rc) if os.path.exists(test_rc): os.remove(test_rc) default_config = read_config(get_default_config()) user_config = load_local_config(args.config) configuration = determine_final_config(user_config) make_rc(default_config, library_rc, additions=configuration.library_additions, replacements=configuration.library_replacements) make_rc(default_config, test_rc, additions=configuration.test_additions, replacements=configuration.test_replacements) lint_fileset(*args.library_filesets, rc_filename=library_rc, description='Library') lint_fileset(*args.test_filesets, rc_filename=test_rc, description='Test') def register_commands(subparsers): parser = subparsers.add_parser( 'run-pylint', help=__doc__) parser.set_defaults(func=run_command) parser.add_argument('--config') parser.add_argument('--library-filesets', nargs='+', default=[]) parser.add_argument('--test-filesets', nargs='+', default=[])
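A minimal usage sketch for determine_final_config (not part of the source record): it assumes the module is importable as gcp_devrel.tools.pylint and uses an in-memory stand-in for the local pylint.config.py module.

import types
from gcp_devrel.tools import pylint  # assumed import path for gcp_devrel/tools/pylint.py

# Build a stand-in config module; only attributes named in the Config
# namedtuple fields (library_additions, library_replacements,
# test_additions, test_replacements) are honored, everything else
# falls back to the module-level defaults.
fake_config = types.ModuleType('local_pylint_config')
fake_config.library_replacements = {'MASTER': {'ignore': ['CVS', '.git']}}

final = pylint.determine_final_config(fake_config)
print(final.library_replacements)  # the override supplied above
print(final.library_additions)     # still DEFAULT_LIBRARY_RC_ADDITIONS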
GoogleCloudPlatform/python-repo-tools
gcp_devrel/tools/pylint.py
lint_fileset
python
def lint_fileset(*dirnames, **kwargs): try: rc_filename = kwargs['rc_filename'] description = kwargs['description'] if len(kwargs) != 2: raise KeyError except KeyError: raise KeyError(_LINT_FILESET_MSG) pylint_shell_command = ['pylint', '--rcfile', rc_filename] pylint_shell_command.extend(dirnames) status_code = subprocess.call(pylint_shell_command) if status_code != 0: error_message = _ERROR_TEMPLATE.format(description, status_code) print(error_message, file=sys.stderr) sys.exit(status_code)
Lints a group of files using a given rcfile. Keyword arguments are * ``rc_filename`` (``str``): The name of the Pylint config RC file. * ``description`` (``str``): A description of the files and configuration currently being run. Args: dirnames (tuple): Directories to run Pylint in. kwargs: The keyword arguments. The only keyword arguments are ``rc_filename`` and ``description`` and both are required. Raises: KeyError: If the wrong keyword arguments are used.
train
https://github.com/GoogleCloudPlatform/python-repo-tools/blob/87422ba91814529848a2b8bf8be4294283a3e041/gcp_devrel/tools/pylint.py#L188-L220
null
# Copyright 2016 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This script runs Pylint on the specified source. Before running Pylint, it generates a Pylint configuration on the fly based on programmatic defaults. """ from __future__ import print_function import collections import copy import imp import io import os import subprocess import sys import six DEFAULT_LIBRARY_RC_ADDITIONS = { 'MESSAGES CONTROL': { 'disable': [ 'I', 'import-error', 'no-member', 'protected-access', 'redefined-variable-type', 'similarities', 'no-else-return', 'ungrouped-imports' ], }, } DEFAULT_LIBRARY_RC_REPLACEMENTS = { 'MASTER': { 'ignore': ['CVS', '.git', '.cache', '.tox', '.nox'], 'load-plugins': 'pylint.extensions.check_docs', }, 'REPORTS': { 'reports': 'no', }, 'BASIC': { 'method-rgx': '[a-z_][a-z0-9_]{2,40}$', 'function-rgx': '[a-z_][a-z0-9_]{2,40}$', }, 'TYPECHECK': { 'ignored-modules': ['six', 'google.protobuf'], }, 'DESIGN': { 'min-public-methods': '0', 'max-args': '10', 'max-attributes': '15', }, } DEFAULT_TEST_RC_ADDITIONS = copy.deepcopy(DEFAULT_LIBRARY_RC_ADDITIONS) DEFAULT_TEST_RC_ADDITIONS['MESSAGES CONTROL']['disable'].extend([ 'missing-docstring', 'no-self-use', 'redefined-outer-name', 'unused-argument', 'no-name-in-module', ]) DEFAULT_TEST_RC_REPLACEMENTS = copy.deepcopy(DEFAULT_LIBRARY_RC_REPLACEMENTS) DEFAULT_TEST_RC_REPLACEMENTS.setdefault('BASIC', {}) DEFAULT_TEST_RC_REPLACEMENTS['BASIC'].update({ 'good-names': ['i', 'j', 'k', 'ex', 'Run', '_', 'fh', 'pytestmark'], 'method-rgx': '[a-z_][a-z0-9_]{2,80}$', 'function-rgx': '[a-z_][a-z0-9_]{2,80}$', }) _ERROR_TEMPLATE = 'Pylint failed on {} with status {:d}.' _LINT_FILESET_MSG = ( 'Keyword arguments rc_filename and description are both ' 'required. No other keyword arguments are allowed.') _MISSING_OPTION_ADDITION = 'Expected to be adding to existing option {!r}.' _MISSING_OPTION_REPLACE = 'Expected to be replacing existing option {!r}.' def get_default_config(): """Get the default Pylint configuration. .. note:: The output of this function varies based on the current version of Pylint installed. Returns: str: The default Pylint configuration. """ # Swallow STDERR if it says # "No config file found, using default configuration" result = subprocess.check_output(['pylint', '--generate-rcfile'], stderr=subprocess.PIPE) # On Python 3, this returns bytes (from STDOUT), so we # convert to a string. return result.decode('utf-8') def read_config(contents): """Reads pylintrc config into native ConfigParser object. Args: contents (str): The contents of the file containing the INI config. Returns: ConfigParser.ConfigParser: The parsed configuration. """ file_obj = io.StringIO(contents) config = six.moves.configparser.ConfigParser() config.readfp(file_obj) return config def load_local_config(filename): """Loads the pylint.config.py file. Args: filename (str): The python file containing the local configuration. Returns: module: The loaded Python module. 
""" if not filename: return imp.new_module('local_pylint_config') module = imp.load_source('local_pylint_config', filename) return module Config = collections.namedtuple('Config', [ 'library_additions', 'library_replacements', 'test_additions', 'test_replacements']) def determine_final_config(config_module): """Determines the final additions and replacements. Combines the config module with the defaults. Args: config_module: The loaded local configuration module. Returns: Config: the final configuration. """ config = Config( DEFAULT_LIBRARY_RC_ADDITIONS, DEFAULT_LIBRARY_RC_REPLACEMENTS, DEFAULT_TEST_RC_ADDITIONS, DEFAULT_TEST_RC_REPLACEMENTS) for field in config._fields: if hasattr(config_module, field): config = config._replace(**{field: getattr(config_module, field)}) return config def _transform_opt(opt_val): """Transform a config option value to a string. If already a string, do nothing. If an iterable, then combine into a string by joining on ",". Args: opt_val (Union[str, list]): A config option's value. Returns: str: The option value converted to a string. """ if isinstance(opt_val, (list, tuple)): return ','.join(opt_val) else: return opt_val def make_rc(base_cfg, target_filename, additions=None, replacements=None): """Combines a base rc and additions into single file. Args: base_cfg (ConfigParser.ConfigParser): The configuration we are merging into. target_filename (str): The filename where the new configuration will be saved. additions (dict): (Optional) The values added to the configuration. replacements (dict): (Optional) The wholesale replacements for the new configuration. Raises: KeyError: if one of the additions or replacements does not already exist in the current config. """ # Set-up the mutable default values. if additions is None: additions = {} if replacements is None: replacements = {} # Create fresh config, which must extend the base one. new_cfg = six.moves.configparser.ConfigParser() # pylint: disable=protected-access new_cfg._sections = copy.deepcopy(base_cfg._sections) new_sections = new_cfg._sections # pylint: enable=protected-access for section, opts in additions.items(): curr_section = new_sections.setdefault( section, collections.OrderedDict()) for opt, opt_val in opts.items(): curr_val = curr_section.get(opt) if curr_val is None: msg = _MISSING_OPTION_ADDITION.format(opt) raise KeyError(msg) curr_val = curr_val.rstrip(',') opt_val = _transform_opt(opt_val) curr_section[opt] = '%s, %s' % (curr_val, opt_val) for section, opts in replacements.items(): curr_section = new_sections.setdefault( section, collections.OrderedDict()) for opt, opt_val in opts.items(): curr_val = curr_section.get(opt) if curr_val is None: # NOTE: This doesn't need to fail, because some options # are present in one version of pylint and not present # in another. For example ``[BASIC].method-rgx`` is # ``(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$`` in # ``1.7.5`` but in ``1.8.0`` the default config has # ``#method-rgx=`` (i.e. it is commented out in the # ``[BASIC]`` section). msg = _MISSING_OPTION_REPLACE.format(opt) print(msg, file=sys.stderr) opt_val = _transform_opt(opt_val) curr_section[opt] = '%s' % (opt_val,) with open(target_filename, 'w') as file_obj: new_cfg.write(file_obj) def run_command(args): """Script entry point. 
Lints both sets of files.""" library_rc = 'pylintrc' test_rc = 'pylintrc.test' if os.path.exists(library_rc): os.remove(library_rc) if os.path.exists(test_rc): os.remove(test_rc) default_config = read_config(get_default_config()) user_config = load_local_config(args.config) configuration = determine_final_config(user_config) make_rc(default_config, library_rc, additions=configuration.library_additions, replacements=configuration.library_replacements) make_rc(default_config, test_rc, additions=configuration.test_additions, replacements=configuration.test_replacements) lint_fileset(*args.library_filesets, rc_filename=library_rc, description='Library') lint_fileset(*args.test_filesets, rc_filename=test_rc, description='Test') def register_commands(subparsers): parser = subparsers.add_parser( 'run-pylint', help=__doc__) parser.set_defaults(func=run_command) parser.add_argument('--config') parser.add_argument('--library-filesets', nargs='+', default=[]) parser.add_argument('--test-filesets', nargs='+', default=[])
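A hedged usage sketch for lint_fileset, assuming the gcp_devrel.tools.pylint import path; the directory names and rcfile name are placeholders.

from gcp_devrel.tools import pylint  # assumed import path

# rc_filename and description are both required keyword arguments; any other
# combination raises KeyError. A non-zero pylint status prints an error and
# exits the process via sys.exit.
pylint.lint_fileset('my_library', 'my_other_library',
                    rc_filename='pylintrc', description='Library')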
GoogleCloudPlatform/python-repo-tools
gcp_devrel/tools/pylint.py
make_rc
python
def make_rc(base_cfg, target_filename, additions=None, replacements=None): # Set-up the mutable default values. if additions is None: additions = {} if replacements is None: replacements = {} # Create fresh config, which must extend the base one. new_cfg = six.moves.configparser.ConfigParser() # pylint: disable=protected-access new_cfg._sections = copy.deepcopy(base_cfg._sections) new_sections = new_cfg._sections # pylint: enable=protected-access for section, opts in additions.items(): curr_section = new_sections.setdefault( section, collections.OrderedDict()) for opt, opt_val in opts.items(): curr_val = curr_section.get(opt) if curr_val is None: msg = _MISSING_OPTION_ADDITION.format(opt) raise KeyError(msg) curr_val = curr_val.rstrip(',') opt_val = _transform_opt(opt_val) curr_section[opt] = '%s, %s' % (curr_val, opt_val) for section, opts in replacements.items(): curr_section = new_sections.setdefault( section, collections.OrderedDict()) for opt, opt_val in opts.items(): curr_val = curr_section.get(opt) if curr_val is None: # NOTE: This doesn't need to fail, because some options # are present in one version of pylint and not present # in another. For example ``[BASIC].method-rgx`` is # ``(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$`` in # ``1.7.5`` but in ``1.8.0`` the default config has # ``#method-rgx=`` (i.e. it is commented out in the # ``[BASIC]`` section). msg = _MISSING_OPTION_REPLACE.format(opt) print(msg, file=sys.stderr) opt_val = _transform_opt(opt_val) curr_section[opt] = '%s' % (opt_val,) with open(target_filename, 'w') as file_obj: new_cfg.write(file_obj)
Combines a base rc and additions into a single file. Args: base_cfg (ConfigParser.ConfigParser): The configuration we are merging into. target_filename (str): The filename where the new configuration will be saved. additions (dict): (Optional) The values added to the configuration. replacements (dict): (Optional) The wholesale replacements for the new configuration. Raises: KeyError: If one of the additions or replacements does not already exist in the current config.
train
https://github.com/GoogleCloudPlatform/python-repo-tools/blob/87422ba91814529848a2b8bf8be4294283a3e041/gcp_devrel/tools/pylint.py#L223-L284
[ "def _transform_opt(opt_val):\n \"\"\"Transform a config option value to a string.\n\n If already a string, do nothing. If an iterable, then\n combine into a string by joining on \",\".\n\n Args:\n opt_val (Union[str, list]): A config option's value.\n\n Returns:\n str: The option value converted to a string.\n \"\"\"\n if isinstance(opt_val, (list, tuple)):\n return ','.join(opt_val)\n else:\n return opt_val\n" ]
# Copyright 2016 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This script runs Pylint on the specified source. Before running Pylint, it generates a Pylint configuration on the fly based on programmatic defaults. """ from __future__ import print_function import collections import copy import imp import io import os import subprocess import sys import six DEFAULT_LIBRARY_RC_ADDITIONS = { 'MESSAGES CONTROL': { 'disable': [ 'I', 'import-error', 'no-member', 'protected-access', 'redefined-variable-type', 'similarities', 'no-else-return', 'ungrouped-imports' ], }, } DEFAULT_LIBRARY_RC_REPLACEMENTS = { 'MASTER': { 'ignore': ['CVS', '.git', '.cache', '.tox', '.nox'], 'load-plugins': 'pylint.extensions.check_docs', }, 'REPORTS': { 'reports': 'no', }, 'BASIC': { 'method-rgx': '[a-z_][a-z0-9_]{2,40}$', 'function-rgx': '[a-z_][a-z0-9_]{2,40}$', }, 'TYPECHECK': { 'ignored-modules': ['six', 'google.protobuf'], }, 'DESIGN': { 'min-public-methods': '0', 'max-args': '10', 'max-attributes': '15', }, } DEFAULT_TEST_RC_ADDITIONS = copy.deepcopy(DEFAULT_LIBRARY_RC_ADDITIONS) DEFAULT_TEST_RC_ADDITIONS['MESSAGES CONTROL']['disable'].extend([ 'missing-docstring', 'no-self-use', 'redefined-outer-name', 'unused-argument', 'no-name-in-module', ]) DEFAULT_TEST_RC_REPLACEMENTS = copy.deepcopy(DEFAULT_LIBRARY_RC_REPLACEMENTS) DEFAULT_TEST_RC_REPLACEMENTS.setdefault('BASIC', {}) DEFAULT_TEST_RC_REPLACEMENTS['BASIC'].update({ 'good-names': ['i', 'j', 'k', 'ex', 'Run', '_', 'fh', 'pytestmark'], 'method-rgx': '[a-z_][a-z0-9_]{2,80}$', 'function-rgx': '[a-z_][a-z0-9_]{2,80}$', }) _ERROR_TEMPLATE = 'Pylint failed on {} with status {:d}.' _LINT_FILESET_MSG = ( 'Keyword arguments rc_filename and description are both ' 'required. No other keyword arguments are allowed.') _MISSING_OPTION_ADDITION = 'Expected to be adding to existing option {!r}.' _MISSING_OPTION_REPLACE = 'Expected to be replacing existing option {!r}.' def get_default_config(): """Get the default Pylint configuration. .. note:: The output of this function varies based on the current version of Pylint installed. Returns: str: The default Pylint configuration. """ # Swallow STDERR if it says # "No config file found, using default configuration" result = subprocess.check_output(['pylint', '--generate-rcfile'], stderr=subprocess.PIPE) # On Python 3, this returns bytes (from STDOUT), so we # convert to a string. return result.decode('utf-8') def read_config(contents): """Reads pylintrc config into native ConfigParser object. Args: contents (str): The contents of the file containing the INI config. Returns: ConfigParser.ConfigParser: The parsed configuration. """ file_obj = io.StringIO(contents) config = six.moves.configparser.ConfigParser() config.readfp(file_obj) return config def load_local_config(filename): """Loads the pylint.config.py file. Args: filename (str): The python file containing the local configuration. Returns: module: The loaded Python module. 
""" if not filename: return imp.new_module('local_pylint_config') module = imp.load_source('local_pylint_config', filename) return module Config = collections.namedtuple('Config', [ 'library_additions', 'library_replacements', 'test_additions', 'test_replacements']) def determine_final_config(config_module): """Determines the final additions and replacements. Combines the config module with the defaults. Args: config_module: The loaded local configuration module. Returns: Config: the final configuration. """ config = Config( DEFAULT_LIBRARY_RC_ADDITIONS, DEFAULT_LIBRARY_RC_REPLACEMENTS, DEFAULT_TEST_RC_ADDITIONS, DEFAULT_TEST_RC_REPLACEMENTS) for field in config._fields: if hasattr(config_module, field): config = config._replace(**{field: getattr(config_module, field)}) return config def _transform_opt(opt_val): """Transform a config option value to a string. If already a string, do nothing. If an iterable, then combine into a string by joining on ",". Args: opt_val (Union[str, list]): A config option's value. Returns: str: The option value converted to a string. """ if isinstance(opt_val, (list, tuple)): return ','.join(opt_val) else: return opt_val def lint_fileset(*dirnames, **kwargs): """Lints a group of files using a given rcfile. Keyword arguments are * ``rc_filename`` (``str``): The name of the Pylint config RC file. * ``description`` (``str``): A description of the files and configuration currently being run. Args: dirnames (tuple): Directories to run Pylint in. kwargs: The keyword arguments. The only keyword arguments are ``rc_filename`` and ``description`` and both are required. Raises: KeyError: If the wrong keyword arguments are used. """ try: rc_filename = kwargs['rc_filename'] description = kwargs['description'] if len(kwargs) != 2: raise KeyError except KeyError: raise KeyError(_LINT_FILESET_MSG) pylint_shell_command = ['pylint', '--rcfile', rc_filename] pylint_shell_command.extend(dirnames) status_code = subprocess.call(pylint_shell_command) if status_code != 0: error_message = _ERROR_TEMPLATE.format(description, status_code) print(error_message, file=sys.stderr) sys.exit(status_code) def run_command(args): """Script entry point. Lints both sets of files.""" library_rc = 'pylintrc' test_rc = 'pylintrc.test' if os.path.exists(library_rc): os.remove(library_rc) if os.path.exists(test_rc): os.remove(test_rc) default_config = read_config(get_default_config()) user_config = load_local_config(args.config) configuration = determine_final_config(user_config) make_rc(default_config, library_rc, additions=configuration.library_additions, replacements=configuration.library_replacements) make_rc(default_config, test_rc, additions=configuration.test_additions, replacements=configuration.test_replacements) lint_fileset(*args.library_filesets, rc_filename=library_rc, description='Library') lint_fileset(*args.test_filesets, rc_filename=test_rc, description='Test') def register_commands(subparsers): parser = subparsers.add_parser( 'run-pylint', help=__doc__) parser.set_defaults(func=run_command) parser.add_argument('--config') parser.add_argument('--library-filesets', nargs='+', default=[]) parser.add_argument('--test-filesets', nargs='+', default=[])
GoogleCloudPlatform/python-repo-tools
gcp_devrel/tools/pylint.py
run_command
python
def run_command(args): library_rc = 'pylintrc' test_rc = 'pylintrc.test' if os.path.exists(library_rc): os.remove(library_rc) if os.path.exists(test_rc): os.remove(test_rc) default_config = read_config(get_default_config()) user_config = load_local_config(args.config) configuration = determine_final_config(user_config) make_rc(default_config, library_rc, additions=configuration.library_additions, replacements=configuration.library_replacements) make_rc(default_config, test_rc, additions=configuration.test_additions, replacements=configuration.test_replacements) lint_fileset(*args.library_filesets, rc_filename=library_rc, description='Library') lint_fileset(*args.test_filesets, rc_filename=test_rc, description='Test')
Script entry point. Lints both sets of files.
train
https://github.com/GoogleCloudPlatform/python-repo-tools/blob/87422ba91814529848a2b8bf8be4294283a3e041/gcp_devrel/tools/pylint.py#L287-L312
[ "def get_default_config():\n \"\"\"Get the default Pylint configuration.\n\n .. note::\n\n The output of this function varies based on the current version of\n Pylint installed.\n\n Returns:\n str: The default Pylint configuration.\n \"\"\"\n # Swallow STDERR if it says\n # \"No config file found, using default configuration\"\n result = subprocess.check_output(['pylint', '--generate-rcfile'],\n stderr=subprocess.PIPE)\n # On Python 3, this returns bytes (from STDOUT), so we\n # convert to a string.\n return result.decode('utf-8')\n", "def read_config(contents):\n \"\"\"Reads pylintrc config into native ConfigParser object.\n\n Args:\n contents (str): The contents of the file containing the INI config.\n\n Returns:\n ConfigParser.ConfigParser: The parsed configuration.\n \"\"\"\n file_obj = io.StringIO(contents)\n config = six.moves.configparser.ConfigParser()\n config.readfp(file_obj)\n return config\n", "def load_local_config(filename):\n \"\"\"Loads the pylint.config.py file.\n\n Args:\n filename (str): The python file containing the local configuration.\n\n Returns:\n module: The loaded Python module.\n \"\"\"\n if not filename:\n return imp.new_module('local_pylint_config')\n module = imp.load_source('local_pylint_config', filename)\n return module\n", "def determine_final_config(config_module):\n \"\"\"Determines the final additions and replacements.\n\n Combines the config module with the defaults.\n\n Args:\n config_module: The loaded local configuration module.\n\n Returns:\n Config: the final configuration.\n \"\"\"\n config = Config(\n DEFAULT_LIBRARY_RC_ADDITIONS, DEFAULT_LIBRARY_RC_REPLACEMENTS,\n DEFAULT_TEST_RC_ADDITIONS, DEFAULT_TEST_RC_REPLACEMENTS)\n\n for field in config._fields:\n if hasattr(config_module, field):\n config = config._replace(**{field: getattr(config_module, field)})\n\n return config\n", "def lint_fileset(*dirnames, **kwargs):\n \"\"\"Lints a group of files using a given rcfile.\n\n Keyword arguments are\n\n * ``rc_filename`` (``str``): The name of the Pylint config RC file.\n * ``description`` (``str``): A description of the files and configuration\n currently being run.\n\n Args:\n dirnames (tuple): Directories to run Pylint in.\n kwargs: The keyword arguments. 
The only keyword arguments\n are ``rc_filename`` and ``description`` and both\n are required.\n\n Raises:\n KeyError: If the wrong keyword arguments are used.\n \"\"\"\n try:\n rc_filename = kwargs['rc_filename']\n description = kwargs['description']\n if len(kwargs) != 2:\n raise KeyError\n except KeyError:\n raise KeyError(_LINT_FILESET_MSG)\n\n pylint_shell_command = ['pylint', '--rcfile', rc_filename]\n pylint_shell_command.extend(dirnames)\n status_code = subprocess.call(pylint_shell_command)\n if status_code != 0:\n error_message = _ERROR_TEMPLATE.format(description, status_code)\n print(error_message, file=sys.stderr)\n sys.exit(status_code)\n", "def make_rc(base_cfg, target_filename,\n additions=None, replacements=None):\n \"\"\"Combines a base rc and additions into single file.\n\n Args:\n base_cfg (ConfigParser.ConfigParser): The configuration we are\n merging into.\n target_filename (str): The filename where the new configuration\n will be saved.\n additions (dict): (Optional) The values added to the configuration.\n replacements (dict): (Optional) The wholesale replacements for\n the new configuration.\n\n Raises:\n KeyError: if one of the additions or replacements does not\n already exist in the current config.\n \"\"\"\n # Set-up the mutable default values.\n if additions is None:\n additions = {}\n if replacements is None:\n replacements = {}\n\n # Create fresh config, which must extend the base one.\n new_cfg = six.moves.configparser.ConfigParser()\n # pylint: disable=protected-access\n new_cfg._sections = copy.deepcopy(base_cfg._sections)\n new_sections = new_cfg._sections\n # pylint: enable=protected-access\n\n for section, opts in additions.items():\n curr_section = new_sections.setdefault(\n section, collections.OrderedDict())\n for opt, opt_val in opts.items():\n curr_val = curr_section.get(opt)\n if curr_val is None:\n msg = _MISSING_OPTION_ADDITION.format(opt)\n raise KeyError(msg)\n curr_val = curr_val.rstrip(',')\n opt_val = _transform_opt(opt_val)\n curr_section[opt] = '%s, %s' % (curr_val, opt_val)\n\n for section, opts in replacements.items():\n curr_section = new_sections.setdefault(\n section, collections.OrderedDict())\n for opt, opt_val in opts.items():\n curr_val = curr_section.get(opt)\n if curr_val is None:\n # NOTE: This doesn't need to fail, because some options\n # are present in one version of pylint and not present\n # in another. For example ``[BASIC].method-rgx`` is\n # ``(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$`` in\n # ``1.7.5`` but in ``1.8.0`` the default config has\n # ``#method-rgx=`` (i.e. it is commented out in the\n # ``[BASIC]`` section).\n msg = _MISSING_OPTION_REPLACE.format(opt)\n print(msg, file=sys.stderr)\n opt_val = _transform_opt(opt_val)\n curr_section[opt] = '%s' % (opt_val,)\n\n with open(target_filename, 'w') as file_obj:\n new_cfg.write(file_obj)\n" ]
# Copyright 2016 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This script runs Pylint on the specified source. Before running Pylint, it generates a Pylint configuration on the fly based on programmatic defaults. """ from __future__ import print_function import collections import copy import imp import io import os import subprocess import sys import six DEFAULT_LIBRARY_RC_ADDITIONS = { 'MESSAGES CONTROL': { 'disable': [ 'I', 'import-error', 'no-member', 'protected-access', 'redefined-variable-type', 'similarities', 'no-else-return', 'ungrouped-imports' ], }, } DEFAULT_LIBRARY_RC_REPLACEMENTS = { 'MASTER': { 'ignore': ['CVS', '.git', '.cache', '.tox', '.nox'], 'load-plugins': 'pylint.extensions.check_docs', }, 'REPORTS': { 'reports': 'no', }, 'BASIC': { 'method-rgx': '[a-z_][a-z0-9_]{2,40}$', 'function-rgx': '[a-z_][a-z0-9_]{2,40}$', }, 'TYPECHECK': { 'ignored-modules': ['six', 'google.protobuf'], }, 'DESIGN': { 'min-public-methods': '0', 'max-args': '10', 'max-attributes': '15', }, } DEFAULT_TEST_RC_ADDITIONS = copy.deepcopy(DEFAULT_LIBRARY_RC_ADDITIONS) DEFAULT_TEST_RC_ADDITIONS['MESSAGES CONTROL']['disable'].extend([ 'missing-docstring', 'no-self-use', 'redefined-outer-name', 'unused-argument', 'no-name-in-module', ]) DEFAULT_TEST_RC_REPLACEMENTS = copy.deepcopy(DEFAULT_LIBRARY_RC_REPLACEMENTS) DEFAULT_TEST_RC_REPLACEMENTS.setdefault('BASIC', {}) DEFAULT_TEST_RC_REPLACEMENTS['BASIC'].update({ 'good-names': ['i', 'j', 'k', 'ex', 'Run', '_', 'fh', 'pytestmark'], 'method-rgx': '[a-z_][a-z0-9_]{2,80}$', 'function-rgx': '[a-z_][a-z0-9_]{2,80}$', }) _ERROR_TEMPLATE = 'Pylint failed on {} with status {:d}.' _LINT_FILESET_MSG = ( 'Keyword arguments rc_filename and description are both ' 'required. No other keyword arguments are allowed.') _MISSING_OPTION_ADDITION = 'Expected to be adding to existing option {!r}.' _MISSING_OPTION_REPLACE = 'Expected to be replacing existing option {!r}.' def get_default_config(): """Get the default Pylint configuration. .. note:: The output of this function varies based on the current version of Pylint installed. Returns: str: The default Pylint configuration. """ # Swallow STDERR if it says # "No config file found, using default configuration" result = subprocess.check_output(['pylint', '--generate-rcfile'], stderr=subprocess.PIPE) # On Python 3, this returns bytes (from STDOUT), so we # convert to a string. return result.decode('utf-8') def read_config(contents): """Reads pylintrc config into native ConfigParser object. Args: contents (str): The contents of the file containing the INI config. Returns: ConfigParser.ConfigParser: The parsed configuration. """ file_obj = io.StringIO(contents) config = six.moves.configparser.ConfigParser() config.readfp(file_obj) return config def load_local_config(filename): """Loads the pylint.config.py file. Args: filename (str): The python file containing the local configuration. Returns: module: The loaded Python module. 
""" if not filename: return imp.new_module('local_pylint_config') module = imp.load_source('local_pylint_config', filename) return module Config = collections.namedtuple('Config', [ 'library_additions', 'library_replacements', 'test_additions', 'test_replacements']) def determine_final_config(config_module): """Determines the final additions and replacements. Combines the config module with the defaults. Args: config_module: The loaded local configuration module. Returns: Config: the final configuration. """ config = Config( DEFAULT_LIBRARY_RC_ADDITIONS, DEFAULT_LIBRARY_RC_REPLACEMENTS, DEFAULT_TEST_RC_ADDITIONS, DEFAULT_TEST_RC_REPLACEMENTS) for field in config._fields: if hasattr(config_module, field): config = config._replace(**{field: getattr(config_module, field)}) return config def _transform_opt(opt_val): """Transform a config option value to a string. If already a string, do nothing. If an iterable, then combine into a string by joining on ",". Args: opt_val (Union[str, list]): A config option's value. Returns: str: The option value converted to a string. """ if isinstance(opt_val, (list, tuple)): return ','.join(opt_val) else: return opt_val def lint_fileset(*dirnames, **kwargs): """Lints a group of files using a given rcfile. Keyword arguments are * ``rc_filename`` (``str``): The name of the Pylint config RC file. * ``description`` (``str``): A description of the files and configuration currently being run. Args: dirnames (tuple): Directories to run Pylint in. kwargs: The keyword arguments. The only keyword arguments are ``rc_filename`` and ``description`` and both are required. Raises: KeyError: If the wrong keyword arguments are used. """ try: rc_filename = kwargs['rc_filename'] description = kwargs['description'] if len(kwargs) != 2: raise KeyError except KeyError: raise KeyError(_LINT_FILESET_MSG) pylint_shell_command = ['pylint', '--rcfile', rc_filename] pylint_shell_command.extend(dirnames) status_code = subprocess.call(pylint_shell_command) if status_code != 0: error_message = _ERROR_TEMPLATE.format(description, status_code) print(error_message, file=sys.stderr) sys.exit(status_code) def make_rc(base_cfg, target_filename, additions=None, replacements=None): """Combines a base rc and additions into single file. Args: base_cfg (ConfigParser.ConfigParser): The configuration we are merging into. target_filename (str): The filename where the new configuration will be saved. additions (dict): (Optional) The values added to the configuration. replacements (dict): (Optional) The wholesale replacements for the new configuration. Raises: KeyError: if one of the additions or replacements does not already exist in the current config. """ # Set-up the mutable default values. if additions is None: additions = {} if replacements is None: replacements = {} # Create fresh config, which must extend the base one. 
new_cfg = six.moves.configparser.ConfigParser() # pylint: disable=protected-access new_cfg._sections = copy.deepcopy(base_cfg._sections) new_sections = new_cfg._sections # pylint: enable=protected-access for section, opts in additions.items(): curr_section = new_sections.setdefault( section, collections.OrderedDict()) for opt, opt_val in opts.items(): curr_val = curr_section.get(opt) if curr_val is None: msg = _MISSING_OPTION_ADDITION.format(opt) raise KeyError(msg) curr_val = curr_val.rstrip(',') opt_val = _transform_opt(opt_val) curr_section[opt] = '%s, %s' % (curr_val, opt_val) for section, opts in replacements.items(): curr_section = new_sections.setdefault( section, collections.OrderedDict()) for opt, opt_val in opts.items(): curr_val = curr_section.get(opt) if curr_val is None: # NOTE: This doesn't need to fail, because some options # are present in one version of pylint and not present # in another. For example ``[BASIC].method-rgx`` is # ``(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$`` in # ``1.7.5`` but in ``1.8.0`` the default config has # ``#method-rgx=`` (i.e. it is commented out in the # ``[BASIC]`` section). msg = _MISSING_OPTION_REPLACE.format(opt) print(msg, file=sys.stderr) opt_val = _transform_opt(opt_val) curr_section[opt] = '%s' % (opt_val,) with open(target_filename, 'w') as file_obj: new_cfg.write(file_obj) def register_commands(subparsers): parser = subparsers.add_parser( 'run-pylint', help=__doc__) parser.set_defaults(func=run_command) parser.add_argument('--config') parser.add_argument('--library-filesets', nargs='+', default=[]) parser.add_argument('--test-filesets', nargs='+', default=[])
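An illustrative sketch of how run_command is reached through register_commands; the top-level argparse parser, config filename, and fileset names shown here are assumptions, not taken from the source.

import argparse
from gcp_devrel.tools import pylint  # assumed import path

parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers()
pylint.register_commands(subparsers)

# Equivalent to: run-pylint --config pylint.config.py
#   --library-filesets src --test-filesets tests
args = parser.parse_args([
    'run-pylint', '--config', 'pylint.config.py',
    '--library-filesets', 'src', '--test-filesets', 'tests'])
args.func(args)  # set_defaults wired func to run_command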
GoogleCloudPlatform/python-repo-tools
gcp_devrel/tools/appengine.py
get_gae_versions
python
def get_gae_versions(): r = requests.get(SDK_RELEASES_URL) r.raise_for_status() releases = r.json().get('items', {}) # We only care about the Python releases, which all are in the format # "featured/google_appengine_{version}.zip". We'll extract the version # number so we can sort the list by version, and finally get the download # URL. versions_and_urls = [] for release in releases: match = PYTHON_RELEASE_RE.match(release['name']) if not match: continue versions_and_urls.append( ([int(x) for x in match.groups()], release['mediaLink'])) return sorted(versions_and_urls, key=lambda x: x[0])
Gets a list of all of the available Python SDK versions, sorted with the newest last.
train
https://github.com/GoogleCloudPlatform/python-repo-tools/blob/87422ba91814529848a2b8bf8be4294283a3e041/gcp_devrel/tools/appengine.py#L41-L63
null
# Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Fetches the most recent GAE SDK and extracts it to the given directory.""" from __future__ import print_function import os import re import sys import zipfile import requests if sys.version_info[0] == 2: from StringIO import StringIO elif sys.version_info[0] == 3: from io import StringIO SDK_RELEASES_URL = ( 'https://www.googleapis.com/storage/v1/b/appengine-sdks/o?prefix=featured') PYTHON_RELEASE_RE = re.compile( r'featured/google_appengine_(\d+?)\.(\d+?)\.(\d+?)\.zip') SDK_RELEASE_RE = re.compile( r'release: \"(\d+?)\.(\d+?)\.(\d+?)\"') def is_existing_up_to_date(destination, latest_version): """Returns False if there is no existing install or if the existing install is out of date. Otherwise, returns True.""" version_path = os.path.join( destination, 'google_appengine', 'VERSION') if not os.path.exists(version_path): return False with open(version_path, 'r') as f: version_line = f.readline() match = SDK_RELEASE_RE.match(version_line) if not match: print('Unable to parse version from:', version_line) return False version = [int(x) for x in match.groups()] return version >= latest_version def download_sdk(url): """Downloads the SDK and returns a file-like object for the zip content.""" r = requests.get(url) r.raise_for_status() return StringIO(r.content) def extract_zip(zip, destination): zip_contents = zipfile.ZipFile(zip) if not os.path.exists(destination): os.makedirs(destination) zip_contents.extractall(destination) def fixup_version(destination, version): """Newer releases of the SDK do not have the version number set correctly in the VERSION file. 
Fix it up.""" version_path = os.path.join( destination, 'google_appengine', 'VERSION') with open(version_path, 'r') as f: version_data = f.read() version_data = version_data.replace( 'release: "0.0.0"', 'release: "{}"'.format('.'.join(str(x) for x in version))) with open(version_path, 'w') as f: f.write(version_data) def download_command(args): """Downloads and extracts the latest App Engine SDK to the given destination.""" latest_two_versions = list(reversed(get_gae_versions()))[:2] zip = None version_number = None for version in latest_two_versions: if is_existing_up_to_date(args.destination, version[0]): print( 'App Engine SDK already exists and is up to date ' 'at {}.'.format(args.destination)) return try: print('Downloading App Engine SDK {}'.format( '.'.join([str(x) for x in version[0]]))) zip = download_sdk(version[1]) version_number = version[0] break except Exception as e: print('Failed to download: {}'.format(e)) continue if not zip: return print('Extracting SDK to {}'.format(args.destination)) extract_zip(zip, args.destination) fixup_version(args.destination, version_number) print('App Engine SDK installed.') def register_commands(subparsers): download = subparsers.add_parser( 'download-appengine-sdk', help=download_command.__doc__) download.set_defaults(func=download_command) download.add_argument( 'destination', help='Path to install the App Engine SDK')
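A short usage sketch for get_gae_versions, assuming the gcp_devrel.tools.appengine import path; the call performs a live request against the SDK release bucket listing.

from gcp_devrel.tools import appengine  # assumed import path

versions = appengine.get_gae_versions()
# Entries are ([major, minor, patch], download_url), sorted ascending,
# so the newest release is last.
newest_version, newest_url = versions[-1]
print('.'.join(str(part) for part in newest_version), newest_url)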
GoogleCloudPlatform/python-repo-tools
gcp_devrel/tools/appengine.py
is_existing_up_to_date
python
def is_existing_up_to_date(destination, latest_version): version_path = os.path.join( destination, 'google_appengine', 'VERSION') if not os.path.exists(version_path): return False with open(version_path, 'r') as f: version_line = f.readline() match = SDK_RELEASE_RE.match(version_line) if not match: print('Unable to parse version from:', version_line) return False version = [int(x) for x in match.groups()] return version >= latest_version
Returns False if there is no existing install or if the existing install is out of date. Otherwise, returns True.
train
https://github.com/GoogleCloudPlatform/python-repo-tools/blob/87422ba91814529848a2b8bf8be4294283a3e041/gcp_devrel/tools/appengine.py#L66-L86
null
# Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Fetches the most recent GAE SDK and extracts it to the given directory.""" from __future__ import print_function import os import re import sys import zipfile import requests if sys.version_info[0] == 2: from StringIO import StringIO elif sys.version_info[0] == 3: from io import StringIO SDK_RELEASES_URL = ( 'https://www.googleapis.com/storage/v1/b/appengine-sdks/o?prefix=featured') PYTHON_RELEASE_RE = re.compile( r'featured/google_appengine_(\d+?)\.(\d+?)\.(\d+?)\.zip') SDK_RELEASE_RE = re.compile( r'release: \"(\d+?)\.(\d+?)\.(\d+?)\"') def get_gae_versions(): """Gets a list of all of the available Python SDK versions, sorted with the newest last.""" r = requests.get(SDK_RELEASES_URL) r.raise_for_status() releases = r.json().get('items', {}) # We only care about the Python releases, which all are in the format # "featured/google_appengine_{version}.zip". We'll extract the version # number so we can sort the list by version, and finally get the download # URL. versions_and_urls = [] for release in releases: match = PYTHON_RELEASE_RE.match(release['name']) if not match: continue versions_and_urls.append( ([int(x) for x in match.groups()], release['mediaLink'])) return sorted(versions_and_urls, key=lambda x: x[0]) def download_sdk(url): """Downloads the SDK and returns a file-like object for the zip content.""" r = requests.get(url) r.raise_for_status() return StringIO(r.content) def extract_zip(zip, destination): zip_contents = zipfile.ZipFile(zip) if not os.path.exists(destination): os.makedirs(destination) zip_contents.extractall(destination) def fixup_version(destination, version): """Newer releases of the SDK do not have the version number set correctly in the VERSION file. 
Fix it up.""" version_path = os.path.join( destination, 'google_appengine', 'VERSION') with open(version_path, 'r') as f: version_data = f.read() version_data = version_data.replace( 'release: "0.0.0"', 'release: "{}"'.format('.'.join(str(x) for x in version))) with open(version_path, 'w') as f: f.write(version_data) def download_command(args): """Downloads and extracts the latest App Engine SDK to the given destination.""" latest_two_versions = list(reversed(get_gae_versions()))[:2] zip = None version_number = None for version in latest_two_versions: if is_existing_up_to_date(args.destination, version[0]): print( 'App Engine SDK already exists and is up to date ' 'at {}.'.format(args.destination)) return try: print('Downloading App Engine SDK {}'.format( '.'.join([str(x) for x in version[0]]))) zip = download_sdk(version[1]) version_number = version[0] break except Exception as e: print('Failed to download: {}'.format(e)) continue if not zip: return print('Extracting SDK to {}'.format(args.destination)) extract_zip(zip, args.destination) fixup_version(args.destination, version_number) print('App Engine SDK installed.') def register_commands(subparsers): download = subparsers.add_parser( 'download-appengine-sdk', help=download_command.__doc__) download.set_defaults(func=download_command) download.add_argument( 'destination', help='Path to install the App Engine SDK')
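A hedged sketch combining is_existing_up_to_date with get_gae_versions; the destination path is a placeholder.

from gcp_devrel.tools import appengine  # assumed import path

newest_version, _ = appengine.get_gae_versions()[-1]
# Parses <destination>/google_appengine/VERSION and compares its
# [major, minor, patch] list against the newest published release.
if appengine.is_existing_up_to_date('/tmp/gae-sdk', newest_version):
    print('App Engine SDK is already current')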
GoogleCloudPlatform/python-repo-tools
gcp_devrel/tools/appengine.py
download_sdk
python
def download_sdk(url): r = requests.get(url) r.raise_for_status() return StringIO(r.content)
Downloads the SDK and returns a file-like object for the zip content.
train
https://github.com/GoogleCloudPlatform/python-repo-tools/blob/87422ba91814529848a2b8bf8be4294283a3e041/gcp_devrel/tools/appengine.py#L89-L93
null
# Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Fetches the most recent GAE SDK and extracts it to the given directory.""" from __future__ import print_function import os import re import sys import zipfile import requests if sys.version_info[0] == 2: from StringIO import StringIO elif sys.version_info[0] == 3: from io import StringIO SDK_RELEASES_URL = ( 'https://www.googleapis.com/storage/v1/b/appengine-sdks/o?prefix=featured') PYTHON_RELEASE_RE = re.compile( r'featured/google_appengine_(\d+?)\.(\d+?)\.(\d+?)\.zip') SDK_RELEASE_RE = re.compile( r'release: \"(\d+?)\.(\d+?)\.(\d+?)\"') def get_gae_versions(): """Gets a list of all of the available Python SDK versions, sorted with the newest last.""" r = requests.get(SDK_RELEASES_URL) r.raise_for_status() releases = r.json().get('items', {}) # We only care about the Python releases, which all are in the format # "featured/google_appengine_{version}.zip". We'll extract the version # number so we can sort the list by version, and finally get the download # URL. versions_and_urls = [] for release in releases: match = PYTHON_RELEASE_RE.match(release['name']) if not match: continue versions_and_urls.append( ([int(x) for x in match.groups()], release['mediaLink'])) return sorted(versions_and_urls, key=lambda x: x[0]) def is_existing_up_to_date(destination, latest_version): """Returns False if there is no existing install or if the existing install is out of date. Otherwise, returns True.""" version_path = os.path.join( destination, 'google_appengine', 'VERSION') if not os.path.exists(version_path): return False with open(version_path, 'r') as f: version_line = f.readline() match = SDK_RELEASE_RE.match(version_line) if not match: print('Unable to parse version from:', version_line) return False version = [int(x) for x in match.groups()] return version >= latest_version def extract_zip(zip, destination): zip_contents = zipfile.ZipFile(zip) if not os.path.exists(destination): os.makedirs(destination) zip_contents.extractall(destination) def fixup_version(destination, version): """Newer releases of the SDK do not have the version number set correctly in the VERSION file. 
Fix it up.""" version_path = os.path.join( destination, 'google_appengine', 'VERSION') with open(version_path, 'r') as f: version_data = f.read() version_data = version_data.replace( 'release: "0.0.0"', 'release: "{}"'.format('.'.join(str(x) for x in version))) with open(version_path, 'w') as f: f.write(version_data) def download_command(args): """Downloads and extracts the latest App Engine SDK to the given destination.""" latest_two_versions = list(reversed(get_gae_versions()))[:2] zip = None version_number = None for version in latest_two_versions: if is_existing_up_to_date(args.destination, version[0]): print( 'App Engine SDK already exists and is up to date ' 'at {}.'.format(args.destination)) return try: print('Downloading App Engine SDK {}'.format( '.'.join([str(x) for x in version[0]]))) zip = download_sdk(version[1]) version_number = version[0] break except Exception as e: print('Failed to download: {}'.format(e)) continue if not zip: return print('Extracting SDK to {}'.format(args.destination)) extract_zip(zip, args.destination) fixup_version(args.destination, version_number) print('App Engine SDK installed.') def register_commands(subparsers): download = subparsers.add_parser( 'download-appengine-sdk', help=download_command.__doc__) download.set_defaults(func=download_command) download.add_argument( 'destination', help='Path to install the App Engine SDK')
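A minimal sketch of downloading and unpacking a release with download_sdk and extract_zip, assuming the gcp_devrel.tools.appengine import path; note that the helper wraps the response in StringIO, which matches Python 2 semantics (requests returns bytes), so on Python 3 a BytesIO wrapper would be needed instead.

from gcp_devrel.tools import appengine  # assumed import path

_, newest_url = appengine.get_gae_versions()[-1]
sdk_zip = appengine.download_sdk(newest_url)    # file-like object holding the zip
appengine.extract_zip(sdk_zip, '/tmp/gae-sdk')  # unpacks under the destination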
GoogleCloudPlatform/python-repo-tools
gcp_devrel/tools/appengine.py
fixup_version
python
def fixup_version(destination, version): version_path = os.path.join( destination, 'google_appengine', 'VERSION') with open(version_path, 'r') as f: version_data = f.read() version_data = version_data.replace( 'release: "0.0.0"', 'release: "{}"'.format('.'.join(str(x) for x in version))) with open(version_path, 'w') as f: f.write(version_data)
Newer releases of the SDK do not have the version number set correctly in the VERSION file. Fix it up.
train
https://github.com/GoogleCloudPlatform/python-repo-tools/blob/87422ba91814529848a2b8bf8be4294283a3e041/gcp_devrel/tools/appengine.py#L105-L119
null
# Copyright 2016 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Fetches the most recent GAE SDK and extracts it to the given directory.""" from __future__ import print_function import os import re import sys import zipfile import requests if sys.version_info[0] == 2: from StringIO import StringIO elif sys.version_info[0] == 3: from io import StringIO SDK_RELEASES_URL = ( 'https://www.googleapis.com/storage/v1/b/appengine-sdks/o?prefix=featured') PYTHON_RELEASE_RE = re.compile( r'featured/google_appengine_(\d+?)\.(\d+?)\.(\d+?)\.zip') SDK_RELEASE_RE = re.compile( r'release: \"(\d+?)\.(\d+?)\.(\d+?)\"') def get_gae_versions(): """Gets a list of all of the available Python SDK versions, sorted with the newest last.""" r = requests.get(SDK_RELEASES_URL) r.raise_for_status() releases = r.json().get('items', {}) # We only care about the Python releases, which all are in the format # "featured/google_appengine_{version}.zip". We'll extract the version # number so we can sort the list by version, and finally get the download # URL. versions_and_urls = [] for release in releases: match = PYTHON_RELEASE_RE.match(release['name']) if not match: continue versions_and_urls.append( ([int(x) for x in match.groups()], release['mediaLink'])) return sorted(versions_and_urls, key=lambda x: x[0]) def is_existing_up_to_date(destination, latest_version): """Returns False if there is no existing install or if the existing install is out of date. 
Otherwise, returns True.""" version_path = os.path.join( destination, 'google_appengine', 'VERSION') if not os.path.exists(version_path): return False with open(version_path, 'r') as f: version_line = f.readline() match = SDK_RELEASE_RE.match(version_line) if not match: print('Unable to parse version from:', version_line) return False version = [int(x) for x in match.groups()] return version >= latest_version def download_sdk(url): """Downloads the SDK and returns a file-like object for the zip content.""" r = requests.get(url) r.raise_for_status() return StringIO(r.content) def extract_zip(zip, destination): zip_contents = zipfile.ZipFile(zip) if not os.path.exists(destination): os.makedirs(destination) zip_contents.extractall(destination) def download_command(args): """Downloads and extracts the latest App Engine SDK to the given destination.""" latest_two_versions = list(reversed(get_gae_versions()))[:2] zip = None version_number = None for version in latest_two_versions: if is_existing_up_to_date(args.destination, version[0]): print( 'App Engine SDK already exists and is up to date ' 'at {}.'.format(args.destination)) return try: print('Downloading App Engine SDK {}'.format( '.'.join([str(x) for x in version[0]]))) zip = download_sdk(version[1]) version_number = version[0] break except Exception as e: print('Failed to download: {}'.format(e)) continue if not zip: return print('Extracting SDK to {}'.format(args.destination)) extract_zip(zip, args.destination) fixup_version(args.destination, version_number) print('App Engine SDK installed.') def register_commands(subparsers): download = subparsers.add_parser( 'download-appengine-sdk', help=download_command.__doc__) download.set_defaults(func=download_command) download.add_argument( 'destination', help='Path to install the App Engine SDK')
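A brief sketch of fixup_version, assuming the gcp_devrel.tools.appengine import path; the destination and version list are illustrative.

from gcp_devrel.tools import appengine  # assumed import path

# After extracting an SDK whose VERSION file still reads release: "0.0.0",
# rewrite it with the version that was actually downloaded.
appengine.fixup_version('/tmp/gae-sdk', [1, 9, 40])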