_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q258300
AxisGraph.null
validation
def null(self):
    """Zero crossing value."""
    if not self.option.axis:
        # No axis requested - signal with a sentinel.
        return -1
    # Project the data-space zero line onto a screen row.
    zero_offset = -self.minimum * 4.0 / self.extents * self.size.y
    return self.screen.height - zero_offset
python
{ "resource": "" }
q258301
KBinXML.mem_size
validation
def mem_size(self):
    '''used when allocating memory ingame'''
    data_len = self._data_mem_size
    node_count = len(list(self.xml_doc.iter(tag=etree.Element)))
    if self.compressed:
        total = 52 * node_count + data_len + 630
    else:
        # Uncompressed layouts also store each tag name, padded to a
        # 4-byte boundary with a minimum of 8 bytes.
        tags_len = 0
        for node in self.xml_doc.iter(tag=etree.Element):
            padded = max(len(node.tag), 8)
            padded = (padded + 3) & ~3
            tags_len += padded
        total = 56 * node_count + data_len + 630 + tags_len
    # debugging
    #print('nodes:{} ({}) data:{} ({})'.format(node_count,hex(node_count), data_len, hex(data_len)))
    return (total + 8) & ~7
python
{ "resource": "" }
q258302
_load_class
validation
def _load_class(class_path, default): """ Loads the class from the class_path string """ if class_path is None: return default component = class_path.rsplit('.', 1) result_processor = getattr( importlib.import_module(component[0]), component[1], default ) if len(component) > 1 else default return result_processor
python
{ "resource": "" }
q258303
_process_pagination_values
validation
def _process_pagination_values(request): """ process pagination requests from request parameter """ size = 20 page = 0 from_ = 0 if "page_size" in request.POST: size = int(request.POST["page_size"]) max_page_size = getattr(settings, "SEARCH_MAX_PAGE_SIZE", 100) # The parens below are superfluous, but make it much clearer to the reader what is going on if not (0 < size <= max_page_size): # pylint: disable=superfluous-parens raise ValueError(_('Invalid page size of {page_size}').format(page_size=size)) if "page_index" in request.POST: page = int(request.POST["page_index"]) from_ = page * size return size, from_, page
python
{ "resource": "" }
q258304
_process_field_values
validation
def _process_field_values(request):
    """
    Create separate dictionary of supported filter values provided

    Only POST keys that are recognized course-discovery filter fields are
    copied into the returned dictionary.
    """
    # Hoisted out of the comprehension: the original re-called
    # course_discovery_filter_fields() once per POST key.
    supported_fields = course_discovery_filter_fields()
    return {
        field_key: request.POST[field_key]
        for field_key in request.POST
        if field_key in supported_fields
    }
python
{ "resource": "" }
q258305
do_search
validation
def do_search(request, course_id=None):
    """
    Search view for http requests

    Args:
        request (required) - django request object
        course_id (optional) - course_id within which to restrict search

    Returns:
        http json response with the following fields
            "took" - how many seconds the operation took
            "total" - how many results were found
            "max_score" - maximum score from these results
            "results" - json array of result documents
            or
            "error" - displayable information about an error that occurred on the server

    POST Params:
        "search_string" (required) - text upon which to search
        "page_size" (optional)- how many results to return per page (defaults to 20, with maximum cutoff at 100)
        "page_index" (optional) - for which page (zero-indexed) to include results (defaults to 0)
    """
    # Setup search environment
    SearchInitializer.set_search_enviroment(request=request, course_id=course_id)

    # Default to an error payload; replaced below once the search succeeds.
    results = {
        "error": _("Nothing to search")
    }
    status_code = 500

    search_term = request.POST.get("search_string", None)

    try:
        if not search_term:
            raise ValueError(_('No search term provided for search'))

        size, from_, page = _process_pagination_values(request)

        # Analytics - log search request
        track.emit(
            'edx.course.search.initiated',
            {
                "search_term": search_term,
                "page_size": size,
                "page_number": page,
            }
        )

        results = perform_search(
            search_term,
            user=request.user,
            size=size,
            from_=from_,
            course_id=course_id
        )

        status_code = 200

        # Analytics - log search results before sending to browser
        track.emit(
            'edx.course.search.results_displayed',
            {
                "search_term": search_term,
                "page_size": size,
                "page_number": page,
                "results_count": results["total"],
            }
        )

    except ValueError as invalid_err:
        results = {
            "error": six.text_type(invalid_err)
        }
        log.debug(six.text_type(invalid_err))

    except QueryParseError:
        results = {
            "error": _('Your query seems malformed. Check for unmatched quotes.')
        }

    # Allow for broad exceptions here - this is an entry point from external reference
    except Exception as err:  # pylint: disable=broad-except
        results = {
            "error": _('An error occurred when searching for "{search_string}"').format(search_string=search_term)
        }
        log.exception(
            'Search view exception when searching for %s for user %s: %r',
            search_term,
            request.user.id,
            err
        )

    return JsonResponse(results, status=status_code)
python
{ "resource": "" }
q258306
course_discovery
validation
def course_discovery(request):
    """
    Search for courses

    Args:
        request (required) - django request object

    Returns:
        http json response with the following fields
            "took" - how many seconds the operation took
            "total" - how many results were found
            "max_score" - maximum score from these results
            "results" - json array of result documents
            or
            "error" - displayable information about an error that occurred on the server

    POST Params:
        "search_string" (optional) - text with which to search for courses
        "page_size" (optional)- how many results to return per page (defaults to 20, with maximum cutoff at 100)
        "page_index" (optional) - for which page (zero-indexed) to include results (defaults to 0)
    """
    # Default to an error payload; replaced below once the search succeeds.
    results = {
        "error": _("Nothing to search")
    }
    status_code = 500

    search_term = request.POST.get("search_string", None)

    try:
        size, from_, page = _process_pagination_values(request)
        field_dictionary = _process_field_values(request)

        # Analytics - log search request
        track.emit(
            'edx.course_discovery.search.initiated',
            {
                "search_term": search_term,
                "page_size": size,
                "page_number": page,
            }
        )

        results = course_discovery_search(
            search_term=search_term,
            size=size,
            from_=from_,
            field_dictionary=field_dictionary,
        )

        # Analytics - log search results before sending to browser
        track.emit(
            'edx.course_discovery.search.results_displayed',
            {
                "search_term": search_term,
                "page_size": size,
                "page_number": page,
                "results_count": results["total"],
            }
        )

        status_code = 200

    except ValueError as invalid_err:
        results = {
            "error": six.text_type(invalid_err)
        }
        log.debug(six.text_type(invalid_err))

    except QueryParseError:
        results = {
            "error": _('Your query seems malformed. Check for unmatched quotes.')
        }

    # Allow for broad exceptions here - this is an entry point from external reference
    except Exception as err:  # pylint: disable=broad-except
        results = {
            "error": _('An error occurred when searching for "{search_string}"').format(search_string=search_term)
        }
        log.exception(
            'Search view exception when searching for %s for user %s: %r',
            search_term,
            request.user.id,
            err
        )

    return JsonResponse(results, status=status_code)
python
{ "resource": "" }
q258307
_translate_hits
validation
def _translate_hits(es_response): """ Provide resultset in our desired format from elasticsearch results """ def translate_result(result): """ Any conversion from ES result syntax into our search engine syntax """ translated_result = copy.copy(result) data = translated_result.pop("_source") translated_result.update({ "data": data, "score": translated_result["_score"] }) return translated_result def translate_facet(result): """ Any conversion from ES facet syntax into our search engine sytax """ terms = {term["term"]: term["count"] for term in result["terms"]} return { "terms": terms, "total": result["total"], "other": result["other"], } results = [translate_result(hit) for hit in es_response["hits"]["hits"]] response = { "took": es_response["took"], "total": es_response["hits"]["total"], "max_score": es_response["hits"]["max_score"], "results": results, } if "facets" in es_response: response["facets"] = {facet: translate_facet(es_response["facets"][facet]) for facet in es_response["facets"]} return response
python
{ "resource": "" }
q258308
_get_filter_field
validation
def _get_filter_field(field_name, field_value):
    """ Return field to apply into filter, if an array then use a range, otherwise look for a term match """
    if isinstance(field_value, ValueRange):
        # Ranges only include the bounds that are actually set.
        bounds = {}
        if field_value.lower:
            bounds["gte"] = field_value.lower_string
        if field_value.upper:
            bounds["lte"] = field_value.upper_string
        return {"range": {field_name: bounds}}
    if _is_iterable(field_value):
        return {"terms": {field_name: field_value}}
    return {"term": {field_name: field_value}}
python
{ "resource": "" }
q258309
_process_field_queries
validation
def _process_field_queries(field_dictionary): """ We have a field_dictionary - we want to match the values for an elasticsearch "match" query This is only potentially useful when trying to tune certain search operations """ def field_item(field): """ format field match as "match" item for elasticsearch query """ return { "match": { field: field_dictionary[field] } } return [field_item(field) for field in field_dictionary]
python
{ "resource": "" }
q258310
_process_filters
validation
def _process_filters(filter_dictionary): """ We have a filter_dictionary - this means that if the field is included and matches, then we can include, OR if the field is undefined, then we assume it is safe to include """ def filter_item(field): """ format elasticsearch filter to pass if value matches OR field is not included """ if filter_dictionary[field] is not None: return { "or": [ _get_filter_field(field, filter_dictionary[field]), { "missing": { "field": field } } ] } return { "missing": { "field": field } } return [filter_item(field) for field in filter_dictionary]
python
{ "resource": "" }
q258311
_process_exclude_dictionary
validation
def _process_exclude_dictionary(exclude_dictionary): """ Based on values in the exclude_dictionary generate a list of term queries that will filter out unwanted results. """ # not_properties will hold the generated term queries. not_properties = [] for exclude_property in exclude_dictionary: exclude_values = exclude_dictionary[exclude_property] if not isinstance(exclude_values, list): exclude_values = [exclude_values] not_properties.extend([{"term": {exclude_property: exclude_value}} for exclude_value in exclude_values]) # Returning a query segment with an empty list freaks out ElasticSearch, # so just return an empty segment. if not not_properties: return {} return { "not": { "filter": { "or": not_properties } } }
python
{ "resource": "" }
q258312
_process_facet_terms
validation
def _process_facet_terms(facet_terms): """ We have a list of terms with which we return facets """ elastic_facets = {} for facet in facet_terms: facet_term = {"field": facet} if facet_terms[facet]: for facet_option in facet_terms[facet]: facet_term[facet_option] = facet_terms[facet][facet_option] elastic_facets[facet] = { "terms": facet_term } return elastic_facets
python
{ "resource": "" }
q258313
ElasticSearchEngine.get_mappings
validation
def get_mappings(cls, index_name, doc_type):
    """ fetch mapped-items structure from cache """
    cache_key = cls.get_cache_item_name(index_name, doc_type)
    # Empty dict when nothing has been cached for this index/doc_type pair.
    return cache.get(cache_key, {})
python
{ "resource": "" }
q258314
ElasticSearchEngine.set_mappings
validation
def set_mappings(cls, index_name, doc_type, mappings):
    """ set new mapped-items structure into cache """
    cache_key = cls.get_cache_item_name(index_name, doc_type)
    cache.set(cache_key, mappings)
python
{ "resource": "" }
q258315
ElasticSearchEngine.log_indexing_error
validation
def log_indexing_error(cls, indexing_errors):
    """ Logs indexing errors and raises a general ElasticSearch Exception"""
    details = ', '.join(str(indexing_error) for indexing_error in indexing_errors)
    raise exceptions.ElasticsearchException(details)
python
{ "resource": "" }
q258316
ElasticSearchEngine._get_mappings
validation
def _get_mappings(self, doc_type): """ Interfaces with the elasticsearch mappings for the index prevents multiple loading of the same mappings from ES when called more than once Mappings format in elasticsearch is as follows: { "doc_type": { "properties": { "nested_property": { "properties": { "an_analysed_property": { "type": "string" }, "another_analysed_property": { "type": "string" } } }, "a_not_analysed_property": { "type": "string", "index": "not_analyzed" }, "a_date_property": { "type": "date" } } } } We cache the properties of each doc_type, if they are not available, we'll load them again from Elasticsearch """ # Try loading the mapping from the cache. mapping = ElasticSearchEngine.get_mappings(self.index_name, doc_type) # Fall back to Elasticsearch if not mapping: mapping = self._es.indices.get_mapping( index=self.index_name, doc_type=doc_type, ).get(self.index_name, {}).get('mappings', {}).get(doc_type, {}) # Cache the mapping, if one was retrieved if mapping: ElasticSearchEngine.set_mappings( self.index_name, doc_type, mapping ) return mapping
python
{ "resource": "" }
q258317
ElasticSearchEngine.index
validation
def index(self, doc_type, sources, **kwargs):
    """
    Implements call to add documents to the ES index
    Note the call to _check_mappings which will setup fields with the desired mappings
    """
    try:
        actions = []
        for source in sources:
            # Ensure the index knows how to treat each field before writing.
            self._check_mappings(doc_type, source)
            id_ = source['id'] if 'id' in source else None
            log.debug("indexing %s object with id %s", doc_type, id_)
            action = {
                "_index": self.index_name,
                "_type": doc_type,
                "_id": id_,
                "_source": source
            }
            actions.append(action)

        # bulk() returns a tuple with summary information
        # number of successfully executed actions and number of errors if stats_only is set to True.
        _, indexing_errors = bulk(
            self._es,
            actions,
            **kwargs
        )

        if indexing_errors:
            # Raises after collecting all per-document error messages.
            ElasticSearchEngine.log_indexing_error(indexing_errors)

    # Broad exception handler to protect around bulk call
    except Exception as ex:
        # log information and re-raise
        log.exception("error while indexing - %s", str(ex))
        raise
python
{ "resource": "" }
q258318
ElasticSearchEngine.remove
validation
def remove(self, doc_type, doc_ids, **kwargs):
    """ Implements call to remove the documents from the index """
    try:
        # ignore is flagged as an unexpected-keyword-arg; ES python client documents that it can be used
        # pylint: disable=unexpected-keyword-arg
        actions = []
        for doc_id in doc_ids:
            log.debug("Removing document of type %s and index %s", doc_type, doc_id)
            action = {
                '_op_type': 'delete',
                "_index": self.index_name,
                "_type": doc_type,
                "_id": doc_id
            }
            actions.append(action)
        bulk(self._es, actions, **kwargs)
    except BulkIndexError as ex:
        # A 404 means the document was already gone; only other statuses are
        # treated as real failures.
        valid_errors = [error for error in ex.errors if error['delete']['status'] != 404]

        if valid_errors:
            log.exception("An error occurred while removing documents from the index.")
            raise
python
{ "resource": "" }
q258319
ElasticSearchEngine.search
validation
def search(self,
           query_string=None,
           field_dictionary=None,
           filter_dictionary=None,
           exclude_dictionary=None,
           facet_terms=None,
           exclude_ids=None,
           use_field_match=False,
           **kwargs):  # pylint: disable=too-many-arguments, too-many-locals, too-many-branches, arguments-differ
    """
    Implements call to search the index for the desired content.

    Args:
        query_string (str): the string of values upon which to search within the
            content of the objects within the index

        field_dictionary (dict): dictionary of values which _must_ exist and
            _must_ match in order for the documents to be included in the results

        filter_dictionary (dict): dictionary of values which _must_ match if the
            field exists in order for the documents to be included in the results;
            documents for which the field does not exist may be included in the
            results if they are not otherwise filtered out

        exclude_dictionary(dict): dictionary of values all of which which must
            not match in order for the documents to be included in the results;
            documents which have any of these fields and for which the value matches
            one of the specified values shall be filtered out of the result set

        facet_terms (dict): dictionary of terms to include within search
            facets list - key is the term desired to facet upon, and the value is
            a dictionary of extended information to include. Supported right now is a
            size specification for a cap upon how many facet results to return (can
            be an empty dictionary to use default size for underlying engine):

            e.g.
            {
                "org": {"size": 10},  # only show top 10 organizations
                "modes": {}
            }

        use_field_match (bool): flag to indicate whether to use elastic
            filtering or elastic matching for field matches - this is nothing but a
            potential performance tune for certain queries

        (deprecated) exclude_ids (list): list of id values to exclude from the results -
            useful for finding maches that aren't "one of these"

    Returns:
        dict object with results in the desired format
        {
            "took": 3,
            "total": 4,
            "max_score": 2.0123,
            "results": [
                {
                    "score": 2.0123,
                    "data": {
                        ...
                    }
                },
                {
                    "score": 0.0983,
                    "data": {
                        ...
                    }
                }
            ],
            "facets": {
                "org": {
                    "total": total_count,
                    "other": 1,
                    "terms": {
                        "MITx": 25,
                        "HarvardX": 18
                    }
                },
                "modes": {
                    "total": modes_count,
                    "other": 15,
                    "terms": {
                        "honor": 58,
                        "verified": 44,
                    }
                }
            }
        }

    Raises:
        ElasticsearchException when there is a problem with the response from elasticsearch

    Example usage:
        .search(
            "find the words within this string",
            {
                "must_have_field": "mast_have_value for must_have_field"
            },
            {
            }
        )
    """
    log.debug("searching index with %s", query_string)

    elastic_queries = []
    elastic_filters = []

    # We have a query string, search all fields for matching text within the "content" node
    if query_string:
        # Strip reserved query-syntax characters before handing the string to ES.
        if six.PY2:
            query_string = query_string.encode('utf-8').translate(None, RESERVED_CHARACTERS)
        else:
            query_string = query_string.translate(query_string.maketrans('', '', RESERVED_CHARACTERS))
        elastic_queries.append({
            "query_string": {
                "fields": ["content.*"],
                "query": query_string
            }
        })

    if field_dictionary:
        if use_field_match:
            elastic_queries.extend(_process_field_queries(field_dictionary))
        else:
            elastic_filters.extend(_process_field_filters(field_dictionary))

    if filter_dictionary:
        elastic_filters.extend(_process_filters(filter_dictionary))

    # Support deprecated argument of exclude_ids
    if exclude_ids:
        if not exclude_dictionary:
            exclude_dictionary = {}
        if "_id" not in exclude_dictionary:
            exclude_dictionary["_id"] = []
        exclude_dictionary["_id"].extend(exclude_ids)

    if exclude_dictionary:
        elastic_filters.append(_process_exclude_dictionary(exclude_dictionary))

    # With no queries at all, match every document.
    query_segment = {
        "match_all": {}
    }
    if elastic_queries:
        query_segment = {
            "bool": {
                "must": elastic_queries
            }
        }

    query = query_segment
    if elastic_filters:
        filter_segment = {
            "bool": {
                "must": elastic_filters
            }
        }
        query = {
            "filtered": {
                "query": query_segment,
                "filter": filter_segment,
            }
        }

    body = {"query": query}
    if facet_terms:
        facet_query = _process_facet_terms(facet_terms)
        if facet_query:
            body["facets"] = facet_query

    try:
        es_response = self._es.search(
            index=self.index_name,
            body=body,
            **kwargs
        )
    except exceptions.ElasticsearchException as ex:
        message = six.text_type(ex)
        if 'QueryParsingException' in message:
            log.exception("Malformed search query: %s", message)
            raise QueryParseError('Malformed search query.')
        else:
            # log information and re-raise
            log.exception("error while searching index - %s", str(message))
            raise

    return _translate_hits(es_response)
python
{ "resource": "" }
q258320
perform_search
validation
def perform_search( search_term, user=None, size=10, from_=0, course_id=None): """ Call the search engine with the appropriate parameters """ # field_, filter_ and exclude_dictionary(s) can be overridden by calling application # field_dictionary includes course if course_id provided (field_dictionary, filter_dictionary, exclude_dictionary) = SearchFilterGenerator.generate_field_filters( user=user, course_id=course_id ) searcher = SearchEngine.get_search_engine(getattr(settings, "COURSEWARE_INDEX_NAME", "courseware_index")) if not searcher: raise NoSearchEngineError("No search engine specified in settings.SEARCH_ENGINE") results = searcher.search_string( search_term, field_dictionary=field_dictionary, filter_dictionary=filter_dictionary, exclude_dictionary=exclude_dictionary, size=size, from_=from_, doc_type="courseware_content", ) # post-process the result for result in results["results"]: result["data"] = SearchResultProcessor.process_result(result["data"], search_term, user) results["access_denied_count"] = len([r for r in results["results"] if r["data"] is None]) results["results"] = [r for r in results["results"] if r["data"] is not None] return results
python
{ "resource": "" }
q258321
course_discovery_search
validation
def course_discovery_search(search_term=None, size=20, from_=0, field_dictionary=None): """ Course Discovery activities against the search engine index of course details """ # We'll ignore the course-enrollemnt informaiton in field and filter # dictionary, and use our own logic upon enrollment dates for these use_search_fields = ["org"] (search_fields, _, exclude_dictionary) = SearchFilterGenerator.generate_field_filters() use_field_dictionary = {} use_field_dictionary.update({field: search_fields[field] for field in search_fields if field in use_search_fields}) if field_dictionary: use_field_dictionary.update(field_dictionary) if not getattr(settings, "SEARCH_SKIP_ENROLLMENT_START_DATE_FILTERING", False): use_field_dictionary["enrollment_start"] = DateRange(None, datetime.utcnow()) searcher = SearchEngine.get_search_engine(getattr(settings, "COURSEWARE_INDEX_NAME", "courseware_index")) if not searcher: raise NoSearchEngineError("No search engine specified in settings.SEARCH_ENGINE") results = searcher.search( query_string=search_term, doc_type="course_info", size=size, from_=from_, # only show when enrollment start IS provided and is before now field_dictionary=use_field_dictionary, # show if no enrollment end is provided and has not yet been reached filter_dictionary={"enrollment_end": DateRange(datetime.utcnow(), None)}, exclude_dictionary=exclude_dictionary, facet_terms=course_discovery_facets(), ) return results
python
{ "resource": "" }
q258322
SearchResultProcessor.strings_in_dictionary
validation
def strings_in_dictionary(dictionary):
    """ Used by default implementation for finding excerpt """
    flat_values = []
    nested_dicts = []
    # Single pass: split leaf values from nested dictionaries.
    for value in six.itervalues(dictionary):
        if isinstance(value, dict):
            nested_dicts.append(value)
        else:
            flat_values.append(value)
    # Recurse into nested dictionaries, appending their leaves afterwards.
    for child_dict in nested_dicts:
        flat_values.extend(SearchResultProcessor.strings_in_dictionary(child_dict))
    return flat_values
python
{ "resource": "" }
q258323
SearchResultProcessor.find_matches
validation
def find_matches(strings, words, length_hoped):
    """
    Used by default property excerpt

    Wraps each candidate string into shorter segments, keeps the segments
    that contain any of ``words`` (case-insensitive), and returns them in
    order, truncated once the accumulated length reaches ``length_hoped``.
    """
    lower_words = [w.lower() for w in words]

    def has_match(string):
        """ Do any of the words match within the string """
        lower_string = string.lower()
        return any(test_word in lower_string for test_word in lower_words)

    # Wrap long strings into readable-width segments.
    shortened_strings = [textwrap.wrap(s) for s in strings]
    short_string_list = list(chain.from_iterable(shortened_strings))
    matches = [ms for ms in short_string_list if has_match(ms)]

    cumulative_len = 0
    break_at = None
    for idx, match in enumerate(matches):
        cumulative_len += len(match)
        if cumulative_len >= length_hoped:
            # Include the segment that crossed the threshold: slicing at
            # `idx` (as before) dropped it, returning an empty excerpt when
            # the first matching segment alone exceeded length_hoped.
            break_at = idx + 1
            break

    return matches[0:break_at]
python
{ "resource": "" }
q258324
SearchResultProcessor.decorate_matches
validation
def decorate_matches(match_in, match_word):
    """ decorate the matches within the excerpt """
    decoration = getattr(settings, "SEARCH_MATCH_DECORATION", u"<b>{}</b>")
    # Collect the distinct matched spellings first, then wrap each of them.
    found = set(match.group() for match in re.finditer(match_word, match_in, re.IGNORECASE))
    for matched_string in found:
        match_in = match_in.replace(matched_string, decoration.format(matched_string))
    return match_in
python
{ "resource": "" }
q258325
SearchResultProcessor.add_properties
validation
def add_properties(self):
    """
    Called during post processing of result
    Any properties defined in your subclass will get exposed as members of the result json from the search
    """
    for member_name, member in inspect.getmembers(self.__class__):
        if isinstance(member, property):
            # getattr default of None guards against properties that raise.
            self._results_fields[member_name] = getattr(self, member_name, None)
python
{ "resource": "" }
q258326
SearchResultProcessor.process_result
validation
def process_result(cls, dictionary, match_phrase, user):
    """
    Called from within search handler. Finds desired subclass and decides if the
    result should be removed and adds properties derived from the result information
    """
    # Allow deployments to substitute their own processor class via settings.
    result_processor = _load_class(getattr(settings, "SEARCH_RESULT_PROCESSOR", None), cls)
    srp = result_processor(dictionary, match_phrase)
    if srp.should_remove(user):
        # None signals the caller to drop this result entirely.
        return None
    try:
        srp.add_properties()
    # protect around any problems introduced by subclasses within their properties
    except Exception as ex:  # pylint: disable=broad-except
        log.exception("error processing properties for %s - %s: will remove from results",
                      json.dumps(dictionary, cls=DjangoJSONEncoder), str(ex))
        return None
    return dictionary
python
{ "resource": "" }
q258327
SearchResultProcessor.excerpt
validation
def excerpt(self):
    """
    Property to display a useful excerpt representing the matches within the results
    """
    if "content" not in self._results_fields:
        return None

    match_phrases = [self._match_phrase]
    # Split the phrase into individual words (shlex honors quoting).
    if six.PY2:
        separate_phrases = [
            phrase.decode('utf-8')
            for phrase in shlex.split(self._match_phrase.encode('utf-8'))
        ]
    else:
        separate_phrases = [
            phrase
            for phrase in shlex.split(self._match_phrase)
        ]
    # For multi-word phrases search both the full phrase and each word;
    # otherwise just the single word.
    if len(separate_phrases) > 1:
        match_phrases.extend(separate_phrases)
    else:
        match_phrases = separate_phrases

    matches = SearchResultProcessor.find_matches(
        SearchResultProcessor.strings_in_dictionary(self._results_fields["content"]),
        match_phrases,
        DESIRED_EXCERPT_LENGTH
    )
    excerpt_text = ELLIPSIS.join(matches)

    # Highlight every matched phrase within the assembled excerpt.
    for match_word in match_phrases:
        excerpt_text = SearchResultProcessor.decorate_matches(excerpt_text, match_word)

    return excerpt_text
python
{ "resource": "" }
q258328
SearchFilterGenerator.generate_field_filters
validation
def generate_field_filters(cls, **kwargs):
    """
    Called from within search handler
    Finds desired subclass and adds filter information based upon user information
    """
    generator_class = _load_class(getattr(settings, "SEARCH_FILTER_GENERATOR", None), cls)
    generator = generator_class()
    return (
        generator.field_dictionary(**kwargs),
        generator.filter_dictionary(**kwargs),
        generator.exclude_dictionary(**kwargs),
    )
python
{ "resource": "" }
q258329
SearchInitializer.set_search_enviroment
validation
def set_search_enviroment(cls, **kwargs):
    """
    Called from within search handler
    Finds desired subclass and calls initialize method

    NOTE(review): the misspelling in the method name appears to be part of
    the public interface - renaming would break callers.
    """
    initializer_class = _load_class(getattr(settings, "SEARCH_INITIALIZER", None), cls)
    return initializer_class().initialize(**kwargs)
python
{ "resource": "" }
q258330
Detector._parse
validation
def _parse(self, filename):
    """Opens data file and for each line, calls _eat_name_line"""
    self.names = {}
    with codecs.open(filename, encoding="iso8859-1") as f:
        for line in f:
            # Code points 128-159 are control characters in iso8859-1 but
            # printable in windows-1252; re-decode such lines with the
            # richer codec.
            if any(128 < ord(char) < 160 for char in line):
                line = line.encode("iso8859-1").decode("windows-1252")
            self._eat_name_line(line.strip())
python
{ "resource": "" }
q258331
Detector._eat_name_line
validation
def _eat_name_line(self, line): """Parses one line of data file""" if line[0] not in "#=": parts = line.split() country_values = line[30:-1] name = map_name(parts[1]) if not self.case_sensitive: name = name.lower() if parts[0] == "M": self._set(name, u"male", country_values) elif parts[0] == "1M" or parts[0] == "?M": self._set(name, u"mostly_male", country_values) elif parts[0] == "F": self._set(name, u"female", country_values) elif parts[0] == "1F" or parts[0] == "?F": self._set(name, u"mostly_female", country_values) elif parts[0] == "?": self._set(name, self.unknown_value, country_values) else: raise "Not sure what to do with a sex of %s" % parts[0]
python
{ "resource": "" }
q258332
Detector._set
validation
def _set(self, name, gender, country_values): """Sets gender and relevant country values for names dictionary of detector""" if '+' in name: for replacement in ['', ' ', '-']: self._set(name.replace('+', replacement), gender, country_values) else: if name not in self.names: self.names[name] = {} self.names[name][gender] = country_values
python
{ "resource": "" }
q258333
Detector._most_popular_gender
validation
def _most_popular_gender(self, name, counter): """Finds the most popular gender for the given name counting by given counter""" if name not in self.names: return self.unknown_value max_count, max_tie = (0, 0) best = self.names[name].keys()[0] for gender, country_values in self.names[name].items(): count, tie = counter(country_values) if count > max_count or (count == max_count and tie > max_tie): max_count, max_tie, best = count, tie, gender return best if max_count > 0 else self.unknown_value
python
{ "resource": "" }
q258334
Detector.get_gender
validation
def get_gender(self, name, country=None):
    """Returns best gender for the given name and country pair"""
    if not self.case_sensitive:
        name = name.lower()

    if name not in self.names:
        return self.unknown_value
    elif not country:
        def counter(country_values):
            """Aggregate (number-of-countries, summed weight) over all countries."""
            # list() is required: on Python 3 map() returns an iterator,
            # which has no len() and can only be consumed once.
            country_values = list(map(ord, country_values.replace(" ", "")))
            return (
                len(country_values),
                sum(map(lambda c: c > 64 and c - 55 or c - 48, country_values)),
            )
        return self._most_popular_gender(name, counter)
    elif country in self.__class__.COUNTRIES:
        index = self.__class__.COUNTRIES.index(country)
        counter = lambda e: (ord(e[index]) - 32, 0)
        return self._most_popular_gender(name, counter)
    else:
        raise NoCountryError("No such country: %s" % country)
python
{ "resource": "" }
q258335
Report.output
validation
def output(self, msg, newline=True):
    """
    Writes the specified string to the output target of the report.

    :param msg: the message to output.
    :type msg: str
    :param newline:
        whether or not to append a newline to the end of the message
    :type newline: str
    """
    text = text_type(msg)
    click.echo(text, nl=newline, file=self.output_file)
python
{ "resource": "" }
q258336
execute_tools
validation
def execute_tools(config, path, progress=None):
    """
    Executes the suite of TidyPy tools upon the project and returns the
    issues that are found.

    :param config: the TidyPy configuration to use
    :type config: dict
    :param path: that path to the project to analyze
    :type path: str
    :param progress:
        the progress reporter object that will receive callbacks during the
        execution of the tool suite. If not specified, not progress
        notifications will occur.
    :type progress: tidypy.Progress
    :rtype: tidypy.Collector
    """
    progress = progress or QuietProgress()
    progress.on_start()

    # The manager provides queues/dicts shareable across worker processes.
    manager = SyncManager()
    manager.start()

    num_tools = 0
    tools = manager.Queue()
    for name, cls in iteritems(get_tools()):
        # Only queue tools that are enabled and usable in this environment.
        if config[name]['use'] and cls.can_be_used():
            num_tools += 1
            tools.put({
                'name': name,
                'config': config[name],
            })

    collector = Collector(config)
    if not num_tools:
        # Nothing to run - finish immediately with an empty collector.
        progress.on_finish()
        return collector

    notifications = manager.Queue()
    environment = manager.dict({
        'finder': Finder(path, config),
    })

    workers = []
    for _ in range(config['workers']):
        worker = Worker(
            args=(
                tools,
                notifications,
                environment,
            ),
        )
        worker.start()
        workers.append(worker)

    # Drain worker notifications until every queued tool reports completion.
    while num_tools:
        try:
            notification = notifications.get(True, 0.25)
        except Empty:
            pass
        else:
            if notification['type'] == 'start':
                progress.on_tool_start(notification['tool'])
            elif notification['type'] == 'complete':
                collector.add_issues(notification['issues'])
                progress.on_tool_finish(notification['tool'])
                num_tools -= 1

    progress.on_finish()

    return collector
python
{ "resource": "" }
q258337
execute_reports
validation
def execute_reports(
        config,
        path,
        collector,
        on_report_finish=None,
        output_file=None):
    """
    Executes the configured suite of issue reports.

    :param config: the TidyPy configuration to use
    :type config: dict
    :param path: that path to the project that was analyzed
    :type path: str
    :param collector: the issues to report
    :type collector: tidypy.Collector
    :param on_report_finish:
        optional callback invoked with each report's configuration after
        that report has been produced
    :param output_file: optional file object that reports write to
    """

    reports = get_reports()
    for report in config.get('requested_reports', []):
        # Only produce reports whose type is actually installed.
        if report.get('type') and report['type'] in reports:
            # Per-report settings from the request override the global
            # configuration for that report type.
            cfg = config.get('report', {}).get(report['type'], {})
            cfg.update(report)
            reporter = reports[report['type']](
                cfg,
                path,
                output_file=output_file,
            )
            reporter.produce(collector)
            if on_report_finish:
                on_report_finish(report)
python
{ "resource": "" }
q258338
Finder.is_excluded
validation
def is_excluded(self, path):
    """
    Determines whether or not the specified file is excluded by the
    project's configuration.

    :param path: the path to check
    :type path: pathlib.Path
    :rtype: bool
    """
    # Match the POSIX-style path, relative to the project root, against
    # the configured exclusion masks.
    relpath = path.relative_to(self.base_path).as_posix()
    return matches_masks(relpath, self.excludes)
python
{ "resource": "" }
q258339
Finder.is_excluded_dir
validation
def is_excluded_dir(self, path):
    """
    Determines whether or not the specified directory is excluded by the
    project's configuration.

    :param path: the path to check
    :type path: pathlib.Path
    :rtype: bool
    """
    if self.is_excluded(path):
        return True
    # Some directory names are always skipped regardless of configuration.
    return matches_masks(path.name, ALWAYS_EXCLUDED_DIRS)
python
{ "resource": "" }
q258340
Finder.files
validation
def files(self, filters=None):
    """
    A generator that produces a sequence of paths to files in the
    project that matches the specified filters.

    :param filters:
        the regular expressions to use when finding files in the project.
        If not specified, all files are returned.
    :type filters: list(str)
    """

    # No filters means "match everything".
    filters = compile_masks(filters or [r'.*'])

    for files in itervalues(self._found):
        for file_ in files:
            # Filters are applied to the path relative to the project root.
            relpath = text_type(Path(file_).relative_to(self.base_path))
            if matches_masks(relpath, filters):
                yield file_
python
{ "resource": "" }
q258341
Finder.directories
validation
def directories(self, filters=None, containing=None):
    """
    A generator that produces a sequence of paths to directories in the
    project that matches the specified filters.

    :param filters:
        the regular expressions to use when finding directories in the
        project. If not specified, all directories are returned.
    :type filters: list(str)
    :param containing:
        if a directory passes through the specified filters, it is checked
        for the presence of a file that matches one of the regular
        expressions in this parameter.
    :type containing: list(str)
    """

    filters = compile_masks(filters or [r'.*'])
    contains = compile_masks(containing)

    for dirname, files in iteritems(self._found):
        relpath = text_type(Path(dirname).relative_to(self.base_path))
        if matches_masks(relpath, filters):
            # Yield only when no "containing" masks were given, or when at
            # least one file in the directory matches them.
            if not contains or self._contains(files, contains):
                yield dirname
python
{ "resource": "" }
q258342
Collector.add_issues
validation
def add_issues(self, issues):
    """
    Adds an issue to the collection.

    :param issues: the issue(s) to add
    :type issues: tidypy.Issue or list(tidypy.Issue)
    """

    # Accept a single issue as well as a list/tuple of them.
    if not isinstance(issues, (list, tuple)):
        issues = [issues]
    with self._lock:
        self._all_issues.extend(issues)
        # Invalidate the cached, cleaned-up issue list; it will be rebuilt
        # lazily on next access.
        self._cleaned_issues = None
python
{ "resource": "" }
q258343
Collector.issue_count
validation
def issue_count(self, include_unclean=False):
    """
    Returns the number of issues in the collection.

    :param include_unclean:
        whether or not to include issues that are being ignored due to
        being a duplicate, excluded, etc.
    :type include_unclean: bool
    :rtype: int
    """

    if include_unclean:
        return len(self._all_issues)
    self._ensure_cleaned_issues()
    return len(self._cleaned_issues)
python
{ "resource": "" }
q258344
Collector.get_issues
validation
def get_issues(self, sortby=None):
    """
    Retrieves the issues in the collection.

    :param sortby: the properties to sort the issues by
    :type sortby: list(str)
    :rtype: list(tidypy.Issue)
    """

    self._ensure_cleaned_issues()
    return self._sort_issues(self._cleaned_issues, sortby)
python
{ "resource": "" }
q258345
Collector.get_grouped_issues
validation
def get_grouped_issues(self, keyfunc=None, sortby=None):
    """
    Retrieves the issues in the collection grouped into buckets according
    to the key generated by the keyfunc.

    :param keyfunc:
        a function that will be used to generate the key that identifies
        the group that an issue will be assigned to. This function
        receives a single tidypy.Issue argument and must return a string.
        If not specified, the filename of the issue will be used.
    :type keyfunc: func
    :param sortby: the properties to sort the issues by
    :type sortby: list(str)
    :rtype: OrderedDict
    """

    # Fall back to the default grouping (by file) and sort order.
    if not keyfunc:
        keyfunc = default_group
    if not sortby:
        sortby = self.DEFAULT_SORT
    self._ensure_cleaned_issues()
    return self._group_issues(self._cleaned_issues, keyfunc, sortby)
python
{ "resource": "" }
q258346
Extender.parse
validation
def parse(cls, content, is_pyproject=False):
    """
    A convenience method for parsing a TOML-serialized configuration.

    :param content: a TOML string containing a TidyPy configuration
    :type content: str
    :param is_pyproject:
        whether or not the content is (or resembles) a ``pyproject.toml``
        file, where the TidyPy configuration is located within a key named
        ``tool``.
    :type is_pyproject: bool
    :rtype: dict
    """

    parsed = pytoml.loads(content)
    if is_pyproject:
        # In a pyproject.toml the configuration lives under [tool.tidypy].
        parsed = parsed.get('tool', {})
    parsed = parsed.get('tidypy', {})
    return parsed
python
{ "resource": "" }
q258347
get_tools
validation
def get_tools(): """ Retrieves the TidyPy tools that are available in the current Python environment. The returned dictionary has keys that are the tool names and values are the tool classes. :rtype: dict """ # pylint: disable=protected-access if not hasattr(get_tools, '_CACHE'): get_tools._CACHE = dict() for entry in pkg_resources.iter_entry_points('tidypy.tools'): try: get_tools._CACHE[entry.name] = entry.load() except ImportError as exc: # pragma: no cover output_error( 'Could not load tool "%s" defined by "%s": %s' % ( entry, entry.dist, exc, ), ) return get_tools._CACHE
python
{ "resource": "" }
q258348
get_reports
validation
def get_reports(): """ Retrieves the TidyPy issue reports that are available in the current Python environment. The returned dictionary has keys are the report names and values are the report classes. :rtype: dict """ # pylint: disable=protected-access if not hasattr(get_reports, '_CACHE'): get_reports._CACHE = dict() for entry in pkg_resources.iter_entry_points('tidypy.reports'): try: get_reports._CACHE[entry.name] = entry.load() except ImportError as exc: # pragma: no cover output_error( 'Could not load report "%s" defined by "%s": %s' % ( entry, entry.dist, exc, ), ) return get_reports._CACHE
python
{ "resource": "" }
q258349
get_extenders
validation
def get_extenders(): """ Retrieves the TidyPy configuration extenders that are available in the current Python environment. The returned dictionary has keys are the extender names and values are the extender classes. :rtype: dict """ # pylint: disable=protected-access if not hasattr(get_extenders, '_CACHE'): get_extenders._CACHE = dict() for entry in pkg_resources.iter_entry_points('tidypy.extenders'): try: get_extenders._CACHE[entry.name] = entry.load() except ImportError as exc: # pragma: no cover output_error( 'Could not load extender "%s" defined by "%s": %s' % ( entry, entry.dist, exc, ), ) return get_extenders._CACHE
python
{ "resource": "" }
q258350
purge_config_cache
validation
def purge_config_cache(location=None):
    """
    Clears out the cache of TidyPy configurations that were retrieved from
    outside the normal locations.

    :param location:
        when given, only the cache entry for that location is removed;
        otherwise the entire cache directory is deleted
    """

    cache_path = get_cache_path(location)

    if location:
        # A specific cached configuration: remove just that file.
        os.remove(cache_path)
    else:
        # No location given: remove the whole cache directory tree.
        shutil.rmtree(cache_path)
python
{ "resource": "" }
q258351
get_user_config
validation
def get_user_config(project_path, use_cache=True):
    """
    Produces a TidyPy configuration that incorporates the configuration files
    stored in the current user's home directory.

    Returns ``None`` when no user-level configuration file exists.

    :param project_path: the path to the project that is going to be analyzed
    :type project_path: str
    :param use_cache:
        whether or not to use cached versions of any remote/referenced TidyPy
        configurations. If not specified, defaults to ``True``.
    :type use_cache: bool
    :rtype: dict
    """

    if sys.platform == 'win32':
        user_config = os.path.expanduser(r'~\\tidypy')
    else:
        # Honor XDG_CONFIG_HOME, falling back to ~/.config.
        user_config = os.path.join(
            os.getenv('XDG_CONFIG_HOME') or os.path.expanduser('~/.config'),
            'tidypy'
        )

    if os.path.exists(user_config):
        with open(user_config, 'r') as config_file:
            config = pytoml.load(config_file).get('tidypy', {})

        # Overlay the user's settings onto the defaults, then resolve any
        # "extends" references.
        config = merge_dict(get_default_config(), config)
        config = process_extensions(config, project_path, use_cache=use_cache)
        return config

    return None
python
{ "resource": "" }
q258352
get_local_config
validation
def get_local_config(project_path, use_cache=True):
    """
    Produces a TidyPy configuration using the ``pyproject.toml`` in the
    project's directory.

    Returns ``None`` when the project has no ``pyproject.toml``.

    :param project_path: the path to the project that is going to be analyzed
    :type project_path: str
    :param use_cache:
        whether or not to use cached versions of any remote/referenced TidyPy
        configurations. If not specified, defaults to ``True``.
    :type use_cache: bool
    :rtype: dict
    """

    pyproject_path = os.path.join(project_path, 'pyproject.toml')

    if os.path.exists(pyproject_path):
        with open(pyproject_path, 'r') as config_file:
            config = pytoml.load(config_file)

        # The TidyPy settings live under [tool.tidypy].
        config = config.get('tool', {}).get('tidypy', {})
        # Overlay the project's settings onto the defaults, then resolve
        # any "extends" references.
        config = merge_dict(get_default_config(), config)
        config = process_extensions(config, project_path, use_cache=use_cache)
        return config

    return None
python
{ "resource": "" }
q258353
get_project_config
validation
def get_project_config(project_path, use_cache=True):
    """
    Produces the Tidypy configuration to use for the specified project.

    If a ``pyproject.toml`` exists, the configuration will be based on that.
    If not, the TidyPy configuration in the user's home directory will be
    used. If one does not exist, the default configuration will be used.

    :param project_path: the path to the project that is going to be analyzed
    :type project_path: str
    :param use_cache:
        whether or not to use cached versions of any remote/referenced TidyPy
        configurations. If not specified, defaults to ``True``.
    :type use_cache: bool
    :rtype: dict
    """

    # First non-None wins: project-local, then user-level, then defaults.
    return get_local_config(project_path, use_cache=use_cache) \
        or get_user_config(project_path, use_cache=use_cache) \
        or get_default_config()
python
{ "resource": "" }
q258354
merge_list
validation
def merge_list(list1, list2):
    """
    Merges the contents of two lists into a new list.

    Order is preserved: every element of ``list1`` first, followed by the
    elements of ``list2`` that are not already present.

    :param list1: the first list
    :type list1: list
    :param list2: the second list
    :type list2: list
    :returns: list
    """

    combined = list(list1)
    # extend() consumes the generator one item at a time, so each
    # membership test sees previously appended items — duplicates within
    # list2 are dropped too, exactly like an explicit append loop.
    combined.extend(item for item in list2 if item not in combined)
    return combined
python
{ "resource": "" }
q258355
merge_dict
validation
def merge_dict(dict1, dict2, merge_lists=False):
    """
    Recursively merges the contents of two dictionaries into a new
    dictionary.

    When both inputs share a key, the value from ``dict2`` wins, except
    that nested dictionaries are merged recursively and, when
    ``merge_lists`` is enabled, lists are combined via ``merge_list``.

    :param dict1: the first dictionary
    :type dict1: dict
    :param dict2: the second dictionary
    :type dict2: dict
    :param merge_lists:
        when this function encounters a key that contains lists in both
        input dictionaries, this parameter dictates whether or not those
        lists should be merged. If not specified, defaults to ``False``.
    :type merge_lists: bool
    :returns: dict
    """

    result = dict(dict1)
    for key in dict2:
        incoming = dict2[key]
        existing = result.get(key)
        if isinstance(existing, dict):
            # Note: the recursive merge intentionally uses the default
            # merge_lists, matching the original behavior.
            result[key] = merge_dict(existing, incoming)
        elif merge_lists and isinstance(existing, list):
            result[key] = merge_list(existing, incoming)
        else:
            result[key] = incoming
    return result
python
{ "resource": "" }
q258356
output_error
validation
def output_error(msg):
    """
    Prints the specified string to ``stderr``.

    :param msg: the message to print
    :type msg: str
    """
    # Styled red so errors stand out in terminal output.
    click.echo(click.style(msg, fg='red'), err=True)
python
{ "resource": "" }
q258357
mod_sys_path
validation
def mod_sys_path(paths):
    """
    A context manager that prepends the specified paths to Python's
    ``sys.path`` during the execution of the block, restoring the
    original value afterwards.

    :param paths: the paths to add
    :type paths: list(str)
    """

    saved = sys.path
    # Rebind rather than mutate, so the caller's original list object is
    # untouched and can be restored exactly.
    sys.path = paths + saved
    try:
        yield
    finally:
        sys.path = saved
python
{ "resource": "" }
q258358
compile_masks
validation
def compile_masks(masks):
    """
    Compiles a list of regular expressions.

    :param masks: the regular expressions to compile
    :type masks: list(str) or str
    :returns: list(regular expression object)
    """

    # Nothing to compile for None/empty input.
    if not masks:
        return []
    # Normalize a lone pattern into a one-element sequence.
    if isinstance(masks, (list, tuple)):
        patterns = masks
    else:
        patterns = [masks]
    return [re.compile(pattern) for pattern in patterns]
python
{ "resource": "" }
q258359
matches_masks
validation
def matches_masks(target, masks):
    """
    Determines whether or not the target string matches any of the regular
    expressions specified.

    :param target: the string to check
    :type target: str
    :param masks: the regular expressions to check against
    :type masks: list(regular expression object)
    :returns: bool
    """

    # any() short-circuits on the first match, just like the explicit
    # loop-and-return form.
    return any(mask.search(target) for mask in masks)
python
{ "resource": "" }
q258360
read_file
validation
def read_file(filepath):
    """
    Retrieves the contents of the specified file.

    This function performs simple caching so that the same file isn't read
    more than once per process.

    :param filepath: the file to read
    :type filepath: str
    :returns: str
    """

    # The entire check-and-fill happens under the lock so concurrent
    # callers don't read the same file twice.
    with _FILE_CACHE_LOCK:
        if filepath not in _FILE_CACHE:
            _FILE_CACHE[filepath] = _read_file(filepath)
        return _FILE_CACHE[filepath]
python
{ "resource": "" }
q258361
parse_python_file
validation
def parse_python_file(filepath):
    """
    Retrieves the AST of the specified file.

    This function performs simple caching so that the same file isn't read
    or parsed more than once per process.

    :param filepath: the file to parse
    :type filepath: str
    :returns: ast.AST
    """

    # The entire check-and-fill happens under the lock so concurrent
    # callers don't parse the same file twice.
    with _AST_CACHE_LOCK:
        if filepath not in _AST_CACHE:
            # read_file() has its own cache, so the source is read at
            # most once even across both caches.
            source = read_file(filepath)
            _AST_CACHE[filepath] = ast.parse(source, filename=filepath)
        return _AST_CACHE[filepath]
python
{ "resource": "" }
q258362
Progress.on_tool_finish
validation
def on_tool_finish(self, tool):
    """
    Called when an individual tool completes execution.

    :param tool: the name of the tool that completed
    :type tool: str
    """

    with self._lock:
        # Guarded so a duplicate "finish" notification for the same tool
        # is ignored.
        if tool in self.current_tools:
            self.current_tools.remove(tool)
            self.completed_tools.append(tool)
python
{ "resource": "" }
q258363
Emulator.exec_command
validation
def exec_command(self, cmdstr):
    """
    Execute an x3270 command

    `cmdstr` gets sent directly to the x3270 subprocess on it's stdin.

    :raises TerminatedError: if this Emulator instance has already been
        terminated.
    """
    if self.is_terminated:
        raise TerminatedError("this TerminalClient instance has been terminated")

    log.debug("sending command: %s", cmdstr)
    c = Command(self.app, cmdstr)
    start = time.time()
    c.execute()
    elapsed = time.time() - start
    log.debug("elapsed execution: {0}".format(elapsed))
    # Every command response carries a status line; keep our view of the
    # emulator state up to date.
    self.status = Status(c.status_line)

    return c
python
{ "resource": "" }
q258364
Emulator.terminate
validation
def terminate(self):
    """
    terminates the underlying x3270 subprocess. Once called, this
    Emulator instance must no longer be used.
    """
    if not self.is_terminated:
        log.debug("terminal client terminated")
        try:
            self.exec_command(b"Quit")
        except BrokenPipeError:  # noqa
            # x3270 was terminated, since we are just quitting anyway,
            # ignore it.
            pass
        except socket.error as e:
            if e.errno != errno.ECONNRESET:
                raise
            # this can happen because wc3270 closes the socket before
            # the read() can happen, causing a socket error

        self.app.close()

        self.is_terminated = True
python
{ "resource": "" }
q258365
Emulator.is_connected
validation
def is_connected(self): """ Return bool indicating connection state """ # need to wrap in try/except b/c of wc3270's socket connection dynamics try: # this is basically a no-op, but it results in the the current status # getting updated self.exec_command(b"Query(ConnectionState)") # connected status is like 'C(192.168.1.1)', disconnected is 'N' return self.status.connection_state.startswith(b"C(") except NotConnectedException: return False
python
{ "resource": "" }
q258366
Emulator.connect
validation
def connect(self, host):
    """
    Connect to a host

    :param host: the hostname (or host string) to connect to
    """
    # Fall back to the scripting Connect() action when the app reports it
    # did not itself handle the connection.
    if not self.app.connect(host):
        command = "Connect({0})".format(host).encode("ascii")
        self.exec_command(command)
    # Remembered regardless of which connection path was taken.
    self.last_host = host
python
{ "resource": "" }
q258367
Emulator.wait_for_field
validation
def wait_for_field(self):
    """
    Wait until the screen is ready, the cursor has been positioned on a
    modifiable field, and the keyboard is unlocked.

    Sometimes the server will "unlock" the keyboard but the screen will
    not yet be ready. In that case, an attempt to read or write to the
    screen will result in a 'E' keyboard status because we tried to read
    from a screen that is not yet ready.

    Using this method tells the client to wait until a field is detected
    and the cursor has been positioned on it.

    :raises KeyboardStateError: if the keyboard is still locked after the
        wait completes.
    """
    self.exec_command("Wait({0}, InputField)".format(self.timeout).encode("ascii"))
    # 'U' is the unlocked keyboard state in the x3270 status line.
    if self.status.keyboard != b"U":
        raise KeyboardStateError(
            "keyboard not unlocked, state was: {0}".format(
                self.status.keyboard.decode("ascii")
            )
        )
python
{ "resource": "" }
q258368
Emulator.move_to
validation
def move_to(self, ypos, xpos): """ move the cursor to the given co-ordinates. Co-ordinates are 1 based, as listed in the status area of the terminal. """ # the screen's co-ordinates are 1 based, but the command is 0 based xpos -= 1 ypos -= 1 self.exec_command("MoveCursor({0}, {1})".format(ypos, xpos).encode("ascii"))
python
{ "resource": "" }
q258369
Emulator.fill_field
validation
def fill_field(self, ypos, xpos, tosend, length):
    """
    clears the field at the position given and inserts the string
    `tosend`

    tosend: the string to insert
    length: the length of the field

    Co-ordinates are 1 based, as listed in the status area of the
    terminal.

    raises: FieldTruncateError if `tosend` is longer than `length`.
    """
    if length < len(tosend):
        raise FieldTruncateError('length limit %d, but got "%s"' % (length, tosend))
    # Only reposition the cursor when both co-ordinates were supplied;
    # otherwise operate on the field under the current cursor position.
    if xpos is not None and ypos is not None:
        self.move_to(ypos, xpos)
    self.delete_field()
    self.send_string(tosend)
python
{ "resource": "" }
q258370
Constraint.from_func
validation
def from_func(cls, func, variables, vartype, name=None):
    """Construct a constraint from a validation function.

    Args:
        func (function):
            Function that evaluates True when the variables satisfy the
            constraint.
        variables (iterable):
            Iterable of variable labels.
        vartype (:class:`~dimod.Vartype`/str/set):
            Variable type for the constraint. Accepted input values:

            * :attr:`~dimod.Vartype.SPIN`, ``'SPIN'``, ``{-1, 1}``
            * :attr:`~dimod.Vartype.BINARY`, ``'BINARY'``, ``{0, 1}``
        name (string, optional, default='Constraint'):
            Name for the constraint.

    Examples:
        This example creates a constraint that binary variables `a` and
        `b` are not equal.

        >>> import dwavebinarycsp
        >>> import operator
        >>> const = dwavebinarycsp.Constraint.from_func(operator.ne, ['a', 'b'], 'BINARY')
        >>> print(const.name)
        Constraint
        >>> (0, 1) in const.configurations
        True

        This example creates a constraint that :math:`out = NOT(x)`
        for spin variables.

        >>> import dwavebinarycsp
        >>> def not_(y, x):  # y=NOT(x) for spin variables
        ...     return (y == -x)
        ...
        >>> const = dwavebinarycsp.Constraint.from_func(
        ...     not_,
        ...     ['out', 'in'],
        ...     {1, -1},
        ...     name='not_spin')
        >>> print(const.name)
        not_spin
        >>> (1, -1) in const.configurations
        True

    """
    variables = tuple(variables)

    # Enumerate every possible assignment of the variables and keep only
    # those the validation function accepts.
    # NOTE(review): vartype.value is accessed directly here — presumably a
    # decorator/caller has already coerced vartype to a dimod.Vartype.
    configurations = frozenset(config
                               for config in itertools.product(vartype.value, repeat=len(variables))
                               if func(*config))

    return cls(func, configurations, variables, vartype, name)
python
{ "resource": "" }
q258371
Constraint.from_configurations
validation
def from_configurations(cls, configurations, variables, vartype, name=None):
    """Construct a constraint from valid configurations.

    Args:
        configurations (iterable[tuple]):
            Valid configurations of the variables. Each configuration is a
            tuple of variable assignments ordered by
            :attr:`~Constraint.variables`.
        variables (iterable):
            Iterable of variable labels.
        vartype (:class:`~dimod.Vartype`/str/set):
            Variable type for the constraint. Accepted input values:

            * :attr:`~dimod.Vartype.SPIN`, ``'SPIN'``, ``{-1, 1}``
            * :attr:`~dimod.Vartype.BINARY`, ``'BINARY'``, ``{0, 1}``
        name (string, optional, default='Constraint'):
            Name for the constraint.

    Examples:
        This example creates a constraint that variables `a` and `b` are
        not equal.

        >>> import dwavebinarycsp
        >>> const = dwavebinarycsp.Constraint.from_configurations([(0, 1), (1, 0)],
        ...                   ['a', 'b'], dwavebinarycsp.BINARY)
        >>> print(const.name)
        Constraint
        >>> (0, 0) in const.configurations   # Order matches variables: a,b
        False

        This example creates a constraint based on specified valid
        configurations that represents an OR gate for spin variables.

        >>> import dwavebinarycsp
        >>> const = dwavebinarycsp.Constraint.from_configurations(
        ...           [(-1, -1, -1), (1, -1, 1), (1, 1, -1), (1, 1, 1)],
        ...           ['y', 'x1', 'x2'],
        ...           dwavebinarycsp.SPIN, name='or_spin')
        >>> print(const.name)
        or_spin
        >>> (1, 1, -1) in const.configurations   # Order matches variables: y,x1,x2
        True

    """
    # The validation function is simply membership in the accepted set;
    # the closure keeps the constraint callable like any other.
    def func(*args):
        return args in configurations

    return cls(func, configurations, variables, vartype, name)
python
{ "resource": "" }
q258372
Constraint.check
validation
def check(self, solution):
    """Check that a solution satisfies the constraint.

    Args:
        solution (container):
            An assignment for the variables in the constraint. May contain
            assignments for variables outside the constraint; they are
            ignored.

    Returns:
        bool: True if the solution satisfies the constraint; otherwise
        False.
    """
    # Gather the assignment for each constrained variable, in declaration
    # order, and hand the values to the validation function.
    values = tuple(solution[v] for v in self.variables)
    return self.func(*values)
python
{ "resource": "" }
q258373
Constraint.fix_variable
validation
def fix_variable(self, v, value):
    """Fix the value of a variable and remove it from the constraint.

    Args:
        v (variable):
            Variable in the constraint to be set to a constant value.
        value (int):
            Value assigned to the variable. Values must match the
            :class:`.Vartype` of the constraint.

    Raises:
        ValueError: if `v` is not in the constraint or `value` is not
            valid for the constraint's vartype.
        UnsatError: if the fixed value leaves no valid configurations.

    Examples:
        This example creates a constraint that :math:`a \\ne b` on binary
        variables, fixes variable a to 0, and tests two candidate
        solutions.

        >>> import dwavebinarycsp
        >>> const = dwavebinarycsp.Constraint.from_func(operator.ne,
        ...             ['a', 'b'], dwavebinarycsp.BINARY)
        >>> const.fix_variable('a', 0)
        >>> const.check({'b': 1})
        True
        >>> const.check({'b': 0})
        False

    """
    variables = self.variables
    try:
        idx = variables.index(v)
    except ValueError:
        raise ValueError("given variable {} is not part of the constraint".format(v))

    if value not in self.vartype.value:
        raise ValueError("expected value to be in {}, received {} instead".format(self.vartype.value, value))

    # Keep only the configurations consistent with the fixed value, with
    # the fixed variable's position removed from each tuple.
    configurations = frozenset(config[:idx] + config[idx + 1:]  # exclude the fixed var
                               for config in self.configurations
                               if config[idx] == value)

    if not configurations:
        raise UnsatError("fixing {} to {} makes this constraint unsatisfiable".format(v, value))

    variables = variables[:idx] + variables[idx + 1:]

    self.configurations = configurations
    self.variables = variables

    # The old validation function expects the removed variable, so replace
    # it with simple membership in the reduced configuration set.
    def func(*args):
        return args in configurations
    self.func = func

    self.name = '{} ({} fixed to {})'.format(self.name, v, value)
python
{ "resource": "" }
q258374
Constraint.flip_variable
validation
def flip_variable(self, v):
    """Flip a variable in the constraint.

    Args:
        v (variable):
            Variable in the constraint to take the complementary value of
            its construction value.

    Raises:
        ValueError: if `v` is not a variable in the constraint.

    Examples:
        This example creates a constraint that :math:`a = b` on binary
        variables and flips variable a.

        >>> import dwavebinarycsp
        >>> const = dwavebinarycsp.Constraint.from_func(operator.eq,
        ...             ['a', 'b'], dwavebinarycsp.BINARY)
        >>> const.check({'a': 0, 'b': 0})
        True
        >>> const.flip_variable('a')
        >>> const.check({'a': 1, 'b': 0})
        True
        >>> const.check({'a': 0, 'b': 0})
        False

    """
    try:
        idx = self.variables.index(v)
    except ValueError:
        raise ValueError("variable {} is not a variable in constraint {}".format(v, self.name))

    if self.vartype is dimod.BINARY:

        # Wrap the validation function so it sees the complemented value,
        # and complement the variable's position in every configuration.
        original_func = self.func

        def func(*args):
            new_args = list(args)
            new_args[idx] = 1 - new_args[idx]  # negate v
            return original_func(*new_args)

        self.func = func

        self.configurations = frozenset(config[:idx] + (1 - config[idx],) + config[idx + 1:]
                                        for config in self.configurations)

    else:  # SPIN

        original_func = self.func

        def func(*args):
            new_args = list(args)
            new_args[idx] = -new_args[idx]  # negate v
            return original_func(*new_args)

        self.func = func

        self.configurations = frozenset(config[:idx] + (-config[idx],) + config[idx + 1:]
                                        for config in self.configurations)

    self.name = '{} ({} flipped)'.format(self.name, v)
python
{ "resource": "" }
q258375
Constraint.copy
validation
def copy(self): """Create a copy. Examples: This example copies constraint :math:`a \\ne b` and tests a solution on the copied constraint. >>> import dwavebinarycsp >>> import operator >>> const = dwavebinarycsp.Constraint.from_func(operator.ne, ... ['a', 'b'], 'BINARY') >>> const2 = const.copy() >>> const2 is const False >>> const2.check({'a': 1, 'b': 1}) False """ # each object is itself immutable (except the function) return self.__class__(self.func, self.configurations, self.variables, self.vartype, name=self.name)
python
{ "resource": "" }
q258376
Constraint.projection
validation
def projection(self, variables): """Create a new constraint that is the projection onto a subset of the variables. Args: variables (iterable): Subset of the constraint's variables. Returns: :obj:`.Constraint`: A new constraint over a subset of the variables. Examples: >>> import dwavebinarycsp ... >>> const = dwavebinarycsp.Constraint.from_configurations([(0, 0), (0, 1)], ... ['a', 'b'], ... dwavebinarycsp.BINARY) >>> proj = const.projection(['a']) >>> proj.variables ['a'] >>> proj.configurations {(0,)} """ # resolve iterables or mutability problems by casting the variables to a set variables = set(variables) if not variables.issubset(self.variables): raise ValueError("Cannot project to variables not in the constraint.") idxs = [i for i, v in enumerate(self.variables) if v in variables] configurations = frozenset(tuple(config[i] for i in idxs) for config in self.configurations) variables = tuple(self.variables[i] for i in idxs) return self.from_configurations(configurations, variables, self.vartype)
python
{ "resource": "" }
q258377
assert_penaltymodel_factory_available
validation
def assert_penaltymodel_factory_available():
    """For `dwavebinarycsp` to be functional, at least one penalty model
    factory has to be installed. See discussion in setup.py for details.

    Raises:
        AssertionError: if no supported penaltymodel factory entry point
            is registered in the current environment.
    """
    # Imports are local so this check doesn't add module-level dependencies.
    from pkg_resources import iter_entry_points
    from penaltymodel.core import FACTORY_ENTRYPOINT
    from itertools import chain

    supported = ('maxgap', 'mip')
    # Lazily chain the entry-point iterators; we only need to know whether
    # at least one factory exists.
    factories = chain(*(iter_entry_points(FACTORY_ENTRYPOINT, name) for name in supported))

    try:
        next(factories)
    except StopIteration:
        raise AssertionError(
            "To use 'dwavebinarycsp', at least one penaltymodel factory must be installed. "
            "Try {}.".format(
                " or ".join("'pip install dwavebinarycsp[{}]'".format(name)
                            for name in supported)
            ))
python
{ "resource": "" }
q258378
add_constraint
validation
def add_constraint(self, constraint, variables=tuple()):
    """Add a constraint.

    Args:
        constraint (function/iterable/:obj:`.Constraint`):
            Constraint definition in one of the supported formats:

            1. Function, with input arguments matching the order and
               :attr:`~.ConstraintSatisfactionProblem.vartype` type of the
               `variables` argument, that evaluates True when the
               constraint is satisfied.
            2. List explicitly specifying each allowed configuration as a
               tuple.
            3. :obj:`.Constraint` object built either explicitly or by
               :mod:`dwavebinarycsp.factories`.

        variables (iterable):
            Variables associated with the constraint. Not required when
            `constraint` is a :obj:`.Constraint` object.

    Raises:
        ValueError: if `variables` is given and does not match the
            variables of a supplied :obj:`.Constraint`.
        TypeError: if `constraint` is none of the supported formats.

    Examples:
        This example defines a function that evaluates True when the
        constraint is satisfied. The function's input arguments match the
        order and type of the `variables` argument.

        >>> import dwavebinarycsp
        >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
        >>> def all_equal(a, b, c):  # works for both dwavebinarycsp.BINARY and dwavebinarycsp.SPIN
        ...     return (a == b) and (b == c)
        >>> csp.add_constraint(all_equal, ['a', 'b', 'c'])
        >>> csp.check({'a': 0, 'b': 0, 'c': 0})
        True
        >>> csp.check({'a': 0, 'b': 0, 'c': 1})
        False

        This example explicitly lists allowed configurations.

        >>> import dwavebinarycsp
        >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.SPIN)
        >>> eq_configurations = {(-1, -1), (1, 1)}
        >>> csp.add_constraint(eq_configurations, ['v0', 'v1'])
        >>> csp.check({'v0': -1, 'v1': +1})
        False
        >>> csp.check({'v0': -1, 'v1': -1})
        True

        This example uses a :obj:`.Constraint` object built by
        :mod:`dwavebinarycsp.factories`.

        >>> import dwavebinarycsp
        >>> import dwavebinarycsp.factories.constraint.gates as gates
        >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
        >>> csp.add_constraint(gates.and_gate(['a', 'b', 'c']))  # add an AND gate
        >>> csp.add_constraint(gates.xor_gate(['a', 'c', 'd']))  # add an XOR gate
        >>> csp.check({'a': 1, 'b': 0, 'c': 0, 'd': 1})
        True

    """
    if isinstance(constraint, Constraint):
        # An already-built Constraint carries its own variables; a
        # mismatched explicit list is almost certainly a caller error.
        if variables and (tuple(variables) != constraint.variables):
            raise ValueError("mismatched variables and Constraint")
    elif isinstance(constraint, Callable):
        constraint = Constraint.from_func(constraint, variables, self.vartype)
    elif isinstance(constraint, Iterable):
        constraint = Constraint.from_configurations(constraint, variables, self.vartype)
    else:
        raise TypeError("Unknown constraint type given")

    self.constraints.append(constraint)
    # Index the constraint under each of its variables for fast lookup.
    for v in constraint.variables:
        self.variables[v].append(constraint)
python
{ "resource": "" }
q258379
stitch
validation
def stitch(csp, min_classical_gap=2.0, max_graph_size=8):
    """Build a binary quadratic model with minimal energy levels at solutions
    to the specified constraint satisfaction problem.

    Args:
        csp (:obj:`.ConstraintSatisfactionProblem`):
            Constraint satisfaction problem.

        min_classical_gap (float, optional, default=2.0):
            Minimum energy gap from ground. Each constraint violated by the
            solution increases the energy level of the binary quadratic model
            by at least this much relative to ground energy.

        max_graph_size (int, optional, default=8):
            Maximum number of variables in the binary quadratic model that can
            be used to represent a single constraint.

    Returns:
        :class:`~dimod.BinaryQuadraticModel`

    Raises:
        ImpossibleBQM: If some constraint cannot be mapped to a penalty model
            using at most `max_graph_size` variables at the requested gap.
        RuntimeError: If no penaltymodel factory is available.

    Notes:
        For a `min_classical_gap` > 2 or constraints with more than two
        variables, requires access to factories from the penaltymodel_
        ecosystem to construct the binary quadratic model.

    .. _penaltymodel: https://github.com/dwavesystems/penaltymodel

    Examples:
        This example creates a binary-valued constraint satisfaction problem
        with two constraints, :math:`a = b` and :math:`b \\ne c`, and builds
        a binary quadratic model such that each constraint violation by a
        solution adds the default minimum energy gap.

        >>> import dwavebinarycsp
        >>> import operator
        >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY)
        >>> csp.add_constraint(operator.eq, ['a', 'b'])  # a == b
        >>> csp.add_constraint(operator.ne, ['b', 'c'])  # b != c
        >>> bqm = dwavebinarycsp.stitch(csp)
        >>> bqm.energy({'a': 0, 'b': 0, 'c': 1})  # satisfies csp
        -2.0
        >>> bqm.energy({'a': 0, 'b': 0, 'c': 0})  # violates one constraint
        0.0
        >>> bqm.energy({'a': 1, 'b': 0, 'c': 0})  # violates two constraints
        2.0
    """
    # ensure we have a penaltymodel factory available; without one we cannot
    # convert multi-variable constraints
    try:
        dwavebinarycsp.assert_penaltymodel_factory_available()
    except AssertionError as e:
        raise RuntimeError(e) from e

    # generator of fresh ancillary-variable labels: 'aux0', 'aux1', ...
    def aux_factory():
        for i in count():
            yield 'aux{}'.format(i)

    aux = aux_factory()

    bqm = dimod.BinaryQuadraticModel.empty(csp.vartype)

    # developer note: we could cache them and relabel, for now though let's do the simple thing
    # penalty_models = {}

    for const in csp.constraints:
        configurations = const.configurations

        if len(const.variables) > max_graph_size:
            msg = ("The given csp contains a constraint {const} with {num_var} variables. "
                   "This cannot be mapped to a graph with {max_graph_size} nodes. "
                   "Consider checking whether your constraint is irreducible."
                   "").format(const=const, num_var=len(const.variables),
                              max_graph_size=max_graph_size)
            raise ImpossibleBQM(msg)

        pmodel = None

        if len(const) == 0:
            # empty constraint contributes nothing
            continue

        if min_classical_gap <= 2.0:
            # 1- and 2-variable constraints have closed-form penalty models
            # with classical gap exactly 2, so skip the factory machinery
            if len(const) == 1 and max_graph_size >= 1:
                bqm.update(_bqm_from_1sat(const))
                continue
            elif len(const) == 2 and max_graph_size >= 2:
                bqm.update(_bqm_from_2sat(const))
                continue

        # developer note: we could cache them and relabel, for now though let's do the simple thing
        # if configurations in penalty_models:
        #     raise NotImplementedError

        # grow the graph (adding auxiliary variables) until a penalty model
        # with a large enough gap exists, or we hit max_graph_size
        for G in iter_complete_graphs(const.variables, max_graph_size + 1, aux):

            # construct a specification
            spec = pm.Specification(
                graph=G,
                decision_variables=const.variables,
                feasible_configurations=configurations,
                min_classical_gap=min_classical_gap,
                vartype=csp.vartype
            )

            # try to use the penaltymodel ecosystem
            try:
                pmodel = pm.get_penalty_model(spec)
            except pm.ImpossiblePenaltyModel:
                # hopefully adding more variables will make it possible
                continue

            if pmodel.classical_gap >= min_classical_gap:
                break

        # developer note: we could cache them and relabel, for now though let's do the simple thing
        # penalty_models[configurations] = pmodel

        else:
            msg = "No penalty model can be built for constraint {}".format(const)
            raise ImpossibleBQM(msg)

        bqm.update(pmodel.model)

    return bqm
python
{ "resource": "" }
q258380
_bqm_from_1sat
validation
def _bqm_from_1sat(constraint): """create a bqm for a constraint with only one variable bqm will have exactly classical gap 2. """ configurations = constraint.configurations num_configurations = len(configurations) bqm = dimod.BinaryQuadraticModel.empty(constraint.vartype) if num_configurations == 1: val, = next(iter(configurations)) v, = constraint.variables bqm.add_variable(v, -1 if val > 0 else +1, vartype=dimod.SPIN) else: bqm.add_variables_from((v, 0.0) for v in constraint.variables) return bqm
python
{ "resource": "" }
q258381
_bqm_from_2sat
validation
def _bqm_from_2sat(constraint):
    """create a bqm for a constraint with two variables.

    bqm will have exactly classical gap 2.
    """
    configurations = constraint.configurations
    variables = constraint.variables  # kept for parity with configurations/vartype (unused below)
    vartype = constraint.vartype
    u, v = constraint.variables

    # if all configurations are present, then nothing is infeasible and the bqm is just all
    # 0.0s
    if len(configurations) == 4:
        return dimod.BinaryQuadraticModel.empty(constraint.vartype)

    # check if the constraint is irreducible, and if so, build the bqm for its two
    # components (each component is a 1-variable constraint)
    components = irreducible_components(constraint)
    if len(components) > 1:
        const0 = Constraint.from_configurations(((config[0],) for config in configurations),
                                                (u,), vartype)
        const1 = Constraint.from_configurations(((config[1],) for config in configurations),
                                                (v,), vartype)
        bqm = _bqm_from_1sat(const0)
        bqm.update(_bqm_from_1sat(const1))
        return bqm

    assert len(configurations) > 1, "single configurations should be irreducible"

    # if it is not irreducible, and there are infeasible configurations, then it is time to
    # start building a bqm
    bqm = dimod.BinaryQuadraticModel.empty(vartype)

    # if the constraint is not irreducible and has two configurations, then it is either eq or ne
    if all(operator.eq(*config) for config in configurations):
        bqm.add_interaction(u, v, -1, vartype=dimod.SPIN)  # equality
    elif all(operator.ne(*config) for config in configurations):
        bqm.add_interaction(u, v, +1, vartype=dimod.SPIN)  # inequality
    elif (1, 1) not in configurations:
        # three feasible configurations with (1, 1) excluded (NAND-like)
        bqm.add_interaction(u, v, 2, vartype=dimod.BINARY)  # penalize (1, 1)
    elif (-1, +1) not in configurations and (0, 1) not in configurations:
        # penalize the (low, high) configuration; membership is checked in both
        # SPIN and BINARY notation since configurations match the vartype
        bqm.add_interaction(u, v, -2, vartype=dimod.BINARY)
        bqm.add_variable(v, 2, vartype=dimod.BINARY)
    elif (+1, -1) not in configurations and (1, 0) not in configurations:
        # penalize the (high, low) configuration
        bqm.add_interaction(u, v, -2, vartype=dimod.BINARY)
        bqm.add_variable(u, 2, vartype=dimod.BINARY)
    else:  # (0, 0) not in configurations
        bqm.add_interaction(u, v, 2, vartype=dimod.BINARY)
        bqm.add_variable(u, -2, vartype=dimod.BINARY)
        bqm.add_variable(v, -2, vartype=dimod.BINARY)

    return bqm
python
{ "resource": "" }
q258382
iter_complete_graphs
validation
def iter_complete_graphs(start, stop, factory=None): """Iterate over complete graphs. Args: start (int/iterable): Define the size of the starting graph. If an int, the nodes will be index-labeled, otherwise should be an iterable of node labels. stop (int): Stops yielding graphs when the size equals stop. factory (iterator, optional): If provided, nodes added will be labeled according to the values returned by factory. Otherwise the extra nodes will be index-labeled. Yields: :class:`nx.Graph` """ _, nodes = start nodes = list(nodes) # we'll be appending if factory is None: factory = count() while len(nodes) < stop: # we need to construct a new graph each time, this is actually faster than copy and add # the new edges in any case G = nx.complete_graph(nodes) yield G v = next(factory) while v in G: v = next(factory) nodes.append(v)
python
{ "resource": "" }
q258383
load_cnf
validation
def load_cnf(fp): """Load a constraint satisfaction problem from a .cnf file. Args: fp (file, optional): `.write()`-supporting `file object`_ DIMACS CNF formatted_ file. Returns: :obj:`.ConstraintSatisfactionProblem` a binary-valued SAT problem. Examples: >>> import dwavebinarycsp as dbcsp ... >>> with open('test.cnf', 'r') as fp: # doctest: +SKIP ... csp = dbcsp.cnf.load_cnf(fp) .. _file object: https://docs.python.org/3/glossary.html#term-file-object .. _formatted: http://www.satcompetition.org/2009/format-benchmarks2009.html """ fp = iter(fp) # handle lists/tuples/etc csp = ConstraintSatisfactionProblem(dimod.BINARY) # first look for the problem num_clauses = num_variables = 0 problem_pattern = re.compile(_PROBLEM_REGEX) for line in fp: matches = problem_pattern.findall(line) if matches: if len(matches) > 1: raise ValueError nv, nc = matches[0] num_variables, num_clauses = int(nv), int(nc) break # now parse the clauses, picking up where we left off looking for the header clause_pattern = re.compile(_CLAUSE_REGEX) for line in fp: if clause_pattern.match(line) is not None: clause = [int(v) for v in line.split(' ')[:-1]] # line ends with a trailing 0 # -1 is the notation for NOT(1) variables = [abs(v) for v in clause] f = _cnf_or(clause) csp.add_constraint(f, variables) for v in range(1, num_variables+1): csp.add_variable(v) for v in csp.variables: if v > num_variables: msg = ("given .cnf file's header defines variables [1, {}] and {} clauses " "but constraints a reference to variable {}").format(num_variables, num_clauses, v) raise ValueError(msg) if len(csp) != num_clauses: msg = ("given .cnf file's header defines {} " "clauses but the file contains {}").format(num_clauses, len(csp)) raise ValueError(msg) return csp
python
{ "resource": "" }
q258384
and_gate
validation
def and_gate(variables, vartype=dimod.BINARY, name='AND'): """AND gate. Args: variables (list): Variable labels for the and gate as `[in1, in2, out]`, where `in1, in2` are inputs and `out` the gate's output. vartype (Vartype, optional, default='BINARY'): Variable type. Accepted input values: * Vartype.SPIN, 'SPIN', {-1, 1} * Vartype.BINARY, 'BINARY', {0, 1} name (str, optional, default='AND'): Name for the constraint. Returns: Constraint(:obj:`.Constraint`): Constraint that is satisfied when its variables are assigned values that match the valid states of an AND gate. Examples: >>> import dwavebinarycsp >>> import dwavebinarycsp.factories.constraint.gates as gates >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(gates.and_gate(['a', 'b', 'c'], name='AND1')) >>> csp.check({'a': 1, 'b': 0, 'c': 0}) True """ variables = tuple(variables) if vartype is dimod.BINARY: configurations = frozenset([(0, 0, 0), (0, 1, 0), (1, 0, 0), (1, 1, 1)]) def func(in1, in2, out): return (in1 and in2) == out else: # SPIN, vartype is checked by the decorator configurations = frozenset([(-1, -1, -1), (-1, +1, -1), (+1, -1, -1), (+1, +1, +1)]) def func(in1, in2, out): return ((in1 > 0) and (in2 > 0)) == (out > 0) return Constraint(func, configurations, variables, vartype=vartype, name=name)
python
{ "resource": "" }
q258385
xor_gate
validation
def xor_gate(variables, vartype=dimod.BINARY, name='XOR'): """XOR gate. Args: variables (list): Variable labels for the and gate as `[in1, in2, out]`, where `in1, in2` are inputs and `out` the gate's output. vartype (Vartype, optional, default='BINARY'): Variable type. Accepted input values: * Vartype.SPIN, 'SPIN', {-1, 1} * Vartype.BINARY, 'BINARY', {0, 1} name (str, optional, default='XOR'): Name for the constraint. Returns: Constraint(:obj:`.Constraint`): Constraint that is satisfied when its variables are assigned values that match the valid states of an XOR gate. Examples: >>> import dwavebinarycsp >>> import dwavebinarycsp.factories.constraint.gates as gates >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(gates.xor_gate(['x', 'y', 'z'], name='XOR1')) >>> csp.check({'x': 1, 'y': 1, 'z': 1}) False """ variables = tuple(variables) if vartype is dimod.BINARY: configs = frozenset([(0, 0, 0), (0, 1, 1), (1, 0, 1), (1, 1, 0)]) def func(in1, in2, out): return (in1 != in2) == out else: # SPIN, vartype is checked by the decorator configs = frozenset([(-1, -1, -1), (-1, +1, +1), (+1, -1, +1), (+1, +1, -1)]) def func(in1, in2, out): return ((in1 > 0) != (in2 > 0)) == (out > 0) return Constraint(func, configs, variables, vartype=vartype, name=name)
python
{ "resource": "" }
q258386
halfadder_gate
validation
def halfadder_gate(variables, vartype=dimod.BINARY, name='HALF_ADDER'): """Half adder. Args: variables (list): Variable labels for the and gate as `[in1, in2, sum, carry]`, where `in1, in2` are inputs to be added and `sum` and 'carry' the resultant outputs. vartype (Vartype, optional, default='BINARY'): Variable type. Accepted input values: * Vartype.SPIN, 'SPIN', {-1, 1} * Vartype.BINARY, 'BINARY', {0, 1} name (str, optional, default='HALF_ADDER'): Name for the constraint. Returns: Constraint(:obj:`.Constraint`): Constraint that is satisfied when its variables are assigned values that match the valid states of a Boolean half adder. Examples: >>> import dwavebinarycsp >>> import dwavebinarycsp.factories.constraint.gates as gates >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(gates.halfadder_gate(['a', 'b', 'total', 'carry'], name='HA1')) >>> csp.check({'a': 1, 'b': 1, 'total': 0, 'carry': 1}) True """ variables = tuple(variables) if vartype is dimod.BINARY: configs = frozenset([(0, 0, 0, 0), (0, 1, 1, 0), (1, 0, 1, 0), (1, 1, 0, 1)]) else: # SPIN, vartype is checked by the decorator configs = frozenset([(-1, -1, -1, -1), (-1, +1, +1, -1), (+1, -1, +1, -1), (+1, +1, -1, +1)]) def func(augend, addend, sum_, carry): total = (augend > 0) + (addend > 0) if total == 0: return (sum_ <= 0) and (carry <= 0) elif total == 1: return (sum_ > 0) and (carry <= 0) elif total == 2: return (sum_ <= 0) and (carry > 0) else: raise ValueError("func recieved unexpected values") return Constraint(func, configs, variables, vartype=vartype, name=name)
python
{ "resource": "" }
q258387
fulladder_gate
validation
def fulladder_gate(variables, vartype=dimod.BINARY, name='FULL_ADDER'): """Full adder. Args: variables (list): Variable labels for the and gate as `[in1, in2, in3, sum, carry]`, where `in1, in2, in3` are inputs to be added and `sum` and 'carry' the resultant outputs. vartype (Vartype, optional, default='BINARY'): Variable type. Accepted input values: * Vartype.SPIN, 'SPIN', {-1, 1} * Vartype.BINARY, 'BINARY', {0, 1} name (str, optional, default='FULL_ADDER'): Name for the constraint. Returns: Constraint(:obj:`.Constraint`): Constraint that is satisfied when its variables are assigned values that match the valid states of a Boolean full adder. Examples: >>> import dwavebinarycsp >>> import dwavebinarycsp.factories.constraint.gates as gates >>> csp = dwavebinarycsp.ConstraintSatisfactionProblem(dwavebinarycsp.BINARY) >>> csp.add_constraint(gates.fulladder_gate(['a', 'b', 'c_in', 'total', 'c_out'], name='FA1')) >>> csp.check({'a': 1, 'b': 0, 'c_in': 1, 'total': 0, 'c_out': 1}) True """ variables = tuple(variables) if vartype is dimod.BINARY: configs = frozenset([(0, 0, 0, 0, 0), (0, 0, 1, 1, 0), (0, 1, 0, 1, 0), (0, 1, 1, 0, 1), (1, 0, 0, 1, 0), (1, 0, 1, 0, 1), (1, 1, 0, 0, 1), (1, 1, 1, 1, 1)]) else: # SPIN, vartype is checked by the decorator configs = frozenset([(-1, -1, -1, -1, -1), (-1, -1, +1, +1, -1), (-1, +1, -1, +1, -1), (-1, +1, +1, -1, +1), (+1, -1, -1, +1, -1), (+1, -1, +1, -1, +1), (+1, +1, -1, -1, +1), (+1, +1, +1, +1, +1)]) def func(in1, in2, in3, sum_, carry): total = (in1 > 0) + (in2 > 0) + (in3 > 0) if total == 0: return (sum_ <= 0) and (carry <= 0) elif total == 1: return (sum_ > 0) and (carry <= 0) elif total == 2: return (sum_ <= 0) and (carry > 0) elif total == 3: return (sum_ > 0) and (carry > 0) else: raise ValueError("func recieved unexpected values") return Constraint(func, configs, variables, vartype=vartype, name=name)
python
{ "resource": "" }
q258388
random_xorsat
validation
def random_xorsat(num_variables, num_clauses, vartype=dimod.BINARY, satisfiable=True):
    """Random XOR constraint satisfaction problem.

    Args:
        num_variables (integer): Number of variables (at least three).
        num_clauses (integer): Number of constraints that together constitute
            the constraint satisfaction problem.
        vartype (Vartype, optional, default='BINARY'): Variable type. Accepted
            input values:

            * Vartype.SPIN, 'SPIN', {-1, 1}
            * Vartype.BINARY, 'BINARY', {0, 1}
        satisfiable (bool, optional, default=True): True if the CSP can be
            satisfied (a solution is planted).

    Returns:
        CSP (:obj:`.ConstraintSatisfactionProblem`): CSP that is satisfied
        when its variables are assigned values that satisfy a XOR
        satisfiability problem.

    Examples:
        This example creates a CSP with 5 variables and two random constraints
        and checks whether a particular assignment of variables satisfies it.

        >>> import dwavebinarycsp
        >>> import dwavebinarycsp.factories as sat
        >>> csp = sat.random_xorsat(5, 2)
        >>> csp.check({0: 1, 1: 0, 2: 0, 3: 1, 4: 1})       # doctest: +SKIP
        True
    """
    if num_variables < 3:
        raise ValueError("a xor problem needs at least 3 variables")
    if num_clauses > 8 * _nchoosek(num_variables, 3):  # 8 different negation patterns
        raise ValueError("too many clauses")

    # also checks the vartype argument
    csp = ConstraintSatisfactionProblem(vartype)

    variables = list(range(num_variables))

    constraints = set()

    if satisfiable:
        values = tuple(vartype.value)
        planted_solution = {v: choice(values) for v in variables}

        configurations = [(0, 0, 0), (0, 1, 1), (1, 0, 1), (1, 1, 0)]

        while len(constraints) < num_clauses:
            # because constraints are hashed on configurations/variables, and because the inputs
            # to xor can be swapped without loss of generality, we can order them
            x, y, z = sample(variables, 3)
            if y > x:
                x, y = y, x

            # get the constraint
            const = xor_gate([x, y, z], vartype=vartype)

            # pick (uniformly) a configuration and determine which variables we need to negate to
            # match the chosen configuration
            config = choice(configurations)

            for idx, v in enumerate(const.variables):
                if config[idx] != (planted_solution[v] > 0):
                    const.flip_variable(v)

            assert const.check(planted_solution)

            constraints.add(const)
    else:
        while len(constraints) < num_clauses:
            # because constraints are hashed on configurations/variables, and because the inputs
            # to xor can be swapped without loss of generality, we can order them
            x, y, z = sample(variables, 3)
            if y > x:
                x, y = y, x

            # get the constraint
            const = xor_gate([x, y, z], vartype=vartype)

            # randomly flip each variable in the constraint
            for idx, v in enumerate(const.variables):
                if random() > .5:
                    const.flip_variable(v)

            # NOTE: no planted-solution assert here -- there is no planted
            # solution in the unsatisfiable branch (the previous version
            # referenced the undefined `planted_solution`, raising NameError)

            constraints.add(const)

    for const in constraints:
        csp.add_constraint(const)

    # in case any variables didn't make it in
    for v in variables:
        csp.add_variable(v)

    return csp
python
{ "resource": "" }
q258389
signature_matches
validation
def signature_matches(func, args=(), kwargs={}): """ Work out if a function is callable with some args or not. """ try: sig = inspect.signature(func) sig.bind(*args, **kwargs) except TypeError: return False else: return True
python
{ "resource": "" }
q258390
last_arg_decorator
validation
def last_arg_decorator(func): """ Allows a function to be used as either a decorator with args, or called as a normal function. @last_arg_decorator def register_a_thing(foo, func, bar=True): .. # Called as a decorator @register_a_thing("abc", bar=False) def my_func(): ... # Called as a normal function call def my_other_func(): ... register_a_thing("def", my_other_func, bar=True) """ @wraps(func) def decorator(*args, **kwargs): if signature_matches(func, args, kwargs): return func(*args, **kwargs) else: return lambda last: func(*(args + (last,)), **kwargs) return decorator
python
{ "resource": "" }
q258391
Registry.register_chooser
validation
def register_chooser(self, chooser, **kwargs): """Adds a model chooser definition to the registry.""" if not issubclass(chooser, Chooser): return self.register_simple_chooser(chooser, **kwargs) self.choosers[chooser.model] = chooser(**kwargs) return chooser
python
{ "resource": "" }
q258392
Registry.register_simple_chooser
validation
def register_simple_chooser(self, model, **kwargs): """ Generates a model chooser definition from a model, and adds it to the registry. """ name = '{}Chooser'.format(model._meta.object_name) attrs = {'model': model} attrs.update(kwargs) chooser = type(name, (Chooser,), attrs) self.register_chooser(chooser) return model
python
{ "resource": "" }
q258393
AudioField.formatter
validation
def formatter(self, api_client, data, newval): """Get audio-related fields Try to find fields for the audio url for specified preferred quality level, or next-lowest available quality url otherwise. """ url_map = data.get("audioUrlMap") audio_url = data.get("audioUrl") # Only an audio URL, not a quality map. This happens for most of the # mobile client tokens and some of the others now. In this case # substitute the empirically determined default values in the format # used by the rest of the function so downstream consumers continue to # work. if audio_url and not url_map: url_map = { BaseAPIClient.HIGH_AUDIO_QUALITY: { "audioUrl": audio_url, "bitrate": 64, "encoding": "aacplus", } } elif not url_map: # No audio url available (e.g. ad tokens) return None valid_audio_formats = [BaseAPIClient.HIGH_AUDIO_QUALITY, BaseAPIClient.MED_AUDIO_QUALITY, BaseAPIClient.LOW_AUDIO_QUALITY] # Only iterate over sublist, starting at preferred audio quality, or # from the beginning of the list if nothing is found. Ensures that the # bitrate used will always be the same or lower quality than was # specified to prevent audio from skipping for slow connections. preferred_quality = api_client.default_audio_quality if preferred_quality in valid_audio_formats: i = valid_audio_formats.index(preferred_quality) valid_audio_formats = valid_audio_formats[i:] for quality in valid_audio_formats: audio_url = url_map.get(quality) if audio_url: return audio_url[self.field] return audio_url[self.field] if audio_url else None
python
{ "resource": "" }
q258394
AdditionalUrlField.formatter
validation
def formatter(self, api_client, data, newval): """Parse additional url fields and map them to inputs Attempt to create a dictionary with keys being user input, and response being the returned URL """ if newval is None: return None user_param = data['_paramAdditionalUrls'] urls = {} if isinstance(newval, str): urls[user_param[0]] = newval else: for key, url in zip(user_param, newval): urls[key] = url return urls
python
{ "resource": "" }
q258395
PandoraModel.from_json_list
validation
def from_json_list(cls, api_client, data): """Convert a list of JSON values to a list of models """ return [cls.from_json(api_client, item) for item in data]
python
{ "resource": "" }
q258396
PandoraModel.populate_fields
validation
def populate_fields(api_client, instance, data): """Populate all fields of a model with data Given a model with a PandoraModel superclass will enumerate all declared fields on that model and populate the values of their Field and SyntheticField classes. All declared fields will have a value after this function runs even if they are missing from the incoming JSON. """ for key, value in instance.__class__._fields.items(): default = getattr(value, "default", None) newval = data.get(value.field, default) if isinstance(value, SyntheticField): newval = value.formatter(api_client, data, newval) setattr(instance, key, newval) continue model_class = getattr(value, "model", None) if newval and model_class: if isinstance(newval, list): newval = model_class.from_json_list(api_client, newval) else: newval = model_class.from_json(api_client, newval) if newval and value.formatter: newval = value.formatter(api_client, newval) setattr(instance, key, newval)
python
{ "resource": "" }
q258397
PandoraModel.from_json
validation
def from_json(cls, api_client, data): """Convert one JSON value to a model object """ self = cls(api_client) PandoraModel.populate_fields(api_client, self, data) return self
python
{ "resource": "" }
q258398
PandoraModel._base_repr
validation
def _base_repr(self, and_also=None): """Common repr logic for subclasses to hook """ items = [ "=".join((key, repr(getattr(self, key)))) for key in sorted(self._fields.keys())] if items: output = ", ".join(items) else: output = None if and_also: return "{}({}, {})".format(self.__class__.__name__, output, and_also) else: return "{}({})".format(self.__class__.__name__, output)
python
{ "resource": "" }
q258399
BasePlayer._send_cmd
validation
def _send_cmd(self, cmd): """Write command to remote process """ self._process.stdin.write("{}\n".format(cmd).encode("utf-8")) self._process.stdin.flush()
python
{ "resource": "" }