_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q268800
Property.validate_value
test
def validate_value(self, value):
    """Check that a proposed property value is acceptable.

    value -- the candidate value

    Raises PropertyError when the property is read-only or the value
    fails schema validation against the property metadata.
    """
    if self.metadata.get('readOnly'):
        raise PropertyError('Read-only property')

    try:
        validate(value, self.metadata)
    except ValidationError:
        raise PropertyError('Invalid property value')
python
{ "resource": "" }
q268801
Property.as_property_description
test
def as_property_description(self):
    """Build a serializable description of this property.

    Returns a dict copied from the metadata, with a 'property' link
    appended under 'links'.
    """
    description = deepcopy(self.metadata)
    links = description.setdefault('links', [])
    links.append({
        'rel': 'property',
        'href': self.href_prefix + self.href,
    })
    return description
python
{ "resource": "" }
q268802
Property.set_value
test
def set_value(self, value):
    """Validate, then store, a new value for this property.

    value -- the value to set

    Raises PropertyError (via validate_value) when the value is
    rejected; the stored value is only updated on success.
    """
    self.validate_value(value)
    self.value.set(value)
python
{ "resource": "" }
q268803
MultipleThings.get_thing
test
def get_thing(self, idx):
    """Return the thing at the given index, or None.

    idx -- index into the things list; may be a string (e.g. taken
           from a URL path) and is coerced to int.

    Returns None when idx is not coercible to an integer or is out
    of range.
    """
    try:
        idx = int(idx)
    except (TypeError, ValueError):
        # idx may be None or a non-numeric string; the original code
        # only caught ValueError, so int(None) raised TypeError.
        return None

    if 0 <= idx < len(self.things):
        return self.things[idx]
    return None
python
{ "resource": "" }
q268804
BaseHandler.initialize
test
def initialize(self, things, hosts):
    """Store the things and allowed hosts on this handler.

    things -- list of Things managed by this server
    hosts -- list of allowed hostnames
    """
    self.things = things
    self.hosts = hosts
python
{ "resource": "" }
q268805
BaseHandler.set_default_headers
test
def set_default_headers(self, *args, **kwargs):
    """Attach permissive CORS headers to every response."""
    for header, value in (
        ('Access-Control-Allow-Origin', '*'),
        ('Access-Control-Allow-Headers',
         'Origin, X-Requested-With, Content-Type, Accept'),
        ('Access-Control-Allow-Methods',
         'GET, HEAD, PUT, POST, DELETE'),
    ):
        self.set_header(header, value)
python
{ "resource": "" }
q268806
ThingHandler.prepare
test
def prepare(self):
    """Reject any request whose Host header is not in the allow list."""
    host = self.request.headers.get('Host', None)
    if host is None or host not in self.hosts:
        raise tornado.web.HTTPError(403)
python
{ "resource": "" }
q268807
ThingHandler.get
test
def get(self, thing_id='0'):
    """Serve a GET request for a single thing.

    If the request asks for a websocket upgrade, delegate to the
    websocket handler; otherwise respond with the thing description
    as JSON, including an 'alternate' link to the websocket endpoint.

    thing_id -- ID of the thing this request is for
    """
    self.thing = self.get_thing(thing_id)
    if self.thing is None:
        self.set_status(404)
        self.finish()
        return

    if self.request.headers.get('Upgrade', '').lower() == 'websocket':
        yield tornado.websocket.WebSocketHandler.get(self)
        return

    self.set_header('Content-Type', 'application/json')

    # Advertise the websocket endpoint alongside the HTTP description.
    scheme = 'wss' if self.request.protocol == 'https' else 'ws'
    ws_href = '{}://{}'.format(
        scheme, self.request.headers.get('Host', ''))

    description = self.thing.as_thing_description()
    description['links'].append({
        'rel': 'alternate',
        'href': '{}{}'.format(ws_href, self.thing.get_href()),
    })

    self.write(json.dumps(description))
    self.finish()
python
{ "resource": "" }
q268808
ThingHandler.on_message
test
def on_message(self, message):
    """Handle an incoming websocket message.

    message -- raw message text to handle
    """
    def send_error(data):
        # Best-effort error reply; ignore an already-closed socket.
        try:
            self.write_message(json.dumps({
                'messageType': 'error',
                'data': data,
            }))
        except tornado.websocket.WebSocketClosedError:
            pass

    try:
        message = json.loads(message)
    except ValueError:
        send_error({
            'status': '400 Bad Request',
            'message': 'Parsing request failed',
        })
        return

    if 'messageType' not in message or 'data' not in message:
        send_error({
            'status': '400 Bad Request',
            'message': 'Invalid message',
        })
        return

    msg_type = message['messageType']
    if msg_type == 'setProperty':
        for property_name, property_value in message['data'].items():
            try:
                self.thing.set_property(property_name, property_value)
            except PropertyError as e:
                self.write_message(json.dumps({
                    'messageType': 'error',
                    'data': {
                        'status': '400 Bad Request',
                        'message': str(e),
                    },
                }))
    elif msg_type == 'requestAction':
        for action_name, action_params in message['data'].items():
            input_ = None
            if 'input' in action_params:
                input_ = action_params['input']

            action = self.thing.perform_action(action_name, input_)
            if action:
                # Run the action asynchronously on the IO loop.
                tornado.ioloop.IOLoop.current().spawn_callback(
                    perform_action,
                    action,
                )
            else:
                self.write_message(json.dumps({
                    'messageType': 'error',
                    'data': {
                        'status': '400 Bad Request',
                        'message': 'Invalid action request',
                        'request': message,
                    },
                }))
    elif msg_type == 'addEventSubscription':
        for event_name in message['data'].keys():
            self.thing.add_event_subscriber(event_name, self)
    else:
        send_error({
            'status': '400 Bad Request',
            'message': 'Unknown messageType: ' + msg_type,
            'request': message,
        })
python
{ "resource": "" }
q268809
ActionsHandler.post
test
def post(self, thing_id='0'):
    """Handle a POST request that requests one or more actions.

    thing_id -- ID of the thing this request is for
    """
    thing = self.get_thing(thing_id)
    if thing is None:
        self.set_status(404)
        return

    try:
        payload = json.loads(self.request.body.decode())
    except ValueError:
        self.set_status(400)
        return

    descriptions = {}
    for action_name, action_params in payload.items():
        input_ = None
        if 'input' in action_params:
            input_ = action_params['input']

        action = thing.perform_action(action_name, input_)
        if action:
            descriptions.update(action.as_action_description())

            # Kick off the action asynchronously.
            tornado.ioloop.IOLoop.current().spawn_callback(
                perform_action,
                action,
            )

    self.set_status(201)
    self.write(json.dumps(descriptions))
python
{ "resource": "" }
q268810
ActionIDHandler.delete
test
def delete(self, thing_id='0', action_name=None, action_id=None):
    """Handle a DELETE request for a single action.

    thing_id -- ID of the thing this request is for
    action_name -- name of the action from the URL path
    action_id -- the action ID from the URL path
    """
    thing = self.get_thing(thing_id)
    if thing is None:
        self.set_status(404)
        return

    removed = thing.remove_action(action_name, action_id)
    self.set_status(204 if removed else 404)
python
{ "resource": "" }
q268811
WebThingServer.start
test
def start(self):
    """Advertise the server over mDNS and start the Tornado IO loop."""
    service_name = '{}._webthing._tcp.local.'.format(self.name)
    self.service_info = ServiceInfo(
        '_webthing._tcp.local.',
        service_name,
        address=socket.inet_aton(get_ip()),
        port=self.port,
        properties={'path': '/'},
        server='{}.local.'.format(socket.gethostname()),
    )
    self.zeroconf = Zeroconf()
    self.zeroconf.register_service(self.service_info)

    self.server.listen(self.port)
    # Blocks until the IO loop is stopped.
    tornado.ioloop.IOLoop.current().start()
python
{ "resource": "" }
q268812
Action.as_action_description
test
def as_action_description(self):
    """Describe this action as a dict keyed by the action name.

    Optional entries ('input', 'timeCompleted') are only included
    when they have been set.
    """
    inner = {
        'href': self.href_prefix + self.href,
        'timeRequested': self.time_requested,
        'status': self.status,
    }

    if self.input is not None:
        inner['input'] = self.input

    if self.time_completed is not None:
        inner['timeCompleted'] = self.time_completed

    return {self.name: inner}
python
{ "resource": "" }
q268813
Action.start
test
def start(self):
    """Run the action: mark it pending, perform it, then finish it."""
    self.status = 'pending'
    # Subscribers see the 'pending' status before the work begins.
    self.thing.action_notify(self)
    self.perform_action()
    self.finish()
python
{ "resource": "" }
q268814
Action.finish
test
def finish(self):
    """Mark the action as completed and notify subscribers."""
    self.status = 'completed'
    self.time_completed = timestamp()
    self.thing.action_notify(self)
python
{ "resource": "" }
q268815
Event.as_event_description
test
def as_event_description(self):
    """Describe this event as a dict keyed by the event name.

    The 'data' entry is only included when the event carries data.
    """
    inner = {'timestamp': self.time}

    if self.data is not None:
        inner['data'] = self.data

    return {self.name: inner}
python
{ "resource": "" }
q268816
get_ip
test
def get_ip():
    """Get the default local IP address.

    From: https://stackoverflow.com/a/28950776
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        # UDP connect() sends no packets; it only selects a route,
        # which reveals the outbound interface's address.
        sock.connect(('10.255.255.255', 1))
        address = sock.getsockname()[0]
    except (socket.error, IndexError):
        address = '127.0.0.1'
    finally:
        sock.close()

    return address
python
{ "resource": "" }
q268817
get_addresses
test
def get_addresses():
    """Get all non-link-local IP addresses of this machine.

    Returns a sorted list of address strings; IPv6 addresses are
    wrapped in square brackets.
    """
    addresses = set()

    for adapter in ifaddr.get_adapters():
        for addr in adapter.ips:
            if addr.is_IPv4:
                ip = addr.ip
                # Skip link-local (169.254.0.0/16) addresses.
                if not ip.startswith('169.254.'):
                    addresses.add(ip)
            elif addr.is_IPv6:
                # Strip a trailing interface name, e.g. '%eth0'.
                ip = addr.ip[0].split('%')[0].lower()
                if not ip.startswith('fe80:'):
                    addresses.add('[{}]'.format(ip))

    return sorted(addresses)
python
{ "resource": "" }
q268818
Value.set
test
def set(self, value):
    """Set a new value, forwarding it to the device if configured.

    value -- value to set
    """
    if self.value_forwarder is not None:
        self.value_forwarder(value)

    self.notify_of_external_update(value)
python
{ "resource": "" }
q268819
Value.notify_of_external_update
test
def notify_of_external_update(self, value):
    """Record and broadcast a new value, but only if it changed.

    value -- new value; None and unchanged values are ignored
    """
    changed = value is not None and value != self.last_value
    if changed:
        self.last_value = value
        self.emit('update', value)
python
{ "resource": "" }
q268820
Thing.as_thing_description
test
def as_thing_description(self):
    """Return the thing state as a Thing Description.

    Returns the state as a dictionary.
    """
    prefix = self.href_prefix
    thing = {
        'name': self.name,
        'href': prefix if prefix else '/',
        '@context': self.context,
        '@type': self.type,
        'properties': self.get_property_descriptions(),
        'actions': {},
        'events': {},
        'links': [
            {'rel': rel, 'href': '{}/{}'.format(prefix, rel)}
            for rel in ('properties', 'actions', 'events')
        ],
    }

    for name, action in self.available_actions.items():
        thing['actions'][name] = action['metadata']
        thing['actions'][name]['links'] = [
            {
                'rel': 'action',
                'href': '{}/actions/{}'.format(prefix, name),
            },
        ]

    for name, event in self.available_events.items():
        thing['events'][name] = event['metadata']
        thing['events'][name]['links'] = [
            {
                'rel': 'event',
                'href': '{}/events/{}'.format(prefix, name),
            },
        ]

    if self.ui_href is not None:
        thing['links'].append({
            'rel': 'alternate',
            'mediaType': 'text/html',
            'href': self.ui_href,
        })

    if self.description:
        thing['description'] = self.description

    return thing
python
{ "resource": "" }
q268821
Thing.set_href_prefix
test
def set_href_prefix(self, prefix):
    """Set the href prefix for this thing and everything it owns.

    prefix -- the prefix
    """
    self.href_prefix = prefix

    for prop in self.properties.values():
        prop.set_href_prefix(prefix)

    for action_list in self.actions.values():
        for action in action_list:
            action.set_href_prefix(prefix)
python
{ "resource": "" }
q268822
Thing.get_property_descriptions
test
def get_property_descriptions(self):
    """Map each property name to its description dict."""
    descriptions = {}
    for name, prop in self.properties.items():
        descriptions[name] = prop.as_property_description()
    return descriptions
python
{ "resource": "" }
q268823
Thing.get_action_descriptions
test
def get_action_descriptions(self, action_name=None):
    """Get descriptions of this thing's requested actions.

    action_name -- Optional action name to filter by

    Returns a list of action description dicts.
    """
    if action_name is None:
        names = self.actions.keys()
    elif action_name in self.actions:
        names = [action_name]
    else:
        names = []

    return [
        action.as_action_description()
        for name in names
        for action in self.actions[name]
    ]
python
{ "resource": "" }
q268824
Thing.get_event_descriptions
test
def get_event_descriptions(self, event_name=None):
    """Get descriptions of the events this thing has emitted.

    event_name -- Optional event name to filter by

    Returns a list of event description dicts.
    """
    return [
        e.as_event_description()
        for e in self.events
        if event_name is None or e.get_name() == event_name
    ]
python
{ "resource": "" }
q268825
Thing.add_property
test
def add_property(self, property_):
    """Register a property on this thing.

    property_ -- property to add
    """
    # Propagate our prefix so the property's links stay consistent.
    property_.set_href_prefix(self.href_prefix)
    self.properties[property_.name] = property_
python
{ "resource": "" }
q268826
Thing.remove_property
test
def remove_property(self, property_):
    """Remove a property from this thing, if it is registered.

    property_ -- property to remove
    """
    self.properties.pop(property_.name, None)
python
{ "resource": "" }
q268827
Thing.get_property
test
def get_property(self, property_name):
    """Get a property's current value.

    property_name -- the property to read

    Returns the value if the property exists, else None.
    """
    prop = self.find_property(property_name)
    return prop.get_value() if prop else None
python
{ "resource": "" }
q268828
Thing.get_properties
test
def get_properties(self):
    """Return a dict mapping each property name to its value."""
    values = {}
    for prop in self.properties.values():
        values[prop.get_name()] = prop.get_value()
    return values
python
{ "resource": "" }
q268829
Thing.set_property
test
def set_property(self, property_name, value):
    """Set a property's value.

    property_name -- name of the property to set
    value -- value to set

    Silently does nothing when the property does not exist.
    """
    prop = self.find_property(property_name)
    if prop:
        prop.set_value(value)
python
{ "resource": "" }
q268830
Thing.get_action
test
def get_action(self, action_name, action_id):
    """Look up an action by name and ID.

    action_name -- name of the action
    action_id -- ID of the action

    Returns the matching action, or None.
    """
    candidates = self.actions.get(action_name, [])
    return next((a for a in candidates if a.id == action_id), None)
python
{ "resource": "" }
q268831
Thing.add_event
test
def add_event(self, event):
    """Record a new event and notify subscribers.

    event -- the event that occurred
    """
    self.events.append(event)
    self.event_notify(event)
python
{ "resource": "" }
q268832
Thing.add_available_event
test
def add_available_event(self, name, metadata):
    """Declare an event type this thing can emit.

    name -- name of the event
    metadata -- event metadata dict (type, description, ...); may be None
    """
    self.available_events[name] = {
        'metadata': metadata if metadata is not None else {},
        'subscribers': set(),
    }
python
{ "resource": "" }
q268833
Thing.perform_action
test
def perform_action(self, action_name, input_=None):
    """Create and register a new action on this thing.

    action_name -- name of the action
    input_ -- any action inputs

    Returns the new action, or None when the action is unknown or
    the input fails schema validation.
    """
    if action_name not in self.available_actions:
        return None

    action_type = self.available_actions[action_name]
    metadata = action_type['metadata']

    if 'input' in metadata:
        try:
            validate(input_, metadata['input'])
        except ValidationError:
            return None

    action = action_type['class'](self, input_=input_)
    action.set_href_prefix(self.href_prefix)
    self.action_notify(action)
    self.actions[action_name].append(action)
    return action
python
{ "resource": "" }
q268834
Thing.remove_action
test
def remove_action(self, action_name, action_id):
    """Cancel and remove an existing action.

    action_name -- name of the action
    action_id -- ID of the action

    Returns True when the action existed and was removed.
    """
    target = self.get_action(action_name, action_id)
    if target is None:
        return False

    # Cancel before removal so in-flight work is stopped.
    target.cancel()
    self.actions[action_name].remove(target)
    return True
python
{ "resource": "" }
q268835
Thing.add_available_action
test
def add_available_action(self, name, metadata, cls):
    """Declare an action type this thing supports.

    name -- name of the action
    metadata -- action metadata dict; may be None
    cls -- class to instantiate for this action
    """
    self.available_actions[name] = {
        'metadata': metadata if metadata is not None else {},
        'class': cls,
    }
    self.actions[name] = []
python
{ "resource": "" }
q268836
Thing.remove_subscriber
test
def remove_subscriber(self, ws):
    """Detach a websocket from thing-level and event subscriptions.

    ws -- the websocket
    """
    if ws in self.subscribers:
        self.subscribers.remove(ws)

    # Also drop any per-event subscriptions this socket holds.
    for event_name in self.available_events:
        self.remove_event_subscriber(event_name, ws)
python
{ "resource": "" }
q268837
Thing.add_event_subscriber
test
def add_event_subscriber(self, name, ws):
    """Subscribe a websocket to a known event type.

    name -- name of the event
    ws -- the websocket

    Unknown event names are ignored.
    """
    event = self.available_events.get(name)
    if event is not None:
        event['subscribers'].add(ws)
python
{ "resource": "" }
q268838
Thing.remove_event_subscriber
test
def remove_event_subscriber(self, name, ws):
    """Unsubscribe a websocket from an event, if it was subscribed.

    name -- name of the event
    ws -- the websocket
    """
    if name in self.available_events:
        # discard() is a no-op when ws is not subscribed.
        self.available_events[name]['subscribers'].discard(ws)
python
{ "resource": "" }
q268839
Thing.property_notify
test
def property_notify(self, property_):
    """Broadcast a property change to all websocket subscribers.

    property_ -- the property that changed
    """
    message = json.dumps({
        'messageType': 'propertyStatus',
        'data': {
            property_.name: property_.get_value(),
        }
    })

    # Iterate a copy; handlers may unsubscribe while we loop.
    for subscriber in list(self.subscribers):
        try:
            subscriber.write_message(message)
        except tornado.websocket.WebSocketClosedError:
            pass
python
{ "resource": "" }
q268840
Thing.action_notify
test
def action_notify(self, action):
    """Broadcast an action status change to all subscribers.

    action -- the action whose status changed
    """
    message = json.dumps({
        'messageType': 'actionStatus',
        'data': action.as_action_description(),
    })

    # Iterate a copy; handlers may unsubscribe while we loop.
    for subscriber in list(self.subscribers):
        try:
            subscriber.write_message(message)
        except tornado.websocket.WebSocketClosedError:
            pass
python
{ "resource": "" }
q268841
Thing.event_notify
test
def event_notify(self, event):
    """Broadcast an event to subscribers of that event type.

    event -- the event that occurred

    Events of undeclared types are silently dropped.
    """
    if event.name not in self.available_events:
        return

    message = json.dumps({
        'messageType': 'event',
        'data': event.as_event_description(),
    })

    for subscriber in self.available_events[event.name]['subscribers']:
        try:
            subscriber.write_message(message)
        except tornado.websocket.WebSocketClosedError:
            pass
python
{ "resource": "" }
q268842
PostgresQuerySet.annotate
test
def annotate(self, **annotations):
    """Annotate, allowing aliases that collide with model field names.

    Django's stock annotate() refuses an alias equal to an existing
    field name on the model. This version works around that by
    annotating under a temporary alias and renaming it back after
    the base implementation has run.
    """
    field_names = {
        field.name for field in self.model._meta.get_fields()
    }

    safe_annotations = {}
    renames = {}

    for alias, expr in annotations.items():
        if alias in field_names:
            # Temporary non-colliding alias; renamed back below.
            tmp_alias = '%s_new' % alias
            safe_annotations[tmp_alias] = expr
            renames[tmp_alias] = alias
        else:
            safe_annotations[alias] = expr

    result = super().annotate(**safe_annotations)
    result.rename_annotations(**renames)

    return result
python
{ "resource": "" }
q268843
PostgresQuerySet.update
test
def update(self, **fields):
    """Update all rows matching the filter, emitting update signals.

    Returns the number of rows affected, like the stock update().
    """
    self._for_write = True

    # chain() replaced clone() for this purpose in Django 2.0.
    if django.VERSION >= (2, 0):
        query = self.query.chain(UpdateQuery)
    else:
        query = self.query.clone(UpdateQuery)

    query._annotations = None
    query.add_update_values(fields)

    connection = django.db.connections[self.db]
    compiler = PostgresReturningUpdateCompiler(query, connection, self.db)

    with transaction.atomic(using=self.db, savepoint=False):
        rows = compiler.execute_sql(CURSOR)
    self._result_cache = None

    # Notify listeners once per affected row.
    for row in rows:
        signals.update.send(self.model, pk=row[0])

    return len(rows)
python
{ "resource": "" }
q268844
PostgresQuerySet.bulk_insert
test
def bulk_insert(self, rows, return_model=False):
    """Create multiple records, honoring any .on_conflict() settings.

    Arguments:
        rows: An array of dictionaries, where each dictionary
              describes the fields to insert.
        return_model (default: False): If model instances should be
              returned rather than just dicts.

    Returns:
        Per inserted row, either a dict (including the pk) or a
        model instance with defaults for unspecified fields.
    """
    if not (self.conflict_target or self.conflict_action):
        # No conflict handling requested; plain Django bulk_create().
        return super().bulk_create(
            [self.model(**fields) for fields in rows])

    compiler = self._build_insert_compiler(rows)
    objs = compiler.execute_sql(return_id=True)

    if return_model:
        return [self.model(**dict(r, **k)) for r, k in zip(rows, objs)]

    return [dict(r, **k) for r, k in zip(rows, objs)]
python
{ "resource": "" }
q268845
PostgresQuerySet.insert
test
def insert(self, **fields):
    """Create one record, honoring any .on_conflict() settings.

    Arguments:
        fields: The fields of the row to create.

    Returns:
        The primary key of the record that was created.
    """
    if not (self.conflict_target or self.conflict_action):
        # No conflict handling requested; plain Django create().
        return super().create(**fields).pk

    compiler = self._build_insert_compiler([fields])
    rows = compiler.execute_sql(return_id=True)

    pk_field_name = self.model._meta.pk.name
    return rows[0][pk_field_name]
python
{ "resource": "" }
q268846
PostgresQuerySet.insert_and_get
test
def insert_and_get(self, **fields):
    """Create one record and return the entire resulting row.

    Honors any .on_conflict() settings; without them this is just
    the standard Django create().

    Arguments:
        fields: The fields of the row to create.

    Returns:
        The model instance representing the row that was created.
    """
    if not self.conflict_target and not self.conflict_action:
        return super().create(**fields)

    compiler = self._build_insert_compiler([fields])
    rows = compiler.execute_sql(return_id=False)
    columns = rows[0]

    # Map database column names to model attribute names; they can
    # differ (db_column), and the database may return extra columns
    # that are not part of the model at all.
    attname_by_column = {
        field.column: field.attname
        for field in self.model._meta.local_concrete_fields
    }

    model_init_fields = {
        attname_by_column[column]: value
        for column, value in columns.items()
        if column in attname_by_column
    }

    return self.model(**model_init_fields)
python
{ "resource": "" }
q268847
PostgresQuerySet._build_insert_compiler
test
def _build_insert_compiler(self, rows: List[Dict]):
    """Build the SQL compiler for an insert/upsert query.

    Arguments:
        rows: A list of dictionaries, where each entry describes a
              record to insert. Every row must specify the exact
              same set of fields.

    Returns:
        The SQL compiler for the insert.
    """
    # All rows must agree on the field set, otherwise the generated
    # statement would silently drop values, e.g.:
    # [dict(first_name='swen'), dict(fist_name='swen', last_name='kooij')]
    expected_field_count = len(rows[0])

    objs = []
    for index, row in enumerate(rows):
        if len(row) != expected_field_count:
            raise SuspiciousOperation((
                'In bulk upserts, you cannot have rows with different field '
                'configurations. Row {0} has a different field config than '
                'the first row.'
            ).format(index))
        objs.append(self.model(**row))

    # This query performs a write.
    self._for_write = True

    insert_fields, update_fields = self._get_upsert_fields(rows[0])

    # Build a normal insert query, then let the PostgreSQL-specific
    # compiler turn it into an ON CONFLICT insert.
    query = PostgresInsertQuery(self.model)
    query.conflict_action = self.conflict_action
    query.conflict_target = self.conflict_target
    query.index_predicate = self.index_predicate
    query.values(objs, insert_fields, update_fields)

    connection = django.db.connections[self.db]
    return PostgresInsertCompiler(query, connection, self.db)
python
{ "resource": "" }
q268848
PostgresQuerySet._is_magical_field
test
def _is_magical_field(self, model_instance, field, is_insert: bool): """Verifies whether this field is gonna modify something on its own. "Magical" means that a field modifies the field value during the pre_save. Arguments: model_instance: The model instance the field is defined on. field: The field to get of whether the field is magical. is_insert: Pretend whether this is an insert? Returns: True when this field modifies something. """ # does this field modify someting upon insert? old_value = getattr(model_instance, field.name, None) field.pre_save(model_instance, is_insert) new_value = getattr(model_instance, field.name, None) return old_value != new_value
python
{ "resource": "" }
q268849
PostgresQuerySet._get_upsert_fields
test
def _get_upsert_fields(self, kwargs):
    """Split the specified fields into (insert_fields, update_fields).

    For an upsert such as:

        INSERT INTO bla ("val1", "val2")
        ON CONFLICT DO UPDATE SET val1 = EXCLUDED.val1
                   ^^^^^^^^^^^^^^            ^^^^^^^^^^^^^^^^^^^^
                   insert_fields             update_fields

    Most fields appear in both lists, but e.g. a DateTime field with
    `auto_now_add=True` only belongs in insert_fields, since it must
    not be overwritten on existing rows. Fields the user did not
    specify are left out of update_fields so an update does not
    clobber them — except "magical" fields that modify themselves
    during pre_save, which have to be detected and included.
    """
    model_instance = self.model(**kwargs)

    insert_fields = []
    update_fields = []

    for field in model_instance._meta.local_concrete_fields:
        # Explicitly specified (by attribute or column name): both.
        if field.name in kwargs or field.column in kwargs:
            insert_fields.append(field)
            update_fields.append(field)
            continue

        # Unspecified but defaulted: inserted, never updated.
        if field.default != NOT_PROVIDED:
            insert_fields.append(field)
            continue

        # 'pk' always refers to the primary key, so the user may
        # have specified `pk` instead of the concrete field name.
        if field.primary_key is True and 'pk' in kwargs:
            insert_fields.append(field)
            update_fields.append(field)
            continue

        # Fields that change their own value in pre_save (auto_now
        # and friends) must be included even though unspecified.
        if self._is_magical_field(model_instance, field, is_insert=True):
            insert_fields.append(field)

        if self._is_magical_field(model_instance, field, is_insert=False):
            update_fields.append(field)

    return insert_fields, update_fields
python
{ "resource": "" }
q268850
PostgresManager._on_model_save
test
def _on_model_save(sender, **kwargs):
    """Dispatch a create or update signal when a model is saved."""
    instance = kwargs['instance']

    if kwargs['created']:
        signals.create.send(sender, pk=instance.pk)
    else:
        signals.update.send(sender, pk=instance.pk)
python
{ "resource": "" }
q268851
PostgresManager._on_model_delete
test
def _on_model_delete(sender, **kwargs):
    """Dispatch a delete signal when a model is deleted."""
    deleted = kwargs['instance']
    signals.delete.send(sender, pk=deleted.pk)
python
{ "resource": "" }
q268852
IsNotNone
test
def IsNotNone(*fields, default=None):
    """Selects whichever field is not None, in the specified order.

    Arguments:
        fields: The fields to attempt to get a value from, in order.
        default: The value to return in case all values are None.

    Returns:
        A Case-When expression that tries each field and returns the
        specified default value when all of them are None.
    """
    cases = []
    for field in reversed(fields):
        not_null = ~expressions.Q(**{field: None})
        cases.append(
            expressions.When(not_null, then=expressions.F(field)))

    return expressions.Case(
        *cases,
        default=expressions.Value(default),
        output_field=CharField()
    )
python
{ "resource": "" }
q268853
HStoreValue.resolve_expression
test
def resolve_expression(self, *args, **kwargs):
    """Resolve any expressions contained in the dictionary values."""
    resolved = {}
    for key, value in self.value.items():
        if hasattr(value, 'resolve_expression'):
            value = value.resolve_expression(*args, **kwargs)
        resolved[key] = value

    return HStoreValue(resolved)
python
{ "resource": "" }
q268854
HStoreValue.as_sql
test
def as_sql(self, compiler, connection):
    """Compile the HStore value into SQL.

    Entries whose values are expressions are compiled as well.

    Given a dictionary like:

        dict(key1='val1', key2='val2')

    The resulting SQL will be:

        hstore(hstore('key1', 'val1'), hstore('key2', 'val2'))
    """
    pieces = []
    for key, value in self.value.items():
        if hasattr(value, 'as_sql'):
            sql, params = value.as_sql(compiler, connection)
            pieces.append('hstore(\'%s\', %s)' % (key, sql % params))
        elif value is not None:
            pieces.append('hstore(\'%s\', \'%s\')' % ((key, value)))
        else:
            pieces.append('hstore(\'%s\', NULL)' % key)

    return ' || '.join(pieces), []
python
{ "resource": "" }
q268855
HStoreColumn.relabeled_clone
test
def relabeled_clone(self, relabels):
    """Return a clone of this expression with its alias re-labeled."""
    new_alias = relabels.get(self.alias, self.alias)
    return self.__class__(
        new_alias,
        self.target,
        self.hstore_key,
        self.output_field,
    )
python
{ "resource": "" }
q268856
PostgresQuery.add_join_conditions
test
def add_join_conditions(self, conditions: Dict[str, Any]) -> None:
    """Add extra conditions to an existing JOIN.

    Allows, for example:

        INNER JOIN othertable ON (mytable.id = othertable.other_id
                                  AND [extra conditions])

    The join itself must already be generated by something else in
    the query; this only augments it.
    """
    alias = self.get_initial_alias()
    opts = self.get_meta()

    for name, value in conditions.items():
        parts = name.split(LOOKUP_SEP)
        join_info = self.setup_joins(parts, opts, alias, allow_many=True)
        self.trim_joins(join_info[1], join_info[3], join_info[4])

        target_table = join_info[3][-1]
        field = join_info[1][-1]

        join = self.alias_map.get(target_table)
        if not join:
            raise SuspiciousOperation((
                'Cannot add an extra join condition for "%s", there\'s no'
                ' existing join to add it to.'
            ) % target_table)

        # Promote the plain Join to a ConditionalJoin so the extra
        # condition can be attached.
        if not isinstance(join, ConditionalJoin):
            join = ConditionalJoin.from_join(join)
            self.alias_map[target_table] = join

        join.add_condition(field, value)
python
{ "resource": "" }
q268857
PostgresQuery._is_hstore_field
test
def _is_hstore_field(self, field_name: str) -> Tuple[bool, Optional[models.Field]]:
    """Check whether the named field is an HStoreField.

    The name is matched against both the attribute name and the
    database column name.

    Returns:
        A tuple (is_hstore, field) where field is the matching field
        instance, or None when nothing matched.
    """
    match = next(
        (
            field
            for field in self.model._meta.local_concrete_fields
            if field.name == field_name or field.column == field_name
        ),
        None,
    )

    return isinstance(match, HStoreField), match
python
{ "resource": "" }
q268858
PostgresInsertQuery.values
test
def values(self, objs: List, insert_fields: List, update_fields: Optional[List]=None):
    """Sets the values to be used in this query.

    Insert fields are fields that are definitely going to be inserted,
    and if an existing row is found, are going to be overwritten with
    the specified value.

    Update fields are fields that should be overwritten in case an
    update takes place rather than an insert. If we're dealing with a
    INSERT, these will not be used.

    Arguments:
        objs: The objects to apply this query to.
        insert_fields: The fields to use in the INSERT statement.
        update_fields: The fields to only use in the UPDATE statement.
            Defaults to no update fields.
    """
    self.insert_values(insert_fields, objs, raw=False)
    # BUG FIX: the previous default was a shared mutable list ([]);
    # if a caller mutated self.update_fields in place, the shared
    # default leaked across calls. None + fresh list avoids that.
    self.update_fields = update_fields if update_fields is not None else []
python
{ "resource": "" }
q268859
HStoreRequiredSchemaEditorMixin._create_hstore_required
test
def _create_hstore_required(self, table_name, field, key):
    """Adds a "required" CONSTRAINT enforcing presence of a hstore key."""
    constraint = self._required_constraint_name(table_name, field, key)
    statement = self.sql_hstore_required_create.format(
        name=self.quote_name(constraint),
        table=self.quote_name(table_name),
        field=self.quote_name(field.column),
        key=key,
    )
    self.execute(statement)
python
{ "resource": "" }
q268860
HStoreRequiredSchemaEditorMixin._rename_hstore_required
test
def _rename_hstore_required(self, old_table_name, new_table_name, old_field, new_field, key):
    """Renames an existing REQUIRED CONSTRAINT for the given hstore key."""
    statement = self.sql_hstore_required_rename.format(
        table=self.quote_name(new_table_name),
        old_name=self.quote_name(
            self._required_constraint_name(old_table_name, old_field, key)),
        new_name=self.quote_name(
            self._required_constraint_name(new_table_name, new_field, key)),
    )
    self.execute(statement)
python
{ "resource": "" }
q268861
HStoreRequiredSchemaEditorMixin._drop_hstore_required
test
def _drop_hstore_required(self, table_name, field, key):
    """Drops the REQUIRED CONSTRAINT for the given hstore key."""
    constraint = self._required_constraint_name(table_name, field, key)
    statement = self.sql_hstore_required_drop.format(
        table=self.quote_name(table_name),
        name=self.quote_name(constraint),
    )
    self.execute(statement)
python
{ "resource": "" }
q268862
HStoreRequiredSchemaEditorMixin._required_constraint_name
test
def _required_constraint_name(table: str, field, key): """Gets the name for a CONSTRAINT that applies to a single hstore key. Arguments: table: The name of the table the field is a part of. field: The hstore field to create a UNIQUE INDEX for. key: The name of the hstore key to create the name for. Returns: The name for the UNIQUE index. """ return '{table}_{field}_required_{postfix}'.format( table=table, field=field.column, postfix=key )
python
{ "resource": "" }
q268863
ConditionalUniqueIndex.create_sql
test
def create_sql(self, model, schema_editor, using=''):
    """Creates the actual SQL used when applying the migration.

    Django 2.0 changed Index.create_sql to return a Statement object
    rather than a plain string, hence the version branch below.
    """
    if django.VERSION >= (2, 0):
        # Django >= 2.0: patch the generated Statement so it renders
        # with our template (which has a WHERE clause) and carries the
        # partial-index condition.
        statement = super().create_sql(model, schema_editor, using)
        statement.template = self.sql_create_index
        statement.parts['condition'] = self.condition
        return statement
    else:
        # Django < 2.0: interpolate the template ourselves, merging the
        # stock template values with our extra 'condition' parameter.
        sql_create_index = self.sql_create_index
        sql_parameters = {
            **Index.get_sql_create_template_values(self, model, schema_editor, using),
            'condition': self.condition
        }
        return sql_create_index % sql_parameters
python
{ "resource": "" }
q268864
create_command
test
def create_command(text, commands):
    """Builds a custom setup.py command class that runs shell commands.

    Arguments:
        text: Description shown for the command.
        commands: Sequence of argument lists, each run via subprocess.

    Returns:
        A BaseCommand subclass suitable for use in setup(cmdclass=...).
    """
    class CustomCommand(BaseCommand):
        description = text

        def run(self):
            # check_call fails fast if any command exits non-zero
            for argv in commands:
                subprocess.check_call(argv)

    return CustomCommand
python
{ "resource": "" }
q268865
_get_backend_base
test
def _get_backend_base():
    """Gets the base class for the custom database back-end.

    This should be the Django PostgreSQL back-end. However, some people
    are already using a custom back-end from another package. We are
    nice people and expose an option that allows them to configure the
    back-end we base upon. As long as the specified base eventually
    also has the PostgreSQL back-end as a base, then everything should
    work as intended.

    Returns:
        The DatabaseWrapper class of the configured base back-end.

    Raises:
        ImproperlyConfigured: When the configured module does not
            define a DatabaseWrapper class, or that class does not
            inherit from the PostgreSQL back-end.
    """
    base_class_name = getattr(
        settings,
        'POSTGRES_EXTRA_DB_BACKEND_BASE',
        'django.db.backends.postgresql'
    )

    base_class_module = importlib.import_module(base_class_name + '.base')
    base_class = getattr(base_class_module, 'DatabaseWrapper', None)

    if not base_class:
        raise ImproperlyConfigured((
            '\'%s\' is not a valid database back-end.'
            ' The module does not define a DatabaseWrapper class.'
            ' Check the value of POSTGRES_EXTRA_DB_BACKEND_BASE.'
        ) % base_class_name)

    # BUG FIX: the original used isinstance(), which is always False
    # for a class object, so a misconfigured back-end was never
    # rejected. issubclass() performs the intended inheritance check,
    # and the message now correctly says "does not inherit".
    if not issubclass(base_class, Psycopg2DatabaseWrapper):
        raise ImproperlyConfigured((
            '\'%s\' is not a valid database back-end.'
            ' It does not inherit from the PostgreSQL back-end.'
            ' Check the value of POSTGRES_EXTRA_DB_BACKEND_BASE.'
        ) % base_class_name)

    return base_class
python
{ "resource": "" }
q268866
DatabaseWrapper.prepare_database
test
def prepare_database(self):
    """Ran to prepare the configured database.

    This is where we enable the `hstore` extension if it wasn't
    enabled yet.
    """
    super().prepare_database()
    with self.cursor() as cursor:
        try:
            # idempotent: does nothing if the extension already exists
            cursor.execute('CREATE EXTENSION IF NOT EXISTS hstore')
        except ProgrammingError:  # permission denied
            # Best effort: CREATE EXTENSION requires superuser rights,
            # so warn instead of failing — projects that never use
            # hstore columns keep working.
            logger.warning(
                'Failed to create "hstore" extension. '
                'Tables with hstore columns may fail to migrate. '
                'If hstore is needed, make sure you are connected '
                'to the database as a superuser '
                'or add the extension manually.',
                exc_info=True)
python
{ "resource": "" }
q268867
HStoreField.get_prep_value
test
def get_prep_value(self, value):
    """Prepare *value* for the database without stringifying everything.

    The stock HStoreField casts every value to str; psqlextra allows
    expressions as hstore values, so expressions are passed through
    untouched and None is preserved (SQL NULL).
    """
    value = Field.get_prep_value(self, value)

    if isinstance(value, dict):
        converted = {}
        for key, item in value.items():
            # expressions and None pass through; everything else
            # becomes a string, matching hstore's text-only values
            if isinstance(item, Expression) or item is None:
                converted[key] = item
            else:
                converted[key] = str(item)
        return converted

    if isinstance(value, list):
        return [str(element) for element in value]

    return value
python
{ "resource": "" }
q268868
PostgresReturningUpdateCompiler._form_returning
test
def _form_returning(self): """Builds the RETURNING part of the query.""" qn = self.connection.ops.quote_name return ' RETURNING %s' % qn(self.query.model._meta.pk.attname)
python
{ "resource": "" }
q268869
PostgresInsertCompiler.as_sql
test
def as_sql(self, return_id=False):
    """Builds the SQL INSERT statements, one per query Django generated."""
    rewritten = []
    for sql, params in super().as_sql():
        rewritten.append(self._rewrite_insert(sql, params, return_id))
    return rewritten
python
{ "resource": "" }
q268870
PostgresInsertCompiler._rewrite_insert
test
def _rewrite_insert(self, sql, params, return_id=False):
    """Rewrites a formed SQL INSERT query to include the ON CONFLICT clause.

    Arguments:
        sql: The SQL INSERT query to rewrite.
        params: The parameters passed to the query.
        return_id: When True, only the primary key is returned rather
            than all columns.

    Returns:
        A tuple of the rewritten SQL query and new params.
    """
    if return_id:
        returning = self.qn(self.query.model._meta.pk.attname)
    else:
        returning = '*'

    action = self.query.conflict_action.value
    if action == 'UPDATE':
        return self._rewrite_insert_update(sql, params, returning)
    if action == 'NOTHING':
        return self._rewrite_insert_nothing(sql, params, returning)

    raise SuspiciousOperation((
        '%s is not a valid conflict action, specify '
        'ConflictAction.UPDATE or ConflictAction.NOTHING.'
    ) % str(self.query.conflict_action))
python
{ "resource": "" }
q268871
PostgresInsertCompiler._rewrite_insert_update
test
def _rewrite_insert_update(self, sql, params, returning):
    """Appends the ON CONFLICT DO UPDATE clause to a formed INSERT query."""
    # on conflict, every update field is overwritten with the
    # incoming (EXCLUDED) value
    update_columns = ', '.join([
        '{0} = EXCLUDED.{0}'.format(self.qn(field.column))
        for field in self.query.update_fields
    ])

    # the columns to watch for conflicts on
    conflict_target = self._build_conflict_target()
    index_predicate = self.query.index_predicate

    # a partial index requires its predicate to be repeated in the
    # conflict target so PostgreSQL can pick the right index
    if index_predicate:
        sql_template = (
            '{insert} ON CONFLICT {conflict_target} WHERE {index_predicate} DO UPDATE '
            'SET {update_columns} RETURNING {returning}'
        )
    else:
        sql_template = (
            '{insert} ON CONFLICT {conflict_target} DO UPDATE '
            'SET {update_columns} RETURNING {returning}'
        )

    rewritten = sql_template.format(
        insert=sql,
        conflict_target=conflict_target,
        update_columns=update_columns,
        returning=returning,
        index_predicate=index_predicate,
    )
    return (rewritten, params)
python
{ "resource": "" }
q268872
PostgresInsertCompiler._rewrite_insert_nothing
test
def _rewrite_insert_nothing(self, sql, params, returning): """Rewrites a formed SQL INSERT query to include the ON CONFLICT DO NOTHING clause.""" # build the conflict target, the columns to watch # for conflicts conflict_target = self._build_conflict_target() where_clause = ' AND '.join([ '{0} = %s'.format(self._format_field_name(field_name)) for field_name in self.query.conflict_target ]) where_clause_params = [ self._format_field_value(field_name) for field_name in self.query.conflict_target ] params = params + tuple(where_clause_params) # this looks complicated, and it is, but it is for a reason... a normal # ON CONFLICT DO NOTHING doesn't return anything if the row already exists # so we do DO UPDATE instead that never executes to lock the row, and then # select from the table in case we're dealing with an existing row.. return ( ( 'WITH insdata AS (' '{insert} ON CONFLICT {conflict_target} DO UPDATE' ' SET {pk_column} = NULL WHERE FALSE RETURNING {returning})' ' SELECT * FROM insdata UNION ALL' ' SELECT {returning} FROM {table} WHERE {where_clause} LIMIT 1;' ).format( insert=sql, conflict_target=conflict_target, pk_column=self.qn(self.query.model._meta.pk.column), returning=returning, table=self.query.objs[0]._meta.db_table, where_clause=where_clause ), params )
python
{ "resource": "" }
q268873
PostgresInsertCompiler._build_conflict_target
test
def _build_conflict_target(self):
    """Builds the `conflict_target` for the ON CONFLICT clause."""
    invalid_target = (
        '%s is not a valid conflict target, specify '
        'a list of column names, or tuples with column '
        'names and hstore key.'
    )

    if not isinstance(self.query.conflict_target, list):
        raise SuspiciousOperation(invalid_target % str(self.query.conflict_target))

    conflict_target = []
    for field_name in self.query.conflict_target:
        # reject names that don't resolve to a concrete model field
        if not self._get_model_field(self._normalize_field_name(field_name)):
            raise SuspiciousOperation(invalid_target % str(field_name))

        if isinstance(field_name, tuple):
            # (column, hstore key) pair -> conflict on one hstore key
            conflict_target.append(
                '(%s->\'%s\')' % (
                    self._format_field_name(field_name),
                    field_name[1]
                )
            )
        else:
            conflict_target.append(self._format_field_name(field_name))

    return '(%s)' % ','.join(conflict_target)
python
{ "resource": "" }
q268874
PostgresInsertCompiler._get_model_field
test
def _get_model_field(self, name: str):
    """Resolves a field on this query's model by name or column.

    Arguments:
        name: The name of the field to look for. This can be both the
            actual field name, or the name of the column; both work.

    Returns:
        The matching field, or None when no such field exists.
    """
    field_name = self._normalize_field_name(name)

    # 'pk' is a Django-wide alias that always refers to the model's
    # primary key; honour that de-facto standard behaviour
    if field_name == 'pk' and self.query.model._meta.pk:
        return self.query.model._meta.pk

    return next(
        (candidate for candidate in self.query.model._meta.local_concrete_fields
         if field_name in (candidate.name, candidate.column)),
        None,
    )
python
{ "resource": "" }
q268875
PostgresInsertCompiler._format_field_name
test
def _format_field_name(self, field_name) -> str:
    """Quotes a field's column name for direct use in SQL.

    Arguments:
        field_name: The field name to format.

    Returns:
        The specified field's quoted column name.
    """
    return self.qn(self._get_model_field(field_name).column)
python
{ "resource": "" }
q268876
PostgresInsertCompiler._format_field_value
test
def _format_field_value(self, field_name) -> str:
    """Formats a field's value for usage in SQL.

    The value is read from the first object in the batch being
    inserted.

    Arguments:
        field_name: The name of the field to format the value of.

    Returns:
        The field's value formatted for usage in SQL.
    """
    field_name = self._normalize_field_name(field_name)
    field = self._get_model_field(field_name)

    return SQLInsertCompiler.prepare_value(
        self,
        field,
        # Note: this deliberately doesn't use `pre_save_val` as we don't
        # want things like auto_now on DateTimeField (etc.) to change the
        # value. We rely on pre_save having already been done by the
        # underlying compiler so that things like FileField have already had
        # the opportunity to save out their data.
        getattr(self.query.objs[0], field.attname)
    )
python
{ "resource": "" }
q268877
HStoreUniqueSchemaEditorMixin._create_hstore_unique
test
def _create_hstore_unique(self, model, field, keys):
    """Adds a UNIQUE constraint over the specified hstore keys."""
    constraint = self._unique_constraint_name(model._meta.db_table, field, keys)
    # each key becomes an expression column: (column->'key')
    key_columns = ['(%s->\'%s\')' % (field.column, key) for key in keys]
    statement = self.sql_hstore_unique_create.format(
        name=self.quote_name(constraint),
        table=self.quote_name(model._meta.db_table),
        columns=','.join(key_columns),
    )
    self.execute(statement)
python
{ "resource": "" }
q268878
HStoreUniqueSchemaEditorMixin._rename_hstore_unique
test
def _rename_hstore_unique(self, old_table_name, new_table_name, old_field, new_field, keys):
    """Renames an existing UNIQUE constraint over the given hstore keys."""
    statement = self.sql_hstore_unique_rename.format(
        old_name=self.quote_name(
            self._unique_constraint_name(old_table_name, old_field, keys)),
        new_name=self.quote_name(
            self._unique_constraint_name(new_table_name, new_field, keys)),
    )
    self.execute(statement)
python
{ "resource": "" }
q268879
HStoreUniqueSchemaEditorMixin._drop_hstore_unique
test
def _drop_hstore_unique(self, model, field, keys):
    """Drops the UNIQUE constraint covering the given hstore keys."""
    constraint = self._unique_constraint_name(model._meta.db_table, field, keys)
    self.execute(self.sql_hstore_unique_drop.format(name=self.quote_name(constraint)))
python
{ "resource": "" }
q268880
HStoreUniqueSchemaEditorMixin._unique_constraint_name
test
def _unique_constraint_name(table: str, field, keys): """Gets the name for a UNIQUE INDEX that applies to one or more keys in a hstore field. Arguments: table: The name of the table the field is a part of. field: The hstore field to create a UNIQUE INDEX for. key: The name of the hstore key to create the name for. This can also be a tuple of multiple names. Returns: The name for the UNIQUE index. """ postfix = '_'.join(keys) return '{table}_{field}_unique_{postfix}'.format( table=table, field=field.column, postfix=postfix )
python
{ "resource": "" }
q268881
HStoreUniqueSchemaEditorMixin._iterate_uniqueness_keys
test
def _iterate_uniqueness_keys(self, field):
    """Yields composed key groups marked as "unique" on the given field.

    Arguments:
        field: The field whose "uniqueness" declarations to iterate over.
    """
    # fields without a uniqueness declaration yield nothing
    for keys in getattr(field, 'uniqueness', None) or []:
        yield self._compose_keys(keys)
python
{ "resource": "" }
q268882
ConditionalJoin.add_condition
test
def add_condition(self, field, value: Any) -> None:
    """Registers an extra condition to apply to this join.

    Arguments:
        field: The field that the condition will apply to.
        value: The value to compare.
    """
    self.extra_conditions += [(field, value)]
python
{ "resource": "" }
q268883
ConditionalJoin.as_sql
test
def as_sql(self, compiler, connection) -> Tuple[str, List[Any]]:
    """Compiles this JOIN into a SQL string.

    The base class generates something like:

        INNER JOIN "other" ON ("table"."id" = "other"."other_id")

    and this injects the registered extra conditions just before the
    closing parenthesis of the ON clause.
    """
    sql, params = super().as_sql(compiler, connection)
    qn = compiler.quote_name_unless_alias

    # generate the extra conditions
    extra_conditions = ' AND '.join([
        '{}.{} = %s'.format(
            qn(self.table_name),
            qn(field.column)
        )
        for field, value in self.extra_conditions
    ])

    # add to the existing params, so the connector will
    # actually nicely format the value for us
    for _, value in self.extra_conditions:
        params.append(value)

    # BUG FIX: the original used sql.replace(')', ...), which rewrote
    # *every* ')' in the statement and corrupted the SQL whenever the
    # ON clause contained nested parentheses. Only the final closing
    # parenthesis must be extended.
    close = sql.rindex(')')
    rewritten_sql = '%s AND %s)%s' % (sql[:close], extra_conditions, sql[close + 1:])

    return rewritten_sql, params
python
{ "resource": "" }
q268884
tdist95conf_level
test
def tdist95conf_level(df):
    """Approximate the 95% confidence interval for Student's T distribution.

    Given the degrees of freedom, returns an approximation to the 95%
    confidence interval for the Student's T distribution.

    Args:
        df: An integer, the number of degrees of freedom.

    Returns:
        A float.
    """
    df = int(round(df))
    highest_table_df = len(_T_DIST_95_CONF_LEVELS)

    # beyond the lookup table, fall back to coarse known critical
    # values (largest matching threshold wins)
    for threshold, critical in ((200, 1.960), (100, 1.984), (80, 1.990),
                                (60, 2.000), (50, 2.009), (40, 2.021)):
        if df >= threshold:
            return critical

    if df >= highest_table_df:
        return _T_DIST_95_CONF_LEVELS[highest_table_df - 1]
    return _T_DIST_95_CONF_LEVELS[df]
python
{ "resource": "" }
q268885
pooled_sample_variance
test
def pooled_sample_variance(sample1, sample2):
    """Find the pooled sample variance for two samples.

    Args:
        sample1: one sample.
        sample2: the other sample.

    Returns:
        Pooled sample variance, as a float.
    """
    def sum_of_squares(sample):
        # squared deviations from the sample's own mean
        center = statistics.mean(sample)
        return math.fsum((value - center) ** 2 for value in sample)

    deg_freedom = len(sample1) + len(sample2) - 2
    return (sum_of_squares(sample1) + sum_of_squares(sample2)) / float(deg_freedom)
python
{ "resource": "" }
q268886
tscore
test
def tscore(sample1, sample2):
    """Calculate a t-test score for the difference between two samples.

    Args:
        sample1: one sample.
        sample2: the other sample.

    Returns:
        The t-test score, as a float.

    Raises:
        ValueError: if the samples differ in length.
    """
    if len(sample1) != len(sample2):
        raise ValueError("different number of values")
    mean_diff = statistics.mean(sample1) - statistics.mean(sample2)
    error = pooled_sample_variance(sample1, sample2) / len(sample1)
    return mean_diff / math.sqrt(error * 2)
python
{ "resource": "" }
q268887
is_significant
test
def is_significant(sample1, sample2):
    """Determine whether two samples differ significantly.

    This uses a Student's two-sample, two-tailed t-test with alpha=0.95.

    Args:
        sample1: one sample.
        sample2: the other sample.

    Returns:
        (significant, t_score) where significant is a bool indicating
        whether the two samples differ significantly; t_score is the
        score from the two-sample T test.
    """
    critical_value = tdist95conf_level(len(sample1) + len(sample2) - 2)
    t_score = tscore(sample1, sample2)
    return (abs(t_score) >= critical_value, t_score)
python
{ "resource": "" }
q268888
topoSort
test
def topoSort(roots, getParents):
    """Return a topological sorting of nodes in a graph.

    roots - list of root nodes to search from
    getParents - function which returns the parents of a given node
    """
    PRE, POST = 0, 1
    ordering = []
    seen = set()
    # Iterative DFS with an explicit stack to avoid recursion limits
    # on large graphs; a POST entry emits a node after all its parents.
    pending = [(node, PRE) for node in roots]
    while pending:
        node, phase = pending.pop()
        if phase == PRE:
            if node in seen:
                continue
            seen.add(node)
            pending.append((node, POST))
            pending.extend((parent, PRE) for parent in getParents(node))
        else:
            assert node in seen
            ordering.append(node)
    return ordering
python
{ "resource": "" }
q268889
n_queens
test
def n_queens(queen_count):
    """N-Queens solver.

    Args:
        queen_count: the number of queens to solve for. This is also
            the board size.

    Yields:
        Solutions to the problem. Each yielded value looks like
        (3, 8, 2, 1, 4, ..., 6) where each number is the column
        position for the queen, and the index into the tuple indicates
        the row.
    """
    rows = range(queen_count)
    for candidate in permutations(rows):
        # a permutation already guarantees one queen per row and per
        # column; it is a solution iff all diagonals are distinct too
        rising = set(candidate[i] + i for i in rows)
        falling = set(candidate[i] - i for i in rows)
        if len(rising) == queen_count == len(falling):
            yield candidate
python
{ "resource": "" }
q268890
UCTNode.play
test
def play(self, board):
    """ uct tree search """
    # One search iteration: walk down the tree via select()
    # (selection), create at most one new child node (expansion),
    # finish the game randomly (random_playout) and propagate the
    # outcome back along `path` (update_path).
    color = board.color
    node = self
    path = [node]
    while True:
        pos = node.select(board)
        if pos == PASS:
            # no playable move from this node; go straight to playout
            break
        board.move(pos)
        child = node.pos_child[pos]
        if not child:
            # first visit of this move: create the node, remember the
            # still-unexplored follow-up moves, and stop descending
            child = node.pos_child[pos] = UCTNode()
            child.unexplored = board.useful_moves()
            child.pos = pos
            child.parent = node
            path.append(child)
            break
        path.append(child)
        node = child
    self.random_playout(board)
    self.update_path(board, color, path)
python
{ "resource": "" }
q268891
UCTNode.select
test
def select(self, board):
    """ select move; unexplored children first, then according to uct value """
    if self.unexplored:
        # pick a random unexplored move, removing it via swap-with-last
        choice = random.randrange(len(self.unexplored))
        pos = self.unexplored[choice]
        self.unexplored[choice] = self.unexplored[len(self.unexplored) - 1]
        self.unexplored.pop()
        return pos
    if self.bestchild:
        return self.bestchild.pos
    return PASS
python
{ "resource": "" }
q268892
UCTNode.random_playout
test
def random_playout(self, board):
    """ random play until both players pass """
    moves_left = MAXMOVES  # XXX while not self.finished?
    # hard cap on the number of moves prevents endless games
    while moves_left > 0 and not board.finished:
        board.move(board.random_move())
        moves_left -= 1
python
{ "resource": "" }
q268893
filter_benchmarks
test
def filter_benchmarks(benchmarks, bench_funcs, base_ver):
    """Filters out benchmarks that the target Python cannot run.

    Args:
        benchmarks: a set() of benchmark names.
        bench_funcs: dict mapping benchmark names to functions.
        base_ver: version tuple of the interpreter being benchmarked.

    Returns:
        The filtered set of benchmark names (mutated in place).
    """
    # iterate over a snapshot so entries can be discarded while looping
    for name in list(benchmarks):
        bench_func = bench_funcs[name]
        python2_only = getattr(bench_func, '_python2_only', False)
        if python2_only and (3, 0) <= base_ver:
            benchmarks.discard(name)
            logging.info("Skipping Python2-only benchmark %s; "
                         "not compatible with Python %s" % (name, base_ver))
    return benchmarks
python
{ "resource": "" }
q268894
expand_benchmark_name
test
def expand_benchmark_name(bm_name, bench_groups):
    """Recursively expand name benchmark names.

    Args:
        bm_name: string naming a benchmark or benchmark group.
        bench_groups: dict mapping group names to lists of members.

    Yields:
        Names of actual benchmarks, with all group names fully expanded.
    """
    members = bench_groups.get(bm_name)
    if not members:
        # not a group: the name refers to a concrete benchmark
        yield bm_name
        return
    for member in members:
        for expanded in expand_benchmark_name(member, bench_groups):
            yield expanded
python
{ "resource": "" }
q268895
gen_string_table
test
def gen_string_table(n):
    """Generates the list of strings that will be used in the benchmarks.

    All strings have repeated prefixes and suffices, and n specifies
    the number of repetitions.
    """
    # (padding char, core) pairs, in the exact order the regex table
    # expects; padding is repeated n times on both sides
    pad_specs = (
        [('-', 'Perl'), ('P', 'Perl'), ('-', 'Perl'), ('-', 'Perl'),
         ('-', 'Python'), ('P', 'Python')]
        + [('-', 'Python')] * 4
        + [('-', 'Perl'), ('P', 'Perl'), ('-', 'Perl'), ('-', 'Perl'),
           ('-', 'PythonPython'), ('P', 'PythonPython')]
        + [('-', 'a5,b7,c9,')] * 4
        + [('-', 'Python')]
    )

    strings = []
    for pad, core in pad_specs:
        text = pad * n + core + pad * n
        if USE_BYTES_IN_PY3K:
            text = text.encode('latin1')
        strings.append(text)
    return strings
python
{ "resource": "" }
q268896
init_benchmarks
test
def init_benchmarks(n_values=None):
    """Initialize the strings we'll run the regexes against.

    The strings used in the benchmark are prefixed and suffixed by
    strings that are repeated n times. The sequence n_values contains
    the values for n. If n_values is None the values of n from the
    original benchmark are used.

    Returns:
        A list of (regex, subject string) pairs to benchmark against.
    """
    if n_values is None:
        n_values = (0, 5, 50, 250, 1000, 5000, 10000)
    string_tables = {n: gen_string_table(n) for n in n_values}
    regexs = gen_regex_table()

    # pair each regex with its matching subject string, for every n
    return [
        (regexs[i], string_tables[n][i])
        for n in n_values
        for i in xrange(len(regexs))
    ]
python
{ "resource": "" }
q268897
Spline.GetDomain
test
def GetDomain(self):
    """Returns the domain of the B-Spline as a (start, end) knot pair."""
    lo = self.knots[self.degree - 1]
    hi = self.knots[len(self.knots) - self.degree]
    return (lo, hi)
python
{ "resource": "" }
q268898
Mattermost.fetch_items
test
def fetch_items(self, category, **kwargs):
    """Fetch the messages.

    :param category: the category of items to fetch
    :param kwargs: backend arguments; must contain 'from_date'
    :returns: a generator of items
    """
    from_date = kwargs['from_date']

    logger.info("Fetching messages of '%s' - '%s' channel from %s",
                self.url, self.channel, str(from_date))

    fetching = True
    page = 0
    nposts = 0

    # Convert timestamp to integer for comparing
    # (milliseconds, matching the post's 'update_at' field)
    since = int(from_date.timestamp() * 1000)

    while fetching:
        raw_posts = self.client.posts(self.channel, page=page)

        posts_before = nposts
        for post in self._parse_posts(raw_posts):
            # posts older than from_date end the fetch; assumes the
            # server returns posts newest-first — TODO confirm
            if post['update_at'] < since:
                fetching = False
                break

            # Fetch user data and attach it to the post
            user_id = post['user_id']
            user = self._get_or_fetch_user(user_id)
            post['user_data'] = user

            yield post
            nposts += 1

        if fetching:
            # If no new posts were fetched; stop the process
            if posts_before == nposts:
                fetching = False
            else:
                page += 1

    logger.info("Fetch process completed: %s posts fetched", nposts)
python
{ "resource": "" }
q268899
Mattermost._parse_posts
test
def _parse_posts(self, raw_posts): """Parse posts and returns in order.""" parsed_posts = self.parse_json(raw_posts) # Posts are not sorted. The order is provided by # 'order' key. for post_id in parsed_posts['order']: yield parsed_posts['posts'][post_id]
python
{ "resource": "" }