repo_name stringlengths 7 65 | path stringlengths 5 185 | copies stringlengths 1 4 | size stringlengths 4 6 | content stringlengths 977 990k | license stringclasses 14 values | hash stringlengths 32 32 | line_mean float64 7.18 99.4 | line_max int64 31 999 | alpha_frac float64 0.25 0.95 | ratio float64 1.5 7.84 | autogenerated bool 1 class | config_or_test bool 2 classes | has_no_keywords bool 2 classes | has_few_assignments bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
globocom/database-as-a-service | dbaas/maintenance/admin/restart_database.py | 1 | 3427 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.conf.urls import patterns, url
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.utils.html import format_html
from maintenance.models import RestartDatabase
from notification.tasks import TaskRegister
from .database_maintenance_task import DatabaseMaintenanceTaskAdmin
class RestartDatabaseAdmin(DatabaseMaintenanceTaskAdmin):
list_filter = [
"status"
]
search_fields = ("task__id", "task__task_id")
list_display = (
"current_step", "database", "friendly_status",
"maintenance_action", "link_task",
"started_at", "finished_at"
)
readonly_fields = (
"current_step_class", "database", "link_task",
"started_at", "finished_at", "status", "task_schedule",
"maintenance_action"
)
def maintenance_action(self, maintenance_task):
if not maintenance_task.is_status_error:
return 'N/A'
if not maintenance_task.can_do_retry:
return 'N/A'
url_retry = "/admin/maintenance/restartdatabase/{}/retry/".format(
maintenance_task.id
)
html_retry = (
"<a title='Retry' class='btn btn-info' href='{}'>Retry</a>".format(
url_retry
)
)
return format_html(html_retry)
def get_urls(self):
base = super(RestartDatabaseAdmin, self).get_urls()
admin = patterns(
'',
url(
r'^/?(?P<manager_id>\d+)/retry/$',
self.admin_site.admin_view(self.retry_view),
name="restart_database_retry"
)
)
return admin + base
def retry_view(self, request, manager_id):
retry_from = get_object_or_404(RestartDatabase, pk=manager_id)
success, redirect = self.check_status(request, retry_from, 'retry')
if not success:
return redirect
TaskRegister.restart_database(
database=retry_from.database,
user=request.user,
since_step=retry_from.current_step,
step_manager=retry_from
)
return self.redirect_to_database(retry_from)
def check_status(self, request, step_manager, operation):
success = True
if success and not step_manager.is_status_error:
success = False
messages.add_message(
request, messages.ERROR,
"You can not do {} because current status is '{}'".format(
operation, step_manager.get_status_display()
),
)
if success and not step_manager.can_do_retry:
success = False
messages.add_message(
request, messages.ERROR,
"{} is disabled".format(operation.capitalize())
)
return success, HttpResponseRedirect(
reverse(
'admin:maintenance_restartdatabase_change',
args=(step_manager.id,)
)
)
def redirect_to_database(self, maintenance):
return HttpResponseRedirect(reverse(
'admin:logical_database_hosts', kwargs={
'id': maintenance.database.id
})
)
| bsd-3-clause | a8b4c2a4e9702254558be34745044dfe | 31.951923 | 79 | 0.583601 | 4.348985 | false | false | false | false |
globocom/database-as-a-service | dbaas/physical/migrations/0081_auto__chg_field_enginepatch_patch_version__chg_field_engine_major_vers.py | 1 | 26351 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'EnginePatch.patch_version'
db.alter_column(u'physical_enginepatch', 'patch_version', self.gf('django.db.models.fields.PositiveIntegerField')())
# Changing field 'Engine.major_version'
db.alter_column(u'physical_engine', 'major_version', self.gf('django.db.models.fields.PositiveIntegerField')(null=True))
# Changing field 'Engine.minor_version'
db.alter_column(u'physical_engine', 'minor_version', self.gf('django.db.models.fields.PositiveIntegerField')(null=True))
def backwards(self, orm):
# Changing field 'EnginePatch.patch_version'
db.alter_column(u'physical_enginepatch', 'patch_version', self.gf('django.db.models.fields.IntegerField')())
# Changing field 'Engine.major_version'
db.alter_column(u'physical_engine', 'major_version', self.gf('django.db.models.fields.IntegerField')(null=True))
# Changing field 'Engine.minor_version'
db.alter_column(u'physical_engine', 'minor_version', self.gf('django.db.models.fields.IntegerField')(null=True))
models = {
u'physical.cloud': {
'Meta': {'object_name': 'Cloud'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.databaseinfra': {
'Meta': {'object_name': 'DatabaseInfra'},
'backup_hour': ('django.db.models.fields.IntegerField', [], {}),
'capacity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'endpoint_dns': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Engine']"}),
'engine_patch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EnginePatch']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_vm_created': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'name_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'name_stamp': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'blank': 'True'}),
'per_database_size_mbytes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Plan']"}),
'ssl_configured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'physical.databaseinfraparameter': {
'Meta': {'unique_together': "((u'databaseinfra', u'parameter'),)", 'object_name': 'DatabaseInfraParameter'},
'applied_on_database': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_value': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.DatabaseInfra']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parameter': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Parameter']"}),
'reset_default_value': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'physical.diskoffering': {
'Meta': {'object_name': 'DiskOffering'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.engine': {
'Meta': {'ordering': "(u'engine_type__name', u'version')", 'unique_together': "((u'version', u'engine_type'),)", 'object_name': 'Engine'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engines'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
'engine_upgrade_option': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Engine']"}),
'has_users': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'major_version': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'minor_version': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'read_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'template_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user_data_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'write_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'physical.enginepatch': {
'Meta': {'object_name': 'EnginePatch'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'patchs'", 'to': u"orm['physical.Engine']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_initial_patch': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'patch_path': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'patch_version': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.enginetype': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'EngineType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_memory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.environment': {
'Meta': {'object_name': 'Environment'},
'cloud': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'environment_cloud'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Cloud']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'migrate_environment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Environment']"}),
'min_of_zones': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.environmentgroup': {
'Meta': {'object_name': 'EnvironmentGroup'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'groups'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.host': {
'Meta': {'object_name': 'Host'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'future_host': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Host']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255'}),
'monitor_url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'offering': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Offering']", 'null': 'True'}),
'os_description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'physical.instance': {
'Meta': {'unique_together': "((u'address', u'port'),)", 'object_name': 'Instance'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.DatabaseInfra']"}),
'dns': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'future_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Instance']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'port': ('django.db.models.fields.IntegerField', [], {}),
'read_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'shard': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'total_size_in_bytes': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_in_bytes': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
},
u'physical.offering': {
'Meta': {'object_name': 'Offering'},
'cpus': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'offerings'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'memory_size_mb': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.parameter': {
'Meta': {'ordering': "(u'engine_type__name', u'name')", 'unique_together': "((u'name', u'engine_type'),)", 'object_name': 'Parameter'},
'allowed_values': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'custom_method': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'dynamic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'enginetype'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter_type': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.plan': {
'Meta': {'object_name': 'Plan'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'plans'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plans'", 'to': u"orm['physical.Engine']"}),
'engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'plans'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
'has_persistence': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_ha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'max_db_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'migrate_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Plan']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'provider': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'replication_topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'replication_topology'", 'null': 'True', 'to': u"orm['physical.ReplicationTopology']"}),
'stronger_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'main_offerings'", 'null': 'True', 'to': u"orm['physical.Offering']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'weaker_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'weaker_offerings'", 'null': 'True', 'to': u"orm['physical.Offering']"})
},
u'physical.planattribute': {
'Meta': {'object_name': 'PlanAttribute'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plan_attributes'", 'to': u"orm['physical.Plan']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'physical.replicationtopology': {
'Meta': {'object_name': 'ReplicationTopology'},
'can_change_parameters': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_clone_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_reinstall_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_resize_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_setup_ssl': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_switch_master': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_upgrade_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'class_path': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'replication_topologies'", 'symmetrical': 'False', 'to': u"orm['physical.Engine']"}),
'has_horizontal_scalability': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'replication_topologies'", 'blank': 'True', 'to': u"orm['physical.Parameter']"}),
'script': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'replication_topologies'", 'null': 'True', 'to': u"orm['physical.Script']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.script': {
'Meta': {'object_name': 'Script'},
'configuration': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initialization': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'metric_collector': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'start_database': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'start_replication': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.topologyparametercustomvalue': {
'Meta': {'unique_together': "((u'topology', u'parameter'),)", 'object_name': 'TopologyParameterCustomValue'},
'attr_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parameter': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'topology_custom_values'", 'to': u"orm['physical.Parameter']"}),
'topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'param_custom_values'", 'to': u"orm['physical.ReplicationTopology']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.vip': {
'Meta': {'object_name': 'Vip'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'infra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'vips'", 'to': u"orm['physical.DatabaseInfra']"}),
'original_vip': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Vip']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.volume': {
'Meta': {'object_name': 'Volume'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'volumes'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'total_size_kb': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_kb': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['physical'] | bsd-3-clause | 8aac04b47754d82350342a1abc276894 | 90.5 | 227 | 0.567531 | 3.570596 | false | false | false | false |
globocom/database-as-a-service | dbaas/physical/migrations/0060_auto__add_field_host_identifier.py | 1 | 21469 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Host.identifier'
db.add_column(u'physical_host', 'identifier',
self.gf('django.db.models.fields.CharField')(default=u'', max_length=255),
keep_default=False)
    def backwards(self, orm):
        """Revert the migration: drop the 'identifier' column again."""
        # Deleting field 'Host.identifier'
        db.delete_column(u'physical_host', 'identifier')
models = {
u'dbaas_cloudstack.cloudstackoffering': {
'Meta': {'object_name': 'CloudStackOffering'},
'cpus': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'equivalent_offering': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['dbaas_cloudstack.CloudStackOffering']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'memory_size_mb': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_offering_region'", 'null': 'True', 'to': u"orm['dbaas_cloudstack.CloudStackRegion']"}),
'serviceofferingid': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'weaker': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'dbaas_cloudstack.cloudstackregion': {
'Meta': {'object_name': 'CloudStackRegion'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_environment_region'", 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.databaseinfra': {
'Meta': {'object_name': 'DatabaseInfra'},
'capacity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'endpoint_dns': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Engine']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_vm_created': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'name_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'name_stamp': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'blank': 'True'}),
'per_database_size_mbytes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Plan']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'physical.databaseinfraparameter': {
'Meta': {'unique_together': "((u'databaseinfra', u'parameter'),)", 'object_name': 'DatabaseInfraParameter'},
'applied_on_database': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_value': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.DatabaseInfra']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parameter': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Parameter']"}),
'reset_default_value': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'physical.diskoffering': {
'Meta': {'object_name': 'DiskOffering'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.engine': {
'Meta': {'ordering': "(u'engine_type__name', u'version')", 'unique_together': "((u'version', u'engine_type'),)", 'object_name': 'Engine'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engines'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
'engine_upgrade_option': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Engine']"}),
'has_users': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'read_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'template_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user_data_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'write_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'physical.enginetype': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'EngineType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_memory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.environment': {
'Meta': {'object_name': 'Environment'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'migrate_environment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Environment']"}),
'min_of_zones': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.host': {
'Meta': {'object_name': 'Host'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'future_host': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Host']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255'}),
'monitor_url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'offering': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['dbaas_cloudstack.CloudStackOffering']", 'null': 'True'}),
'os_description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'physical.instance': {
'Meta': {'unique_together': "((u'address', u'port'),)", 'object_name': 'Instance'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.DatabaseInfra']"}),
'dns': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'future_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Instance']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'port': ('django.db.models.fields.IntegerField', [], {}),
'read_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'shard': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'total_size_in_bytes': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_in_bytes': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
},
u'physical.parameter': {
'Meta': {'ordering': "(u'engine_type__name', u'name')", 'unique_together': "((u'name', u'engine_type'),)", 'object_name': 'Parameter'},
'allowed_values': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'custom_method': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'dynamic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'enginetype'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter_type': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.plan': {
'Meta': {'object_name': 'Plan'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'plans'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plans'", 'to': u"orm['physical.Engine']"}),
'engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'plans'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
'has_persistence': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_ha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'max_db_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'migrate_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Plan']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'provider': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'replication_topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'replication_topology'", 'null': 'True', 'to': u"orm['physical.ReplicationTopology']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.planattribute': {
'Meta': {'object_name': 'PlanAttribute'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plan_attributes'", 'to': u"orm['physical.Plan']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'physical.replicationtopology': {
'Meta': {'object_name': 'ReplicationTopology'},
'can_change_parameters': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_clone_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_reinstall_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_resize_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_switch_master': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_upgrade_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'class_path': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'replication_topologies'", 'symmetrical': 'False', 'to': u"orm['physical.Engine']"}),
'has_horizontal_scalability': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'replication_topologies'", 'blank': 'True', 'to': u"orm['physical.Parameter']"}),
'script': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'replication_topologies'", 'null': 'True', 'to': u"orm['physical.Script']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.script': {
'Meta': {'object_name': 'Script'},
'configuration': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initialization': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'start_database': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'start_replication': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.topologyparametercustomvalue': {
'Meta': {'unique_together': "((u'topology', u'parameter'),)", 'object_name': 'TopologyParameterCustomValue'},
'attr_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parameter': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'topology_custom_values'", 'to': u"orm['physical.Parameter']"}),
'topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'param_custom_values'", 'to': u"orm['physical.ReplicationTopology']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['physical'] | bsd-3-clause | 671e77c424ac0e9766cbd8f3295c010c | 91.145923 | 227 | 0.565001 | 3.574592 | false | false | false | false |
globocom/database-as-a-service | dbaas/maintenance/admin/add_instances_to_database.py | 1 | 1619 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.utils.html import format_html
from .database_maintenance_task import DatabaseMaintenanceTaskAdmin
class AddInstancesToDatabaseAdmin(DatabaseMaintenanceTaskAdmin):
    """Admin screen for 'add instances to database' maintenance tasks.

    Extends the common maintenance-task admin with the instance counters
    recorded before/after the operation and with Retry/Rollback actions
    for failed executions.
    """

    list_filter = [
        "database__team", "current_step", "number_of_instances", "status",
        "number_of_instances_before"
    ]

    list_display = (
        "database", "database_team", "number_of_instances",
        "number_of_instances_before", "current_step", "friendly_status",
        "maintenance_action", "link_task", "started_at", "finished_at"
    )

    readonly_fields = (
        "current_step_class", "database", "number_of_instances",
        "number_of_instances_before", "link_task", "started_at",
        "finished_at", "current_step", "status",
        "maintenance_action", "task_schedule"
    )

    def maintenance_action(self, maintenance):
        """Render Rollback/Retry buttons for a failed maintenance row.

        Returns 'N/A' when the task is not in an error state or cannot
        be retried.
        """
        if not maintenance.is_status_error or not maintenance.can_do_retry:
            return 'N/A'

        url_retry = maintenance.database.get_add_instances_database_retry_url()
        url_rollback = \
            maintenance.database.get_add_instances_database_rollback_url()

        # Let format_html() interpolate the URLs so they are escaped and
        # literal braces in them cannot break str.format().  The previous
        # code pre-built the anchors and passed the finished string through
        # format_html(), which escaped nothing and would raise on stray
        # braces.  Also use the valid HTML entity '&nbsp;' (the original
        # emitted '&nbsp' without the trailing semicolon).
        return format_html(
            "<a title='Rollback' class='btn btn-danger' "
            "href='{}'>Rollback</a>"
            "&nbsp;&nbsp;&nbsp;"
            "<a title='Retry' class='btn btn-warning' href='{}'>Retry</a>",
            url_rollback, url_retry
        )
| bsd-3-clause | 71fb2fe929374a24446d64dd6cc1cad8 | 38.487805 | 85 | 0.628783 | 3.756381 | false | false | false | false |
globocom/database-as-a-service | dbaas/system/tasks.py | 1 | 1030 | # -*- coding: utf-8 -*-
from dbaas.celery import app
from util.decorators import only_one
from models import CeleryHealthCheck
from notification.models import TaskHistory
from notification.tasks import get_worker_name
import logging
LOG = logging.getLogger(__name__)
@app.task(bind=True)
@only_one(key="celery_healthcheck_last_update", timeout=20)
def set_celery_healthcheck_last_update(self):
    """Celery beat task: touch the Celery healthcheck timestamp.

    Registers a TaskHistory entry for auditing, updates the
    CeleryHealthCheck "last update" marker and records success/failure
    on the history row.  Guarded by only_one so overlapping beats are
    no-ops.
    """
    task_history = None  # stays None if TaskHistory.register() itself fails
    try:
        worker_name = get_worker_name()
        task_history = TaskHistory.register(request=self.request, user=None,
                                            worker_name=worker_name)
        task_history.relevance = TaskHistory.RELEVANCE_WARNING
        LOG.info("Setting Celery healthcheck last update")
        CeleryHealthCheck.set_last_update()
        task_history.update_status_for(
            TaskHistory.STATUS_SUCCESS, details="Finished")
    except Exception as e:
        LOG.warn("Oopss...{}".format(e))
        # Bug fix: the original referenced task_history unconditionally
        # here, raising NameError whenever TaskHistory.register() was the
        # call that failed.
        if task_history is not None:
            task_history.update_status_for(
                TaskHistory.STATUS_ERROR, details=e)
    finally:
        # Explicit return keeps the task from ever propagating an
        # exception out of the except handler (original behavior).
        return
| bsd-3-clause | 0e17ee053ff1e3dc35e8052e22468f87 | 33.333333 | 76 | 0.679612 | 3.857678 | false | false | false | false |
globocom/database-as-a-service | dbaas/workflow/steps/util/disk.py | 1 | 4990 | # -*- coding: utf-8 -*-
import logging
from base import BaseInstanceStep
LOG = logging.getLogger(__name__)
class Disk(BaseInstanceStep):
    """Base step for disk/volume related workflow operations.

    Subclasses act on the instance's host volumes; the directory
    constants are overridden where a step works on other paths.
    """

    # Default mount points used by the copy/clean steps below.
    OLD_DIRECTORY = '/data'
    NEW_DIRECTORY = '/new_data'

    @property
    def is_valid(self):
        # Step only applies when the instance's host has at least one volume.
        return bool(self.instance.hostname.volumes.first())

    @property
    def has_active(self):
        # True when any of this step's host volumes is flagged active.
        return self.host.volumes.filter(is_active=True).exists()
class DiskCommand(Disk):
    """Disk step that runs one or more shell scripts on the host via SSH."""

    @property
    def scripts(self):
        """Mapping of failure message -> shell script to execute.

        Subclasses must override this.
        """
        raise NotImplementedError

    def do(self):
        if not self.is_valid:
            return

        # NOTE(review): the dict key (failure message) is currently unused;
        # presumably it was meant to be reported when run_script fails —
        # confirm against host.ssh.run_script's error handling.
        for message, script in self.scripts.items():
            self.host.ssh.run_script(script)

    def undo(self):
        # TODO
        pass
class CopyDataBetweenExports(DiskCommand):
    """Rsync the contents of the old export into the new one."""

    def __unicode__(self):
        src, dst = self.OLD_DIRECTORY, self.NEW_DIRECTORY
        return "Coping data {} -> {}...".format(src, dst)

    @property
    def scripts(self):
        """Single rsync command, keyed by its failure message."""
        src, dst = self.OLD_DIRECTORY, self.NEW_DIRECTORY
        error_message = 'Could not copy data {} -> {}'.format(src, dst)
        # The NFS '.snapshot' control directory must not be copied along.
        rsync_command = (
            "rsync -arm --exclude='{0}/.snapshot' {0}/* {1}".format(src, dst)
        )
        return {error_message: rsync_command}
class CopyDataBetweenExportsMigration(CopyDataBetweenExports):
    """Copy data during a host migration, running on the future host.

    Note the deliberate swap relative to the parent class: here the
    source (OLD_DIRECTORY) is '/new_data/data' and the destination
    (NEW_DIRECTORY) is '/data/data'.
    """

    NEW_DIRECTORY = '{}/data'.format(CopyDataBetweenExports.OLD_DIRECTORY)
    OLD_DIRECTORY = '{}/data'.format(CopyDataBetweenExports.NEW_DIRECTORY)

    @property
    def is_valid(self):
        is_valid = super(CopyDataBetweenExportsMigration, self).is_valid
        if not is_valid:
            return False

        if not self.infra.plan.has_persistence:
            # Memory-only plans have no data files to copy.
            return False

        return True

    @property
    def host(self):
        # Operate on the migration's destination (future) host.
        host = super(CopyDataBetweenExportsMigration, self).host
        return host.future_host
class CleanData(DiskCommand):
    """Remove database data from a slave instance (e.g. before a restore)."""

    def __unicode__(self):
        if not self.is_valid:
            return "Skipped because the instance is master"
        return "Removing data from slave..."

    @property
    def is_valid(self):
        # Only slaves are wiped; the master keeps its data.
        return self.restore.is_slave(self.instance)

    @property
    def directory(self):
        # Glob removed by the clean script; subclasses override the path.
        return '{}/data/*'.format(self.OLD_DIRECTORY)

    @property
    def scripts(self):
        message = 'Could not remove data from {}'.format(self.OLD_DIRECTORY)
        script = 'rm -rf {}'.format(self.directory)
        return {message: script}
class CleanSSLDir(CleanData):
    """Always-valid step that wipes (or creates) the SSL directory."""

    @property
    def is_valid(self):
        # Runs unconditionally, regardless of master/slave role.
        return True

    @property
    def directory(self):
        return '/data/ssl/*'

    @property
    def scripts(self):
        # NOTE: the failure message still mentions OLD_DIRECTORY even
        # though this step operates on /data/ssl.
        error_message = 'Could not remove data from {}'.format(
            self.OLD_DIRECTORY)
        # Wipe the SSL dir when present, otherwise create it.
        clean_script = (
            '[ -d /data/ssl ] && rm -rf {} || mkdir -p /data/ssl'.format(
                self.directory)
        )
        return {error_message: clean_script}
class CleanDataRecreateSlave(CleanData):
    """Wipe data on a slave that is being recreated."""

    @property
    def is_valid(self):
        # Uses the instance flag directly instead of asking the restore.
        return self.instance.is_slave

    def do(self):
        # Only database-bearing instances have data files to remove.
        if self.is_database_instance:
            super(CleanDataRecreateSlave, self).do()
class CleanDataMigrate(CleanData):
    """Wipe data on any database-bearing instance during a migration."""

    def __unicode__(self):
        return "Removing data..."

    @property
    def is_valid(self):
        return self.is_database_instance
class CleanReplRecreateSlave(CleanData):
    """Wipe the replication (repl) directory on a slave being recreated."""

    @property
    def is_valid(self):
        return self.instance.is_slave

    @property
    def directory(self):
        # Replication-log directory instead of the data directory.
        return '{}/repl/*'.format(self.OLD_DIRECTORY)
class CleanDataArbiter(CleanData):
    """Wipe data on a MongoDB arbiter instance."""

    def __unicode__(self):
        return "Removing data from arbiter..."

    @property
    def is_valid(self):
        # Only applies to instances of the MongoDB arbiter type.
        return self.instance.instance_type == self.instance.MONGODB_ARBITER
class RemoveDeprecatedFiles(DiskCommand):
    """Run the driver-provided cleanup of deprecated engine files."""

    def __unicode__(self):
        return "Removing deprecated files..."

    @property
    def scripts(self):
        # The engine driver knows which legacy files it leaves behind.
        cleanup_script = self.infra.get_driver().remove_deprectaed_files()
        return {'Remove Deprecated': cleanup_script}
class ChangeSnapshotOwner(Disk):
    """Re-home the instance's volumes onto the step's target host."""

    def __unicode__(self):
        return "Change snapshots owner..."

    @property
    def can_run(self):
        # Database-wide migrations handle volume ownership elsewhere.
        if self.host_migrate.database_migrate:
            return False
        return super(ChangeSnapshotOwner, self).can_run

    def do(self):
        # Move every volume to the target host, deactivated there.
        for volume in self.instance.hostname.volumes.all():
            volume.is_active = False
            volume.host = self.host
            volume.save()

    def undo(self):
        # Give the (inactive) volumes back to the original host.
        volume = None
        for volume in self.host.volumes.filter(is_active=False):
            volume.host = self.instance.hostname
            volume.save()

        # NOTE(review): only the *last* volume of the loop is reactivated
        # here; confirm whether every returned volume should get
        # is_active=True on rollback.
        if volume:
            volume.is_active = True
            volume.save()
class CleanDataNonDatabaseInstanceRollback(CleanData):
    """Rollback-only step: wipes non-database instances' data on undo."""

    @property
    def is_valid(self):
        return not self.instance.is_database

    @property
    def host(self):
        return self.instance.hostname

    def do(self):
        # Intentionally a no-op: the cleanup runs only on rollback.
        pass

    def undo(self):
        # Reuse CleanData's forward behavior as this step's rollback.
        return super(CleanDataNonDatabaseInstanceRollback, self).do()
| bsd-3-clause | 99d043dcacc8ebb872ec9e878542570b | 23.10628 | 78 | 0.618437 | 4.037217 | false | false | false | false |
globocom/database-as-a-service | dbaas/logical/migrations/0026_auto__add_databaselock.py | 1 | 21078 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply the migration: create the logical_databaselock table."""
        # Adding model 'DatabaseLock'
        db.create_table(u'logical_databaselock', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
            # unique=True: a database can hold at most one lock at a time.
            ('database', self.gf('django.db.models.fields.related.ForeignKey')(related_name=u'lock', unique=True, to=orm['logical.Database'])),
            ('task', self.gf('django.db.models.fields.related.ForeignKey')(related_name=u'lock', to=orm['notification.TaskHistory'])),
        ))
        db.send_create_signal(u'logical', ['DatabaseLock'])
def backwards(self, orm):
# Deleting model 'DatabaseLock'
db.delete_table(u'logical_databaselock')
models = {
u'account.team': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Team'},
'contacts': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_alocation_limit': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '2'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.User']", 'symmetrical': 'False'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'logical.credential': {
'Meta': {'ordering': "(u'database', u'user')", 'unique_together': "((u'user', u'database'),)", 'object_name': 'Credential'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'credentials'", 'to': u"orm['logical.Database']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'logical.database': {
'Meta': {'ordering': "(u'name',)", 'unique_together': "((u'name', u'environment'),)", 'object_name': 'Database'},
'backup_path': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_task': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database'", 'null': 'True', 'to': u"orm['notification.TaskHistory']"}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DatabaseInfra']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_auto_resize': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_quarantine': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['logical.Project']"}),
'quarantine_dt': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_in_bytes': ('django.db.models.fields.FloatField', [], {'default': '0.0'})
},
u'logical.databaselock': {
'Meta': {'object_name': 'DatabaseLock'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'lock'", 'unique': 'True', 'to': u"orm['logical.Database']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'lock'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'logical.project': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Project'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'notification.taskhistory': {
'Meta': {'object_name': 'TaskHistory'},
'arguments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'context': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'db_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'ended_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'task_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_status': ('django.db.models.fields.CharField', [], {'default': "u'PENDING'", 'max_length': '100', 'db_index': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'physical.databaseinfra': {
'Meta': {'object_name': 'DatabaseInfra'},
'capacity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'endpoint_dns': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Engine']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_vm_created': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'name_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'name_stamp': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'blank': 'True'}),
'per_database_size_mbytes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Plan']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'physical.diskoffering': {
'Meta': {'object_name': 'DiskOffering'},
'available_size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.engine': {
'Meta': {'ordering': "(u'engine_type__name', u'version')", 'unique_together': "((u'version', u'engine_type'),)", 'object_name': 'Engine'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engines'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
'engine_upgrade_option': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Engine']"}),
'has_users': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'read_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'template_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user_data_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'write_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'physical.enginetype': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'EngineType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_memory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.environment': {
'Meta': {'object_name': 'Environment'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'min_of_zones': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.plan': {
'Meta': {'object_name': 'Plan'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'plans'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plans'", 'to': u"orm['physical.Engine']"}),
'engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'plans'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
'flipperfox_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'flipperfox_migration_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'has_persistence': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_ha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'max_db_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'provider': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'replication_topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'replication_topology'", 'null': 'True', 'to': u"orm['physical.ReplicationTopology']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.replicationtopology': {
'Meta': {'object_name': 'ReplicationTopology'},
'class_path': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'replication_topologies'", 'symmetrical': 'False', 'to': u"orm['physical.Engine']"}),
'has_horizontal_scalability': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['logical'] | bsd-3-clause | 372c6e7439015b34da8a8c4c34b90f22 | 89.467811 | 239 | 0.561913 | 3.561075 | false | false | false | false |
globocom/database-as-a-service | dbaas/dbaas_services/analyzing/models.py | 1 | 4398 | # -*- coding: utf-8 -*-
from django.db import models
from util.models import BaseModel
from django.utils.translation import ugettext_lazy as _
class AnalyzeRepository(BaseModel):
    """Result of one resource-usage analysis run for a single instance.

    Each row stores, for a given analysis timestamp, whether CPU, memory and
    volume usage crossed their configured thresholds and whether the alert
    email was already sent.
    """

    # When the analysis that produced this record ran.
    analyzed_at = models.DateTimeField(
        verbose_name=_("Analyzed at"),
        db_index=True
    )
    # Denormalized identifiers of the analyzed target (no FKs on purpose:
    # rows must survive even if the database/infra is later removed).
    database_name = models.CharField(
        verbose_name=_("Database name"),
        max_length=60,
        unique=False, null=False, blank=False,
        db_index=True
    )
    databaseinfra_name = models.CharField(
        verbose_name=_("Database Infra name"), max_length=60,
        unique=False, null=False, blank=False,
        db_index=True
    )
    instance_name = models.CharField(
        verbose_name=_("Instance name"), max_length=100,
        unique=False, null=False, blank=False,
        db_index=True
    )
    engine_name = models.CharField(
        verbose_name=_("Engine name"), max_length=20,
        unique=False, null=False, blank=False,
        db_index=True
    )
    environment_name = models.CharField(
        verbose_name=_("Environment name"), max_length=30,
        unique=False, null=False, blank=False,
        db_index=True
    )
    # Per-resource alarm flags and the threshold (percent) in effect when
    # the analysis ran.
    cpu_alarm = models.BooleanField(verbose_name=_("CPU alarm"), default=False)
    cpu_threshold = models.IntegerField(
        verbose_name=_("CPU Threshold"), unique=False,
        null=False, default=50
    )
    memory_alarm = models.BooleanField(
        verbose_name=_("Memory alarm"), default=False
    )
    memory_threshold = models.IntegerField(
        verbose_name=_("Memory Threshold"), unique=False,
        null=False, default=50
    )
    volume_alarm = models.BooleanField(
        verbose_name=_("Volume alarm"), default=False
    )
    volume_threshold = models.IntegerField(
        verbose_name=_("Volume Threshold"), unique=False,
        null=False, default=50
    )
    # Set once the notification email for this record has been dispatched.
    email_sent = models.BooleanField(
        verbose_name=_("Email sent?"), default=False,
        db_index=True
    )

    class Meta:
        # One record per instance per analysis timestamp.
        unique_together = (
            ('analyzed_at', 'instance_name',)
        )
        permissions = (
            ("view_analyzerepository", "Can view analyze repository"),
        )
        verbose_name = 'Resource use report'

    def __unicode__(self):
        return self.instance_name
class ExecutionPlan(BaseModel):
    """Configuration of one analysis pass: which metrics to read, how to
    process them, and which AnalyzeRepository fields receive the result.
    """

    plan_name = models.CharField(
        verbose_name=_("Execution plan name"), max_length=60,
        unique=True, null=False, blank=False,
        db_index=True
    )
    # Comma-separated metric names consumed by the adapter.
    metrics = models.CharField(
        verbose_name=_("Metrics used by plan"), max_length=200,
        unique=True, null=False, blank=False, db_index=True,
        help_text=_(('Comma separated list of metrics. '
                     'Ex.: cpu.cpu_used,cpu.cpu_free,...'))
    )
    threshold = models.IntegerField(
        verbose_name=_("Threshold"), unique=False,
        null=False, default=50
    )
    # Dotted paths resolved at runtime by the analyzing service.
    proccess_function = models.CharField(
        verbose_name=_("Proccess function used by service"),
        max_length=150, unique=False, null=False, blank=False,
    )
    adapter = models.CharField(
        verbose_name=_("Adapter used by service"),
        max_length=150, unique=False, null=False, blank=False,
    )
    # Names of the AnalyzeRepository fields this plan writes to.
    alarm_repository_attr = models.CharField(
        verbose_name=_("Alarm field on repository"),
        max_length=150, unique=True, null=False,
        blank=False,
    )
    threshold_repository_attr = models.CharField(
        verbose_name=_("Threshold field on repository"),
        max_length=150, unique=True, null=False,
        blank=False,
    )
    # Resources below this minimum are skipped by the analysis.
    minimum_value = models.IntegerField(
        verbose_name=_("Minimum resource"), unique=False,
        null=False, blank=False,
    )
    field_to_check_value = models.CharField(
        verbose_name=_("Field to check minimum value"),
        max_length=150, unique=True, null=False,
        blank=False, help_text=_('{model}.{field}')
    )

    class Meta:
        permissions = (
            ("view_executionplan", "Can view Execution Plan"),
        )

    def __parse_metrics(self):
        # Split the stored comma-separated metric list into a Python list.
        return self.metrics.split(',')

    def setup_execution_params(self):
        """Return the kwargs dict consumed by the analysis service."""
        return {
            'metrics': self.__parse_metrics(),
            'proccess_function': self.proccess_function,
            'threshold': self.threshold, 'adapter': self.adapter
        }

    def __unicode__(self):
        return self.plan_name
| bsd-3-clause | 49d1db96babaf2d99df2e810df9ab633 | 31.577778 | 79 | 0.607094 | 4.106443 | false | false | false | false |
globocom/database-as-a-service | dbaas/logical/migrations/0031_auto__add_field_credential_privileges.py | 1 | 27261 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    """Apply: add the ``privileges`` column to ``logical_credential``."""
    # Adding field 'Credential.privileges'
    db.add_column(u'logical_credential', 'privileges',
                  self.gf('django.db.models.fields.CharField')(default=u'Owner', max_length=10),
                  keep_default=False)
def backwards(self, orm):
    # Revert: drop the column added in forwards().
    # Deleting field 'Credential.privileges'
    db.delete_column(u'logical_credential', 'privileges')
models = {
u'account.team': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Team'},
'contacts': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_alocation_limit': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '2'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.User']", 'symmetrical': 'False'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'logical.credential': {
'Meta': {'ordering': "(u'database', u'user')", 'unique_together': "((u'user', u'database'),)", 'object_name': 'Credential'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'credentials'", 'to': u"orm['logical.Database']"}),
'force_ssl': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406'}),
'privileges': ('django.db.models.fields.CharField', [], {'default': "u'Owner'", 'max_length': '10'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'logical.database': {
'Meta': {'ordering': "(u'name',)", 'unique_together': "((u'name', u'environment'),)", 'object_name': 'Database'},
'backup_path': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DatabaseInfra']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_auto_resize': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_quarantine': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['logical.Project']"}),
'quarantine_dt': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'quarantine_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_quarantine'", 'null': 'True', 'to': u"orm['auth.User']"}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_in_bytes': ('django.db.models.fields.FloatField', [], {'default': '0.0'})
},
u'logical.databasehistory': {
'Meta': {'object_name': 'DatabaseHistory'},
'created_at': ('django.db.models.fields.DateTimeField', [], {}),
'database_id': ('django.db.models.fields.IntegerField', [], {'db_index': 'True'}),
'databaseinfra_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
'engine': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'environment': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'has_persistence': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'plan': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'project': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'team': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'logical.databaselock': {
'Meta': {'object_name': 'DatabaseLock'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'lock'", 'unique': 'True', 'to': u"orm['logical.Database']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'lock'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'logical.project': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Project'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'notification.taskhistory': {
'Meta': {'object_name': 'TaskHistory'},
'arguments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'context': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'db_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'ended_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_class': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'task_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_status': ('django.db.models.fields.CharField', [], {'default': "u'WAITING'", 'max_length': '100', 'db_index': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'physical.databaseinfra': {
'Meta': {'object_name': 'DatabaseInfra'},
'capacity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'endpoint_dns': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Engine']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_vm_created': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'name_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'name_stamp': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'blank': 'True'}),
'per_database_size_mbytes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Plan']"}),
'ssl_configured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'physical.diskoffering': {
'Meta': {'object_name': 'DiskOffering'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.engine': {
'Meta': {'ordering': "(u'engine_type__name', u'version')", 'unique_together': "((u'version', u'engine_type'),)", 'object_name': 'Engine'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engines'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
'engine_upgrade_option': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Engine']"}),
'has_users': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'read_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'template_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user_data_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'write_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'physical.enginetype': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'EngineType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_memory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.environment': {
'Meta': {'object_name': 'Environment'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'migrate_environment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Environment']"}),
'min_of_zones': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.offering': {
'Meta': {'object_name': 'Offering'},
'cpus': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'offerings'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'memory_size_mb': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.parameter': {
'Meta': {'ordering': "(u'engine_type__name', u'name')", 'unique_together': "((u'name', u'engine_type'),)", 'object_name': 'Parameter'},
'allowed_values': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'custom_method': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'dynamic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'enginetype'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter_type': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.plan': {
'Meta': {'object_name': 'Plan'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'plans'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plans'", 'to': u"orm['physical.Engine']"}),
'engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'plans'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
'has_persistence': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_ha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'max_db_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'migrate_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Plan']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'provider': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'replication_topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'replication_topology'", 'null': 'True', 'to': u"orm['physical.ReplicationTopology']"}),
'stronger_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'main_offerings'", 'null': 'True', 'to': u"orm['physical.Offering']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'weaker_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'weaker_offerings'", 'null': 'True', 'to': u"orm['physical.Offering']"})
},
u'physical.replicationtopology': {
'Meta': {'object_name': 'ReplicationTopology'},
'can_change_parameters': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_clone_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_reinstall_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_resize_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_setup_ssl': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_switch_master': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_upgrade_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'class_path': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'replication_topologies'", 'symmetrical': 'False', 'to': u"orm['physical.Engine']"}),
'has_horizontal_scalability': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'replication_topologies'", 'blank': 'True', 'to': u"orm['physical.Parameter']"}),
'script': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'replication_topologies'", 'null': 'True', 'to': u"orm['physical.Script']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.script': {
'Meta': {'object_name': 'Script'},
'configuration': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initialization': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'start_database': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'start_replication': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['logical'] | bsd-3-clause | b91c3340d3acf02b295d060b01d2744a | 91.413559 | 227 | 0.558967 | 3.570063 | false | false | false | false |
globocom/database-as-a-service | dbaas/notification/tests/test_periodic_tasks/test_check_ssl_expire_at.py | 1 | 5994 | from django.test import TestCase
from mock import patch, MagicMock
from datetime import date, timedelta
from model_mommy import mommy
from physical.models import Instance
from dbaas.tests.helpers import InstanceHelper
from notification.tasks import check_ssl_expire_at
from maintenance.models import TaskSchedule
from dbaas.tests.helpers import DatabaseHelper, InfraHelper, PlanHelper
__all__ = ('CheckSslExpireAt',)
# Fixed "today" used throughout these tests so date arithmetic is deterministic.
FAKE_TODAY = date(2019, 12, 17)
class FakeDate(date):
    """``date`` subclass whose ``today()`` is pinned to FAKE_TODAY, so code
    under test sees a fixed current date."""
    @staticmethod
    def today():
        return FAKE_TODAY
@patch('notification.tasks.get_worker_name', new=MagicMock())
@patch('notification.tasks.TaskHistory', new=MagicMock())
class CheckSslExpireAt(TestCase):
    """Tests for the ``check_ssl_expire_at`` periodic task.

    Worker-name resolution and task-history writes are mocked at class
    level so each test exercises only the SSL-expiration scheduling logic.
    """
    instance_helper = InstanceHelper

    def setUp(self):
        # One SSL-configured infra whose host certificate expires 16 days
        # after FAKE_TODAY (see _create_database).
        self.today = FAKE_TODAY
        self.engine_type, self.engine, _, self.plan = PlanHelper.create()
        self.environment, self.databaseinfra, self.hostname, self.database = (
            self._create_database()
        )
        self.one_month_later = self.today + timedelta(days=30)

    def _create_database(self, env_name='fake_env',
                         infra_name='__test__ mysqlinfra2'):
        """Create env + ssl_configured infra + host + one MySQL instance +
        database; returns the four objects."""
        environment = mommy.make(
            'Environment',
            name=env_name
        )
        databaseinfra = InfraHelper.create(
            name=infra_name,
            user="root", password='fake_pass',
            engine=self.engine,
            plan=self.plan,
            ssl_configured=True,
            environment=environment
        )
        hostname = mommy.make(
            'Host',
            ssl_expire_at=FAKE_TODAY + timedelta(days=16)
        )
        self.instance_helper.create_instances_by_quant(
            instance_type=Instance.MYSQL, qt=1,
            infra=databaseinfra, hostname=hostname
        )
        database = DatabaseHelper.create(
            name='test_db_1',
            databaseinfra=databaseinfra,
        )
        return environment, databaseinfra, hostname, database

    @patch('notification.tasks.maintenance_models.TaskSchedule.objects.filter')
    def test_dont_find_infras(self, filter_mock):
        # Infras without ssl_configured must be ignored entirely (no
        # TaskSchedule lookup at all).
        self.databaseinfra.ssl_configured = False
        self.databaseinfra.save()
        check_ssl_expire_at()
        self.assertFalse(filter_mock.called)

    @patch('notification.tasks.maintenance_models.TaskSchedule.objects.create')
    @patch('util.email_notifications.schedule_task_notification', new=MagicMock())
    def test_already_have_task_scheduled(self, create_mock):
        # A pre-existing SCHEDULED entry must not be duplicated.
        task_schedule = TaskSchedule()
        task_schedule.database = self.database
        task_schedule.scheduled_for = self.one_month_later
        task_schedule.status = TaskSchedule.SCHEDULED
        task_schedule.save()
        check_ssl_expire_at()
        self.assertFalse(create_mock.called)

    def test_create_task_scheduled(self):
        # Certificate expiring one month out -> exactly one schedule created.
        task_schedule = TaskSchedule.objects.filter(database=self.database)
        self.hostname.ssl_expire_at = self.one_month_later
        self.hostname.save()
        self.assertEqual(task_schedule.count(), 0)
        check_ssl_expire_at()
        task_schedule = TaskSchedule.objects.filter(database=self.database)
        self.assertEqual(task_schedule.count(), 1)

    def test_create_task_scheduled_percona(self):
        # Same behavior for the mysql_percona engine type.
        self.engine_type.name = 'mysql_percona'
        self.engine_type.save()
        task_schedule = TaskSchedule.objects.filter(database=self.database)
        self.hostname.ssl_expire_at = self.one_month_later
        self.hostname.save()
        self.assertEqual(task_schedule.count(), 0)
        check_ssl_expire_at()
        task_schedule = TaskSchedule.objects.filter(database=self.database)
        self.assertEqual(task_schedule.count(), 1)

    @patch('notification.tasks.Configuration.get_by_name',
           new=MagicMock(return_value='fake_env'))
    def test_create_task_scheduled_if_configured(self):
        # When the env filter configuration matches this infra's env the
        # schedule is still created.
        task_schedule = TaskSchedule.objects.filter(database=self.database)
        self.hostname.ssl_expire_at = self.one_month_later
        self.hostname.save()
        self.assertEqual(task_schedule.count(), 0)
        check_ssl_expire_at()
        task_schedule = TaskSchedule.objects.filter(database=self.database)
        self.assertEqual(task_schedule.count(), 1)

    def _fake_get_by_name(self, conf_name):
        # Configuration stub: disable the mail schedule, return the env list
        # for everything else.
        if conf_name == 'schedule_send_mail':
            return 0
        else:
            return self.check_ssl_envs

    @patch('notification.tasks.Configuration.get_by_name')
    def test_create_task_scheduled_if_configured_multiple_envs(
            self, get_by_name_mock):
        # Two infras in two configured envs -> one schedule per database.
        self.check_ssl_envs = 'fake_env,another_env'
        get_by_name_mock.side_effect = self._fake_get_by_name
        environment, databaseinfra, hostname, database = self._create_database(
            env_name='another_env',
            infra_name='__test__ another_infra'
        )
        task_schedule = TaskSchedule.objects.filter(database=self.database)
        self.hostname.ssl_expire_at = self.one_month_later
        self.hostname.save()
        hostname.ssl_expire_at = self.one_month_later
        hostname.save()
        self.assertEqual(task_schedule.count(), 0)
        check_ssl_expire_at()
        task_schedule = TaskSchedule.objects.all()
        self.assertEqual(task_schedule.count(), 2)

    @patch('notification.tasks.date')
    def test_create_task_scheduled_next_maintenance_window(self, date_mock):
        # With a maintenance window (day 5, hour 3) the schedule must land on
        # the next matching window: Friday 2019-12-27 at 03:00.
        date_mock.today.return_value = FAKE_TODAY
        self.databaseinfra.maintenance_window = 3
        self.databaseinfra.maintenance_day = 5
        self.databaseinfra.save()
        check_ssl_expire_at()
        task_schedule = TaskSchedule.objects.get(database=self.database)
        self.assertEqual(
            task_schedule.scheduled_for.weekday(),
            4
        )
        self.assertEqual(
            task_schedule.scheduled_for.date().strftime("%Y-%m-%d"),
            "2019-12-27"
        )
        self.assertEqual(task_schedule.scheduled_for.hour, 3)
| bsd-3-clause | 7d11806d3275f4129e86cd1b84334f49 | 37.670968 | 82 | 0.651318 | 3.897269 | false | true | false | false |
globocom/database-as-a-service | dbaas/maintenance/admin/database_maintenance_task.py | 1 | 2758 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.utils.html import format_html
from ..models import DatabaseMaintenanceTask
class DatabaseMaintenanceTaskAdmin(admin.ModelAdmin):
    """Read-only admin base for DatabaseMaintenanceTask records.

    Rows can be neither added nor deleted through the admin; concrete
    subclasses implement the ``maintenance_action`` column.
    """

    list_select_related = None
    search_fields = ("database__name", "task__id", "task__task_id")
    list_filter = [
        "database__team", "status",
    ]
    exclude = ("task", "can_do_retry")
    actions = None
    list_display = (
        "database", "database_team", "current_step", "friendly_status",
        "maintenance_action", "link_task", "started_at", "finished_at"
    )
    readonly_fields = (
        "current_step_class", "database", "link_task", "started_at", "finished_at",
        "current_step", "status", "maintenance_action", "task_schedule"
    )
    ordering = ["-started_at"]

    # Badge markup rendered for each task status in the change list.
    _STATUS_BADGES = {
        DatabaseMaintenanceTask.WAITING:
            '<span class="label label-warning">Waiting</span>',
        DatabaseMaintenanceTask.RUNNING:
            '<span class="label label-success">Running</span>',
        DatabaseMaintenanceTask.ERROR:
            '<span class="label label-important">Error</span>',
        DatabaseMaintenanceTask.SUCCESS:
            '<span class="label label-info">Success</span>',
        DatabaseMaintenanceTask.ROLLBACK:
            '<span class="label label-info">Rollback</span>',
    }

    def friendly_status(self, maintenance_task):
        """Render the task status as a colored HTML badge (empty when the
        status is unknown)."""
        badge = self._STATUS_BADGES.get(maintenance_task.status, '')
        return format_html(badge)
    friendly_status.short_description = "Status"

    def database_team(self, maintenance_task):
        """Name of the team that owns the affected database."""
        return maintenance_task.database.team.name
    database_team.short_description = "Team"

    def link_task(self, maintenance_task):
        """Anchor pointing at the TaskHistory change page for this task."""
        task_id = maintenance_task.task.id
        change_url = reverse(
            'admin:notification_taskhistory_change', args=[task_id]
        )
        return format_html("<a href={}>{}</a>".format(change_url, task_id))
    link_task.short_description = "Task"

    def has_delete_permission(self, request, obj=None):
        # Maintenance history is immutable from the admin.
        return False

    def has_add_permission(self, request, obj=None):
        # Records are created by the maintenance tasks themselves.
        return False

    def maintenance_action(self, maintenance_task):
        """Subclasses must render their action (e.g. retry button) here."""
        raise NotImplementedError()
    maintenance_action.short_description = "Action"
globocom/database-as-a-service | dbaas/account/forms/change_password_form.py | 1 | 1727 | # -*- coding:utf-8 -*-
from django import forms
from django.utils.translation import ugettext, ugettext_lazy as _
from ..backends import DbaasBackend
import ldap
class ChangePasswordForm(forms.Form):
    """
    A form that lets a user set his/her new password without entering the
    old password.
    """
    error_messages = {
        'password_mismatch': _("The two password fields didn't match."),
    }
    new_password1 = forms.CharField(label=_("New password"),
                                    widget=forms.PasswordInput)
    new_password2 = forms.CharField(label=_("New password confirmation"),
                                    widget=forms.PasswordInput)

    def __init__(self, user, *args, **kwargs):
        # ``user`` is the account whose password will be changed.
        self.user = user
        super(ChangePasswordForm, self).__init__(*args, **kwargs)

    def clean_new_password2(self):
        # Check that both entries match; on success the password is changed
        # immediately through the backend (side effect inside a clean_*
        # method — kept as-is).
        password1 = self.cleaned_data.get('new_password1')
        password2 = self.cleaned_data.get('new_password2')
        if password1 and password2:
            if password1 != password2:
                raise forms.ValidationError(
                    self.error_messages['password_mismatch'])
            else:
                # old_password=None: this form deliberately does not require
                # the current password.
                ret = DbaasBackend.change_password(
                    self.user.username,
                    old_password=None,
                    new_password=self.cleaned_data['new_password1']
                )
                # NOTE(review): this assumes change_password *returns* an
                # ldap.CONSTRAINT_VIOLATION instance (rather than raising)
                # when the directory rejects a recently-used password —
                # confirm against DbaasBackend.
                if isinstance(ret, ldap.CONSTRAINT_VIOLATION):
                    raise forms.ValidationError('Password recently used')
        return password2

    def save(self, commit=True):
        # Password was already changed in clean_new_password2(); here we only
        # persist the user object (set_password intentionally disabled).
        # self.user.set_password(self.cleaned_data['new_password1'])
        if commit:
            self.user.save()
        return self.user
globocom/database-as-a-service | dbaas/physical/ssh.py | 1 | 5956 | import socket
import logging
from time import sleep
from StringIO import StringIO
from uuid import uuid4
from io import BytesIO
import paramiko
LOG = logging.getLogger(__name__)
def connect_host(func):
def wrapper(self, *args, **kw):
# try:
# self.connect()
# return func(self, *args, **kw)
# except (paramiko.ssh_exception.BadHostKeyException,
# paramiko.ssh_exception.AuthenticationException,
# paramiko.ssh_exception.SSHException,
# socket.error) as e:
# msg = "We caught an exception: {}.".format(e)
# LOG.warning(msg)
# self.output['exception'] = str(e)
# return self.output
self.connect()
return func(self, *args, **kw)
return wrapper
class ScriptFailedException(Exception):
pass
class PassAndPkeyEmptyException(Exception):
pass
class HostSSH(object):
ScriptFailedException = ScriptFailedException
def __init__(self, address, username, password=None, private_key=None):
self.address = address
self.private_key = private_key
if self.private_key:
self.private_key = self.private_key.replace('\\n', '\n')
if not any([password, private_key]):
raise PassAndPkeyEmptyException(
"You need set password or private key"
)
self.auth = {'username': username}
if password:
self.auth['password'] = password
else:
self.auth['pkey'] = self.pkey
self.stdout = ''
self.stdin = ''
self.stderr = ''
self.output = {
'stdout': self.stdout,
'stderr': self.stderr,
'exception': '',
}
self.script_file_dir = ''
self.script_file_name = ''
self.script_file_full_path = ''
def connect(self, timeout=None):
self.client = paramiko.SSHClient()
self.client.load_system_host_keys()
self.client.set_missing_host_key_policy(
paramiko.AutoAddPolicy()
)
self.client.connect(
self.address,
timeout=timeout,
**self.auth
)
@property
def pkey(self):
return paramiko.RSAKey.from_private_key(
StringIO(self.private_key)
)
def handle_command_output(self, command_output):
stdin, stdout, stderr = command_output
self.stdout = stdout.readlines()
self.stderr = stderr.readlines()
self.script_exit_code = stdout.channel.recv_exit_status()
self.output.update({
'stdout': self.stdout,
'stderr': self.stderr,
'exception': '',
'exit_code': self.script_exit_code
})
def clean_script_files(self):
if self.script_file_full_path:
ftp = self.client.open_sftp()
try:
ftp.remove(self.script_file_full_path)
except IOError:
pass
ftp.close()
def set_script_file_variables(self):
self.script_file_name = '{}.sh'.format(uuid4())
self.script_file_dir = '/tmp'
self.script_file_full_path = '{}/{}'.format(
self.script_file_dir,
self.script_file_name
)
def create_script_file(self, script):
self.set_script_file_variables()
ftp = self.client.open_sftp()
ftp.get_channel().settimeout(600)
ftp.putfo(BytesIO(script.encode()), self.script_file_full_path)
ftp.close()
@property
def run_script_file_command(self):
return 'sudo sh {}'.format(
self.script_file_full_path
)
@connect_host
def run_script(self, script, get_pty=False, raise_if_error=True,
retry=False):
self.create_script_file(script)
LOG.info(
"Executing command [{}] on remote server {}".format(
script, self.address
)
)
command_output = self.client.exec_command(
self.run_script_file_command,
get_pty=get_pty
)
self.handle_command_output(command_output)
LOG.info(
"Command output: [{}]".format(self.output)
)
self.clean_script_files()
if self.script_exit_code != 0:
if retry:
return self.run_script(
script=script,
get_pty=get_pty,
raise_if_error=raise_if_error,
retry=False
)
elif raise_if_error:
raise ScriptFailedException(
'Could not execute script with exit code {}: {}'.format(
self.script_exit_code, self.output
)
)
return self.output
def check(self, retries=30, wait=30, interval=40, timeout=None):
LOG.info(
"Waiting {} seconds to check {} ssh connection...".format(
wait, self.address
)
)
sleep(wait)
for attempt in range(retries):
try:
LOG.info(
"Login attempt number {} on {} ".format(
attempt + 1, self.address
)
)
self.connect(timeout=timeout)
return True
except (paramiko.ssh_exception.BadHostKeyException,
paramiko.ssh_exception.AuthenticationException,
paramiko.ssh_exception.SSHException,
socket.error) as err:
if attempt == retries - 1:
LOG.error(
"Maximum number of login attempts : {} .".format(err)
)
return False
LOG.warning("We caught an exception: {} .".format(err))
sleep(interval)
| bsd-3-clause | 62580d3f6c7ac03eb187bdbe2b71658c | 29.233503 | 77 | 0.524009 | 4.312817 | false | false | false | false |
globocom/database-as-a-service | dbaas/physical/migrations/0050_auto__add_field_plan_migrate_plan.py | 1 | 16854 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Plan.migrate_plan'
db.add_column(u'physical_plan', 'migrate_plan',
self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name=u'migrate_to', null=True, to=orm['physical.Plan']),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Plan.migrate_plan'
db.delete_column(u'physical_plan', 'migrate_plan_id')
models = {
u'physical.databaseinfra': {
'Meta': {'object_name': 'DatabaseInfra'},
'capacity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'endpoint_dns': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Engine']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_vm_created': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'name_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'name_stamp': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'blank': 'True'}),
'per_database_size_mbytes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Plan']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'physical.databaseinfraparameter': {
'Meta': {'unique_together': "((u'databaseinfra', u'parameter'),)", 'object_name': 'DatabaseInfraParameter'},
'applied_on_database': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_value': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.DatabaseInfra']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parameter': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Parameter']"}),
'reset_default_value': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'physical.diskoffering': {
'Meta': {'object_name': 'DiskOffering'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.engine': {
'Meta': {'ordering': "(u'engine_type__name', u'version')", 'unique_together': "((u'version', u'engine_type'),)", 'object_name': 'Engine'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engines'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
'engine_upgrade_option': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Engine']"}),
'has_users': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'read_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'template_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user_data_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'write_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'physical.enginetype': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'EngineType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_memory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.environment': {
'Meta': {'object_name': 'Environment'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'migrate_environment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Environment']"}),
'min_of_zones': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.host': {
'Meta': {'object_name': 'Host'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'future_host': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Host']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'monitor_url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'os_description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.instance': {
'Meta': {'unique_together': "((u'address', u'port'),)", 'object_name': 'Instance'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.DatabaseInfra']"}),
'dns': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'future_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Instance']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'port': ('django.db.models.fields.IntegerField', [], {}),
'read_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.parameter': {
'Meta': {'ordering': "(u'engine_type__name', u'name')", 'unique_together': "((u'name', u'engine_type'),)", 'object_name': 'Parameter'},
'class_path': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'dynamic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'enginetype'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.plan': {
'Meta': {'object_name': 'Plan'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'plans'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plans'", 'to': u"orm['physical.Engine']"}),
'engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'plans'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
'has_persistence': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_ha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'max_db_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'migrate_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Plan']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'provider': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'replication_topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'replication_topology'", 'null': 'True', 'to': u"orm['physical.ReplicationTopology']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.planattribute': {
'Meta': {'object_name': 'PlanAttribute'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plan_attributes'", 'to': u"orm['physical.Plan']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'physical.replicationtopology': {
'Meta': {'object_name': 'ReplicationTopology'},
'class_path': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'replication_topologies'", 'symmetrical': 'False', 'to': u"orm['physical.Engine']"}),
'has_horizontal_scalability': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'replication_topologies'", 'blank': 'True', 'to': u"orm['physical.Parameter']"}),
'script': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'replication_topologies'", 'null': 'True', 'to': u"orm['physical.Script']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.script': {
'Meta': {'object_name': 'Script'},
'configuration': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initialization': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'start_database': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'start_replication': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['physical'] | bsd-3-clause | 1359aa12fb81c09d0c63f21b02b10aff | 89.13369 | 227 | 0.56408 | 3.56246 | false | false | false | false |
globocom/database-as-a-service | dbaas/dbaas/middleware.py | 1 | 1390 | from datetime import datetime, timedelta
from threading import current_thread
from django.conf import settings
from django.contrib import auth
class AutoLogout:
def process_request(self, request):
if not request.user.is_authenticated():
return
if 'last_touch' in request.session:
max_inactive = timedelta(0, settings.AUTO_LOGOUT_DELAY * 60, 0)
current_inactive = datetime.now() - request.session['last_touch']
if current_inactive > max_inactive:
auth.logout(request)
return
request.session['last_touch'] = datetime.now()
class UserMiddleware(object):
_requests = {}
@classmethod
def current_user(cls):
current_request = cls._requests.get(current_thread().ident, None)
if not current_request:
return
return current_request.user
@classmethod
def set_current_user(cls, user):
current_request = cls._requests[current_thread().ident]
current_request.user = user
def process_request(self, request):
self._requests[current_thread().ident] = request
def process_response(self, request, response):
self._requests.pop(current_thread().ident, None)
return response
def process_exception(self, request, exception):
self._requests.pop(current_thread().ident, None)
| bsd-3-clause | 424f73427879245b247ec6831743229d | 27.958333 | 77 | 0.646763 | 4.34375 | false | false | false | false |
globocom/database-as-a-service | dbaas/extra_dns/models.py | 1 | 1790 | # -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from util.models import BaseModel
from django.db.models.signals import post_delete, post_save
from django.dispatch import receiver
from dbaas_dbmonitor.provider import DBMonitorProvider
import logging
LOG = logging.getLogger(__name__)
class ExtraDns(BaseModel):
database = models.ForeignKey('logical.Database',
related_name="extra_dns",
unique=False, null=False, blank=False,
on_delete=models.CASCADE)
dns = models.CharField(
verbose_name=_("DNS"), max_length=200, null=False, blank=False,)
class Meta:
permissions = (
("view_extradns", "Can view extra dns"),
)
def __unicode__(self):
return u"Extra dns: {} for database : {}".format(self.dns, self.database)
@receiver(post_save, sender=ExtraDns)
def extra_dns_post_save(sender, **kwargs):
"""
extra dns post save signal. Inserts extra dns on dbmonitor's database
"""
LOG.debug("extra_dns post-save triggered")
extra_dns = kwargs.get("instance")
is_new = kwargs.get("created")
if is_new:
database = extra_dns.database
DBMonitorProvider().insert_extra_dns(
database=database, extra_dns=extra_dns.dns)
@receiver(post_delete, sender=ExtraDns)
def extra_dns_post_delete(sender, **kwargs):
"""
extra dns post delete signal. Delete extra dns on dbmonitor's database
"""
LOG.debug("extra_dns post-delete triggered")
extra_dns = kwargs.get("instance")
database = extra_dns.database
DBMonitorProvider().remove_extra_dns(
database=database, extra_dns=extra_dns.dns)
| bsd-3-clause | f81669645bc9bd4c31e1edb04013f3f8 | 28.833333 | 81 | 0.641341 | 3.986637 | false | false | false | false |
globocom/database-as-a-service | dbaas/notification/admin/task_history.py | 1 | 3225 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
import logging
from ..models import TaskHistory
from dbaas import constants
from account.models import Team
LOG = logging.getLogger(__name__)
class TaskHistoryAdmin(admin.ModelAdmin):
perm_add_database_infra = constants.PERM_ADD_DATABASE_INFRA
actions = None
list_display_basic = ["task_id", "friendly_task_name", "task_status", "arguments", "friendly_details", "created_at",
"ended_at"]
list_display_advanced = list_display_basic + ["user"]
search_fields = (
'task_id', "task_name", "task_status", "user", "arguments")
list_filter_basic = ["task_status", ]
list_filter_advanced = list_filter_basic + ["task_name", "user", ]
readonly_fields = ('created_at', 'ended_at', 'task_name', 'task_id', 'task_status', 'user', 'context', 'arguments',
'friendly_details_read', 'db_id', 'relevance')
exclude = ('details', 'object_id', 'object_class', 'database_name')
def friendly_task_name(self, task_history):
if task_history.task_name:
return "%s" % task_history.task_name.split('.')[::-1][0]
else:
return "N/A"
friendly_task_name.short_description = "Task Name"
def friendly_details(self, task_history):
if task_history.details:
return task_history.details.split("\n")[-1]
else:
return "N/A"
friendly_details.short_description = "Current Step"
def friendly_details_read(self, task_history):
if task_history.details:
return task_history.details.lstrip()
friendly_details_read.short_description = "Details"
def has_delete_permission(self, request, obj=None): # note the obj=None
return False
def has_add_permission(self, request, obj=None): # note the obj=None
return False
def has_save_permission(self, request, obj=None): # note the obj=None
return False
def queryset(self, request):
qs = None
if request.user.has_perm(self.perm_add_database_infra):
qs = super(TaskHistoryAdmin, self).queryset(request)
return qs
if request.GET.get('user'):
query_dict_copy = request.GET.copy()
del query_dict_copy['user']
request.GET = query_dict_copy
qs = super(TaskHistoryAdmin, self).queryset(request)
same_team_users = Team.users_at_same_team(request.user)
return qs.filter(user__in=[user.username for user in same_team_users])
def changelist_view(self, request, extra_context=None):
if request.user.has_perm(self.perm_add_database_infra):
self.list_display = self.list_display_advanced
self.list_filter = self.list_filter_advanced
self.list_display_links = ("task_id",)
else:
self.list_display = self.list_display_basic
self.list_filter = self.list_filter_basic
self.list_display_links = (None,)
return super(TaskHistoryAdmin, self).changelist_view(request, extra_context=extra_context)
| bsd-3-clause | 86e41c5ea0330363bfc1122359f755a6 | 36.941176 | 120 | 0.633488 | 3.758741 | false | false | false | false |
globocom/database-as-a-service | dbaas/maintenance/migrations/0026_auto__chg_field_databasecreate_plan__chg_field_databaseupgrade_target_.py | 1 | 44564 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply the migration.

        Re-declares three ForeignKey columns that reference ``physical.Plan``
        as nullable, with ``on_delete=models.SET_NULL`` recorded on the field
        definition, so a Plan can be removed without the dependent maintenance
        rows blocking (or being deleted by) the operation.

        :param orm: South's frozen ORM snapshot for this migration state;
            model references (e.g. ``orm['physical.Plan']``) resolve against
            it, not against the live app models.
        """
        # Changing field 'DatabaseCreate.plan'
        db.alter_column(u'maintenance_databasecreate', 'plan_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, on_delete=models.SET_NULL, to=orm['physical.Plan']))
        # Changing field 'DatabaseUpgrade.target_plan'
        db.alter_column(u'maintenance_databaseupgrade', 'target_plan_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, on_delete=models.SET_NULL, to=orm['physical.Plan']))
        # Changing field 'DatabaseUpgrade.source_plan'
        db.alter_column(u'maintenance_databaseupgrade', 'source_plan_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, on_delete=models.SET_NULL, to=orm['physical.Plan']))
def backwards(self, orm):
# Changing field 'DatabaseCreate.plan'
db.alter_column(u'maintenance_databasecreate', 'plan_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['physical.Plan']))
# Changing field 'DatabaseUpgrade.target_plan'
db.alter_column(u'maintenance_databaseupgrade', 'target_plan_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['physical.Plan']))
# Changing field 'DatabaseUpgrade.source_plan'
db.alter_column(u'maintenance_databaseupgrade', 'source_plan_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['physical.Plan']))
models = {
u'account.team': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Team'},
'contacts': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_alocation_limit': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '2'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.User']", 'symmetrical': 'False'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'backup.backupgroup': {
'Meta': {'object_name': 'BackupGroup'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'dbaas_cloudstack.cloudstackoffering': {
'Meta': {'object_name': 'CloudStackOffering'},
'cpus': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'equivalent_offering': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['dbaas_cloudstack.CloudStackOffering']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'memory_size_mb': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_offering_region'", 'null': 'True', 'to': u"orm['dbaas_cloudstack.CloudStackRegion']"}),
'serviceofferingid': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'weaker': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'dbaas_cloudstack.cloudstackpack': {
'Meta': {'object_name': 'CloudStackPack'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_packs'", 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_offering_packs'", 'to': u"orm['dbaas_cloudstack.CloudStackOffering']"}),
'script_file': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'dbaas_cloudstack.cloudstackregion': {
'Meta': {'object_name': 'CloudStackRegion'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_environment_region'", 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'logical.database': {
'Meta': {'ordering': "(u'name',)", 'unique_together': "((u'name', u'environment'),)", 'object_name': 'Database'},
'backup_path': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DatabaseInfra']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_auto_resize': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_quarantine': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['logical.Project']"}),
'quarantine_dt': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'quarantine_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_quarantine'", 'null': 'True', 'to': u"orm['auth.User']"}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_in_bytes': ('django.db.models.fields.FloatField', [], {'default': '0.0'})
},
u'logical.project': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Project'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasechangeparameter': {
'Meta': {'object_name': 'DatabaseChangeParameter'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'change_parameters'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_change_parameters'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasecreate': {
'Meta': {'object_name': 'DatabaseCreate'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'to': u"orm['logical.Database']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'infra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['physical.DatabaseInfra']"}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'to': u"orm['logical.Project']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'create_database'", 'to': u"orm['notification.TaskHistory']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'maintenance.databasereinstallvm': {
'Meta': {'object_name': 'DatabaseReinstallVM'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'reinstall_vm'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_reinstall_vm'", 'to': u"orm['physical.Instance']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_reinsgtall_vm'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseresize': {
'Meta': {'object_name': 'DatabaseResize'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'resizes'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_offer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_resizes_source'", 'to': u"orm['dbaas_cloudstack.CloudStackPack']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_offer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_resizes_target'", 'to': u"orm['dbaas_cloudstack.CloudStackPack']"}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_resizes'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaserestore': {
'Meta': {'object_name': 'DatabaseRestore'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['backup.BackupGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_restore_new'", 'null': 'True', 'to': u"orm['backup.BackupGroup']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaserestoreinstancepair': {
'Meta': {'unique_together': "((u'master', u'slave', u'restore'),)", 'object_name': 'DatabaseRestoreInstancePair'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_master'", 'to': u"orm['physical.Instance']"}),
'restore': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_instances'", 'to': u"orm['maintenance.DatabaseRestore']"}),
'slave': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_slave'", 'to': u"orm['physical.Instance']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseupgrade': {
'Meta': {'object_name': 'DatabaseUpgrade'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'upgrades'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_upgrades_source'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'source_plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_upgrades_target'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'target_plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_upgrades'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.hostmaintenance': {
'Meta': {'unique_together': "((u'host', u'maintenance'),)", 'object_name': 'HostMaintenance', 'index_together': "[[u'host', u'maintenance']]"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'host_maintenance'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Host']"}),
'hostname': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance'", 'to': u"orm['maintenance.Maintenance']"}),
'rollback_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.maintenance': {
'Meta': {'object_name': 'Maintenance'},
'affected_hosts': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'celery_task_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'hostsid': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'max_length': '10000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_script': ('django.db.models.fields.TextField', [], {}),
'maximum_workers': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'revoked_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'rollback_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'scheduled_for': ('django.db.models.fields.DateTimeField', [], {'unique': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.maintenanceparameters': {
'Meta': {'object_name': 'MaintenanceParameters'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'function_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance_params'", 'to': u"orm['maintenance.Maintenance']"}),
'parameter_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'notification.taskhistory': {
'Meta': {'object_name': 'TaskHistory'},
'arguments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'context': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'db_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'ended_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_class': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'task_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_status': ('django.db.models.fields.CharField', [], {'default': "u'WAITING'", 'max_length': '100', 'db_index': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'physical.databaseinfra': {
'Meta': {'object_name': 'DatabaseInfra'},
'capacity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'endpoint_dns': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Engine']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_vm_created': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'name_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'name_stamp': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'blank': 'True'}),
'per_database_size_mbytes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Plan']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'physical.diskoffering': {
'Meta': {'object_name': 'DiskOffering'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.engine': {
'Meta': {'ordering': "(u'engine_type__name', u'version')", 'unique_together': "((u'version', u'engine_type'),)", 'object_name': 'Engine'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engines'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
'engine_upgrade_option': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Engine']"}),
'has_users': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'read_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'template_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user_data_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'write_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'physical.enginetype': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'EngineType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_memory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.environment': {
'Meta': {'object_name': 'Environment'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'migrate_environment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Environment']"}),
'min_of_zones': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.host': {
'Meta': {'object_name': 'Host'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'future_host': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Host']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'monitor_url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'os_description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.instance': {
'Meta': {'unique_together': "((u'address', u'port'),)", 'object_name': 'Instance'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.DatabaseInfra']"}),
'dns': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'future_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Instance']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'port': ('django.db.models.fields.IntegerField', [], {}),
'read_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'shard': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.parameter': {
'Meta': {'ordering': "(u'engine_type__name', u'name')", 'unique_together': "((u'name', u'engine_type'),)", 'object_name': 'Parameter'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'custom_method': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'dynamic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'enginetype'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.plan': {
'Meta': {'object_name': 'Plan'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'plans'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plans'", 'to': u"orm['physical.Engine']"}),
'engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'plans'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
'has_persistence': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_ha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'max_db_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'migrate_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Plan']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'provider': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'replication_topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'replication_topology'", 'null': 'True', 'to': u"orm['physical.ReplicationTopology']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.replicationtopology': {
'Meta': {'object_name': 'ReplicationTopology'},
'can_change_parameters': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_clone_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_reinstall_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_resize_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_switch_master': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_upgrade_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'class_path': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'replication_topologies'", 'symmetrical': 'False', 'to': u"orm['physical.Engine']"}),
'has_horizontal_scalability': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'replication_topologies'", 'blank': 'True', 'to': u"orm['physical.Parameter']"}),
'script': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'replication_topologies'", 'null': 'True', 'to': u"orm['physical.Script']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.script': {
'Meta': {'object_name': 'Script'},
'configuration': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initialization': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'start_database': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'start_replication': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['maintenance'] | bsd-3-clause | 10524a51f3c41fa5008c3026dbf9af9b | 94.83871 | 227 | 0.569092 | 3.576278 | false | false | false | false |
globocom/database-as-a-service | dbaas/physical/migrations/0021_auto__add_field_engine_engine_upgrade_option.py | 1 | 9942 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Auto-generated South schema migration for the ``physical`` app.

    Adds the self-referential ``Engine.engine_upgrade_option`` foreign key
    (an engine pointing at the engine version it can be upgraded to).

    NOTE(review): South migration files are historical records of schema
    changes that may already be applied in production -- the generated
    statements below must not be edited after the fact.
    """

    def forwards(self, orm):
        """Apply the migration: add the new FK column to ``physical_engine``."""
        # Adding field 'Engine.engine_upgrade_option'
        # Nullable + blank with on_delete=SET_NULL, so existing rows need no
        # default value and deleting the referenced engine does not cascade.
        db.add_column(u'physical_engine', 'engine_upgrade_option',
                      self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name=u'backwards_engine', null=True, on_delete=models.SET_NULL, to=orm['physical.Engine']),
                      keep_default=False)

    def backwards(self, orm):
        """Revert the migration: drop the column added in :meth:`forwards`."""
        # Deleting field 'Engine.engine_upgrade_option'
        # Django/South store an FK attribute named ``x`` in a DB column
        # named ``x_id``, hence the '_id' suffix here.
        db.delete_column(u'physical_engine', 'engine_upgrade_option_id')

    # Frozen ORM snapshot: South's machine-generated, serialized view of every
    # model this migration may reference through ``orm[...]`` above. It mirrors
    # the model definitions at generation time and must stay byte-for-byte as
    # generated; it is data, not executable model code.
    models = {
        u'physical.databaseinfra': {
            'Meta': {'object_name': 'DatabaseInfra'},
            'capacity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'endpoint_dns': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Engine']"}),
            'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'blank': 'True'}),
            'per_database_size_mbytes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Plan']"}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
        },
        u'physical.engine': {
            'Meta': {'unique_together': "((u'version', u'engine_type'),)", 'object_name': 'Engine'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engines'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
            'engine_upgrade_option': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Engine']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'template_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user_data_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'version': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'physical.enginetype': {
            'Meta': {'object_name': 'EngineType'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'physical.environment': {
            'Meta': {'object_name': 'Environment'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'equivalent_environment': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Environment']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'physical.host': {
            'Meta': {'object_name': 'Host'},
            'address': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'future_host': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Host']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'monitor_url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'physical.instance': {
            'Meta': {'unique_together': "((u'address', u'port'),)", 'object_name': 'Instance'},
            'address': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.DatabaseInfra']"}),
            'dns': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'future_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Instance']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'hostname': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Host']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_arbiter': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'port': ('django.db.models.fields.IntegerField', [], {}),
            'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'physical.plan': {
            'Meta': {'object_name': 'Plan'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plans'", 'to': u"orm['physical.Engine']"}),
            'engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
            'environments': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['physical.Environment']", 'symmetrical': 'False'}),
            'equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Plan']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_default': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_ha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'max_db_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'provider': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'physical.planattribute': {
            'Meta': {'object_name': 'PlanAttribute'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plan_attributes'", 'to': u"orm['physical.Plan']"}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
        }
    }
complete_apps = ['physical'] | bsd-3-clause | c5652bd7e22423d98b68ec073eb02dba | 81.858333 | 227 | 0.564474 | 3.567277 | false | false | false | false |
globocom/database-as-a-service | dbaas/maintenance/migrations/0027_auto__add_field_databaseresize_source_offer_name__add_field_databasere.py | 1 | 45919 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Auto-generated South schema migration for the ``maintenance`` app.

    Denormalizes offering names onto ``DatabaseResize`` (adds the
    ``source_offer_name`` / ``target_offer_name`` char columns) and relaxes
    the ``source_offer`` / ``target_offer`` FKs to allow NULL, so resize
    records survive deletion of the CloudStack pack they referenced.

    NOTE(review): South migration files are historical records of schema
    changes that may already be applied in production -- the generated
    statements below must not be edited after the fact.
    """

    def forwards(self, orm):
        """Apply the migration: add the name columns, make both FKs nullable."""
        # Adding field 'DatabaseResize.source_offer_name'
        db.add_column(u'maintenance_databaseresize', 'source_offer_name',
                      self.gf('django.db.models.fields.CharField')(max_length=100, null=True, blank=True),
                      keep_default=False)

        # Adding field 'DatabaseResize.target_offer_name'
        db.add_column(u'maintenance_databaseresize', 'target_offer_name',
                      self.gf('django.db.models.fields.CharField')(max_length=100, null=True, blank=True),
                      keep_default=False)

        # Changing field 'DatabaseResize.target_offer'
        db.alter_column(u'maintenance_databaseresize', 'target_offer_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['dbaas_cloudstack.CloudStackPack']))

        # Changing field 'DatabaseResize.source_offer'
        db.alter_column(u'maintenance_databaseresize', 'source_offer_id', self.gf('django.db.models.fields.related.ForeignKey')(null=True, to=orm['dbaas_cloudstack.CloudStackPack']))

    def backwards(self, orm):
        """Revert the migration.

        Dropping the name columns is safe, but the FK NOT NULL constraints
        cannot be restored automatically (rows may now hold NULLs), so this
        deliberately raises. The ``alter_column`` calls after each ``raise``
        are South's generated template code -- intentionally unreachable,
        kept only as a starting point for a hand-written reverse migration.
        """
        # Deleting field 'DatabaseResize.source_offer_name'
        db.delete_column(u'maintenance_databaseresize', 'source_offer_name')

        # Deleting field 'DatabaseResize.target_offer_name'
        db.delete_column(u'maintenance_databaseresize', 'target_offer_name')

        # User chose to not deal with backwards NULL issues for 'DatabaseResize.target_offer'
        raise RuntimeError("Cannot reverse this migration. 'DatabaseResize.target_offer' and its values cannot be restored.")

        # The following code is provided here to aid in writing a correct migration
        # Changing field 'DatabaseResize.target_offer'
        db.alter_column(u'maintenance_databaseresize', 'target_offer_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['dbaas_cloudstack.CloudStackPack']))

        # User chose to not deal with backwards NULL issues for 'DatabaseResize.source_offer'
        raise RuntimeError("Cannot reverse this migration. 'DatabaseResize.source_offer' and its values cannot be restored.")

        # The following code is provided here to aid in writing a correct migration
        # Changing field 'DatabaseResize.source_offer'
        db.alter_column(u'maintenance_databaseresize', 'source_offer_id', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['dbaas_cloudstack.CloudStackPack']))
models = {
u'account.team': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Team'},
'contacts': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_alocation_limit': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '2'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.User']", 'symmetrical': 'False'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'backup.backupgroup': {
'Meta': {'object_name': 'BackupGroup'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'dbaas_cloudstack.cloudstackoffering': {
'Meta': {'object_name': 'CloudStackOffering'},
'cpus': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'equivalent_offering': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['dbaas_cloudstack.CloudStackOffering']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'memory_size_mb': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_offering_region'", 'null': 'True', 'to': u"orm['dbaas_cloudstack.CloudStackRegion']"}),
'serviceofferingid': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'weaker': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'dbaas_cloudstack.cloudstackpack': {
'Meta': {'object_name': 'CloudStackPack'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_packs'", 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_offering_packs'", 'to': u"orm['dbaas_cloudstack.CloudStackOffering']"}),
'script_file': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'dbaas_cloudstack.cloudstackregion': {
'Meta': {'object_name': 'CloudStackRegion'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_environment_region'", 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'logical.database': {
'Meta': {'ordering': "(u'name',)", 'unique_together': "((u'name', u'environment'),)", 'object_name': 'Database'},
'backup_path': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DatabaseInfra']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_auto_resize': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_quarantine': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['logical.Project']"}),
'quarantine_dt': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'quarantine_user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_quarantine'", 'null': 'True', 'to': u"orm['auth.User']"}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_in_bytes': ('django.db.models.fields.FloatField', [], {'default': '0.0'})
},
u'logical.project': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Project'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasechangeparameter': {
'Meta': {'object_name': 'DatabaseChangeParameter'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'change_parameters'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_change_parameters'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasecreate': {
'Meta': {'object_name': 'DatabaseCreate'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'to': u"orm['logical.Database']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['physical.Environment']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'infra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['physical.DatabaseInfra']"}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases_create'", 'null': 'True', 'to': u"orm['logical.Project']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'create_database'", 'to': u"orm['notification.TaskHistory']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases_create'", 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'maintenance.databasereinstallvm': {
'Meta': {'object_name': 'DatabaseReinstallVM'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'reinstall_vm'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_reinstall_vm'", 'to': u"orm['physical.Instance']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_reinsgtall_vm'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseresize': {
'Meta': {'object_name': 'DatabaseResize'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'resizes'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_offer': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_resizes_source'", 'null': 'True', 'to': u"orm['dbaas_cloudstack.CloudStackPack']"}),
'source_offer_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_offer': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_resizes_target'", 'null': 'True', 'to': u"orm['dbaas_cloudstack.CloudStackPack']"}),
'target_offer_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_resizes'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaserestore': {
'Meta': {'object_name': 'DatabaseRestore'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['backup.BackupGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_restore_new'", 'null': 'True', 'to': u"orm['backup.BackupGroup']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_restore'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaserestoreinstancepair': {
'Meta': {'unique_together': "((u'master', u'slave', u'restore'),)", 'object_name': 'DatabaseRestoreInstancePair'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_master'", 'to': u"orm['physical.Instance']"}),
'restore': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_instances'", 'to': u"orm['maintenance.DatabaseRestore']"}),
'slave': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'restore_slave'", 'to': u"orm['physical.Instance']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseupgrade': {
'Meta': {'object_name': 'DatabaseUpgrade'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'upgrades'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_upgrades_source'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'source_plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'database_upgrades_target'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'target_plan_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_upgrades'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.hostmaintenance': {
'Meta': {'unique_together': "((u'host', u'maintenance'),)", 'object_name': 'HostMaintenance', 'index_together': "[[u'host', u'maintenance']]"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'host_maintenance'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Host']"}),
'hostname': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance'", 'to': u"orm['maintenance.Maintenance']"}),
'rollback_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.maintenance': {
'Meta': {'object_name': 'Maintenance'},
'affected_hosts': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'celery_task_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'hostsid': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'max_length': '10000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_script': ('django.db.models.fields.TextField', [], {}),
'maximum_workers': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'revoked_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'rollback_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'scheduled_for': ('django.db.models.fields.DateTimeField', [], {'unique': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.maintenanceparameters': {
'Meta': {'object_name': 'MaintenanceParameters'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'function_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance_params'", 'to': u"orm['maintenance.Maintenance']"}),
'parameter_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'notification.taskhistory': {
'Meta': {'object_name': 'TaskHistory'},
'arguments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'context': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'db_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'ended_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_class': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'object_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'task_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_status': ('django.db.models.fields.CharField', [], {'default': "u'WAITING'", 'max_length': '100', 'db_index': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'physical.databaseinfra': {
'Meta': {'object_name': 'DatabaseInfra'},
'capacity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'endpoint_dns': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Engine']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_vm_created': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'name_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'name_stamp': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'blank': 'True'}),
'per_database_size_mbytes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Plan']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'physical.diskoffering': {
'Meta': {'object_name': 'DiskOffering'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.engine': {
'Meta': {'ordering': "(u'engine_type__name', u'version')", 'unique_together': "((u'version', u'engine_type'),)", 'object_name': 'Engine'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engines'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
'engine_upgrade_option': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Engine']"}),
'has_users': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'read_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'template_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user_data_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'write_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'physical.enginetype': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'EngineType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_memory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.environment': {
'Meta': {'object_name': 'Environment'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'migrate_environment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Environment']"}),
'min_of_zones': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.host': {
'Meta': {'object_name': 'Host'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'future_host': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Host']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'monitor_url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'os_description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.instance': {
'Meta': {'unique_together': "((u'address', u'port'),)", 'object_name': 'Instance'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.DatabaseInfra']"}),
'dns': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'future_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Instance']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'port': ('django.db.models.fields.IntegerField', [], {}),
'read_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'shard': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.parameter': {
'Meta': {'ordering': "(u'engine_type__name', u'name')", 'unique_together': "((u'name', u'engine_type'),)", 'object_name': 'Parameter'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'custom_method': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'dynamic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'enginetype'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.plan': {
'Meta': {'object_name': 'Plan'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'plans'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plans'", 'to': u"orm['physical.Engine']"}),
'engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'plans'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
'has_persistence': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_ha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'max_db_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'migrate_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Plan']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'provider': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'replication_topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'replication_topology'", 'null': 'True', 'to': u"orm['physical.ReplicationTopology']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.replicationtopology': {
'Meta': {'object_name': 'ReplicationTopology'},
'can_change_parameters': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_clone_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_reinstall_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_resize_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_switch_master': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_upgrade_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'class_path': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'replication_topologies'", 'symmetrical': 'False', 'to': u"orm['physical.Engine']"}),
'has_horizontal_scalability': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'replication_topologies'", 'blank': 'True', 'to': u"orm['physical.Parameter']"}),
'script': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'replication_topologies'", 'null': 'True', 'to': u"orm['physical.Script']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.script': {
'Meta': {'object_name': 'Script'},
'configuration': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initialization': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'start_database': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'start_replication': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
    complete_apps = ['maintenance']

# ==== dbaas/account/migrations/0009_auto__add_roleenvironment.py ====
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: introduce the ``RoleEnvironment`` model.

    ``RoleEnvironment`` attaches to an ``auth.Group`` (used as a "role"
    in this project) the set of ``physical.Environment`` rows that role
    may access, via a many-to-many join table.
    """

    def forwards(self, orm):
        """Apply the migration: create the model table and its M2M table."""
        # Adding model 'RoleEnvironment'
        db.create_table(u'account_roleenvironment', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
            # unique=True: each role (auth.Group) has at most one RoleEnvironment row.
            ('role', self.gf('django.db.models.fields.related.OneToOneField')(related_name=u'role_environment', unique=True, to=orm['auth.Group'])),
        ))
        db.send_create_signal(u'account', ['RoleEnvironment'])

        # Adding M2M table for field environments on 'RoleEnvironment'
        m2m_table_name = db.shorten_name(u'account_roleenvironment_environments')
        db.create_table(m2m_table_name, (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('roleenvironment', models.ForeignKey(orm[u'account.roleenvironment'], null=False)),
            ('environment', models.ForeignKey(orm[u'physical.environment'], null=False))
        ))
        # Each (roleenvironment, environment) pair may appear only once.
        db.create_unique(m2m_table_name, ['roleenvironment_id', 'environment_id'])

    def backwards(self, orm):
        """Revert the migration: drop both tables created by forwards()."""
        # Deleting model 'RoleEnvironment'
        db.delete_table(u'account_roleenvironment')

        # Removing M2M table for field environments on 'RoleEnvironment'
        db.delete_table(db.shorten_name(u'account_roleenvironment_environments'))

    # Frozen ORM snapshot used by South to build the fake model classes
    # reachable through ``orm[...]`` above. Auto-generated; do not edit.
    models = {
        u'account.organization': {
            'Meta': {'object_name': 'Organization'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'external': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'grafana_datasource': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'grafana_endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'grafana_hostgroup': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'grafana_orgid': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'account.roleenvironment': {
            'Meta': {'object_name': 'RoleEnvironment'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'roles'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'role': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "u'role_environment'", 'unique': 'True', 'to': u"orm['auth.Group']"}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'account.team': {
            'Meta': {'ordering': "[u'name']", 'object_name': 'Team'},
            'contacts': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'database_alocation_limit': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '2'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'organization': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'team_organization'", 'on_delete': 'models.PROTECT', 'to': u"orm['account.Organization']"}),
            'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']"}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.User']", 'symmetrical': 'False'})
        },
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'physical.cloud': {
            'Meta': {'object_name': 'Cloud'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'physical.environment': {
            'Meta': {'object_name': 'Environment'},
            'cloud': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'environment_cloud'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Cloud']"}),
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'migrate_environment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Environment']"}),
            'min_of_zones': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        }
    }
    complete_apps = ['account']

# ==== dbaas/physical/migrations/0051_auto__add_topologyparametercustomvalue__add_unique_topologyparametercu.py ====
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration for the ``physical`` app.

    Adds the ``TopologyParameterCustomValue`` model (table
    ``physical_topologyparametercustomvalue``) together with a unique
    constraint over (``topology``, ``parameter``).

    NOTE: this file was auto-generated by South's ``schemamigration``
    command. Do not edit it by hand — the ``models`` dict below is a
    frozen snapshot of the ORM at generation time and must stay exactly
    as South wrote it, or South's change detection will disagree with
    the applied migration history.
    """

    def forwards(self, orm):
        """Apply the migration: create the table, emit the South
        post-create signal, then add the unique constraint."""
        # Adding model 'TopologyParameterCustomValue'
        db.create_table(u'physical_topologyparametercustomvalue', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
            ('topology', self.gf('django.db.models.fields.related.ForeignKey')(related_name=u'param_custom_values', to=orm['physical.ReplicationTopology'])),
            ('parameter', self.gf('django.db.models.fields.related.ForeignKey')(related_name=u'topology_custom_values', to=orm['physical.Parameter'])),
            ('attr_name', self.gf('django.db.models.fields.CharField')(max_length=200)),
        ))
        db.send_create_signal(u'physical', ['TopologyParameterCustomValue'])
        # Adding unique constraint on 'TopologyParameterCustomValue', fields ['topology', 'parameter']
        db.create_unique(u'physical_topologyparametercustomvalue', ['topology_id', 'parameter_id'])

    def backwards(self, orm):
        """Revert the migration: drop the constraint first (it depends on
        the table's columns), then drop the table itself."""
        # Removing unique constraint on 'TopologyParameterCustomValue', fields ['topology', 'parameter']
        db.delete_unique(u'physical_topologyparametercustomvalue', ['topology_id', 'parameter_id'])
        # Deleting model 'TopologyParameterCustomValue'
        db.delete_table(u'physical_topologyparametercustomvalue')

    # Frozen ORM snapshot generated by South — used by later migrations'
    # ``orm[...]`` lookups. Machine-written; keep byte-identical.
    models = {
        u'physical.databaseinfra': {
            'Meta': {'object_name': 'DatabaseInfra'},
            'capacity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'database_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
            'endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'endpoint_dns': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Engine']"}),
            'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_vm_created': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'name_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
            'name_stamp': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'blank': 'True'}),
            'per_database_size_mbytes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Plan']"}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
        },
        u'physical.databaseinfraparameter': {
            'Meta': {'unique_together': "((u'databaseinfra', u'parameter'),)", 'object_name': 'DatabaseInfraParameter'},
            'applied_on_database': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'current_value': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.DatabaseInfra']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'parameter': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Parameter']"}),
            'reset_default_value': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
        },
        u'physical.diskoffering': {
            'Meta': {'object_name': 'DiskOffering'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'physical.engine': {
            'Meta': {'ordering': "(u'engine_type__name', u'version')", 'unique_together': "((u'version', u'engine_type'),)", 'object_name': 'Engine'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engines'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
            'engine_upgrade_option': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Engine']"}),
            'has_users': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'read_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'template_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user_data_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'write_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'})
        },
        u'physical.enginetype': {
            'Meta': {'ordering': "(u'name',)", 'object_name': 'EngineType'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_in_memory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'physical.environment': {
            'Meta': {'object_name': 'Environment'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'migrate_environment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Environment']"}),
            'min_of_zones': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'physical.host': {
            'Meta': {'object_name': 'Host'},
            'address': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'future_host': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Host']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'monitor_url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
            'os_description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'physical.instance': {
            'Meta': {'unique_together': "((u'address', u'port'),)", 'object_name': 'Instance'},
            'address': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.DatabaseInfra']"}),
            'dns': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'future_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Instance']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
            'hostname': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.Host']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'port': ('django.db.models.fields.IntegerField', [], {}),
            'read_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'physical.parameter': {
            'Meta': {'ordering': "(u'engine_type__name', u'name')", 'unique_together': "((u'name', u'engine_type'),)", 'object_name': 'Parameter'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'custom_method': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'dynamic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'enginetype'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'physical.plan': {
            'Meta': {'object_name': 'Plan'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'plans'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
            'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plans'", 'to': u"orm['physical.Engine']"}),
            'engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
            'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'plans'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
            'has_persistence': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_ha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'max_db_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'migrate_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Plan']"}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
            'provider': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'replication_topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'replication_topology'", 'null': 'True', 'to': u"orm['physical.ReplicationTopology']"}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'physical.planattribute': {
            'Meta': {'object_name': 'PlanAttribute'},
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plan_attributes'", 'to': u"orm['physical.Plan']"}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
        },
        u'physical.replicationtopology': {
            'Meta': {'object_name': 'ReplicationTopology'},
            'class_path': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'details': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'engine': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'replication_topologies'", 'symmetrical': 'False', 'to': u"orm['physical.Engine']"}),
            'has_horizontal_scalability': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'parameter': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'replication_topologies'", 'blank': 'True', 'to': u"orm['physical.Parameter']"}),
            'script': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'replication_topologies'", 'null': 'True', 'to': u"orm['physical.Script']"}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'physical.script': {
            'Meta': {'object_name': 'Script'},
            'configuration': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'initialization': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'start_database': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
            'start_replication': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        u'physical.topologyparametercustomvalue': {
            'Meta': {'unique_together': "((u'topology', u'parameter'),)", 'object_name': 'TopologyParameterCustomValue'},
            'attr_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'parameter': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'topology_custom_values'", 'to': u"orm['physical.Parameter']"}),
            'topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'param_custom_values'", 'to': u"orm['physical.ReplicationTopology']"}),
            'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        }
    }

    complete_apps = ['physical']
globocom/database-as-a-service | dbaas/maintenance/migrations/0020_auto__add_databasechangeparameter.py | 1 | 34232 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply the migration: create the
        ``maintenance_databasechangeparameter`` table for the new
        ``DatabaseChangeParameter`` model, then emit South's post-create
        signal so fixtures/content-types are registered.

        Auto-generated by South's ``schemamigration`` — do not edit the
        column definitions by hand; they mirror the frozen ``models``
        snapshot in this file.
        """
        # Adding model 'DatabaseChangeParameter'
        db.create_table(u'maintenance_databasechangeparameter', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
            ('current_step', self.gf('django.db.models.fields.PositiveSmallIntegerField')(default=0)),
            ('status', self.gf('django.db.models.fields.IntegerField')(default=0)),
            ('started_at', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('finished_at', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
            ('can_do_retry', self.gf('django.db.models.fields.BooleanField')(default=True)),
            ('database', self.gf('django.db.models.fields.related.ForeignKey')(related_name=u'change_parameters', to=orm['logical.Database'])),
            ('task', self.gf('django.db.models.fields.related.ForeignKey')(related_name=u'database_change_parameters', to=orm['notification.TaskHistory'])),
        ))
        db.send_create_signal(u'maintenance', ['DatabaseChangeParameter'])
    def backwards(self, orm):
        """Revert the migration: drop the
        ``maintenance_databasechangeparameter`` table (inverse of
        :meth:`forwards`)."""
        # Deleting model 'DatabaseChangeParameter'
        db.delete_table(u'maintenance_databasechangeparameter')
models = {
u'account.team': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Team'},
'contacts': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_alocation_limit': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '2'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.Group']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.User']", 'symmetrical': 'False'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'dbaas_cloudstack.cloudstackoffering': {
'Meta': {'object_name': 'CloudStackOffering'},
'cpus': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'equivalent_offering': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['dbaas_cloudstack.CloudStackOffering']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'memory_size_mb': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'region': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_offering_region'", 'null': 'True', 'to': u"orm['dbaas_cloudstack.CloudStackRegion']"}),
'serviceofferingid': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'weaker': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'dbaas_cloudstack.cloudstackpack': {
'Meta': {'object_name': 'CloudStackPack'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_packs'", 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_offering_packs'", 'to': u"orm['dbaas_cloudstack.CloudStackOffering']"}),
'script_file': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'dbaas_cloudstack.cloudstackregion': {
'Meta': {'object_name': 'CloudStackRegion'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'cs_environment_region'", 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'logical.database': {
'Meta': {'ordering': "(u'name',)", 'unique_together': "((u'name', u'environment'),)", 'object_name': 'Database'},
'backup_path': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DatabaseInfra']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_auto_resize': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databases'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_quarantine': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_protected': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['logical.Project']"}),
'quarantine_dt': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'subscribe_to_email_events': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'databases'", 'null': 'True', 'to': u"orm['account.Team']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_in_bytes': ('django.db.models.fields.FloatField', [], {'default': '0.0'})
},
u'logical.project': {
'Meta': {'ordering': "[u'name']", 'object_name': 'Project'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databasechangeparameter': {
'Meta': {'object_name': 'DatabaseChangeParameter'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'change_parameters'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_change_parameters'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseresize': {
'Meta': {'object_name': 'DatabaseResize'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'resizes'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_offer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_resizes_source'", 'to': u"orm['dbaas_cloudstack.CloudStackPack']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_offer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_resizes_target'", 'to': u"orm['dbaas_cloudstack.CloudStackPack']"}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_resizes'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.databaseupgrade': {
'Meta': {'object_name': 'DatabaseUpgrade'},
'can_do_retry': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_step': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'database': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'upgrades'", 'to': u"orm['logical.Database']"}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'source_plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_upgrades_source'", 'to': u"orm['physical.Plan']"}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'target_plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_upgrades_target'", 'to': u"orm['physical.Plan']"}),
'task': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'database_upgrades'", 'to': u"orm['notification.TaskHistory']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.hostmaintenance': {
'Meta': {'unique_together': "((u'host', u'maintenance'),)", 'object_name': 'HostMaintenance', 'index_together': "[[u'host', u'maintenance']]"},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'host_maintenance'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Host']"}),
'hostname': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance'", 'to': u"orm['maintenance.Maintenance']"}),
'rollback_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.maintenance': {
'Meta': {'object_name': 'Maintenance'},
'affected_hosts': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'celery_task_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
'finished_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'hostsid': ('django.db.models.fields.CommaSeparatedIntegerField', [], {'max_length': '10000'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'main_script': ('django.db.models.fields.TextField', [], {}),
'maximum_workers': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '1'}),
'revoked_by': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'rollback_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'scheduled_for': ('django.db.models.fields.DateTimeField', [], {'unique': 'True'}),
'started_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'maintenance.maintenanceparameters': {
'Meta': {'object_name': 'MaintenanceParameters'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'function_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'maintenance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'maintenance_params'", 'to': u"orm['maintenance.Maintenance']"}),
'parameter_name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'notification.taskhistory': {
'Meta': {'object_name': 'TaskHistory'},
'arguments': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'context': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'db_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'ended_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'task_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'task_status': ('django.db.models.fields.CharField', [], {'default': "u'PENDING'", 'max_length': '100', 'db_index': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'physical.databaseinfra': {
'Meta': {'object_name': 'DatabaseInfra'},
'capacity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'endpoint_dns': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Engine']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_vm_created': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'name_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'name_stamp': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'blank': 'True'}),
'per_database_size_mbytes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Plan']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'physical.diskoffering': {
'Meta': {'object_name': 'DiskOffering'},
'available_size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.engine': {
'Meta': {'ordering': "(u'engine_type__name', u'version')", 'unique_together': "((u'version', u'engine_type'),)", 'object_name': 'Engine'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engines'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
'engine_upgrade_option': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Engine']"}),
'has_users': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'read_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'template_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user_data_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'write_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'physical.enginetype': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'EngineType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_memory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.environment': {
'Meta': {'object_name': 'Environment'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'min_of_zones': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.host': {
'Meta': {'object_name': 'Host'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'future_host': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Host']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'monitor_url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'os_description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.parameter': {
'Meta': {'ordering': "(u'engine_type__name', u'name')", 'unique_together': "((u'name', u'engine_type'),)", 'object_name': 'Parameter'},
'class_path': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'dynamic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'enginetype'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.plan': {
'Meta': {'object_name': 'Plan'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'plans'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plans'", 'to': u"orm['physical.Engine']"}),
'engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'plans'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
'has_persistence': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_ha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'max_db_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'provider': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'replication_topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'replication_topology'", 'null': 'True', 'to': u"orm['physical.ReplicationTopology']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.replicationtopology': {
'Meta': {'object_name': 'ReplicationTopology'},
'class_path': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'replication_topologies'", 'symmetrical': 'False', 'to': u"orm['physical.Engine']"}),
'has_horizontal_scalability': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'replication_topologies'", 'blank': 'True', 'to': u"orm['physical.Parameter']"}),
'script': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'replication_topologies'", 'null': 'True', 'to': u"orm['physical.Script']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.script': {
'Meta': {'object_name': 'Script'},
'configuration': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initialization': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'start_database': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'start_replication': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['maintenance'] | bsd-3-clause | 6ef15fbd259e8ebcc7f723741acf7331 | 92.024457 | 227 | 0.566166 | 3.589389 | false | false | false | false |
globocom/database-as-a-service | dbaas/util/tests/test_email_notification/test_disk_resize.py | 1 | 3000 | from __future__ import absolute_import
from django.contrib.sites.models import Site
from django.core import mail
from model_mommy import mommy
from util.email_notifications import get_domain, email_from, email_to, \
disk_resize_notification
from .base import EmailBaseTest
from physical.models import DiskOffering
__all__ = ('DiskResizeTestCase',)
# Subject templates of the disk auto-resize notification e-mails; the two
# placeholders are filled with the database and the new disk offering.
SUBJECT_DISK_AUTO_RESIZE = '[DBaaS] Database {} auto disk resize to {}'
SUBJECT_DISK_FINAL_AUTO_RESIZE = (
    '[DBaaS] Database {} final auto disk resize to {}'
)
class DiskResizeTestCase(EmailBaseTest):
    """Tests for the disk auto-resize e-mail notification helpers."""

    def _make_offering(self, size_kb):
        # Create a DiskOffering and bind it to the test database's environment.
        offering = mommy.make('DiskOffering', size_kb=size_kb)
        offering.environments.add(self.database.environment)
        return offering

    def setUp(self):
        DiskOffering.objects.all().delete()
        super(DiskResizeTestCase, self).setUp()
        self.greater_disk = self._make_offering(size_kb=200)
        self.disk = self._make_offering(size_kb=100)

    def test_can_get_domain(self):
        """get_domain() must prepend the http scheme to the raw site domain."""
        raw_domain = Site.objects.get(id=1).domain
        self.assertNotIn('http://', raw_domain)
        self.assertIn('http://', get_domain())

    def test_can_get_email_from(self):
        self.assertEqual(self.email_from.value, email_from())

    def test_can_get_email_to(self):
        # Without a team only the admin address is used.
        self.assertEqual(self.email_adm.value, email_to(team=None))

    def test_can_get_email_to_with_team(self):
        self.assertEqual(
            [self.team.email, self.email_adm.value],
            email_to(team=self.team)
        )

    def test_can_get_email_to_with_team_without_email(self):
        # A team with a blank e-mail falls back to the admin address.
        self.team.email = ''
        self.assertEqual(self.email_adm.value, email_to(self.team))

    def _assert_resize_mail_sent(self, new_disk, subject_template):
        # Trigger the notification and check both outgoing messages.
        disk_resize_notification(
            database=self.database, new_disk=new_disk,
            usage_percentage=76.89
        )
        expected_subject = subject_template.format(self.database, new_disk)
        self.assertEqual(len(mail.outbox), 2)
        for message in mail.outbox:
            self.assertEqual(message.subject, expected_subject)

    def test_can_send_email_disk_auto_resize(self):
        self._assert_resize_mail_sent(self.disk, SUBJECT_DISK_AUTO_RESIZE)

    def test_can_send_email_disk_final_auto_resize(self):
        # Resizing to the largest offering uses the "final" subject line.
        self._assert_resize_mail_sent(
            self.greater_disk, SUBJECT_DISK_FINAL_AUTO_RESIZE
        )
| bsd-3-clause | afc7aaf62d721188e899bf74409b92ba | 29.612245 | 72 | 0.623667 | 3.708282 | false | true | false | false |
globocom/database-as-a-service | dbaas/api/zabbix_disk_size_alert.py | 1 | 3347 | # encoding: utf-8
from __future__ import absolute_import, unicode_literals
import logging
from rest_framework.authentication import BasicAuthentication
from rest_framework.permissions import IsAuthenticated
from rest_framework import views, serializers, status, permissions
from rest_framework.response import Response
from logical.models import Database
from maintenance.tasks import TaskRegisterMaintenance
from physical.models import Host
LOG = logging.getLogger(__name__)
class ZabbixHostInformationSerializer(serializers.Serializer):
    """Validates the host block of the Zabbix alert payload."""
    # Host name as reported by Zabbix.
    host = serializers.CharField()
    # Metric value that fired the alert — presumably the disk usage
    # figure; confirm against the Zabbix action configuration.
    value = serializers.CharField()
    # IP address used to match the alert to a DBaaS Host record.
    ip = serializers.CharField()
    # Zabbix internal host identifier.
    host_id = serializers.CharField()
class ZabbixDiskSizeAlertSerializer(serializers.Serializer):
    """Top-level serializer for the Zabbix disk-size alert webhook payload."""
    # Nested host information block.
    host = ZabbixHostInformationSerializer()
class ZabbixDiskSizeAlertAPIView(views.APIView):
    """Webhook endpoint called by Zabbix when a disk-size alert fires.

    Resolves the alerting host IP to a Database and schedules an
    asynchronous disk-resize maintenance task.
    """

    model = Database
    permission_classes = (IsAuthenticated,)
    authentication_classes = (BasicAuthentication,)
    # TaskHistory statuses considered "still in progress".
    running_status = ('RUNNING', 'WAITING')

    def post(self, request, *args, **kwargs):
        """Validate the Zabbix payload and register the resize task.

        Returns 201 when the task is scheduled, 404 when no database
        matches the host IP, and 500 when the payload is invalid.
        """
        LOG.info("Resize Zabbix Alert -> Received payload: {}".format(request.DATA))
        data = request.DATA
        serializer = ZabbixDiskSizeAlertSerializer(data=data)
        if serializer.is_valid():
            host = serializer.data['host']
            # Find the database through the alerting host IP.
            database = self.get_database_from_host_ip(host['ip'])
            if database is None:
                return Response({'message': 'Database não encontrada'}, status=status.HTTP_404_NOT_FOUND)

            # Check whether a resize task is already running for this database.
            is_running = self.is_running_resize_task_for_database(database)

            # Schedule asynchronously so Zabbix is not kept waiting.
            TaskRegisterMaintenance.zabbix_alert_resize_disk(database, is_running)

            LOG.info("No resize task is running for database {}".format(database.name))
            return Response(status=status.HTTP_201_CREATED)

        LOG.error("Serializer erros: {}".format(serializer.errors))
        return Response(serializer.errors, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    def is_running_resize_task_for_database(self, database):
        """Return True when a disk-resize task is in progress for *database*."""
        from notification.models import TaskHistory

        # Look for a disk-resize task for this database that is still running.
        running_task = TaskHistory.objects.filter(
            database_name=database.name,
            task_name='database_disk_resize',
            task_status__in=self.running_status
        ).first()
        return running_task is not None

    def get_database_from_host_ip(self, ip):
        """Return the Database served by the host with address *ip*, or None.

        e.g. ip="10.89.1.108" -> Database object
        """
        # Find the host by its IP address.
        host = Host.objects.filter(address=ip).first()
        if not host:
            LOG.error("Host with IP {} not found!".format(ip))
            return None
        LOG.info("Host with IP {} is {}".format(ip, host.hostname))

        # Find the database through the host's databaseinfra.
        database = Database.objects.filter(databaseinfra=host.databaseinfra).first()
        if not database:
            LOG.error("Database with Host {} not found!".format(host.id))
            # BUG FIX: previously fell through and raised AttributeError
            # on database.name below when no database matched.
            return None

        LOG.info("Database for Host {} is {}".format(host.hostname, database.name))
        return database
globocom/database-as-a-service | dbaas/physical/migrations/0048_auto__del_field_parameter_class_path__add_field_parameter_custom_metho.py | 1 | 16826 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'Parameter.class_path'
db.delete_column(u'physical_parameter', 'class_path')
# Adding field 'Parameter.custom_method'
db.add_column(u'physical_parameter', 'custom_method',
self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Adding field 'Parameter.class_path'
db.add_column(u'physical_parameter', 'class_path',
self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True),
keep_default=False)
# Deleting field 'Parameter.custom_method'
db.delete_column(u'physical_parameter', 'custom_method')
models = {
u'physical.databaseinfra': {
'Meta': {'object_name': 'DatabaseInfra'},
'capacity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'endpoint_dns': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Engine']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_vm_created': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'name_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'name_stamp': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'blank': 'True'}),
'per_database_size_mbytes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Plan']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'physical.databaseinfraparameter': {
'Meta': {'unique_together': "((u'databaseinfra', u'parameter'),)", 'object_name': 'DatabaseInfraParameter'},
'applied_on_database': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'current_value': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.DatabaseInfra']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parameter': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Parameter']"}),
'reset_default_value': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'physical.diskoffering': {
'Meta': {'object_name': 'DiskOffering'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.engine': {
'Meta': {'ordering': "(u'engine_type__name', u'version')", 'unique_together': "((u'version', u'engine_type'),)", 'object_name': 'Engine'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engines'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
'engine_upgrade_option': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Engine']"}),
'has_users': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'read_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'template_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user_data_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'write_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'physical.enginetype': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'EngineType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_memory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.environment': {
'Meta': {'object_name': 'Environment'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'min_of_zones': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.host': {
'Meta': {'object_name': 'Host'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'future_host': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Host']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'monitor_url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'os_description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.instance': {
'Meta': {'unique_together': "((u'address', u'port'),)", 'object_name': 'Instance'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.DatabaseInfra']"}),
'dns': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'future_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Instance']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'port': ('django.db.models.fields.IntegerField', [], {}),
'read_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.parameter': {
'Meta': {'ordering': "(u'engine_type__name', u'name')", 'unique_together': "((u'name', u'engine_type'),)", 'object_name': 'Parameter'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'custom_method': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'dynamic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'enginetype'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.plan': {
'Meta': {'object_name': 'Plan'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'plans'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plans'", 'to': u"orm['physical.Engine']"}),
'engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'plans'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
'has_persistence': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_ha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'max_db_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'provider': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'replication_topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'replication_topology'", 'null': 'True', 'to': u"orm['physical.ReplicationTopology']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.planattribute': {
'Meta': {'object_name': 'PlanAttribute'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plan_attributes'", 'to': u"orm['physical.Plan']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'physical.replicationtopology': {
'Meta': {'object_name': 'ReplicationTopology'},
'class_path': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'replication_topologies'", 'symmetrical': 'False', 'to': u"orm['physical.Engine']"}),
'has_horizontal_scalability': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'replication_topologies'", 'blank': 'True', 'to': u"orm['physical.Parameter']"}),
'script': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'replication_topologies'", 'null': 'True', 'to': u"orm['physical.Script']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.script': {
'Meta': {'object_name': 'Script'},
'configuration': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initialization': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'start_database': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'start_replication': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
}
}
complete_apps = ['physical'] | bsd-3-clause | 2bac97b70f30ca48d3b4f4c351f3f8dc | 86.186528 | 227 | 0.564305 | 3.583049 | false | false | false | false |
globocom/database-as-a-service | dbaas/backup/migrations/0011_auto__del_field_snapshot_identifier__del_field_snapshot_export_path.py | 1 | 22033 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply: drop the obsolete 'identifier' and 'export_path'
        columns from the backup_snapshot table."""
        # Deleting field 'Snapshot.identifier'
        db.delete_column(u'backup_snapshot', 'identifier')
        # Deleting field 'Snapshot.export_path'
        db.delete_column(u'backup_snapshot', 'export_path')
    def backwards(self, orm):
        """Un-apply: re-create both columns.

        Both are added as null=True/blank=True with keep_default=False,
        so the reverse migration succeeds even when rows already exist.
        """
        # Adding field 'Snapshot.identifier'
        db.add_column(u'backup_snapshot', 'identifier',
                      self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True),
                      keep_default=False)
        # Adding field 'Snapshot.export_path'
        db.add_column(u'backup_snapshot', 'export_path',
                      self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True),
                      keep_default=False)
models = {
u'backup.backupgroup': {
'Meta': {'object_name': 'BackupGroup'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'backup.snapshot': {
'Meta': {'object_name': 'Snapshot'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'end_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'backup_environment'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Environment']"}),
'error': ('django.db.models.fields.CharField', [], {'max_length': '400', 'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'backups'", 'null': 'True', 'to': u"orm['backup.BackupGroup']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'backup_instance'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Instance']"}),
'is_automatic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'purge_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'size': ('django.db.models.fields.BigIntegerField', [], {'null': 'True', 'blank': 'True'}),
'snapshopt_id': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'snapshot_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'start_at': ('django.db.models.fields.DateTimeField', [], {}),
'status': ('django.db.models.fields.IntegerField', [], {}),
'type': ('django.db.models.fields.IntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'volume': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'backups'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Volume']"})
},
u'physical.databaseinfra': {
'Meta': {'object_name': 'DatabaseInfra'},
'capacity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'database_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'endpoint': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'endpoint_dns': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Engine']"}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_vm_created': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'name_prefix': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'name_stamp': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'blank': 'True'}),
'per_database_size_mbytes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'databaseinfras'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.Plan']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'physical.diskoffering': {
'Meta': {'object_name': 'DiskOffering'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'size_kb': ('django.db.models.fields.PositiveIntegerField', [], {}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.engine': {
'Meta': {'ordering': "(u'engine_type__name', u'version')", 'unique_together': "((u'version', u'engine_type'),)", 'object_name': 'Engine'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'engines'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
'engine_upgrade_option': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_engine'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Engine']"}),
'has_users': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'read_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'template_name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user_data_script': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'write_node_description': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'physical.enginetype': {
'Meta': {'ordering': "(u'name',)", 'object_name': 'EngineType'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_in_memory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.environment': {
'Meta': {'object_name': 'Environment'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'migrate_environment': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Environment']"}),
'min_of_zones': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.host': {
'Meta': {'object_name': 'Host'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'future_host': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Host']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255'}),
'monitor_url': ('django.db.models.fields.URLField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'offering': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Offering']", 'null': 'True'}),
'os_description': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '406', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
u'physical.instance': {
'Meta': {'unique_together': "((u'address', u'port'),)", 'object_name': 'Instance'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'databaseinfra': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.DatabaseInfra']"}),
'dns': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'future_instance': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['physical.Instance']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'hostname': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'instances'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'instance_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'port': ('django.db.models.fields.IntegerField', [], {}),
'read_only': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'shard': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
'total_size_in_bytes': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_in_bytes': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
},
u'physical.offering': {
'Meta': {'object_name': 'Offering'},
'cpus': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'environment': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'offerings'", 'to': u"orm['physical.Environment']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'memory_size_mb': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.parameter': {
'Meta': {'ordering': "(u'engine_type__name', u'name')", 'unique_together': "((u'name', u'engine_type'),)", 'object_name': 'Parameter'},
'allowed_values': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'custom_method': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'dynamic': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'engine_type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'enginetype'", 'on_delete': 'models.PROTECT', 'to': u"orm['physical.EngineType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter_type': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '100'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.plan': {
'Meta': {'object_name': 'Plan'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'disk_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'plans'", 'null': 'True', 'on_delete': 'models.PROTECT', 'to': u"orm['physical.DiskOffering']"}),
'engine': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'plans'", 'to': u"orm['physical.Engine']"}),
'engine_equivalent_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'backwards_plan'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['physical.Plan']"}),
'environments': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'plans'", 'symmetrical': 'False', 'to': u"orm['physical.Environment']"}),
'has_persistence': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_ha': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'max_db_size': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'migrate_plan': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'migrate_to'", 'null': 'True', 'to': u"orm['physical.Plan']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'provider': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'replication_topology': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'replication_topology'", 'null': 'True', 'to': u"orm['physical.ReplicationTopology']"}),
'stronger_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'main_offerings'", 'null': 'True', 'to': u"orm['physical.Offering']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'weaker_offering': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'weaker_offerings'", 'null': 'True', 'to': u"orm['physical.Offering']"})
},
u'physical.replicationtopology': {
'Meta': {'object_name': 'ReplicationTopology'},
'can_change_parameters': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_clone_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_reinstall_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_resize_vm': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_switch_master': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_upgrade_db': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'class_path': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'details': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'engine': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "u'replication_topologies'", 'symmetrical': 'False', 'to': u"orm['physical.Engine']"}),
'has_horizontal_scalability': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parameter': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'replication_topologies'", 'blank': 'True', 'to': u"orm['physical.Parameter']"}),
'script': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'replication_topologies'", 'null': 'True', 'to': u"orm['physical.Script']"}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.script': {
'Meta': {'object_name': 'Script'},
'configuration': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'initialization': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'start_database': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'start_replication': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
},
u'physical.volume': {
'Meta': {'object_name': 'Volume'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'volumes'", 'to': u"orm['physical.Host']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'total_size_kb': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'used_size_kb': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['backup'] | bsd-3-clause | a64a643d6dbf6ea741549452c4ce180e | 90.808333 | 227 | 0.561113 | 3.551991 | false | false | false | false |
wal-e/wal-e | wal_e/worker/prefetch.py | 3 | 6196 | """Manage WAL-prefetching
Normally, wal-fetch is executed by Postgres, and then Postgres
replays the WAL segment.  These steps are not pipelined, so the
time spent recovering is not also spent downloading more WAL.

Prefetch provides better performance by speculatively downloading
WAL segments in advance.
"""
import errno
import os
import re
import shutil
import tempfile
from os import path
from wal_e import log_help
from wal_e import storage
logger = log_help.WalELogger(__name__)
class AtomicDownload(object):
    """Provide a temporary file for downloading exactly one segment.

    On success, the downloaded bytes are published into the finished
    "prefetch" directory; in every case the per-segment staging
    directory is cleaned up on exit.
    """

    def __init__(self, prefetch_dir, segment):
        self.prefetch_dir = prefetch_dir
        self.segment = segment
        self.failed = None

    @property
    def dest(self):
        """Path the downloader should write the segment's bytes to."""
        return self.tf.name

    def __enter__(self):
        staging = self.prefetch_dir.seg_dir(self.segment)
        self.tf = tempfile.NamedTemporaryFile(dir=staging, delete=False)
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        try:
            if exc_type is None:
                # Success: publish the completed segment.
                #
                # In event of a crash, this os.link() without an fsync
                # can leave a corrupt file in the prefetch directory,
                # but given Postgres retries corrupt archive logs
                # (because it itself makes no provisions to sync
                # them), that is assumed to be acceptable.
                final = path.join(self.prefetch_dir.prefetched_dir,
                                  self.segment.name)
                os.link(self.tf.name, final)
        finally:
            # Success or failure, the staging directory (and the temp
            # file inside it) is no longer needed.
            shutil.rmtree(self.prefetch_dir.seg_dir(self.segment))
class Dirs(object):
    """Create and query directories holding prefetched segments

    Prefetched segments are held in a ".wal-e" directory that look
    like this:

    .wal-e
      prefetch
        000000070000EBC00000006C
        000000070000EBC00000006D
        running
          000000070000EBC000000072
            tmpVrRwCu
          000000070000EBC000000073

    Files in the "prefetch" directory are complete.  The "running"
    sub-directory has directories with the in-progress WAL segment and
    a temporary file with the partial contents.
    """

    def __init__(self, base):
        # "base" is presumably the Postgres data directory that
        # wal-fetch runs against -- TODO confirm against callers.
        self.base = base
        self.prefetched_dir = path.join(base, '.wal-e', 'prefetch')
        self.running = path.join(self.prefetched_dir, 'running')

    def seg_dir(self, segment):
        """Return the staging directory for an in-progress *segment*."""
        return path.join(self.running, segment.name)

    def create(self, segment):
        """A best-effort attempt to create directories.

        Warnings are issued to the user if those directories could not
        created or if they don't exist.

        The caller should only call this function if the user
        requested prefetching (i.e. concurrency) to avoid spurious
        warnings.
        """
        def lackadaisical_mkdir(place):
            # Create "place" if necessary, warning -- but never
            # aborting -- when creation is impossible.
            ok = False
            place = path.realpath(place)

            try:
                os.makedirs(place, 0o700)
                ok = True
            except EnvironmentError as e:
                if e.errno == errno.EEXIST:
                    # Has already been created: this is the most
                    # common situation, and is fine.
                    ok = True
                else:
                    logger.warning(
                        msg='could not create prefetch directory',
                        detail=('Prefetch directory creation target: {0}, {1}'
                                .format(place, e.strerror)))

            return ok

        ok = True
        for d in [self.prefetched_dir, self.running]:
            ok &= lackadaisical_mkdir(d)

        lackadaisical_mkdir(self.seg_dir(segment))

    def clear(self):
        """Remove all prefetch state, finished and in-progress alike."""
        def warn_on_cant_remove(function, path, excinfo):
            # Not really expected, so until complaints come in, just
            # dump a ton of information.
            logger.warning(
                msg='cannot clear prefetch data',
                detail='{0!r}\n{1!r}\n{2!r}'.format(function, path, excinfo),
                hint=('Report this as a bug: '
                      'a better error message should be written.'))

        shutil.rmtree(self.prefetched_dir, False, warn_on_cant_remove)

    def _clear_stale(self, directory, retained_names, remove):
        """Delete segment-like entries of *directory* not in *retained_names*.

        *remove* is the callable used to delete a single entry.  ENOENT
        is tolerated both while listing and while removing: another
        wal-e process (or a concurrent "clear") may race this scan and
        win, which is harmless.
        """
        try:
            for n in os.listdir(directory):
                if n not in retained_names and re.match(
                        storage.SEGMENT_REGEXP, n):
                    try:
                        remove(path.join(directory, n))
                    except EnvironmentError as e:
                        if e.errno != errno.ENOENT:
                            raise
        except EnvironmentError as e:
            if e.errno != errno.ENOENT:
                raise

    def clear_except(self, retained_segments):
        """Purge prefetch state for all segments except *retained_segments*."""
        sn = set(s.name for s in retained_segments)

        # In-progress downloads are directories; finished prefetches
        # are plain files, hence the different removal callables.
        self._clear_stale(self.running, sn, shutil.rmtree)
        self._clear_stale(self.prefetched_dir, sn, os.remove)

    def contains(self, segment):
        """Return True when *segment* has been completely prefetched."""
        return path.isfile(path.join(self.prefetched_dir, segment.name))

    def is_running(self, segment):
        """Return True when a download of *segment* appears in progress."""
        return path.isdir(self.seg_dir(segment))

    def running_size(self, segment):
        """Return bytes downloaded so far for an in-progress *segment*.

        An absent staging directory (ENOENT) simply means no bytes
        have arrived yet, so 0 is returned rather than raising.
        """
        byts = 0

        try:
            sd = self.seg_dir(segment)
            for s in os.listdir(sd):
                byts += path.getsize(path.join(sd, s))
            return byts
        except EnvironmentError as e:
            if e.errno == errno.ENOENT:
                return byts
            raise

    def promote(self, segment, destination):
        """Move a fully prefetched *segment* into place at *destination*."""
        source = path.join(self.prefetched_dir, segment.name)
        os.rename(source, destination)

    def download(self, segment):
        """Return an AtomicDownload context manager for *segment*."""
        return AtomicDownload(self, segment)
| bsd-3-clause | d410f2cda0a92ec793c3c4a15ec59bf4 | 30.938144 | 78 | 0.570852 | 4.369535 | false | false | false | false |
wal-e/wal-e | setup.py | 1 | 1996 | #!/usr/bin/env python
import os.path
import sys
# Version file management scheme and graceful degradation for
# setuptools borrowed and adapted from GitPython.
try:
    from setuptools import setup, find_packages

    # Silence pyflakes
    assert setup
    assert find_packages
except ImportError:
    # setuptools is missing: fall back to the legacy ez_setup
    # bootstrap, which downloads/installs it, then retry the import.
    from ez_setup import use_setuptools
    use_setuptools()
    from setuptools import setup, find_packages

# Refuse to run on interpreters older than the oldest supported one.
if sys.version_info < (3, 4):
    raise RuntimeError('Python versions < 3.4 are not supported.')
# Utility function to read the contents of short files.
def read(fname):
    """Return the text of *fname*, resolved relative to this file.

    The file is decoded as UTF-8 explicitly so that the long
    description and VERSION read identically regardless of the
    building machine's locale (the previous implicit-locale open
    could fail on non-ASCII content under a C locale).
    """
    with open(os.path.join(os.path.dirname(__file__), fname),
              encoding='utf-8') as f:
        return f.read()
# Single source of truth for the version: the VERSION file shipped
# inside the package, so metadata and runtime can never disagree.
VERSION = read(os.path.join('wal_e', 'VERSION')).strip()

setup(
    name="wal-e",
    version=VERSION,
    packages=find_packages(),
    # gevent drives wal-e's worker concurrency.
    install_requires=['gevent>=1.1.1'],
    # Each blob-store backend is an optional extra, installed as e.g.
    # "pip install wal-e[aws]".
    extras_require={
        'aws': ['boto>=2.40.0'],
        'azure': ['azure==3.0.0'],
        'google': ['google-cloud-storage>=1.4.0'],
        'swift': ['python-swiftclient>=3.0.0',
                  'python-keystoneclient>=3.0.0']
    },

    # metadata for upload to PyPI
    author="The WAL-E Contributors",
    author_email="wal-e@googlegroups.com",
    maintainer="Daniel Farina",
    maintainer_email="daniel@fdr.io",
    description="Continuous Archiving for Postgres",
    long_description=read('README.rst'),
    classifiers=[
        'Topic :: Database',
        'Topic :: System :: Archiving',
        'Topic :: System :: Recovery Tools',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5'
    ],
    platforms=['any'],
    license="BSD",
    keywords=("postgres postgresql database backup archive archiving s3 aws "
              "openstack swift wabs azure google gce gcs wal shipping"),
    url="https://github.com/wal-e/wal-e",

    # Include the VERSION file
    package_data={'wal_e': ['VERSION']},

    # install: console entry point for the wal-e command-line tool.
    entry_points={'console_scripts': ['wal-e=wal_e.cmd:main']})
| bsd-3-clause | 7fd44fb2f5fa156df1dc5c2809eec19b | 28.352941 | 77 | 0.634269 | 3.696296 | false | false | false | false |
wal-e/wal-e | wal_e/blobstore/wabs/calling_format.py | 1 | 1087 | from azure.storage.blob.blockblobservice import BlockBlobService
from wal_e import log_help
logger = log_help.WalELogger(__name__)
# WABS connection requirements are not quite the same as those of
# S3, so this class is overkill.  It is implemented for the sake of
# consistency only.
class CallingInfo(object):
    """Encapsulate information used to produce a WABS connection.
    """

    def __init__(self, account_name):
        self.account_name = account_name

    def __repr__(self):
        return 'CallingInfo({0})'.format(self.account_name)

    def __str__(self):
        # Human-readable form is identical to the debug form.
        return repr(self)

    def connect(self, creds):
        """Return an azure BlockBlobService instance.
        """
        kwargs = dict(account_name=creds.account_name,
                      account_key=creds.account_key,
                      sas_token=creds.access_token,
                      protocol='https')
        return BlockBlobService(**kwargs)
def from_store_name(container_name):
    """Construct a CallingInfo value from a target container name.

    The container name is used directly as the CallingInfo's
    account_name; WABS needs no per-container calling-format
    decisions, so this exists for API parity with other blobstores.
    """
    return CallingInfo(container_name)
| bsd-3-clause | 965c178fa695e3faca8cd5626862feaf | 30.057143 | 70 | 0.642134 | 4.213178 | false | false | false | false |
wal-e/wal-e | wal_e/worker/upload_pool.py | 13 | 3971 | import gc
import gevent
from wal_e import channel
from wal_e import tar_partition
from wal_e.exception import UserCritical
class TarUploadPool(object):
    """Admission-control pool for concurrent tar-volume uploads.

    Admission is bounded along two axes: the number of in-flight
    uploads (``max_concurrency``) and the total number of tar members
    across all in-flight uploads (``max_members``).
    """
    def __init__(self, uploader, max_concurrency,
                 max_members=tar_partition.PARTITION_MAX_MEMBERS):
        # Injected upload mechanism: a callable taking one tar partition.
        self.uploader = uploader
        # Concurrency maximums
        self.max_members = max_members
        self.max_concurrency = max_concurrency
        # Current member burden across all in-flight uploads.
        self.member_burden = 0
        # Synchronization and tasks: completed/failed uploads are
        # communicated back through this channel.
        self.wait_change = channel.Channel()
        self.closed = False
        # Used for both synchronization and measurement.
        self.concurrency_burden = 0
    def _start(self, tpart):
        """Start upload and account for resource consumption."""
        g = gevent.Greenlet(self.uploader, tpart)
        g.link(self._finish)
        # Account for concurrency_burden before starting the greenlet
        # to avoid racing against .join.
        self.concurrency_burden += 1
        self.member_burden += len(tpart)
        g.start()
    def _finish(self, g):
        """Called on completion of an upload greenlet.

        Takes care to forward Exceptions or, if there is no error, the
        finished TarPartition value across a channel.
        """
        assert g.ready()
        if g.successful():
            finished_tpart = g.get()
            self.wait_change.put(finished_tpart)
        else:
            self.wait_change.put(g.exception)
    def _wait(self):
        """Block until an upload finishes.

        Raise an exception if that tar volume failed with an error.
        """
        val = self.wait_change.get()
        if isinstance(val, Exception):
            # Don't bother uncharging, because execution is going to stop.
            raise val
        else:
            # Uncharge for resources.
            self.member_burden -= len(val)
            self.concurrency_burden -= 1
    def put(self, tpart):
        """Upload a tar volume.

        Blocks if there is too much work outstanding already, and
        raise errors of previously submitted greenlets that die
        unexpectedly.
        """
        if self.closed:
            raise UserCritical(msg='attempt to upload tar after closing',
                               hint='report a bug')
        while True:
            too_many = (
                self.concurrency_burden + 1 > self.max_concurrency
                or self.member_burden + len(tpart) > self.max_members
            )
            if too_many:
                # If there are not enough resources to start an upload
                # even with zero uploads in progress, then something
                # has gone wrong: the user should not be given enough
                # rope to hang themselves in this way.
                if self.concurrency_burden == 0:
                    raise UserCritical(
                        msg=('not enough resources in pool to '
                             'support an upload'),
                        hint='report a bug')
                # _wait blocks until an upload finishes and clears its
                # used resources, after which another attempt to
                # evaluate scheduling resources for another upload
                # might be worth evaluating.
                #
                # Alternatively, an error was encountered in a
                # previous upload in which case it'll be raised here
                # and cause the process to regard the upload as a
                # failure.
                self._wait()
                gc.collect()
            else:
                # Enough resources available: commence upload
                self._start(tpart)
                return
    def join(self):
        """Wait for uploads to exit, raising errors as necessary."""
        self.closed = True
        while self.concurrency_burden > 0:
            self._wait()
| bsd-3-clause | 1c0a5b1e961a5162072e928aead450d4 | 32.091667 | 73 | 0.564341 | 4.77858 | false | false | false | false |
wal-e/wal-e | wal_e/log_help.py | 3 | 6037 | """
A module to assist with using the Python logging module
"""
import datetime
import errno
import logging
import os
from logging import handlers
from os import path
# Global logging handlers created by configure.
HANDLERS = []
class IndentFormatter(logging.Formatter):
    """Formatter that indents continuation lines of a log message."""

    def format(self, record, *args, **kwargs):
        """Format *record* normally, then indent embedded newlines.

        Every newline inside the rendered message is followed by eight
        spaces so multi-line entries read as a single grouped entry.
        """
        rendered = super(IndentFormatter, self).format(
            record, *args, **kwargs)
        return rendered.replace('\n', '\n' + 8 * ' ')
def configure(*args, **kwargs):
    """
    Configure logging.

    Borrowed from logging.basicConfig.  Uses the IndentFormatter
    instead of the regular Formatter.

    Also, opts the caller into Syslog output, unless syslog could not
    be opened for some reason or another, in which case a warning will
    be printed to the other log handlers.

    Recognized keyword arguments: ``syslog_address``, ``format``,
    ``datefmt`` and ``level`` (defaults to INFO).
    """
    # Configuration must only happen once: no mechanism for avoiding
    # duplication of handlers exists.
    assert len(HANDLERS) == 0
    log_destinations = get_log_destinations()
    if 'stderr' in log_destinations:
        # Add stderr output.
        HANDLERS.append(logging.StreamHandler())
    def terrible_log_output(s):
        # Last-resort output used before logging itself is set up.
        import sys
        print(s, file=sys.stderr)
    # Candidate syslog socket paths, probed in order.
    places = [
        # Linux
        '/dev/log',
        # FreeBSD
        '/var/run/log',
        # Macintosh
        '/var/run/syslog',
    ]
    default_syslog_address = places[0]
    for p in places:
        if path.exists(p):
            default_syslog_address = p
            break
    syslog_address = kwargs.setdefault('syslog_address',
                                       default_syslog_address)
    valid_facility = False
    if 'syslog' in log_destinations:
        facility, valid_facility = get_syslog_facility()
        if not valid_facility:
            terrible_log_output('invalid syslog facility level specified')
        try:
            # Add syslog output.
            HANDLERS.append(handlers.SysLogHandler(syslog_address,
                                                   facility=facility))
        except EnvironmentError as e:
            # NOTE(review): errors other than EACCES/ECONNREFUSED are
            # silently swallowed here -- confirm this is intentional.
            if e.errno in [errno.EACCES, errno.ECONNREFUSED]:
                message = ('wal-e: Could not set up syslog, '
                           'continuing anyway. '
                           'Reason: {0}').format(errno.errorcode[e.errno])
                terrible_log_output(message)
    fs = kwargs.get("format", logging.BASIC_FORMAT)
    dfs = kwargs.get("datefmt", None)
    fmt = IndentFormatter(fs, dfs)
    for handler in HANDLERS:
        handler.setFormatter(fmt)
        logging.root.addHandler(handler)
    # Default to INFO level logging.
    set_level(kwargs.get('level', logging.INFO))
def get_log_destinations():
    """Return the list of log destinations from $WALE_LOG_DESTINATION.

    Defaults to both "stderr" and "syslog" when the variable is unset.
    """
    raw = os.environ.get('WALE_LOG_DESTINATION', 'stderr,syslog')
    return raw.split(',')
def get_syslog_facility():
    """Resolve $WALE_SYSLOG_FACILITY to a syslog facility code.

    Returns a ``(facility, valid)`` pair.  ``valid`` is False when the
    environment names an unknown facility, in which case LOG_USER is
    substituted.
    """
    name = os.getenv('WALE_SYSLOG_FACILITY', 'user').lower()
    known = handlers.SysLogHandler.facility_names
    if name in known:
        return known[name], True
    return handlers.SysLogHandler.LOG_USER, False
def set_level(level):
    """Set the logging level on WAL-E's handlers and the root logger."""
    for configured_handler in HANDLERS:
        configured_handler.setLevel(level)
    logging.root.setLevel(level)
class WalELogger(object):
    """Wrap a stdlib logger with structured MSG/DETAIL/HINT formatting."""

    def __init__(self, *args, **kwargs):
        self._logger = logging.getLogger(*args, **kwargs)

    @staticmethod
    def _fmt_structured(d):
        """Render a dict as 'time=... pid=... k1=v1 k2=v2'.

        The timestamp and pid lead the output, to assist with human
        scanning of the data; the remaining entries follow in lexical
        order.
        """
        head = [
            datetime.datetime.utcnow().strftime(
                "time=%Y-%m-%dT%H:%M:%S.%f-00"),
            'pid=' + str(os.getpid()),
        ]
        tail = sorted(str(k) + '=' + str(v) for (k, v) in list(d.items()))
        return ' '.join(head + tail)

    @staticmethod
    def fmt_logline(msg, detail=None, hint=None, structured=None):
        """Compose the multi-line MSG/DETAIL/HINT/STRUCTURED log text."""
        parts = ['MSG: ' + msg]
        if detail is not None:
            parts.append('DETAIL: ' + detail)
        if hint is not None:
            parts.append('HINT: ' + hint)
        # Use a fresh dict when none was passed: keyword defaults are
        # evaluated once, and callees are allowed to mutate what they
        # pass in.
        structured = {} if structured is None else structured
        parts.append('STRUCTURED: ' +
                     WalELogger._fmt_structured(structured))
        return '\n'.join(parts)

    def log(self, level, msg, *args, **kwargs):
        detail = kwargs.pop('detail', None)
        hint = kwargs.pop('hint', None)
        structured = kwargs.pop('structured', None)
        line = self.fmt_logline(msg, detail, hint, structured)
        self._logger.log(level, line, *args, **kwargs)

    # Explicit level-specific shims.  These are deliberately written
    # out by hand rather than generated dynamically, to stay friendly
    # to optimizing interpreters.
    def debug(self, *args, **kwargs):
        self.log(logging.DEBUG, *args, **kwargs)

    def info(self, *args, **kwargs):
        self.log(logging.INFO, *args, **kwargs)

    def warning(self, *args, **kwargs):
        self.log(logging.WARNING, *args, **kwargs)

    def error(self, *args, **kwargs):
        self.log(logging.ERROR, *args, **kwargs)

    def critical(self, *args, **kwargs):
        self.log(logging.CRITICAL, *args, **kwargs)
    # End convenience shims
| bsd-3-clause | a64b57c03c8d4c0299779d346ec079bd | 27.611374 | 78 | 0.599636 | 3.998013 | false | false | false | false |
mozilla/zamboni | mkt/site/models.py | 5 | 11210 | import contextlib
import threading
from django.db import models, transaction
from django.utils import translation
import multidb.pinning
from mkt.translations.hold import save_translations
_locals = threading.local()
class TransformQuerySet(models.query.QuerySet):
    """QuerySet that runs registered transform functions over results.

    Each transform is called once with the full list of fetched objects,
    letting it attach extra data (e.g. translations) in bulk.
    """
    def __init__(self, *args, **kwargs):
        super(TransformQuerySet, self).__init__(*args, **kwargs)
        # Transform callables applied, in registration order, to results.
        self._transform_fns = []
    def _clone(self, klass=None, setup=False, **kw):
        # Propagate the registered transforms onto clones so chained
        # queryset methods keep them.
        c = super(TransformQuerySet, self)._clone(klass, setup, **kw)
        c._transform_fns = self._transform_fns
        return c
    def transform(self, fn):
        """Register `fn` to post-process fetched results; returns self."""
        self._transform_fns.append(fn)
        return self
    def iterator(self):
        # Materialize the results so each transform sees the whole list.
        result_iter = super(TransformQuerySet, self).iterator()
        if self._transform_fns:
            results = list(result_iter)
            for fn in self._transform_fns:
                fn(results)
            return iter(results)
        return result_iter
    def pop_transforms(self):
        """Return (transforms, clone-without-transforms)."""
        qs = self._clone()
        transforms = qs._transform_fns
        qs._transform_fns = []
        return transforms, qs
    def no_transforms(self):
        """Return a clone of this queryset with all transforms removed."""
        return self.pop_transforms()[1]
    def only_translations(self):
        """Remove all transforms except translations."""
        from mkt.translations import transformer
        # Add an extra select so these are cached separately.
        return (self.no_transforms().extra(select={'_only_trans': 1})
                .transform(transformer.get_trans))
class TransformManager(models.Manager):
    """Manager whose querysets support the transform() protocol."""
    def get_queryset(self):
        return TransformQuerySet(self.model)
@contextlib.contextmanager
def use_master():
    """Within this context, all queries go to the master."""
    # Remember the previous pinned state so nested uses restore it.
    old = getattr(multidb.pinning._locals, 'pinned', False)
    multidb.pinning.pin_this_thread()
    try:
        yield
    finally:
        multidb.pinning._locals.pinned = old
class RawQuerySet(models.query.RawQuerySet):
    """A RawQuerySet that caches its results and supports len()."""

    def __init__(self, *args, **kw):
        super(RawQuerySet, self).__init__(*args, **kw)
        # Lazily-populated cache of the fully evaluated result set.
        self._result_cache = None

    def __iter__(self):
        if self._result_cache is None:
            self._result_cache = list(super(RawQuerySet, self).__iter__())
        return iter(self._result_cache)

    def __len__(self):
        # Populate the cache (via __iter__) and measure it directly.
        # Previously this copied the cached list into a second list just
        # to take its length.
        self.__iter__()
        return len(self._result_cache)
class ManagerBase(models.Manager):
    """Base manager that wires querysets up with transforms/translations."""
    # FIXME: remove this, let django use a plain manager for related fields.
    # The issue is, it breaks transforms and in particular, translations. See
    # bug 952550.
    use_for_related_fields = True
    def get_queryset(self):
        # Clone from the original get_queryset() implementation in order to
        # avoid breaking features like <manager>.from_queryset().
        qs = super(ManagerBase, self).get_queryset()
        return self._with_translations(qs._clone(klass=TransformQuerySet))
    def _with_translations(self, qs):
        """Attach the translations transform for translated models."""
        from mkt.translations import transformer
        # Since we're attaching translations to the object, we need to stick
        # the locale in the query so objects aren't shared across locales.
        if hasattr(self.model._meta, 'translated_fields'):
            lang = translation.get_language()
            qs = (qs.transform(transformer.get_trans)
                  .extra(where=['"%s"="%s"' % (lang, lang)]))
        return qs
    def transform(self, fn):
        """Register `fn` on a fresh queryset over all rows."""
        return self.all().transform(fn)
    def raw(self, raw_query, params=None, *args, **kwargs):
        # Use the local caching RawQuerySet (supports len()).
        return RawQuerySet(raw_query, self.model, params=params,
                           using=self._db, *args, **kwargs)
    def safer_get_or_create(self, defaults=None, **kw):
        """
        This is subjective, but I don't trust get_or_create until #13906
        gets fixed. It's probably fine, but this makes me happy for the moment
        and solved a get_or_create we've had in the past.

        Returns (object, created) like the stock get_or_create().
        """
        with transaction.atomic():
            try:
                return self.get(**kw), False
            except self.model.DoesNotExist:
                if defaults is not None:
                    kw.update(defaults)
                return self.create(**kw), True
class _NoChangeInstance(object):
"""A proxy for object instances to make safe operations within an
OnChangeMixin.on_change() callback.
"""
def __init__(self, instance):
self.__instance = instance
def __repr__(self):
return u'<%s for %r>' % (self.__class__.__name__, self.__instance)
def __getattr__(self, attr):
return getattr(self.__instance, attr)
def __setattr__(self, attr, val):
if attr.endswith('__instance'):
# _NoChangeInstance__instance
self.__dict__[attr] = val
else:
setattr(self.__instance, attr, val)
def save(self, *args, **kw):
kw['_signal'] = False
return self.__instance.save(*args, **kw)
def update(self, *args, **kw):
kw['_signal'] = False
return self.__instance.update(*args, **kw)
_on_change_callbacks = {}
class OnChangeMixin(object):
    """Mixin for a Model that allows you to observe attribute changes.

    Register change observers with::

        class YourModel(mkt.site.models.OnChangeMixin,
                        mkt.site.models.ModelBase):
            # ...
            pass

        YourModel.on_change(callback)
    """
    def __init__(self, *args, **kw):
        super(OnChangeMixin, self).__init__(*args, **kw)
        self.reset_attrs()
    def reset_attrs(self):
        # Snapshot the current instance state; save() diffs against it.
        self._initial_attr = dict(self.__dict__)
    @classmethod
    def on_change(cls, callback):
        """Register a function to call on save or update to respond to changes.

        For example::

            def watch_status(old_attr={}, new_attr={},
                             instance=None, sender=None, **kw):
                if old_attr.get('status') != new_attr.get('status'):
                    # ...
                    new_instance.save(_signal=False)
            TheModel.on_change(watch_status)

        .. note::
            Any call to instance.save() or instance.update() within a callback
            will not trigger any change handlers.
        .. note::
            Duplicates based on function.__name__ are ignored for a given
            class.
        """
        existing = _on_change_callbacks.get(cls, [])
        if callback.__name__ in [e.__name__ for e in existing]:
            return callback
        _on_change_callbacks.setdefault(cls, []).append(callback)
        return callback
    def _send_changes(self, old_attr, new_attr_kw):
        # Invoke every registered callback with old/new state and a
        # _NoChangeInstance proxy to prevent recursive signalling.
        new_attr = old_attr.copy()
        new_attr.update(new_attr_kw)
        for cb in _on_change_callbacks[self.__class__]:
            cb(old_attr=old_attr, new_attr=new_attr,
               instance=_NoChangeInstance(self), sender=self.__class__)
    def save(self, *args, **kw):
        """
        Save changes to the model instance.

        If _signal=False is in `kw` the on_change() callbacks won't be called.
        """
        signal = kw.pop('_signal', True)
        result = super(OnChangeMixin, self).save(*args, **kw)
        if signal and self.__class__ in _on_change_callbacks:
            self._send_changes(self._initial_attr, dict(self.__dict__))
        # Now that we saved and triggered the callbacks, reset the attrs.
        self.reset_attrs()
        return result
    def update(self, **kw):
        """
        Shortcut for doing an UPDATE on this object.

        If _signal=False is in ``kw`` the post_save signal won't be sent.
        """
        signal = kw.pop('_signal', True)
        old_attr = dict(self.__dict__)
        result = super(OnChangeMixin, self).update(_signal=signal, **kw)
        if signal and self.__class__ in _on_change_callbacks:
            self._send_changes(old_attr, kw)
        return result
class ModelBase(models.Model):
    """
    Base class for zamboni models to abstract some common features.

    * Adds automatic created and modified fields to the model.
    * Fetches all translations in one subsequent query during initialization.
    """
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)
    objects = ManagerBase()
    class Meta:
        abstract = True
        get_latest_by = 'created'
    def get_absolute_url(self, *args, **kwargs):
        # Delegates to get_url_path(), which concrete models define.
        return self.get_url_path(*args, **kwargs)
    def reload(self):
        """Reloads the instance from the database."""
        from_db = self.__class__.objects.get(pk=self.pk)
        for field in self.__class__._meta.fields:
            try:
                setattr(self, field.name, getattr(from_db, field.name))
            except models.ObjectDoesNotExist:
                # reload() can be called before cleaning up an object of stale
                # related fields, when we do soft-deletion for instance. Avoid
                # failing because of that.
                pass
        return self
    def update(self, **kwargs):
        """
        Shortcut for doing an UPDATE on this object.

        By default it uses django's .save(update_fields=[]) feature, but if you
        pass _signal=False, it uses <manager>.update(**kwargs) instead in order
        to avoid sending pre_save and post_save signals.

        Gotchas:
        - Like django regular .save() method, it does not work if the default
        manager queryset hides some rows (often the case on models where soft
        deletion is implemented). You need to unhide the instance from the
        default manager before using this method.
        - When using _signal=False, updating translated fields won't work,
        since translated fields require pre_save signal to work.
        """
        signal = kwargs.pop('_signal', True)
        for key, value in kwargs.items():
            setattr(self, key, value)
        if signal:
            return self.save(update_fields=kwargs.keys())
        else:
            cls = self.__class__
            cls.objects.filter(pk=self.pk).update(**kwargs)
    def save(self, **kwargs):
        # Flush any pending translations attached to this instance before
        # persisting the row itself.
        if hasattr(self._meta, 'translated_fields'):
            save_translations(id(self))
        return super(ModelBase, self).save(**kwargs)
def manual_order(qs, pks, pk_name='id'):
    """
    Given a query set and a list of primary keys, return a set of objects from
    the query set in that exact order.

    `pk_name` names the key column; `pks` are expected to be numeric ids
    (they are interpolated into a MySQL FIELD() expression).
    """
    if not pks:
        return qs.none()
    # Filter on pk_name (previously hard-coded to `id`, which broke any
    # caller passing a custom pk_name) and let FIELD() impose the
    # requested ordering.
    return qs.filter(**{'%s__in' % pk_name: pks}).extra(
        select={'_manual': 'FIELD(%s, %s)'
                % (pk_name, ','.join(map(str, pks)))},
        order_by=['_manual'])
class DynamicBoolFieldsMixin(object):
    """Mixin for models whose boolean columns all start with 'has'."""

    def _fields(self):
        """Returns array of all field names starting with 'has'."""
        names = []
        for field in self._meta.fields:
            if field.name.startswith('has'):
                names.append(field.name)
        return names

    def to_dict(self):
        """Map every 'has*' field name to its current value."""
        return {name: getattr(self, name) for name in self._fields()}

    def to_keys(self):
        """Return the names of the 'has*' fields that are truthy."""
        return [k for k, v in self.to_dict().iteritems() if v]
class FakeEmail(ModelBase):
    """Stores outgoing email bodies instead of sending them."""
    # Raw message text captured in place of a real delivery.
    message = models.TextField()
    class Meta:
        db_table = 'fake_email'
| bsd-3-clause | a052b48ca21edd85519052ff149bdf5d | 31.492754 | 79 | 0.59554 | 4.127393 | false | false | false | false |
mozilla/zamboni | mkt/stats/views.py | 6 | 13229 | from django import http
import commonware
import requests
from rest_framework.exceptions import ParseError
from rest_framework.generics import ListAPIView
from rest_framework.permissions import AllowAny, BasePermission
from rest_framework.response import Response
from rest_framework.views import APIView
import mkt
from lib.metrics import get_monolith_client
from mkt.api.authentication import (RestOAuthAuthentication,
RestSharedSecretAuthentication)
from mkt.api.base import CORSMixin, SlugOrIdMixin
from mkt.api.exceptions import ServiceUnavailable
from mkt.api.permissions import AllowAppOwner, AnyOf, GroupPermission
from mkt.purchase.models import Contribution
from mkt.webapps.models import Webapp
from .forms import StatsForm
log = commonware.log.getLogger('z.stats')
class PublicStats(BasePermission):
    """
    Allow access for apps with `public_stats` set to True.
    """
    def has_permission(self, request, view):
        # Always pass the view-level check; the object-level check below
        # decides based on the app's public_stats flag (anonymous users
        # included).
        return True
    def has_object_permission(self, request, view, obj):
        return obj.public_stats
# Map of URL metric name to monolith metric name.
#
# The 'dimensions' key is optional query string arguments with defaults that is
# passed to the monolith client and used in the facet filters. If the default
# is `None`, the dimension is excluded unless specified via the API.
#
# The 'lines' key is optional and used for multi-line charts. The format is:
# {'<name>': {'<dimension-key>': '<dimension-value>'}}
# where <name> is what's returned in the JSON output and the dimension
# key/value is what's sent to Monolith similar to the 'dimensions' above.
#
# The 'coerce' key is optional and used to coerce data types returned from
# monolith to other types. Provide the name of the key in the data you want to
# coerce with a callback for how you want the data coerced. E.g.:
# {'count': str}
def lines(name, vals):
    """Build the multi-line chart spec ``{val: {name: val}}`` per value."""
    return {val: {name: val} for val in vals}
STATS = {
'apps_added_by_package': {
'metric': 'apps_added_package_count',
'dimensions': {'region': 'us'},
'lines': lines('package_type', mkt.ADDON_WEBAPP_TYPES.values()),
},
'apps_added_by_premium': {
'metric': 'apps_added_premium_count',
'dimensions': {'region': 'us'},
'lines': lines('premium_type', mkt.ADDON_PREMIUM_API.values()),
},
'apps_available_by_package': {
'metric': 'apps_available_package_count',
'dimensions': {'region': 'us'},
'lines': lines('package_type', mkt.ADDON_WEBAPP_TYPES.values()),
},
'apps_available_by_premium': {
'metric': 'apps_available_premium_count',
'dimensions': {'region': 'us'},
'lines': lines('premium_type', mkt.ADDON_PREMIUM_API.values()),
},
'apps_installed': {
'metric': 'app_installs',
'dimensions': {'region': None},
},
'total_developers': {
'metric': 'total_dev_count',
},
'total_visits': {
'metric': 'visits',
},
'ratings': {
'metric': 'apps_ratings',
},
'abuse_reports': {
'metric': 'apps_abuse_reports',
},
'revenue': {
'metric': 'gross_revenue',
# Counts are floats. Let's convert them to strings with 2 decimals.
'coerce': {'count': lambda d: '{0:.2f}'.format(d)},
},
}
APP_STATS = {
'installs': {
'metric': 'app_installs',
'dimensions': {'region': None},
},
'visits': {
'metric': 'app_visits',
},
'ratings': {
'metric': 'apps_ratings',
},
'average_rating': {
'metric': 'apps_average_rating',
},
'abuse_reports': {
'metric': 'apps_abuse_reports',
},
'revenue': {
'metric': 'gross_revenue',
# Counts are floats. Let's convert them to strings with 2 decimals.
'coerce': {'count': lambda d: '{0:.2f}'.format(d)},
},
}
# The total API will iterate over each key and return statistical totals
# information on them all.
STATS_TOTAL = {
'installs': {
'metric': 'app_installs',
},
'ratings': {
'metric': 'apps_ratings',
},
'abuse_reports': {
'metric': 'apps_abuse_reports',
},
}
APP_STATS_TOTAL = {
'installs': {
'metric': 'app_installs',
},
'ratings': {
'metric': 'apps_ratings',
},
'abuse_reports': {
'metric': 'apps_abuse_reports',
},
}
def _get_monolith_data(stat, start, end, interval, dimensions):
    """Query Monolith for `stat` over [start, end] at `interval`.

    Returns {'objects': [...]} for single-line stats, or one key per
    line name for multi-line stats.  Raises ServiceUnavailable when
    Monolith cannot be reached and ParseError for unknown metrics.
    """
    # If stat has a 'lines' attribute, it's a multi-line graph. Do a
    # request for each item in 'lines' and compose them in a single
    # response.
    try:
        client = get_monolith_client()
    except requests.ConnectionError as e:
        log.info('Monolith connection error: {0}'.format(e))
        raise ServiceUnavailable
    def _coerce(data):
        # Apply the stat's optional per-key 'coerce' callbacks in place.
        for key, coerce in stat.get('coerce', {}).items():
            if data.get(key):
                data[key] = coerce(data[key])
        return data
    try:
        data = {}
        if 'lines' in stat:
            for line_name, line_dimension in stat['lines'].items():
                # Note: the caller's `dimensions` dict is mutated here.
                dimensions.update(line_dimension)
                data[line_name] = map(_coerce,
                                      client(stat['metric'], start, end,
                                             interval, **dimensions))
        else:
            data['objects'] = map(_coerce,
                                  client(stat['metric'], start, end, interval,
                                         **dimensions))
    except ValueError as e:
        # This occurs if monolith doesn't have our metric and we get an
        # elasticsearch SearchPhaseExecutionException error.
        log.info('Monolith ValueError for metric {0}: {1}'.format(
            stat['metric'], e))
        raise ParseError('Invalid metric at this time. Try again later.')
    return data
class GlobalStats(CORSMixin, APIView):
    """API view returning a global Marketplace metric from Monolith."""

    authentication_classes = (RestOAuthAuthentication,
                              RestSharedSecretAuthentication)
    cors_allowed_methods = ['get']
    permission_classes = [AllowAny]

    def get(self, request, metric):
        """Return time-series data for `metric` between start and end."""
        if metric not in STATS:
            raise http.Http404('No metric by that name.')
        stat = STATS[metric]
        # Perform form validation.
        form = StatsForm(request.GET)
        if not form.is_valid():
            exc = ParseError()
            exc.detail = {'detail': dict(form.errors.items())}
            raise exc
        qs = form.cleaned_data
        dimensions = {}
        if 'dimensions' in stat:
            for key, default in stat['dimensions'].items():
                val = request.GET.get(key, default)
                if val is not None:
                    # Reuse `val` (previously this re-fetched the same
                    # query-string value a second time).  None-valued
                    # dimensions are omitted so no facet filter is
                    # applied for them in Monolith.
                    dimensions[key] = val
        return Response(_get_monolith_data(stat, qs.get('start'),
                                           qs.get('end'), qs.get('interval'),
                                           dimensions))
class AppStats(CORSMixin, SlugOrIdMixin, ListAPIView):
    """API view returning a per-app metric from Monolith."""

    authentication_classes = (RestOAuthAuthentication,
                              RestSharedSecretAuthentication)
    cors_allowed_methods = ['get']
    permission_classes = [AnyOf(PublicStats, AllowAppOwner,
                                GroupPermission('Stats', 'View'))]
    queryset = Webapp.objects.all()
    slug_field = 'app_slug'

    def get(self, request, pk, metric):
        """Return time-series data for `metric` scoped to one app."""
        if metric not in APP_STATS:
            raise http.Http404('No metric by that name.')
        app = self.get_object()
        stat = APP_STATS[metric]
        # Perform form validation.
        form = StatsForm(request.GET)
        if not form.is_valid():
            exc = ParseError()
            exc.detail = {'detail': dict(form.errors.items())}
            raise exc
        qs = form.cleaned_data
        dimensions = {'app-id': app.id}
        if 'dimensions' in stat:
            for key, default in stat['dimensions'].items():
                val = request.GET.get(key, default)
                if val is not None:
                    # Reuse `val` (previously this re-fetched the same
                    # query-string value a second time).  None-valued
                    # dimensions are omitted so no facet filter is
                    # applied for them in Monolith.
                    dimensions[key] = val
        return Response(_get_monolith_data(stat, qs.get('start'),
                                           qs.get('end'), qs.get('interval'),
                                           dimensions))
class StatsTotalBase(object):
    """
    Shared helpers for the statistical-totals API views.
    """

    def get_client(self):
        """Return a Monolith client, or raise ServiceUnavailable."""
        try:
            client = get_monolith_client()
        except requests.ConnectionError as e:
            log.info('Monolith connection error: {0}'.format(e))
            raise ServiceUnavailable
        return client

    def get_query(self, metric, field, app_id=None):
        """Build the Elasticsearch statistical-facet query for `field`.

        When `app_id` is given, the facet is restricted to that app via
        a term facet filter.
        """
        facet = {'statistical': {'field': field}}
        if app_id:
            facet['facet_filter'] = {'term': {'app-id': app_id}}
        return {
            'query': {'match_all': {}},
            'facets': {metric: facet},
            'size': 0,
        }

    def process_response(self, resp, data):
        """Copy non-empty statistical facet results from `resp` into `data`."""
        for metric, facet in resp.get('facets', {}).items():
            # Facets with count == 0 carry junk values (such as
            # max == '-Infinity'), so skip them entirely.
            if facet.get('count', 0) <= 0:
                continue
            for field in ('max', 'mean', 'min', 'std_deviation',
                          'sum_of_squares', 'total', 'variance'):
                value = facet.get(field)
                if value is not None:
                    data[metric][field] = value
class GlobalStatsTotal(CORSMixin, APIView, StatsTotalBase):
    """Return statistical totals for every metric in STATS_TOTAL."""
    authentication_classes = (RestOAuthAuthentication,
                              RestSharedSecretAuthentication)
    cors_allowed_methods = ['get']
    permission_classes = [AllowAny]
    # NOTE(review): slug_field appears unused here (no SlugOrIdMixin in
    # the bases) -- confirm whether it can be dropped.
    slug_field = 'app_slug'
    def get(self, request):
        client = self.get_client()
        # Note: We have to do this as separate requests so that if one fails
        # the rest can still be returned.
        data = {}
        for metric, stat in STATS_TOTAL.items():
            data[metric] = {}
            query = self.get_query(metric, stat['metric'])
            try:
                resp = client.raw(query)
            except ValueError as e:
                log.info('Received value error from monolith client: %s' % e)
                continue
            self.process_response(resp, data)
        return Response(data)
class AppStatsTotal(CORSMixin, SlugOrIdMixin, ListAPIView, StatsTotalBase):
    """Return statistical totals for one app across APP_STATS_TOTAL."""
    authentication_classes = (RestOAuthAuthentication,
                              RestSharedSecretAuthentication)
    cors_allowed_methods = ['get']
    permission_classes = [AnyOf(PublicStats, AllowAppOwner,
                                GroupPermission('Stats', 'View'))]
    queryset = Webapp.objects.all()
    slug_field = 'app_slug'
    def get(self, request, pk):
        app = self.get_object()
        client = self.get_client()
        # Note: We have to do this as separate requests so that if one fails
        # the rest can still be returned.
        data = {}
        for metric, stat in APP_STATS_TOTAL.items():
            data[metric] = {}
            query = self.get_query(metric, stat['metric'], app.id)
            try:
                resp = client.raw(query)
            except ValueError as e:
                log.info('Received value error from monolith client: %s' % e)
                continue
            self.process_response(resp, data)
        return Response(data)
class TransactionAPI(CORSMixin, APIView):
    """
    API to query by transaction ID.

    Note: This is intended for Monolith to be able to associate a Solitude
    transaction with an app and price tier amount in USD.
    """
    authentication_classes = (RestOAuthAuthentication,
                              RestSharedSecretAuthentication)
    cors_allowed_methods = ['get']
    permission_classes = [GroupPermission('RevenueStats', 'View')]
    def get(self, request, transaction_id):
        """Return id, app_id, USD amount and type for one transaction."""
        try:
            contrib = (Contribution.objects.select_related('price_tier').
                       get(transaction_id=transaction_id))
        except Contribution.DoesNotExist:
            raise http.Http404('No transaction by that ID.')
        data = {
            'id': transaction_id,
            'app_id': contrib.addon_id,
            'amount_USD': unicode(contrib.price_tier.price),
            'type': mkt.CONTRIB_TYPES[contrib.type],
        }
        return Response(data)
| bsd-3-clause | 9fed36a72ac5fd728d41d034141da9ba | 31.664198 | 79 | 0.564366 | 4.140532 | false | false | false | false |
mozilla/zamboni | mkt/abuse/models.py | 1 | 1424 | import logging
from django.db import models
from mkt.extensions.models import Extension
from mkt.site.models import ModelBase
from mkt.users.models import UserProfile
from mkt.webapps.models import Webapp
from mkt.websites.models import Website
log = logging.getLogger('z.abuse')
class AbuseReport(ModelBase):
    """A user-filed abuse report against an app, user, website or extension."""
    # NULL if the reporter is anonymous.
    reporter = models.ForeignKey(UserProfile, null=True,
                                 blank=True, related_name='abuse_reported')
    # IP the report was filed from; defaults to a placeholder address.
    ip_address = models.CharField(max_length=255, default='0.0.0.0')
    # An abuse report can be for an app, a user, a website, or an extension.
    # Only one of these should be set.
    addon = models.ForeignKey(Webapp, null=True, related_name='abuse_reports')
    user = models.ForeignKey(UserProfile, null=True,
                             related_name='abuse_reports')
    website = models.ForeignKey(Website, null=True,
                                related_name='abuse_reports')
    extension = models.ForeignKey(Extension, null=True,
                                  related_name='abuse_reports')
    # Free-text body written by the reporter.
    message = models.TextField()
    # Whether a reviewer has already processed this report.
    read = models.BooleanField(default=False)
    class Meta:
        db_table = 'abuse_reports'
    @property
    def object(self):
        """Return whichever target is set (addon, user, website, extension)."""
        return self.addon or self.user or self.website or self.extension
# Add index on `created`.
AbuseReport._meta.get_field('created').db_index = True
| bsd-3-clause | 5e62d0aedb3dcdfd171249156b233af6 | 33.731707 | 78 | 0.66573 | 3.955556 | false | false | false | false |
mozilla/zamboni | mkt/versions/models.py | 7 | 12033 | # -*- coding: utf-8 -*-
import datetime
import json
import os
import django.dispatch
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
import commonware.log
import jinja2
import mkt
from mkt.files import utils
from mkt.files.models import cleanup_file, File
from mkt.site.decorators import use_master
from mkt.site.models import ManagerBase, ModelBase
from mkt.site.storage_utils import private_storage, public_storage
from mkt.site.utils import cached_property, sorted_groupby
from mkt.translations.fields import PurifiedField, save_signal
from mkt.versions.tasks import update_supported_locales_single
log = commonware.log.getLogger('z.versions')
class VersionManager(ManagerBase):
    """Manager for Version that hides soft-deleted rows by default."""
    def __init__(self, include_deleted=False):
        ManagerBase.__init__(self)
        # When False (the default), deleted versions are excluded.
        self.include_deleted = include_deleted
    def get_queryset(self):
        qs = super(VersionManager, self).get_queryset()
        if not self.include_deleted:
            qs = qs.exclude(deleted=True)
        return qs.transform(Version.transformer)
class Version(ModelBase):
    """A single uploaded version of a Webapp."""
    addon = models.ForeignKey('webapps.Webapp', related_name='versions')
    releasenotes = PurifiedField()
    approvalnotes = models.TextField(default='', null=True)
    version = models.CharField(max_length=255, default='0.1')
    # Review workflow timestamps.
    nomination = models.DateTimeField(null=True)
    reviewed = models.DateTimeField(null=True)
    has_info_request = models.BooleanField(default=False)
    has_editor_comment = models.BooleanField(default=False)
    # Soft-delete flag; see VersionManager and delete() below.
    deleted = models.BooleanField(default=False)
    supported_locales = models.CharField(max_length=255)
    # Developer name captured from the package at upload time.
    _developer_name = models.CharField(max_length=255, default='',
                                       editable=False)
    # Default manager hides deleted versions; with_deleted includes them.
    objects = VersionManager()
    with_deleted = VersionManager(include_deleted=True)
    class Meta(ModelBase.Meta):
        db_table = 'versions'
        ordering = ['-created', '-modified']
    def __init__(self, *args, **kwargs):
        # NOTE(review): this override adds nothing beyond the inherited
        # constructor and could likely be removed -- confirm.
        super(Version, self).__init__(*args, **kwargs)
    def __unicode__(self):
        # HTML-escaped so the version string is safe to embed in markup.
        return jinja2.escape(self.version)
    def save(self, *args, **kw):
        """Save the version; on first save, create its AppFeatures row.

        Note: unlike stock Django save(), this returns self.
        """
        creating = not self.id
        super(Version, self).save(*args, **kw)
        if creating:
            # To avoid circular import.
            from mkt.webapps.models import AppFeatures
            AppFeatures.objects.create(version=self)
        return self
    @classmethod
    def from_upload(cls, upload, addon, send_signal=True):
        """Create a Version (plus manifest and File) from an upload.

        Parses the upload, persists the manifest, schedules a
        supported-locales refresh, disables superseded files, deletes
        the raw upload, and optionally fires version_uploaded.
        """
        data = utils.parse_addon(upload, addon)
        # Truncate the developer name to fit the column.
        max_len = cls._meta.get_field_by_name('_developer_name')[0].max_length
        developer = data.get('developer_name', '')[:max_len]
        v = cls.objects.create(addon=addon, version=data['version'],
                               _developer_name=developer)
        log.info('New version: %r (%s) from %r' % (v, v.id, upload))
        # To avoid circular import.
        from mkt.webapps.models import AppManifest
        # Note: This must happen before we call `File.from_upload`.
        manifest = utils.WebAppParser().get_json_data(upload)
        AppManifest.objects.create(
            version=v, manifest=json.dumps(manifest))
        File.from_upload(upload, v, parse_data=data)
        # Update supported locales from manifest.
        # Note: This needs to happen after we call `File.from_upload`.
        update_supported_locales_single.apply_async(
            args=[addon.id], kwargs={'latest': True},
            eta=datetime.datetime.now() +
            datetime.timedelta(seconds=settings.NFS_LAG_DELAY)
        )
        v.disable_old_files()
        # After the upload has been copied, remove the upload.
        private_storage.delete(upload.path)
        if send_signal:
            version_uploaded.send(sender=v)
        # If packaged app and app is blocked, put in escalation queue.
        if addon.is_packaged and addon.status == mkt.STATUS_BLOCKED:
            # To avoid circular import.
            from mkt.reviewers.models import EscalationQueue
            EscalationQueue.objects.create(addon=addon)
        return v
@property
def path_prefix(self):
    # On-disk base directory for this addon's files.
    return os.path.join(settings.ADDONS_PATH, str(self.addon_id))
def delete(self):
    """Soft-delete this version: mark it deleted, disable/hide its file
    and fire the pre/post delete signals manually (the DB row is kept).
    """
    log.info(u'Version deleted: %r (%s)' % (self, self.id))
    mkt.log(mkt.LOG.DELETE_VERSION, self.addon, str(self.version))

    # Sent manually because we never call the ORM's delete().
    models.signals.pre_delete.send(sender=Version, instance=self)

    was_current = False
    if self == self.addon.current_version:
        was_current = True

    self.update(deleted=True)

    # Set file status to disabled.
    # NOTE(review): assumes the version has at least one file — confirm.
    f = self.all_files[0]
    f.update(status=mkt.STATUS_DISABLED, _signal=False)
    f.hide_disabled_file()

    # If version deleted was the current version and there now exists
    # another current_version, we need to call some extra methods to update
    # various bits for packaged apps.
    if was_current and self.addon.current_version:
        self.addon.update_name_from_package_manifest()
        self.addon.update_supported_locales()

    if self.addon.is_packaged:
        # Unlink signed packages if packaged app.
        public_storage.delete(f.signed_file_path)
        log.info(u'Unlinked file: %s' % f.signed_file_path)
        private_storage.delete(f.signed_reviewer_file_path)
        log.info(u'Unlinked file: %s' % f.signed_reviewer_file_path)

    models.signals.post_delete.send(sender=Version, instance=self)
@cached_property(writable=True)
def all_activity(self):
    """VersionLog entries for this version, oldest first.

    Writable cached property so `transformer_activity` can prime it in
    bulk.
    """
    from mkt.developers.models import VersionLog  # avoid circular import
    al = (VersionLog.objects.filter(version=self.id).order_by('created')
          .select_related('activity_log', 'version'))
    return al
@cached_property(writable=True)
def all_files(self):
    """Shortcut for list(self.files.all()). Heavily cached.

    Writable so `transformer` can prime it in bulk.
    """
    return list(self.files.all())
@property
def status(self):
    """Human-readable status strings, one per attached file.

    A deleted version reports a single STATUS_DELETED entry regardless
    of its files.
    """
    choices = mkt.STATUS_FILE_CHOICES
    if self.deleted:
        return [choices[mkt.STATUS_DELETED]]
    labels = []
    for attached in self.all_files:
        labels.append(choices[attached.status])
    return labels
@property
def statuses(self):
    """Raw (file id, status) pairs — unadulterated, good for an API."""
    pairs = []
    for attached in self.all_files:
        pairs.append((attached.id, attached.status))
    return pairs
def is_public(self):
    # To be public, a version must not be deleted, must belong to a public
    # addon, and all its attached files must have public status.
    try:
        return (not self.deleted and self.addon.is_public() and
                all(f.status == mkt.STATUS_PUBLIC for f in self.all_files))
    except ObjectDoesNotExist:
        # Related addon may be missing (e.g. mid-deletion): treat as not
        # public rather than raising.
        return False
@property
def has_files(self):
    # True when at least one File is attached (uses the cached list).
    return bool(self.all_files)
@classmethod
def transformer(cls, versions):
    """Attach all the files to the versions.

    Bulk-loads the File rows for *versions* in a single query and primes
    each version's `all_files` cached property, back-linking
    ``file.version`` so later access does not hit the DB.
    """
    # Guard first: previously `ids` was computed before this check, doing
    # useless work for an empty input.
    if not versions:
        return
    ids = set(v.id for v in versions)

    def rollup(xs):
        # Map version_id -> list of rows.
        groups = sorted_groupby(xs, 'version_id')
        return dict((k, list(vs)) for k, vs in groups)

    file_dict = rollup(File.objects.filter(version__in=ids))

    for version in versions:
        version.all_files = file_dict.get(version.id, [])
        for f in version.all_files:
            # Prime the reverse FK to avoid a query per file later.
            f.version = version
@classmethod
def transformer_activity(cls, versions):
    """Attach all the activity to the versions.

    Bulk-loads VersionLog rows for *versions* in a single query and
    primes each version's `all_activity` cached property.
    """
    # Guard first: previously the import and `ids` were computed before
    # this check, doing useless work for an empty input.
    if not versions:
        return
    from mkt.developers.models import VersionLog  # avoid circular import

    ids = set(v.id for v in versions)
    al = (VersionLog.objects.filter(version__in=ids).order_by('created')
          .select_related('activity_log', 'version'))

    def rollup(xs):
        # Map version_id -> list of rows.
        groups = sorted_groupby(xs, 'version_id')
        return dict((k, list(vs)) for k, vs in groups)

    al_dict = rollup(al)

    for version in versions:
        version.all_activity = al_dict.get(version.id, [])
def disable_old_files(self):
    """Disable still-pending files belonging to older, non-deleted
    versions of the same addon (called after a new upload)."""
    qs = File.objects.filter(version__addon=self.addon_id,
                             version__lt=self.id,
                             version__deleted=False,
                             status__in=[mkt.STATUS_PENDING])
    # Use File.update so signals are triggered.
    for f in qs:
        f.update(status=mkt.STATUS_DISABLED)
@property
def developer_name(self):
    # Read-only accessor: the value is set from the manifest at upload
    # time (see from_upload) and is not editable afterwards.
    return self._developer_name
@cached_property(writable=True)
def is_privileged(self):
    """
    Return whether the corresponding addon is privileged by looking at
    the manifest file.

    This is a cached property, to avoid going in the manifest more than
    once for a given instance. It's also directly writable do allow you to
    bypass the manifest fetching if you *know* your app is privileged or
    not already and want to pass the instance to some code that will use
    that property.
    """
    # Only packaged apps with at least one file can be privileged.
    if not self.addon.is_packaged or not self.all_files:
        return False
    data = self.addon.get_manifest_json(file_obj=self.all_files[0])
    return data.get('type') == 'privileged'
@cached_property
def manifest(self):
    """Parsed app manifest dict for this version ({} when missing)."""
    # To avoid circular import.
    from mkt.webapps.models import AppManifest
    try:
        manifest = self.manifest_json.manifest
    except AppManifest.DoesNotExist:
        manifest = None

    return json.loads(manifest) if manifest else {}
@use_master
def update_status(sender, instance, **kw):
    """Signal handler (post_save/post_delete on Version): re-derive the
    owning addon's status and current version.

    Skipped for 'raw' (fixture-loading) saves.
    """
    if not kw.get('raw'):
        try:
            instance.addon.reload()
            instance.addon.update_status()
            instance.addon.update_version()
        except models.ObjectDoesNotExist:
            # Addon may already be gone (e.g. cascading deletes); log and
            # move on. (A redundant trailing `pass` was removed here.)
            log.info('Got ObjectDoesNotExist processing Version change signal',
                     exc_info=True)
def inherit_nomination(sender, instance, **kw):
    """Inherit nomination date for new packaged app versions."""
    # Skip fixture loads.
    if kw.get('raw'):
        return
    addon = instance.addon
    if addon.is_packaged:
        # If prior version's file is pending, inherit nomination. Otherwise,
        # set nomination to now.
        last_ver = (Version.objects.filter(addon=addon)
                    .exclude(pk=instance.pk)
                    .order_by('-nomination'))
        if (last_ver.exists() and
                last_ver[0].all_files[0].status == mkt.STATUS_PENDING):
            # _signal=False avoids re-triggering the save handlers.
            instance.update(nomination=last_ver[0].nomination, _signal=False)
            log.debug('[Webapp:%s] Inheriting nomination from prior pending '
                      'version' % addon.id)
        elif (addon.status in mkt.WEBAPPS_APPROVED_STATUSES and
                not instance.nomination):
            log.debug('[Webapp:%s] Setting nomination date to now for new '
                      'version.' % addon.id)
            instance.update(nomination=datetime.datetime.now(), _signal=False)
def cleanup_version(sender, instance, **kw):
    """On delete of the version object call the file delete and signals."""
    # Skip fixture loads.
    if kw.get('raw'):
        return
    for file_ in instance.files.all():
        cleanup_file(file_.__class__, file_)
# Emitted by Version.from_upload once a new version has been processed.
version_uploaded = django.dispatch.Signal()

# Lifecycle wiring. Note `update_status` is connected with the same
# dispatch_uid on both post_save and post_delete; dispatch_uid only needs
# to be unique per signal, so this does not dedupe across the two.
models.signals.pre_save.connect(
    save_signal, sender=Version, dispatch_uid='version_translations')
models.signals.post_save.connect(
    update_status, sender=Version, dispatch_uid='version_update_status')
models.signals.post_save.connect(
    inherit_nomination, sender=Version,
    dispatch_uid='version_inherit_nomination')
models.signals.post_delete.connect(
    update_status, sender=Version, dispatch_uid='version_update_status')
models.signals.pre_delete.connect(
    cleanup_version, sender=Version, dispatch_uid='cleanup_version')
| bsd-3-clause | 8523b877569edb5564a55f84be68634c | 34.600592 | 79 | 0.62794 | 4.046066 | false | false | false | false |
mozilla/zamboni | mkt/developers/urls.py | 2 | 6621 | from django import http
from django.conf.urls import include, patterns, url
from django.core.urlresolvers import reverse
from rest_framework.routers import SimpleRouter
import mkt
from lib.misc.urlconf_decorator import decorate
from mkt.api.base import SubRouter
from mkt.developers.api_payments import (PaymentCheckViewSet,
PaymentDebugViewSet)
from mkt.developers.views import ContentRatingList
from mkt.inapp.views import InAppProductViewSet, StubInAppProductViewSet
from mkt.receipts.urls import test_patterns
from mkt.site.decorators import use_master
from . import views
from . import views_payments
# These will all start with /app/<app_slug>/
# Per-app developer URLs, mounted under /app/<app_slug>/ (see urlpatterns).
app_detail_patterns = patterns(
    '',
    # Redirect people who go to / instead of /edit.
    ('^$', lambda r, app_slug: http.HttpResponseRedirect(
        reverse('mkt.developers.apps.edit', args=[app_slug]))),
    url('^edit$', views.edit, name='mkt.developers.apps.edit'),
    url('^edit_(?P<section>[^/]+)(?:/(?P<editable>[^/]+))?$',
        views.addons_section, name='mkt.developers.apps.section'),
    url('^refresh_manifest$', views.refresh_manifest,
        name='mkt.developers.apps.refresh_manifest'),
    url('^ownership$', views.ownership, name='mkt.developers.apps.owner'),
    url('^enable$', views.enable, name='mkt.developers.apps.enable'),
    url('^delete$', views.delete, name='mkt.developers.apps.delete'),
    url('^disable$', views.disable, name='mkt.developers.apps.disable'),
    url('^status$', views.status, name='mkt.developers.apps.versions'),
    url('^blocklist$', views.blocklist, name='mkt.developers.apps.blocklist'),

    # IARC content ratings.
    url('^content_ratings$', views.content_ratings,
        name='mkt.developers.apps.ratings'),

    # TODO: '^versions/$'
    url('^versions/(?P<version_id>\d+)$', views.version_edit,
        name='mkt.developers.apps.versions.edit'),
    url('^versions/delete$', views.version_delete,
        name='mkt.developers.apps.versions.delete'),
    url('^versions/publicise$', views.version_publicise,
        name='mkt.developers.apps.versions.publicise'),

    # Payments.
    url('^payments/$', views_payments.payments,
        name='mkt.developers.apps.payments'),
    url('^payments/bango-portal$', views_payments.bango_portal_from_addon,
        name='mkt.developers.apps.payments.bango_portal_from_addon'),

    # Old stuff.
    url('^upload_preview$', views.upload_media, {'upload_type': 'preview'},
        name='mkt.developers.apps.upload_preview'),
    url('^upload_icon$', views.upload_media, {'upload_type': 'icon'},
        name='mkt.developers.apps.upload_icon'),
    url('^upload_image$', views.upload_media, {'upload_type': 'image'},
        name='mkt.developers.apps.upload_image'),

    url('^rmlocale$', views.remove_locale,
        name='mkt.developers.apps.remove-locale'),

    # Not apps-specific (yet).
    url('^file/(?P<file_id>[^/]+)/validation$', views.file_validation,
        name='mkt.developers.apps.file_validation'),
    url('^file/(?P<file_id>[^/]+)/validation.json$',
        views.json_file_validation,
        name='mkt.developers.apps.json_file_validation'),
    url('^upload$', views.upload_for_addon,
        name='mkt.developers.upload_for_addon'),
    url('^upload/(?P<uuid>[^/]+)$', views.upload_detail_for_addon,
        name='mkt.developers.upload_detail_for_addon'),
)
# These will all start with /ajax/app/<app_slug>/
# AJAX endpoints, mounted under /ajax/app/<app_slug>/ (see urlpatterns).
ajax_patterns = patterns(
    '',
    url('^image/status$', views.image_status,
        name='mkt.developers.apps.ajax.image.status'),
)
# All developer views are decorated with use_master: writes happen here,
# so always hit the master database.
urlpatterns = decorate(use_master, patterns(
    '',
    # Redirect people who have /apps/ instead of /app/.
    ('^apps/\d+/.*',
     lambda r: http.HttpResponseRedirect(r.path.replace('apps', 'app', 1))),

    # Redirect to /addons/ at the base.
    url('^submissions$', views.dashboard, name='mkt.developers.apps'),
    url('^upload$', views.upload_new, name='mkt.developers.upload'),
    url('^upload/([^/]+)(?:/([^/]+))?$', views.upload_detail,
        name='mkt.developers.upload_detail'),
    url('^standalone-hosted-upload$', views.standalone_hosted_upload,
        name='mkt.developers.standalone_hosted_upload'),
    url('^standalone-packaged-upload$', views.standalone_packaged_upload,
        name='mkt.developers.standalone_packaged_upload'),
    url('^standalone-(hosted|packaged)-upload/([^/]+)$',
        views.standalone_upload_detail,
        name='mkt.developers.standalone_upload_detail'),

    # Standalone tools.
    url('^validator/?$', views.validate_app,
        name='mkt.developers.validate_app'),
    url('^upload-manifest$', views.upload_manifest,
        name='mkt.developers.upload_manifest'),

    # URLs for a single app.
    url('^app/%s/' % mkt.APP_SLUG, include(app_detail_patterns)),
    url('^ajax/app/%s/' % mkt.APP_SLUG, include(ajax_patterns)),

    url('^terms$', views.terms, name='mkt.developers.apps.terms'),
    url('^terms/standalone$', views.terms_standalone,
        name='mkt.developers.apps.terms_standalone'),
    url('^api$', views.api, name='mkt.developers.apps.api'),

    # Set the developer Message of the Day.
    url('^motd$', views.motd, name='mkt.developers.motd'),

    # Developer docs
    url('docs/(?P<doc_name>[-_\w]+)?$',
        views.docs, name='mkt.developers.docs'),
    url('docs/(?P<doc_name>[-_\w]+)/(?P<doc_page>[-_\w]+)',
        views.docs, name='mkt.developers.docs'),
    url('^transactions/', views.transactions,
        name='mkt.developers.transactions'),
    url('^test/$', views.testing, name='mkt.developers.apps.testing'),
    url('^test/receipts/', include(test_patterns)),
))
# DRF routers for the payments / in-app product APIs.
api_payments = SimpleRouter()
api_payments.register(r'stub-in-app-products', StubInAppProductViewSet,
                      base_name='stub-in-app-products')

in_app_products = SimpleRouter()
in_app_products.register(r'in-app', InAppProductViewSet,
                         base_name='in-app-products')

# Nested under an app (SubRouter): payment status/debug endpoints.
app_payments = SubRouter()
app_payments.register(r'payments/status', PaymentCheckViewSet,
                      base_name='app-payments-status')
app_payments.register(r'payments/debug', PaymentDebugViewSet,
                      base_name='app-payments-debug')

payments_api_patterns = patterns(
    '',
    url(r'^payments/', include(api_payments.urls)),
    # In-app products are scoped by the app's origin (app://, http(s)://
    # or marketplace:).
    url(r'^payments/(?P<origin>(app://|https?://|marketplace:)[^/]+)/',
        include(in_app_products.urls)),
    url(r'^apps/app/', include(app_payments.urls)),
)

dev_api_patterns = patterns(
    '',
    url(r'^apps/app/(?P<pk>[^/<>"\']+)/content-ratings/',
        ContentRatingList.as_view(), name='content-ratings-list'),
)
| bsd-3-clause | a6f11f554b6ee185bca4b02d9291f96e | 39.87037 | 78 | 0.652923 | 3.571197 | false | false | false | false |
mozilla/zamboni | mkt/reviewers/forms.py | 5 | 8951 | import logging
from datetime import timedelta
from django import forms
from django.forms import widgets
import happyforms
from django.utils.translation import ugettext as _
from django.utils.translation import ugettext_lazy as _lazy
import mkt
from mkt.api.forms import CustomNullBooleanSelect
from mkt.reviewers.models import CannedResponse, SHOWCASE_TAG
from mkt.reviewers.utils import ReviewHelper
from mkt.search.forms import ApiSearchForm, SimpleSearchForm
from mkt.webapps.models import AddonDeviceType
log = logging.getLogger('z.reviewers.forms')
# We set 'any' here since we need to default this field
# to PUBLIC if not specified for consumer pages.
STATUS_CHOICES = [('any', _lazy(u'Any Status'))]
for status in mkt.WEBAPPS_UNLISTED_STATUSES + mkt.LISTED_STATUSES:
    STATUS_CHOICES.append((mkt.STATUS_CHOICES_API[status],
                           mkt.STATUS_CHOICES[status]))

# (value, label) pairs for the moderation-log action filter.
MODERATE_ACTION_FILTERS = (('', ''), ('approved', _lazy(u'Approved reviews')),
                           ('deleted', _lazy(u'Deleted reviews')))
# Maps the filter values above to mkt.LOG action constants.
MODERATE_ACTION_DICT = {'approved': mkt.LOG.APPROVE_REVIEW,
                        'deleted': mkt.LOG.DELETE_REVIEW}

# Device choices with a blank "Any Device" entry prepended.
COMBINED_DEVICE_CHOICES = [('', _lazy(u'Any Device'))] + [
    (dev.api_name, dev.name) for dev in mkt.DEVICE_TYPE_LIST]
class ModerateLogForm(happyforms.Form):
    """Date-range + action-type filter form for the moderation log."""
    start = forms.DateField(required=False,
                            label=_lazy(u'View entries between'))
    end = forms.DateField(required=False,
                          label=_lazy(u'and'))
    search = forms.ChoiceField(required=False, choices=MODERATE_ACTION_FILTERS,
                               label=_lazy(u'Filter by type/action'))

    def clean(self):
        data = self.cleaned_data
        # We want this to be inclusive of the end date.
        if 'end' in data and data['end']:
            data['end'] += timedelta(days=1)
        # Translate the filter slug into its mkt.LOG action constant.
        if 'search' in data and data['search']:
            data['search'] = MODERATE_ACTION_DICT[data['search']]
        return data
class ModerateLogDetailForm(happyforms.Form):
    """Hidden-field form carrying the 'undelete' action for a log entry."""
    action = forms.CharField(
        required=True,
        widget=forms.HiddenInput(attrs={'value': 'undelete', }))
class ReviewLogForm(happyforms.Form):
    """Date-range + free-text filter form for the review log."""
    start = forms.DateField(required=False,
                            label=_lazy(u'View entries between'))
    end = forms.DateField(required=False, label=_lazy(u'and'))
    search = forms.CharField(required=False, label=_lazy(u'containing'))

    def __init__(self, *args, **kw):
        super(ReviewLogForm, self).__init__(*args, **kw)

        # Set widget placeholders/sizes at runtime so the labels stay
        # translatable.
        # L10n: start, as in "start date"
        self.fields['start'].widget.attrs = {'placeholder': _('start'),
                                             'size': 10}

        # L10n: end, as in "end date"
        self.fields['end'].widget.attrs = {'size': 10, 'placeholder': _('end')}

        self.fields['search'].widget.attrs = {
            # L10n: Descript of what can be searched for.
            'placeholder': _lazy(u'app, reviewer, or comment'),
            'size': 30}

    def clean(self):
        data = self.cleaned_data
        # We want this to be inclusive of the end date.
        if 'end' in data and data['end']:
            data['end'] += timedelta(days=1)

        return data
class NonValidatingChoiceField(forms.ChoiceField):
    """A ChoiceField that doesn't validate."""

    def validate(self, value):
        # Intentionally accept any value: choices are populated
        # dynamically (see ReviewAppForm.__init__), so the static
        # choice check would reject legitimate input.
        pass
class TestedOnForm(happyforms.Form):
    """One "tested on" row: device type, device model and Firefox version."""
    device_type = NonValidatingChoiceField(
        choices=([('', 'Choose...')] +
                 [(v.name, v.name) for _, v in mkt.DEVICE_TYPES.items()]),
        label=_lazy(u'Device Type:'), required=False)
    device = forms.CharField(required=False, label=_lazy(u'Device:'))
    version = forms.CharField(required=False, label=_lazy(u'Firefox Version:'))


# Formset allowing multiple "tested on" rows per review.
TestedOnFormSet = forms.formsets.formset_factory(TestedOnForm)
class MOTDForm(happyforms.Form):
    """Form for setting the reviewer Message of the Day."""
    motd = forms.CharField(required=True, widget=widgets.Textarea())
class ReviewAppForm(happyforms.Form):
    """The main reviewer action form: comments, canned response, review
    action, device-type override and showcase nomination.

    Choices and initial values are derived from the ReviewHelper passed
    in via the `helper` kwarg.
    """
    comments = forms.CharField(widget=forms.Textarea(),
                               label=_lazy(u'Comments:'))
    canned_response = NonValidatingChoiceField(required=False)
    action = forms.ChoiceField(widget=forms.RadioSelect())
    device_override = forms.TypedMultipleChoiceField(
        choices=[(k, v.name) for k, v in mkt.DEVICE_TYPES.items()],
        coerce=int, label=_lazy(u'Device Type Override:'),
        widget=forms.CheckboxSelectMultiple, required=False)
    is_showcase = forms.BooleanField(
        required=False, label=_lazy(u'Nominate this app to be featured.'))

    def __init__(self, *args, **kw):
        self.helper = kw.pop('helper')
        super(ReviewAppForm, self).__init__(*args, **kw)

        # We're starting with an empty one, which will be hidden via CSS.
        canned_choices = [['', [('', _('Choose a canned response...'))]]]

        responses = CannedResponse.objects.all()

        # Loop through the actions.
        for k, action in self.helper.actions.iteritems():
            action_choices = [[c.response, c.name] for c in responses
                              if c.sort_group and k in c.sort_group.split(',')]

            # Add the group of responses to the canned_choices array.
            if action_choices:
                canned_choices.append([action['label'], action_choices])

        # Now, add everything not in a group.
        for r in responses:
            if not r.sort_group:
                canned_choices.append([r.response, r.name])

        self.fields['canned_response'].choices = canned_choices
        self.fields['action'].choices = [(k, v['label']) for k, v
                                         in self.helper.actions.items()]
        # Pre-check the device types currently attached to the app.
        device_types = AddonDeviceType.objects.filter(
            addon=self.helper.addon).values_list('device_type', flat=True)
        if device_types:
            self.initial['device_override'] = device_types
        # Pre-check the showcase box if the app is already tagged.
        self.initial['is_showcase'] = (
            self.helper.addon.tags.filter(tag_text=SHOWCASE_TAG).exists())

    def is_valid(self):
        # On success, hand the cleaned data to the helper so it can run
        # the chosen review action.
        result = super(ReviewAppForm, self).is_valid()
        if result:
            self.helper.set_data(self.cleaned_data)
        return result
def get_review_form(data, files, request=None, addon=None, version=None,
                    attachment_formset=None, testedon_formset=None):
    """Build a ReviewAppForm wired up with a ReviewHelper for the given
    app/version and its attachment / tested-on formsets."""
    review_helper = ReviewHelper(
        request=request, addon=addon, version=version,
        attachment_formset=attachment_formset,
        testedon_formset=testedon_formset)
    return ReviewAppForm(data=data, files=files, helper=review_helper)
def _search_form_status(cleaned_data):
    """Map the form's status slug to an mkt status constant.

    'any' means "no status filter" and yields None; unknown slugs fall
    back to STATUS_PENDING.
    """
    slug = cleaned_data['status']
    if slug != 'any':
        return mkt.STATUS_CHOICES_API_LOOKUP.get(slug, mkt.STATUS_PENDING)
    return None
class ApiReviewersSearchForm(ApiSearchForm):
    """Reviewer API search form: adds queue-specific filters (status,
    editor comment, info request, escalation, device) to the base app
    search form."""
    status = forms.ChoiceField(required=False, choices=STATUS_CHOICES,
                               label=_lazy(u'Status'))
    has_editor_comment = forms.NullBooleanField(
        required=False,
        label=_lazy(u'Has Editor Comment'),
        widget=CustomNullBooleanSelect)
    has_info_request = forms.NullBooleanField(
        required=False,
        label=_lazy(u'More Info Requested'),
        widget=CustomNullBooleanSelect)
    is_escalated = forms.NullBooleanField(
        required=False,
        label=_lazy(u'Escalated'),
        widget=CustomNullBooleanSelect)
    dev_and_device = forms.ChoiceField(
        required=False, choices=COMBINED_DEVICE_CHOICES,
        label=_lazy(u'Device'))

    def __init__(self, *args, **kwargs):
        super(ApiReviewersSearchForm, self).__init__(*args, **kwargs)

        # Mobile form, to render, expects choices from the Django field.
        BOOL_CHOICES = ((u'', _lazy('Unknown')),
                        (u'true', _lazy('Yes')),
                        (u'false', _lazy('No')))
        for field_name, field in self.fields.iteritems():
            if isinstance(field, forms.NullBooleanField):
                self.fields[field_name].choices = BOOL_CHOICES

    def clean_status(self):
        # Shared slug -> mkt status constant mapping.
        return _search_form_status(self.cleaned_data)

    def clean(self):
        # Transform dev_and_device into the separate dev/device parameters.
        # We then call super() so that it gets transformed into ids that ES
        # will accept.
        dev_and_device = self.cleaned_data.pop('dev_and_device', '').split('+')
        self.cleaned_data['dev'] = dev_and_device[0]
        if len(dev_and_device) > 1:
            self.cleaned_data['device'] = dev_and_device[1]
        return super(ApiReviewersSearchForm, self).clean()
class ReviewersWebsiteSearchForm(SimpleSearchForm):
    """Website review-queue search form with a status filter."""
    status = forms.ChoiceField(required=False, choices=STATUS_CHOICES,
                               label=_lazy(u'Status'))

    def clean_status(self):
        # Shared slug -> mkt status constant mapping.
        return _search_form_status(self.cleaned_data)
| bsd-3-clause | 513221adf1649b2a5b5016cb6e3f90da | 37.252137 | 79 | 0.62183 | 3.934505 | false | false | false | false |
mozilla/zamboni | mkt/comm/migrations/0002_auto_20150727_1017.py | 13 | 1816 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Auto-generated migration: wires comm threads/notes/attachments to
    webapps, versions and users, and adds the (_addon, _version) unique
    constraint. Do not edit by hand."""

    dependencies = [
        ('comm', '0001_initial'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('versions', '0001_initial'),
        ('webapps', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='communicationthread',
            name='_addon',
            field=models.ForeignKey(related_name='threads', db_column=b'addon_id', to='webapps.Webapp'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='communicationthread',
            name='_version',
            field=models.ForeignKey(related_name='threads', db_column=b'version_id', to='versions.Version', null=True),
            preserve_default=True,
        ),
        migrations.AlterUniqueTogether(
            name='communicationthread',
            unique_together=set([('_addon', '_version')]),
        ),
        migrations.AddField(
            model_name='communicationnote',
            name='author',
            field=models.ForeignKey(related_name='comm_notes', blank=True, to=settings.AUTH_USER_MODEL, null=True),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='communicationnote',
            name='thread',
            field=models.ForeignKey(related_name='notes', to='comm.CommunicationThread'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='commattachment',
            name='note',
            field=models.ForeignKey(related_name='attachments', to='comm.CommunicationNote'),
            preserve_default=True,
        ),
    ]
| bsd-3-clause | 7b0599d3bccdb9add63c78c9d708fb40 | 33.923077 | 119 | 0.587004 | 4.483951 | false | false | false | false |
mozilla/zamboni | mkt/comm/forms.py | 6 | 2760 | from django import forms
from django.conf import settings
from django.forms import ValidationError
import happyforms
from jinja2.filters import do_filesizeformat
from django.utils.translation import ugettext as _, ugettext_lazy as _lazy
from mkt.api.forms import SluggableModelChoiceField
from mkt.comm.models import CommunicationThread
from mkt.constants import comm
from mkt.extensions.models import Extension
from mkt.webapps.models import Webapp
class AppSlugForm(happyforms.Form):
    """Resolves an app by its slug (deleted apps included)."""
    app = SluggableModelChoiceField(queryset=Webapp.with_deleted.all(),
                                    sluggable_to_field_name='app_slug')
class ExtensionSlugForm(happyforms.Form):
    """Resolves an extension by its slug."""
    extension = SluggableModelChoiceField(queryset=Extension.objects.all(),
                                          sluggable_to_field_name='slug')
class CreateCommNoteForm(happyforms.Form):
    """Validates a new note: non-empty body plus an allowed note type
    (defaults to NO_ACTION when the type is omitted)."""
    body = forms.CharField(
        error_messages={'required': _lazy('Note body is empty.')})
    note_type = forms.TypedChoiceField(
        empty_value=comm.NO_ACTION,
        coerce=int, choices=[(x, x) for x in comm.API_NOTE_TYPE_ALLOWED],
        error_messages={'invalid_choice': _lazy(u'Invalid note type.')})
class CreateCommThreadForm(CreateCommNoteForm):
    """Validates creation of a comm thread: resolves the app by slug and
    the version by its version string (most recent match wins)."""
    app = SluggableModelChoiceField(queryset=Webapp.with_deleted.all(),
                                    sluggable_to_field_name='app_slug')
    version = forms.CharField()

    def clean_version(self):
        # NOTE(review): relies on 'app' having validated already (fields
        # are cleaned in declaration order) — confirm callers never hit
        # this with an invalid app.
        version_num = self.cleaned_data['version']
        versions = self.cleaned_data['app'].versions.filter(
            version=version_num).order_by('-created')
        if versions.exists():
            return versions[0]
        # Fix: interpolate AFTER translation so gettext extracts the
        # constant msgid 'Version %s does not exist' (interpolating
        # inside _() defeats message extraction and lookup).
        raise forms.ValidationError(
            _('Version %s does not exist') % version_num)
class CommAttachmentForm(happyforms.Form):
    """Single attachment upload with an optional description; rejects
    files larger than MAX_REVIEW_ATTACHMENT_UPLOAD_SIZE."""
    attachment = forms.FileField(label=_lazy(u'Attachment:'))
    description = forms.CharField(required=False, label=_lazy(u'Description:'))
    max_upload_size = settings.MAX_REVIEW_ATTACHMENT_UPLOAD_SIZE

    def clean(self, *args, **kwargs):
        data = super(CommAttachmentForm, self).clean(*args, **kwargs)
        attachment = data.get('attachment')
        max_size = self.max_upload_size

        if attachment and attachment.size > max_size:
            # L10n: error raised when review attachment is too large.
            # Fix: interpolate AFTER translation so gettext extracts the
            # constant msgid (interpolating inside _() defeats message
            # extraction and lookup).
            exc = _('Attachment exceeds maximum size of %s.') % (
                do_filesizeformat(self.max_upload_size))
            raise ValidationError(exc)

        return data
# Formset allowing several attachments per note.
CommAttachmentFormSet = forms.formsets.formset_factory(CommAttachmentForm)


class UnCCForm(happyforms.Form):
    """Resolves a communication thread by primary key (for un-CC'ing)."""
    pk = SluggableModelChoiceField(
        queryset=CommunicationThread.objects.all(),
        sluggable_to_field_name='id')
| bsd-3-clause | 668322fb85e7fc2685b111d04fa92cba | 36.297297 | 79 | 0.681159 | 4.08284 | false | false | false | false |
mozilla/zamboni | mkt/abuse/tests/test_views.py | 1 | 6887 | # -*- coding: utf-8 -*-
import json
from django.core.urlresolvers import reverse
from django.utils.http import urlencode
from nose.tools import eq_
from mkt.abuse.models import AbuseReport
from mkt.api.tests.test_oauth import RestOAuth
from mkt.constants.base import STATUS_PUBLIC
from mkt.extensions.models import Extension
from mkt.site.fixtures import fixture
from mkt.webapps.models import Webapp
from mkt.users.models import UserProfile
from mkt.websites.utils import website_factory
class BaseTestAbuseResource(object):
    """
    Setup for AbuseResource tests that require inheritance from TestCase.
    """
    # Subclasses set this to the URL-name prefix ('user', 'app', ...).
    resource_name = None

    def setUp(self):
        super(BaseTestAbuseResource, self).setUp()
        self.list_url = reverse('%s-abuse-list' % (self.resource_name,))
        # Fixed client IP, asserted against the saved report later.
        self.headers = {
            'REMOTE_ADDR': '48.151.623.42'
        }
class AbuseResourceTests(object):
    """
    Setup for AbuseResource tests that do not require inheritance from
    TestCase.

    Separate from BaseTestAbuseResource to ensure that test_* methods of this
    abstract base class are not discovered by the runner.
    """
    # Subclasses provide the base POST payload (includes the 'sprout'
    # honeypot field).
    default_data = None

    def _call(self, anonymous=False, data=None):
        """POST an abuse report and return (response, parsed body).

        `anonymous` posts via the unauthenticated client with the spam
        honeypot fields filled in."""
        post_data = self.default_data.copy()
        if anonymous:
            post_data['tuber'] = ''
            post_data['sprout'] = 'potato'
        if data:
            post_data.update(data)

        client = self.anon if anonymous else self.client
        res = client.post(self.list_url, data=urlencode(post_data),
                          content_type='application/x-www-form-urlencoded',
                          **self.headers)

        try:
            res_data = json.loads(res.content)

        # Pending #855817, some errors will return an empty response body.
        except ValueError:
            res_data = res.content

        return res, res_data

    def _test_success(self, res, data):
        """
        Tests common when looking to ensure complete successful responses.
        """
        eq_(201, res.status_code, res.content)

        fields = self.default_data.copy()

        del fields['sprout']

        if 'user' in fields:
            eq_(data.pop('user')['display_name'], self.user.display_name)
            del fields['user']
        if 'app' in fields:
            eq_(int(data.pop('app')['id']), self.app.pk)
            del fields['app']
        if 'website' in fields:
            eq_(int(data.pop('website')['id']), self.website.pk)
            del fields['website']
        if 'extension' in fields:
            eq_(int(data.pop('extension')['id']), self.extension.pk)
            del fields['extension']

        for name in fields.keys():
            eq_(fields[name], data[name])

        # The newest report must carry the message and the client IP.
        newest_report = AbuseReport.objects.order_by('-id')[0]
        eq_(newest_report.message, data['text'])
        eq_(newest_report.ip_address, self.headers['REMOTE_ADDR'])

    def test_get(self):
        # Listing reports is not allowed.
        res = self.client.get(self.list_url)
        eq_(res.status_code, 405)

    def test_send(self):
        res, data = self._call()
        self._test_success(res, data)
        assert 'display_name' in data['reporter']
        assert 'ip_address' not in data

    def test_send_anonymous(self):
        res, data = self._call(anonymous=True)
        self._test_success(res, data)
        eq_(data['reporter'], None)
        assert 'ip_address' not in data

    def test_send_potato(self):
        # Filled-in honeypot fields must be rejected.
        tuber_res, tuber_data = self._call(data={'tuber': 'potat-toh'},
                                           anonymous=True)
        potato_res, potato_data = self._call(data={'sprout': 'potat-toh'},
                                             anonymous=True)
        eq_(tuber_res.status_code, 400)
        eq_(potato_res.status_code, 400)
class TestUserAbuseResource(AbuseResourceTests, BaseTestAbuseResource,
                            RestOAuth):
    """Abuse reports filed against a user."""
    resource_name = 'user'

    def setUp(self):
        super(TestUserAbuseResource, self).setUp()
        self.user = UserProfile.objects.get(pk=2519)
        self.default_data = {
            'text': '@cvan is very abusive.',
            'sprout': 'potato',
            'user': self.user.pk
        }

    def test_invalid_user(self):
        res, data = self._call(data={'user': '-1'})
        eq_(400, res.status_code)
        assert 'Invalid' in data['user'][0]
class TestAppAbuseResource(AbuseResourceTests, BaseTestAbuseResource,
                           RestOAuth):
    """Abuse reports filed against an app (by pk or slug)."""
    fixtures = RestOAuth.fixtures + fixture('webapp_337141')
    resource_name = 'app'

    def setUp(self):
        super(TestAppAbuseResource, self).setUp()
        self.app = Webapp.objects.get(pk=337141)
        self.default_data = {
            'text': "@cvan's app is very abusive.",
            'sprout': 'potato',
            'app': self.app.pk
        }

    def test_invalid_app(self):
        res, data = self._call(data={'app': -1})
        eq_(400, res.status_code)
        assert 'does not exist' in data['app'][0]

    def test_slug_app(self):
        # Apps may also be referenced by slug.
        res, data = self._call(data={'app': self.app.app_slug})
        eq_(201, res.status_code)
class TestWebsiteAbuseResource(AbuseResourceTests, BaseTestAbuseResource,
                               RestOAuth):
    """Abuse reports filed against a website."""
    resource_name = 'website'

    def setUp(self):
        super(TestWebsiteAbuseResource, self).setUp()
        self.website = website_factory()
        self.default_data = {
            'text': 'This website is weird.',
            'sprout': 'potato',
            'website': self.website.pk
        }

    def test_invalid_website(self):
        res, data = self._call(data={'website': self.website.pk + 42})
        eq_(400, res.status_code)
        assert 'does not exist' in data['website'][0]
class TestExtensionAbuseResource(AbuseResourceTests, BaseTestAbuseResource,
                                 RestOAuth):
    """Abuse reports filed against an extension (by pk or slug)."""
    resource_name = 'extension'

    def setUp(self):
        super(TestExtensionAbuseResource, self).setUp()
        self.extension = Extension.objects.create(
            name=u'Test Êxtension')
        self.extension.update(status=STATUS_PUBLIC)
        self.default_data = {
            'text': 'Lies! This extension is an add-on!',
            'sprout': 'potato',
            'extension': self.extension.pk
        }

    def test_invalid_extension(self):
        res, data = self._call(data={'extension': -1})
        eq_(400, res.status_code)
        assert 'does not exist' in data['extension'][0]

    def test_deleted_extension(self):
        # Deleted extensions can no longer be reported.
        data = {'extension': self.extension.slug}
        self.extension.delete()
        res, data = self._call(data=data)
        eq_(400, res.status_code)
        assert 'does not exist' in data['extension'][0]

    def test_slug_extension(self):
        res, data = self._call(data={'extension': self.extension.slug})
        eq_(201, res.status_code)
| bsd-3-clause | 83c62412498e36f6810da97bdade5514 | 31.947368 | 77 | 0.588295 | 3.796031 | false | true | false | false |
mozilla/zamboni | mkt/feed/constants.py | 19 | 2421 | FEEDAPP_ICON = 'icon'
FEEDAPP_IMAGE = 'image'
FEEDAPP_DESC = 'description'
FEEDAPP_QUOTE = 'quote'
FEEDAPP_PREVIEW = 'preview'
FEEDAPP_TYPES = (
FEEDAPP_ICON,
FEEDAPP_IMAGE,
FEEDAPP_DESC,
FEEDAPP_QUOTE,
FEEDAPP_PREVIEW,
)
FEEDAPP_TYPE_CHOICES = [(c, c) for c in FEEDAPP_TYPES]
# Editorial Brand types, represented as a list of slug-like strings. L10n for
# these are handled on the client side.
BRAND_TYPES = (
'apps-for-albania',
'apps-for-argentina',
'apps-for-bangladesh',
'apps-for-brazil',
'apps-for-bulgaria',
'apps-for-chile',
'apps-for-china',
'apps-for-colombia',
'apps-for-costa-rica',
'apps-for-croatia',
'apps-for-czech-republic',
'apps-for-ecuador',
'apps-for-el-salvador',
'apps-for-france',
'apps-for-germany',
'apps-for-greece',
'apps-for-hungary',
'apps-for-india',
'apps-for-italy',
'apps-for-japan',
'apps-for-macedonia',
'apps-for-mexico',
'apps-for-montenegro',
'apps-for-nicaragua',
'apps-for-panama',
'apps-for-peru',
'apps-for-poland',
'apps-for-russia',
'apps-for-serbia',
'apps-for-south-africa',
'apps-for-spain',
'apps-for-uruguay',
'apps-for-venezuela',
'arts-entertainment',
'book',
'creativity',
'education',
'games',
'groundbreaking',
'health-fitness',
'hidden-gem',
'lifestyle',
'local-favorite',
'maps-navigation',
'music',
'mystery-app',
'news-weather',
'photo-video',
'shopping',
'social',
'sports',
'tools-time-savers',
'travel',
'work-business',
)
BRAND_TYPE_CHOICES = [(c, c) for c in BRAND_TYPES]
# Editorial Brand layouts
BRAND_GRID = 'grid'
BRAND_LIST = 'list'
BRAND_LAYOUTS = (
BRAND_GRID,
BRAND_LIST
)
BRAND_LAYOUT_CHOICES = [(c, c) for c in BRAND_LAYOUTS]
COLLECTION_PROMO = 'promo'
COLLECTION_LISTING = 'listing'
COLLECTION_TYPES = (
COLLECTION_PROMO,
COLLECTION_LISTING,
)
COLLECTION_TYPE_CHOICES = [(c, c) for c in COLLECTION_TYPES]
FEED_TYPE_APP = 'app'
FEED_TYPE_BRAND = 'brand'
FEED_TYPE_COLL = 'collection'
FEED_TYPE_SHELF = 'shelf'
# Number of apps we need to deserialize for the homepage/actual feed.
HOME_NUM_APPS_BRAND = 6
HOME_NUM_APPS_LISTING_COLL = 6
HOME_NUM_APPS_PROMO_COLL = 3
HOME_NUM_APPS_SHELF = 0
# Minimum number of apps needed after filtering to be displayed for colls.
MIN_APPS_COLLECTION = 3
| bsd-3-clause | 95390eddd28c2ac6745cd6216f8e3d63 | 22.057143 | 77 | 0.638579 | 2.684035 | false | false | false | false |
mozilla/zamboni | mkt/regions/middleware.py | 16 | 2541 | from django.conf import settings
import commonware.log
from django_statsd.clients import statsd
from lib.geoip import GeoIP
import mkt
from mkt.regions.utils import parse_region
log = commonware.log.getLogger('mkt.regions')
class RegionMiddleware(object):
    """Figure out the user's region and set request.REGION accordingly,
    storing it on the request.user if there is one.

    - Outside the API, we automatically set RESTOFWORLD.
    - In the API, it tries to find a valid region in the query parameters,
      additionnally falling back to GeoIP for API v1 (for later versions we
      never do GeoIP automatically).
    """

    def __init__(self):
        # One GeoIP client per middleware instance, configured via settings.
        self.geoip = GeoIP(settings)

    def store_region(self, request, user_region):
        # Record the region on the request and on mkt's region registry.
        request.REGION = user_region
        mkt.regions.set_region(user_region)

    def region_from_request(self, request):
        # GeoIP lookup keyed on the client's IP address; unknown addresses
        # fall back to RESTOFWORLD.
        client_ip = request.META.get('REMOTE_ADDR')
        looked_up = self.geoip.lookup(client_ip)
        log.info('Geodude lookup for {0} returned {1}'
                 .format(client_ip, looked_up))
        return parse_region(looked_up) or mkt.regions.RESTOFWORLD

    def process_request(self, request):
        known_regions = mkt.regions.REGION_LOOKUP
        chosen = mkt.regions.RESTOFWORLD

        # Non-API requests always get RESTOFWORLD.
        if not getattr(request, 'API', False):
            self.store_region(request, chosen)
            return

        # Try 'region' in POST/GET data first, if it's not there try geoip.
        url_region = request.GET.get('region')
        if url_region in known_regions:
            statsd.incr('z.regions.middleware.source.url')
            chosen = known_regions[url_region]
            log.info('Region {0} specified in URL; region set as {1}'
                     .format(url_region, chosen.slug))
        elif getattr(request, 'API_VERSION', None) == 1:
            # Fallback to GeoIP, but only for API version 1.
            statsd.incr('z.regions.middleware.source.geoip')
            chosen = self.region_from_request(request)
            log.info('Region not specified in URL; region set as {0}'
                     .format(chosen.slug))

        # Update the region on the user object if it changed.
        user = request.user
        if user.is_authenticated() and user.region != chosen.slug:
            user.region = chosen.slug
            user.save()

        # Persist the region on the request / local thread.
        self.store_region(request, chosen)
| bsd-3-clause | 75f5027ddad0650e878b47cd9b783d18 | 36.925373 | 79 | 0.639906 | 4.007886 | false | false | false | false |
mozilla/zamboni | mkt/access/acl.py | 5 | 3138 | import mkt
def match_rules(rules, app, action):
    """Return True if any rule in ``rules`` grants ``action`` on ``app``.

    ``rules`` is a comma-separated list of "app:action" pairs as found on
    Group. '*' is a wildcard for either side, and passing action='%' matches
    any action on a matching app.
    """
    for raw_rule in rules.split(','):
        rule_app, rule_action = raw_rule.split(':')
        app_matches = rule_app in ('*', app)
        action_matches = rule_action in ('*', action) or action == '%'
        if app_matches and action_matches:
            return True
    return False
def action_allowed(request, app, action):
    """
    Determines if the request user has permission to do a certain action.

    'Admin:%' is true if the user has any of:
    ('Admin:*', 'Admin:%s'%whatever, '*:*',) as rules.
    """
    groups = getattr(request, 'groups', ())
    return any(match_rules(group.rules, app, action) for group in groups)
def action_allowed_user(user, app, action):
    """Similar to action_allowed, but takes user instead of request."""
    return any(match_rules(group.rules, app, action)
               for group in user.groups.all())
def check_ownership(request, obj, require_owner=False, require_author=False,
                    ignore_disabled=False, admin=True):
    """
    A convenience function. Check if request.user has permissions
    for the object.

    Delegates to ``obj.check_ownership`` when the object provides one;
    objects without that method are never considered owned.
    """
    if not hasattr(obj, 'check_ownership'):
        return False
    return obj.check_ownership(request, require_owner=require_owner,
                               require_author=require_author,
                               ignore_disabled=ignore_disabled,
                               admin=admin)
def check_addon_ownership(request, addon, viewer=False, dev=False,
                          support=False, admin=True, ignore_disabled=False):
    """
    Check request.user's permissions for the addon.

    If user is an admin they can do anything.
    If the app is disabled only admins have permission.
    If they're an app owner they can do anything.
    dev=True checks that the user has an owner or developer role.
    viewer=True checks that the user has an owner, developer, or viewer role.
    support=True checks that the user has a support role.
    """
    # Anonymous users own nothing.
    if not request.user.is_authenticated():
        return False
    # Deleted apps can't be edited at all.
    if addon.is_deleted:
        return False
    # Users with 'Apps:Edit' can do anything.
    if admin and action_allowed(request, 'Apps', 'Edit'):
        return True
    # Only admins can edit banned addons.
    if addon.status == mkt.STATUS_DISABLED and not ignore_disabled:
        return False
    # Work out which author roles satisfy this check. Owners always qualify.
    accepted_roles = [mkt.AUTHOR_ROLE_OWNER]
    if dev:
        accepted_roles.append(mkt.AUTHOR_ROLE_DEV)
    elif viewer:
        # The viewer check also accepts dev and support roles.
        accepted_roles.extend([mkt.AUTHOR_ROLE_DEV, mkt.AUTHOR_ROLE_VIEWER,
                               mkt.AUTHOR_ROLE_SUPPORT])
    elif support:
        # Support can do support.
        accepted_roles.append(mkt.AUTHOR_ROLE_SUPPORT)
    return addon.authors.filter(pk=request.user.pk,
                                addonuser__role__in=accepted_roles).exists()
def check_reviewer(request):
    """Return whether the request user holds the 'Apps:Review' permission."""
    return action_allowed(request, 'Apps', 'Review')
| bsd-3-clause | 9c93519e5474039f69f0bfa1157528ea | 33.483516 | 77 | 0.603888 | 4.156291 | false | false | false | false |
mozilla/zamboni | mkt/webapps/decorators.py | 17 | 1686 | import functools
from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404
import commonware.log
from mkt.webapps.models import Webapp
log = commonware.log.getLogger('mkt.purchase')
def has_purchased(f):
    """
    If the addon is premium, require a purchase.

    Must be called after addon_view decorator.
    """
    @functools.wraps(f)
    def wrapper(request, addon, *args, **kw):
        needs_purchase = (addon.is_premium() and
                          not addon.has_purchased(request.user))
        if needs_purchase:
            log.info('Not purchased: %d' % addon.pk)
            raise PermissionDenied
        return f(request, addon, *args, **kw)
    return wrapper
def can_become_premium(f):
    """Check that the addon can become premium."""
    @functools.wraps(f)
    def wrapper(request, addon_id, addon, *args, **kw):
        if addon.can_become_premium():
            return f(request, addon_id, addon, *args, **kw)
        log.info('Cannot become premium: %d' % addon.pk)
        raise PermissionDenied
    return wrapper
def app_view(f, qs=Webapp.objects.all):
    """Decorator: resolve the ``app_slug`` URL kwarg into a Webapp (404 on
    miss) and pass the object to the view in its place."""
    @functools.wraps(f)
    def wrapper(request, app_slug, *args, **kw):
        webapp = get_object_or_404(qs(), app_slug=app_slug)
        return f(request, webapp, *args, **kw)
    return wrapper
def app_view_factory(qs):
    """
    Build an ``app_view`` decorator bound to the given queryset factory.

    Don't evaluate qs or the locale will get stuck on whatever the server
    starts with. The app_view() decorator will call qs with no arguments
    before doing anything, so lambdas are ok.

        GOOD: Webapp.objects.valid
        GOOD: lambda: Webapp.objects.valid().filter(...)
        BAD: Webapp.objects.valid()
    """
    return functools.partial(app_view, qs=qs)
| bsd-3-clause | 30b7d73499befe79c2d6bee2803ee530 | 28.068966 | 79 | 0.652432 | 3.594883 | false | false | false | false |
mozilla/zamboni | mkt/inapp/tests/test_serializers.py | 20 | 2406 | import mock
from nose.tools import eq_, ok_
from requests.exceptions import RequestException
from mkt.inapp.serializers import InAppProductSerializer
from .test_views import BaseInAppProductViewSetTests
class TestInAppProductSerializer(BaseInAppProductViewSetTests):
    """Validation tests for InAppProductSerializer, focused on logo_url."""

    def post(self, **kw):
        # Build a serializer; default to the valid fixture payload.
        if 'data' not in kw:
            kw['data'] = self.valid_in_app_product_data
        return InAppProductSerializer(**kw)

    def assert_logo_error(self, serializer):
        # Every logo failure surfaces as this single user-facing message.
        eq_(serializer.errors['logo_url'],
            ['Product logo must be a 64x64 image. '
             'Check that the URL is correct.'])

    def test_valid(self):
        self.mock_logo_url()
        serializer = self.post()
        ok_(serializer.is_valid())

    def test_no_logo_url(self):
        # logo_url is optional; omitting it should still validate.
        product_data = dict(self.valid_in_app_product_data)
        del product_data['logo_url']
        serializer = self.post(data=product_data)
        ok_(serializer.is_valid(), serializer.errors)

    def test_wrong_logo_size(self):
        # A 128px logo must be rejected (64x64 required).
        self.mock_logo_url(resource='logo-128.png')
        serializer = self.post()
        ok_(not serializer.is_valid())
        self.assert_logo_error(serializer)

    def test_bad_logo_url(self):
        # A network failure while fetching the logo reads as a logo error.
        self.mock_logo_url(url_side_effect=RequestException('404'))
        serializer = self.post()
        ok_(not serializer.is_valid())
        self.assert_logo_error(serializer)

    def test_logo_image_error(self):
        self.mock_logo_url()
        # Make PIL's verify() blow up to simulate a corrupt image.
        p = mock.patch('mkt.inapp.serializers.Image.open')
        opener = p.start()
        self.addCleanup(p.stop)
        img = mock.Mock()
        img.verify.side_effect = ValueError('not an image')
        opener.return_value = img
        serializer = self.post()
        ok_(not serializer.is_valid())
        self.assert_logo_error(serializer)

    def test_logo_url_to_big(self):
        self.mock_logo_url()
        serializer = self.post()
        # Shrink the allowed download size so the fixture logo exceeds it.
        with self.settings(MAX_INAPP_IMAGE_SIZE=2):
            ok_(not serializer.is_valid())
            self.assert_logo_error(serializer)

    def test_create_ftp_scheme(self):
        # Only http/https logo URLs are accepted.
        product_data = dict(self.valid_in_app_product_data)
        product_data['logo_url'] = 'ftp://example.com/awesome.png'
        serializer = self.post(data=product_data)
        ok_(not serializer.is_valid())
        eq_(serializer.errors['logo_url'],
            ['Scheme should be one of http, https.'])
| bsd-3-clause | 7c7f87bbba5a58a409f7fd34a5a51e44 | 32.416667 | 67 | 0.62926 | 3.701538 | false | true | false | false |
mozilla/zamboni | mkt/constants/search.py | 19 | 2850 | # These two dicts are mapping between language codes in zamboni and language
# analyzers in elasticsearch.
#
# Each key value of ANALYZER_MAP is language analyzer supported by
# elasticsearch. See
# http://www.elasticsearch.org/guide/reference/index-modules/analysis/lang-analyzer.html
#
# Each value of ANALYZER_MAP is a list which is supported by the key analyzer.
# All values are picked from AMO_LANGUAGES in settings.py.
#
# The rows commented out are that the language is not supported by
# elasticsearch yet. We should update it when elasticsearch supports new
# analyzer for the language.
SEARCH_ANALYZER_MAP = {
    # '': ['af'],  # Afrikaans
    'arabic': ['ar'],
    'bulgarian': ['bg'],
    'catalan': ['ca'],
    'czech': ['cs'],
    'danish': ['da'],
    'german': ['de'],
    'greek': ['el'],
    'english': ['en-us'],
    'spanish': ['es'],
    'basque': ['eu'],
    'persian': ['fa'],
    'finnish': ['fi'],
    'french': ['fr'],
    # '': ['ga-ie'],  # Gaelic - Ireland
    # '': ['he'],  # Hebrew
    'hungarian': ['hu'],
    'indonesian': ['id'],
    'italian': ['it'],
    'cjk': ['ja', 'ko'],
    # '': ['mn'],  # Mongolian
    'dutch': ['nl'],
    # Polish requires the Elasticsearch plugin:
    # https://github.com/elasticsearch/elasticsearch-analysis-stempel
    'polish': ['pl'],
    'brazilian': ['pt-br'],
    'portuguese': ['pt-pt'],
    'romanian': ['ro'],
    'russian': ['ru'],
    # '': ['sk'],  # Slovak
    # '': ['sl'],  # Slovenian
    # '': ['sq'],  # Albanian
    'swedish': ['sv-se'],
    # '': ['uk'],  # Ukrainian
    # '': ['vi'],  # Vietnamese
    'chinese': ['zh-cn', 'zh-tw'],
}

# Inverse of SEARCH_ANALYZER_MAP: each language code mapped to its analyzer.
SEARCH_LANGUAGE_TO_ANALYZER = {
    language: analyzer
    for analyzer, languages in SEARCH_ANALYZER_MAP.items()
    for language in languages
}
# List of analyzers that require a plugin. Depending on settings.ES_USE_PLUGINS
# we may disable or bypass these.
SEARCH_ANALYZER_PLUGINS = [
    'polish',
]

# Which stemmer to use for each language.
#
# Note: We use the keys of this dict for supported stop words, also, which is
# specified as, e.g., '_english_'.
STEMMER_MAP = {
    'arabic': 'arabic',
    'basque': 'basque',
    'brazilian': 'brazilian',
    'bulgarian': 'bulgarian',
    'catalan': 'catalan',
    'czech': 'czech',
    'danish': 'danish',
    'dutch': 'dutch',
    'english': 'minimal_english',
    'finnish': 'light_finish',  # Yes, this is misspelled in ES.
    'french': 'light_french',
    'german': 'light_german',
    'greek': 'greek',
    'hungarian': 'light_hungarian',
    'indonesian': 'indonesian',
    'italian': 'light_italian',
    'portuguese': 'light_portuguese',
    'romanian': 'romanian',
    'russian': 'russian',
    'spanish': 'light_spanish',
    'swedish': 'light_swedish',
}
| bsd-3-clause | 281a3d2a02ee3e5a47a3ae4cd71f27a0 | 29 | 88 | 0.592632 | 3.054662 | false | false | false | false |
mozilla/zamboni | mkt/translations/query.py | 5 | 5302 | import itertools
from django.conf import settings
from django.db import models
from django.db.models.sql.compiler import SQLCompiler
from django.db.models.sql.constants import LOUTER
from django.db.models.sql.datastructures import Join
from django.utils import translation as translation_utils
def order_by_translation(qs, fieldname):
    """
    Order the QuerySet by the translated field, honoring the current and
    fallback locales. Returns a new QuerySet.

    The model being sorted needs a get_fallback() classmethod that describes
    the fallback locale. get_fallback() can return a string or a Field.

    :param qs: the QuerySet to order.
    :param fieldname: name of the translated field; prefix with '-' for
        descending order, as with regular order_by().
    """
    # A leading '-' requests descending order.
    if fieldname.startswith('-'):
        desc = True
        fieldname = fieldname[1:]
    else:
        desc = False
    qs = qs.all()
    model = qs.model
    field = model._meta.get_field(fieldname)
    # Doing the manual joins is flying under Django's radar, so we need to make
    # sure the initial alias (the main table) is set up.
    if not qs.query.tables:
        qs.query.get_initial_alias()
    # Force two new joins against the translation table, without reusing any
    # aliases. We'll hook up the language fallbacks later.
    # Passing `reuse=set()` force new joins, and passing `nullable=True`
    # forces django to make LEFT OUTER JOINs (otherwise django, because we are
    # building the query manually, does not detect that an inner join would
    # remove results and happily simplifies the LEFT OUTER JOINs to
    # INNER JOINs)
    qs.query = qs.query.clone(TranslationQuery)
    # t1/t2 are the two aliases; SQLCompiler below joins one against the
    # active language and the other against the fallback locale.
    t1 = qs.query.join(
        Join(field.rel.to._meta.db_table, model._meta.db_table,
             None, LOUTER, field, True),
        reuse=set())
    t2 = qs.query.join(
        Join(field.rel.to._meta.db_table, model._meta.db_table,
             None, LOUTER, field, True),
        reuse=set())
    qs.query.translation_aliases = {field: (t1, t2)}
    f1, f2 = '%s.`localized_string`' % t1, '%s.`localized_string`' % t2
    name = 'translated_%s' % field.column
    # Prefer the current-locale string (t1) and fall back to t2's.
    ifnull = 'IFNULL(%s, %s)' % (f1, f2)
    prefix = '-' if desc else ''
    return qs.extra(select={name: ifnull},
                    where=['(%s IS NOT NULL OR %s IS NOT NULL)' % (f1, f2)],
                    order_by=[prefix + name])
class TranslationQuery(models.sql.query.Query):
    """
    Overrides sql.Query to hit our special compiler that knows how to JOIN
    translations.
    """

    def clone(self, klass=None, **kwargs):
        # Maintain translation_aliases across clones so derived querysets
        # (filter(), etc.) keep pointing at the manual translation joins.
        c = super(TranslationQuery, self).clone(klass, **kwargs)
        c.translation_aliases = self.translation_aliases
        return c

    def get_compiler(self, using=None, connection=None):
        # Call super to figure out using and connection, then swap in the
        # locale-aware SQLCompiler defined below.
        c = super(TranslationQuery, self).get_compiler(using, connection)
        return SQLCompiler(self, c.connection, c.using)
class SQLCompiler(SQLCompiler):
    """Overrides get_from_clause to LEFT JOIN translations with a locale."""

    def get_from_clause(self):
        # Temporarily remove translation tables from query.tables so Django
        # doesn't create joins against them.
        old_tables = list(self.query.tables)
        for table in itertools.chain(*self.query.translation_aliases.values()):
            if table in self.query.tables:
                self.query.tables.remove(table)
        joins, params = super(SQLCompiler, self).get_from_clause()
        # First parameter: the active language for the t1 join.
        # fallback could be a string locale or a model field.
        params.append(translation_utils.get_language())
        if hasattr(self.query.model, 'get_fallback'):
            fallback = self.query.model.get_fallback()
        else:
            fallback = settings.LANGUAGE_CODE
        # A Field fallback is compared in SQL directly (join_with_locale),
        # so only string fallbacks become query parameters.
        if not isinstance(fallback, models.Field):
            params.append(fallback)
        # Add our locale-aware joins. We're not respecting the table ordering
        # Django had in query.tables, but that seems to be ok.
        for field, aliases in self.query.translation_aliases.items():
            t1, t2 = aliases
            joins.append(self.join_with_locale(t1))
            joins.append(self.join_with_locale(t2, fallback))
        self.query.tables = old_tables
        return joins, params

    def join_with_locale(self, alias, fallback=None):
        # This is all lifted from the real sql.compiler.get_from_clause(),
        # except for the extra AND clause. Fun project: fix Django to use Q
        # objects here instead of a bunch of strings.
        qn = self.quote_name_unless_alias
        qn2 = self.connection.ops.quote_name
        join = self.query.alias_map[alias]
        lhs_col, rhs_col = join.join_cols[0]
        alias_str = '' if join.table_alias == join.table_name else (
            ' %s' % join.table_alias)
        # Field fallbacks compare against that column in SQL; string
        # fallbacks leave a '%s' placeholder for the bound parameter.
        if isinstance(fallback, models.Field):
            fallback_str = '%s.%s' % (qn(self.query.model._meta.db_table),
                                      qn(fallback.column))
        else:
            fallback_str = '%s'
        return ('%s %s%s ON (%s.%s = %s.%s AND %s.%s = %s)' %
                (join.join_type, qn(join.table_name), alias_str,
                 qn(join.parent_alias), qn2(lhs_col), qn(join.table_alias),
                 qn2(rhs_col), qn(join.table_alias), qn('locale'),
                 fallback_str))
| bsd-3-clause | 34af71d106180155a14e009be704b430 | 39.473282 | 79 | 0.631648 | 3.853198 | false | false | false | false |
mozilla/zamboni | mkt/site/helpers.py | 3 | 12157 | import json as jsonlib
import pytz
from urlparse import urljoin
from django.conf import settings
from django.core.urlresolvers import reverse
from django.forms import CheckboxInput
from django.template import defaultfilters
from django.utils import translation
from django.utils.encoding import smart_unicode
import commonware.log
import jinja2
from babel.support import Format
from jingo import register
# Needed to make sure our own |f filter overrides jingo's one.
from jingo import helpers # noqa
from jingo_minify import helpers as jingo_minify_helpers
from six import text_type
from django.utils.translation import ugettext as _
from mkt.translations.helpers import truncate
from mkt.translations.utils import get_locale_from_lang
from mkt.site.utils import env, append_tz
log = commonware.log.getLogger('z.mkt.site')
@jinja2.contextfunction
@register.function
def css(context, bundle, media=False, debug=None):
    """Delegate to jingo_minify's css helper for the given bundle.

    ?debug=true in the request gives you unminified CSS for testing on
    -dev/prod; otherwise TEMPLATE_DEBUG decides when ``debug`` is None.
    """
    use_debug = settings.TEMPLATE_DEBUG if debug is None else debug
    if context['request'].GET.get('debug'):
        use_debug = True
    return jingo_minify_helpers.css(bundle, media, use_debug)
@jinja2.contextfunction
@register.function
def js(context, bundle, debug=None, defer=False, async=False):
if debug is None:
debug = settings.TEMPLATE_DEBUG
# ?debug=true gives you unminified JS for testing on -dev/prod.
if context['request'].GET.get('debug'):
debug = True
return jingo_minify_helpers.js(bundle, debug, defer, async)
@register.function
def no_results():
# This prints a "No results found" message. That's all. Carry on.
t = env.get_template('site/helpers/no_results.html').render()
return jinja2.Markup(t)
@jinja2.contextfunction
@register.function
def market_button(context, product, receipt_type=None, classes=None):
    """Render the install/purchase button for ``product``.

    :param receipt_type: 'reviewer' uses the reviewer-flavoured manifest URL.
    :param classes: optional extra CSS classes for the button element.
    """
    request = context['request']
    purchased = False
    classes = (classes or []) + ['button', 'product']
    reviewer = receipt_type == 'reviewer'
    data_attrs = {'manifest_url': product.get_manifest_url(reviewer),
                  'is_packaged': jsonlib.dumps(product.is_packaged)}
    installed = None
    if request.user.is_authenticated():
        installed_set = request.user.installed_set
        installed = installed_set.filter(addon=product).exists()
    # Handle premium apps.
    if product.has_premium():
        # User has purchased app.
        purchased = (request.user.is_authenticated() and
                     product.pk in request.user.purchase_ids())
        # App authors are able to install their apps free of charge.
        if (not purchased and
                request.check_ownership(product, require_author=True)):
            purchased = True
    if installed or purchased or not product.has_premium():
        label = _('Install')
    else:
        # Unpurchased premium apps show the price tier as the label.
        label = product.get_tier_name()
    # Free apps and purchased apps get active install buttons.
    if not product.is_premium() or purchased:
        classes.append('install')
    c = dict(product=product, label=label, purchased=purchased,
             data_attrs=data_attrs, classes=' '.join(classes))
    t = env.get_template('site/helpers/webapp_button.html')
    return jinja2.Markup(t.render(c))
def product_as_dict(request, product, purchased=None, receipt_type=None,
                    src=''):
    """Serialize ``product`` into the dict consumed by the app buttons.

    :param purchased: whether the current user already bought the app;
        included only for authenticated users.
    :param receipt_type: 'reviewer' switches to the reviewer manifest URL
        and receipt-issue record URL.
    :param src: tracking source propagated into the record URL; falls back
        to the request's ``src`` query parameter.
    """
    receipt_url = (reverse('receipt.issue', args=[product.app_slug]) if
                   receipt_type else product.get_detail_url('record'))
    token_url = reverse('generate-reviewer-token', args=[product.app_slug])
    src = src or request.GET.get('src', '')
    reviewer = receipt_type == 'reviewer'
    # This is the only info. we need to render the app buttons on the
    # Reviewer Tools pages.
    ret = {
        'id': product.id,
        'name': product.name,
        'categories': product.categories,
        'manifest_url': product.get_manifest_url(reviewer),
        'recordUrl': helpers.urlparams(receipt_url, src=src),
        'tokenUrl': token_url,
        'is_packaged': product.is_packaged,
        'src': src
    }
    if product.premium:
        ret.update({
            'price': product.get_price(region=request.REGION.id),
            'priceLocale': product.get_price_locale(region=request.REGION.id),
        })
    if request.user.is_authenticated():
        ret['isPurchased'] = purchased
    # Jinja2 escape everything except this list so that bool is retained
    # for the JSON encoding.
    wl = ('categories', 'currencies', 'isPurchased', 'is_packaged', 'previews',
          'price', 'priceLocale')
    return dict([k, jinja2.escape(v) if k not in wl else v]
                for k, v in ret.items())
@register.function
@jinja2.contextfunction
def mkt_breadcrumbs(context, product=None, items=None, crumb_size=40,
                    add_default=True, cls=None):
    """
    Wrapper function for ``breadcrumbs``.

    **items**
        list of [(url, label)] to be inserted after Add-on.
    **product**
        Adds the App/Add-on name to the end of the trail. If items are
        specified then the App/Add-on will be linked.
    **add_default**
        Prepends trail back to home when True. Default is True.
    **crumb_size**
        Maximum label length before truncation. Default is 40.
    """
    if add_default:
        crumbs = [(reverse('home'), _('Home'))]
    else:
        crumbs = []
    if product:
        if items:
            url_ = product.get_detail_url()
        else:
            # The Product is the end of the trail.
            url_ = None
        crumbs += [(None, _('Apps')), (url_, product.name)]
    if items:
        crumbs.extend(items)
    # A trail containing only 'Home' is dropped entirely.
    if len(crumbs) == 1:
        crumbs = []
    # Truncate each label to crumb_size characters.
    crumbs = [(u, truncate(label, crumb_size)) for (u, label) in crumbs]
    t = env.get_template('site/helpers/breadcrumbs.html').render(
        {'breadcrumbs': crumbs, 'cls': cls})
    return jinja2.Markup(t)
@register.function
def form_field(field, label=None, tag='div', req=None, opt=False, hint=False,
               tooltip=False, some_html=False, cc_startswith=None, cc_for=None,
               cc_maxlength=None, grid=False, cls=None, validate=False):
    """Render a single form field via the simple_field.html template.

    ``label`` defaults to the field's own label; ``validate`` adds an HTML
    ``required`` attribute for required fields. The remaining keyword
    arguments are passed straight through to the template context.
    """
    attrs = {}
    # Add a `required` attribute so we can do form validation.
    # TODO(cvan): Write tests for kumar some day.
    if validate and field.field.required:
        attrs['required'] = ''
    c = dict(field=field, label=label or field.label, tag=tag, req=req,
             opt=opt, hint=hint, tooltip=tooltip, some_html=some_html,
             cc_startswith=cc_startswith, cc_for=cc_for,
             cc_maxlength=cc_maxlength, grid=grid, cls=cls, attrs=attrs)
    t = env.get_template('site/helpers/simple_field.html').render(c)
    return jinja2.Markup(t)
@register.filter
@jinja2.contextfilter
def timelabel(context, time):
t = env.get_template('site/helpers/timelabel.html').render(
{'time': time})
return jinja2.Markup(t)
@register.function
def mkt_admin_site_links():
return {
'addons': [
('Fake mail', reverse('zadmin.mail')),
],
'settings': [
('View site settings', reverse('zadmin.settings')),
('Django admin pages', reverse('zadmin.home')),
],
'tools': [
('View request environment', reverse('mkt.env')),
('View elasticsearch settings', reverse('zadmin.elastic')),
('Purge data from memcache', reverse('zadmin.memcache')),
('Generate error', reverse('zadmin.generate-error')),
('Site Status', reverse('mkt.monitor')),
('Force Manifest Re-validation',
reverse('zadmin.manifest_revalidation'))
],
}
@register.function
@jinja2.contextfunction
def get_doc_template(context, template):
    """Render a localizable document template for the request language,
    falling back to the en-US version when no localized template exists."""
    lang = getattr(context['request'], 'LANG', 'en-US')
    if lang in settings.AMO_LANGUAGES:
        try:
            localized = env.get_template('%s/%s.html' % (template, lang))
        except jinja2.TemplateNotFound:
            pass
        else:
            return jinja2.Markup(localized.render(context))
    fallback = env.get_template('%s/en-US.html' % template)
    return jinja2.Markup(fallback.render(context))
@register.function
@jinja2.contextfunction
def get_doc_path(context, path, extension):
    """
    Gets the path to a localizable document in the current language with
    fallback to en-US.

    Returns the localized file path when the request language is supported
    and the file exists; otherwise the en-US path.
    """
    lang = getattr(context['request'], 'LANG', 'en-US')
    fallback = '%s/en-US.%s' % (path, extension)
    if lang in settings.AMO_LANGUAGES:
        localized_file_path = '%s/%s.%s' % (path, lang, extension)
        try:
            with open(localized_file_path):
                return localized_file_path
        except IOError:
            return fallback
    # Bug fix: previously this returned None (implicitly) for unsupported
    # languages; always fall back to the en-US document instead, matching
    # get_doc_template() above.
    return fallback
@register.filter
def absolutify(url, site=None):
    """Takes a URL and prepends the SITE_URL (or ``site`` when given);
    URLs already starting with 'http' are returned untouched."""
    if url.startswith('http'):
        return url
    base = site or settings.SITE_URL
    return urljoin(base, url)
def _get_format():
lang = translation.get_language()
return Format(get_locale_from_lang(lang))
@register.filter
def babel_datetime(dt, format='medium'):
return _get_format().datetime(dt, format=format) if dt else ''
@register.filter
def babel_date(date, format='medium'):
return _get_format().date(date, format=format) if date else ''
@register.filter
def is_choice_field(value):
try:
return isinstance(value.field.widget, CheckboxInput)
except AttributeError:
pass
@register.filter
def numberfmt(num, format=None):
return _get_format().decimal(num, format)
@register.function
@jinja2.contextfunction
def page_title(context, title):
title = smart_unicode(title)
base_title = _('Firefox Marketplace')
return u'%s | %s' % (title, base_title)
@register.filter
def timesince(time):
    """Render a past datetime as a relative phrase; empty for falsy input."""
    if not time:
        return u''
    # L10n: relative time in the past, like '4 days ago'
    return _(u'{0} ago').format(defaultfilters.timesince(time))
@register.function
def url(viewname, *args, **kwargs):
"""Helper for Django's ``reverse`` in templates."""
host = kwargs.pop('host', '')
src = kwargs.pop('src', '')
url = '%s%s' % (host, reverse(viewname, args=args, kwargs=kwargs))
if src:
url = helpers.urlparams(url, src=src)
return url
@register.filter
def impala_paginator(pager):
t = env.get_template('site/impala_paginator.html')
return jinja2.Markup(t.render({'pager': pager}))
@register.filter
def json(s):
    """Template filter: serialize ``s`` to a JSON string."""
    return jsonlib.dumps(s)
@register.function
@jinja2.contextfunction
def media(context, url, key='MEDIA_URL'):
"""Get a MEDIA_URL link with a cache buster querystring."""
if 'BUILD_ID' in context:
build = context['BUILD_ID']
else:
if url.endswith('.js'):
build = context['BUILD_ID_JS']
elif url.endswith('.css'):
build = context['BUILD_ID_CSS']
else:
build = context['BUILD_ID_IMG']
return urljoin(context[key], helpers.urlparams(url, b=build))
@register.function
@jinja2.contextfunction
def static(context, url):
"""Get a STATIC_URL link with a cache buster querystring."""
return media(context, url, 'STATIC_URL')
@register.filter
def f(string, *args, **kwargs):
    """Format ``string`` with str.format semantics, coercing to text first.

    This overrides jingo.helpers.f to convert input to unicode if needed.
    This is needed because of
    https://github.com/jbalogh/jingo/pull/54#issuecomment-36728948
    """
    text = string if isinstance(string, text_type) else text_type(string)
    return text.format(*args, **kwargs)
def strip_controls(s):
    """
    Strips control characters from a string.

    Newline (10) and carriage return (13) are preserved, and Markup input
    stays Markup.
    """
    # Translation table of control characters (codepoints 0-31, minus \n \r).
    control_trans = dict((n, None) for n in xrange(32) if n not in [10, 13])
    rv = unicode(s).translate(control_trans)
    return jinja2.Markup(rv) if isinstance(s, jinja2.Markup) else rv
@register.function
@jinja2.contextfunction
def prefer_signin(context):
return 'has_logged_in' in context['request'].COOKIES
@register.filter
def isotime(t):
    """Date/Time format according to ISO 8601 (UTC); None for inputs that
    are not datetime-like."""
    if not hasattr(t, 'tzinfo'):
        return
    as_utc = append_tz(t).astimezone(pytz.utc)
    return as_utc.strftime("%Y-%m-%dT%H:%M:%SZ")
| bsd-3-clause | 98fcb7e95a87ffd69b5c8e0604c37986 | 29.85533 | 79 | 0.643169 | 3.665059 | false | false | false | false |
mozilla/zamboni | mkt/receipts/utils.py | 20 | 5995 | import calendar
import time
from urllib import urlencode
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse
import jwt
from nose.tools import nottest
from receipts.receipts import Receipt
from lib.crypto import receipt
from lib.utils import static_url
from mkt.access import acl
from mkt.site.helpers import absolutify
def get_uuid(app, user):
    """
    Returns a users uuid suitable for use in the receipt, by looking up
    the purchase table. Otherwise it just returns 'none'.

    :params app: the app record.
    :params user: the UserProfile record.
    """
    try:
        purchase = app.addonpurchase_set.get(user=user)
    except ObjectDoesNotExist:
        return 'none'
    return purchase.uuid
def sign(data):
    """
    Returns a signed receipt. If the seperate signing server is present then
    it will use that. Otherwise just uses JWT.

    :params data: the receipt data to be signed.
    """
    if not settings.SIGNING_SERVER_ACTIVE:
        # No signing server configured: sign locally with our RSA key.
        return jwt.encode(data, get_key(), u'RS512')
    return receipt.sign(data)
def create_receipt(webapp, user, uuid, flavour=None, contrib=None):
    """Build receipt data for ``webapp`` (see create_receipt_data) and
    return it signed."""
    data = create_receipt_data(webapp, user, uuid, flavour=flavour,
                               contrib=contrib)
    return sign(data)
def create_receipt_data(webapp, user, uuid, flavour=None, contrib=None):
    """
    Creates receipt data for use in payments.

    :params webapp: the app record.
    :params user: the UserProfile record.
    :params uuid: a uuid placed in the user field for this purchase.
    :params flavour: None, developer, inapp, or reviewer - the flavour
                     of receipt.
    :param: contrib: the Contribution object for the purchase.
    :returns: an unsigned receipt dict, ready to be passed to ``sign``.
    """
    # Unflavo(u)red receipts are for plain ol' vanilla app purchases.
    assert flavour in (None, 'developer', 'inapp', 'reviewer'), (
        'Invalid flavour: %s' % flavour)
    time_ = calendar.timegm(time.gmtime())
    typ = 'purchase-receipt'
    storedata = {'id': int(webapp.pk)}
    # Generate different receipts for reviewers or developers.
    expiry = time_ + settings.WEBAPPS_RECEIPT_EXPIRY_SECONDS
    verify = static_url('WEBAPPS_RECEIPT_URL')
    if flavour == 'inapp':
        if not contrib:
            raise ValueError(
                'a contribution object is required for in-app receipts')
        if not contrib.inapp_product:
            raise ValueError(
                'contribution {c} does not link to an in-app product'
                .format(c=contrib))
        # In-app receipts also record the contribution and product guid.
        storedata['contrib'] = int(contrib.pk)
        storedata['inapp_id'] = contrib.inapp_product.guid
    elif flavour in ('developer', 'reviewer'):
        if not (acl.action_allowed_user(user, 'Apps', 'Review') or
                webapp.has_author(user)):
            raise ValueError('User %s is not a reviewer or developer' %
                             user.pk)
        # Developer and reviewer receipts should expire after 24 hours.
        expiry = time_ + (60 * 60 * 24)
        typ = flavour + '-receipt'
        verify = absolutify(reverse('receipt.verify', args=[webapp.guid]))
    product = {'storedata': urlencode(storedata),
               # Packaged and hosted apps should have an origin. If there
               # isn't one, fallback to the SITE_URL.
               'url': webapp.origin or settings.SITE_URL}
    reissue = absolutify(reverse('receipt.reissue'))
    receipt = dict(exp=expiry, iat=time_,
                   iss=settings.SITE_URL, nbf=time_, product=product,
                   # TODO: This is temporary until detail pages get added.
                   # TODO: bug 1020997, bug 1020999
                   detail=absolutify(reissue),  # Currently this is a 404.
                   reissue=absolutify(reissue),
                   typ=typ,
                   user={'type': 'directed-identifier',
                         'value': uuid},
                   verify=verify)
    return receipt
def create_inapp_receipt(contrib):
    """
    Creates a receipt for an in-app purchase.

    :params contrib: the Contribution object for the purchase.
    """
    if not contrib.is_inapp_simulation():
        return create_receipt(contrib.addon, None, 'anonymous-user',
                              flavour='inapp', contrib=contrib)
    # Simulated purchases get a signed test receipt carrying the
    # contribution and product guid in storedata.
    storedata = {'id': 0, 'contrib': int(contrib.pk),
                 'inapp_id': contrib.inapp_product.guid}
    return create_test_receipt(settings.SITE_URL, 'ok', storedata=storedata)
def reissue_receipt(receipt):
    """
    Reissues an existing receipt by updating the timestamps and resigning
    it. This requires a well formatted receipt, but does not verify the
    receipt contents.

    :params receipt: an existing receipt
    """
    now = calendar.timegm(time.gmtime())
    decoded = Receipt(receipt).receipt_decoded()
    decoded.update({
        'exp': now + settings.WEBAPPS_RECEIPT_EXPIRY_SECONDS,
        'iat': now,
        'nbf': now,
    })
    return sign(decoded)
@nottest
def create_test_receipt(root, status, storedata=None):
    """Build and sign a short-lived (24 hour) 'test-receipt'.

    :param root: origin URL placed in product.url.
    :param status: outcome baked into the verify URL for the test endpoint.
    :param storedata: optional dict encoded into product.storedata;
        defaults to {'id': 0}.
    """
    if not storedata:
        storedata = {'id': 0}
    time_ = calendar.timegm(time.gmtime())
    detail = absolutify(reverse('receipt.test.details'))
    receipt = {
        'detail': absolutify(detail),
        'exp': time_ + (60 * 60 * 24),
        'iat': time_,
        'iss': settings.SITE_URL,
        'nbf': time_,
        'product': {
            'storedata': urlencode(storedata),
            'url': root,
        },
        'reissue': detail,
        'typ': 'test-receipt',
        'user': {
            'type': 'directed-identifier',
            'value': 'none'
        },
        'verify': absolutify(reverse('receipt.test.verify',
                                     kwargs={'status': status}))
    }
    return sign(receipt)
def get_key():
    """Return the RSA key (loaded via PyJWT) used for local receipt
    signing in ``sign``."""
    return jwt.rsa_load(settings.WEBAPPS_RECEIPT_KEY)
| bsd-3-clause | 04d280d7488d62a9a7be8130618fc508 | 32.305556 | 76 | 0.608841 | 3.931148 | false | false | false | false |
mozilla/zamboni | mkt/webapps/management/commands/list_packaged_apps.py | 18 | 1152 | from optparse import make_option
from django.core.exceptions import ObjectDoesNotExist
from django.core.management.base import BaseCommand
import mkt
from mkt.files.models import File
HELP = 'List all Marketplace packaged apps'

# Maps the user-facing --status choice names to mkt status constants.
statuses = {'pending': mkt.STATUS_PENDING,
            'public': mkt.STATUS_PUBLIC,
            'approved': mkt.STATUS_APPROVED,
            'rejected': mkt.STATUS_DISABLED}
class Command(BaseCommand):
    """
    Usage:

        python manage.py list_packaged_apps --status=<status>

    """
    option_list = BaseCommand.option_list + (
        make_option('--status',
                    choices=statuses.keys(),
                    help='Status of packaged-app files'),
    )
    help = HELP

    def handle(self, *args, **kwargs):
        # All files belonging to packaged apps, optionally narrowed by status.
        files = File.objects.filter(version__addon__is_packaged=True)
        if kwargs.get('status'):
            files = files.filter(status=statuses[kwargs['status']])

        filenames = []
        for f in files:
            try:
                filenames.append(f.file_path)
            except ObjectDoesNotExist:
                # file_path may traverse related rows that no longer exist;
                # skip such orphaned file records.
                pass

        print '\n'.join(filenames)
| bsd-3-clause | 6d03b1c6880f36c93d86bd940d973b77 | 23.510638 | 69 | 0.598958 | 4.314607 | false | false | false | false |
gitpython-developers/gitpython | git/objects/tree.py | 2 | 14228 | # tree.py
# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
#
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
from git.util import IterableList, join_path
import git.diff as git_diff
from git.util import to_bin_sha
from . import util
from .base import IndexObject, IndexObjUnion
from .blob import Blob
from .submodule.base import Submodule
from .fun import tree_entries_from_data, tree_to_stream
# typing -------------------------------------------------
from typing import (
Any,
Callable,
Dict,
Iterable,
Iterator,
List,
Tuple,
Type,
Union,
cast,
TYPE_CHECKING,
)
from git.types import PathLike, Literal
if TYPE_CHECKING:
from git.repo import Repo
from io import BytesIO
TreeCacheTup = Tuple[bytes, int, str]
TraversedTreeTup = Union[Tuple[Union["Tree", None], IndexObjUnion, Tuple["Submodule", "Submodule"]]]
# def is_tree_cache(inp: Tuple[bytes, int, str]) -> TypeGuard[TreeCacheTup]:
# return isinstance(inp[0], bytes) and isinstance(inp[1], int) and isinstance([inp], str)
# --------------------------------------------------------
cmp: Callable[[str, str], int] = lambda a, b: (a > b) - (a < b)
__all__ = ("TreeModifier", "Tree")
def git_cmp(t1: TreeCacheTup, t2: TreeCacheTup) -> int:
    """Three-way compare two tree-cache entries the way git orders tree entries.

    Entries are ordered by name: first by the common-length prefix, then —
    if the prefixes tie — the shorter name sorts first.

    :param t1: (binsha, mode, name) tuple
    :param t2: (binsha, mode, name) tuple
    :return: negative, zero or positive int, usable with ``cmp_to_key``
    """
    # Previously relied on a module-level ``cmp`` lambda (PEP 8 E731 pattern);
    # the three-way comparison is inlined here instead.
    name_a, name_b = t1[2], t2[2]
    common = min(len(name_a), len(name_b))
    prefix_a, prefix_b = name_a[:common], name_b[:common]
    if prefix_a != prefix_b:
        # Sign of the prefix comparison: -1 or +1.
        return (prefix_a > prefix_b) - (prefix_a < prefix_b)
    # Prefixes equal: order by length.
    return len(name_a) - len(name_b)
def merge_sort(a: List[TreeCacheTup], cmp: Callable[[TreeCacheTup, TreeCacheTup], int]) -> None:
    """Stably sort the list *a* in place using three-way comparison function *cmp*.

    The previous hand-rolled recursive merge sort is replaced by the built-in
    Timsort, which is also stable, so equal elements keep their relative order
    and results are identical — but the sort now runs in C.

    :param a: list to sort in place
    :param cmp: function of two elements returning a negative/zero/positive int
    :return: None (the list is mutated)
    """
    # Local import: this module does not otherwise import functools.
    from functools import cmp_to_key

    a.sort(key=cmp_to_key(cmp))
class TreeModifier(object):

    """A utility class providing methods to alter the underlying cache in a list-like fashion.

    Once all adjustments are complete, the _cache, which really is a reference to
    the cache of a tree, will be sorted. Assuring it will be in a serializable state"""

    __slots__ = "_cache"

    def __init__(self, cache: List[TreeCacheTup]) -> None:
        # Shared reference to the owning Tree's cache: mutations here are
        # visible to the tree immediately.
        self._cache = cache

    def _index_by_name(self, name: str) -> int:
        """:return: index of an item with name, or -1 if not found"""
        for i, t in enumerate(self._cache):
            if t[2] == name:
                return i
            # END found item
        # END for each item in cache
        return -1

    # { Interface
    def set_done(self) -> "TreeModifier":
        """Call this method once you are done modifying the tree information.
        It may be called several times, but be aware that each call will cause
        a sort operation

        :return self:"""
        # Sorting restores the ordering invariant Tree._serialize relies on.
        merge_sort(self._cache, git_cmp)
        return self

    # } END interface

    # { Mutators
    def add(self, sha: bytes, mode: int, name: str, force: bool = False) -> "TreeModifier":
        """Add the given item to the tree. If an item with the given name already
        exists, nothing will be done, but a ValueError will be raised if the
        sha and mode of the existing item do not match the one you add, unless
        force is True

        :param sha: The 20 or 40 byte sha of the item to add
        :param mode: int representing the stat compatible mode of the item
        :param force: If True, an item with your name and information will overwrite
            any existing item with the same name, no matter which information it has
        :return: self"""
        if "/" in name:
            raise ValueError("Name must not contain '/' characters")
        # The high 4 bits of the mode encode the entry type (tree/blob/commit).
        if (mode >> 12) not in Tree._map_id_to_type:
            raise ValueError("Invalid object type according to mode %o" % mode)

        sha = to_bin_sha(sha)
        index = self._index_by_name(name)

        item = (sha, mode, name)
        # assert is_tree_cache(item)

        if index == -1:
            self._cache.append(item)
        else:
            if force:
                self._cache[index] = item
            else:
                ex_item = self._cache[index]
                if ex_item[0] != sha or ex_item[1] != mode:
                    raise ValueError("Item %r existed with different properties" % name)
                # END handle mismatch
            # END handle force
        # END handle name exists
        return self

    def add_unchecked(self, binsha: bytes, mode: int, name: str) -> None:
        """Add the given item to the tree, its correctness is assumed, which
        puts the caller into responsibility to assure the input is correct.
        For more information on the parameters, see ``add``

        :param binsha: 20 byte binary sha"""
        assert isinstance(binsha, bytes) and isinstance(mode, int) and isinstance(name, str)
        tree_cache = (binsha, mode, name)

        self._cache.append(tree_cache)

    def __delitem__(self, name: str) -> None:
        """Deletes an item with the given name if it exists"""
        index = self._index_by_name(name)
        if index > -1:
            del self._cache[index]

    # } END mutators
class Tree(IndexObject, git_diff.Diffable, util.Traversable, util.Serializable):

    """Tree objects represent an ordered list of Blobs and other Trees.

    ``Tree as a list``::

        Access a specific blob using the
        tree['filename'] notation.

        You may as well access by index
        blob = tree[0]
    """

    type: Literal["tree"] = "tree"

    __slots__ = "_cache"

    # actual integer ids for comparison
    commit_id = 0o16  # equals stat.S_IFDIR | stat.S_IFLNK - a directory link
    blob_id = 0o10
    symlink_id = 0o12
    tree_id = 0o04

    # Maps the high 4 bits of an entry mode (mode >> 12) to the class used to
    # instantiate that entry when the cache is converted to objects.
    _map_id_to_type: Dict[int, Type[IndexObjUnion]] = {
        commit_id: Submodule,
        blob_id: Blob,
        symlink_id: Blob
        # tree id added once Tree is defined
    }

    def __init__(
        self,
        repo: "Repo",
        binsha: bytes,
        mode: int = tree_id << 12,
        path: Union[PathLike, None] = None,
    ):
        super(Tree, self).__init__(repo, binsha, mode, path)

    @classmethod
    def _get_intermediate_items(
        cls,
        index_object: IndexObjUnion,
    ) -> Union[Tuple["Tree", ...], Tuple[()]]:
        # Traversal hook: only tree entries have children to recurse into.
        if index_object.type == "tree":
            return tuple(index_object._iter_convert_to_object(index_object._cache))
        return ()

    def _set_cache_(self, attr: str) -> None:
        # Lazily load raw (binsha, mode, name) entries from the object
        # database on first access of self._cache.
        if attr == "_cache":
            # Set the data when we need it
            ostream = self.repo.odb.stream(self.binsha)
            self._cache: List[TreeCacheTup] = tree_entries_from_data(ostream.read())
        else:
            super(Tree, self)._set_cache_(attr)
        # END handle attribute

    def _iter_convert_to_object(self, iterable: Iterable[TreeCacheTup]) -> Iterator[IndexObjUnion]:
        """Iterable yields tuples of (binsha, mode, name), which will be converted
        to the respective object representation"""
        for binsha, mode, name in iterable:
            path = join_path(self.path, name)
            try:
                yield self._map_id_to_type[mode >> 12](self.repo, binsha, mode, path)
            except KeyError as e:
                raise TypeError("Unknown mode %o found in tree data for path '%s'" % (mode, path)) from e
        # END for each item

    def join(self, file: str) -> IndexObjUnion:
        """Find the named object in this tree's contents

        :return: ``git.Blob`` or ``git.Tree`` or ``git.Submodule``
        :raise KeyError: if given file or tree does not exist in tree"""
        msg = "Blob or Tree named %r not found"
        if "/" in file:
            # Multi-component path: walk down one tree level per token.
            tree = self
            item = self
            tokens = file.split("/")
            for i, token in enumerate(tokens):
                item = tree[token]
                if item.type == "tree":
                    tree = item
                else:
                    # safety assertion - blobs are at the end of the path
                    if i != len(tokens) - 1:
                        raise KeyError(msg % file)
                    return item
                # END handle item type
            # END for each token of split path
            if item == self:
                raise KeyError(msg % file)
            return item
        else:
            # Single name: linear scan of the raw cache avoids instantiating
            # objects for non-matching entries.
            for info in self._cache:
                if info[2] == file:  # [2] == name
                    return self._map_id_to_type[info[1] >> 12](
                        self.repo, info[0], info[1], join_path(self.path, info[2])
                    )
            # END for each obj
            raise KeyError(msg % file)
        # END handle long paths

    def __truediv__(self, file: str) -> IndexObjUnion:
        """For PY3 only"""
        return self.join(file)

    @property
    def trees(self) -> List["Tree"]:
        """:return: list(Tree, ...) list of trees directly below this tree"""
        return [i for i in self if i.type == "tree"]

    @property
    def blobs(self) -> List[Blob]:
        """:return: list(Blob, ...) list of blobs directly below this tree"""
        return [i for i in self if i.type == "blob"]

    @property
    def cache(self) -> TreeModifier:
        """
        :return: An object allowing to modify the internal cache. This can be used
            to change the tree's contents. When done, make sure you call ``set_done``
            on the tree modifier, or serialization behaviour will be incorrect.
            See the ``TreeModifier`` for more information on how to alter the cache"""
        return TreeModifier(self._cache)

    def traverse(
        self,  # type: ignore[override]
        predicate: Callable[[Union[IndexObjUnion, TraversedTreeTup], int], bool] = lambda i, d: True,
        prune: Callable[[Union[IndexObjUnion, TraversedTreeTup], int], bool] = lambda i, d: False,
        depth: int = -1,
        branch_first: bool = True,
        visit_once: bool = False,
        ignore_self: int = 1,
        as_edge: bool = False,
    ) -> Union[Iterator[IndexObjUnion], Iterator[TraversedTreeTup]]:
        """For documentation, see util.Traversable._traverse()
        Trees are set to visit_once = False to gain more performance in the traversal"""

        # """
        # # To typecheck instead of using cast.
        # import itertools
        # def is_tree_traversed(inp: Tuple) -> TypeGuard[Tuple[Iterator[Union['Tree', 'Blob', 'Submodule']]]]:
        #     return all(isinstance(x, (Blob, Tree, Submodule)) for x in inp[1])

        # ret = super(Tree, self).traverse(predicate, prune, depth, branch_first, visit_once, ignore_self)
        # ret_tup = itertools.tee(ret, 2)
        # assert is_tree_traversed(ret_tup), f"Type is {[type(x) for x in list(ret_tup[0])]}"
        # return ret_tup[0]"""

        return cast(
            Union[Iterator[IndexObjUnion], Iterator[TraversedTreeTup]],
            super(Tree, self)._traverse(
                predicate,
                prune,
                depth,  # type: ignore
                branch_first,
                visit_once,
                ignore_self,
            ),
        )

    def list_traverse(self, *args: Any, **kwargs: Any) -> IterableList[IndexObjUnion]:
        """
        :return: IterableList with the results of the traversal as produced by
            traverse()
            Tree -> IterableList[Union['Submodule', 'Tree', 'Blob']]
        """
        return super(Tree, self)._list_traverse(*args, **kwargs)

    # List protocol

    # NOTE: __getslice__ is only invoked by Python 2; kept for backwards
    # compatibility, dead code under Python 3 (slicing goes via __getitem__).
    def __getslice__(self, i: int, j: int) -> List[IndexObjUnion]:
        return list(self._iter_convert_to_object(self._cache[i:j]))

    def __iter__(self) -> Iterator[IndexObjUnion]:
        return self._iter_convert_to_object(self._cache)

    def __len__(self) -> int:
        return len(self._cache)

    def __getitem__(self, item: Union[str, int, slice]) -> IndexObjUnion:
        if isinstance(item, int):
            info = self._cache[item]
            return self._map_id_to_type[info[1] >> 12](self.repo, info[0], info[1], join_path(self.path, info[2]))

        if isinstance(item, str):
            # compatibility
            return self.join(item)
        # END index is basestring

        raise TypeError("Invalid index type: %r" % item)

    def __contains__(self, item: Union[IndexObjUnion, PathLike]) -> bool:
        if isinstance(item, IndexObject):
            # Membership by binary sha of the object.
            for info in self._cache:
                if item.binsha == info[0]:
                    return True
                # END compare sha
            # END for each entry
        # END handle item is index object

        # compatibility

        # treat item as repo-relative path
        else:
            path = self.path
            for info in self._cache:
                if item == join_path(path, info[2]):
                    return True
        # END for each item
        return False

    def __reversed__(self) -> Iterator[IndexObjUnion]:
        return reversed(self._iter_convert_to_object(self._cache))  # type: ignore

    def _serialize(self, stream: "BytesIO") -> "Tree":
        """Serialize this tree into the stream. Please note that we will assume
        our tree data to be in a sorted state. If this is not the case, serialization
        will not generate a correct tree representation as these are assumed to be sorted
        by algorithms"""
        tree_to_stream(self._cache, stream.write)
        return self

    def _deserialize(self, stream: "BytesIO") -> "Tree":
        self._cache = tree_entries_from_data(stream.read())
        return self
# END tree
# finalize map definition
# Tree could not reference itself inside its own class body, so the tree
# entry type is registered here, after the class object exists.
Tree._map_id_to_type[Tree.tree_id] = Tree
#
| bsd-3-clause | 95a14f50a2c6375c5622a2c34ee22d92 | 32.71564 | 114 | 0.569651 | 3.856872 | false | false | false | false |
gitpython-developers/gitpython | test/test_exc.py | 2 | 5255 | # -*- coding: utf-8 -*-
# test_exc.py
# Copyright (C) 2008, 2009, 2016 Michael Trier (mtrier@gmail.com) and contributors
#
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
import re
import ddt
from git.exc import (
InvalidGitRepositoryError,
WorkTreeRepositoryUnsupported,
NoSuchPathError,
CommandError,
GitCommandNotFound,
GitCommandError,
CheckoutError,
CacheError,
UnmergedEntriesError,
HookExecutionError,
RepositoryDirtyError,
)
from git.util import remove_password_if_present
from test.lib import TestBase
import itertools as itt
# Command-line fixtures: a mix of ASCII and non-ASCII argv tuples, including
# one with credentials embedded in a URL (which must be masked in output).
_cmd_argvs = (
    ("cmd",),
    ("θνιψοδε",),
    ("θνιψοδε", "normal", "argvs"),
    ("cmd", "ελληνικα", "args"),
    ("θνιψοδε", "κι", "αλλα", "strange", "args"),
    ("θνιψοδε", "κι", "αλλα", "non-unicode", "args"),
    (
        "git",
        "clone",
        "-v",
        "https://fakeuser:fakepassword1234@fakerepo.example.com/testrepo",
    ),
)
# (cause, expected substring of str(exc)) pairs exercising CommandError causes.
_causes_n_substrings = (
    (None, None),  # noqa: E241 @IgnorePep8
    (7, "exit code(7)"),  # noqa: E241 @IgnorePep8
    ("Some string", "'Some string'"),  # noqa: E241 @IgnorePep8
    ("παλιο string", "'παλιο string'"),  # noqa: E241 @IgnorePep8
    (Exception("An exc."), "Exception('An exc.')"),  # noqa: E241 @IgnorePep8
    (Exception("Κακια exc."), "Exception('Κακια exc.')"),  # noqa: E241 @IgnorePep8
    (object(), "<object object at "),  # noqa: E241 @IgnorePep8
)
# stdout/stderr payloads; None means "no stream attached".
_streams_n_substrings = (
    None,
    "steram",
    "ομορφο stream",
)
@ddt.ddt
class TExc(TestBase):
    """Tests stringification of the git exception hierarchy, including
    non-ASCII arguments and password masking in command lines."""

    def test_ExceptionsHaveBaseClass(self):
        from git.exc import GitError

        self.assertIsInstance(GitError(), Exception)

        exception_classes = [
            InvalidGitRepositoryError,
            WorkTreeRepositoryUnsupported,
            NoSuchPathError,
            CommandError,
            GitCommandNotFound,
            GitCommandError,
            CheckoutError,
            CacheError,
            UnmergedEntriesError,
            HookExecutionError,
            RepositoryDirtyError,
        ]
        for ex_class in exception_classes:
            self.assertTrue(issubclass(ex_class, GitError))

    @ddt.data(*list(itt.product(_cmd_argvs, _causes_n_substrings, _streams_n_substrings)))
    def test_CommandError_unicode(self, case):
        # Each case is (argv tuple, (cause, expected substring), stream text).
        argv, (cause, subs), stream = case
        cls = CommandError
        c = cls(argv, cause)
        s = str(c)

        self.assertIsNotNone(c._msg)
        self.assertIn(" cmdline: ", s)

        # Passwords must be masked before appearing in the message.
        for a in remove_password_if_present(argv):
            self.assertIn(a, s)

        if not cause:
            self.assertIn("failed!", s)
        else:
            self.assertIn(" failed due to:", s)

            if subs is not None:
                # Substrings (must) already contain opening `'`.
                subs = "(?<!')%s(?!')" % re.escape(subs)
                self.assertRegex(s, subs)

        if not stream:
            c = cls(argv, cause)
            s = str(c)
            self.assertNotIn(" stdout:", s)
            self.assertNotIn(" stderr:", s)
        else:
            # Third positional arg is stderr, fourth is stdout.
            c = cls(argv, cause, stream)
            s = str(c)
            self.assertIn(" stderr:", s)
            self.assertIn(stream, s)

            c = cls(argv, cause, None, stream)
            s = str(c)
            self.assertIn(" stdout:", s)
            self.assertIn(stream, s)

            c = cls(argv, cause, stream, stream + "no2")
            s = str(c)
            self.assertIn(" stderr:", s)
            self.assertIn(stream, s)
            self.assertIn(" stdout:", s)
            self.assertIn(stream + "no2", s)

    @ddt.data(
        (["cmd1"], None),
        (["cmd1"], "some cause"),
        (["cmd1"], Exception()),
    )
    def test_GitCommandNotFound(self, init_args):
        argv, cause = init_args
        c = GitCommandNotFound(argv, cause)
        s = str(c)

        self.assertIn(argv[0], s)
        if cause:
            self.assertIn(" not found due to: ", s)
            self.assertIn(str(cause), s)
        else:
            self.assertIn(" not found!", s)

    @ddt.data(
        (["cmd1"], None),
        (["cmd1"], "some cause"),
        (["cmd1", "https://fakeuser@fakerepo.example.com/testrepo"], Exception()),
    )
    def test_GitCommandError(self, init_args):
        argv, cause = init_args
        c = GitCommandError(argv, cause)
        s = str(c)

        for arg in remove_password_if_present(argv):
            self.assertIn(arg, s)
        if cause:
            self.assertIn(" failed due to: ", s)
            self.assertIn(str(cause), s)
        else:
            self.assertIn(" failed!", s)

    @ddt.data(
        (["cmd1"], None),
        (["cmd1"], "some cause"),
        (["cmd1"], Exception()),
    )
    def test_HookExecutionError(self, init_args):
        argv, cause = init_args
        c = HookExecutionError(argv, cause)
        s = str(c)

        self.assertIn(argv[0], s)
        if cause:
            self.assertTrue(s.startswith("Hook("), s)
            self.assertIn(str(cause), s)
        else:
            self.assertIn(" failed!", s)
| bsd-3-clause | 068e4b8e9dc5a1d72dc860a6d3382561 | 27.624309 | 90 | 0.545262 | 3.541353 | false | true | false | false |
gitpython-developers/gitpython | git/util.py | 2 | 39858 | # utils.py
# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
#
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
from abc import abstractmethod
import os.path as osp
from .compat import is_win
import contextlib
from functools import wraps
import getpass
import logging
import os
import platform
import subprocess
import re
import shutil
import stat
from sys import maxsize
import time
from urllib.parse import urlsplit, urlunsplit
import warnings
# from git.objects.util import Traversable
# typing ---------------------------------------------------------
from typing import (
Any,
AnyStr,
BinaryIO,
Callable,
Dict,
Generator,
IO,
Iterator,
List,
Optional,
Pattern,
Sequence,
Tuple,
TypeVar,
Union,
cast,
TYPE_CHECKING,
overload,
)
import pathlib
if TYPE_CHECKING:
from git.remote import Remote
from git.repo.base import Repo
from git.config import GitConfigParser, SectionConstraint
from git import Git
# from git.objects.base import IndexObject
from .types import (
Literal,
SupportsIndex,
Protocol,
runtime_checkable, # because behind py version guards
PathLike,
HSH_TD,
Total_TD,
Files_TD, # aliases
Has_id_attribute,
)
T_IterableObj = TypeVar("T_IterableObj", bound=Union["IterableObj", "Has_id_attribute"], covariant=True)
# So IterableList[Head] is subtype of IterableList[IterableObj]
# ---------------------------------------------------------------------
from gitdb.util import ( # NOQA @IgnorePep8
make_sha,
LockedFD, # @UnusedImport
file_contents_ro, # @UnusedImport
file_contents_ro_filepath, # @UnusedImport
LazyMixin, # @UnusedImport
to_hex_sha, # @UnusedImport
to_bin_sha, # @UnusedImport
bin_to_hex, # @UnusedImport
hex_to_bin, # @UnusedImport
)
# NOTE: Some of the unused imports might be used/imported by others.
# Handle once test-cases are back up and running.
# Most of these are unused here, but are for use by git-python modules so these
# don't see gitdb all the time. Flake of course doesn't like it.
__all__ = [
"stream_copy",
"join_path",
"to_native_path_linux",
"join_path_native",
"Stats",
"IndexFileSHA1Writer",
"IterableObj",
"IterableList",
"BlockingLockFile",
"LockFile",
"Actor",
"get_user_id",
"assure_directory_exists",
"RemoteProgress",
"CallableRemoteProgress",
"rmtree",
"unbare_repo",
"HIDE_WINDOWS_KNOWN_ERRORS",
]
log = logging.getLogger(__name__)
# types############################################################
#: We need an easy way to see if Appveyor TCs start failing,
#: so the errors marked with this var are considered "acknowledged" ones, awaiting remedy,
#: till then, we wish to hide them.
HIDE_WINDOWS_KNOWN_ERRORS = is_win and os.environ.get("HIDE_WINDOWS_KNOWN_ERRORS", True)
HIDE_WINDOWS_FREEZE_ERRORS = is_win and os.environ.get("HIDE_WINDOWS_FREEZE_ERRORS", True)
# { Utility Methods
T = TypeVar("T")
def unbare_repo(func: Callable[..., T]) -> Callable[..., T]:
    """Methods with this decorator raise InvalidGitRepositoryError if they
    encounter a bare repository"""
    from .exc import InvalidGitRepositoryError

    @wraps(func)
    def checked(self: "Remote", *args: Any, **kwargs: Any) -> T:
        # Only delegate when a working tree is present.
        if not self.repo.bare:
            return func(self, *args, **kwargs)
        raise InvalidGitRepositoryError("Method '%s' cannot operate on bare repositories" % func.__name__)

    return checked
@contextlib.contextmanager
def cwd(new_dir: PathLike) -> Generator[PathLike, None, None]:
    """Context manager: change the working directory to *new_dir* and restore
    the previous one on exit, even when the body raises."""
    previous_dir = os.getcwd()
    os.chdir(new_dir)
    try:
        yield new_dir
    finally:
        # Always return to where we started.
        os.chdir(previous_dir)
def rmtree(path: PathLike) -> None:
    """Remove the given recursively.

    :note: we use shutil rmtree but adjust its behaviour to see whether files that
        couldn't be deleted are read-only. Windows will not remove them in that case"""

    def onerror(func: Callable, path: PathLike, exc_info: str) -> None:
        # Is the error an access error ?
        # Make the entry writable and retry — Windows refuses to delete
        # read-only files.
        os.chmod(path, stat.S_IWUSR)

        try:
            func(path)  # Will scream if still not possible to delete.
        except Exception as ex:
            if HIDE_WINDOWS_KNOWN_ERRORS:
                # Acknowledged Windows CI failure: skip instead of failing
                # the test suite (see HIDE_WINDOWS_KNOWN_ERRORS above).
                from unittest import SkipTest

                raise SkipTest("FIXME: fails with: PermissionError\n {}".format(ex)) from ex
            raise

    return shutil.rmtree(path, False, onerror)
def rmfile(path: PathLike) -> None:
    """Ensure file deleted also on *Windows* where read-only files need special treatment."""
    if osp.isfile(path):
        if is_win:
            # Clear the read-only attribute first, else os.remove fails.
            os.chmod(path, 0o777)
        os.remove(path)
def stream_copy(source: BinaryIO, destination: BinaryIO, chunk_size: int = 512 * 1024) -> int:
    """Copy all data from *source* into *destination* in chunks of at most
    *chunk_size* bytes.

    :return: total number of bytes written"""
    bytes_written = 0
    chunk = source.read(chunk_size)
    while True:
        destination.write(chunk)
        bytes_written += len(chunk)
        if len(chunk) < chunk_size:
            # A short read signals the source is exhausted.
            break
        chunk = source.read(chunk_size)
    return bytes_written
def join_path(a: PathLike, *p: PathLike) -> PathLike:
    """Join path tokens similar to osp.join, but always using '/' separators,
    never backslashes (even on Windows)."""
    joined = str(a)
    for token in map(str, p):
        if not token:
            # Empty tokens contribute nothing.
            continue
        if token.startswith("/"):
            joined += token[1:]
        elif joined == "" or joined.endswith("/"):
            joined += token
        else:
            joined += "/" + token
    # END for each path token to add
    return joined
# Platform-specific slash conversion helpers; the right implementation is
# bound to ``to_native_path`` once, at import time.
if is_win:

    def to_native_path_windows(path: PathLike) -> PathLike:
        path = str(path)
        return path.replace("/", "\\")

    def to_native_path_linux(path: PathLike) -> str:
        path = str(path)
        return path.replace("\\", "/")

    __all__.append("to_native_path_windows")
    to_native_path = to_native_path_windows
else:

    # no need for any work on linux
    def to_native_path_linux(path: PathLike) -> str:
        return str(path)

    to_native_path = to_native_path_linux
def join_path_native(a: PathLike, *p: PathLike) -> PathLike:
    """
    As join path, but makes sure an OS native path is returned. This is only
    needed to play it safe on my dear windows and to assure nice paths that only
    use '\'"""
    # Join with '/' first, then convert to the platform's native separators.
    return to_native_path(join_path(a, *p))
def assure_directory_exists(path: PathLike, is_file: bool = False) -> bool:
    """Assure that the directory pointed to by path exists.

    :param is_file: If True, path is assumed to be a file and handled correctly.
        Otherwise it must be a directory
    :return: True if the directory was created, False if it already existed"""
    # When given a file path, operate on its containing directory.
    target = osp.dirname(path) if is_file else path
    if osp.isdir(target):
        return False
    os.makedirs(target, exist_ok=True)
    return True
def _get_exe_extensions() -> Sequence[str]:
    """Return the upper-cased executable extensions honoured on this platform.

    Uses the Windows ``PATHEXT`` environment variable when set, falling back
    to the standard Windows defaults, or to no extensions on POSIX.
    """
    PATHEXT = os.environ.get("PATHEXT", None)
    if PATHEXT:
        return tuple(p.upper() for p in PATHEXT.split(os.pathsep))
    # BUGFIX: the fallback previously was ('.BAT', 'COM', '.EXE') — missing
    # the dot on .COM, so py_where built candidates like 'gitCOM' — and the
    # POSIX fallback was the string '' instead of an empty tuple.
    return (".BAT", ".COM", ".EXE") if is_win else ()
def py_where(program: str, path: Optional[PathLike] = None) -> List[str]:
    """Pure-python lookup of *program* on *path* (default: $PATH), aware of
    Windows executable extensions.

    :return: list of full paths of matching executables, in search order."""
    # From: http://stackoverflow.com/a/377028/548792
    winprog_exts = _get_exe_extensions()

    def is_exec(fpath: str) -> bool:
        # A hit must be an existing, executable file; on Windows it must also
        # carry one of the PATHEXT extensions (when any are configured).
        return (
            osp.isfile(fpath)
            and os.access(fpath, os.X_OK)
            and (os.name != "nt" or not winprog_exts or any(fpath.upper().endswith(ext) for ext in winprog_exts))
        )

    progs = []
    if not path:
        path = os.environ["PATH"]
    for folder in str(path).split(os.pathsep):
        folder = folder.strip('"')
        if folder:
            exe_path = osp.join(folder, program)
            # Try the bare name plus each candidate extension.
            for f in [exe_path] + ["%s%s" % (exe_path, e) for e in winprog_exts]:
                if is_exec(f):
                    progs.append(f)
    return progs
def _cygexpath(drive: Optional[str], path: str) -> str:
    """Convert a (drive, path) pair into a Cygwin-style POSIX path string."""
    if osp.isabs(path) and not drive:
        # Invoked from `cygpath()` directly with `D:Apps\123`?
        # It's an error, leave it alone just slashes)
        p = path  # convert to str if AnyPath given
    else:
        p = path and osp.normpath(osp.expandvars(osp.expanduser(path)))
        if osp.isabs(p):
            if drive:
                # Confusing, maybe a remote system should expand vars.
                p = path
            else:
                p = cygpath(p)
        elif drive:
            # Relative path under an explicit drive letter.
            p = "/proc/cygdrive/%s/%s" % (drive.lower(), p)
    p_str = str(p)  # ensure it is a str and not AnyPath
    return p_str.replace("\\", "/")
#: Ordered (regex, converter, recurse) triples tried in turn by :func:`cygpath`.
_cygpath_parsers: Tuple[Tuple[Pattern[str], Callable, bool], ...] = (
    # See: https://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx
    # and: https://www.cygwin.com/cygwin-ug-net/using.html#unc-paths
    (
        re.compile(r"\\\\\?\\UNC\\([^\\]+)\\([^\\]+)(?:\\(.*))?"),
        (lambda server, share, rest_path: "//%s/%s/%s" % (server, share, rest_path.replace("\\", "/"))),
        False,
    ),
    (re.compile(r"\\\\\?\\(\w):[/\\](.*)"), (_cygexpath), False),
    (re.compile(r"(\w):[/\\](.*)"), (_cygexpath), False),
    (re.compile(r"file:(.*)", re.I), (lambda rest_path: rest_path), True),
    (re.compile(r"(\w{2,}:.*)"), (lambda url: url), False),  # remote URL, do nothing
)
def cygpath(path: str) -> str:
    """Use :meth:`git.cmd.Git.polish_url()` instead, that works on any environment."""
    path = str(path)  # ensure is str and not AnyPath.
    # Fix to use Paths when 3.5 dropped. or to be just str if only for urls?
    # Paths that already look POSIX/cygwin are passed through untouched.
    if not path.startswith(("/cygdrive", "//", "/proc/cygdrive")):
        for regex, parser, recurse in _cygpath_parsers:
            match = regex.match(path)
            if match:
                path = parser(*match.groups())
                if recurse:
                    # e.g. a file: URL whose payload needs another pass.
                    path = cygpath(path)
                break
        else:
            path = _cygexpath(None, path)

    return path
# Matches '/cygdrive/<letter>/...' and '/proc/cygdrive/<letter>/...'.
_decygpath_regex = re.compile(r"(?:/proc)?/cygdrive/(\w)(/.*)?")


def decygpath(path: PathLike) -> str:
    """Convert a Cygwin POSIX path like ``/cygdrive/c/foo`` back to a Windows
    drive path, with backslash separators."""
    path = str(path)
    m = _decygpath_regex.match(path)
    if m:
        drive, rest_path = m.groups()
        path = "%s:%s" % (drive.upper(), rest_path or "")

    return path.replace("/", "\\")
#: Store boolean flags denoting if a specific Git executable
#: is from a Cygwin installation (since `cache_lru()` unsupported on PY2).
_is_cygwin_cache: Dict[str, Optional[bool]] = {}


@overload
def is_cygwin_git(git_executable: None) -> Literal[False]:
    ...


@overload
def is_cygwin_git(git_executable: PathLike) -> bool:
    ...


def is_cygwin_git(git_executable: Union[None, PathLike]) -> bool:
    """Return True if the given git executable belongs to a Cygwin install.

    Detection runs ``uname`` from the executable's directory; results are
    memoized per path in ``_is_cygwin_cache``."""
    if is_win:
        # is_win seems to be true only for Windows-native pythons
        # cygwin has os.name = posix, I think
        return False

    if git_executable is None:
        return False

    git_executable = str(git_executable)
    is_cygwin = _is_cygwin_cache.get(git_executable)  # type: Optional[bool]
    if is_cygwin is None:
        is_cygwin = False
        try:
            git_dir = osp.dirname(git_executable)
            if not git_dir:
                # Just a name given, not a real path: resolve via PATH lookup.
                res = py_where(git_executable)
                git_dir = osp.dirname(res[0]) if res else ""

            # Just a name given, not a real path.
            uname_cmd = osp.join(git_dir, "uname")
            process = subprocess.Popen([uname_cmd], stdout=subprocess.PIPE, universal_newlines=True)
            uname_out, _ = process.communicate()
            # retcode = process.poll()
            is_cygwin = "CYGWIN" in uname_out
        except Exception as ex:
            log.debug("Failed checking if running in CYGWIN due to: %r", ex)
        _is_cygwin_cache[git_executable] = is_cygwin

    return is_cygwin
def get_user_id() -> str:
    """:return: string identifying the currently active system user as name@node"""
    # Combine the login name with the network node name, e.g. "alice@host".
    user_name = getpass.getuser()
    node_name = platform.node()
    return "%s@%s" % (user_name, node_name)
def finalize_process(proc: Union[subprocess.Popen, "Git.AutoInterrupt"], **kwargs: Any) -> None:
    """Wait for the process (clone, fetch, pull or push) and handle its errors accordingly"""
    # TODO: No close proc-streams??
    # kwargs (e.g. timeout) are forwarded to the process' wait().
    proc.wait(**kwargs)
@overload
def expand_path(p: None, expand_vars: bool = ...) -> None:
    ...


@overload
def expand_path(p: PathLike, expand_vars: bool = ...) -> str:
    # improve these overloads when 3.5 dropped
    ...


def expand_path(p: Union[None, PathLike], expand_vars: bool = True) -> Optional[PathLike]:
    """Expand ``~`` and (optionally) environment variables in *p* and return
    the normalized absolute path, or None if expansion fails."""
    if isinstance(p, pathlib.Path):
        # Path objects are resolved directly (no user/var expansion).
        return p.resolve()
    try:
        expanded = osp.expanduser(p)  # type: ignore
        if expand_vars:
            expanded = osp.expandvars(expanded)  # type: ignore
        return osp.normpath(osp.abspath(expanded))  # type: ignore
    except Exception:
        # Any failure (e.g. p is None) yields None rather than raising.
        return None
def remove_password_if_present(cmdline: Sequence[str]) -> List[str]:
    """
    Parse any command line argument and if one of the elements is an URL with a
    username and/or password, replace each of them by five stars.

    Returns a new list; the input sequence is left untouched. If nothing is
    found, the result is simply a copy of the command line.

    This should be used for every log line that prints a command line, as well
    as for exception messages.
    """
    # NOTE: the previous docstring wrongly claimed in-place replacement.
    new_cmdline = []
    for index, to_parse in enumerate(cmdline):
        new_cmdline.append(to_parse)
        try:
            url = urlsplit(to_parse)
            # Remove password from the URL if present
            if url.password is None and url.username is None:
                continue

            if url.password is not None:
                url = url._replace(netloc=url.netloc.replace(url.password, "*****"))
            if url.username is not None:
                url = url._replace(netloc=url.netloc.replace(url.username, "*****"))
            new_cmdline[index] = urlunsplit(url)
        except ValueError:
            # This is not a valid URL
            continue
    return new_cmdline
# } END utilities
# { Classes
class RemoteProgress(object):
"""
Handler providing an interface to parse progress information emitted by git-push
and git-fetch and to dispatch callbacks allowing subclasses to react to the progress.
"""
_num_op_codes: int = 9
(
BEGIN,
END,
COUNTING,
COMPRESSING,
WRITING,
RECEIVING,
RESOLVING,
FINDING_SOURCES,
CHECKING_OUT,
) = [1 << x for x in range(_num_op_codes)]
STAGE_MASK = BEGIN | END
OP_MASK = ~STAGE_MASK
DONE_TOKEN = "done."
TOKEN_SEPARATOR = ", "
__slots__ = (
"_cur_line",
"_seen_ops",
"error_lines", # Lines that started with 'error:' or 'fatal:'.
"other_lines",
) # Lines not denoting progress (i.e.g. push-infos).
re_op_absolute = re.compile(r"(remote: )?([\w\s]+):\s+()(\d+)()(.*)")
re_op_relative = re.compile(r"(remote: )?([\w\s]+):\s+(\d+)% \((\d+)/(\d+)\)(.*)")
def __init__(self) -> None:
self._seen_ops: List[int] = []
self._cur_line: Optional[str] = None
self.error_lines: List[str] = []
self.other_lines: List[str] = []
def _parse_progress_line(self, line: AnyStr) -> None:
"""Parse progress information from the given line as retrieved by git-push
or git-fetch.
- Lines that do not contain progress info are stored in :attr:`other_lines`.
- Lines that seem to contain an error (i.e. start with error: or fatal:) are stored
in :attr:`error_lines`."""
# handle
# Counting objects: 4, done.
# Compressing objects: 50% (1/2)
# Compressing objects: 100% (2/2)
# Compressing objects: 100% (2/2), done.
if isinstance(line, bytes): # mypy argues about ternary assignment
line_str = line.decode("utf-8")
else:
line_str = line
self._cur_line = line_str
if self._cur_line.startswith(("error:", "fatal:")):
self.error_lines.append(self._cur_line)
return
# find escape characters and cut them away - regex will not work with
# them as they are non-ascii. As git might expect a tty, it will send them
last_valid_index = None
for i, c in enumerate(reversed(line_str)):
if ord(c) < 32:
# its a slice index
last_valid_index = -i - 1
# END character was non-ascii
# END for each character in line
if last_valid_index is not None:
line_str = line_str[:last_valid_index]
# END cut away invalid part
line_str = line_str.rstrip()
cur_count, max_count = None, None
match = self.re_op_relative.match(line_str)
if match is None:
match = self.re_op_absolute.match(line_str)
if not match:
self.line_dropped(line_str)
self.other_lines.append(line_str)
return
# END could not get match
op_code = 0
_remote, op_name, _percent, cur_count, max_count, message = match.groups()
# get operation id
if op_name == "Counting objects":
op_code |= self.COUNTING
elif op_name == "Compressing objects":
op_code |= self.COMPRESSING
elif op_name == "Writing objects":
op_code |= self.WRITING
elif op_name == "Receiving objects":
op_code |= self.RECEIVING
elif op_name == "Resolving deltas":
op_code |= self.RESOLVING
elif op_name == "Finding sources":
op_code |= self.FINDING_SOURCES
elif op_name == "Checking out files":
op_code |= self.CHECKING_OUT
else:
# Note: On windows it can happen that partial lines are sent
# Hence we get something like "CompreReceiving objects", which is
# a blend of "Compressing objects" and "Receiving objects".
# This can't really be prevented, so we drop the line verbosely
# to make sure we get informed in case the process spits out new
# commands at some point.
self.line_dropped(line_str)
# Note: Don't add this line to the other lines, as we have to silently
# drop it
return None
# END handle op code
# figure out stage
if op_code not in self._seen_ops:
self._seen_ops.append(op_code)
op_code |= self.BEGIN
# END begin opcode
if message is None:
message = ""
# END message handling
message = message.strip()
if message.endswith(self.DONE_TOKEN):
op_code |= self.END
message = message[: -len(self.DONE_TOKEN)]
# END end message handling
message = message.strip(self.TOKEN_SEPARATOR)
self.update(
op_code,
cur_count and float(cur_count),
max_count and float(max_count),
message,
)
def new_message_handler(self) -> Callable[[str], None]:
"""
:return:
a progress handler suitable for handle_process_output(), passing lines on to this Progress
handler in a suitable format"""
def handler(line: AnyStr) -> None:
return self._parse_progress_line(line.rstrip())
# end
return handler
    def line_dropped(self, line: str) -> None:
        """Called whenever a line could not be understood and was therefore dropped.

        The default implementation silently ignores the line; subclasses may
        override this hook to log or otherwise record unparsable progress lines.

        :param line: the raw progress line that could not be parsed"""
        pass
    def update(
        self,
        op_code: int,
        cur_count: Union[str, float],
        max_count: Union[str, float, None] = None,
        message: str = "",
    ) -> None:
        """Called whenever the progress changes.

        The default implementation does nothing; subclasses override this to
        consume progress information.

        :param op_code:
            Integer allowing to be compared against Operation IDs and stage IDs.

            Stage IDs are BEGIN and END. BEGIN will only be set once for each Operation
            ID as well as END. It may be that BEGIN and END are set at once in case only
            one progress message was emitted due to the speed of the operation.
            Between BEGIN and END, none of these flags will be set.

            Operation IDs are all held within the OP_MASK. Only one Operation ID will
            be active per call.
        :param cur_count: Current absolute count of items
        :param max_count:
            The maximum count of items we expect. It may be None in case there is
            no maximum number of items or if it is (yet) unknown.
        :param message:
            In case of the 'WRITING' operation, it contains the amount of bytes
            transferred. It may possibly be used for other purposes as well.

        You may read the contents of the current line in self._cur_line."""
        pass
class CallableRemoteProgress(RemoteProgress):
    """A RemoteProgress implementation that forwards every update to a callable."""

    __slots__ = "_callable"

    def __init__(self, fn: Callable) -> None:
        """:param fn: callable invoked with the positional/keyword arguments of
            each update() call"""
        self._callable = fn
        super().__init__()

    def update(self, *args: Any, **kwargs: Any) -> None:
        # Delegate all progress information unchanged to the wrapped callable.
        self._callable(*args, **kwargs)
class Actor(object):
    """Actors hold information about a person acting on the repository. They
    can be committers and authors or anything with a name and an email as
    mentioned in the git log entries."""

    # PRECOMPILED REGEX
    name_only_regex = re.compile(r"<(.*)>")
    name_email_regex = re.compile(r"(.*) <(.*?)>")

    # ENVIRONMENT VARIABLES
    # read when creating new commits
    env_author_name = "GIT_AUTHOR_NAME"
    env_author_email = "GIT_AUTHOR_EMAIL"
    env_committer_name = "GIT_COMMITTER_NAME"
    env_committer_email = "GIT_COMMITTER_EMAIL"

    # CONFIGURATION KEYS
    conf_name = "name"
    conf_email = "email"

    __slots__ = ("name", "email")

    def __init__(self, name: Optional[str], email: Optional[str]) -> None:
        self.name = name
        self.email = email

    def __eq__(self, other: Any) -> bool:
        # Robustness fix: comparing against an object without name/email
        # attributes used to raise AttributeError. Returning NotImplemented lets
        # Python fall back to the default comparison (False) instead.
        try:
            return self.name == other.name and self.email == other.email
        except AttributeError:
            return NotImplemented

    def __ne__(self, other: Any) -> bool:
        return not (self == other)

    def __hash__(self) -> int:
        # Consistent with __eq__: equal actors hash equally.
        return hash((self.name, self.email))

    def __str__(self) -> str:
        return self.name if self.name else ""

    def __repr__(self) -> str:
        return '<git.Actor "%s <%s>">' % (self.name, self.email)

    @classmethod
    def _from_string(cls, string: str) -> "Actor":
        """Create an Actor from a string.

        :param string: is the string, which is expected to be in regular git format

                John Doe <jdoe@example.com>

        :return: Actor"""
        m = cls.name_email_regex.search(string)
        if m:
            name, email = m.groups()
            return Actor(name, email)
        else:
            m = cls.name_only_regex.search(string)
            if m:
                # Only an <email> part was found; it becomes the name.
                return Actor(m.group(1), None)
            # assume best and use the whole string as name
            return Actor(string, None)
            # END special case name
        # END handle name/email matching

    @classmethod
    def _main_actor(
        cls,
        env_name: str,
        env_email: str,
        config_reader: Union[None, "GitConfigParser", "SectionConstraint"] = None,
    ) -> "Actor":
        """Build an Actor from the environment, falling back to *config_reader*
        and finally to defaults derived from the system user id.

        :param env_name: environment variable holding the name
        :param env_email: environment variable holding the email
        :param config_reader: optional config to query the 'user' section from"""
        actor = Actor("", "")
        user_id = None  # We use this to avoid multiple calls to getpass.getuser()

        def default_email() -> str:
            nonlocal user_id
            if not user_id:
                user_id = get_user_id()
            return user_id

        def default_name() -> str:
            # The part of the default email before '@' serves as default name.
            return default_email().split("@")[0]

        for attr, evar, cvar, default in (
            ("name", env_name, cls.conf_name, default_name),
            ("email", env_email, cls.conf_email, default_email),
        ):
            try:
                # Environment always overrides configuration values.
                val = os.environ[evar]
                setattr(actor, attr, val)
            except KeyError:
                if config_reader is not None:
                    try:
                        val = config_reader.get("user", cvar)
                    except Exception:
                        val = default()
                    setattr(actor, attr, val)
                # END config-reader handling
                if not getattr(actor, attr):
                    setattr(actor, attr, default())
            # END handle name
        # END for each item to retrieve
        return actor

    @classmethod
    def committer(cls, config_reader: Union[None, "GitConfigParser", "SectionConstraint"] = None) -> "Actor":
        """
        :return: Actor instance corresponding to the configured committer. It behaves
            similar to the git implementation, such that the environment will override
            configuration values of config_reader. If no value is set at all, it will be
            generated
        :param config_reader: ConfigReader to use to retrieve the values from in case
            they are not set in the environment"""
        return cls._main_actor(cls.env_committer_name, cls.env_committer_email, config_reader)

    @classmethod
    def author(cls, config_reader: Union[None, "GitConfigParser", "SectionConstraint"] = None) -> "Actor":
        """Same as committer(), but defines the main author. It may be specified in the environment,
        but defaults to the committer"""
        return cls._main_actor(cls.env_author_name, cls.env_author_email, config_reader)
class Stats(object):
    """
    Represents stat information as presented by git at the end of a merge. It is
    created from the output of a diff operation.

    ``Example``::

     c = Commit( sha1 )
     s = c.stats
     s.total         # full-stat-dict
     s.files         # dict( filepath : stat-dict )

    ``stat-dict``

    A dictionary with the following keys and values::

      deletions = number of deleted lines as int
      insertions = number of inserted lines as int
      lines = total number of lines changed as int, or deletions + insertions

    ``full-stat-dict``

    In addition to the items in the stat-dict, it features additional information::

     files = number of changed files as int"""

    __slots__ = ("total", "files")

    def __init__(self, total: Total_TD, files: Dict[PathLike, Files_TD]):
        self.total = total
        self.files = files

    @classmethod
    def _list_from_string(cls, repo: "Repo", text: str) -> "Stats":
        """Create a Stat object from output retrieved by git-diff.

        :return: git.Stat"""
        hsh: HSH_TD = {
            "total": {"insertions": 0, "deletions": 0, "lines": 0, "files": 0},
            "files": {},
        }
        for line in text.splitlines():
            raw_insertions, raw_deletions, filename = line.split("\t")
            # A '-' marks binary files for which no line counts exist.
            insertions = int(raw_insertions) if raw_insertions != "-" else 0
            deletions = int(raw_deletions) if raw_deletions != "-" else 0

            totals = hsh["total"]
            totals["insertions"] += insertions
            totals["deletions"] += deletions
            totals["lines"] += insertions + deletions
            totals["files"] += 1

            hsh["files"][filename.strip()] = {
                "insertions": insertions,
                "deletions": deletions,
                "lines": insertions + deletions,
            }
        return Stats(hsh["total"], hsh["files"])
class IndexFileSHA1Writer(object):
    """File-like wrapper that transparently tracks the SHA1 of all data written
    through it. The sha is emitted when the stream is closed or when asked for
    explicitly via write_sha().

    Only useful to the indexfile.

    :note: Based on the dulwich project"""

    __slots__ = ("f", "sha1")

    def __init__(self, f: IO) -> None:
        self.f = f
        # Running digest over everything passed to write().
        self.sha1 = make_sha(b"")

    def write(self, data: AnyStr) -> int:
        self.sha1.update(data)
        return self.f.write(data)

    def write_sha(self) -> bytes:
        """Append the current digest to the stream and return it."""
        digest = self.sha1.digest()
        self.f.write(digest)
        return digest

    def close(self) -> bytes:
        """Finalize the stream by writing the sha, then close the file.

        :return: the written digest"""
        digest = self.write_sha()
        self.f.close()
        return digest

    def tell(self) -> int:
        return self.f.tell()
class LockFile(object):
    """Provides methods to obtain, check for, and release a file based lock which
    should be used to handle concurrent access to the same file.

    As we are a utility class to be derived from, we only use protected methods.
    Locks will automatically be released on destruction."""

    __slots__ = ("_file_path", "_owns_lock")

    def __init__(self, file_path: PathLike) -> None:
        self._file_path = file_path
        self._owns_lock = False

    def __del__(self) -> None:
        # Safety net: release the lock when the holder is garbage collected.
        self._release_lock()

    def _lock_file_path(self) -> str:
        """:return: Path to lockfile"""
        return "%s.lock" % (self._file_path)

    def _has_lock(self) -> bool:
        """:return: True if this instance obtained the lock via _obtain_lock()
            (only the ownership flag is checked, not the lockfile itself)"""
        return self._owns_lock

    def _obtain_lock_or_raise(self) -> None:
        """Create a lock file as flag for other instances, mark our instance as lock-holder

        :raise IOError: if a lock was already present or a lock file could not be written"""
        if self._has_lock():
            return
        lock_file = self._lock_file_path()
        if osp.isfile(lock_file):
            raise IOError(
                "Lock for file %r did already exist, delete %r in case the lock is illegal"
                % (self._file_path, lock_file)
            )
        try:
            # O_EXCL makes the creation atomic: open fails if the file exists.
            flags = os.O_WRONLY | os.O_CREAT | os.O_EXCL
            if is_win:
                flags |= os.O_SHORT_LIVED
            fd = os.open(lock_file, flags, 0)
            os.close(fd)
        except OSError as e:
            # Normalize to IOError as promised in the docstring.
            raise IOError(str(e)) from e
        self._owns_lock = True

    def _obtain_lock(self) -> None:
        """The default implementation will raise if a lock cannot be obtained.
        Subclasses may override this method to provide a different implementation"""
        return self._obtain_lock_or_raise()

    def _release_lock(self) -> None:
        """Release our lock if we have one"""
        if not self._has_lock():
            return
        # if someone removed our file beforehand, lets just flag this issue
        # instead of failing, to make it more usable.
        lfp = self._lock_file_path()
        try:
            rmfile(lfp)
        except OSError:
            pass
        self._owns_lock = False
class BlockingLockFile(LockFile):
    """The lock file will block until a lock could be obtained, or fail after
    a specified timeout.

    :note: If the directory containing the lock was removed, an exception will
        be raised during the blocking period, preventing hangs as the lock
        can never be obtained."""

    __slots__ = ("_check_interval", "_max_block_time")

    def __init__(
        self,
        file_path: PathLike,
        check_interval_s: float = 0.3,
        max_block_time_s: int = maxsize,
    ) -> None:
        """Configure the instance

        :param check_interval_s:
            Period of time to sleep until the lock is checked the next time.
            By default, it waits a nearly unlimited time
        :param max_block_time_s: Maximum amount of seconds we may lock"""
        super(BlockingLockFile, self).__init__(file_path)
        self._check_interval = check_interval_s
        self._max_block_time = max_block_time_s

    def _obtain_lock(self) -> None:
        """This method blocks until it obtained the lock, or raises IOError if
        it ran out of time or if the parent directory was not available anymore.
        If this method returns, you are guaranteed to own the lock"""
        starttime = time.time()
        maxtime = starttime + float(self._max_block_time)
        while True:
            try:
                super(BlockingLockFile, self)._obtain_lock()
            except IOError as e:
                # sanity check: if the directory leading to the lockfile is not
                # readable anymore, raise an exception
                curtime = time.time()
                if not osp.isdir(osp.dirname(self._lock_file_path())):
                    msg = "Directory containing the lockfile %r was not readable anymore after waiting %g seconds" % (
                        self._lock_file_path(),
                        curtime - starttime,
                    )
                    raise IOError(msg) from e
                # END handle missing directory

                if curtime >= maxtime:
                    msg = "Waited %g seconds for lock at %r" % (
                        maxtime - starttime,
                        self._lock_file_path(),
                    )
                    raise IOError(msg) from e
                # END abort if we wait too long
                # Still within the time budget: wait and retry.
                time.sleep(self._check_interval)
            else:
                break
        # END endless loop
class IterableList(List[T_IterableObj]):
    """
    List of iterable objects allowing to query an object by id or by named index::

     heads = repo.heads
     heads.master
     heads['master']
     heads[0]

    Iterable parent objects = [Commit, SubModule, Reference, FetchInfo, PushInfo]
    Iterable via inheritance = [Head, TagReference, RemoteReference]

    It requires an id_attribute name to be set which will be queried from its
    contained items to have a means for comparison.

    A prefix can be specified which is to be used in case the id returned by the
    items always contains a prefix that does not matter to the user, so it
    can be left out."""

    __slots__ = ("_id_attr", "_prefix")

    def __new__(cls, id_attr: str, prefix: str = "") -> "IterableList[IterableObj]":
        # The constructor arguments are consumed by __init__; list subclasses
        # still have to accept them here for instantiation to succeed.
        return super(IterableList, cls).__new__(cls)

    def __init__(self, id_attr: str, prefix: str = "") -> None:
        self._id_attr = id_attr
        self._prefix = prefix

    def __contains__(self, attr: object) -> bool:
        # first try identity match for performance
        try:
            rval = list.__contains__(self, attr)
            if rval:
                return rval
        except (AttributeError, TypeError):
            pass
        # END handle match

        # otherwise make a full name search
        try:
            getattr(self, cast(str, attr))  # use cast to silence mypy
            return True
        except (AttributeError, TypeError):
            return False
        # END handle membership

    def __getattr__(self, attr: str) -> T_IterableObj:
        # Linear scan of the contained items, matching their id attribute
        # (with the configured prefix applied) against the requested name.
        attr = self._prefix + attr
        for item in self:
            if getattr(item, self._id_attr) == attr:
                return item
        # END for each item
        # Fall back to the default lookup, which raises AttributeError.
        return list.__getattribute__(self, attr)

    def __getitem__(self, index: Union[SupportsIndex, int, slice, str]) -> T_IterableObj:  # type: ignore
        assert isinstance(index, (int, str, slice)), "Index of IterableList should be an int or str"

        if isinstance(index, int):
            return list.__getitem__(self, index)
        elif isinstance(index, slice):
            raise ValueError("Index should be an int or str")
        else:
            try:
                return getattr(self, index)
            except AttributeError as e:
                raise IndexError("No item found with id %r" % (self._prefix + index)) from e
        # END handle getattr

    def __delitem__(self, index: Union[SupportsIndex, int, slice, str]) -> None:
        assert isinstance(index, (int, str)), "Index of IterableList should be an int or str"

        delindex = cast(int, index)
        if not isinstance(index, int):
            # Resolve a string index to the positional index of the first match.
            delindex = -1
            name = self._prefix + index
            for i, item in enumerate(self):
                if getattr(item, self._id_attr) == name:
                    delindex = i
                    break
                # END search index
            # END for each item
            if delindex == -1:
                raise IndexError("Item with name %s not found" % name)
            # END handle error
        # END get index to delete
        list.__delitem__(self, delindex)
class IterableClassWatcher(type):
    """Metaclass that emits a DeprecationWarning whenever the deprecated
    Iterable base class is subclassed."""

    def __init__(cls, name: str, bases: Tuple, clsdict: Dict) -> None:
        for base in bases:
            # Identity check (`is`) is the correct comparison for classes;
            # the previous `==` only worked incidentally.
            if type(base) is IterableClassWatcher:
                warnings.warn(
                    f"GitPython Iterable subclassed by {name}. "
                    "Iterable is deprecated due to naming clash since v3.1.18"
                    " and will be removed in 3.1.20, "
                    "Use IterableObj instead \n",
                    DeprecationWarning,
                    stacklevel=2,
                )
class Iterable(metaclass=IterableClassWatcher):
    """Deprecated interface assuring a uniform way to retrieve and iterate
    items within the git repository. Use IterableObj instead."""

    __slots__ = ()
    _id_attribute_ = "attribute that most suitably identifies your instance"

    @classmethod
    def list_items(cls, repo: "Repo", *args: Any, **kwargs: Any) -> Any:
        """
        Deprecated, use IterableObj instead.

        Find all items of this type - subclasses can specify args and kwargs differently.
        If no args are given, subclasses are obliged to return all items if no additional
        arguments arg given.

        :note: Favor the iter_items method as it will
        :return: list(Item,...) list of item instances"""
        out: Any = IterableList(cls._id_attribute_)
        out.extend(cls.iter_items(repo, *args, **kwargs))
        return out

    @classmethod
    def iter_items(cls, repo: "Repo", *args: Any, **kwargs: Any) -> Any:
        # return typed to be compatible with subtypes e.g. Remote
        """For more information about the arguments, see list_items

        :return: iterator yielding Items"""
        raise NotImplementedError("To be implemented by Subclass")
@runtime_checkable
class IterableObj(Protocol):
    """Defines an interface for iterable items which is to assure a uniform
    way to retrieve and iterate items within the git repository

    Subclasses = [Submodule, Commit, Reference, PushInfo, FetchInfo, Remote]"""

    __slots__ = ()
    # Name of the attribute that uniquely identifies instances of the subclass.
    _id_attribute_: str

    @classmethod
    def list_items(cls, repo: "Repo", *args: Any, **kwargs: Any) -> IterableList[T_IterableObj]:
        """
        Find all items of this type - subclasses can specify args and kwargs differently.
        If no args are given, subclasses are obliged to return all items if no additional
        arguments arg given.

        :note: Favor the iter_items method as it will
        :return: list(Item,...) list of item instances"""
        # Collect the lazily produced items into a queryable IterableList.
        out_list: IterableList = IterableList(cls._id_attribute_)
        out_list.extend(cls.iter_items(repo, *args, **kwargs))
        return out_list

    @classmethod
    @abstractmethod
    def iter_items(cls, repo: "Repo", *args: Any, **kwargs: Any) -> Iterator[T_IterableObj]:  # Iterator[T_IterableObj]:
        # return typed to be compatible with subtypes e.g. Remote
        """For more information about the arguments, see list_items

        :return: iterator yielding Items"""
        raise NotImplementedError("To be implemented by Subclass")
# } END classes
class NullHandler(logging.Handler):
    """A logging handler that swallows every record without emitting anything."""

    def emit(self, record: object) -> None:
        """Intentionally do nothing with *record*."""
| bsd-3-clause | 85c6d99aa2d44495807d4e50d43fdea5 | 32.049751 | 120 | 0.586984 | 3.936981 | false | false | false | false |
gitpython-developers/gitpython | git/objects/util.py | 2 | 22234 | # util.py
# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
#
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
"""Module for general utility functions"""
# flake8: noqa F401
from abc import ABC, abstractmethod
import warnings
from git.util import IterableList, IterableObj, Actor
import re
from collections import deque
from string import digits
import time
import calendar
from datetime import datetime, timedelta, tzinfo
# typing ------------------------------------------------------------
from typing import (
Any,
Callable,
Deque,
Iterator,
Generic,
NamedTuple,
overload,
Sequence, # NOQA: F401
TYPE_CHECKING,
Tuple,
Type,
TypeVar,
Union,
cast,
)
from git.types import Has_id_attribute, Literal, _T # NOQA: F401
if TYPE_CHECKING:
from io import BytesIO, StringIO
from .commit import Commit
from .blob import Blob
from .tag import TagObject
from .tree import Tree, TraversedTreeTup
from subprocess import Popen
from .submodule.base import Submodule
from git.types import Protocol, runtime_checkable
else:
# Protocol = Generic[_T] # Needed for typing bug #572?
Protocol = ABC
def runtime_checkable(f):
return f
class TraverseNT(NamedTuple):
    # One entry on the traversal work stack: current depth, the item itself,
    # and the item it was reached from (None for the traversal root).
    depth: int
    item: Union["Traversable", "Blob"]
    src: Union["Traversable", None]


T_TIobj = TypeVar("T_TIobj", bound="TraversableIterableObj")  # for TraversableIterableObj.traverse()

# Shape of the (source, destination) pairs yielded when traversing as edges.
TraversedTup = Union[
    Tuple[Union["Traversable", None], "Traversable"],  # for commit, submodule
    "TraversedTreeTup",
]  # for tree.traverse()

# --------------------------------------------------------------------

__all__ = (
    "get_object_type_by_name",
    "parse_date",
    "parse_actor_and_date",
    "ProcessStreamAdapter",
    "Traversable",
    "altz_to_utctz_str",
    "utctz_to_altz",
    "verify_utctz",
    "Actor",
    "tzoffset",
    "utc",
)

# Zero-length timedelta, reused by tzoffset.dst().
ZERO = timedelta(0)
# { Functions
def mode_str_to_int(modestr: Union[bytes, str]) -> int:
    """
    :param modestr: string like 755 or 644 or 100644 - only the last 6 chars will be used
    :return:
        Integer mode compatible to the mode method ids of the
        stat module regarding the rwx permissions for user, group and other,
        special flags and file system flags, i.e. whether it is a symlink
        for example."""
    mode = 0
    # Each octal digit occupies 3 bits; walk from least to most significant.
    for iteration, char in enumerate(reversed(modestr[-6:])):
        if isinstance(char, int):
            # Iterating bytes yields integer ordinals (e.g. 52 for "4");
            # convert back to the digit character before parsing, otherwise
            # the code point itself would be summed into the mode.
            char = chr(char)
        mode += int(char) << iteration * 3
    # END for each char
    return mode
def get_object_type_by_name(
    object_type_name: bytes,
) -> Union[Type["Commit"], Type["TagObject"], Type["Tree"], Type["Blob"]]:
    """
    :return: type suitable to handle the given object type name.
        Use the type to create new instances.

    :param object_type_name: Member of TYPES

    :raise ValueError: In case object_type_name is unknown"""
    # Imports are performed lazily per branch to avoid circular imports between
    # the object modules and this utility module.
    if object_type_name == b"commit":
        from . import commit

        return commit.Commit
    elif object_type_name == b"tag":
        from . import tag

        return tag.TagObject
    elif object_type_name == b"blob":
        from . import blob

        return blob.Blob
    elif object_type_name == b"tree":
        from . import tree

        return tree.Tree
    else:
        raise ValueError("Cannot handle unknown object type: %s" % object_type_name.decode())
def utctz_to_altz(utctz: str) -> int:
    """Convert a git timezone offset into a seconds-west-of-UTC value as found
    in time.altzone. Git stores the UTC offset (e.g. +0200), which has the
    opposite sign of altzone, hence the negation.

    :param utctz: git utc timezone string in +/-HHMM form, i.e. +0200
    :return: seconds west of UTC (negative for zones east of UTC)"""
    int_utctz = int(utctz)
    # The offset is HHMM-encoded: hundreds are hours, the remainder minutes.
    # The previous float division mis-handled non-whole-hour offsets such as
    # +0530 (yielded 19080 instead of 19800 seconds).
    seconds = (abs(int_utctz) // 100) * 3600 + (abs(int_utctz) % 100) * 60
    return seconds if int_utctz < 0 else -seconds
def altz_to_utctz_str(altz: float) -> str:
    """Inverse of utctz_to_altz: convert a seconds-west-of-UTC value into the
    +HHMM / -HHMM offset string usable in commit objects.

    :param altz: seconds west of UTC, e.g. as returned by time.altzone"""
    int_altz = int(altz)
    # Git offsets have minute resolution; drop seconds that do not fit.
    int_altz -= int_altz % 60
    # The previous implementation encoded the hour fraction decimally and
    # produced e.g. "+0550" for +05:30 offsets.
    hours = abs(int_altz) // 3600
    minutes = (abs(int_altz) % 3600) // 60
    sign = "-" if int_altz > 0 else "+"
    return "{}{:02}{:02}".format(sign, hours, minutes)
def verify_utctz(offset: str) -> str:
    """Validate that *offset* looks like a git UTC offset, e.g. "+0200".

    :raise ValueError: if offset is incorrect
    :return: offset, unchanged"""
    well_formed = (
        len(offset) == 5
        and offset[0] in "+-"
        and all(char in digits for char in offset[1:])
    )
    if not well_formed:
        raise ValueError("Invalid timezone offset format: %s" % offset)
    return offset
class tzoffset(tzinfo):
    """Fixed-offset timezone constructed from seconds west of UTC."""

    def __init__(self, secs_west_of_utc: float, name: Union[None, str] = None) -> None:
        # Seconds west of UTC have the opposite sign of the UTC offset.
        self._offset = timedelta(seconds=-secs_west_of_utc)
        self._name = name or "fixed"

    def __reduce__(self) -> Tuple[Type["tzoffset"], Tuple[float, str]]:
        # Pickle through the constructor arguments.
        return tzoffset, (-self._offset.total_seconds(), self._name)

    def utcoffset(self, dt: Union[datetime, None]) -> timedelta:
        return self._offset

    def tzname(self, dt: Union[datetime, None]) -> str:
        return self._name

    def dst(self, dt: Union[datetime, None]) -> timedelta:
        # A fixed offset never observes daylight saving time.
        return ZERO
# Singleton representing the UTC timezone (zero offset).
utc = tzoffset(0, "UTC")
def from_timestamp(timestamp: float, tz_offset: float) -> datetime:
    """Converts a timestamp + tz_offset into an aware datetime instance.

    Falls back to plain UTC when the offset cannot be represented."""
    utc_dt = datetime.fromtimestamp(timestamp, utc)
    try:
        return utc_dt.astimezone(tzoffset(tz_offset))
    except ValueError:
        # Offsets outside the range datetime accepts: keep the UTC time.
        return utc_dt
def parse_date(string_date: Union[str, datetime]) -> Tuple[int, int]:
    """
    Parse the given date as one of the following

        * aware datetime instance
        * Git internal format: timestamp offset
        * RFC 2822: Thu, 07 Apr 2005 22:13:13 +0200.
        * ISO 8601 2005-04-07T22:13:13
            The T can be a space as well

    :return: Tuple(int(timestamp_UTC), int(offset)), both in seconds since epoch
    :raise ValueError: If the format could not be understood
    :note: Date can also be YYYY.MM.DD, MM/DD/YYYY and DD.MM.YYYY.
    """
    if isinstance(string_date, datetime):
        if string_date.tzinfo:
            utcoffset = cast(timedelta, string_date.utcoffset())  # typeguard, as tzinfo is not None here
            offset = -int(utcoffset.total_seconds())
            return int(string_date.astimezone(utc).timestamp()), offset
        else:
            # Naive datetimes are rejected: without tzinfo the offset is unknown.
            raise ValueError(f"string_date datetime object without tzinfo, {string_date}")

    # git time
    try:
        if string_date.count(" ") == 1 and string_date.rfind(":") == -1:
            # Git internal format: "<timestamp> <offset>", optionally "@<timestamp>".
            timestamp, offset_str = string_date.split()
            if timestamp.startswith("@"):
                timestamp = timestamp[1:]
            timestamp_int = int(timestamp)
            return timestamp_int, utctz_to_altz(verify_utctz(offset_str))
        else:
            offset_str = "+0000"  # local time by default
            if string_date[-5] in "-+":
                offset_str = verify_utctz(string_date[-5:])
                string_date = string_date[:-6]  # skip space as well
            # END split timezone info
            offset = utctz_to_altz(offset_str)

            # now figure out the date and time portion - split time
            date_formats = []
            splitter = -1
            if "," in string_date:
                # RFC 2822 style, e.g. "Thu, 07 Apr 2005 22:13:13"
                date_formats.append("%a, %d %b %Y")
                splitter = string_date.rfind(" ")
            else:
                # iso plus additional
                date_formats.append("%Y-%m-%d")
                date_formats.append("%Y.%m.%d")
                date_formats.append("%m/%d/%Y")
                date_formats.append("%d.%m.%Y")

                splitter = string_date.rfind("T")
                if splitter == -1:
                    splitter = string_date.rfind(" ")
                # END handle 'T' and ' '
            # END handle rfc or iso

            assert splitter > -1

            # split date and time
            time_part = string_date[splitter + 1 :]  # skip space
            date_part = string_date[:splitter]

            # parse time
            tstruct = time.strptime(time_part, "%H:%M:%S")

            # Try each candidate date format until one parses.
            for fmt in date_formats:
                try:
                    dtstruct = time.strptime(date_part, fmt)
                    utctime = calendar.timegm(
                        (
                            dtstruct.tm_year,
                            dtstruct.tm_mon,
                            dtstruct.tm_mday,
                            tstruct.tm_hour,
                            tstruct.tm_min,
                            tstruct.tm_sec,
                            dtstruct.tm_wday,
                            dtstruct.tm_yday,
                            tstruct.tm_isdst,
                        )
                    )
                    return int(utctime), offset
                except ValueError:
                    continue
                # END exception handling
            # END for each fmt

            # still here ? fail
            raise ValueError("no format matched")
        # END handle format
    except Exception as e:
        raise ValueError(f"Unsupported date format or type: {string_date}, type={type(string_date)}") from e
    # END handle exceptions
# precompiled regex
# Matches a full "<keyword> <actor> <epoch-seconds> <+/-HHMM>" line tail.
_re_actor_epoch = re.compile(r"^.+? (.*) (\d+) ([+-]\d+).*$")
# Fallback: the line carries only actor information, no date.
_re_only_actor = re.compile(r"^.+? (.*)$")
def parse_actor_and_date(line: str) -> Tuple[Actor, int, int]:
    """Parse out the actor (author or committer) info from a line like::

        author Tom Preston-Werner <tom@mojombo.com> 1191999972 -0700

    :return: [Actor, int_seconds_since_epoch, int_timezone_offset]"""
    epoch, offset = "0", "0"
    m = _re_actor_epoch.search(line)
    if m is not None:
        actor, epoch, offset = m.groups()
    else:
        # No date information present; extract only the actor part.
        name_match = _re_only_actor.search(line)
        actor = name_match.group(1) if name_match else line or ""
    return (Actor._from_string(actor), int(epoch), utctz_to_altz(offset))
# } END functions
# { Classes
class ProcessStreamAdapter(object):
    """Forwards all attribute access to a single stream of a wrapped process.

    Use this type to hide the underlying process and expose only the chosen
    stream. The process is usually wrapped into an AutoInterrupt class to kill
    it if the instance goes out of scope."""

    __slots__ = ("_proc", "_stream")

    def __init__(self, process: "Popen", stream_name: str) -> None:
        self._proc = process
        self._stream: StringIO = getattr(process, stream_name)  # guessed type

    def __getattr__(self, attr: str) -> Any:
        # Anything not found on the adapter itself is looked up on the stream.
        return getattr(self._stream, attr)
@runtime_checkable
class Traversable(Protocol):
    """Simple interface to perform depth-first or breadth-first traversals
    into one direction.

    Subclasses only need to implement one function.
    Instances of the Subclass must be hashable.

    Defined subclasses = [Commit, SubModule, Reference, FetchInfo, PushInfo]
    """

    __slots__ = ()

    @classmethod
    @abstractmethod
    def _get_intermediate_items(cls, item: Any) -> Sequence["Traversable"]:
        """
        Returns:
            Tuple of items connected to the given item.
            Must be implemented in subclass

        class Commit::     (cls, Commit) -> Tuple[Commit, ...]
        class Submodule::  (cls, Submodule) -> Iterablelist[Submodule]
        class Tree::       (cls, Tree) -> Tuple[Tree, ...]
        """
        raise NotImplementedError("To be implemented in subclass")

    @abstractmethod
    def list_traverse(self, *args: Any, **kwargs: Any) -> Any:
        """Deprecated entry point; forwards to _list_traverse() with a warning."""
        warnings.warn(
            "list_traverse() method should only be called from subclasses."
            "Calling from Traversable abstract class will raise NotImplementedError in 3.1.20"
            "Builtin sublclasses are 'Submodule', 'Tree' and 'Commit",
            DeprecationWarning,
            stacklevel=2,
        )
        return self._list_traverse(*args, **kwargs)

    def _list_traverse(
        self, as_edge: bool = False, *args: Any, **kwargs: Any
    ) -> IterableList[Union["Commit", "Submodule", "Tree", "Blob"]]:
        """
        :return: IterableList with the results of the traversal as produced by
            traverse()

            Commit -> IterableList['Commit']
            Submodule -> IterableList['Submodule']
            Tree -> IterableList[Union['Submodule', 'Tree', 'Blob']]
        """
        # Commit and Submodule have id.__attribute__ as IterableObj
        # Tree has id.__attribute__ inherited from IndexObject
        if isinstance(self, Has_id_attribute):
            id = self._id_attribute_
        else:
            id = ""  # shouldn't reach here, unless Traversable subclass created with no _id_attribute_
            # could add _id_attribute_ to Traversable, or make all Traversable also Iterable?

        if not as_edge:
            out: IterableList[Union["Commit", "Submodule", "Tree", "Blob"]] = IterableList(id)
            out.extend(self.traverse(as_edge=as_edge, *args, **kwargs))
            return out
            # overloads in subclasses (mypy doesn't allow typing self: subclass)
            # Union[IterableList['Commit'], IterableList['Submodule'], IterableList[Union['Submodule', 'Tree', 'Blob']]]
        else:
            # Raise deprecationwarning, doesn't make sense to use this
            out_list: IterableList = IterableList(self.traverse(*args, **kwargs))
            return out_list

    @abstractmethod
    def traverse(self, *args: Any, **kwargs: Any) -> Any:
        """Deprecated entry point; forwards to _traverse() with a warning."""
        warnings.warn(
            "traverse() method should only be called from subclasses."
            "Calling from Traversable abstract class will raise NotImplementedError in 3.1.20"
            "Builtin sublclasses are 'Submodule', 'Tree' and 'Commit",
            DeprecationWarning,
            stacklevel=2,
        )
        return self._traverse(*args, **kwargs)

    def _traverse(
        self,
        predicate: Callable[[Union["Traversable", "Blob", TraversedTup], int], bool] = lambda i, d: True,
        prune: Callable[[Union["Traversable", "Blob", TraversedTup], int], bool] = lambda i, d: False,
        depth: int = -1,
        branch_first: bool = True,
        visit_once: bool = True,
        ignore_self: int = 1,
        as_edge: bool = False,
    ) -> Union[Iterator[Union["Traversable", "Blob"]], Iterator[TraversedTup]]:
        """:return: iterator yielding of items found when traversing self
        :param predicate: f(i,d) returns False if item i at depth d should not be included in the result
        :param prune:
            f(i,d) return True if the search should stop at item i at depth d.
            Item i will not be returned.
        :param depth:
            define at which level the iteration should not go deeper
            if -1, there is no limit
            if 0, you would effectively only get self, the root of the iteration
            i.e. if 1, you would only get the first level of predecessors/successors
        :param branch_first:
            if True, items will be returned branch first, otherwise depth first
        :param visit_once:
            if True, items will only be returned once, although they might be encountered
            several times. Loops are prevented that way.
        :param ignore_self:
            if True, self will be ignored and automatically pruned from
            the result. Otherwise it will be the first item to be returned.
            If as_edge is True, the source of the first edge is None
        :param as_edge:
            if True, return a pair of items, first being the source, second the
            destination, i.e. tuple(src, dest) with the edge spanning from
            source to destination

        Yield types by subclass:
            Commit -> Iterator[Union[Commit, Tuple[Commit, Commit]]
            Submodule -> Iterator[Submodule, Tuple[Submodule, Submodule]]
            Tree -> Iterator[Union[Blob, Tree, Submodule,
                    Tuple[Union[Submodule, Tree], Union[Blob, Tree, Submodule]]]"""
        visited = set()
        stack: Deque[TraverseNT] = deque()
        stack.append(TraverseNT(0, self, None))  # self is always depth level 0

        def addToStack(
            stack: Deque[TraverseNT],
            src_item: "Traversable",
            branch_first: bool,
            depth: int,
        ) -> None:
            # Fix: use the src_item parameter rather than the enclosing-scope
            # variable `item` (they held the same object at the single call
            # site, but relying on the closure was coincidental and fragile).
            lst = self._get_intermediate_items(src_item)
            if not lst:  # empty list
                return None
            if branch_first:
                # extendleft reverses the generator, yielding breadth-first order.
                stack.extendleft(TraverseNT(depth, i, src_item) for i in lst)
            else:
                reviter = (TraverseNT(depth, lst[i], src_item) for i in range(len(lst) - 1, -1, -1))
                stack.extend(reviter)

        # END addToStack local method

        while stack:
            d, item, src = stack.pop()  # depth of item, item, item_source

            if visit_once and item in visited:
                continue

            if visit_once:
                visited.add(item)

            rval: Union[TraversedTup, "Traversable", "Blob"]
            if as_edge:
                # if as_edge return (src, item) unless src is None (e.g. for the first item)
                rval = (src, item)
            else:
                rval = item

            if prune(rval, d):
                continue

            skipStartItem = ignore_self and (item is self)
            if not skipStartItem and predicate(rval, d):
                yield rval

            # only continue to next level if this is appropriate !
            nd = d + 1
            if depth > -1 and nd > depth:
                continue

            addToStack(stack, item, branch_first, nd)
        # END for each item on work stack
@runtime_checkable
class Serializable(Protocol):
    """Defines methods to serialize and deserialize objects from and into a data stream"""

    __slots__ = ()

    # NOTE: the decorators are intentionally commented out upstream; Protocol
    # members are documented rather than enforced here.
    # @abstractmethod
    def _serialize(self, stream: "BytesIO") -> "Serializable":
        """Serialize the data of this object into the given data stream

        :note: a serialized object would ``_deserialize`` into the same object
        :param stream: a file-like object
        :return: self"""
        raise NotImplementedError("To be implemented in subclass")

    # @abstractmethod
    def _deserialize(self, stream: "BytesIO") -> "Serializable":
        """Deserialize all information regarding this object from the stream

        :param stream: a file-like object
        :return: self"""
        raise NotImplementedError("To be implemented in subclass")
class TraversableIterableObj(IterableObj, Traversable):
    """Combines iterable-object behaviour with traversal; the overloads below
    narrow the return type of :meth:`traverse` for common flag combinations."""
    __slots__ = ()
    # Tuple shape yielded in edge mode: (source item or None, item).
    TIobj_tuple = Tuple[Union[T_TIobj, None], T_TIobj]
    def list_traverse(self: T_TIobj, *args: Any, **kwargs: Any) -> IterableList[T_TIobj]:
        """Traverse and collect the results into an IterableList."""
        return super(TraversableIterableObj, self)._list_traverse(*args, **kwargs)
    @overload # type: ignore
    def traverse(self: T_TIobj) -> Iterator[T_TIobj]:
        ...
    @overload
    def traverse(
        self: T_TIobj,
        predicate: Callable[[Union[T_TIobj, Tuple[Union[T_TIobj, None], T_TIobj]], int], bool],
        prune: Callable[[Union[T_TIobj, Tuple[Union[T_TIobj, None], T_TIobj]], int], bool],
        depth: int,
        branch_first: bool,
        visit_once: bool,
        ignore_self: Literal[True],
        as_edge: Literal[False],
    ) -> Iterator[T_TIobj]:
        ...
    @overload
    def traverse(
        self: T_TIobj,
        predicate: Callable[[Union[T_TIobj, Tuple[Union[T_TIobj, None], T_TIobj]], int], bool],
        prune: Callable[[Union[T_TIobj, Tuple[Union[T_TIobj, None], T_TIobj]], int], bool],
        depth: int,
        branch_first: bool,
        visit_once: bool,
        ignore_self: Literal[False],
        as_edge: Literal[True],
    ) -> Iterator[Tuple[Union[T_TIobj, None], T_TIobj]]:
        ...
    @overload
    def traverse(
        self: T_TIobj,
        predicate: Callable[[Union[T_TIobj, TIobj_tuple], int], bool],
        prune: Callable[[Union[T_TIobj, TIobj_tuple], int], bool],
        depth: int,
        branch_first: bool,
        visit_once: bool,
        ignore_self: Literal[True],
        as_edge: Literal[True],
    ) -> Iterator[Tuple[T_TIobj, T_TIobj]]:
        ...
    def traverse(
        self: T_TIobj,
        predicate: Callable[[Union[T_TIobj, TIobj_tuple], int], bool] = lambda i, d: True,
        prune: Callable[[Union[T_TIobj, TIobj_tuple], int], bool] = lambda i, d: False,
        depth: int = -1,
        branch_first: bool = True,
        visit_once: bool = True,
        ignore_self: int = 1,
        as_edge: bool = False,
    ) -> Union[Iterator[T_TIobj], Iterator[Tuple[T_TIobj, T_TIobj]], Iterator[TIobj_tuple]]:
        """For documentation, see util.Traversable._traverse()"""
        # The string literal below is dead code kept as a reference: a manual
        # runtime type-check one could use instead of the cast() further down.
        """
        # To typecheck instead of using cast.
        import itertools
        from git.types import TypeGuard
        def is_commit_traversed(inp: Tuple) -> TypeGuard[Tuple[Iterator[Tuple['Commit', 'Commit']]]]:
            for x in inp[1]:
                if not isinstance(x, tuple) and len(x) != 2:
                    if all(isinstance(inner, Commit) for inner in x):
                        continue
            return True
        ret = super(Commit, self).traverse(predicate, prune, depth, branch_first, visit_once, ignore_self, as_edge)
        ret_tup = itertools.tee(ret, 2)
        assert is_commit_traversed(ret_tup), f"{[type(x) for x in list(ret_tup[0])]}"
        return ret_tup[0]
        """
        # Delegate to the generic traversal; the cast only narrows the static type.
        return cast(
            Union[Iterator[T_TIobj], Iterator[Tuple[Union[None, T_TIobj], T_TIobj]]],
            super(TraversableIterableObj, self)._traverse(
                predicate, prune, depth, branch_first, visit_once, ignore_self, as_edge # type: ignore
            ),
        )
| bsd-3-clause | 468ea18ea4be458e4d2f0011bfbc4b52 | 34.124803 | 120 | 0.58442 | 3.939405 | false | false | false | false |
gitpython-developers/gitpython | git/objects/tag.py | 2 | 3840 | # objects.py
# Copyright (C) 2008, 2009 Michael Trier (mtrier@gmail.com) and contributors
#
# This module is part of GitPython and is released under
# the BSD License: http://www.opensource.org/licenses/bsd-license.php
""" Module containing all object based types. """
from . import base
from .util import get_object_type_by_name, parse_actor_and_date
from ..util import hex_to_bin
from ..compat import defenc
from typing import List, TYPE_CHECKING, Union
from git.types import Literal
if TYPE_CHECKING:
from git.repo import Repo
from git.util import Actor
from .commit import Commit
from .blob import Blob
from .tree import Tree
__all__ = ("TagObject",)
class TagObject(base.Object):
    """Non-Lightweight tag carrying additional information about an object we are pointing to."""
    type: Literal["tag"] = "tag"
    __slots__ = (
        "object",
        "tag",
        "tagger",
        "tagged_date",
        "tagger_tz_offset",
        "message",
    )
    def __init__(
        self,
        repo: "Repo",
        binsha: bytes,
        object: Union[None, base.Object] = None,
        tag: Union[None, str] = None,
        tagger: Union[None, "Actor"] = None,
        tagged_date: Union[int, None] = None,
        tagger_tz_offset: Union[int, None] = None,
        message: Union[str, None] = None,
    ) -> None: # @ReservedAssignment
        """Initialize a tag object with additional data

        :param repo: repository this object is located in
        :param binsha: 20 byte SHA1
        :param object: Object instance of object we are pointing to
        :param tag: name of this tag
        :param tagger: Actor identifying the tagger
        :param tagged_date: int_seconds_since_epoch
            is the DateTime of the tag creation - use time.gmtime to convert
            it into a different format
        :param tagger_tz_offset: int_seconds_west_of_utc is the timezone that the
            authored_date is in, in a format similar to time.altzone"""
        super(TagObject, self).__init__(repo, binsha)
        # Attributes are assigned only when provided; unset slots are lazily
        # filled from the object database by _set_cache_ on first access.
        if object is not None:
            self.object: Union["Commit", "Blob", "Tree", "TagObject"] = object
        if tag is not None:
            self.tag = tag
        if tagger is not None:
            self.tagger = tagger
        if tagged_date is not None:
            self.tagged_date = tagged_date
        if tagger_tz_offset is not None:
            self.tagger_tz_offset = tagger_tz_offset
        if message is not None:
            self.message = message
    def _set_cache_(self, attr: str) -> None:
        """Cache all our attributes at once"""
        if attr in TagObject.__slots__:
            # Read and parse the raw tag object's header lines.
            ostream = self.repo.odb.stream(self.binsha)
            lines: List[str] = ostream.read().decode(defenc, "replace").splitlines()
            _obj, hexsha = lines[0].split(" ")  # object <hexsha>
            _type_token, type_name = lines[1].split(" ")  # type <type name>
            object_type = get_object_type_by_name(type_name.encode("ascii"))
            self.object = object_type(self.repo, hex_to_bin(hexsha))
            self.tag = lines[2][4:] # tag <tag name>
            if len(lines) > 3:
                tagger_info = lines[3] # tagger <actor> <date>
                (
                    self.tagger,
                    self.tagged_date,
                    self.tagger_tz_offset,
                ) = parse_actor_and_date(tagger_info)
            # line 4 empty - it could mark the beginning of the next header
            # in case there really is no message, it would not exist. Otherwise
            # a newline separates header from message
            if len(lines) > 5:
                self.message = "\n".join(lines[5:])
            else:
                self.message = ""
            # END check our attributes
        else:
            super(TagObject, self)._set_cache_(attr)
| bsd-3-clause | 16ca98bf1800a596b1f1310c30ef4a1d | 34.88785 | 97 | 0.583854 | 3.88664 | false | false | false | false |
cqlengine/cqlengine | cqlengine/models.py | 1 | 30054 | from collections import OrderedDict
import re
import warnings
from cqlengine import columns
from cqlengine.exceptions import ModelException, CQLEngineException, ValidationError
from cqlengine.query import ModelQuerySet, DMLQuery, AbstractQueryableColumn, NOT_SET
from cqlengine.query import DoesNotExist as _DoesNotExist
from cqlengine.query import MultipleObjectsReturned as _MultipleObjectsReturned
class ModelDefinitionException(ModelException):
    """Raised when a model class is declared with an invalid configuration
    (e.g. missing primary key, duplicate columns, bad polymorphic setup)."""
    pass
class PolyMorphicModelException(ModelException):
    """Raised for invalid polymorphic model operations, such as saving a
    polymorphic base model or deserializing an unknown polymorphic key."""
    pass
# Module-level fallback keyspace; BaseModel._get_keyspace() returns this when a
# model does not set __keyspace__ explicitly.
DEFAULT_KEYSPACE = None
class UndefinedKeyspaceWarning(Warning):
    """Warning category used when no keyspace has been defined."""
    pass
class hybrid_classmethod(object):
    """Descriptor that dispatches to a class-level or an instance-level
    implementation depending on how the attribute is accessed."""

    def __init__(self, clsmethod, instmethod):
        self.clsmethod = clsmethod
        self.instmethod = instmethod

    def __get__(self, instance, owner):
        # Class access: bind the class-level implementation to the owner.
        if instance is None:
            return self.clsmethod.__get__(owner, owner)
        # Instance access: bind the instance-level implementation.
        return self.instmethod.__get__(instance, owner)

    def __call__(self, *args, **kwargs):
        """
        Just a hint to IDEs that it's ok to call this
        """
        raise NotImplementedError
class QuerySetDescriptor(object):
    """Descriptor producing a fresh queryset every time ``Model.objects``
    is accessed."""

    def __get__(self, obj, model):
        """ :rtype: ModelQuerySet """
        if model.__abstract__:
            raise CQLEngineException('cannot execute queries against abstract models')
        qs = model.__queryset__(model)

        # Concrete polymorphic subclasses whose discriminator column can be
        # queried (partition key or indexed) only see rows of their own type.
        if model._is_polymorphic and not model._is_polymorphic_base:
            poly_name = model._polymorphic_column_name
            poly_column = model._polymorphic_column
            if poly_column.partition_key or poly_column.index:
                # look for existing poly types
                qs = qs.filter(**{poly_name: model.__polymorphic_key__})
        return qs

    def __call__(self, *args, **kwargs):
        """
        Just a hint to IDEs that it's ok to call this
        :rtype: ModelQuerySet
        """
        raise NotImplementedError
class TransactionDescriptor(object):
    """Descriptor returning a setter that attaches lightweight-transaction
    (IF) conditions to an instance or to a fresh queryset."""

    def __get__(self, instance, model):
        if instance:
            def _set_on_instance(*prepared_transaction, **unprepared_transactions):
                # Either a pre-built transaction object is passed positionally,
                # or keyword conditions are compiled through the queryset.
                if len(prepared_transaction) > 0:
                    conditions = prepared_transaction[0]
                else:
                    conditions = instance.objects.iff(**unprepared_transactions)._transaction
                instance._transaction = conditions
                return instance
            return _set_on_instance

        queryset = model.__queryset__(model)

        def _set_on_queryset(**unprepared_transactions):
            queryset._transaction = model.objects.iff(**unprepared_transactions)._transaction
            return queryset
        return _set_on_queryset

    def __call__(self, *args, **kwargs):
        raise NotImplementedError
class TTLDescriptor(object):
    """Descriptor returning a setter that applies a TTL to an instance
    (instance access) or to a fresh queryset (class access)."""

    def __get__(self, instance, model):
        if instance:
            # instance access: mutate and return the instance for chaining
            def _set_ttl(ts):
                instance._ttl = ts
                return instance
            return _set_ttl

        queryset = model.__queryset__(model)

        def _set_ttl(ts):
            queryset._ttl = ts
            return queryset
        return _set_ttl

    def __call__(self, *args, **kwargs):
        raise NotImplementedError
class TimestampDescriptor(object):
    """Descriptor exposing ``USING TIMESTAMP`` support: a setter on instances,
    the queryset's ``timestamp`` method on class access."""

    def __get__(self, instance, model):
        if instance:
            # instance access: mutate and return the instance for chaining
            def _set_timestamp(ts):
                instance._timestamp = ts
                return instance
            return _set_timestamp

        return model.objects.timestamp

    def __call__(self, *args, **kwargs):
        raise NotImplementedError
class IfNotExistsDescriptor(object):
    """Descriptor exposing the ``IF NOT EXISTS`` insert flag: a setter on
    instances, the queryset's ``if_not_exists`` method on class access."""

    def __get__(self, instance, model):
        if instance:
            # instance access: mutate and return the instance for chaining
            def _set_if_not_exists(ife):
                instance._if_not_exists = ife
                return instance
            return _set_if_not_exists

        return model.objects.if_not_exists

    def __call__(self, *args, **kwargs):
        raise NotImplementedError
class ConsistencyDescriptor(object):
    """Descriptor returning a consistency setter; acts on the instance when
    accessed from one, otherwise on a fresh queryset."""

    def __get__(self, instance, model):
        if instance:
            def _set_consistency(consistency):
                # NOTE: instances store this on __consistency__, querysets on
                # _consistency — preserved from the original implementation.
                instance.__consistency__ = consistency
                return instance
            return _set_consistency

        queryset = model.__queryset__(model)

        def _set_consistency(consistency):
            queryset._consistency = consistency
            return queryset
        return _set_consistency

    def __call__(self, *args, **kwargs):
        raise NotImplementedError
class ColumnQueryEvaluator(AbstractQueryableColumn):
    """
    Wraps a column so it can be used in comparator expressions
    (e.g. ``Model.column == 5``) and yield query operators.
    """

    def __init__(self, column):
        self.column = column

    def __unicode__(self):
        # render as the underlying database field name
        return self.column.db_field_name

    def _get_column(self):
        """ :rtype: ColumnQueryEvaluator """
        return self.column
class ColumnDescriptor(object):
    """
    Mediates attribute access for a model column: reads and writes go through
    the instance's value manager, while class-level access yields a query
    evaluator usable in comparator expressions.
    """

    def __init__(self, column):
        """
        :param column:
        :type column: columns.Column
        :return:
        """
        self.column = column
        self.query_evaluator = ColumnQueryEvaluator(self.column)

    def __get__(self, instance, owner):
        """
        Returns either the value or column, depending
        on if an instance is provided or not

        :param instance: the model instance
        :type instance: Model
        """
        try:
            return instance._values[self.column.column_name].getval()
        except AttributeError:
            # no instance (class-level access): expose the comparator wrapper
            return self.query_evaluator

    def __set__(self, instance, value):
        """
        Sets the value on an instance, raises an exception with classes
        TODO: use None instance to create update statements
        """
        if not instance:
            raise AttributeError('cannot reassign column values')
        return instance._values[self.column.column_name].setval(value)

    def __delete__(self, instance):
        """
        Sets the column value to None, if possible
        """
        if instance:
            if not self.column.can_delete:
                raise AttributeError('cannot delete {} columns'.format(self.column.column_name))
            instance._values[self.column.column_name].delval()
class BaseModel(object):
    """
    The base model class, don't inherit from this, inherit from Model, defined below
    """
    # Per-model exception types; ModelMetaClass replaces these with
    # model-specific subclasses on each concrete model.
    class DoesNotExist(_DoesNotExist): pass
    class MultipleObjectsReturned(_MultipleObjectsReturned): pass
    # Descriptors: each returns a queryset (class access) or a setter/instance
    # (instance access) - see the *Descriptor classes above.
    objects = QuerySetDescriptor()
    ttl = TTLDescriptor()
    consistency = ConsistencyDescriptor()
    iff = TransactionDescriptor()
    # custom timestamps, see USING TIMESTAMP X
    timestamp = TimestampDescriptor()
    if_not_exists = IfNotExistsDescriptor()
    # _len is lazily created by __len__
    # table names will be generated automatically from it's model
    # however, you can also define them manually here
    __table_name__ = None
    # the keyspace for this model
    __keyspace__ = None
    # polymorphism options
    __polymorphic_key__ = None
    # compaction options
    __compaction__ = None
    __compaction_tombstone_compaction_interval__ = None
    __compaction_tombstone_threshold__ = None
    # compaction - size tiered options
    __compaction_bucket_high__ = None
    __compaction_bucket_low__ = None
    __compaction_max_threshold__ = None
    __compaction_min_threshold__ = None
    __compaction_min_sstable_size__ = None
    # compaction - leveled options
    __compaction_sstable_size_in_mb__ = None
    # end compaction
    # the queryset class used for this class
    __queryset__ = ModelQuerySet
    __dmlquery__ = DMLQuery
    __default_ttl__ = None # default ttl value to use
    __consistency__ = None # can be set per query
    # Additional table properties
    __bloom_filter_fp_chance__ = None
    __caching__ = None
    __comment__ = None
    __dclocal_read_repair_chance__ = None
    __default_time_to_live__ = None
    __gc_grace_seconds__ = None
    __index_interval__ = None
    __memtable_flush_period_in_ms__ = None
    __populate_io_cache_on_flush__ = None
    __read_repair_chance__ = None
    __replicate_on_write__ = None
    _timestamp = None # optional timestamp to include with the operation (USING TIMESTAMP)
    _if_not_exists = False # optional if_not_exists flag to check existence before insertion
    def __init__(self, **values):
        """Create value managers for every column, seeded from **values."""
        self._values = {}
        self._ttl = self.__default_ttl__
        self._timestamp = None
        self._transaction = None
        for name, column in self._columns.items():
            value = values.get(name, None)
            if value is not None or isinstance(column, columns.BaseContainerColumn):
                value = column.to_python(value)
            value_mngr = column.value_manager(self, column, value)
            if name in values:
                # values provided explicitly are not replaced by defaults in validate()
                value_mngr.explicit = True
            self._values[name] = value_mngr
        # a flag set by the deserializer to indicate
        # that update should be used when persisting changes
        self._is_persisted = False
        self._batch = None
        self._timeout = NOT_SET
    def __repr__(self):
        """
        Pretty printing of models by their primary key
        """
        # NOTE: six is imported at module level near the bottom of this file.
        return '{} <{}>'.format(self.__class__.__name__,
                                ', '.join(('{}={}'.format(k, getattr(self, k)) for k,v in six.iteritems(self._primary_keys)))
                                )
    @classmethod
    def _discover_polymorphic_submodels(cls):
        """Populate the polymorphic map by walking all subclasses recursively."""
        if not cls._is_polymorphic_base:
            raise ModelException('_discover_polymorphic_submodels can only be called on polymorphic base classes')
        def _discover(klass):
            if not klass._is_polymorphic_base and klass.__polymorphic_key__ is not None:
                cls._polymorphic_map[klass.__polymorphic_key__] = klass
            for subklass in klass.__subclasses__():
                _discover(subklass)
        _discover(cls)
    @classmethod
    def _get_model_by_polymorphic_key(cls, key):
        """Return the concrete model registered for ``key``, or None."""
        if not cls._is_polymorphic_base:
            raise ModelException('_get_model_by_polymorphic_key can only be called on polymorphic base classes')
        return cls._polymorphic_map.get(key)
    @classmethod
    def _construct_instance(cls, values):
        """
        method used to construct instances from query results
        this is where polymorphic deserialization occurs
        """
        # we're going to take the values, which is from the DB as a dict
        # and translate that into our local fields
        # the db_map is a db_field -> model field map
        items = values.items()
        field_dict = dict([(cls._db_map.get(k, k),v) for k,v in items])
        if cls._is_polymorphic:
            poly_key = field_dict.get(cls._polymorphic_column_name)
            if poly_key is None:
                raise PolyMorphicModelException('polymorphic key was not found in values')
            poly_base = cls if cls._is_polymorphic_base else cls._polymorphic_base
            klass = poly_base._get_model_by_polymorphic_key(poly_key)
            if klass is None:
                # the subclass may not have been seen yet; discover and retry
                poly_base._discover_polymorphic_submodels()
                klass = poly_base._get_model_by_polymorphic_key(poly_key)
            if klass is None:
                raise PolyMorphicModelException(
                    'unrecognized polymorphic key {} for class {}'.format(poly_key, poly_base.__name__)
                )
            if not issubclass(klass, cls):
                raise PolyMorphicModelException(
                    '{} is not a subclass of {}'.format(klass.__name__, cls.__name__)
                )
            # drop any fields the concrete class does not define
            field_dict = {k: v for k, v in field_dict.items() if k in klass._columns.keys()}
        else:
            klass = cls
        instance = klass(**field_dict)
        instance._is_persisted = True
        return instance
    def _can_update(self):
        """
        Called by the save function to check if this should be
        persisted with update or insert
        :return:
        """
        if not self._is_persisted: return False
        pks = self._primary_keys.keys()  # NOTE(review): unused local
        return all([not self._values[k].changed for k in self._primary_keys])
    @classmethod
    def _get_keyspace(cls):
        """ Returns the manual keyspace, if set, otherwise the default keyspace """
        return cls.__keyspace__ or DEFAULT_KEYSPACE
    @classmethod
    def _get_column(cls, name):
        """
        Returns the column matching the given name, raising a key error if
        it doesn't exist
        :param name: the name of the column to return
        :rtype: Column
        """
        return cls._columns[name]
    def __eq__(self, other):
        """Models are equal when classes match and all column values match."""
        if self.__class__ != other.__class__:
            return False
        # check attribute keys
        keys = set(self._columns.keys())
        other_keys = set(other._columns.keys())
        if keys != other_keys:
            return False
        # check that all of the attributes match
        for key in other_keys:
            if getattr(self, key, None) != getattr(other, key, None):
                return False
        return True
    def __ne__(self, other):
        return not self.__eq__(other)
    @classmethod
    def column_family_name(cls, include_keyspace=True):
        """
        Returns the column family name if it's been defined
        otherwise, it creates it from the module and class name
        """
        cf_name = ''
        if cls.__table_name__:
            cf_name = cls.__table_name__.lower()
        else:
            # get polymorphic base table names if model is polymorphic
            if cls._is_polymorphic and not cls._is_polymorphic_base:
                return cls._polymorphic_base.column_family_name(include_keyspace=include_keyspace)
            # derive a snake_case name from the CamelCase class name
            camelcase = re.compile(r'([a-z])([A-Z])')
            ccase = lambda s: camelcase.sub(lambda v: '{}_{}'.format(v.group(1), v.group(2).lower()), s)
            cf_name += ccase(cls.__name__)
            #trim to less than 48 characters or cassandra will complain
            cf_name = cf_name[-48:]
            cf_name = cf_name.lower()
            cf_name = re.sub(r'^_+', '', cf_name)
        if not include_keyspace: return cf_name
        return '{}.{}'.format(cls._get_keyspace(), cf_name)
    def validate(self):
        """ Cleans and validates the field values """
        for name, col in self._columns.items():
            v = getattr(self, name)
            # apply column defaults only for values that were not set explicitly
            if v is None and not self._values[name].explicit and col.has_default:
                v = col.get_default()
            val = col.validate(v)
            setattr(self, name, val)
    ### Let an instance be used like a dict of its columns keys/values
    def __iter__(self):
        """ Iterate over column ids. """
        for column_id in self._columns.keys():
            yield column_id
    def __getitem__(self, key):
        """ Returns column's value. """
        if not isinstance(key, six.string_types):
            raise TypeError
        if key not in self._columns.keys():
            raise KeyError
        return getattr(self, key)
    def __setitem__(self, key, val):
        """ Sets a column's value. """
        if not isinstance(key, six.string_types):
            raise TypeError
        if key not in self._columns.keys():
            raise KeyError
        return setattr(self, key, val)
    def __len__(self):
        """ Returns the number of columns defined on that model. """
        try:
            return self._len
        except:
            # NOTE(review): bare except; _len is the lazily-created cache attribute
            self._len = len(self._columns.keys())
            return self._len
    def keys(self):
        """ Returns list of column's IDs. """
        return [k for k in self]
    def values(self):
        """ Returns list of column's values. """
        return [self[k] for k in self]
    def items(self):
        """ Returns a list of columns's IDs/values. """
        return [(k, self[k]) for k in self]
    def _as_dict(self):
        """ Returns a map of column names to cleaned values """
        values = self._dynamic_columns or {}
        for name, col in self._columns.items():
            values[name] = col.to_database(getattr(self, name, None))
        return values
    @classmethod
    def create(cls, **kwargs):
        """Validate column names and insert a new row, returning the instance."""
        extra_columns = set(kwargs.keys()) - set(cls._columns.keys())
        if extra_columns:
            raise ValidationError("Incorrect columns passed: {}".format(extra_columns))
        return cls.objects.create(**kwargs)
    @classmethod
    def all(cls):
        """Shortcut for ``cls.objects.all()``."""
        return cls.objects.all()
    @classmethod
    def filter(cls, *args, **kwargs):
        """Shortcut for ``cls.objects.filter(...)``."""
        # if kwargs.values().count(None):
        #     raise CQLEngineException("Cannot pass None as a filter")
        return cls.objects.filter(*args, **kwargs)
    @classmethod
    def get(cls, *args, **kwargs):
        """Shortcut for ``cls.objects.get(...)``."""
        return cls.objects.get(*args, **kwargs)
    def timeout(self, timeout):
        """Set a per-operation timeout; mutually exclusive with batch usage."""
        assert self._batch is None, 'Setting both timeout and batch is not supported'
        self._timeout = timeout
        return self
    def save(self):
        """Persist this instance (insert or update) and return self."""
        # handle polymorphic models
        if self._is_polymorphic:
            if self._is_polymorphic_base:
                raise PolyMorphicModelException('cannot save polymorphic base model')
            else:
                setattr(self, self._polymorphic_column_name, self.__polymorphic_key__)
        is_new = self.pk is None  # NOTE(review): unused local
        self.validate()
        self.__dmlquery__(self.__class__, self,
                          batch=self._batch,
                          ttl=self._ttl,
                          timestamp=self._timestamp,
                          consistency=self.__consistency__,
                          if_not_exists=self._if_not_exists,
                          transaction=self._transaction,
                          timeout=self._timeout).save()
        #reset the value managers
        for v in self._values.values():
            v.reset_previous_value()
        self._is_persisted = True
        self._ttl = self.__default_ttl__
        self._timestamp = None
        return self
    def update(self, **values):
        """Set the given column values, validate, and persist an update."""
        for k, v in values.items():
            col = self._columns.get(k)
            # check for nonexistant columns
            if col is None:
                raise ValidationError("{}.{} has no column named: {}".format(self.__module__, self.__class__.__name__, k))
            # check for primary key update attempts
            if col.is_primary_key:
                raise ValidationError("Cannot apply update to primary key '{}' for {}.{}".format(k, self.__module__, self.__class__.__name__))
            setattr(self, k, v)
        # handle polymorphic models
        if self._is_polymorphic:
            if self._is_polymorphic_base:
                raise PolyMorphicModelException('cannot update polymorphic base model')
            else:
                setattr(self, self._polymorphic_column_name, self.__polymorphic_key__)
        self.validate()
        self.__dmlquery__(self.__class__, self,
                          batch=self._batch,
                          ttl=self._ttl,
                          timestamp=self._timestamp,
                          consistency=self.__consistency__,
                          transaction=self._transaction,
                          timeout=self._timeout).update()
        #reset the value managers
        for v in self._values.values():
            v.reset_previous_value()
        self._is_persisted = True
        self._ttl = self.__default_ttl__
        self._timestamp = None
        return self
    def delete(self):
        """ Deletes this instance """
        self.__dmlquery__(self.__class__, self,
                          batch=self._batch,
                          timestamp=self._timestamp,
                          consistency=self.__consistency__,
                          timeout=self._timeout).delete()
    def get_changed_columns(self):
        """ returns a list of the columns that have been updated since instantiation or save """
        return [k for k,v in self._values.items() if v.changed]
    @classmethod
    def _class_batch(cls, batch):
        """Class-level batch: delegate to the queryset."""
        return cls.objects.batch(batch)
    def _inst_batch(self, batch):
        """Instance-level batch: mutually exclusive with a custom timeout."""
        assert self._timeout is NOT_SET, 'Setting both timeout and batch is not supported'
        self._batch = batch
        return self
    # behaves as a classmethod on the class, a normal method on instances
    batch = hybrid_classmethod(_class_batch, _inst_batch)
class ModelMetaClass(type):
    """
    Metaclass that assembles model classes: collects column definitions
    (including inherited ones), wires up primary/partition/clustering keys,
    polymorphism metadata, and per-model exception types.
    """
    def __new__(cls, name, bases, attrs):
        """
        Build the model class from its declared columns and options.
        """
        #move column definitions into columns dict
        #and set default column names
        column_dict = OrderedDict()
        primary_keys = OrderedDict()
        pk_name = None
        #get inherited properties
        inherited_columns = OrderedDict()
        for base in bases:
            for k,v in getattr(base, '_defined_columns', {}).items():
                inherited_columns.setdefault(k,v)
        #short circuit __abstract__ inheritance
        is_abstract = attrs['__abstract__'] = attrs.get('__abstract__', False)
        #short circuit __polymorphic_key__ inheritance
        attrs['__polymorphic_key__'] = attrs.get('__polymorphic_key__', None)
        def _transform_column(col_name, col_obj):
            # register the column, record primary keys, and install the
            # descriptor mediating attribute access
            column_dict[col_name] = col_obj
            if col_obj.primary_key:
                primary_keys[col_name] = col_obj
            col_obj.set_column_name(col_name)
            #set properties
            attrs[col_name] = ColumnDescriptor(col_obj)
        column_definitions = [(k,v) for k,v in attrs.items() if isinstance(v, columns.Column)]
        #column_definitions = sorted(column_definitions, lambda x,y: cmp(x[1].position, y[1].position))
        column_definitions = sorted(column_definitions, key=lambda x: x[1].position)
        # a class that itself declares a polymorphic_key column is the base
        is_polymorphic_base = any([c[1].polymorphic_key for c in column_definitions])
        # inherited columns are prepended so base-class ordering is preserved
        column_definitions = [x for x in inherited_columns.items()] + column_definitions
        polymorphic_columns = [c for c in column_definitions if c[1].polymorphic_key]
        is_polymorphic = len(polymorphic_columns) > 0
        if len(polymorphic_columns) > 1:
            raise ModelDefinitionException('only one polymorphic_key can be defined in a model, {} found'.format(len(polymorphic_columns)))
        polymorphic_column_name, polymorphic_column = polymorphic_columns[0] if polymorphic_columns else (None, None)
        if isinstance(polymorphic_column, (columns.BaseContainerColumn, columns.Counter)):
            raise ModelDefinitionException('counter and container columns cannot be used for polymorphic keys')
        # find polymorphic base class
        polymorphic_base = None
        if is_polymorphic and not is_polymorphic_base:
            def _get_polymorphic_base(bases):
                for base in bases:
                    if getattr(base, '_is_polymorphic_base', False):
                        return base
                    klass = _get_polymorphic_base(base.__bases__)
                    if klass:
                        return klass
            polymorphic_base = _get_polymorphic_base(bases)
        defined_columns = OrderedDict(column_definitions)
        # check for primary key
        if not is_abstract and not any([v.primary_key for k,v in column_definitions]):
            raise ModelDefinitionException("At least 1 primary key is required.")
        counter_columns = [c for c in defined_columns.values() if isinstance(c, columns.Counter)]
        data_columns = [c for c in defined_columns.values() if not c.primary_key and not isinstance(c, columns.Counter)]
        if counter_columns and data_columns:
            raise ModelDefinitionException('counter models may not have data columns')
        has_partition_keys = any(v.partition_key for (k, v) in column_definitions)
        #transform column definitions
        for k, v in column_definitions:
            # don't allow a column with the same name as a built-in attribute or method
            if k in BaseModel.__dict__:
                raise ModelDefinitionException("column '{}' conflicts with built-in attribute/method".format(k))
            # counter column primary keys are not allowed
            if (v.primary_key or v.partition_key) and isinstance(v, (columns.Counter, columns.BaseContainerColumn)):
                raise ModelDefinitionException('counter columns and container columns cannot be used as primary keys')
            # this will mark the first primary key column as a partition
            # key, if one hasn't been set already
            if not has_partition_keys and v.primary_key:
                v.partition_key = True
                has_partition_keys = True
            _transform_column(k, v)
        partition_keys = OrderedDict(k for k in primary_keys.items() if k[1].partition_key)
        clustering_keys = OrderedDict(k for k in primary_keys.items() if not k[1].partition_key)
        #setup partition key shortcut
        if len(partition_keys) == 0:
            if not is_abstract:
                raise ModelException("at least one partition key must be defined")
        if len(partition_keys) == 1:
            pk_name = [x for x in partition_keys.keys()][0]
            attrs['pk'] = attrs[pk_name]
        else:
            # composite partition key case, get/set a tuple of values
            _get = lambda self: tuple(self._values[c].getval() for c in partition_keys.keys())
            _set = lambda self, val: tuple(self._values[c].setval(v) for (c, v) in zip(partition_keys.keys(), val))
            attrs['pk'] = property(_get, _set)
        # some validation
        col_names = set()
        for v in column_dict.values():
            # check for duplicate column names
            if v.db_field_name in col_names:
                raise ModelException("{} defines the column {} more than once".format(name, v.db_field_name))
            if v.clustering_order and not (v.primary_key and not v.partition_key):
                raise ModelException("clustering_order may be specified only for clustering primary keys")
            if v.clustering_order and v.clustering_order.lower() not in ('asc', 'desc'):
                raise ModelException("invalid clustering order {} for column {}".format(repr(v.clustering_order), v.db_field_name))
            col_names.add(v.db_field_name)
        #create db_name -> model name map for loading
        db_map = {}
        for field_name, col in column_dict.items():
            db_map[col.db_field_name] = field_name
        #add management members to the class
        attrs['_columns'] = column_dict
        attrs['_primary_keys'] = primary_keys
        attrs['_defined_columns'] = defined_columns
        # maps the database field to the models key
        attrs['_db_map'] = db_map
        attrs['_pk_name'] = pk_name
        attrs['_dynamic_columns'] = {}
        attrs['_partition_keys'] = partition_keys
        attrs['_clustering_keys'] = clustering_keys
        attrs['_has_counter'] = len(counter_columns) > 0
        # add polymorphic management attributes
        attrs['_is_polymorphic_base'] = is_polymorphic_base
        attrs['_is_polymorphic'] = is_polymorphic
        attrs['_polymorphic_base'] = polymorphic_base
        attrs['_polymorphic_column'] = polymorphic_column
        attrs['_polymorphic_column_name'] = polymorphic_column_name
        attrs['_polymorphic_map'] = {} if is_polymorphic_base else None
        #setup class exceptions
        DoesNotExistBase = None
        for base in bases:
            DoesNotExistBase = getattr(base, 'DoesNotExist', None)
            if DoesNotExistBase is not None: break
        DoesNotExistBase = DoesNotExistBase or attrs.pop('DoesNotExist', BaseModel.DoesNotExist)
        attrs['DoesNotExist'] = type('DoesNotExist', (DoesNotExistBase,), {})
        MultipleObjectsReturnedBase = None
        for base in bases:
            MultipleObjectsReturnedBase = getattr(base, 'MultipleObjectsReturned', None)
            if MultipleObjectsReturnedBase is not None: break
        # BUG FIX: this previously read ``DoesNotExistBase or ...`` (copy-paste
        # from the block above), which made MultipleObjectsReturned subclass
        # DoesNotExist whenever no base class supplied the attribute.
        MultipleObjectsReturnedBase = MultipleObjectsReturnedBase or attrs.pop('MultipleObjectsReturned', BaseModel.MultipleObjectsReturned)
        attrs['MultipleObjectsReturned'] = type('MultipleObjectsReturned', (MultipleObjectsReturnedBase,), {})
        #create the class and add a QuerySet to it
        klass = super(ModelMetaClass, cls).__new__(cls, name, bases, attrs)
        return klass
import six
@six.add_metaclass(ModelMetaClass)
class Model(BaseModel):
    """
    User-facing model base class.  The column family name can be set via the
    ``__table_name__`` attribute; otherwise it is derived from the class name
    (see ``BaseModel.column_family_name``).
    """
    # abstract models are not queryable and require no primary key
    __abstract__ = True
    # __metaclass__ = ModelMetaClass
| bsd-3-clause | e2a7a81acdd459976404e6b31ba1cb12 | 34.357647 | 142 | 0.596327 | 4.501123 | false | false | false | false |
cqlengine/cqlengine | cqlengine/statements.py | 2 | 26897 | import time
from datetime import datetime, timedelta
import six
from cqlengine.functions import QueryValue
from cqlengine.operators import BaseWhereOperator, InOperator
class StatementException(Exception):
    """Raised when a statement component is constructed with invalid arguments."""
    pass
import sys
class UnicodeMixin(object):
    """Gives subclasses a ``__str__`` backed by their ``__unicode__`` method,
    encoding to utf-8 bytes on Python 2."""
    if sys.version_info > (3, 0):
        def __str__(self):
            return self.__unicode__()
    else:
        def __str__(self):
            return six.text_type(self).encode('utf-8')
class ValueQuoter(UnicodeMixin):
    """Wraps a raw python value and renders it as a CQL literal."""

    def __init__(self, value):
        self.value = value

    def __unicode__(self):
        from cassandra.encoder import cql_quote
        # bool must be checked first: it would otherwise fall through to cql_quote
        if isinstance(self.value, bool):
            return 'true' if self.value else 'false'
        if isinstance(self.value, (list, tuple)):
            return '[' + ', '.join(cql_quote(v) for v in self.value) + ']'
        if isinstance(self.value, dict):
            return '{' + ', '.join(cql_quote(k) + ':' + cql_quote(v) for k, v in self.value.items()) + '}'
        if isinstance(self.value, set):
            return '{' + ', '.join(cql_quote(v) for v in self.value) + '}'
        return cql_quote(self.value)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.value == other.value
class InQuoter(ValueQuoter):
    """Renders its value as a parenthesized tuple, as required by IN clauses."""

    def __unicode__(self):
        from cassandra.encoder import cql_quote
        quoted = [cql_quote(v) for v in self.value]
        return '(' + ', '.join(quoted) + ')'
class BaseClause(UnicodeMixin):
    """Common behaviour for query clauses: a field name, a value, and a
    placeholder id used to reference the value in the query context."""

    def __init__(self, field, value):
        self.field = field
        self.value = value
        self.context_id = None

    def __unicode__(self):
        raise NotImplementedError

    def __hash__(self):
        return hash(self.field) ^ hash(self.value)

    def __eq__(self, other):
        return isinstance(other, self.__class__) and \
            self.field == other.field and self.value == other.value

    def __ne__(self, other):
        return not self.__eq__(other)

    def get_context_size(self):
        """ returns the number of entries this clause will add to the query context """
        return 1

    def set_context_id(self, i):
        """ sets the value placeholder that will be used in the query """
        self.context_id = i

    def update_context(self, ctx):
        """ updates the query context with this clauses values """
        assert isinstance(ctx, dict)
        ctx[str(self.context_id)] = self.value
class WhereClause(BaseClause):
    """ a single where statement used in queries """

    def __init__(self, field, operator, value, quote_field=True):
        """
        :param field: the column name being filtered
        :param operator: a BaseWhereOperator instance rendered between field and value
        :param value: the comparison value; wrapped in a QueryValue if it is not one already
        :param quote_field: hack to get the token function rendering properly
        :return:
        """
        if not isinstance(operator, BaseWhereOperator):
            raise StatementException(
                "operator must be of type {}, got {}".format(BaseWhereOperator, type(operator))
            )
        super(WhereClause, self).__init__(field, value)
        self.operator = operator
        # QueryValue owns context-id bookkeeping and rendering of the value
        self.query_value = self.value if isinstance(self.value, QueryValue) else QueryValue(self.value)
        self.quote_field = quote_field

    def __unicode__(self):
        field = ('"{}"' if self.quote_field else '{}').format(self.field)
        return u'{} {} {}'.format(field, self.operator, six.text_type(self.query_value))

    def __hash__(self):
        # combine the base (field, value) hash with the operator
        return super(WhereClause, self).__hash__() ^ hash(self.operator)

    def __eq__(self, other):
        # equal only if field/value match AND the operator class matches
        if super(WhereClause, self).__eq__(other):
            return self.operator.__class__ == other.operator.__class__
        return False

    def get_context_size(self):
        # delegate: a QueryValue may occupy more than one placeholder slot
        return self.query_value.get_context_size()

    def set_context_id(self, i):
        super(WhereClause, self).set_context_id(i)
        self.query_value.set_context_id(i)

    def update_context(self, ctx):
        if isinstance(self.operator, InOperator):
            # IN values are rendered inline as a quoted tuple, bypassing QueryValue
            ctx[str(self.context_id)] = InQuoter(self.value)
        else:
            self.query_value.update_context(ctx)
class AssignmentClause(BaseClause):
    """Renders a single ``"col" = %(id)s`` fragment for a SET list."""

    def __unicode__(self):
        return u'"{0}" = %({1})s'.format(self.field, self.context_id)

    def insert_tuple(self):
        """Return the (column name, context id) pair used when rendering INSERT."""
        return self.field, self.context_id
class TransactionClause(BaseClause):
    """Renders a single ``"col" = %(id)s`` fragment for an IF
    (lightweight transaction) block."""

    def __unicode__(self):
        return u'"{0}" = %({1})s'.format(self.field, self.context_id)

    def insert_tuple(self):
        """Return the (column name, context id) pair for this condition."""
        return self.field, self.context_id
class ContainerUpdateClause(AssignmentClause):
    """Base class for collection-column (set/list/map) update clauses.

    Subclasses lazily diff ``value`` against ``previous`` in ``_analyze`` to
    decide which partial-update fragments to emit.
    """

    def __init__(self, field, value, operation=None, previous=None, column=None):
        super(ContainerUpdateClause, self).__init__(field, value)
        self.previous = previous
        self._assignments = None
        self._operation = operation
        self._analyzed = False
        self._column = column

    def _to_database(self, val):
        # run the value through the column's serializer when one was provided
        if self._column:
            return self._column.to_database(val)
        return val

    def _analyze(self):
        raise NotImplementedError

    def get_context_size(self):
        raise NotImplementedError

    def update_context(self, ctx):
        raise NotImplementedError
class SetUpdateClause(ContainerUpdateClause):
    """ updates a set collection """

    def __init__(self, field, value, operation=None, previous=None, column=None):
        super(SetUpdateClause, self).__init__(field, value, operation, previous, column=column)
        self._additions = None
        self._removals = None

    def __unicode__(self):
        # NOTE: fragment order (assignment, additions, removals) must mirror
        # the ctx_id increments in update_context below
        qs = []
        ctx_id = self.context_id
        if (self.previous is None and
                self._assignments is None and
                self._additions is None and
                self._removals is None):
            # nothing to diff against and no changes: overwrite with an empty value
            qs += ['"{}" = %({})s'.format(self.field, ctx_id)]
        if self._assignments is not None:
            qs += ['"{}" = %({})s'.format(self.field, ctx_id)]
            ctx_id += 1
        if self._additions is not None:
            qs += ['"{0}" = "{0}" + %({1})s'.format(self.field, ctx_id)]
            ctx_id += 1
        if self._removals is not None:
            qs += ['"{0}" = "{0}" - %({1})s'.format(self.field, ctx_id)]
        return ', '.join(qs)

    def _analyze(self):
        """ works out the updates to be performed """
        if self.value is None or self.value == self.previous:
            pass
        elif self._operation == "add":
            self._additions = self.value
        elif self._operation == "remove":
            self._removals = self.value
        elif self.previous is None:
            self._assignments = self.value
        else:
            # partial update time: diff the two sets in both directions
            self._additions = (self.value - self.previous) or None
            self._removals = (self.previous - self.value) or None
        self._analyzed = True

    def get_context_size(self):
        if not self._analyzed: self._analyze()
        # NOTE(review): this branch tests `not self._assignments` while
        # __unicode__/update_context test `is None` — confirm the asymmetry
        # is intended for empty-set assignments
        if (self.previous is None and
                not self._assignments and
                self._additions is None and
                self._removals is None):
            return 1
        return int(bool(self._assignments)) + int(bool(self._additions)) + int(bool(self._removals))

    def update_context(self, ctx):
        if not self._analyzed: self._analyze()
        ctx_id = self.context_id
        if (self.previous is None and
                self._assignments is None and
                self._additions is None and
                self._removals is None):
            # NOTE(review): {} is an empty dict, not an empty set — confirm
            # the column's to_database handles this as intended
            ctx[str(ctx_id)] = self._to_database({})
        if self._assignments is not None:
            ctx[str(ctx_id)] = self._to_database(self._assignments)
            ctx_id += 1
        if self._additions is not None:
            ctx[str(ctx_id)] = self._to_database(self._additions)
            ctx_id += 1
        if self._removals is not None:
            ctx[str(ctx_id)] = self._to_database(self._removals)
class ListUpdateClause(ContainerUpdateClause):
    """ updates a list collection """

    def __init__(self, field, value, operation=None, previous=None, column=None):
        super(ListUpdateClause, self).__init__(field, value, operation, previous, column=column)
        self._append = None
        self._prepend = None

    def __unicode__(self):
        if not self._analyzed: self._analyze()
        # fragment order (assignment, prepend, append) must mirror the
        # ctx_id increments in update_context below
        qs = []
        ctx_id = self.context_id
        if self._assignments is not None:
            qs += ['"{}" = %({})s'.format(self.field, ctx_id)]
            ctx_id += 1
        if self._prepend is not None:
            qs += ['"{0}" = %({1})s + "{0}"'.format(self.field, ctx_id)]
            ctx_id += 1
        if self._append is not None:
            qs += ['"{0}" = "{0}" + %({1})s'.format(self.field, ctx_id)]
        return ', '.join(qs)

    def get_context_size(self):
        if not self._analyzed: self._analyze()
        return int(self._assignments is not None) + int(bool(self._append)) + int(bool(self._prepend))

    def update_context(self, ctx):
        if not self._analyzed: self._analyze()
        ctx_id = self.context_id
        if self._assignments is not None:
            ctx[str(ctx_id)] = self._to_database(self._assignments)
            ctx_id += 1
        if self._prepend is not None:
            # CQL seems to prepend element at a time, starting
            # with the element at idx 0, we can either reverse
            # it here, or have it inserted in reverse
            ctx[str(ctx_id)] = self._to_database(list(reversed(self._prepend)))
            ctx_id += 1
        if self._append is not None:
            ctx[str(ctx_id)] = self._to_database(self._append)

    def _analyze(self):
        """ works out the updates to be performed """
        if self.value is None or self.value == self.previous:
            pass
        elif self._operation == "append":
            self._append = self.value
        elif self._operation == "prepend":
            # self.value is a Quoter but we reverse self._prepend later as if
            # it's a list, so we have to set it to the underlying list
            self._prepend = self.value.value
        elif self.previous is None:
            self._assignments = self.value
        elif len(self.value) < len(self.previous):
            # if elements have been removed,
            # rewrite the whole list
            self._assignments = self.value
        elif len(self.previous) == 0:
            # if we're updating from an empty
            # list, do a complete insert
            self._assignments = self.value
        else:
            # look for the old list as a contiguous sub-sequence of the new
            # list; anything before it becomes a prepend, anything after an append

            # the max start idx we want to compare
            search_space = len(self.value) - max(0, len(self.previous)-1)

            # the size of the sub lists we want to look at
            search_size = len(self.previous)

            for i in range(search_space):
                #slice boundary
                j = i + search_size
                sub = self.value[i:j]
                # cheap first/last element check before the full comparison
                idx_cmp = lambda idx: self.previous[idx] == sub[idx]
                if idx_cmp(0) and idx_cmp(-1) and self.previous == sub:
                    self._prepend = self.value[:i] or None
                    self._append = self.value[j:] or None
                    break

            # if both append and prepend are still None after looking
            # at both lists, an insert statement will be created
            if self._prepend is self._append is None:
                self._assignments = self.value

        self._analyzed = True
class MapUpdateClause(ContainerUpdateClause):
    """ updates a map collection """

    def __init__(self, field, value, operation=None, previous=None, column=None):
        super(MapUpdateClause, self).__init__(field, value, operation, previous, column=column)
        self._updates = None

    def _analyze(self):
        if self._operation == "update":
            # NOTE(review): on py3 this is a dict view, not a list like the
            # other branches — confirm downstream only iterates it
            self._updates = self.value.keys()
        else:
            if self.previous is None:
                self._updates = sorted([k for k, v in self.value.items()])
            else:
                # only keys whose value actually changed; sorted for stable output
                self._updates = sorted([k for k, v in self.value.items() if v != self.previous.get(k)]) or None
        self._analyzed = True

    def get_context_size(self):
        if not self._analyzed: self._analyze()
        if self.previous is None and not self._updates:
            return 1
        # each updated key consumes two placeholders: one for the key, one for the value
        return len(self._updates or []) * 2

    def update_context(self, ctx):
        if not self._analyzed: self._analyze()
        ctx_id = self.context_id
        if self.previous is None and not self._updates:
            # no previous value and nothing to set: overwrite with an empty map
            ctx[str(ctx_id)] = {}
        else:
            for key in self._updates or []:
                val = self.value.get(key)
                ctx[str(ctx_id)] = self._column.key_col.to_database(key) if self._column else key
                ctx[str(ctx_id + 1)] = self._column.value_col.to_database(val) if self._column else val
                ctx_id += 2

    def __unicode__(self):
        if not self._analyzed: self._analyze()
        qs = []
        ctx_id = self.context_id
        if self.previous is None and not self._updates:
            qs += ['"{}" = %({})s'.format(self.field, ctx_id)]
        else:
            # one "map[key] = value" fragment per updated key
            for _ in self._updates or []:
                qs += ['"{}"[%({})s] = %({})s'.format(self.field, ctx_id, ctx_id + 1)]
                ctx_id += 2
        return ', '.join(qs)
class CounterUpdateClause(ContainerUpdateClause):
    """Renders an increment/decrement for a counter column."""

    def __init__(self, field, value, previous=None, column=None):
        super(CounterUpdateClause, self).__init__(field, value, previous=previous, column=column)
        # counters with no known previous value count up from zero
        self.previous = self.previous or 0

    def get_context_size(self):
        return 1

    def update_context(self, ctx):
        # the context holds only the magnitude; the sign is rendered in __unicode__
        ctx[str(self.context_id)] = self._to_database(abs(self.value - self.previous))

    def __unicode__(self):
        delta = self.value - self.previous
        sign = '-' if delta < 0 else '+'
        return '"{0}" = "{0}" {1} %({2})s'.format(self.field, sign, self.context_id)
class BaseDeleteClause(BaseClause):
    """Marker base class for clauses usable in DELETE statements."""
    pass
class FieldDeleteClause(BaseDeleteClause):
    """Renders a bare quoted column name so DELETE removes that field from a row."""

    def __init__(self, field):
        # a field deletion carries no value
        super(FieldDeleteClause, self).__init__(field, None)

    def __unicode__(self):
        return '"{0}"'.format(self.field)

    def update_context(self, ctx):
        # no placeholder values are needed to delete a field
        pass

    def get_context_size(self):
        return 0
class MapDeleteClause(BaseDeleteClause):
    """ removes keys from a map """

    def __init__(self, field, value, previous=None):
        super(MapDeleteClause, self).__init__(field, value)
        self.value = self.value or {}
        self.previous = previous or {}
        self._analyzed = False
        self._removals = None

    def _analyze(self):
        # keys present before but absent now get deleted; sorted for stable output
        self._removals = sorted([k for k in self.previous if k not in self.value])
        self._analyzed = True

    def update_context(self, ctx):
        if not self._analyzed: self._analyze()
        # one placeholder per removed key, at consecutive context ids
        for idx, key in enumerate(self._removals):
            ctx[str(self.context_id + idx)] = key

    def get_context_size(self):
        if not self._analyzed: self._analyze()
        return len(self._removals)

    def __unicode__(self):
        if not self._analyzed: self._analyze()
        return ', '.join(['"{}"[%({})s]'.format(self.field, self.context_id + i) for i in range(len(self._removals))])
class BaseCQLStatement(UnicodeMixin):
    """ The base cql statement class """

    def __init__(self, table, consistency=None, timestamp=None, where=None):
        """
        :param table: fully qualified table/column-family name
        :param consistency: consistency level to execute with (stored, not rendered here)
        :param timestamp: int/long microseconds, datetime, or timedelta (see timestamp_normalized)
        :param where: iterable of WhereClause instances
        """
        super(BaseCQLStatement, self).__init__()
        self.table = table
        self.consistency = consistency
        # context_id is where this statement's placeholders start;
        # context_counter tracks the next free placeholder id
        self.context_id = 0
        self.context_counter = self.context_id
        self.timestamp = timestamp

        self.where_clauses = []
        for clause in where or []:
            self.add_where_clause(clause)

    def add_where_clause(self, clause):
        """
        adds a where clause to this statement
        :param clause: the clause to add
        :type clause: WhereClause
        """
        if not isinstance(clause, WhereClause):
            raise StatementException("only instances of WhereClause can be added to statements")
        clause.set_context_id(self.context_counter)
        self.context_counter += clause.get_context_size()
        self.where_clauses.append(clause)

    def get_context(self):
        """
        returns the context dict for this statement
        :rtype: dict
        """
        ctx = {}
        for clause in self.where_clauses or []:
            clause.update_context(ctx)
        return ctx

    def get_context_size(self):
        return len(self.get_context())

    def update_context_id(self, i):
        """Rebase all placeholder ids starting at ``i`` (used when combining statements)."""
        self.context_id = i
        self.context_counter = self.context_id
        for clause in self.where_clauses:
            clause.set_context_id(self.context_counter)
            self.context_counter += clause.get_context_size()

    @property
    def timestamp_normalized(self):
        """
        we're expecting self.timestamp to be either a long, int, a datetime, or a timedelta
        :return: the timestamp as integer microseconds since the epoch, or None
        """
        if not self.timestamp:
            return None

        if isinstance(self.timestamp, six.integer_types):
            return self.timestamp

        if isinstance(self.timestamp, timedelta):
            # timedeltas are relative to "now"
            tmp = datetime.now() + self.timestamp
        else:
            tmp = self.timestamp

        # seconds -> microseconds, plus the sub-second component
        return int(time.mktime(tmp.timetuple()) * 1e+6 + tmp.microsecond)

    def __unicode__(self):
        raise NotImplementedError

    def __repr__(self):
        return self.__unicode__()

    @property
    def _where(self):
        return 'WHERE {}'.format(' AND '.join([six.text_type(c) for c in self.where_clauses]))
class SelectStatement(BaseCQLStatement):
    """ a cql select statement """

    def __init__(self,
                 table,
                 fields=None,
                 count=False,
                 consistency=None,
                 where=None,
                 order_by=None,
                 limit=None,
                 allow_filtering=False):
        """
        :param table: table to select from
        :param fields: column name or list of column names ('*' when empty)
        :param count: render COUNT(*) instead of a column list
        :param where
        :type where list of cqlengine.statements.WhereClause
        :param order_by: column name or list of names for ORDER BY
        :param limit: optional LIMIT value
        :param allow_filtering: append ALLOW FILTERING when True
        """
        super(SelectStatement, self).__init__(
            table,
            consistency=consistency,
            where=where
        )

        # normalize single-string arguments into lists
        self.fields = [fields] if isinstance(fields, six.string_types) else (fields or [])
        self.count = count
        self.order_by = [order_by] if isinstance(order_by, six.string_types) else order_by
        self.limit = limit
        self.allow_filtering = allow_filtering

    def __unicode__(self):
        qs = ['SELECT']
        if self.count:
            qs += ['COUNT(*)']
        else:
            qs += [', '.join(['"{}"'.format(f) for f in self.fields]) if self.fields else '*']
        qs += ['FROM', self.table]

        if self.where_clauses:
            qs += [self._where]

        # ORDER BY is meaningless for a COUNT query
        if self.order_by and not self.count:
            qs += ['ORDER BY {}'.format(', '.join(six.text_type(o) for o in self.order_by))]

        if self.limit:
            qs += ['LIMIT {}'.format(self.limit)]

        if self.allow_filtering:
            qs += ['ALLOW FILTERING']

        return ' '.join(qs)
class AssignmentStatement(BaseCQLStatement):
    """ value assignment statements """

    def __init__(self,
                 table,
                 assignments=None,
                 consistency=None,
                 where=None,
                 ttl=None,
                 timestamp=None):
        """
        :param assignments: iterable of AssignmentClause instances
        :param ttl: optional TTL in seconds
        :param timestamp: optional write timestamp (see timestamp_normalized)
        """
        super(AssignmentStatement, self).__init__(
            table,
            consistency=consistency,
            where=where,
        )
        self.ttl = ttl
        self.timestamp = timestamp

        # add assignments
        self.assignments = []
        for assignment in assignments or []:
            self.add_assignment_clause(assignment)

    def update_context_id(self, i):
        # rebase where clauses first, then the assignments after them
        super(AssignmentStatement, self).update_context_id(i)
        for assignment in self.assignments:
            assignment.set_context_id(self.context_counter)
            self.context_counter += assignment.get_context_size()

    def add_assignment_clause(self, clause):
        """
        adds an assignment clause to this statement
        :param clause: the clause to add
        :type clause: AssignmentClause
        """
        if not isinstance(clause, AssignmentClause):
            raise StatementException("only instances of AssignmentClause can be added to statements")
        clause.set_context_id(self.context_counter)
        self.context_counter += clause.get_context_size()
        self.assignments.append(clause)

    @property
    def is_empty(self):
        """True when the statement has no assignments at all."""
        return len(self.assignments) == 0

    def get_context(self):
        ctx = super(AssignmentStatement, self).get_context()
        for clause in self.assignments:
            clause.update_context(ctx)
        return ctx
class InsertStatement(AssignmentStatement):
    """A CQL INSERT statement.

    Renders ``INSERT INTO <table> (...) VALUES (...) [IF NOT EXISTS]
    [USING TTL ... AND TIMESTAMP ...]``.

    Bug fix: when both ``ttl`` and ``timestamp`` were set, two separate
    ``USING`` clauses were emitted ("USING TTL x USING TIMESTAMP y"), which
    is invalid CQL; options must be combined into one clause with ``AND``
    (matching how UpdateStatement renders them).
    """

    def __init__(self,
                 table,
                 assignments=None,
                 consistency=None,
                 where=None,
                 ttl=None,
                 timestamp=None,
                 if_not_exists=False):
        """
        :param if_not_exists: append ``IF NOT EXISTS`` when True
        """
        super(InsertStatement, self).__init__(
            table,
            assignments=assignments,
            consistency=consistency,
            where=where,
            ttl=ttl,
            timestamp=timestamp)

        self.if_not_exists = if_not_exists

    def add_where_clause(self, clause):
        """INSERT statements cannot be filtered; always raises StatementException."""
        raise StatementException("Cannot add where clauses to insert statements")

    def __unicode__(self):
        qs = ['INSERT INTO {}'.format(self.table)]

        # get column names and context placeholders
        fields = [a.insert_tuple() for a in self.assignments]
        columns, values = zip(*fields)

        qs += ["({})".format(', '.join(['"{}"'.format(c) for c in columns]))]
        qs += ['VALUES']
        qs += ["({})".format(', '.join(['%({})s'.format(v) for v in values]))]

        if self.if_not_exists:
            qs += ["IF NOT EXISTS"]

        # combine TTL and TIMESTAMP into a single USING clause joined by AND
        using_options = []
        if self.ttl:
            using_options += ["TTL {}".format(self.ttl)]
        if self.timestamp:
            using_options += ["TIMESTAMP {}".format(self.timestamp_normalized)]
        if using_options:
            qs += ["USING {}".format(" AND ".join(using_options))]

        return ' '.join(qs)
class UpdateStatement(AssignmentStatement):
    """A CQL UPDATE statement.

    Renders ``UPDATE <table> [USING ...] SET ... [WHERE ...] [IF ...]``.

    Bug fix: ``add_transaction_clause`` previously raised an error message
    naming ``AssignmentClause`` although it validates ``TransactionClause``.
    """

    def __init__(self,
                 table,
                 assignments=None,
                 consistency=None,
                 where=None,
                 ttl=None,
                 timestamp=None,
                 transactions=None):
        """
        :param transactions: iterable of TransactionClause instances rendered
            in the trailing ``IF`` block (lightweight transactions)
        """
        super(UpdateStatement, self).__init__(table,
                                              assignments=assignments,
                                              consistency=consistency,
                                              where=where,
                                              ttl=ttl,
                                              timestamp=timestamp)

        # add iff statements
        self.transactions = []
        for transaction in transactions or []:
            self.add_transaction_clause(transaction)

    def __unicode__(self):
        qs = ['UPDATE', self.table]

        # TTL and TIMESTAMP share a single USING clause joined by AND
        using_options = []
        if self.ttl:
            using_options += ["TTL {}".format(self.ttl)]
        if self.timestamp:
            using_options += ["TIMESTAMP {}".format(self.timestamp_normalized)]
        if using_options:
            qs += ["USING {}".format(" AND ".join(using_options))]

        qs += ['SET']
        qs += [', '.join([six.text_type(c) for c in self.assignments])]

        if self.where_clauses:
            qs += [self._where]

        if len(self.transactions) > 0:
            qs += [self._get_transactions()]

        return ' '.join(qs)

    def add_transaction_clause(self, clause):
        """
        Adds an iff clause to this statement
        :param clause: The clause that will be added to the iff statement
        :type clause: TransactionClause
        """
        if not isinstance(clause, TransactionClause):
            # bug fix: the message previously named AssignmentClause
            raise StatementException('only instances of TransactionClause can be added to statements')
        clause.set_context_id(self.context_counter)
        self.context_counter += clause.get_context_size()
        self.transactions.append(clause)

    def get_context(self):
        ctx = super(UpdateStatement, self).get_context()
        for clause in self.transactions or []:
            clause.update_context(ctx)
        return ctx

    def _get_transactions(self):
        return 'IF {}'.format(' AND '.join([six.text_type(c) for c in self.transactions]))

    def update_context_id(self, i):
        # rebase where clauses and assignments first, then the transactions
        super(UpdateStatement, self).update_context_id(i)
        for transaction in self.transactions:
            transaction.set_context_id(self.context_counter)
            self.context_counter += transaction.get_context_size()
class DeleteStatement(BaseCQLStatement):
    """A CQL DELETE statement.

    Renders ``DELETE [fields] FROM <table> [USING TIMESTAMP ...] [WHERE ...]``.

    Bug fixes: ``add_field`` previously raised an error message naming
    ``AssignmentClause`` although it accepts field names / ``BaseClause``
    instances, and the ``USING`` fragment carried stray leading/trailing
    spaces that produced double-spaced output.
    """

    def __init__(self, table, fields=None, consistency=None, where=None, timestamp=None):
        """
        :param fields: a column name, a list of column names, or delete-clause
            instances; empty means "delete the whole row"
        """
        super(DeleteStatement, self).__init__(
            table,
            consistency=consistency,
            where=where,
            timestamp=timestamp
        )
        self.fields = []
        if isinstance(fields, six.string_types):
            fields = [fields]
        for field in fields or []:
            self.add_field(field)

    def update_context_id(self, i):
        # rebase where clauses first, then the field clauses after them
        super(DeleteStatement, self).update_context_id(i)
        for field in self.fields:
            field.set_context_id(self.context_counter)
            self.context_counter += field.get_context_size()

    def get_context(self):
        ctx = super(DeleteStatement, self).get_context()
        for field in self.fields:
            field.update_context(ctx)
        return ctx

    def add_field(self, field):
        """Add a field (or delete clause) to be removed by this statement."""
        if isinstance(field, six.string_types):
            field = FieldDeleteClause(field)
        if not isinstance(field, BaseClause):
            # bug fix: the message previously named AssignmentClause
            raise StatementException("only field names or BaseClause instances can be added to delete statements")
        field.set_context_id(self.context_counter)
        self.context_counter += field.get_context_size()
        self.fields.append(field)

    def __unicode__(self):
        qs = ['DELETE']
        if self.fields:
            qs += [', '.join(['{}'.format(f) for f in self.fields])]
        qs += ['FROM', self.table]

        delete_option = []
        if self.timestamp:
            delete_option += ["TIMESTAMP {}".format(self.timestamp_normalized)]
        if delete_option:
            # bug fix: was " USING {} " which double-spaced the final statement
            qs += ["USING {}".format(" AND ".join(delete_option))]

        if self.where_clauses:
            qs += [self._where]

        return ' '.join(qs)
| bsd-3-clause | d5f9b44ffb742bbcdf6bd6dd19be2e73 | 31.921665 | 118 | 0.563557 | 4.163622 | false | false | false | false |
datarobot/batch-scoring | datarobot_batch_scoring/consts.py | 1 | 1361 | import collections
# A unit of work flowing through the scoring pipeline:
#   id: batch ordinal, rows: row count, fieldnames: CSV header,
#   data: raw payload, rty_cnt: remaining retry count
Batch = collections.namedtuple('Batch', 'id rows fieldnames data rty_cnt')

# queue markers: SENTINEL signals normal end-of-stream,
# ERROR_SENTINEL signals termination after a failure (rows=1)
SENTINEL = Batch(-1, 0, None, '', -1)
ERROR_SENTINEL = Batch(-1, 1, None, '', -1)

# seconds between progress reports
REPORT_INTERVAL = 5

# ANSI-colored banner shown to users of the deprecated CLI
DEPRECATION_WARNING = (
    '{yellow}{bold}Deprecation Warning!{reset} '
    'The Batch Scoring script is deprecated. It will continue functioning '
    'indefinitely, but it will not receive any new bug fixes and new '
    'functionality. Please, use the Batch Prediction command-line tools '
    'instead: '
    '({underline}https://app.datarobot.com/docs/predictions'
    '/batch/cli-scripts.html{reset}).'
    .format(
        yellow='\033[93m',
        bold='\033[1m',
        reset='\033[0m',
        underline='\033[4m'
    )
)
class WriterQueueMsg(object):
    """Message-type tags placed on the writer process queue."""
    CTX_WARNING = 'CTX_WARNING'
    CTX_ERROR = 'CTX_ERROR'
    RESPONSE = 'RESPONSE'
    SENTINEL = 'SENTINEL'
class ProgressQueueMsg(object):
    """Message-type tags for progress reporting, grouped per pipeline stage
    (shovel -> network -> writer)."""
    SHOVEL_DONE = 'SHOVEL_DONE'
    SHOVEL_ERROR = 'SHOVEL_ERROR'
    SHOVEL_CSV_ERROR = 'SHOVEL_CSV_ERROR'
    SHOVEL_PROGRESS = 'SHOVEL_PROGRESS'
    NETWORK_DONE = 'NETWORK_DONE'
    NETWORK_ERROR = 'NETWORK_ERROR'
    NETWORK_PROGRESS = 'NETWORK_PROGRESS'
    WRITER_DONE = 'WRITER_DONE'
    WRITER_ERROR = 'WRITER_ERROR'
    WRITER_PROGRESS = 'WRITER_PROGRESS'
class TargetType(object):
    """Supported model target types."""
    REGRESSION = 'Regression'
    BINARY = 'Binary'
| bsd-3-clause | c0a192681db3f75e80d4f6e2a59b0783 | 27.354167 | 75 | 0.656135 | 3.179907 | false | false | false | false |
datarobot/batch-scoring | datarobot_batch_scoring/network/dry_run.py | 1 | 2708 | import collections
import logging
import signal
from concurrent.futures import FIRST_COMPLETED
from concurrent.futures import wait
from datarobot_batch_scoring.consts import (SENTINEL)
from six.moves import queue
from .base_network_worker import BaseNetworkWorker
logger = logging.getLogger(__name__)
# minimal stand-in for an HTTP response object in dry-run mode
FakeResponse = collections.namedtuple('FakeResponse', 'status_code, text')
class DryRunNetworkWorker(BaseNetworkWorker):
    """A worker that will drain the network_queue, but doesn't actually send
    any requests or put anything into the writer_queue
    """
    def get_batch(self):
        """Yield batches, preferring retries from the deque over fresh work.

        Terminates on the queue SENTINEL, on the abort flag, or after three
        consecutive empty polls (tracked via the state byte transitions
        E -> e -> I; NOTE(review): state byte semantics inherited from
        BaseNetworkWorker — confirm meanings there).
        """
        while True:
            if self.abort_flag.value:
                self.exit_fast(None, None)
                break
            try:
                # retry deque has priority over the main queue
                r = self.network_deque.get_nowait()
                self.ui.debug('Got batch from dequeu: {}'.format(r.id))
                self.n_retried += 1
                yield r
            except queue.Empty:
                try:
                    r = self.network_queue.get(timeout=1)
                    if r.id == SENTINEL.id:
                        break
                    self.n_consumed += 1
                    yield r
                except queue.Empty:
                    # escalate through the empty-poll states; give up on the third
                    if self.state in (b"-", b"R"):
                        self.state = b'E'
                    elif self.state == b"E":
                        self.state = b'e'
                    elif self.state == b"e":
                        self.state = b'I'
                        break
            except OSError:
                self.ui.error('OS Error')
                break

    def perform_requests(self):
        """Generator that walks every batch/sub-batch without sending requests."""
        signal.signal(signal.SIGINT, self.exit_fast)
        signal.signal(signal.SIGTERM, self.exit_fast)

        self.state = b'E'
        for q_batch in self.get_batch():
            for (_, _) in self.split_batch(q_batch):
                if self.state != b"R":
                    self.state = b'R'
                # yield once per sub-batch so run() can count them
                yield
                continue

        # wait for all batches to finish before returning
        self.state = b'W'
        while self.futures:
            f_len = len(self.futures)
            self.futures = [i for i in self.futures if not i.done()]
            if f_len != len(self.futures):
                self.ui.debug('Waiting for final requests to finish. '
                              'remaining requests: {}'
                              ''.format(len(self.futures)))
            wait(self.futures, return_when=FIRST_COMPLETED)
        self.state = b'D'
        yield True

    def run(self):
        """Drain everything and return the number of sub-batches seen."""
        i = 0
        for _ in self.perform_requests():
            i += 1
        return i

    def go(self):
        # dry-run has no separate process setup; just run inline
        return self.run()
| bsd-3-clause | 253340bc232407a79ad2f6dc12415fdb | 30.858824 | 76 | 0.50517 | 4.367742 | false | false | false | false |
altair-viz/altair | altair/examples/interactive_cross_highlight.py | 1 | 1231 | """
Interactive Chart with Cross-Highlight
======================================
This example shows an interactive chart where selections in one portion of
the chart affect what is shown in other panels. Click on the bar chart to
see a detail of the distribution in the upper panel.
"""
# category: interactive charts
import altair as alt
from vega_datasets import data
source = data.movies.url
pts = alt.selection(type="point", encodings=['x'])
rect = alt.Chart(data.movies.url).mark_rect().encode(
alt.X('IMDB_Rating:Q', bin=True),
alt.Y('Rotten_Tomatoes_Rating:Q', bin=True),
alt.Color('count()',
scale=alt.Scale(scheme='greenblue'),
legend=alt.Legend(title='Total Records')
)
)
circ = rect.mark_point().encode(
alt.ColorValue('grey'),
alt.Size('count()',
legend=alt.Legend(title='Records in Selection')
)
).transform_filter(
pts
)
bar = alt.Chart(source).mark_bar().encode(
x='Major_Genre:N',
y='count()',
color=alt.condition(pts, alt.ColorValue("steelblue"), alt.ColorValue("grey"))
).properties(
width=550,
height=200
).add_params(pts)
alt.vconcat(
rect + circ,
bar
).resolve_legend(
color="independent",
size="independent"
)
| bsd-3-clause | 89d70b3929cf3c7068808b971a6db955 | 24.122449 | 81 | 0.652315 | 3.409972 | false | false | true | false |
altair-viz/altair | altair/sphinxext/altairgallery.py | 1 | 9580 | import hashlib
import os
import json
import random
import collections
from operator import itemgetter
import warnings
import jinja2
from docutils import nodes
from docutils.statemachine import ViewList
from docutils.parsers.rst import Directive
from docutils.parsers.rst.directives import flag
from sphinx.util.nodes import nested_parse_with_titles
from .utils import (
get_docstring_and_rest,
prev_this_next,
create_thumbnail,
create_generic_image,
)
from altair.utils.execeval import eval_block
from altair.examples import iter_examples
EXAMPLE_MODULE = "altair.examples"
GALLERY_TEMPLATE = jinja2.Template(
"""
.. This document is auto-generated by the altair-gallery extension. Do not modify directly.
.. _{{ gallery_ref }}:
{{ title }}
{% for char in title %}-{% endfor %}
This gallery contains a selection of examples of the plots Altair can create.
Some may seem fairly complicated at first glance, but they are built by combining a simple set of declarative building blocks.
Many draw upon sample datasets compiled by the `Vega <https://vega.github.io/vega/>`_ project. To access them yourself, install `vega_datasets <https://github.com/altair-viz/vega_datasets>`_.
.. code-block:: none
python -m pip install vega_datasets
{% for grouper, group in examples %}
.. _gallery-category-{{ grouper }}:
{{ grouper }}
{% for char in grouper %}~{% endfor %}
.. raw:: html
<span class="gallery">
{% for example in group %}
<a class="imagegroup" href="{{ example.name }}.html">
<span class="image" alt="{{ example.title }}" style="background-image: url(..{{ image_dir }}/{{ example.name }}-thumb.png);"></span>
<span class="image-title">{{ example.title }}</span>
</a>
{% endfor %}
</span>
<div style='clear:both;'></div>
{% endfor %}
.. toctree::
:maxdepth: 2
:caption: Examples
:hidden:
Gallery <self>
Tutorials <../case_studies/exploring-weather>
"""
)
MINIGALLERY_TEMPLATE = jinja2.Template(
"""
.. raw:: html
<div id="showcase">
<div class="examples">
{% for example in examples %}
<a class="preview" href="{{ gallery_dir }}/{{ example.name }}.html" style="background-image: url(.{{ image_dir }}/{{ example.name }}-thumb.png)"></a>
{% endfor %}
</div>
</div>
"""
)
EXAMPLE_TEMPLATE = jinja2.Template(
"""
:html_theme.sidebar_secondary.remove:
.. This document is auto-generated by the altair-gallery extension. Do not modify directly.
.. _gallery_{{ name }}:
{{ docstring }}
.. altair-plot::
{% if code_below %}:code-below:{% endif %}
{% if strict %}:strict:{% endif %}
{{ code | indent(4) }}
"""
)
def save_example_pngs(examples, image_dir, make_thumbnails=True):
    """Save example pngs and (optionally) thumbnails

    :param examples: list of example dicts (must have "name" and "code" keys)
    :param image_dir: directory the png/thumbnail files are written to
    :param make_thumbnails: also write a cropped "<name>-thumb.png" per example
    """
    if not os.path.exists(image_dir):
        os.makedirs(image_dir)

    # store hashes so that we know whether images need to be generated
    hash_file = os.path.join(image_dir, "_image_hashes.json")

    if os.path.exists(hash_file):
        with open(hash_file) as f:
            hashes = json.load(f)
    else:
        hashes = {}

    for example in examples:
        filename = example["name"] + ".png"
        image_file = os.path.join(image_dir, filename)

        # hash of the example source decides whether the cached png is stale
        example_hash = hashlib.md5(example["code"].encode()).hexdigest()
        hashes_match = hashes.get(filename, "") == example_hash

        if hashes_match and os.path.exists(image_file):
            print("-> using cached {}".format(image_file))
        else:
            # the file changed or the image file does not exist. Generate it.
            print("-> saving {}".format(image_file))
            chart = eval_block(example["code"])
            try:
                chart.save(image_file)
                hashes[filename] = example_hash
            except ImportError:
                # saving requires an image backend; fall back to a placeholder
                warnings.warn("Unable to save image: using generic image")
                create_generic_image(image_file)

            # persist hashes after every regeneration so progress survives a crash
            with open(hash_file, "w") as f:
                json.dump(hashes, f)

        if make_thumbnails:
            params = example.get("galleryParameters", {})
            thumb_file = os.path.join(image_dir, example["name"] + "-thumb.png")
            create_thumbnail(image_file, thumb_file, **params)

    # Save hashes so we know whether we need to re-generate plots
    with open(hash_file, "w") as f:
        json.dump(hashes, f)
def populate_examples(**kwds):
    """Iterate through Altair examples and extract code

    Extra keyword arguments are merged into every example dict; each example
    gains "docstring", "title", "code", "category", and "lineno" keys.
    Raises if an example file declares no category comment.
    """
    examples = sorted(iter_examples(), key=itemgetter("name"))
    for example in examples:
        docstring, category, code, lineno = get_docstring_and_rest(example["filename"])
        example.update(kwds)
        if category is None:
            raise Exception(
                f"The example {example['name']} is not assigned to a category"
            )
        example.update(
            {
                "docstring": docstring,
                # title is the first line of the docstring
                "title": docstring.strip().split("\n")[0],
                "code": code,
                "category": category.title(),
                "lineno": lineno,
            }
        )
    return examples
class AltairMiniGalleryDirective(Directive):
    """``altair-minigallery`` directive: renders a small thumbnail strip of
    examples, selected by name, by index, or randomly shuffled."""
    has_content = False

    option_spec = {
        "size": int,
        "names": str,
        "indices": lambda x: list(map(int, x.split())),
        "shuffle": flag,
        "seed": int,
        "titles": bool,
        "width": str,
    }

    def run(self):
        size = self.options.get("size", 15)
        names = [name.strip() for name in self.options.get("names", "").split(",")]
        indices = self.options.get("indices", [])
        shuffle = "shuffle" in self.options
        seed = self.options.get("seed", 42)
        titles = self.options.get("titles", False)
        width = self.options.get("width", None)

        env = self.state.document.settings.env
        app = env.app

        gallery_dir = app.builder.config.altair_gallery_dir

        examples = populate_examples()

        if names:
            # explicit name list takes precedence over index/shuffle options
            if len(names) < size:
                raise ValueError(
                    "altair-minigallery: if names are specified, "
                    "the list must be at least as long as size."
                )
            mapping = {example["name"]: example for example in examples}
            examples = [mapping[name] for name in names]
        else:
            if indices:
                examples = [examples[i] for i in indices]
            if shuffle:
                # seeded shuffle keeps the gallery stable across builds
                random.seed(seed)
                random.shuffle(examples)
            if size:
                examples = examples[:size]

        include = MINIGALLERY_TEMPLATE.render(
            image_dir="/_static",
            gallery_dir=gallery_dir,
            examples=examples,
            titles=titles,
            width=width,
        )

        # parse and return documentation
        result = ViewList()
        for line in include.split("\n"):
            result.append(line, "<altair-minigallery>")

        node = nodes.paragraph()
        node.document = self.state.document
        nested_parse_with_titles(self.state, result, node)

        return node.children
def main(app):
    """Build the example gallery: write index.rst, the per-example pages,
    and (re)generate the example images.  Runs on sphinx "builder-inited".
    """
    gallery_dir = app.builder.config.altair_gallery_dir
    target_dir = os.path.join(app.builder.srcdir, gallery_dir)
    image_dir = os.path.join(app.builder.srcdir, "_images")

    gallery_ref = app.builder.config.altair_gallery_ref
    gallery_title = app.builder.config.altair_gallery_title
    examples = populate_examples(gallery_ref=gallery_ref, code_below=True, strict=False)

    if not os.path.exists(target_dir):
        os.makedirs(target_dir)

    examples = sorted(examples, key=lambda x: x["title"])
    # fixed category order for the index page
    examples_toc = collections.OrderedDict(
        {
            "Simple Charts": [],
            "Bar Charts": [],
            "Line Charts": [],
            "Area Charts": [],
            "Circular Plots": [],
            "Scatter Plots": [],
            "Uncertainties And Trends": [],
            "Distributions": [],
            "Tables": [],
            "Maps": [],
            "Interactive Charts": [],
            "Advanced Calculations": [],
            "Case Studies": [],
        }
    )
    for d in examples:
        examples_toc[d["category"]].append(d)

    # Write the gallery index file
    with open(os.path.join(target_dir, "index.rst"), "w") as f:
        f.write(
            GALLERY_TEMPLATE.render(
                title=gallery_title,
                examples=examples_toc.items(),
                image_dir="/_static",
                gallery_ref=gallery_ref,
            )
        )

    # save the images to file
    save_example_pngs(examples, image_dir)

    # Write the individual example files
    for prev_ex, example, next_ex in prev_this_next(examples):
        if prev_ex:
            example["prev_ref"] = "gallery_{name}".format(**prev_ex)
        if next_ex:
            example["next_ref"] = "gallery_{name}".format(**next_ex)
        target_filename = os.path.join(target_dir, example["name"] + ".rst")
        # NOTE(review): os.path.join with a single argument is a no-op here
        with open(os.path.join(target_filename), "w", encoding="utf-8") as f:
            f.write(EXAMPLE_TEMPLATE.render(example))
def setup(app):
    """Sphinx extension entry point: register the gallery builder hooks."""
    app.connect("builder-inited", main)
    app.add_css_file("altair-gallery.css")
    # Register the extension's configuration values with their defaults.
    for option, default in (
        ("altair_gallery_dir", "gallery"),
        ("altair_gallery_ref", "example-gallery"),
        ("altair_gallery_title", "Example Gallery"),
    ):
        app.add_config_value(option, default, "env")
    app.add_directive_to_domain("py", "altair-minigallery", AltairMiniGalleryDirective)
| bsd-3-clause | 4746edfae221b71d81760ba26cdd80bb | 29.125786 | 191 | 0.590084 | 3.782077 | false | false | false | false |
altair-viz/altair | altair/utils/data.py | 2 | 8047 | import json
import os
import random
import hashlib
import warnings
import pandas as pd
from toolz import curried
from typing import Callable
from .core import sanitize_dataframe
from .core import sanitize_geo_interface
from .deprecation import AltairDeprecationWarning
from .plugin_registry import PluginRegistry
# ==============================================================================
# Data transformer registry
# ==============================================================================
# Signature of a data-transformer plugin: a callable that accepts a data
# object (dict or DataFrame) and returns a transformed data object.
DataTransformerType = Callable
class DataTransformerRegistry(PluginRegistry[DataTransformerType]):
    """Plugin registry for data transformers."""

    # Class-level dict: the consolidate_datasets flag is shared state, common
    # to every instance of the registry.
    _global_settings = {"consolidate_datasets": True}

    @property
    def consolidate_datasets(self):
        # Read the shared flag (global toggle consumed elsewhere when charts
        # are compiled -- TODO confirm usage at call sites).
        return self._global_settings["consolidate_datasets"]

    @consolidate_datasets.setter
    def consolidate_datasets(self, value):
        self._global_settings["consolidate_datasets"] = value
# ==============================================================================
# Data model transformers
#
# A data model transformer is a pure function that takes a dict or DataFrame
# and returns a transformed version of a dict or DataFrame. The dict objects
# will be the Data portion of the VegaLite schema. The idea is that user can
# pipe a sequence of these data transformers together to prepare the data before
# it hits the renderer.
#
# In this version of Altair, renderers only deal with the dict form of a
# VegaLite spec, after the Data model has been put into a schema compliant
# form.
#
# A data model transformer has the following type signature:
# DataModelType = Union[dict, pd.DataFrame]
# DataModelTransformerType = Callable[[DataModelType, KwArgs], DataModelType]
# ==============================================================================
class MaxRowsError(Exception):
    """Raised when a data model has too many rows."""
    # (redundant `pass` removed: the docstring already forms the class body)
@curried.curry
def limit_rows(data, max_rows=5000):
    """Raise MaxRowsError if the data model has more than max_rows.

    If max_rows is None, then do not perform any check.
    """
    check_data_type(data)

    # Resolve the container whose length should be counted against max_rows.
    if hasattr(data, "__geo_interface__"):
        geo = data.__geo_interface__
        values = geo["features"] if geo["type"] == "FeatureCollection" else geo
    elif isinstance(data, pd.DataFrame):
        values = data
    elif isinstance(data, dict):
        if "values" not in data:
            # Nothing countable (e.g. a url-based data model): pass through.
            return data
        values = data["values"]

    if max_rows is not None and len(values) > max_rows:
        raise MaxRowsError(
            "The number of rows in your dataset is greater "
            "than the maximum allowed ({}). "
            "For information on how to plot larger datasets "
            "in Altair, see the documentation".format(max_rows)
        )
    return data
@curried.curry
def sample(data, n=None, frac=None):
    """Reduce the size of the data model by sampling without replacement."""
    check_data_type(data)
    if isinstance(data, pd.DataFrame):
        # DataFrame: delegate to pandas' own sampler.
        return data.sample(n=n, frac=frac)
    elif isinstance(data, dict):
        if "values" in data:
            values = data["values"]
            # NOTE(review): if n is None, frac must be provided -- otherwise
            # the multiplication below raises TypeError. Confirm this is the
            # intended contract.
            n = n if n else int(frac * len(values))
            values = random.sample(values, n)
            return {"values": values}
    # NOTE(review): dicts without a "values" key (and pure geo-interface
    # objects) fall through and implicitly return None -- verify intentional.
@curried.curry
def to_json(
    data,
    prefix="altair-data",
    extension="json",
    filename="{prefix}-{hash}.{extension}",
    urlpath="",
):
    """
    Write the data model to a .json file and return a url based data model.
    """
    serialized = _data_to_json_string(data)
    # The content hash makes the filename unique per dataset.
    filename = filename.format(
        prefix=prefix, hash=_compute_data_hash(serialized), extension=extension
    )
    with open(filename, "w") as f:
        f.write(serialized)
    return {"url": os.path.join(urlpath, filename), "format": {"type": "json"}}
@curried.curry
def to_csv(
    data,
    prefix="altair-data",
    extension="csv",
    filename="{prefix}-{hash}.{extension}",
    urlpath="",
):
    """Write the data model to a .csv file and return a url based data model."""
    serialized = _data_to_csv_string(data)
    # The content hash makes the filename unique per dataset.
    filename = filename.format(
        prefix=prefix, hash=_compute_data_hash(serialized), extension=extension
    )
    with open(filename, "w") as f:
        f.write(serialized)
    return {"url": os.path.join(urlpath, filename), "format": {"type": "csv"}}
@curried.curry
def to_values(data):
    """Replace a DataFrame by a data model with values."""
    check_data_type(data)
    if hasattr(data, "__geo_interface__"):
        # Geo-interface objects (including GeoPandas frames) are sanitized
        # into plain mappings before embedding.
        if isinstance(data, pd.DataFrame):
            data = sanitize_dataframe(data)
        return {"values": sanitize_geo_interface(data.__geo_interface__)}
    if isinstance(data, pd.DataFrame):
        return {"values": sanitize_dataframe(data).to_dict(orient="records")}
    if isinstance(data, dict):
        if "values" not in data:
            raise KeyError("values expected in data dict, but not present.")
        return data
def check_data_type(data):
    """Raise TypeError unless the data is a dict, DataFrame, or geo-interface object."""
    acceptable = isinstance(data, (dict, pd.DataFrame)) or hasattr(
        data, "__geo_interface__"
    )
    if not acceptable:
        raise TypeError(
            "Expected dict, DataFrame or a __geo_interface__ attribute, got: {}".format(
                type(data)
            )
        )
# ==============================================================================
# Private utilities
# ==============================================================================
def _compute_data_hash(data_str):
return hashlib.md5(data_str.encode()).hexdigest()
def _data_to_json_string(data):
    """Return a JSON string representation of the input data"""
    check_data_type(data)
    if hasattr(data, "__geo_interface__"):
        if isinstance(data, pd.DataFrame):
            data = sanitize_dataframe(data)
        return json.dumps(sanitize_geo_interface(data.__geo_interface__))
    if isinstance(data, pd.DataFrame):
        return sanitize_dataframe(data).to_json(orient="records", double_precision=15)
    if isinstance(data, dict):
        if "values" not in data:
            raise KeyError("values expected in data dict, but not present.")
        return json.dumps(data["values"], sort_keys=True)
    raise NotImplementedError(
        "to_json only works with data expressed as a DataFrame or as a dict"
    )
def _data_to_csv_string(data):
    """return a CSV string representation of the input data"""
    check_data_type(data)
    if hasattr(data, "__geo_interface__"):
        raise NotImplementedError(
            "to_csv does not work with data that contains the __geo_interface__ attribute"
        )
    if isinstance(data, pd.DataFrame):
        return sanitize_dataframe(data).to_csv(index=False)
    if isinstance(data, dict):
        if "values" not in data:
            raise KeyError("values expected in data dict, but not present")
        return pd.DataFrame.from_dict(data["values"]).to_csv(index=False)
    raise NotImplementedError(
        "to_csv only works with data expressed as a DataFrame or as a dict"
    )
def pipe(data, *funcs):
    """
    Pipe a value through a sequence of functions

    Deprecated: use toolz.curried.pipe() instead.
    """
    message = (
        "alt.pipe() is deprecated, and will be removed in a future release. "
        "Use toolz.curried.pipe() instead."
    )
    warnings.warn(message, AltairDeprecationWarning)
    return curried.pipe(data, *funcs)
def curry(*args, **kwargs):
    """Curry a callable function

    Deprecated: use toolz.curried.curry() instead.
    """
    message = (
        "alt.curry() is deprecated, and will be removed in a future release. "
        "Use toolz.curried.curry() instead."
    )
    warnings.warn(message, AltairDeprecationWarning)
    return curried.curry(*args, **kwargs)
| bsd-3-clause | 80c1d061e340a00aa0bfbe2b021c4cff | 31.979508 | 88 | 0.607431 | 4.135149 | false | false | false | false |
altair-viz/altair | altair/examples/ridgeline_plot.py | 1 | 1593 | """
Ridgeline plot
--------------
A `Ridgeline plot <https://serialmentor.com/blog/2017/9/15/goodbye-joyplots>`_
lets you visualize the distribution of a numeric value for
several groups.
Such a chart can be created in Altair by first transforming the data into a
suitable representation.
"""
# category: distributions
import altair as alt
from vega_datasets import data
source = data.seattle_weather.url

# Height of each facet row (px) and how far each ridge may spill into the
# row above it, as a multiple of `step`.
step = 20
overlap = 1

alt.Chart(source, height=step).transform_timeunit(
    # Bucket every date into its calendar month.
    Month='month(date)'
).transform_joinaggregate(
    # Mean max-temperature per month, used only to color each ridge.
    mean_temp='mean(temp_max)', groupby=['Month']
).transform_bin(
    ['bin_max', 'bin_min'], 'temp_max'
).transform_aggregate(
    # Count of days per temperature bin within each month.
    value='count()', groupby=['Month', 'mean_temp', 'bin_min', 'bin_max']
).transform_impute(
    # Fill empty bins with zero so each ridge is a continuous area.
    impute='value', groupby=['Month', 'mean_temp'], key='bin_min', value=0
).mark_area(
    interpolate='monotone',
    fillOpacity=0.8,
    stroke='lightgray',
    strokeWidth=0.5
).encode(
    alt.X('bin_min:Q', bin='binned', title='Maximum Daily Temperature (C)'),
    alt.Y(
        'value:Q',
        # The negative upper range bound lets each ridge overlap the row above.
        scale=alt.Scale(range=[step, -step * overlap]),
        axis=None
    ),
    alt.Fill(
        'mean_temp:Q',
        legend=None,
        scale=alt.Scale(domain=[30, 5], scheme='redyellowblue')
    )
).facet(
    # One facet row per month; labels formatted as full month names.
    row=alt.Row(
        'Month:T',
        title=None,
        header=alt.Header(labelAngle=0, labelAlign='right', format='%B')
    )
).properties(
    title='Seattle Weather',
    bounds='flush'
).configure_facet(
    # Remove inter-facet spacing so the ridges form one continuous plot.
    spacing=0
).configure_view(
    stroke=None
).configure_title(
    anchor='end'
)
altair-viz/altair | altair/vega/v5/schema/core.py | 1 | 115906 | # The contents of this file are automatically written by
# tools/generate_schema_wrapper.py. Do not modify directly.
from altair.utils.schemapi import SchemaBase, Undefined, _subclasses
import pkgutil
import json
def load_schema():
    """Load the json schema associated with this module's functions"""
    # The schema ships as package data alongside this module.
    raw = pkgutil.get_data(__name__, 'vega-schema.json')
    return json.loads(raw.decode('utf-8'))
class VegaSchema(SchemaBase):
    """Base class for all generated Vega schema wrappers.

    The root JSON schema is loaded once at class-definition time and shared
    by every subclass through ``_rootschema``.
    """

    _rootschema = load_schema()

    @classmethod
    def _default_wrapper_classes(cls):
        # Every VegaSchema subclass is a candidate wrapper class.
        return _subclasses(VegaSchema)
class Root(VegaSchema):
    """Root schema wrapper

    allOf(:class:`scope`, Mapping(required=[]))
    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = VegaSchema._rootschema

    def __init__(self, autosize=Undefined, axes=Undefined, background=Undefined, config=Undefined,
                 data=Undefined, description=Undefined, encode=Undefined, height=Undefined,
                 layout=Undefined, legends=Undefined, marks=Undefined, padding=Undefined,
                 projections=Undefined, scales=Undefined, signals=Undefined, style=Undefined,
                 title=Undefined, usermeta=Undefined, width=Undefined, **kwds):
        super(Root, self).__init__(autosize=autosize, axes=axes, background=background, config=config,
                                   data=data, description=description, encode=encode, height=height,
                                   layout=layout, legends=legends, marks=marks, padding=padding,
                                   projections=projections, scales=scales, signals=signals, style=style,
                                   title=title, usermeta=usermeta, width=width, **kwds)
class autosize(VegaSchema):
    """autosize schema wrapper

    oneOf(enum('pad', 'fit', 'fit-x', 'fit-y', 'none'), Mapping(required=[type]),
    :class:`signalRef`)
    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/autosize'}

    def __init__(self, *args, **kwds):
        super(autosize, self).__init__(*args, **kwds)
class axis(VegaSchema):
    """axis schema wrapper

    Mapping(required=[orient, scale])

    Attributes
    ----------

    orient : oneOf(enum('top', 'bottom', 'left', 'right'), :class:`signalRef`)

    scale : string

    aria : boolean

    bandPosition : oneOf(float, :class:`numberValue`)

    description : string

    domain : boolean

    domainCap : oneOf(string, :class:`stringValue`)

    domainColor : oneOf(None, string, :class:`colorValue`)

    domainDash : oneOf(List(float), :class:`arrayValue`)

    domainDashOffset : oneOf(float, :class:`numberValue`)

    domainOpacity : oneOf(float, :class:`numberValue`)

    domainWidth : oneOf(float, :class:`numberValue`)

    encode : Mapping(required=[])

    format : oneOf(string, Mapping(required=[]), :class:`signalRef`)

    formatType : oneOf(enum('number', 'time', 'utc'), :class:`signalRef`)

    grid : boolean

    gridCap : oneOf(string, :class:`stringValue`)

    gridColor : oneOf(None, string, :class:`colorValue`)

    gridDash : oneOf(List(float), :class:`arrayValue`)

    gridDashOffset : oneOf(float, :class:`numberValue`)

    gridOpacity : oneOf(float, :class:`numberValue`)

    gridScale : string

    gridWidth : oneOf(float, :class:`numberValue`)

    labelAlign : oneOf(enum('left', 'right', 'center'), :class:`alignValue`)

    labelAngle : oneOf(float, :class:`numberValue`)

    labelBaseline : oneOf(enum('top', 'middle', 'bottom', 'alphabetic', 'line-top',
    'line-bottom'), :class:`baselineValue`)

    labelBound : oneOf(boolean, float, :class:`signalRef`)

    labelColor : oneOf(None, string, :class:`colorValue`)

    labelFlush : oneOf(boolean, float, :class:`signalRef`)

    labelFlushOffset : :class:`numberOrSignal`

    labelFont : oneOf(string, :class:`stringValue`)

    labelFontSize : oneOf(float, :class:`numberValue`)

    labelFontStyle : oneOf(string, :class:`stringValue`)

    labelFontWeight : oneOf(enum(None, 'normal', 'bold', 'lighter', 'bolder', '100', '200',
    '300', '400', '500', '600', '700', '800', '900', 100, 200, 300, 400, 500, 600, 700, 800,
    900), :class:`fontWeightValue`)

    labelLimit : oneOf(float, :class:`numberValue`)

    labelLineHeight : oneOf(float, :class:`numberValue`)

    labelOffset : oneOf(float, :class:`numberValue`)

    labelOpacity : oneOf(float, :class:`numberValue`)

    labelOverlap : :class:`labelOverlap`

    labelPadding : oneOf(float, :class:`numberValue`)

    labelSeparation : :class:`numberOrSignal`

    labels : boolean

    maxExtent : oneOf(float, :class:`numberValue`)

    minExtent : oneOf(float, :class:`numberValue`)

    offset : oneOf(float, :class:`numberValue`)

    position : oneOf(float, :class:`numberValue`)

    tickBand : :class:`tickBand`

    tickCap : oneOf(string, :class:`stringValue`)

    tickColor : oneOf(None, string, :class:`colorValue`)

    tickCount : :class:`tickCount`

    tickDash : oneOf(List(float), :class:`arrayValue`)

    tickDashOffset : oneOf(float, :class:`numberValue`)

    tickExtra : :class:`booleanOrSignal`

    tickMinStep : :class:`numberOrSignal`

    tickOffset : oneOf(float, :class:`numberValue`)

    tickOpacity : oneOf(float, :class:`numberValue`)

    tickRound : oneOf(boolean, :class:`booleanValue`)

    tickSize : oneOf(float, :class:`numberValue`)

    tickWidth : oneOf(float, :class:`numberValue`)

    ticks : boolean

    title : :class:`textOrSignal`

    titleAlign : oneOf(enum('left', 'right', 'center'), :class:`alignValue`)

    titleAnchor : oneOf(enum(None, 'start', 'middle', 'end'), :class:`anchorValue`)

    titleAngle : oneOf(float, :class:`numberValue`)

    titleBaseline : oneOf(enum('top', 'middle', 'bottom', 'alphabetic', 'line-top',
    'line-bottom'), :class:`baselineValue`)

    titleColor : oneOf(None, string, :class:`colorValue`)

    titleFont : oneOf(string, :class:`stringValue`)

    titleFontSize : oneOf(float, :class:`numberValue`)

    titleFontStyle : oneOf(string, :class:`stringValue`)

    titleFontWeight : oneOf(enum(None, 'normal', 'bold', 'lighter', 'bolder', '100', '200',
    '300', '400', '500', '600', '700', '800', '900', 100, 200, 300, 400, 500, 600, 700, 800,
    900), :class:`fontWeightValue`)

    titleLimit : oneOf(float, :class:`numberValue`)

    titleLineHeight : oneOf(float, :class:`numberValue`)

    titleOpacity : oneOf(float, :class:`numberValue`)

    titlePadding : oneOf(float, :class:`numberValue`)

    titleX : oneOf(float, :class:`numberValue`)

    titleY : oneOf(float, :class:`numberValue`)

    translate : oneOf(float, :class:`numberValue`)

    values : :class:`arrayOrSignal`

    zindex : float
    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/axis'}

    def __init__(self, orient=Undefined, scale=Undefined, aria=Undefined, bandPosition=Undefined,
                 description=Undefined, domain=Undefined, domainCap=Undefined, domainColor=Undefined,
                 domainDash=Undefined, domainDashOffset=Undefined, domainOpacity=Undefined,
                 domainWidth=Undefined, encode=Undefined, format=Undefined, formatType=Undefined,
                 grid=Undefined, gridCap=Undefined, gridColor=Undefined, gridDash=Undefined,
                 gridDashOffset=Undefined, gridOpacity=Undefined, gridScale=Undefined,
                 gridWidth=Undefined, labelAlign=Undefined, labelAngle=Undefined,
                 labelBaseline=Undefined, labelBound=Undefined, labelColor=Undefined,
                 labelFlush=Undefined, labelFlushOffset=Undefined, labelFont=Undefined,
                 labelFontSize=Undefined, labelFontStyle=Undefined, labelFontWeight=Undefined,
                 labelLimit=Undefined, labelLineHeight=Undefined, labelOffset=Undefined,
                 labelOpacity=Undefined, labelOverlap=Undefined, labelPadding=Undefined,
                 labelSeparation=Undefined, labels=Undefined, maxExtent=Undefined, minExtent=Undefined,
                 offset=Undefined, position=Undefined, tickBand=Undefined, tickCap=Undefined,
                 tickColor=Undefined, tickCount=Undefined, tickDash=Undefined, tickDashOffset=Undefined,
                 tickExtra=Undefined, tickMinStep=Undefined, tickOffset=Undefined,
                 tickOpacity=Undefined, tickRound=Undefined, tickSize=Undefined, tickWidth=Undefined,
                 ticks=Undefined, title=Undefined, titleAlign=Undefined, titleAnchor=Undefined,
                 titleAngle=Undefined, titleBaseline=Undefined, titleColor=Undefined,
                 titleFont=Undefined, titleFontSize=Undefined, titleFontStyle=Undefined,
                 titleFontWeight=Undefined, titleLimit=Undefined, titleLineHeight=Undefined,
                 titleOpacity=Undefined, titlePadding=Undefined, titleX=Undefined, titleY=Undefined,
                 translate=Undefined, values=Undefined, zindex=Undefined, **kwds):
        super(axis, self).__init__(orient=orient, scale=scale, aria=aria, bandPosition=bandPosition,
                                   description=description, domain=domain, domainCap=domainCap,
                                   domainColor=domainColor, domainDash=domainDash,
                                   domainDashOffset=domainDashOffset, domainOpacity=domainOpacity,
                                   domainWidth=domainWidth, encode=encode, format=format,
                                   formatType=formatType, grid=grid, gridCap=gridCap,
                                   gridColor=gridColor, gridDash=gridDash,
                                   gridDashOffset=gridDashOffset, gridOpacity=gridOpacity,
                                   gridScale=gridScale, gridWidth=gridWidth, labelAlign=labelAlign,
                                   labelAngle=labelAngle, labelBaseline=labelBaseline,
                                   labelBound=labelBound, labelColor=labelColor, labelFlush=labelFlush,
                                   labelFlushOffset=labelFlushOffset, labelFont=labelFont,
                                   labelFontSize=labelFontSize, labelFontStyle=labelFontStyle,
                                   labelFontWeight=labelFontWeight, labelLimit=labelLimit,
                                   labelLineHeight=labelLineHeight, labelOffset=labelOffset,
                                   labelOpacity=labelOpacity, labelOverlap=labelOverlap,
                                   labelPadding=labelPadding, labelSeparation=labelSeparation,
                                   labels=labels, maxExtent=maxExtent, minExtent=minExtent,
                                   offset=offset, position=position, tickBand=tickBand, tickCap=tickCap,
                                   tickColor=tickColor, tickCount=tickCount, tickDash=tickDash,
                                   tickDashOffset=tickDashOffset, tickExtra=tickExtra,
                                   tickMinStep=tickMinStep, tickOffset=tickOffset,
                                   tickOpacity=tickOpacity, tickRound=tickRound, tickSize=tickSize,
                                   tickWidth=tickWidth, ticks=ticks, title=title, titleAlign=titleAlign,
                                   titleAnchor=titleAnchor, titleAngle=titleAngle,
                                   titleBaseline=titleBaseline, titleColor=titleColor,
                                   titleFont=titleFont, titleFontSize=titleFontSize,
                                   titleFontStyle=titleFontStyle, titleFontWeight=titleFontWeight,
                                   titleLimit=titleLimit, titleLineHeight=titleLineHeight,
                                   titleOpacity=titleOpacity, titlePadding=titlePadding, titleX=titleX,
                                   titleY=titleY, translate=translate, values=values, zindex=zindex,
                                   **kwds)
class labelOverlap(VegaSchema):
    """labelOverlap schema wrapper

    oneOf(boolean, enum('parity', 'greedy'), :class:`signalRef`)
    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/labelOverlap'}

    def __init__(self, *args, **kwds):
        super(labelOverlap, self).__init__(*args, **kwds)
class tickBand(VegaSchema):
    """tickBand schema wrapper

    oneOf(enum('center', 'extent'), :class:`signalRef`)
    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/tickBand'}

    def __init__(self, *args, **kwds):
        super(tickBand, self).__init__(*args, **kwds)
class tickCount(VegaSchema):
    """tickCount schema wrapper

    oneOf(float, enum('millisecond', 'second', 'minute', 'hour', 'day', 'week', 'month',
    'year'), Mapping(required=[interval]), :class:`signalRef`)
    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/tickCount'}

    def __init__(self, *args, **kwds):
        super(tickCount, self).__init__(*args, **kwds)
class background(VegaSchema):
    """background schema wrapper

    oneOf(string, :class:`signalRef`)
    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/background'}

    def __init__(self, *args, **kwds):
        super(background, self).__init__(*args, **kwds)
class bind(VegaSchema):
    """bind schema wrapper

    oneOf(Mapping(required=[input]), Mapping(required=[input, options]),
    Mapping(required=[input]), Mapping(required=[input]), Mapping(required=[element]))
    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/bind'}

    def __init__(self, *args, **kwds):
        super(bind, self).__init__(*args, **kwds)
class element(VegaSchema):
    """element schema wrapper

    string
    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/element'}

    def __init__(self, *args):
        super(element, self).__init__(*args)
class data(VegaSchema):
    """data schema wrapper

    oneOf(Mapping(required=[name]), Mapping(required=[source, name]), Mapping(required=[url,
    name]), Mapping(required=[values, name]))
    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/data'}

    def __init__(self, *args, **kwds):
        super(data, self).__init__(*args, **kwds)
class paramField(VegaSchema):
    """paramField schema wrapper

    Mapping(required=[field])

    Attributes
    ----------

    field : string

    as : string

    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/paramField'}

    def __init__(self, field=Undefined, **kwds):
        super(paramField, self).__init__(field=field, **kwds)
class rule(VegaSchema):
    """rule schema wrapper

    Mapping(required=[])

    Attributes
    ----------

    test : string

    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/rule'}

    def __init__(self, test=Undefined, **kwds):
        super(rule, self).__init__(test=test, **kwds)
class encodeEntry(VegaSchema):
    """encodeEntry schema wrapper

    Mapping(required=[])

    Attributes
    ----------

    align : :class:`alignValue`

    angle : :class:`numberValue`

    aria : :class:`booleanValue`

    ariaRole : :class:`stringValue`

    ariaRoleDescription : :class:`stringValue`

    aspect : :class:`booleanValue`

    baseline : :class:`baselineValue`

    blend : :class:`blendValue`

    clip : :class:`booleanValue`

    cornerRadius : :class:`numberValue`

    cornerRadiusBottomLeft : :class:`numberValue`

    cornerRadiusBottomRight : :class:`numberValue`

    cornerRadiusTopLeft : :class:`numberValue`

    cornerRadiusTopRight : :class:`numberValue`

    cursor : :class:`stringValue`

    defined : :class:`booleanValue`

    description : :class:`stringValue`

    dir : :class:`stringValue`

    dx : :class:`numberValue`

    dy : :class:`numberValue`

    ellipsis : :class:`stringValue`

    endAngle : :class:`numberValue`

    fill : :class:`colorValue`

    fillOpacity : :class:`numberValue`

    font : :class:`stringValue`

    fontSize : :class:`numberValue`

    fontStyle : :class:`stringValue`

    fontWeight : :class:`fontWeightValue`

    height : :class:`numberValue`

    innerRadius : :class:`numberValue`

    interpolate : :class:`stringValue`

    limit : :class:`numberValue`

    lineBreak : :class:`stringValue`

    lineHeight : :class:`numberValue`

    opacity : :class:`numberValue`

    orient : :class:`directionValue`

    outerRadius : :class:`numberValue`

    padAngle : :class:`numberValue`

    path : :class:`stringValue`

    radius : :class:`numberValue`

    scaleX : :class:`numberValue`

    scaleY : :class:`numberValue`

    shape : :class:`stringValue`

    size : :class:`numberValue`

    smooth : :class:`booleanValue`

    startAngle : :class:`numberValue`

    stroke : :class:`colorValue`

    strokeCap : :class:`strokeCapValue`

    strokeDash : :class:`arrayValue`

    strokeDashOffset : :class:`numberValue`

    strokeForeground : :class:`booleanValue`

    strokeJoin : :class:`strokeJoinValue`

    strokeMiterLimit : :class:`numberValue`

    strokeOffset : :class:`numberValue`

    strokeOpacity : :class:`numberValue`

    strokeWidth : :class:`numberValue`

    tension : :class:`numberValue`

    text : :class:`textValue`

    theta : :class:`numberValue`

    tooltip : :class:`anyValue`

    url : :class:`stringValue`

    width : :class:`numberValue`

    x : :class:`numberValue`

    x2 : :class:`numberValue`

    xc : :class:`numberValue`

    y : :class:`numberValue`

    y2 : :class:`numberValue`

    yc : :class:`numberValue`

    zindex : :class:`numberValue`
    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/encodeEntry'}

    def __init__(self, align=Undefined, angle=Undefined, aria=Undefined, ariaRole=Undefined,
                 ariaRoleDescription=Undefined, aspect=Undefined, baseline=Undefined, blend=Undefined,
                 clip=Undefined, cornerRadius=Undefined, cornerRadiusBottomLeft=Undefined,
                 cornerRadiusBottomRight=Undefined, cornerRadiusTopLeft=Undefined,
                 cornerRadiusTopRight=Undefined, cursor=Undefined, defined=Undefined,
                 description=Undefined, dir=Undefined, dx=Undefined, dy=Undefined, ellipsis=Undefined,
                 endAngle=Undefined, fill=Undefined, fillOpacity=Undefined, font=Undefined,
                 fontSize=Undefined, fontStyle=Undefined, fontWeight=Undefined, height=Undefined,
                 innerRadius=Undefined, interpolate=Undefined, limit=Undefined, lineBreak=Undefined,
                 lineHeight=Undefined, opacity=Undefined, orient=Undefined, outerRadius=Undefined,
                 padAngle=Undefined, path=Undefined, radius=Undefined, scaleX=Undefined,
                 scaleY=Undefined, shape=Undefined, size=Undefined, smooth=Undefined,
                 startAngle=Undefined, stroke=Undefined, strokeCap=Undefined, strokeDash=Undefined,
                 strokeDashOffset=Undefined, strokeForeground=Undefined, strokeJoin=Undefined,
                 strokeMiterLimit=Undefined, strokeOffset=Undefined, strokeOpacity=Undefined,
                 strokeWidth=Undefined, tension=Undefined, text=Undefined, theta=Undefined,
                 tooltip=Undefined, url=Undefined, width=Undefined, x=Undefined, x2=Undefined,
                 xc=Undefined, y=Undefined, y2=Undefined, yc=Undefined, zindex=Undefined, **kwds):
        super(encodeEntry, self).__init__(align=align, angle=angle, aria=aria, ariaRole=ariaRole,
                                          ariaRoleDescription=ariaRoleDescription, aspect=aspect,
                                          baseline=baseline, blend=blend, clip=clip,
                                          cornerRadius=cornerRadius,
                                          cornerRadiusBottomLeft=cornerRadiusBottomLeft,
                                          cornerRadiusBottomRight=cornerRadiusBottomRight,
                                          cornerRadiusTopLeft=cornerRadiusTopLeft,
                                          cornerRadiusTopRight=cornerRadiusTopRight, cursor=cursor,
                                          defined=defined, description=description, dir=dir, dx=dx,
                                          dy=dy, ellipsis=ellipsis, endAngle=endAngle, fill=fill,
                                          fillOpacity=fillOpacity, font=font, fontSize=fontSize,
                                          fontStyle=fontStyle, fontWeight=fontWeight, height=height,
                                          innerRadius=innerRadius, interpolate=interpolate, limit=limit,
                                          lineBreak=lineBreak, lineHeight=lineHeight, opacity=opacity,
                                          orient=orient, outerRadius=outerRadius, padAngle=padAngle,
                                          path=path, radius=radius, scaleX=scaleX, scaleY=scaleY,
                                          shape=shape, size=size, smooth=smooth, startAngle=startAngle,
                                          stroke=stroke, strokeCap=strokeCap, strokeDash=strokeDash,
                                          strokeDashOffset=strokeDashOffset,
                                          strokeForeground=strokeForeground, strokeJoin=strokeJoin,
                                          strokeMiterLimit=strokeMiterLimit, strokeOffset=strokeOffset,
                                          strokeOpacity=strokeOpacity, strokeWidth=strokeWidth,
                                          tension=tension, text=text, theta=theta, tooltip=tooltip,
                                          url=url, width=width, x=x, x2=x2, xc=xc, y=y, y2=y2, yc=yc,
                                          zindex=zindex, **kwds)
class encode(VegaSchema):
    """encode schema wrapper

    Mapping(required=[])
    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/encode'}

    def __init__(self, **kwds):
        super(encode, self).__init__(**kwds)
class field(VegaSchema):
    """field schema wrapper

    oneOf(string, :class:`signalRef`, Mapping(required=[datum]), Mapping(required=[group]),
    Mapping(required=[parent]))
    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/field'}

    def __init__(self, *args, **kwds):
        super(field, self).__init__(*args, **kwds)
class stringModifiers(VegaSchema):
    """stringModifiers schema wrapper

    Mapping(required=[])

    Attributes
    ----------

    scale : :class:`field`

    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/stringModifiers'}

    def __init__(self, scale=Undefined, **kwds):
        super(stringModifiers, self).__init__(scale=scale, **kwds)
class numberModifiers(VegaSchema):
    """numberModifiers schema wrapper

    Mapping(required=[])

    Attributes
    ----------

    band : oneOf(float, boolean)

    exponent : oneOf(float, :class:`numberValue`)

    extra : boolean

    mult : oneOf(float, :class:`numberValue`)

    offset : oneOf(float, :class:`numberValue`)

    round : boolean

    scale : :class:`field`

    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/numberModifiers'}

    def __init__(self, band=Undefined, exponent=Undefined, extra=Undefined, mult=Undefined,
                 offset=Undefined, round=Undefined, scale=Undefined, **kwds):
        super(numberModifiers, self).__init__(band=band, exponent=exponent, extra=extra, mult=mult,
                                              offset=offset, round=round, scale=scale, **kwds)
class anyValue(VegaSchema):
    """anyValue schema wrapper

    oneOf(List(allOf(:class:`rule`, allOf(:class:`stringModifiers`,
    anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
    Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
    band]), Mapping(required=[offset]))))), allOf(:class:`stringModifiers`,
    anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
    Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
    band]), Mapping(required=[offset]))))
    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/anyValue'}

    def __init__(self, *args, **kwds):
        super(anyValue, self).__init__(*args, **kwds)
class blendValue(VegaSchema):
    """blendValue schema wrapper

    oneOf(List(allOf(:class:`rule`, allOf(:class:`stringModifiers`,
    anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
    Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
    band]), Mapping(required=[offset]))))), allOf(:class:`stringModifiers`,
    anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
    Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
    band]), Mapping(required=[offset]))))
    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/blendValue'}

    def __init__(self, *args, **kwds):
        super(blendValue, self).__init__(*args, **kwds)
class numberValue(VegaSchema):
    """numberValue schema wrapper

    oneOf(List(allOf(:class:`rule`, allOf(:class:`numberModifiers`,
    anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
    Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
    band]), Mapping(required=[offset]))))), allOf(:class:`numberModifiers`,
    anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
    Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
    band]), Mapping(required=[offset]))))
    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/numberValue'}

    def __init__(self, *args, **kwds):
        super(numberValue, self).__init__(*args, **kwds)
class stringValue(VegaSchema):
    """stringValue schema wrapper

    oneOf(List(allOf(:class:`rule`, allOf(:class:`stringModifiers`,
    anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
    Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
    band]), Mapping(required=[offset]))))), allOf(:class:`stringModifiers`,
    anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
    Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
    band]), Mapping(required=[offset]))))
    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/stringValue'}

    def __init__(self, *args, **kwds):
        super(stringValue, self).__init__(*args, **kwds)
class textValue(VegaSchema):
    """textValue schema wrapper

    oneOf(List(allOf(:class:`rule`, allOf(:class:`stringModifiers`,
    anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
    Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
    band]), Mapping(required=[offset]))))), allOf(:class:`stringModifiers`,
    anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
    Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
    band]), Mapping(required=[offset]))))
    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/textValue'}

    def __init__(self, *args, **kwds):
        super(textValue, self).__init__(*args, **kwds)
class booleanValue(VegaSchema):
    """booleanValue schema wrapper

    oneOf(List(allOf(:class:`rule`, allOf(:class:`stringModifiers`,
    anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
    Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
    band]), Mapping(required=[offset]))))), allOf(:class:`stringModifiers`,
    anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
    Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
    band]), Mapping(required=[offset]))))
    """
    # Auto-generated wrapper (see file header); edit the generator, not this class.
    _schema = {'$ref': '#/definitions/booleanValue'}

    def __init__(self, *args, **kwds):
        super(booleanValue, self).__init__(*args, **kwds)
class arrayValue(VegaSchema):
"""arrayValue schema wrapper
oneOf(List(allOf(:class:`rule`, allOf(:class:`stringModifiers`,
anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
band]), Mapping(required=[offset]))))), allOf(:class:`stringModifiers`,
anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
band]), Mapping(required=[offset]))))
"""
_schema = {'$ref': '#/definitions/arrayValue'}
def __init__(self, *args, **kwds):
super(arrayValue, self).__init__(*args, **kwds)
class fontWeightValue(VegaSchema):
"""fontWeightValue schema wrapper
oneOf(List(allOf(:class:`rule`, allOf(:class:`stringModifiers`,
anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
band]), Mapping(required=[offset]))))), allOf(:class:`stringModifiers`,
anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
band]), Mapping(required=[offset]))))
"""
_schema = {'$ref': '#/definitions/fontWeightValue'}
def __init__(self, *args, **kwds):
super(fontWeightValue, self).__init__(*args, **kwds)
class anchorValue(VegaSchema):
"""anchorValue schema wrapper
oneOf(List(allOf(:class:`rule`, allOf(:class:`stringModifiers`,
anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
band]), Mapping(required=[offset]))))), allOf(:class:`stringModifiers`,
anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
band]), Mapping(required=[offset]))))
"""
_schema = {'$ref': '#/definitions/anchorValue'}
def __init__(self, *args, **kwds):
super(anchorValue, self).__init__(*args, **kwds)
class alignValue(VegaSchema):
"""alignValue schema wrapper
oneOf(List(allOf(:class:`rule`, allOf(:class:`stringModifiers`,
anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
band]), Mapping(required=[offset]))))), allOf(:class:`stringModifiers`,
anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
band]), Mapping(required=[offset]))))
"""
_schema = {'$ref': '#/definitions/alignValue'}
def __init__(self, *args, **kwds):
super(alignValue, self).__init__(*args, **kwds)
class baselineValue(VegaSchema):
"""baselineValue schema wrapper
oneOf(List(allOf(:class:`rule`, allOf(:class:`stringModifiers`,
anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
band]), Mapping(required=[offset]))))), allOf(:class:`stringModifiers`,
anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
band]), Mapping(required=[offset]))))
"""
_schema = {'$ref': '#/definitions/baselineValue'}
def __init__(self, *args, **kwds):
super(baselineValue, self).__init__(*args, **kwds)
class directionValue(VegaSchema):
"""directionValue schema wrapper
oneOf(List(allOf(:class:`rule`, allOf(:class:`stringModifiers`,
anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
band]), Mapping(required=[offset]))))), allOf(:class:`stringModifiers`,
anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
band]), Mapping(required=[offset]))))
"""
_schema = {'$ref': '#/definitions/directionValue'}
def __init__(self, *args, **kwds):
super(directionValue, self).__init__(*args, **kwds)
class orientValue(VegaSchema):
"""orientValue schema wrapper
oneOf(List(allOf(:class:`rule`, allOf(:class:`stringModifiers`,
anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
band]), Mapping(required=[offset]))))), allOf(:class:`stringModifiers`,
anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
band]), Mapping(required=[offset]))))
"""
_schema = {'$ref': '#/definitions/orientValue'}
def __init__(self, *args, **kwds):
super(orientValue, self).__init__(*args, **kwds)
class strokeCapValue(VegaSchema):
"""strokeCapValue schema wrapper
oneOf(List(allOf(:class:`rule`, allOf(:class:`stringModifiers`,
anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
band]), Mapping(required=[offset]))))), allOf(:class:`stringModifiers`,
anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
band]), Mapping(required=[offset]))))
"""
_schema = {'$ref': '#/definitions/strokeCapValue'}
def __init__(self, *args, **kwds):
super(strokeCapValue, self).__init__(*args, **kwds)
class strokeJoinValue(VegaSchema):
"""strokeJoinValue schema wrapper
oneOf(List(allOf(:class:`rule`, allOf(:class:`stringModifiers`,
anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
band]), Mapping(required=[offset]))))), allOf(:class:`stringModifiers`,
anyOf(oneOf(:class:`signalRef`, Mapping(required=[value]), Mapping(required=[field]),
Mapping(required=[range])), Mapping(required=[scale, value]), Mapping(required=[scale,
band]), Mapping(required=[offset]))))
"""
_schema = {'$ref': '#/definitions/strokeJoinValue'}
def __init__(self, *args, **kwds):
super(strokeJoinValue, self).__init__(*args, **kwds)
class baseColorValue(VegaSchema):
"""baseColorValue schema wrapper
oneOf(allOf(:class:`stringModifiers`, anyOf(oneOf(:class:`signalRef`,
Mapping(required=[value]), Mapping(required=[field]), Mapping(required=[range])),
Mapping(required=[scale, value]), Mapping(required=[scale, band]),
Mapping(required=[offset]))), Mapping(required=[value]), Mapping(required=[value]),
Mapping(required=[gradient]), Mapping(required=[color]))
"""
_schema = {'$ref': '#/definitions/baseColorValue'}
def __init__(self, *args, **kwds):
super(baseColorValue, self).__init__(*args, **kwds)
class colorRGB(VegaSchema):
    """Wrapper for the ``colorRGB`` schema: required ``r``, ``g`` and ``b``
    channels, each a :class:`numberValue`."""

    _schema = {"$ref": "#/definitions/colorRGB"}

    def __init__(self, b=Undefined, g=Undefined, r=Undefined, **kwds):
        super(colorRGB, self).__init__(r=r, g=g, b=b, **kwds)


class colorHSL(VegaSchema):
    """Wrapper for the ``colorHSL`` schema: required ``h``, ``s`` and ``l``
    channels, each a :class:`numberValue`."""

    _schema = {"$ref": "#/definitions/colorHSL"}

    def __init__(self, h=Undefined, l=Undefined, s=Undefined, **kwds):
        super(colorHSL, self).__init__(h=h, s=s, l=l, **kwds)


class colorLAB(VegaSchema):
    """Wrapper for the ``colorLAB`` schema: required ``l``, ``a`` and ``b``
    channels, each a :class:`numberValue`."""

    _schema = {"$ref": "#/definitions/colorLAB"}

    def __init__(self, a=Undefined, b=Undefined, l=Undefined, **kwds):
        super(colorLAB, self).__init__(l=l, a=a, b=b, **kwds)


class colorHCL(VegaSchema):
    """Wrapper for the ``colorHCL`` schema: required ``h``, ``c`` and ``l``
    channels, each a :class:`numberValue`."""

    _schema = {"$ref": "#/definitions/colorHCL"}

    def __init__(self, c=Undefined, h=Undefined, l=Undefined, **kwds):
        super(colorHCL, self).__init__(h=h, c=c, l=l, **kwds)
class colorValue(VegaSchema):
    """Wrapper for the ``colorValue`` schema: a :class:`baseColorValue` or a
    list of rule-based base color values."""

    _schema = {"$ref": "#/definitions/colorValue"}

    def __init__(self, *args, **kwds):
        super(colorValue, self).__init__(*args, **kwds)


class gradientStops(VegaSchema):
    """Wrapper for the ``gradientStops`` schema: a list of mappings, each
    requiring ``offset`` and ``color``."""

    _schema = {"$ref": "#/definitions/gradientStops"}

    def __init__(self, *args):
        super(gradientStops, self).__init__(*args)
class linearGradient(VegaSchema):
    """Wrapper for the ``linearGradient`` schema definition.

    Required: ``gradient`` (enum 'linear') and ``stops``.  Optional:
    ``id`` plus the ``x1``/``x2``/``y1``/``y2`` endpoint coordinates.
    """

    _schema = {"$ref": "#/definitions/linearGradient"}

    def __init__(self, gradient=Undefined, stops=Undefined, id=Undefined,
                 x1=Undefined, x2=Undefined, y1=Undefined, y2=Undefined, **kwds):
        super(linearGradient, self).__init__(
            gradient=gradient, stops=stops, id=id,
            x1=x1, y1=y1, x2=x2, y2=y2, **kwds)


class radialGradient(VegaSchema):
    """Wrapper for the ``radialGradient`` schema definition.

    Required: ``gradient`` (enum 'radial') and ``stops``.  Optional:
    ``id``, radii ``r1``/``r2`` and center coordinates ``x1``/``x2``/``y1``/``y2``.
    """

    _schema = {"$ref": "#/definitions/radialGradient"}

    def __init__(self, gradient=Undefined, stops=Undefined, id=Undefined,
                 r1=Undefined, r2=Undefined, x1=Undefined, x2=Undefined,
                 y1=Undefined, y2=Undefined, **kwds):
        super(radialGradient, self).__init__(
            gradient=gradient, stops=stops, id=id,
            r1=r1, r2=r2, x1=x1, y1=y1, x2=x2, y2=y2, **kwds)
class expr(VegaSchema):
    """Wrapper for the ``expr`` schema: a mapping with a required ``expr``
    string.  The optional ``as`` attribute (a Python keyword) must be passed
    through ``**kwds``."""

    _schema = {"$ref": "#/definitions/expr"}

    def __init__(self, expr=Undefined, **kwds):
        super(expr, self).__init__(expr=expr, **kwds)


class exprString(VegaSchema):
    """Wrapper for the ``exprString`` schema: a plain string."""

    _schema = {"$ref": "#/definitions/exprString"}

    def __init__(self, *args):
        super(exprString, self).__init__(*args)


class layout(VegaSchema):
    """Wrapper for the ``layout`` schema: a mapping or a :class:`signalRef`."""

    _schema = {"$ref": "#/definitions/layout"}

    def __init__(self, *args, **kwds):
        super(layout, self).__init__(*args, **kwds)


class guideEncode(VegaSchema):
    """Wrapper for the ``guideEncode`` schema: optional ``interactive``,
    ``name`` and ``style`` attributes."""

    _schema = {"$ref": "#/definitions/guideEncode"}

    def __init__(self, interactive=Undefined, name=Undefined, style=Undefined, **kwds):
        super(guideEncode, self).__init__(
            name=name, style=style, interactive=interactive, **kwds)
class legend(VegaSchema):
    """Wrapper for the ``legend`` schema definition.

    All attributes are optional keyword arguments; the underlying schema
    requires at least one of ``size``, ``shape``, ``fill``, ``stroke``,
    ``opacity``, ``strokeDash`` or ``strokeWidth`` to be present.
    """

    _schema = {"$ref": "#/definitions/legend"}

    def __init__(self, aria=Undefined, clipHeight=Undefined, columnPadding=Undefined,
                 columns=Undefined, cornerRadius=Undefined, description=Undefined,
                 direction=Undefined, encode=Undefined, fill=Undefined,
                 fillColor=Undefined, format=Undefined, formatType=Undefined,
                 gradientLength=Undefined, gradientOpacity=Undefined,
                 gradientStrokeColor=Undefined, gradientStrokeWidth=Undefined,
                 gradientThickness=Undefined, gridAlign=Undefined,
                 labelAlign=Undefined, labelBaseline=Undefined, labelColor=Undefined,
                 labelFont=Undefined, labelFontSize=Undefined, labelFontStyle=Undefined,
                 labelFontWeight=Undefined, labelLimit=Undefined, labelOffset=Undefined,
                 labelOpacity=Undefined, labelOverlap=Undefined,
                 labelSeparation=Undefined, legendX=Undefined, legendY=Undefined,
                 offset=Undefined, opacity=Undefined, orient=Undefined,
                 padding=Undefined, rowPadding=Undefined, shape=Undefined,
                 size=Undefined, stroke=Undefined, strokeColor=Undefined,
                 strokeDash=Undefined, strokeWidth=Undefined, symbolDash=Undefined,
                 symbolDashOffset=Undefined, symbolFillColor=Undefined,
                 symbolLimit=Undefined, symbolOffset=Undefined,
                 symbolOpacity=Undefined, symbolSize=Undefined,
                 symbolStrokeColor=Undefined, symbolStrokeWidth=Undefined,
                 symbolType=Undefined, tickCount=Undefined, tickMinStep=Undefined,
                 title=Undefined, titleAlign=Undefined, titleAnchor=Undefined,
                 titleBaseline=Undefined, titleColor=Undefined, titleFont=Undefined,
                 titleFontSize=Undefined, titleFontStyle=Undefined,
                 titleFontWeight=Undefined, titleLimit=Undefined,
                 titleLineHeight=Undefined, titleOpacity=Undefined,
                 titleOrient=Undefined, titlePadding=Undefined, type=Undefined,
                 values=Undefined, zindex=Undefined, **kwds):
        # Forward every named argument unchanged to the VegaSchema initializer.
        super(legend, self).__init__(
            aria=aria, clipHeight=clipHeight, columnPadding=columnPadding,
            columns=columns, cornerRadius=cornerRadius, description=description,
            direction=direction, encode=encode, fill=fill, fillColor=fillColor,
            format=format, formatType=formatType, gradientLength=gradientLength,
            gradientOpacity=gradientOpacity,
            gradientStrokeColor=gradientStrokeColor,
            gradientStrokeWidth=gradientStrokeWidth,
            gradientThickness=gradientThickness, gridAlign=gridAlign,
            labelAlign=labelAlign, labelBaseline=labelBaseline,
            labelColor=labelColor, labelFont=labelFont,
            labelFontSize=labelFontSize, labelFontStyle=labelFontStyle,
            labelFontWeight=labelFontWeight, labelLimit=labelLimit,
            labelOffset=labelOffset, labelOpacity=labelOpacity,
            labelOverlap=labelOverlap, labelSeparation=labelSeparation,
            legendX=legendX, legendY=legendY, offset=offset, opacity=opacity,
            orient=orient, padding=padding, rowPadding=rowPadding, shape=shape,
            size=size, stroke=stroke, strokeColor=strokeColor,
            strokeDash=strokeDash, strokeWidth=strokeWidth,
            symbolDash=symbolDash, symbolDashOffset=symbolDashOffset,
            symbolFillColor=symbolFillColor, symbolLimit=symbolLimit,
            symbolOffset=symbolOffset, symbolOpacity=symbolOpacity,
            symbolSize=symbolSize, symbolStrokeColor=symbolStrokeColor,
            symbolStrokeWidth=symbolStrokeWidth, symbolType=symbolType,
            tickCount=tickCount, tickMinStep=tickMinStep, title=title,
            titleAlign=titleAlign, titleAnchor=titleAnchor,
            titleBaseline=titleBaseline, titleColor=titleColor,
            titleFont=titleFont, titleFontSize=titleFontSize,
            titleFontStyle=titleFontStyle, titleFontWeight=titleFontWeight,
            titleLimit=titleLimit, titleLineHeight=titleLineHeight,
            titleOpacity=titleOpacity, titleOrient=titleOrient,
            titlePadding=titlePadding, type=type, values=values, zindex=zindex,
            **kwds)
class compare(VegaSchema):
    """Wrapper for the ``compare`` schema: one of two mapping forms."""

    _schema = {"$ref": "#/definitions/compare"}

    def __init__(self, *args, **kwds):
        super(compare, self).__init__(*args, **kwds)


class from_(VegaSchema):
    """Wrapper for the ``from`` schema definition (trailing underscore avoids
    the Python keyword).  Optional ``data`` attribute naming a data source."""

    _schema = {"$ref": "#/definitions/from"}

    def __init__(self, data=Undefined, **kwds):
        super(from_, self).__init__(data=data, **kwds)


class facet(VegaSchema):
    """Wrapper for the ``facet`` schema: a required ``facet`` mapping (keyed
    by name/data/field or name/data/groupby) plus an optional ``data`` name."""

    _schema = {"$ref": "#/definitions/facet"}

    def __init__(self, facet=Undefined, data=Undefined, **kwds):
        super(facet, self).__init__(facet=facet, data=data, **kwds)
class mark(VegaSchema):
    """Wrapper for the ``mark`` schema definition.

    Required: ``type`` (a :class:`marktype`).  Optional attributes cover
    encoding, interactivity, sorting, styling and mark-level transforms.
    """

    _schema = {"$ref": "#/definitions/mark"}

    def __init__(self, type=Undefined, aria=Undefined, clip=Undefined,
                 description=Undefined, encode=Undefined, interactive=Undefined,
                 key=Undefined, name=Undefined, on=Undefined, role=Undefined,
                 sort=Undefined, style=Undefined, transform=Undefined, **kwds):
        super(mark, self).__init__(
            type=type, name=name, role=role, key=key, aria=aria,
            description=description, clip=clip, encode=encode,
            interactive=interactive, on=on, sort=sort, style=style,
            transform=transform, **kwds)


class markclip(VegaSchema):
    """Wrapper for the ``markclip`` schema: a boolean-or-signal, a ``path``
    mapping, or a ``sphere`` mapping."""

    _schema = {"$ref": "#/definitions/markclip"}

    def __init__(self, *args, **kwds):
        super(markclip, self).__init__(*args, **kwds)
class markGroup(VegaSchema):
    """Wrapper for the ``markGroup`` schema: a group mark combining the
    :class:`mark` attributes with the nested :class:`scope` attributes
    (axes, data, legends, marks, projections, scales, signals, ...)."""

    _schema = {"$ref": "#/definitions/markGroup"}

    def __init__(self, type=Undefined, aria=Undefined, axes=Undefined,
                 clip=Undefined, data=Undefined, description=Undefined,
                 encode=Undefined, interactive=Undefined, key=Undefined,
                 layout=Undefined, legends=Undefined, marks=Undefined,
                 name=Undefined, on=Undefined, projections=Undefined,
                 role=Undefined, scales=Undefined, signals=Undefined,
                 sort=Undefined, style=Undefined, title=Undefined,
                 transform=Undefined, usermeta=Undefined, **kwds):
        super(markGroup, self).__init__(
            type=type, name=name, role=role, key=key, aria=aria,
            description=description, clip=clip, encode=encode,
            interactive=interactive, on=on, sort=sort, style=style,
            transform=transform, axes=axes, data=data, layout=layout,
            legends=legends, marks=marks, projections=projections,
            scales=scales, signals=signals, title=title, usermeta=usermeta,
            **kwds)


class markVisual(VegaSchema):
    """Wrapper for the ``markVisual`` schema: a non-group visual mark with
    the standard :class:`mark` attributes."""

    _schema = {"$ref": "#/definitions/markVisual"}

    def __init__(self, type=Undefined, aria=Undefined, clip=Undefined,
                 description=Undefined, encode=Undefined, interactive=Undefined,
                 key=Undefined, name=Undefined, on=Undefined, role=Undefined,
                 sort=Undefined, style=Undefined, transform=Undefined, **kwds):
        super(markVisual, self).__init__(
            type=type, name=name, role=role, key=key, aria=aria,
            description=description, clip=clip, encode=encode,
            interactive=interactive, on=on, sort=sort, style=style,
            transform=transform, **kwds)
class style(VegaSchema):
    """Wrapper for the ``style`` schema: a style name or list of names."""

    _schema = {"$ref": "#/definitions/style"}

    def __init__(self, *args, **kwds):
        super(style, self).__init__(*args, **kwds)


class marktype(VegaSchema):
    """Wrapper for the ``marktype`` schema: a plain string."""

    _schema = {"$ref": "#/definitions/marktype"}

    def __init__(self, *args):
        super(marktype, self).__init__(*args)


class listener(VegaSchema):
    """Wrapper for the ``listener`` schema: a :class:`signalRef`, a ``scale``
    mapping, or a :class:`stream`."""

    _schema = {"$ref": "#/definitions/listener"}

    def __init__(self, *args, **kwds):
        super(listener, self).__init__(*args, **kwds)


class onEvents(VegaSchema):
    """Wrapper for the ``onEvents`` schema: a list of event handlers, each
    with required ``events`` plus either ``encode`` or ``update``."""

    _schema = {"$ref": "#/definitions/onEvents"}

    def __init__(self, *args):
        super(onEvents, self).__init__(*args)


class onTrigger(VegaSchema):
    """Wrapper for the ``onTrigger`` schema: a list of ``trigger`` mappings."""

    _schema = {"$ref": "#/definitions/onTrigger"}

    def __init__(self, *args):
        super(onTrigger, self).__init__(*args)


class onMarkTrigger(VegaSchema):
    """Wrapper for the ``onMarkTrigger`` schema: a list of ``trigger``
    mappings scoped to marks."""

    _schema = {"$ref": "#/definitions/onMarkTrigger"}

    def __init__(self, *args):
        super(onMarkTrigger, self).__init__(*args)


class padding(VegaSchema):
    """Wrapper for the ``padding`` schema: a number, a mapping, or a
    :class:`signalRef`."""

    _schema = {"$ref": "#/definitions/padding"}

    def __init__(self, *args, **kwds):
        super(padding, self).__init__(*args, **kwds)
class projection(VegaSchema):
    """Wrapper for the ``projection`` schema definition.

    Required: ``name``.  The remaining attributes configure the cartographic
    projection (type, center, rotate, scale, translate, clipping, fitting...),
    each accepting a literal value or a signal reference.
    """

    _schema = {"$ref": "#/definitions/projection"}

    def __init__(self, name=Undefined, center=Undefined, clipAngle=Undefined,
                 clipExtent=Undefined, extent=Undefined, fit=Undefined,
                 parallels=Undefined, pointRadius=Undefined, precision=Undefined,
                 rotate=Undefined, scale=Undefined, size=Undefined,
                 translate=Undefined, type=Undefined, **kwds):
        super(projection, self).__init__(
            name=name, type=type, center=center, rotate=rotate, scale=scale,
            translate=translate, parallels=parallels, clipAngle=clipAngle,
            clipExtent=clipExtent, extent=extent, fit=fit, size=size,
            pointRadius=pointRadius, precision=precision, **kwds)
class scale(VegaSchema):
    """Wrapper for the ``scale`` schema: one of several mapping forms, each
    keyed by ``name`` (and usually ``type``)."""

    _schema = {"$ref": "#/definitions/scale"}

    def __init__(self, *args, **kwds):
        super(scale, self).__init__(*args, **kwds)


class scaleField(VegaSchema):
    """Wrapper for the ``scaleField`` schema: a string or :class:`signalRef`."""

    _schema = {"$ref": "#/definitions/scaleField"}

    def __init__(self, *args, **kwds):
        super(scaleField, self).__init__(*args, **kwds)


class sortOrder(VegaSchema):
    """Wrapper for the ``sortOrder`` schema: 'ascending', 'descending', or a
    :class:`signalRef`."""

    _schema = {"$ref": "#/definitions/sortOrder"}

    def __init__(self, *args, **kwds):
        super(sortOrder, self).__init__(*args, **kwds)


class scaleBins(VegaSchema):
    """Wrapper for the ``scaleBins`` schema: a list of numbers/signals, a
    ``step`` mapping, or a :class:`signalRef`."""

    _schema = {"$ref": "#/definitions/scaleBins"}

    def __init__(self, *args, **kwds):
        super(scaleBins, self).__init__(*args, **kwds)


class scaleInterpolate(VegaSchema):
    """Wrapper for the ``scaleInterpolate`` schema: a string, a
    :class:`signalRef`, or a ``type`` mapping."""

    _schema = {"$ref": "#/definitions/scaleInterpolate"}

    def __init__(self, *args, **kwds):
        super(scaleInterpolate, self).__init__(*args, **kwds)


class scaleData(VegaSchema):
    """Wrapper for the ``scaleData`` schema: data/field, data/fields, or
    fields mapping forms."""

    _schema = {"$ref": "#/definitions/scaleData"}

    def __init__(self, *args, **kwds):
        super(scaleData, self).__init__(*args, **kwds)
class scope(VegaSchema):
    """Wrapper for the ``scope`` schema: the nested specification container
    holding axes, data, encodings, layout, legends, marks, projections,
    scales, signals, title and user metadata.  All attributes optional."""

    _schema = {"$ref": "#/definitions/scope"}

    def __init__(self, axes=Undefined, data=Undefined, encode=Undefined,
                 layout=Undefined, legends=Undefined, marks=Undefined,
                 projections=Undefined, scales=Undefined, signals=Undefined,
                 title=Undefined, usermeta=Undefined, **kwds):
        super(scope, self).__init__(
            data=data, signals=signals, scales=scales, projections=projections,
            axes=axes, legends=legends, marks=marks, encode=encode,
            layout=layout, title=title, usermeta=usermeta, **kwds)


class selector(VegaSchema):
    """Wrapper for the ``selector`` schema: a plain string."""

    _schema = {"$ref": "#/definitions/selector"}

    def __init__(self, *args):
        super(selector, self).__init__(*args)
class signal(VegaSchema):
    """Wrapper for the ``signal`` schema: a name/push, name-only, or
    name/init mapping form."""

    _schema = {"$ref": "#/definitions/signal"}

    def __init__(self, *args, **kwds):
        super(signal, self).__init__(*args, **kwds)


class signalName(VegaSchema):
    """Wrapper for the ``signalName`` schema: any string except the reserved
    names 'parent', 'datum', 'event' and 'item'."""

    _schema = {"$ref": "#/definitions/signalName"}

    def __init__(self, *args):
        super(signalName, self).__init__(*args)


class signalRef(VegaSchema):
    """Wrapper for the ``signalRef`` schema: a mapping with a required
    ``signal`` name string."""

    _schema = {"$ref": "#/definitions/signalRef"}

    def __init__(self, signal=Undefined, **kwds):
        super(signalRef, self).__init__(signal=signal, **kwds)


class arrayOrSignal(VegaSchema):
    """Wrapper for the ``arrayOrSignal`` schema: a list or :class:`signalRef`."""

    _schema = {"$ref": "#/definitions/arrayOrSignal"}

    def __init__(self, *args, **kwds):
        super(arrayOrSignal, self).__init__(*args, **kwds)


class booleanOrSignal(VegaSchema):
    """Wrapper for the ``booleanOrSignal`` schema: a boolean or :class:`signalRef`."""

    _schema = {"$ref": "#/definitions/booleanOrSignal"}

    def __init__(self, *args, **kwds):
        super(booleanOrSignal, self).__init__(*args, **kwds)


class numberOrSignal(VegaSchema):
    """Wrapper for the ``numberOrSignal`` schema: a number or :class:`signalRef`."""

    _schema = {"$ref": "#/definitions/numberOrSignal"}

    def __init__(self, *args, **kwds):
        super(numberOrSignal, self).__init__(*args, **kwds)


class stringOrSignal(VegaSchema):
    """Wrapper for the ``stringOrSignal`` schema: a string or :class:`signalRef`."""

    _schema = {"$ref": "#/definitions/stringOrSignal"}

    def __init__(self, *args, **kwds):
        super(stringOrSignal, self).__init__(*args, **kwds)


class textOrSignal(VegaSchema):
    """Wrapper for the ``textOrSignal`` schema: a string, list of strings, or
    :class:`signalRef`."""

    _schema = {"$ref": "#/definitions/textOrSignal"}

    def __init__(self, *args, **kwds):
        super(textOrSignal, self).__init__(*args, **kwds)
class stream(VegaSchema):
    """Wrapper for the ``stream`` schema: an event stream with common
    modifiers (between, consume, debounce, filter, markname, marktype,
    throttle) combined with a type, stream, or merge form."""

    _schema = {"$ref": "#/definitions/stream"}

    def __init__(self, between=Undefined, consume=Undefined, debounce=Undefined,
                 filter=Undefined, markname=Undefined, marktype=Undefined,
                 throttle=Undefined, **kwds):
        super(stream, self).__init__(
            between=between, filter=filter, markname=markname,
            marktype=marktype, consume=consume, debounce=debounce,
            throttle=throttle, **kwds)


class title(VegaSchema):
    """Wrapper for the ``title`` schema: a string or a mapping."""

    _schema = {"$ref": "#/definitions/title"}

    def __init__(self, *args, **kwds):
        super(title, self).__init__(*args, **kwds)


class transform(VegaSchema):
    """Wrapper for the ``transform`` schema: the union of every data
    transform type (aggregate, bin, filter, force, geo*, kde, lookup,
    stack, window, ...)."""

    _schema = {"$ref": "#/definitions/transform"}

    def __init__(self, *args, **kwds):
        super(transform, self).__init__(*args, **kwds)


class transformMark(VegaSchema):
    """Wrapper for the ``transformMark`` schema: the subset of transform
    types that may be applied directly to marks."""

    _schema = {"$ref": "#/definitions/transformMark"}

    def __init__(self, *args, **kwds):
        super(transformMark, self).__init__(*args, **kwds)
class crossfilterTransform(VegaSchema):
"""crossfilterTransform schema wrapper
Mapping(required=[type, fields, query])
Attributes
----------
fields : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)),
:class:`signalRef`)
query : oneOf(List(Any), :class:`signalRef`)
type : enum('crossfilter')
signal : string
"""
_schema = {'$ref': '#/definitions/crossfilterTransform'}
def __init__(self, fields=Undefined, query=Undefined, type=Undefined, signal=Undefined, **kwds):
super(crossfilterTransform, self).__init__(fields=fields, query=query, type=type, signal=signal,
**kwds)
class resolvefilterTransform(VegaSchema):
"""resolvefilterTransform schema wrapper
Mapping(required=[type, ignore, filter])
Attributes
----------
filter : Any
ignore : anyOf(float, :class:`signalRef`)
type : enum('resolvefilter')
signal : string
"""
_schema = {'$ref': '#/definitions/resolvefilterTransform'}
def __init__(self, filter=Undefined, ignore=Undefined, type=Undefined, signal=Undefined, **kwds):
super(resolvefilterTransform, self).__init__(filter=filter, ignore=ignore, type=type,
signal=signal, **kwds)
class linkpathTransform(VegaSchema):
    """linkpathTransform schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('linkpath')
    orient : anyOf(enum('horizontal', 'vertical', 'radial'), :class:`signalRef`)
    require : :class:`signalRef`
    shape : anyOf(enum('line', 'arc', 'curve', 'diagonal', 'orthogonal'), :class:`signalRef`)
    signal : string
    sourceX : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    sourceY : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    targetX : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    targetY : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    as : anyOf(string, :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/linkpathTransform'}

    def __init__(self, type=Undefined, orient=Undefined, require=Undefined, shape=Undefined,
                 signal=Undefined, sourceX=Undefined, sourceY=Undefined, targetX=Undefined,
                 targetY=Undefined, **kwds):
        super(linkpathTransform, self).__init__(
            type=type, orient=orient, require=require, shape=shape,
            signal=signal, sourceX=sourceX, sourceY=sourceY,
            targetX=targetX, targetY=targetY, **kwds)
class pieTransform(VegaSchema):
    """pieTransform schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('pie')
    endAngle : anyOf(float, :class:`signalRef`)
    field : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    signal : string
    sort : anyOf(boolean, :class:`signalRef`)
    startAngle : anyOf(float, :class:`signalRef`)
    as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/pieTransform'}

    def __init__(self, type=Undefined, endAngle=Undefined, field=Undefined, signal=Undefined,
                 sort=Undefined, startAngle=Undefined, **kwds):
        super(pieTransform, self).__init__(
            type=type, endAngle=endAngle, field=field, signal=signal,
            sort=sort, startAngle=startAngle, **kwds)
class stackTransform(VegaSchema):
    """stackTransform schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('stack')
    field : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    groupby : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)),
        :class:`signalRef`)
    offset : anyOf(enum('zero', 'center', 'normalize'), :class:`signalRef`)
    signal : string
    sort : :class:`compare`
    as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/stackTransform'}

    def __init__(self, type=Undefined, field=Undefined, groupby=Undefined, offset=Undefined,
                 signal=Undefined, sort=Undefined, **kwds):
        super(stackTransform, self).__init__(
            type=type, field=field, groupby=groupby, offset=offset,
            signal=signal, sort=sort, **kwds)
class forceTransform(VegaSchema):
    """forceTransform schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('force')
    alpha : anyOf(float, :class:`signalRef`)
    alphaMin : anyOf(float, :class:`signalRef`)
    alphaTarget : anyOf(float, :class:`signalRef`)
    forces : List(oneOf(Mapping(required=[force]), Mapping(required=[force]),
        Mapping(required=[force]), Mapping(required=[force]), Mapping(required=[force]),
        Mapping(required=[force])))
    iterations : anyOf(float, :class:`signalRef`)
    restart : anyOf(boolean, :class:`signalRef`)
    signal : string
    static : anyOf(boolean, :class:`signalRef`)
    velocityDecay : anyOf(float, :class:`signalRef`)
    as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/forceTransform'}

    def __init__(self, type=Undefined, alpha=Undefined, alphaMin=Undefined, alphaTarget=Undefined,
                 forces=Undefined, iterations=Undefined, restart=Undefined, signal=Undefined,
                 static=Undefined, velocityDecay=Undefined, **kwds):
        super(forceTransform, self).__init__(
            type=type, alpha=alpha, alphaMin=alphaMin, alphaTarget=alphaTarget,
            forces=forces, iterations=iterations, restart=restart, signal=signal,
            static=static, velocityDecay=velocityDecay, **kwds)
class contourTransform(VegaSchema):
    """contourTransform schema wrapper.

    Mapping(required=[type, size])

    Attributes
    ----------
    size : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    type : enum('contour')
    bandwidth : anyOf(float, :class:`signalRef`)
    cellSize : anyOf(float, :class:`signalRef`)
    count : anyOf(float, :class:`signalRef`)
    nice : anyOf(boolean, :class:`signalRef`)
    signal : string
    smooth : anyOf(boolean, :class:`signalRef`)
    thresholds : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    values : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    weight : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    x : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    y : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    """

    _schema = {'$ref': '#/definitions/contourTransform'}

    def __init__(self, size=Undefined, type=Undefined, bandwidth=Undefined, cellSize=Undefined,
                 count=Undefined, nice=Undefined, signal=Undefined, smooth=Undefined,
                 thresholds=Undefined, values=Undefined, weight=Undefined, x=Undefined, y=Undefined,
                 **kwds):
        super(contourTransform, self).__init__(
            size=size, type=type, bandwidth=bandwidth, cellSize=cellSize,
            count=count, nice=nice, signal=signal, smooth=smooth,
            thresholds=thresholds, values=values, weight=weight, x=x, y=y, **kwds)
class geojsonTransform(VegaSchema):
    """geojsonTransform schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('geojson')
    fields : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)),
        :class:`signalRef`)
    geojson : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    signal : string
    """

    _schema = {'$ref': '#/definitions/geojsonTransform'}

    def __init__(self, type=Undefined, fields=Undefined, geojson=Undefined, signal=Undefined, **kwds):
        super(geojsonTransform, self).__init__(
            type=type, fields=fields, geojson=geojson, signal=signal, **kwds)
class geopathTransform(VegaSchema):
    """geopathTransform schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('geopath')
    field : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    pointRadius : anyOf(float, :class:`signalRef`, :class:`expr`, :class:`paramField`)
    projection : string
    signal : string
    as : anyOf(string, :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/geopathTransform'}

    def __init__(self, type=Undefined, field=Undefined, pointRadius=Undefined, projection=Undefined,
                 signal=Undefined, **kwds):
        super(geopathTransform, self).__init__(
            type=type, field=field, pointRadius=pointRadius,
            projection=projection, signal=signal, **kwds)
class geopointTransform(VegaSchema):
    """geopointTransform schema wrapper.

    Mapping(required=[type, projection, fields])

    Attributes
    ----------
    fields : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)),
        :class:`signalRef`)
    projection : string
    type : enum('geopoint')
    signal : string
    as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/geopointTransform'}

    def __init__(self, fields=Undefined, projection=Undefined, type=Undefined, signal=Undefined, **kwds):
        super(geopointTransform, self).__init__(
            fields=fields, projection=projection, type=type, signal=signal, **kwds)
class geoshapeTransform(VegaSchema):
    """geoshapeTransform schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('geoshape')
    field : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    pointRadius : anyOf(float, :class:`signalRef`, :class:`expr`, :class:`paramField`)
    projection : string
    signal : string
    as : anyOf(string, :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/geoshapeTransform'}

    def __init__(self, type=Undefined, field=Undefined, pointRadius=Undefined, projection=Undefined,
                 signal=Undefined, **kwds):
        super(geoshapeTransform, self).__init__(
            type=type, field=field, pointRadius=pointRadius,
            projection=projection, signal=signal, **kwds)
class graticuleTransform(VegaSchema):
    """graticuleTransform schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('graticule')
    extent : oneOf(List(Any), :class:`signalRef`)
    extentMajor : oneOf(List(Any), :class:`signalRef`)
    extentMinor : oneOf(List(Any), :class:`signalRef`)
    precision : anyOf(float, :class:`signalRef`)
    signal : string
    step : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    stepMajor : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    stepMinor : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/graticuleTransform'}

    def __init__(self, type=Undefined, extent=Undefined, extentMajor=Undefined, extentMinor=Undefined,
                 precision=Undefined, signal=Undefined, step=Undefined, stepMajor=Undefined,
                 stepMinor=Undefined, **kwds):
        super(graticuleTransform, self).__init__(
            type=type, extent=extent, extentMajor=extentMajor,
            extentMinor=extentMinor, precision=precision,
            signal=signal, step=step, stepMajor=stepMajor,
            stepMinor=stepMinor, **kwds)
class heatmapTransform(VegaSchema):
    """heatmapTransform schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('heatmap')
    color : anyOf(string, :class:`signalRef`, :class:`expr`, :class:`paramField`)
    field : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    opacity : anyOf(float, :class:`signalRef`, :class:`expr`, :class:`paramField`)
    resolve : anyOf(enum('shared', 'independent'), :class:`signalRef`)
    signal : string
    as : anyOf(string, :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/heatmapTransform'}

    def __init__(self, type=Undefined, color=Undefined, field=Undefined, opacity=Undefined,
                 resolve=Undefined, signal=Undefined, **kwds):
        super(heatmapTransform, self).__init__(
            type=type, color=color, field=field, opacity=opacity,
            resolve=resolve, signal=signal, **kwds)
class isocontourTransform(VegaSchema):
    """isocontourTransform schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('isocontour')
    field : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    levels : anyOf(float, :class:`signalRef`)
    nice : anyOf(boolean, :class:`signalRef`)
    resolve : anyOf(enum('shared', 'independent'), :class:`signalRef`)
    scale : anyOf(float, :class:`signalRef`, :class:`expr`, :class:`paramField`)
    signal : string
    smooth : anyOf(boolean, :class:`signalRef`)
    thresholds : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    translate : oneOf(List(anyOf(float, :class:`signalRef`, :class:`expr`,
        :class:`paramField`)), :class:`signalRef`)
    zero : anyOf(boolean, :class:`signalRef`)
    as : anyOf(string, :class:`signalRef`, None)
    """

    _schema = {'$ref': '#/definitions/isocontourTransform'}

    def __init__(self, type=Undefined, field=Undefined, levels=Undefined, nice=Undefined,
                 resolve=Undefined, scale=Undefined, signal=Undefined, smooth=Undefined,
                 thresholds=Undefined, translate=Undefined, zero=Undefined, **kwds):
        super(isocontourTransform, self).__init__(
            type=type, field=field, levels=levels, nice=nice,
            resolve=resolve, scale=scale, signal=signal,
            smooth=smooth, thresholds=thresholds,
            translate=translate, zero=zero, **kwds)
class kde2dTransform(VegaSchema):
    """kde2dTransform schema wrapper.

    Mapping(required=[type, size, x, y])

    Attributes
    ----------
    size : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    type : enum('kde2d')
    x : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    y : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    bandwidth : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    cellSize : anyOf(float, :class:`signalRef`)
    counts : anyOf(boolean, :class:`signalRef`)
    groupby : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)),
        :class:`signalRef`)
    signal : string
    weight : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    as : anyOf(string, :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/kde2dTransform'}

    def __init__(self, size=Undefined, type=Undefined, x=Undefined, y=Undefined, bandwidth=Undefined,
                 cellSize=Undefined, counts=Undefined, groupby=Undefined, signal=Undefined,
                 weight=Undefined, **kwds):
        super(kde2dTransform, self).__init__(
            size=size, type=type, x=x, y=y, bandwidth=bandwidth,
            cellSize=cellSize, counts=counts, groupby=groupby,
            signal=signal, weight=weight, **kwds)
class nestTransform(VegaSchema):
    """nestTransform schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('nest')
    generate : anyOf(boolean, :class:`signalRef`)
    keys : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)),
        :class:`signalRef`)
    signal : string
    """

    _schema = {'$ref': '#/definitions/nestTransform'}

    def __init__(self, type=Undefined, generate=Undefined, keys=Undefined, signal=Undefined, **kwds):
        super(nestTransform, self).__init__(
            type=type, generate=generate, keys=keys, signal=signal, **kwds)
class packTransform(VegaSchema):
    """packTransform schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('pack')
    field : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    padding : anyOf(float, :class:`signalRef`)
    radius : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    signal : string
    size : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    sort : :class:`compare`
    as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/packTransform'}

    def __init__(self, type=Undefined, field=Undefined, padding=Undefined, radius=Undefined,
                 signal=Undefined, size=Undefined, sort=Undefined, **kwds):
        super(packTransform, self).__init__(
            type=type, field=field, padding=padding, radius=radius,
            signal=signal, size=size, sort=sort, **kwds)
class partitionTransform(VegaSchema):
    """partitionTransform schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('partition')
    field : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    padding : anyOf(float, :class:`signalRef`)
    round : anyOf(boolean, :class:`signalRef`)
    signal : string
    size : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    sort : :class:`compare`
    as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/partitionTransform'}

    def __init__(self, type=Undefined, field=Undefined, padding=Undefined, round=Undefined,
                 signal=Undefined, size=Undefined, sort=Undefined, **kwds):
        super(partitionTransform, self).__init__(
            type=type, field=field, padding=padding, round=round,
            signal=signal, size=size, sort=sort, **kwds)
class stratifyTransform(VegaSchema):
    """stratifyTransform schema wrapper.

    Mapping(required=[type, key, parentKey])

    Attributes
    ----------
    key : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    parentKey : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    type : enum('stratify')
    signal : string
    """

    _schema = {'$ref': '#/definitions/stratifyTransform'}

    def __init__(self, key=Undefined, parentKey=Undefined, type=Undefined, signal=Undefined, **kwds):
        super(stratifyTransform, self).__init__(
            key=key, parentKey=parentKey, type=type, signal=signal, **kwds)
class treeTransform(VegaSchema):
    """treeTransform schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('tree')
    field : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    method : anyOf(enum('tidy', 'cluster'), :class:`signalRef`)
    nodeSize : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    separation : anyOf(boolean, :class:`signalRef`)
    signal : string
    size : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    sort : :class:`compare`
    as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/treeTransform'}

    def __init__(self, type=Undefined, field=Undefined, method=Undefined, nodeSize=Undefined,
                 separation=Undefined, signal=Undefined, size=Undefined, sort=Undefined, **kwds):
        super(treeTransform, self).__init__(
            type=type, field=field, method=method, nodeSize=nodeSize,
            separation=separation, signal=signal, size=size, sort=sort, **kwds)
class treelinksTransform(VegaSchema):
    """treelinksTransform schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('treelinks')
    signal : string
    """

    _schema = {'$ref': '#/definitions/treelinksTransform'}

    def __init__(self, type=Undefined, signal=Undefined, **kwds):
        super(treelinksTransform, self).__init__(type=type, signal=signal, **kwds)
class treemapTransform(VegaSchema):
    """treemapTransform schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('treemap')
    field : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    method : anyOf(enum('squarify', 'resquarify', 'binary', 'dice', 'slice', 'slicedice'),
        :class:`signalRef`)
    padding : anyOf(float, :class:`signalRef`)
    paddingBottom : anyOf(float, :class:`signalRef`)
    paddingInner : anyOf(float, :class:`signalRef`)
    paddingLeft : anyOf(float, :class:`signalRef`)
    paddingOuter : anyOf(float, :class:`signalRef`)
    paddingRight : anyOf(float, :class:`signalRef`)
    paddingTop : anyOf(float, :class:`signalRef`)
    ratio : anyOf(float, :class:`signalRef`)
    round : anyOf(boolean, :class:`signalRef`)
    signal : string
    size : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    sort : :class:`compare`
    as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/treemapTransform'}

    def __init__(self, type=Undefined, field=Undefined, method=Undefined, padding=Undefined,
                 paddingBottom=Undefined, paddingInner=Undefined, paddingLeft=Undefined,
                 paddingOuter=Undefined, paddingRight=Undefined, paddingTop=Undefined, ratio=Undefined,
                 round=Undefined, signal=Undefined, size=Undefined, sort=Undefined, **kwds):
        super(treemapTransform, self).__init__(
            type=type, field=field, method=method, padding=padding,
            paddingBottom=paddingBottom, paddingInner=paddingInner,
            paddingLeft=paddingLeft, paddingOuter=paddingOuter,
            paddingRight=paddingRight, paddingTop=paddingTop,
            ratio=ratio, round=round, signal=signal, size=size,
            sort=sort, **kwds)
class labelTransform(VegaSchema):
    """labelTransform schema wrapper.

    Mapping(required=[type, size])

    Attributes
    ----------
    size : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    type : enum('label')
    anchor : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
    avoidBaseMark : anyOf(boolean, :class:`signalRef`)
    avoidMarks : oneOf(List(string), :class:`signalRef`)
    lineAnchor : anyOf(string, :class:`signalRef`)
    markIndex : anyOf(float, :class:`signalRef`)
    method : anyOf(string, :class:`signalRef`)
    offset : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    padding : anyOf(float, :class:`signalRef`, None)
    signal : string
    sort : :class:`compare`
    as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/labelTransform'}

    def __init__(self, size=Undefined, type=Undefined, anchor=Undefined, avoidBaseMark=Undefined,
                 avoidMarks=Undefined, lineAnchor=Undefined, markIndex=Undefined, method=Undefined,
                 offset=Undefined, padding=Undefined, signal=Undefined, sort=Undefined, **kwds):
        super(labelTransform, self).__init__(
            size=size, type=type, anchor=anchor,
            avoidBaseMark=avoidBaseMark, avoidMarks=avoidMarks,
            lineAnchor=lineAnchor, markIndex=markIndex, method=method,
            offset=offset, padding=padding, signal=signal, sort=sort,
            **kwds)
class loessTransform(VegaSchema):
    """loessTransform schema wrapper.

    Mapping(required=[type, x, y])

    Attributes
    ----------
    type : enum('loess')
    x : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    y : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    bandwidth : anyOf(float, :class:`signalRef`)
    groupby : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)),
        :class:`signalRef`)
    signal : string
    as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/loessTransform'}

    def __init__(self, type=Undefined, x=Undefined, y=Undefined, bandwidth=Undefined, groupby=Undefined,
                 signal=Undefined, **kwds):
        super(loessTransform, self).__init__(
            type=type, x=x, y=y, bandwidth=bandwidth, groupby=groupby,
            signal=signal, **kwds)
class regressionTransform(VegaSchema):
    """regressionTransform schema wrapper.

    Mapping(required=[type, x, y])

    Attributes
    ----------
    type : enum('regression')
    x : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    y : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    extent : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    groupby : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)),
        :class:`signalRef`)
    method : anyOf(string, :class:`signalRef`)
    order : anyOf(float, :class:`signalRef`)
    params : anyOf(boolean, :class:`signalRef`)
    signal : string
    as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/regressionTransform'}

    def __init__(self, type=Undefined, x=Undefined, y=Undefined, extent=Undefined, groupby=Undefined,
                 method=Undefined, order=Undefined, params=Undefined, signal=Undefined, **kwds):
        super(regressionTransform, self).__init__(
            type=type, x=x, y=y, extent=extent, groupby=groupby,
            method=method, order=order, params=params,
            signal=signal, **kwds)
class aggregateTransform(VegaSchema):
    """aggregateTransform schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('aggregate')
    cross : anyOf(boolean, :class:`signalRef`)
    drop : anyOf(boolean, :class:`signalRef`)
    fields : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`, None)),
        :class:`signalRef`)
    groupby : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)),
        :class:`signalRef`)
    key : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    ops : oneOf(List(anyOf(enum('values', 'count', '__count__', 'missing', 'valid', 'sum',
        'product', 'mean', 'average', 'variance', 'variancep', 'stdev', 'stdevp', 'stderr',
        'distinct', 'ci0', 'ci1', 'median', 'q1', 'q3', 'min', 'max', 'argmin', 'argmax'),
        :class:`signalRef`)), :class:`signalRef`)
    signal : string
    as : oneOf(List(anyOf(string, :class:`signalRef`, None)), :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/aggregateTransform'}

    def __init__(self, type=Undefined, cross=Undefined, drop=Undefined, fields=Undefined,
                 groupby=Undefined, key=Undefined, ops=Undefined, signal=Undefined, **kwds):
        super(aggregateTransform, self).__init__(
            type=type, cross=cross, drop=drop, fields=fields,
            groupby=groupby, key=key, ops=ops, signal=signal,
            **kwds)
class binTransform(VegaSchema):
    """binTransform schema wrapper.

    Mapping(required=[type, field, extent])

    Attributes
    ----------
    extent : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    field : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    type : enum('bin')
    anchor : anyOf(float, :class:`signalRef`)
    base : anyOf(float, :class:`signalRef`)
    divide : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    interval : anyOf(boolean, :class:`signalRef`)
    maxbins : anyOf(float, :class:`signalRef`)
    minstep : anyOf(float, :class:`signalRef`)
    name : anyOf(string, :class:`signalRef`)
    nice : anyOf(boolean, :class:`signalRef`)
    signal : string
    span : anyOf(float, :class:`signalRef`)
    step : anyOf(float, :class:`signalRef`)
    steps : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/binTransform'}

    def __init__(self, extent=Undefined, field=Undefined, type=Undefined, anchor=Undefined,
                 base=Undefined, divide=Undefined, interval=Undefined, maxbins=Undefined,
                 minstep=Undefined, name=Undefined, nice=Undefined, signal=Undefined, span=Undefined,
                 step=Undefined, steps=Undefined, **kwds):
        super(binTransform, self).__init__(
            extent=extent, field=field, type=type, anchor=anchor,
            base=base, divide=divide, interval=interval, maxbins=maxbins,
            minstep=minstep, name=name, nice=nice, signal=signal,
            span=span, step=step, steps=steps, **kwds)
class collectTransform(VegaSchema):
    """collectTransform schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('collect')
    signal : string
    sort : :class:`compare`
    """

    _schema = {'$ref': '#/definitions/collectTransform'}

    def __init__(self, type=Undefined, signal=Undefined, sort=Undefined, **kwds):
        super(collectTransform, self).__init__(type=type, signal=signal, sort=sort, **kwds)
class countpatternTransform(VegaSchema):
    """countpatternTransform schema wrapper.

    Mapping(required=[type, field])

    Attributes
    ----------
    field : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    type : enum('countpattern')
    case : anyOf(enum('upper', 'lower', 'mixed'), :class:`signalRef`)
    pattern : anyOf(string, :class:`signalRef`)
    signal : string
    stopwords : anyOf(string, :class:`signalRef`)
    as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/countpatternTransform'}

    def __init__(self, field=Undefined, type=Undefined, case=Undefined, pattern=Undefined,
                 signal=Undefined, stopwords=Undefined, **kwds):
        super(countpatternTransform, self).__init__(
            field=field, type=type, case=case, pattern=pattern,
            signal=signal, stopwords=stopwords, **kwds)
class crossTransform(VegaSchema):
    """crossTransform schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('cross')
    filter : :class:`exprString`
    signal : string
    as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/crossTransform'}

    def __init__(self, type=Undefined, filter=Undefined, signal=Undefined, **kwds):
        super(crossTransform, self).__init__(type=type, filter=filter, signal=signal, **kwds)
class densityTransform(VegaSchema):
    """densityTransform schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('density')
    distribution : oneOf(Mapping(required=[function]), Mapping(required=[function]),
        Mapping(required=[function]), Mapping(required=[function, field]),
        Mapping(required=[function]))
    extent : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    maxsteps : anyOf(float, :class:`signalRef`)
    method : anyOf(string, :class:`signalRef`)
    minsteps : anyOf(float, :class:`signalRef`)
    signal : string
    steps : anyOf(float, :class:`signalRef`)
    as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/densityTransform'}

    def __init__(self, type=Undefined, distribution=Undefined, extent=Undefined, maxsteps=Undefined,
                 method=Undefined, minsteps=Undefined, signal=Undefined, steps=Undefined, **kwds):
        super(densityTransform, self).__init__(
            type=type, distribution=distribution, extent=extent,
            maxsteps=maxsteps, method=method, minsteps=minsteps,
            signal=signal, steps=steps, **kwds)
class dotbinTransform(VegaSchema):
    """dotbinTransform schema wrapper.

    Mapping(required=[type, field])

    Attributes
    ----------
    field : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    type : enum('dotbin')
    groupby : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)),
        :class:`signalRef`)
    signal : string
    smooth : anyOf(boolean, :class:`signalRef`)
    step : anyOf(float, :class:`signalRef`)
    as : anyOf(string, :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/dotbinTransform'}

    def __init__(self, field=Undefined, type=Undefined, groupby=Undefined, signal=Undefined,
                 smooth=Undefined, step=Undefined, **kwds):
        super(dotbinTransform, self).__init__(
            field=field, type=type, groupby=groupby, signal=signal,
            smooth=smooth, step=step, **kwds)
class extentTransform(VegaSchema):
    """extentTransform schema wrapper.

    Mapping(required=[type, field])

    Attributes
    ----------
    field : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    type : enum('extent')
    signal : string
    """

    _schema = {'$ref': '#/definitions/extentTransform'}

    def __init__(self, field=Undefined, type=Undefined, signal=Undefined, **kwds):
        super(extentTransform, self).__init__(field=field, type=type, signal=signal, **kwds)
class filterTransform(VegaSchema):
    """filterTransform schema wrapper.

    Mapping(required=[type, expr])

    Attributes
    ----------
    expr : :class:`exprString`
    type : enum('filter')
    signal : string
    """

    _schema = {'$ref': '#/definitions/filterTransform'}

    def __init__(self, expr=Undefined, type=Undefined, signal=Undefined, **kwds):
        super(filterTransform, self).__init__(expr=expr, type=type, signal=signal, **kwds)
class flattenTransform(VegaSchema):
    """flattenTransform schema wrapper.

    Mapping(required=[type, fields])

    Attributes
    ----------
    fields : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)),
        :class:`signalRef`)
    type : enum('flatten')
    index : anyOf(string, :class:`signalRef`)
    signal : string
    as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/flattenTransform'}

    def __init__(self, fields=Undefined, type=Undefined, index=Undefined, signal=Undefined, **kwds):
        super(flattenTransform, self).__init__(
            fields=fields, type=type, index=index, signal=signal, **kwds)
class foldTransform(VegaSchema):
    """foldTransform schema wrapper.

    Mapping(required=[type, fields])

    Attributes
    ----------
    fields : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)),
        :class:`signalRef`)
    type : enum('fold')
    signal : string
    as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/foldTransform'}

    def __init__(self, fields=Undefined, type=Undefined, signal=Undefined, **kwds):
        super(foldTransform, self).__init__(fields=fields, type=type, signal=signal, **kwds)
class formulaTransform(VegaSchema):
    """formulaTransform schema wrapper.

    Mapping(required=[type, expr, as])

    Attributes
    ----------
    expr : :class:`exprString`
    type : enum('formula')
    initonly : anyOf(boolean, :class:`signalRef`)
    signal : string
    as : anyOf(string, :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/formulaTransform'}

    def __init__(self, expr=Undefined, type=Undefined, initonly=Undefined, signal=Undefined, **kwds):
        super(formulaTransform, self).__init__(
            expr=expr, type=type, initonly=initonly, signal=signal, **kwds)
class imputeTransform(VegaSchema):
    """imputeTransform schema wrapper.

    Mapping(required=[type, field, key])

    Attributes
    ----------
    field : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    key : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    type : enum('impute')
    groupby : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)),
        :class:`signalRef`)
    keyvals : oneOf(List(Any), :class:`signalRef`)
    method : anyOf(enum('value', 'mean', 'median', 'max', 'min'), :class:`signalRef`)
    signal : string
    value : Any
    """

    _schema = {'$ref': '#/definitions/imputeTransform'}

    def __init__(self, field=Undefined, key=Undefined, type=Undefined, groupby=Undefined,
                 keyvals=Undefined, method=Undefined, signal=Undefined, value=Undefined, **kwds):
        super(imputeTransform, self).__init__(
            field=field, key=key, type=type, groupby=groupby,
            keyvals=keyvals, method=method, signal=signal,
            value=value, **kwds)
class joinaggregateTransform(VegaSchema):
    """joinaggregateTransform schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('joinaggregate')
    fields : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`, None)),
        :class:`signalRef`)
    groupby : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)),
        :class:`signalRef`)
    key : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    ops : oneOf(List(anyOf(enum('values', 'count', '__count__', 'missing', 'valid', 'sum',
        'product', 'mean', 'average', 'variance', 'variancep', 'stdev', 'stdevp', 'stderr',
        'distinct', 'ci0', 'ci1', 'median', 'q1', 'q3', 'min', 'max', 'argmin', 'argmax'),
        :class:`signalRef`)), :class:`signalRef`)
    signal : string
    as : oneOf(List(anyOf(string, :class:`signalRef`, None)), :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/joinaggregateTransform'}

    def __init__(self, type=Undefined, fields=Undefined, groupby=Undefined, key=Undefined,
                 ops=Undefined, signal=Undefined, **kwds):
        super(joinaggregateTransform, self).__init__(
            type=type, fields=fields, groupby=groupby, key=key,
            ops=ops, signal=signal, **kwds)
class kdeTransform(VegaSchema):
    """kdeTransform schema wrapper.

    Mapping(required=[type, field])

    Attributes
    ----------
    field : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
    type : enum('kde')
    bandwidth : anyOf(float, :class:`signalRef`)
    counts : anyOf(boolean, :class:`signalRef`)
    cumulative : anyOf(boolean, :class:`signalRef`)
    extent : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
    groupby : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)),
        :class:`signalRef`)
    maxsteps : anyOf(float, :class:`signalRef`)
    minsteps : anyOf(float, :class:`signalRef`)
    resolve : anyOf(enum('shared', 'independent'), :class:`signalRef`)
    signal : string
    steps : anyOf(float, :class:`signalRef`)
    as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
    """

    _schema = {'$ref': '#/definitions/kdeTransform'}

    def __init__(self, field=Undefined, type=Undefined, bandwidth=Undefined, counts=Undefined,
                 cumulative=Undefined, extent=Undefined, groupby=Undefined, maxsteps=Undefined,
                 minsteps=Undefined, resolve=Undefined, signal=Undefined, steps=Undefined, **kwds):
        super(kdeTransform, self).__init__(
            field=field, type=type, bandwidth=bandwidth, counts=counts,
            cumulative=cumulative, extent=extent, groupby=groupby,
            maxsteps=maxsteps, minsteps=minsteps, resolve=resolve,
            signal=signal, steps=steps, **kwds)
class lookupTransform(VegaSchema):
"""lookupTransform schema wrapper
Mapping(required=[type, from, key, fields])
Attributes
----------
fields : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)),
:class:`signalRef`)
key : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
type : enum('lookup')
default : Any
signal : string
values : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)),
:class:`signalRef`)
as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
from : string
"""
_schema = {'$ref': '#/definitions/lookupTransform'}
def __init__(self, fields=Undefined, key=Undefined, type=Undefined, default=Undefined,
signal=Undefined, values=Undefined, **kwds):
super(lookupTransform, self).__init__(fields=fields, key=key, type=type, default=default,
signal=signal, values=values, **kwds)
class pivotTransform(VegaSchema):
"""pivotTransform schema wrapper
Mapping(required=[type, field, value])
Attributes
----------
field : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
type : enum('pivot')
value : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
groupby : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)),
:class:`signalRef`)
key : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
limit : anyOf(float, :class:`signalRef`)
op : anyOf(enum('values', 'count', '__count__', 'missing', 'valid', 'sum', 'product',
'mean', 'average', 'variance', 'variancep', 'stdev', 'stdevp', 'stderr', 'distinct', 'ci0',
'ci1', 'median', 'q1', 'q3', 'min', 'max', 'argmin', 'argmax'), :class:`signalRef`)
signal : string
"""
_schema = {'$ref': '#/definitions/pivotTransform'}
def __init__(self, field=Undefined, type=Undefined, value=Undefined, groupby=Undefined,
key=Undefined, limit=Undefined, op=Undefined, signal=Undefined, **kwds):
super(pivotTransform, self).__init__(field=field, type=type, value=value, groupby=groupby,
key=key, limit=limit, op=op, signal=signal, **kwds)
class projectTransform(VegaSchema):
"""projectTransform schema wrapper
Mapping(required=[type])
Attributes
----------
type : enum('project')
fields : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)),
:class:`signalRef`)
signal : string
as : oneOf(List(anyOf(string, :class:`signalRef`, None)), :class:`signalRef`)
"""
_schema = {'$ref': '#/definitions/projectTransform'}
def __init__(self, type=Undefined, fields=Undefined, signal=Undefined, **kwds):
super(projectTransform, self).__init__(type=type, fields=fields, signal=signal, **kwds)
class quantileTransform(VegaSchema):
"""quantileTransform schema wrapper
Mapping(required=[type, field])
Attributes
----------
field : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
type : enum('quantile')
groupby : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)),
:class:`signalRef`)
probs : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
signal : string
step : anyOf(float, :class:`signalRef`)
as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
"""
_schema = {'$ref': '#/definitions/quantileTransform'}
def __init__(self, field=Undefined, type=Undefined, groupby=Undefined, probs=Undefined,
signal=Undefined, step=Undefined, **kwds):
super(quantileTransform, self).__init__(field=field, type=type, groupby=groupby, probs=probs,
signal=signal, step=step, **kwds)
class sampleTransform(VegaSchema):
"""sampleTransform schema wrapper
Mapping(required=[type])
Attributes
----------
type : enum('sample')
signal : string
size : anyOf(float, :class:`signalRef`)
"""
_schema = {'$ref': '#/definitions/sampleTransform'}
def __init__(self, type=Undefined, signal=Undefined, size=Undefined, **kwds):
super(sampleTransform, self).__init__(type=type, signal=signal, size=size, **kwds)
class sequenceTransform(VegaSchema):
"""sequenceTransform schema wrapper
Mapping(required=[type, start, stop])
Attributes
----------
start : anyOf(float, :class:`signalRef`)
stop : anyOf(float, :class:`signalRef`)
type : enum('sequence')
signal : string
step : anyOf(float, :class:`signalRef`)
as : anyOf(string, :class:`signalRef`)
"""
_schema = {'$ref': '#/definitions/sequenceTransform'}
def __init__(self, start=Undefined, stop=Undefined, type=Undefined, signal=Undefined,
step=Undefined, **kwds):
super(sequenceTransform, self).__init__(start=start, stop=stop, type=type, signal=signal,
step=step, **kwds)
class timeunitTransform(VegaSchema):
"""timeunitTransform schema wrapper
Mapping(required=[type, field])
Attributes
----------
field : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
type : enum('timeunit')
extent : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
interval : anyOf(boolean, :class:`signalRef`)
maxbins : anyOf(float, :class:`signalRef`)
signal : string
step : anyOf(float, :class:`signalRef`)
timezone : anyOf(enum('local', 'utc'), :class:`signalRef`)
units : oneOf(List(anyOf(enum('year', 'quarter', 'month', 'week', 'date', 'day',
'dayofyear', 'hours', 'minutes', 'seconds', 'milliseconds'), :class:`signalRef`)),
:class:`signalRef`)
as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
"""
_schema = {'$ref': '#/definitions/timeunitTransform'}
def __init__(self, field=Undefined, type=Undefined, extent=Undefined, interval=Undefined,
maxbins=Undefined, signal=Undefined, step=Undefined, timezone=Undefined,
units=Undefined, **kwds):
super(timeunitTransform, self).__init__(field=field, type=type, extent=extent,
interval=interval, maxbins=maxbins, signal=signal,
step=step, timezone=timezone, units=units, **kwds)
class windowTransform(VegaSchema):
"""windowTransform schema wrapper
Mapping(required=[type])
Attributes
----------
type : enum('window')
fields : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`, None)),
:class:`signalRef`)
frame : oneOf(List(anyOf(float, :class:`signalRef`, None)), :class:`signalRef`)
groupby : oneOf(List(oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)),
:class:`signalRef`)
ignorePeers : anyOf(boolean, :class:`signalRef`)
ops : oneOf(List(anyOf(enum('row_number', 'rank', 'dense_rank', 'percent_rank', 'cume_dist',
'ntile', 'lag', 'lead', 'first_value', 'last_value', 'nth_value', 'prev_value',
'next_value', 'values', 'count', '__count__', 'missing', 'valid', 'sum', 'product', 'mean',
'average', 'variance', 'variancep', 'stdev', 'stdevp', 'stderr', 'distinct', 'ci0', 'ci1',
'median', 'q1', 'q3', 'min', 'max', 'argmin', 'argmax'), :class:`signalRef`)),
:class:`signalRef`)
params : oneOf(List(anyOf(float, :class:`signalRef`, None)), :class:`signalRef`)
signal : string
sort : :class:`compare`
as : oneOf(List(anyOf(string, :class:`signalRef`, None)), :class:`signalRef`)
"""
_schema = {'$ref': '#/definitions/windowTransform'}
def __init__(self, type=Undefined, fields=Undefined, frame=Undefined, groupby=Undefined,
ignorePeers=Undefined, ops=Undefined, params=Undefined, signal=Undefined,
sort=Undefined, **kwds):
super(windowTransform, self).__init__(type=type, fields=fields, frame=frame, groupby=groupby,
ignorePeers=ignorePeers, ops=ops, params=params,
signal=signal, sort=sort, **kwds)
class identifierTransform(VegaSchema):
"""identifierTransform schema wrapper
Mapping(required=[type, as])
Attributes
----------
type : enum('identifier')
signal : string
as : anyOf(string, :class:`signalRef`)
"""
_schema = {'$ref': '#/definitions/identifierTransform'}
def __init__(self, type=Undefined, signal=Undefined, **kwds):
super(identifierTransform, self).__init__(type=type, signal=signal, **kwds)
class voronoiTransform(VegaSchema):
"""voronoiTransform schema wrapper
Mapping(required=[type, x, y])
Attributes
----------
type : enum('voronoi')
x : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
y : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
extent : oneOf(List(Any), :class:`signalRef`)
signal : string
size : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
as : anyOf(string, :class:`signalRef`)
"""
_schema = {'$ref': '#/definitions/voronoiTransform'}
def __init__(self, type=Undefined, x=Undefined, y=Undefined, extent=Undefined, signal=Undefined,
size=Undefined, **kwds):
super(voronoiTransform, self).__init__(type=type, x=x, y=y, extent=extent, signal=signal,
size=size, **kwds)
class wordcloudTransform(VegaSchema):
"""wordcloudTransform schema wrapper
Mapping(required=[type])
Attributes
----------
type : enum('wordcloud')
font : anyOf(string, :class:`signalRef`, :class:`expr`, :class:`paramField`)
fontSize : anyOf(float, :class:`signalRef`, :class:`expr`, :class:`paramField`)
fontSizeRange : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`, None)
fontStyle : anyOf(string, :class:`signalRef`, :class:`expr`, :class:`paramField`)
fontWeight : anyOf(string, :class:`signalRef`, :class:`expr`, :class:`paramField`)
padding : anyOf(float, :class:`signalRef`, :class:`expr`, :class:`paramField`)
rotate : anyOf(float, :class:`signalRef`, :class:`expr`, :class:`paramField`)
signal : string
size : oneOf(List(anyOf(float, :class:`signalRef`)), :class:`signalRef`)
spiral : anyOf(string, :class:`signalRef`)
text : oneOf(:class:`scaleField`, :class:`paramField`, :class:`expr`)
as : oneOf(List(anyOf(string, :class:`signalRef`)), :class:`signalRef`)
"""
_schema = {'$ref': '#/definitions/wordcloudTransform'}
def __init__(self, type=Undefined, font=Undefined, fontSize=Undefined, fontSizeRange=Undefined,
fontStyle=Undefined, fontWeight=Undefined, padding=Undefined, rotate=Undefined,
signal=Undefined, size=Undefined, spiral=Undefined, text=Undefined, **kwds):
super(wordcloudTransform, self).__init__(type=type, font=font, fontSize=fontSize,
fontSizeRange=fontSizeRange, fontStyle=fontStyle,
fontWeight=fontWeight, padding=padding, rotate=rotate,
signal=signal, size=size, spiral=spiral, text=text,
**kwds)
| bsd-3-clause | 6a4195d6e708eb79e8d0a1ff315a8caf | 32.239461 | 105 | 0.614731 | 4.026191 | false | false | false | false |
altair-viz/altair | altair/examples/candlestick_chart.py | 1 | 1251 | """
Candlestick Chart
=================
A candlestick chart inspired from `Protovis <http://mbostock.github.io/protovis/ex/candlestick.html>`_.
This example shows the performance of the Chicago Board Options Exchange `Volatility Index <https://en.wikipedia.org/wiki/VIX>`_ (VIX)
in the summer of 2009. The thick bar represents the opening and closing prices,
while the thin bar shows intraday high and low prices; if the index closed higher on a given day, the bars are colored green rather than red.
"""
# category: advanced calculations
import altair as alt
from vega_datasets import data
source = data.ohlc()
open_close_color = alt.condition("datum.open <= datum.close",
alt.value("#06982d"),
alt.value("#ae1325"))
base = alt.Chart(source).encode(
alt.X('date:T',
axis=alt.Axis(
format='%m/%d',
labelAngle=-45,
title='Date in 2009'
)
),
color=open_close_color
)
rule = base.mark_rule().encode(
alt.Y(
'low:Q',
title='Price',
scale=alt.Scale(zero=False),
),
alt.Y2('high:Q')
)
bar = base.mark_bar().encode(
alt.Y('open:Q'),
alt.Y2('close:Q')
)
rule + bar | bsd-3-clause | 6d7728a71e56b0ca74e20870c007dfb7 | 27.454545 | 141 | 0.603517 | 3.446281 | false | false | false | false |
altair-viz/altair | altair/examples/scatter_linked_table.py | 1 | 1360 | """
Brushing Scatter Plot to Show Data on a Table
---------------------------------------------
A scatter plot of the cars dataset, with data tables for horsepower, MPG, and origin.
The tables update to reflect the selection on the scatter plot.
"""
# category: scatter plots
import altair as alt
from vega_datasets import data
source = data.cars()
# Brush for selection
brush = alt.selection(type='interval')
# Scatter Plot
points = alt.Chart(source).mark_point().encode(
x='Horsepower:Q',
y='Miles_per_Gallon:Q',
color=alt.condition(brush, 'Cylinders:O', alt.value('grey'))
).add_params(brush)
# Base chart for data tables
ranked_text = alt.Chart(source).mark_text().encode(
y=alt.Y('row_number:O',axis=None)
).transform_window(
row_number='row_number()'
).transform_filter(
brush
).transform_window(
rank='rank(row_number)'
).transform_filter(
alt.datum.rank<20
)
# Data Tables
horsepower = ranked_text.encode(text='Horsepower:N').properties(title='Horsepower')
mpg = ranked_text.encode(text='Miles_per_Gallon:N').properties(title='MPG')
origin = ranked_text.encode(text='Origin:N').properties(title='Origin')
text = alt.hconcat(horsepower, mpg, origin) # Combine data tables
# Build chart
alt.hconcat(
points,
text
).resolve_legend(
color="independent"
)
| bsd-3-clause | 23d787449c468072785ca7e8e24066ee | 25.755102 | 86 | 0.663971 | 3.126437 | false | false | false | false |
altair-viz/altair | altair/examples/select_detail.py | 1 | 1911 | """
Selection Detail
================
This example shows a selection that links two views of data: the left panel
contains one point per object, and the right panel contains one line per
object. Clicking on either the points or lines will select the corresponding
objects in both views of the data.
The challenge lies in expressing such hierarchical data in a way that Altair
can handle. We do this by merging the data into a "long form" dataframe, and
aggregating identical metadata for the final plot.
"""
# category: interactive charts
import altair as alt
import pandas as pd
import numpy as np
np.random.seed(0)
n_objects = 20
n_times = 50
# Create one (x, y) pair of metadata per object
locations = pd.DataFrame({
'id': range(n_objects),
'x': np.random.randn(n_objects),
'y': np.random.randn(n_objects)
})
# Create a 50-element time-series for each object
timeseries = pd.DataFrame(np.random.randn(n_times, n_objects).cumsum(0),
columns=locations['id'],
index=pd.RangeIndex(0, n_times, name='time'))
# Melt the wide-form timeseries into a long-form view
timeseries = timeseries.reset_index().melt('time')
# Merge the (x, y) metadata into the long-form view
timeseries['id'] = timeseries['id'].astype(int) # make merge not complain
data = pd.merge(timeseries, locations, on='id')
# Data is prepared, now make a chart
selector = alt.selection_point(fields=['id'])
base = alt.Chart(data).properties(
width=250,
height=250
).add_params(selector)
points = base.mark_point(filled=True, size=200).encode(
x='mean(x)',
y='mean(y)',
color=alt.condition(selector, 'id:O', alt.value('lightgray'), legend=None),
)
timeseries = base.mark_line().encode(
x='time',
y=alt.Y('value', scale=alt.Scale(domain=(-15, 15))),
color=alt.Color('id:O', legend=None)
).transform_filter(
selector
)
points | timeseries
| bsd-3-clause | dbddfdbf822a587a899399b33d709a99 | 28.4 | 79 | 0.688645 | 3.443243 | false | false | false | false |
altair-viz/altair | altair/vegalite/v4/schema/core.py | 2 | 973217 | # The contents of this file are automatically written by
# tools/generate_schema_wrapper.py. Do not modify directly.
from altair.utils.schemapi import SchemaBase, Undefined, _subclasses
import pkgutil
import json
def load_schema():
    """Return the Vega-Lite JSON schema bundled with this package.

    The schema file ``vega-lite-schema.json`` is read from the package
    data next to this module, decoded as UTF-8, and parsed into a dict.
    """
    raw = pkgutil.get_data(__name__, 'vega-lite-schema.json')
    return json.loads(raw.decode('utf-8'))
class VegaLiteSchema(SchemaBase):
    """Common base class for every Vega-Lite schema wrapper in this module.

    Holds the full root JSON schema that individual wrapper classes
    reference via their ``_schema`` ``$ref`` pointers.
    """
    # Loaded once at class-creation time and shared by all subclasses.
    _rootschema = load_schema()

    @classmethod
    def _default_wrapper_classes(cls):
        """Return all wrapper classes deriving from VegaLiteSchema."""
        # Intentionally anchored at VegaLiteSchema (not ``cls``) so every
        # caller resolves against the complete wrapper hierarchy.
        return _subclasses(VegaLiteSchema)
class Root(VegaLiteSchema):
    """Wrapper for a top-level Vega-Lite specification.

    anyOf(:class:`TopLevelUnitSpec`, :class:`TopLevelFacetSpec`,
    :class:`TopLevelLayerSpec`, :class:`TopLevelRepeatSpec`,
    :class:`TopLevelNormalizedConcatSpecGenericSpec`,
    :class:`TopLevelNormalizedVConcatSpecGenericSpec`,
    :class:`TopLevelNormalizedHConcatSpecGenericSpec`)

    This is the root class for all Vega-Lite specifications; the JSON
    schema is generated from this type.
    """
    # Unlike the other wrappers, the root validates against the whole
    # schema document rather than a single ``$ref`` definition.
    _schema = VegaLiteSchema._rootschema

    def __init__(self, *args, **kwds):
        super(Root, self).__init__(*args, **kwds)
class Aggregate(VegaLiteSchema):
    """Wrapper for the Vega-Lite ``Aggregate`` schema.

    anyOf(:class:`NonArgAggregateOp`, :class:`ArgmaxDef`, :class:`ArgminDef`)
    """
    _schema = {'$ref': '#/definitions/Aggregate'}

    def __init__(self, *args, **kwds):
        super(Aggregate, self).__init__(*args, **kwds)
class AggregateOp(VegaLiteSchema):
    """Wrapper for the Vega-Lite ``AggregateOp`` schema.

    One of the aggregation operation names: ``'argmax'``, ``'argmin'``,
    ``'average'``, ``'count'``, ``'distinct'``, ``'max'``, ``'mean'``,
    ``'median'``, ``'min'``, ``'missing'``, ``'product'``, ``'q1'``,
    ``'q3'``, ``'ci0'``, ``'ci1'``, ``'stderr'``, ``'stdev'``,
    ``'stdevp'``, ``'sum'``, ``'valid'``, ``'values'``, ``'variance'``,
    or ``'variancep'``.
    """
    _schema = {'$ref': '#/definitions/AggregateOp'}

    def __init__(self, *args):
        super(AggregateOp, self).__init__(*args)
class AggregatedFieldDef(VegaLiteSchema):
    """Wrapper for the Vega-Lite ``AggregatedFieldDef`` schema.

    Mapping(required=[op, as])

    Attributes
    ----------
    op : :class:`AggregateOp`
        The aggregation operation to apply to the fields (e.g. ``"sum"``,
        ``"average"``, or ``"count"``).  See the full list of supported
        aggregation operations at
        https://vega.github.io/vega-lite/docs/aggregate.html#ops
    field : :class:`FieldName`
        The data field for which to compute the aggregate function.
        Required for every aggregation operation except ``"count"``.
    as : :class:`FieldName`
        The output field names to use for each aggregated field.
    """
    _schema = {'$ref': '#/definitions/AggregatedFieldDef'}

    def __init__(self, op=Undefined, field=Undefined, **kwds):
        # ``as`` is a Python keyword, so callers can only supply it
        # through **kwds.
        super(AggregatedFieldDef, self).__init__(op=op, field=field, **kwds)
class Align(VegaLiteSchema):
    """Wrapper for the Vega-Lite ``Align`` schema.

    One of ``'left'``, ``'center'``, or ``'right'``.
    """
    _schema = {'$ref': '#/definitions/Align'}

    def __init__(self, *args):
        super(Align, self).__init__(*args)
class AnyMark(VegaLiteSchema):
    """Wrapper for the Vega-Lite ``AnyMark`` schema.

    anyOf(:class:`CompositeMark`, :class:`CompositeMarkDef`, :class:`Mark`,
    :class:`MarkDef`)
    """
    _schema = {'$ref': '#/definitions/AnyMark'}

    def __init__(self, *args, **kwds):
        super(AnyMark, self).__init__(*args, **kwds)
class AnyMarkConfig(VegaLiteSchema):
    """Wrapper for the Vega-Lite ``AnyMarkConfig`` schema.

    anyOf(:class:`MarkConfig`, :class:`AreaConfig`, :class:`BarConfig`,
    :class:`RectConfig`, :class:`LineConfig`, :class:`TickConfig`)
    """
    _schema = {'$ref': '#/definitions/AnyMarkConfig'}

    def __init__(self, *args, **kwds):
        super(AnyMarkConfig, self).__init__(*args, **kwds)
class AreaConfig(AnyMarkConfig):
    """AreaConfig schema wrapper
    Mapping(required=[])
    Attributes
    ----------
    align : anyOf(:class:`Align`, :class:`ExprRef`)
        The horizontal alignment of the text or ranged marks (area, bar, image, rect, rule).
        One of ``"left"``, ``"right"``, ``"center"``.
        **Note:** Expression reference is *not* supported for range marks.
    angle : anyOf(float, :class:`ExprRef`)
    aria : anyOf(boolean, :class:`ExprRef`)
    ariaRole : anyOf(string, :class:`ExprRef`)
    ariaRoleDescription : anyOf(string, :class:`ExprRef`)
    aspect : anyOf(boolean, :class:`ExprRef`)
    baseline : anyOf(:class:`TextBaseline`, :class:`ExprRef`)
        For text marks, the vertical text baseline. One of ``"alphabetic"`` (default),
        ``"top"``, ``"middle"``, ``"bottom"``, ``"line-top"``, ``"line-bottom"``, or an
        expression reference that provides one of the valid values. The ``"line-top"`` and
        ``"line-bottom"`` values operate similarly to ``"top"`` and ``"bottom"``, but are
        calculated relative to the ``lineHeight`` rather than ``fontSize`` alone.
        For range marks, the vertical alignment of the marks. One of ``"top"``,
        ``"middle"``, ``"bottom"``.
        **Note:** Expression reference is *not* supported for range marks.
    blend : anyOf(:class:`Blend`, :class:`ExprRef`)
    color : anyOf(:class:`Color`, :class:`Gradient`, :class:`ExprRef`)
        Default color.
        **Default value:** :raw-html:`<span style="color: #4682b4;">&#9632;</span>`
        ``"#4682b4"``
        **Note:** - This property cannot be used in a `style config
        <https://vega.github.io/vega-lite/docs/mark.html#style-config>`__. - The ``fill``
        and ``stroke`` properties have higher precedence than ``color`` and will override
        ``color``.
    cornerRadius : anyOf(float, :class:`ExprRef`)
    cornerRadiusBottomLeft : anyOf(float, :class:`ExprRef`)
    cornerRadiusBottomRight : anyOf(float, :class:`ExprRef`)
    cornerRadiusTopLeft : anyOf(float, :class:`ExprRef`)
    cornerRadiusTopRight : anyOf(float, :class:`ExprRef`)
    cursor : anyOf(:class:`Cursor`, :class:`ExprRef`)
    description : anyOf(string, :class:`ExprRef`)
    dir : anyOf(:class:`TextDirection`, :class:`ExprRef`)
    dx : anyOf(float, :class:`ExprRef`)
    dy : anyOf(float, :class:`ExprRef`)
    ellipsis : anyOf(string, :class:`ExprRef`)
    endAngle : anyOf(float, :class:`ExprRef`)
    fill : anyOf(:class:`Color`, :class:`Gradient`, None, :class:`ExprRef`)
        Default fill color. This property has higher precedence than ``config.color``. Set
        to ``null`` to remove fill.
        **Default value:** (None)
    fillOpacity : anyOf(float, :class:`ExprRef`)
    filled : boolean
        Whether the mark's color should be used as fill color instead of stroke color.
        **Default value:** ``false`` for all ``point``, ``line``, and ``rule`` marks as well
        as ``geoshape`` marks for `graticule
        <https://vega.github.io/vega-lite/docs/data.html#graticule>`__ data sources;
        otherwise, ``true``.
        **Note:** This property cannot be used in a `style config
        <https://vega.github.io/vega-lite/docs/mark.html#style-config>`__.
    font : anyOf(string, :class:`ExprRef`)
    fontSize : anyOf(float, :class:`ExprRef`)
    fontStyle : anyOf(:class:`FontStyle`, :class:`ExprRef`)
    fontWeight : anyOf(:class:`FontWeight`, :class:`ExprRef`)
    height : anyOf(float, :class:`ExprRef`)
    href : anyOf(:class:`URI`, :class:`ExprRef`)
    innerRadius : anyOf(float, :class:`ExprRef`)
        The inner radius in pixels of arc marks. ``innerRadius`` is an alias for
        ``radius2``.
    interpolate : anyOf(:class:`Interpolate`, :class:`ExprRef`)
    invalid : enum('filter', None)
        Defines how Vega-Lite should handle marks for invalid values ( ``null`` and ``NaN``
        ). - If set to ``"filter"`` (default), all data items with null values will be
        skipped (for line, trail, and area marks) or filtered (for other marks). - If
        ``null``, all data items are included. In this case, invalid values will be
        interpreted as zeroes.
    limit : anyOf(float, :class:`ExprRef`)
    line : anyOf(boolean, :class:`OverlayMarkDef`)
        A flag for overlaying line on top of area marks, or an object defining the
        properties of the overlayed lines.
        If this value is an empty object ( ``{}`` ) or ``true``, lines with default
        properties will be used.
        If this value is ``false``, no lines would be automatically added to area marks.
        **Default value:** ``false``.
    lineBreak : anyOf(string, :class:`ExprRef`)
    lineHeight : anyOf(float, :class:`ExprRef`)
    opacity : anyOf(float, :class:`ExprRef`)
        The overall opacity (value between [0,1]).
        **Default value:** ``0.7`` for non-aggregate plots with ``point``, ``tick``,
        ``circle``, or ``square`` marks or layered ``bar`` charts and ``1`` otherwise.
    order : anyOf(None, boolean)
        For line and trail marks, this ``order`` property can be set to ``null`` or
        ``false`` to make the lines use the original order in the data sources.
    orient : :class:`Orientation`
        The orientation of a non-stacked bar, tick, area, and line charts. The value is
        either horizontal (default) or vertical. - For bar, rule and tick, this determines
        whether the size of the bar and tick should be applied to x or y dimension. - For
        area, this property determines the orient property of the Vega output. - For line
        and trail marks, this property determines the sort order of the points in the line
        if ``config.sortLineBy`` is not specified. For stacked charts, this is always
        determined by the orientation of the stack; therefore explicitly specified value
        will be ignored.
    outerRadius : anyOf(float, :class:`ExprRef`)
        The outer radius in pixels of arc marks. ``outerRadius`` is an alias for ``radius``.
    padAngle : anyOf(float, :class:`ExprRef`)
    point : anyOf(boolean, :class:`OverlayMarkDef`, string)
        A flag for overlaying points on top of line or area marks, or an object defining the
        properties of the overlayed points.
        If this property is ``"transparent"``, transparent points will be used (for
        enhancing tooltips and selections).
        If this property is an empty object ( ``{}`` ) or ``true``, filled points with
        default properties will be used.
        If this property is ``false``, no points would be automatically added to line or
        area marks.
        **Default value:** ``false``.
    radius : anyOf(float, :class:`ExprRef`)
        For arc mark, the primary (outer) radius in pixels.
        For text marks, polar coordinate radial offset, in pixels, of the text from the
        origin determined by the ``x`` and ``y`` properties.
    radius2 : anyOf(float, :class:`ExprRef`)
        The secondary (inner) radius in pixels of arc marks.
    shape : anyOf(anyOf(:class:`SymbolShape`, string), :class:`ExprRef`)
    size : anyOf(float, :class:`ExprRef`)
        Default size for marks. - For ``point`` / ``circle`` / ``square``, this represents
        the pixel area of the marks. Note that this value sets the area of the symbol; the
        side lengths will increase with the square root of this value. - For ``bar``, this
        represents the band size of the bar, in pixels. - For ``text``, this represents the
        font size, in pixels.
        **Default value:** - ``30`` for point, circle, square marks; width/height's ``step``
        - ``2`` for bar marks with discrete dimensions; - ``5`` for bar marks with
        continuous dimensions; - ``11`` for text marks.
    smooth : anyOf(boolean, :class:`ExprRef`)
    startAngle : anyOf(float, :class:`ExprRef`)
    stroke : anyOf(:class:`Color`, :class:`Gradient`, None, :class:`ExprRef`)
        Default stroke color. This property has higher precedence than ``config.color``. Set
        to ``null`` to remove stroke.
        **Default value:** (None)
    strokeCap : anyOf(:class:`StrokeCap`, :class:`ExprRef`)
    strokeDash : anyOf(List(float), :class:`ExprRef`)
    strokeDashOffset : anyOf(float, :class:`ExprRef`)
    strokeJoin : anyOf(:class:`StrokeJoin`, :class:`ExprRef`)
    strokeMiterLimit : anyOf(float, :class:`ExprRef`)
    strokeOffset : anyOf(float, :class:`ExprRef`)
    strokeOpacity : anyOf(float, :class:`ExprRef`)
    strokeWidth : anyOf(float, :class:`ExprRef`)
    tension : anyOf(float, :class:`ExprRef`)
    text : anyOf(:class:`Text`, :class:`ExprRef`)
    theta : anyOf(float, :class:`ExprRef`)
        For arc marks, the arc length in radians if theta2 is not specified, otherwise the
        start arc angle. (A value of 0 indicates up or “north”, increasing values proceed
        clockwise.)
        For text marks, polar coordinate angle in radians.
    theta2 : anyOf(float, :class:`ExprRef`)
        The end angle of arc marks in radians. A value of 0 indicates up or “north”,
        increasing values proceed clockwise.
    timeUnitBand : float
        Default relative band size for a time unit. If set to ``1``, the bandwidth of the
        marks will be equal to the time unit band step. If set to ``0.5``, bandwidth of the
        marks will be half of the time unit band step.
    timeUnitBandPosition : float
        Default relative band position for a time unit. If set to ``0``, the marks will be
        positioned at the beginning of the time unit band step. If set to ``0.5``, the marks
        will be positioned in the middle of the time unit band step.
    tooltip : anyOf(float, string, boolean, :class:`TooltipContent`, :class:`ExprRef`, None)
        The tooltip text string to show upon mouse hover or an object defining which fields
        should the tooltip be derived from.
        * If ``tooltip`` is ``true`` or ``{"content": "encoding"}``, then all fields from
          ``encoding`` will be used. - If ``tooltip`` is ``{"content": "data"}``, then all
          fields that appear in the highlighted data point will be used. - If set to
          ``null`` or ``false``, then no tooltip will be used.
        See the `tooltip <https://vega.github.io/vega-lite/docs/tooltip.html>`__
        documentation for a detailed discussion about tooltip in Vega-Lite.
        **Default value:** ``null``
    url : anyOf(:class:`URI`, :class:`ExprRef`)
    width : anyOf(float, :class:`ExprRef`)
    x : anyOf(float, string, :class:`ExprRef`)
        X coordinates of the marks, or width of horizontal ``"bar"`` and ``"area"`` without
        specified ``x2`` or ``width``.
        The ``value`` of this channel can be a number or a string ``"width"`` for the width
        of the plot.
    x2 : anyOf(float, string, :class:`ExprRef`)
        X2 coordinates for ranged ``"area"``, ``"bar"``, ``"rect"``, and ``"rule"``.
        The ``value`` of this channel can be a number or a string ``"width"`` for the width
        of the plot.
    y : anyOf(float, string, :class:`ExprRef`)
        Y coordinates of the marks, or height of vertical ``"bar"`` and ``"area"`` without
        specified ``y2`` or ``height``.
        The ``value`` of this channel can be a number or a string ``"height"`` for the
        height of the plot.
    y2 : anyOf(float, string, :class:`ExprRef`)
        Y2 coordinates for ranged ``"area"``, ``"bar"``, ``"rect"``, and ``"rule"``.
        The ``value`` of this channel can be a number or a string ``"height"`` for the
        height of the plot.
    """
    # Pointer to this type's definition within the Vega-Lite JSON schema.
    _schema = {'$ref': '#/definitions/AreaConfig'}
    # One keyword per schema property documented above; every argument is
    # forwarded verbatim to the parent initializer, with ``Undefined``
    # marking properties the caller did not supply.
    def __init__(self, align=Undefined, angle=Undefined, aria=Undefined, ariaRole=Undefined,
                 ariaRoleDescription=Undefined, aspect=Undefined, baseline=Undefined, blend=Undefined,
                 color=Undefined, cornerRadius=Undefined, cornerRadiusBottomLeft=Undefined,
                 cornerRadiusBottomRight=Undefined, cornerRadiusTopLeft=Undefined,
                 cornerRadiusTopRight=Undefined, cursor=Undefined, description=Undefined, dir=Undefined,
                 dx=Undefined, dy=Undefined, ellipsis=Undefined, endAngle=Undefined, fill=Undefined,
                 fillOpacity=Undefined, filled=Undefined, font=Undefined, fontSize=Undefined,
                 fontStyle=Undefined, fontWeight=Undefined, height=Undefined, href=Undefined,
                 innerRadius=Undefined, interpolate=Undefined, invalid=Undefined, limit=Undefined,
                 line=Undefined, lineBreak=Undefined, lineHeight=Undefined, opacity=Undefined,
                 order=Undefined, orient=Undefined, outerRadius=Undefined, padAngle=Undefined,
                 point=Undefined, radius=Undefined, radius2=Undefined, shape=Undefined, size=Undefined,
                 smooth=Undefined, startAngle=Undefined, stroke=Undefined, strokeCap=Undefined,
                 strokeDash=Undefined, strokeDashOffset=Undefined, strokeJoin=Undefined,
                 strokeMiterLimit=Undefined, strokeOffset=Undefined, strokeOpacity=Undefined,
                 strokeWidth=Undefined, tension=Undefined, text=Undefined, theta=Undefined,
                 theta2=Undefined, timeUnitBand=Undefined, timeUnitBandPosition=Undefined,
                 tooltip=Undefined, url=Undefined, width=Undefined, x=Undefined, x2=Undefined,
                 y=Undefined, y2=Undefined, **kwds):
        super(AreaConfig, self).__init__(align=align, angle=angle, aria=aria, ariaRole=ariaRole,
                                         ariaRoleDescription=ariaRoleDescription, aspect=aspect,
                                         baseline=baseline, blend=blend, color=color,
                                         cornerRadius=cornerRadius,
                                         cornerRadiusBottomLeft=cornerRadiusBottomLeft,
                                         cornerRadiusBottomRight=cornerRadiusBottomRight,
                                         cornerRadiusTopLeft=cornerRadiusTopLeft,
                                         cornerRadiusTopRight=cornerRadiusTopRight, cursor=cursor,
                                         description=description, dir=dir, dx=dx, dy=dy,
                                         ellipsis=ellipsis, endAngle=endAngle, fill=fill,
                                         fillOpacity=fillOpacity, filled=filled, font=font,
                                         fontSize=fontSize, fontStyle=fontStyle, fontWeight=fontWeight,
                                         height=height, href=href, innerRadius=innerRadius,
                                         interpolate=interpolate, invalid=invalid, limit=limit,
                                         line=line, lineBreak=lineBreak, lineHeight=lineHeight,
                                         opacity=opacity, order=order, orient=orient,
                                         outerRadius=outerRadius, padAngle=padAngle, point=point,
                                         radius=radius, radius2=radius2, shape=shape, size=size,
                                         smooth=smooth, startAngle=startAngle, stroke=stroke,
                                         strokeCap=strokeCap, strokeDash=strokeDash,
                                         strokeDashOffset=strokeDashOffset, strokeJoin=strokeJoin,
                                         strokeMiterLimit=strokeMiterLimit, strokeOffset=strokeOffset,
                                         strokeOpacity=strokeOpacity, strokeWidth=strokeWidth,
                                         tension=tension, text=text, theta=theta, theta2=theta2,
                                         timeUnitBand=timeUnitBand,
                                         timeUnitBandPosition=timeUnitBandPosition, tooltip=tooltip,
                                         url=url, width=width, x=x, x2=x2, y=y, y2=y2, **kwds)
class ArgmaxDef(Aggregate):
    """ArgmaxDef schema wrapper

    Mapping(required=[argmax])

    Attributes
    ----------
    argmax : string
    """

    # Location of this definition inside the Vega-Lite JSON schema.
    _schema = {'$ref': '#/definitions/ArgmaxDef'}

    def __init__(self, argmax=Undefined, **kwds):
        # Hand the single schema property, plus any extra keywords, to the base class.
        super().__init__(argmax=argmax, **kwds)
class ArgminDef(Aggregate):
    """ArgminDef schema wrapper

    Mapping(required=[argmin])

    Attributes
    ----------
    argmin : string
    """

    # Location of this definition inside the Vega-Lite JSON schema.
    _schema = {'$ref': '#/definitions/ArgminDef'}

    def __init__(self, argmin=Undefined, **kwds):
        # Hand the single schema property, plus any extra keywords, to the base class.
        super().__init__(argmin=argmin, **kwds)
class AutoSizeParams(VegaLiteSchema):
    """AutoSizeParams schema wrapper

    Mapping(required=[])

    Attributes
    ----------
    contains : enum('content', 'padding')
        Determines how size calculation should be performed, one of ``"content"`` or
        ``"padding"``. The default setting ( ``"content"`` ) interprets the width and height
        settings as the data rectangle (plotting) dimensions, to which padding is then
        added. In contrast, the ``"padding"`` setting includes the padding within the view
        size calculations, such that the width and height settings indicate the **total**
        intended size of the view.

        **Default value** : ``"content"``
    resize : boolean
        A boolean flag indicating if autosize layout should be re-calculated on every view
        update.

        **Default value** : ``false``
    type : :class:`AutosizeType`
        The sizing format type. One of ``"pad"``, ``"fit"``, ``"fit-x"``, ``"fit-y"``, or
        ``"none"``. See the `autosize type
        <https://vega.github.io/vega-lite/docs/size.html#autosize>`__ documentation for
        descriptions of each.

        **Default value** : ``"pad"``
    """

    # Location of this definition inside the Vega-Lite JSON schema.
    _schema = {'$ref': '#/definitions/AutoSizeParams'}

    def __init__(self, contains=Undefined, resize=Undefined, type=Undefined, **kwds):
        # Every property is optional; unset ones stay Undefined and are forwarded as-is.
        super().__init__(contains=contains, resize=resize, type=type, **kwds)
class AutosizeType(VegaLiteSchema):
    """AutosizeType schema wrapper

    enum('pad', 'none', 'fit', 'fit-x', 'fit-y')
    """

    # Location of this definition inside the Vega-Lite JSON schema.
    _schema = {'$ref': '#/definitions/AutosizeType'}

    def __init__(self, *args):
        # Enum wrapper: the positional value is passed straight through to the base class.
        super().__init__(*args)
class Axis(VegaLiteSchema):
    """Axis schema wrapper

    Mapping(required=[])

    Attributes
    ----------
    aria : anyOf(boolean, :class:`ExprRef`)
    bandPosition : anyOf(float, :class:`ExprRef`)
    description : anyOf(string, :class:`ExprRef`)
    domain : anyOf(boolean, :class:`ExprRef`)
    domainCap : anyOf(:class:`StrokeCap`, :class:`ExprRef`)
    domainColor : anyOf(anyOf(None, :class:`Color`), :class:`ExprRef`)
    domainDash : anyOf(List(float), :class:`ExprRef`)
    domainDashOffset : anyOf(float, :class:`ExprRef`)
    domainOpacity : anyOf(float, :class:`ExprRef`)
    domainWidth : anyOf(float, :class:`ExprRef`)
    format : anyOf(string, :class:`Dictunknown`)
        When used with the default ``"number"`` and ``"time"`` format type, the text
        formatting pattern for labels of guides (axes, legends, headers) and text marks.

        * If the format type is ``"number"`` (e.g., for quantitative fields), this is D3's
          `number format pattern <https://github.com/d3/d3-format#locale_format>`__. - If
          the format type is ``"time"`` (e.g., for temporal fields), this is D3's `time
          format pattern <https://github.com/d3/d3-time-format#locale_format>`__.

        See the `format documentation <https://vega.github.io/vega-lite/docs/format.html>`__
        for more examples.

        When used with a `custom formatType
        <https://vega.github.io/vega-lite/docs/config.html#custom-format-type>`__, this
        value will be passed as ``format`` alongside ``datum.value`` to the registered
        function.

        **Default value:** Derived from `numberFormat
        <https://vega.github.io/vega-lite/docs/config.html#format>`__ config for number
        format and from `timeFormat
        <https://vega.github.io/vega-lite/docs/config.html#format>`__ config for time
        format.
    formatType : string
        The format type for labels. One of ``"number"``, ``"time"``, or a `registered custom
        format type
        <https://vega.github.io/vega-lite/docs/config.html#custom-format-type>`__.

        **Default value:** - ``"time"`` for temporal fields and ordinal and nominal fields
        with ``timeUnit``. - ``"number"`` for quantitative fields as well as ordinal and
        nominal fields without ``timeUnit``.
    grid : boolean
        A boolean flag indicating if grid lines should be included as part of the axis

        **Default value:** ``true`` for `continuous scales
        <https://vega.github.io/vega-lite/docs/scale.html#continuous>`__ that are not
        binned; otherwise, ``false``.
    gridCap : anyOf(:class:`StrokeCap`, :class:`ExprRef`)
    gridColor : anyOf(anyOf(None, :class:`Color`), :class:`ExprRef`,
    :class:`ConditionalAxisColor`)
    gridDash : anyOf(List(float), :class:`ExprRef`, :class:`ConditionalAxisNumberArray`)
    gridDashOffset : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    gridOpacity : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    gridWidth : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    labelAlign : anyOf(:class:`Align`, :class:`ExprRef`, :class:`ConditionalAxisLabelAlign`)
    labelAngle : anyOf(float, :class:`ExprRef`)
    labelBaseline : anyOf(:class:`TextBaseline`, :class:`ExprRef`,
    :class:`ConditionalAxisLabelBaseline`)
    labelBound : anyOf(anyOf(float, boolean), :class:`ExprRef`)
    labelColor : anyOf(anyOf(None, :class:`Color`), :class:`ExprRef`,
    :class:`ConditionalAxisColor`)
    labelExpr : string
        `Vega expression <https://vega.github.io/vega/docs/expressions/>`__ for customizing
        labels.

        **Note:** The label text and value can be assessed via the ``label`` and ``value``
        properties of the axis's backing ``datum`` object.
    labelFlush : anyOf(boolean, float)
        Indicates if the first and last axis labels should be aligned flush with the scale
        range. Flush alignment for a horizontal axis will left-align the first label and
        right-align the last label. For vertical axes, bottom and top text baselines are
        applied instead. If this property is a number, it also indicates the number of
        pixels by which to offset the first and last labels; for example, a value of 2 will
        flush-align the first and last labels and also push them 2 pixels outward from the
        center of the axis. The additional adjustment can sometimes help the labels better
        visually group with corresponding axis ticks.

        **Default value:** ``true`` for axis of a continuous x-scale. Otherwise, ``false``.
    labelFlushOffset : anyOf(float, :class:`ExprRef`)
    labelFont : anyOf(string, :class:`ExprRef`, :class:`ConditionalAxisString`)
    labelFontSize : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    labelFontStyle : anyOf(:class:`FontStyle`, :class:`ExprRef`,
    :class:`ConditionalAxisLabelFontStyle`)
    labelFontWeight : anyOf(:class:`FontWeight`, :class:`ExprRef`,
    :class:`ConditionalAxisLabelFontWeight`)
    labelLimit : anyOf(float, :class:`ExprRef`)
    labelLineHeight : anyOf(float, :class:`ExprRef`)
    labelOffset : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    labelOpacity : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    labelOverlap : anyOf(:class:`LabelOverlap`, :class:`ExprRef`)
        The strategy to use for resolving overlap of axis labels. If ``false`` (the
        default), no overlap reduction is attempted. If set to ``true`` or ``"parity"``, a
        strategy of removing every other label is used (this works well for standard linear
        axes). If set to ``"greedy"``, a linear scan of the labels is performed, removing
        any labels that overlaps with the last visible label (this often works better for
        log-scaled axes).

        **Default value:** ``true`` for non-nominal fields with non-log scales; ``"greedy"``
        for log scales; otherwise ``false``.
    labelPadding : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    labelSeparation : anyOf(float, :class:`ExprRef`)
    labels : anyOf(boolean, :class:`ExprRef`)
    maxExtent : anyOf(float, :class:`ExprRef`)
    minExtent : anyOf(float, :class:`ExprRef`)
    offset : float
        The offset, in pixels, by which to displace the axis from the edge of the enclosing
        group or data rectangle.

        **Default value:** derived from the `axis config
        <https://vega.github.io/vega-lite/docs/config.html#facet-scale-config>`__ 's
        ``offset`` ( ``0`` by default)
    orient : anyOf(:class:`AxisOrient`, :class:`ExprRef`)
        The orientation of the axis. One of ``"top"``, ``"bottom"``, ``"left"`` or
        ``"right"``. The orientation can be used to further specialize the axis type (e.g.,
        a y-axis oriented towards the right edge of the chart).

        **Default value:** ``"bottom"`` for x-axes and ``"left"`` for y-axes.
    position : anyOf(float, :class:`ExprRef`)
        The anchor position of the axis in pixels. For x-axes with top or bottom
        orientation, this sets the axis group x coordinate. For y-axes with left or right
        orientation, this sets the axis group y coordinate.

        **Default value** : ``0``
    style : anyOf(string, List(string))
        A string or array of strings indicating the name of custom styles to apply to the
        axis. A style is a named collection of axis property defined within the `style
        configuration <https://vega.github.io/vega-lite/docs/mark.html#style-config>`__. If
        style is an array, later styles will override earlier styles.

        **Default value:** (none) **Note:** Any specified style will augment the default
        style. For example, an x-axis mark with ``"style": "foo"`` will use ``config.axisX``
        and ``config.style.foo`` (the specified style ``"foo"`` has higher precedence).
    tickBand : anyOf(enum('center', 'extent'), :class:`ExprRef`)
    tickCap : anyOf(:class:`StrokeCap`, :class:`ExprRef`)
    tickColor : anyOf(anyOf(None, :class:`Color`), :class:`ExprRef`,
    :class:`ConditionalAxisColor`)
    tickCount : anyOf(float, :class:`TimeInterval`, :class:`TimeIntervalStep`, :class:`ExprRef`)
        A desired number of ticks, for axes visualizing quantitative scales. The resulting
        number may be different so that values are "nice" (multiples of 2, 5, 10) and lie
        within the underlying scale's range.

        For scales of type ``"time"`` or ``"utc"``, the tick count can instead be a time
        interval specifier. Legal string values are ``"millisecond"``, ``"second"``,
        ``"minute"``, ``"hour"``, ``"day"``, ``"week"``, ``"month"``, and ``"year"``.
        Alternatively, an object-valued interval specifier of the form ``{"interval":
        "month", "step": 3}`` includes a desired number of interval steps. Here, ticks are
        generated for each quarter (Jan, Apr, Jul, Oct) boundary.

        **Default value** : Determine using a formula ``ceil(width/40)`` for x and
        ``ceil(height/40)`` for y.
    tickDash : anyOf(List(float), :class:`ExprRef`, :class:`ConditionalAxisNumberArray`)
    tickDashOffset : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    tickExtra : anyOf(boolean, :class:`ExprRef`)
    tickMinStep : anyOf(float, :class:`ExprRef`)
        The minimum desired step between axis ticks, in terms of scale domain values. For
        example, a value of ``1`` indicates that ticks should not be less than 1 unit apart.
        If ``tickMinStep`` is specified, the ``tickCount`` value will be adjusted, if
        necessary, to enforce the minimum step value.
    tickOffset : anyOf(float, :class:`ExprRef`)
    tickOpacity : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    tickRound : anyOf(boolean, :class:`ExprRef`)
    tickSize : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    tickWidth : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    ticks : anyOf(boolean, :class:`ExprRef`)
    title : anyOf(:class:`Text`, None)
        A title for the field. If ``null``, the title will be removed.

        **Default value:** derived from the field's name and transformation function (
        ``aggregate``, ``bin`` and ``timeUnit`` ). If the field has an aggregate function,
        the function is displayed as part of the title (e.g., ``"Sum of Profit"`` ). If the
        field is binned or has a time unit applied, the applied function is shown in
        parentheses (e.g., ``"Profit (binned)"``, ``"Transaction Date (year-month)"`` ).
        Otherwise, the title is simply the field name.

        **Notes** :

        1) You can customize the default field title format by providing the `fieldTitle
        <https://vega.github.io/vega-lite/docs/config.html#top-level-config>`__ property in
        the `config <https://vega.github.io/vega-lite/docs/config.html>`__ or `fieldTitle
        function via the compile function's options
        <https://vega.github.io/vega-lite/docs/compile.html#field-title>`__.

        2) If both field definition's ``title`` and axis, header, or legend ``title`` are
        defined, axis/header/legend title will be used.
    titleAlign : anyOf(:class:`Align`, :class:`ExprRef`)
    titleAnchor : anyOf(:class:`TitleAnchor`, :class:`ExprRef`)
    titleAngle : anyOf(float, :class:`ExprRef`)
    titleBaseline : anyOf(:class:`TextBaseline`, :class:`ExprRef`)
    titleColor : anyOf(anyOf(None, :class:`Color`), :class:`ExprRef`)
    titleFont : anyOf(string, :class:`ExprRef`)
    titleFontSize : anyOf(float, :class:`ExprRef`)
    titleFontStyle : anyOf(:class:`FontStyle`, :class:`ExprRef`)
    titleFontWeight : anyOf(:class:`FontWeight`, :class:`ExprRef`)
    titleLimit : anyOf(float, :class:`ExprRef`)
    titleLineHeight : anyOf(float, :class:`ExprRef`)
    titleOpacity : anyOf(float, :class:`ExprRef`)
    titlePadding : anyOf(float, :class:`ExprRef`)
    titleX : anyOf(float, :class:`ExprRef`)
    titleY : anyOf(float, :class:`ExprRef`)
    translate : anyOf(float, :class:`ExprRef`)
    values : anyOf(List(float), List(string), List(boolean), List(:class:`DateTime`),
    :class:`ExprRef`)
        Explicitly set the visible axis tick values.
    zindex : float
        A non-negative integer indicating the z-index of the axis. If zindex is 0, axes
        should be drawn behind all chart elements. To put them in front, set ``zindex`` to
        ``1`` or more.

        **Default value:** ``0`` (behind the marks).
    """

    # Location of this definition inside the Vega-Lite JSON schema.
    _schema = {'$ref': '#/definitions/Axis'}

    # Auto-generated constructor: every schema property is an optional keyword
    # argument defaulting to Undefined; all values are forwarded verbatim to the
    # superclass constructor (presumably Undefined values are treated as "not set"
    # by the schema machinery — confirm in VegaLiteSchema).
    def __init__(self, aria=Undefined, bandPosition=Undefined, description=Undefined, domain=Undefined,
                 domainCap=Undefined, domainColor=Undefined, domainDash=Undefined,
                 domainDashOffset=Undefined, domainOpacity=Undefined, domainWidth=Undefined,
                 format=Undefined, formatType=Undefined, grid=Undefined, gridCap=Undefined,
                 gridColor=Undefined, gridDash=Undefined, gridDashOffset=Undefined,
                 gridOpacity=Undefined, gridWidth=Undefined, labelAlign=Undefined, labelAngle=Undefined,
                 labelBaseline=Undefined, labelBound=Undefined, labelColor=Undefined,
                 labelExpr=Undefined, labelFlush=Undefined, labelFlushOffset=Undefined,
                 labelFont=Undefined, labelFontSize=Undefined, labelFontStyle=Undefined,
                 labelFontWeight=Undefined, labelLimit=Undefined, labelLineHeight=Undefined,
                 labelOffset=Undefined, labelOpacity=Undefined, labelOverlap=Undefined,
                 labelPadding=Undefined, labelSeparation=Undefined, labels=Undefined,
                 maxExtent=Undefined, minExtent=Undefined, offset=Undefined, orient=Undefined,
                 position=Undefined, style=Undefined, tickBand=Undefined, tickCap=Undefined,
                 tickColor=Undefined, tickCount=Undefined, tickDash=Undefined, tickDashOffset=Undefined,
                 tickExtra=Undefined, tickMinStep=Undefined, tickOffset=Undefined,
                 tickOpacity=Undefined, tickRound=Undefined, tickSize=Undefined, tickWidth=Undefined,
                 ticks=Undefined, title=Undefined, titleAlign=Undefined, titleAnchor=Undefined,
                 titleAngle=Undefined, titleBaseline=Undefined, titleColor=Undefined,
                 titleFont=Undefined, titleFontSize=Undefined, titleFontStyle=Undefined,
                 titleFontWeight=Undefined, titleLimit=Undefined, titleLineHeight=Undefined,
                 titleOpacity=Undefined, titlePadding=Undefined, titleX=Undefined, titleY=Undefined,
                 translate=Undefined, values=Undefined, zindex=Undefined, **kwds):
        super(Axis, self).__init__(aria=aria, bandPosition=bandPosition, description=description,
                                   domain=domain, domainCap=domainCap, domainColor=domainColor,
                                   domainDash=domainDash, domainDashOffset=domainDashOffset,
                                   domainOpacity=domainOpacity, domainWidth=domainWidth, format=format,
                                   formatType=formatType, grid=grid, gridCap=gridCap,
                                   gridColor=gridColor, gridDash=gridDash,
                                   gridDashOffset=gridDashOffset, gridOpacity=gridOpacity,
                                   gridWidth=gridWidth, labelAlign=labelAlign, labelAngle=labelAngle,
                                   labelBaseline=labelBaseline, labelBound=labelBound,
                                   labelColor=labelColor, labelExpr=labelExpr, labelFlush=labelFlush,
                                   labelFlushOffset=labelFlushOffset, labelFont=labelFont,
                                   labelFontSize=labelFontSize, labelFontStyle=labelFontStyle,
                                   labelFontWeight=labelFontWeight, labelLimit=labelLimit,
                                   labelLineHeight=labelLineHeight, labelOffset=labelOffset,
                                   labelOpacity=labelOpacity, labelOverlap=labelOverlap,
                                   labelPadding=labelPadding, labelSeparation=labelSeparation,
                                   labels=labels, maxExtent=maxExtent, minExtent=minExtent,
                                   offset=offset, orient=orient, position=position, style=style,
                                   tickBand=tickBand, tickCap=tickCap, tickColor=tickColor,
                                   tickCount=tickCount, tickDash=tickDash,
                                   tickDashOffset=tickDashOffset, tickExtra=tickExtra,
                                   tickMinStep=tickMinStep, tickOffset=tickOffset,
                                   tickOpacity=tickOpacity, tickRound=tickRound, tickSize=tickSize,
                                   tickWidth=tickWidth, ticks=ticks, title=title, titleAlign=titleAlign,
                                   titleAnchor=titleAnchor, titleAngle=titleAngle,
                                   titleBaseline=titleBaseline, titleColor=titleColor,
                                   titleFont=titleFont, titleFontSize=titleFontSize,
                                   titleFontStyle=titleFontStyle, titleFontWeight=titleFontWeight,
                                   titleLimit=titleLimit, titleLineHeight=titleLineHeight,
                                   titleOpacity=titleOpacity, titlePadding=titlePadding, titleX=titleX,
                                   titleY=titleY, translate=translate, values=values, zindex=zindex,
                                   **kwds)
class AxisConfig(VegaLiteSchema):
    """AxisConfig schema wrapper

    Mapping(required=[])

    Attributes
    ----------
    aria : anyOf(boolean, :class:`ExprRef`)
    bandPosition : anyOf(float, :class:`ExprRef`)
    description : anyOf(string, :class:`ExprRef`)
    disable : boolean
        Disable axis by default.
    domain : anyOf(boolean, :class:`ExprRef`)
    domainCap : anyOf(:class:`StrokeCap`, :class:`ExprRef`)
    domainColor : anyOf(anyOf(None, :class:`Color`), :class:`ExprRef`)
    domainDash : anyOf(List(float), :class:`ExprRef`)
    domainDashOffset : anyOf(float, :class:`ExprRef`)
    domainOpacity : anyOf(float, :class:`ExprRef`)
    domainWidth : anyOf(float, :class:`ExprRef`)
    format : anyOf(string, :class:`Dictunknown`)
        When used with the default ``"number"`` and ``"time"`` format type, the text
        formatting pattern for labels of guides (axes, legends, headers) and text marks.

        * If the format type is ``"number"`` (e.g., for quantitative fields), this is D3's
          `number format pattern <https://github.com/d3/d3-format#locale_format>`__. - If
          the format type is ``"time"`` (e.g., for temporal fields), this is D3's `time
          format pattern <https://github.com/d3/d3-time-format#locale_format>`__.

        See the `format documentation <https://vega.github.io/vega-lite/docs/format.html>`__
        for more examples.

        When used with a `custom formatType
        <https://vega.github.io/vega-lite/docs/config.html#custom-format-type>`__, this
        value will be passed as ``format`` alongside ``datum.value`` to the registered
        function.

        **Default value:** Derived from `numberFormat
        <https://vega.github.io/vega-lite/docs/config.html#format>`__ config for number
        format and from `timeFormat
        <https://vega.github.io/vega-lite/docs/config.html#format>`__ config for time
        format.
    formatType : string
        The format type for labels. One of ``"number"``, ``"time"``, or a `registered custom
        format type
        <https://vega.github.io/vega-lite/docs/config.html#custom-format-type>`__.

        **Default value:** - ``"time"`` for temporal fields and ordinal and nominal fields
        with ``timeUnit``. - ``"number"`` for quantitative fields as well as ordinal and
        nominal fields without ``timeUnit``.
    grid : boolean
        A boolean flag indicating if grid lines should be included as part of the axis

        **Default value:** ``true`` for `continuous scales
        <https://vega.github.io/vega-lite/docs/scale.html#continuous>`__ that are not
        binned; otherwise, ``false``.
    gridCap : anyOf(:class:`StrokeCap`, :class:`ExprRef`)
    gridColor : anyOf(anyOf(None, :class:`Color`), :class:`ExprRef`,
    :class:`ConditionalAxisColor`)
    gridDash : anyOf(List(float), :class:`ExprRef`, :class:`ConditionalAxisNumberArray`)
    gridDashOffset : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    gridOpacity : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    gridWidth : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    labelAlign : anyOf(:class:`Align`, :class:`ExprRef`, :class:`ConditionalAxisLabelAlign`)
    labelAngle : anyOf(float, :class:`ExprRef`)
    labelBaseline : anyOf(:class:`TextBaseline`, :class:`ExprRef`,
    :class:`ConditionalAxisLabelBaseline`)
    labelBound : anyOf(anyOf(float, boolean), :class:`ExprRef`)
    labelColor : anyOf(anyOf(None, :class:`Color`), :class:`ExprRef`,
    :class:`ConditionalAxisColor`)
    labelExpr : string
        `Vega expression <https://vega.github.io/vega/docs/expressions/>`__ for customizing
        labels text.

        **Note:** The label text and value can be assessed via the ``label`` and ``value``
        properties of the axis's backing ``datum`` object.
    labelFlush : anyOf(boolean, float)
        Indicates if the first and last axis labels should be aligned flush with the scale
        range. Flush alignment for a horizontal axis will left-align the first label and
        right-align the last label. For vertical axes, bottom and top text baselines are
        applied instead. If this property is a number, it also indicates the number of
        pixels by which to offset the first and last labels; for example, a value of 2 will
        flush-align the first and last labels and also push them 2 pixels outward from the
        center of the axis. The additional adjustment can sometimes help the labels better
        visually group with corresponding axis ticks.

        **Default value:** ``true`` for axis of a continuous x-scale. Otherwise, ``false``.
    labelFlushOffset : anyOf(float, :class:`ExprRef`)
    labelFont : anyOf(string, :class:`ExprRef`, :class:`ConditionalAxisString`)
    labelFontSize : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    labelFontStyle : anyOf(:class:`FontStyle`, :class:`ExprRef`,
    :class:`ConditionalAxisLabelFontStyle`)
    labelFontWeight : anyOf(:class:`FontWeight`, :class:`ExprRef`,
    :class:`ConditionalAxisLabelFontWeight`)
    labelLimit : anyOf(float, :class:`ExprRef`)
    labelLineHeight : anyOf(float, :class:`ExprRef`)
    labelOffset : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    labelOpacity : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    labelOverlap : anyOf(:class:`LabelOverlap`, :class:`ExprRef`)
        The strategy to use for resolving overlap of axis labels. If ``false`` (the
        default), no overlap reduction is attempted. If set to ``true`` or ``"parity"``, a
        strategy of removing every other label is used (this works well for standard linear
        axes). If set to ``"greedy"``, a linear scan of the labels is performed, removing
        any labels that overlaps with the last visible label (this often works better for
        log-scaled axes).

        **Default value:** ``true`` for non-nominal fields with non-log scales; ``"greedy"``
        for log scales; otherwise ``false``.
    labelPadding : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    labelSeparation : anyOf(float, :class:`ExprRef`)
    labels : anyOf(boolean, :class:`ExprRef`)
    maxExtent : anyOf(float, :class:`ExprRef`)
    minExtent : anyOf(float, :class:`ExprRef`)
    offset : float
        The offset, in pixels, by which to displace the axis from the edge of the enclosing
        group or data rectangle.

        **Default value:** derived from the `axis config
        <https://vega.github.io/vega-lite/docs/config.html#facet-scale-config>`__ 's
        ``offset`` ( ``0`` by default)
    orient : anyOf(:class:`AxisOrient`, :class:`ExprRef`)
        The orientation of the axis. One of ``"top"``, ``"bottom"``, ``"left"`` or
        ``"right"``. The orientation can be used to further specialize the axis type (e.g.,
        a y-axis oriented towards the right edge of the chart).

        **Default value:** ``"bottom"`` for x-axes and ``"left"`` for y-axes.
    position : anyOf(float, :class:`ExprRef`)
        The anchor position of the axis in pixels. For x-axes with top or bottom
        orientation, this sets the axis group x coordinate. For y-axes with left or right
        orientation, this sets the axis group y coordinate.

        **Default value** : ``0``
    style : anyOf(string, List(string))
        A string or array of strings indicating the name of custom styles to apply to the
        axis. A style is a named collection of axis property defined within the `style
        configuration <https://vega.github.io/vega-lite/docs/mark.html#style-config>`__. If
        style is an array, later styles will override earlier styles.

        **Default value:** (none) **Note:** Any specified style will augment the default
        style. For example, an x-axis mark with ``"style": "foo"`` will use ``config.axisX``
        and ``config.style.foo`` (the specified style ``"foo"`` has higher precedence).
    tickBand : anyOf(enum('center', 'extent'), :class:`ExprRef`)
    tickCap : anyOf(:class:`StrokeCap`, :class:`ExprRef`)
    tickColor : anyOf(anyOf(None, :class:`Color`), :class:`ExprRef`,
    :class:`ConditionalAxisColor`)
    tickCount : anyOf(float, :class:`TimeInterval`, :class:`TimeIntervalStep`, :class:`ExprRef`)
        A desired number of ticks, for axes visualizing quantitative scales. The resulting
        number may be different so that values are "nice" (multiples of 2, 5, 10) and lie
        within the underlying scale's range.

        For scales of type ``"time"`` or ``"utc"``, the tick count can instead be a time
        interval specifier. Legal string values are ``"millisecond"``, ``"second"``,
        ``"minute"``, ``"hour"``, ``"day"``, ``"week"``, ``"month"``, and ``"year"``.
        Alternatively, an object-valued interval specifier of the form ``{"interval":
        "month", "step": 3}`` includes a desired number of interval steps. Here, ticks are
        generated for each quarter (Jan, Apr, Jul, Oct) boundary.

        **Default value** : Determine using a formula ``ceil(width/40)`` for x and
        ``ceil(height/40)`` for y.
    tickDash : anyOf(List(float), :class:`ExprRef`, :class:`ConditionalAxisNumberArray`)
    tickDashOffset : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    tickExtra : anyOf(boolean, :class:`ExprRef`)
    tickMinStep : anyOf(float, :class:`ExprRef`)
        The minimum desired step between axis ticks, in terms of scale domain values. For
        example, a value of ``1`` indicates that ticks should not be less than 1 unit apart.
        If ``tickMinStep`` is specified, the ``tickCount`` value will be adjusted, if
        necessary, to enforce the minimum step value.
    tickOffset : anyOf(float, :class:`ExprRef`)
    tickOpacity : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    tickRound : anyOf(boolean, :class:`ExprRef`)
    tickSize : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    tickWidth : anyOf(float, :class:`ExprRef`, :class:`ConditionalAxisNumber`)
    ticks : anyOf(boolean, :class:`ExprRef`)
    title : anyOf(:class:`Text`, None)
        A title for the field. If ``null``, the title will be removed.

        **Default value:** derived from the field's name and transformation function (
        ``aggregate``, ``bin`` and ``timeUnit`` ). If the field has an aggregate function,
        the function is displayed as part of the title (e.g., ``"Sum of Profit"`` ). If the
        field is binned or has a time unit applied, the applied function is shown in
        parentheses (e.g., ``"Profit (binned)"``, ``"Transaction Date (year-month)"`` ).
        Otherwise, the title is simply the field name.

        **Notes** :

        1) You can customize the default field title format by providing the `fieldTitle
        <https://vega.github.io/vega-lite/docs/config.html#top-level-config>`__ property in
        the `config <https://vega.github.io/vega-lite/docs/config.html>`__ or `fieldTitle
        function via the compile function's options
        <https://vega.github.io/vega-lite/docs/compile.html#field-title>`__.

        2) If both field definition's ``title`` and axis, header, or legend ``title`` are
        defined, axis/header/legend title will be used.
    titleAlign : anyOf(:class:`Align`, :class:`ExprRef`)
    titleAnchor : anyOf(:class:`TitleAnchor`, :class:`ExprRef`)
    titleAngle : anyOf(float, :class:`ExprRef`)
    titleBaseline : anyOf(:class:`TextBaseline`, :class:`ExprRef`)
    titleColor : anyOf(anyOf(None, :class:`Color`), :class:`ExprRef`)
    titleFont : anyOf(string, :class:`ExprRef`)
    titleFontSize : anyOf(float, :class:`ExprRef`)
    titleFontStyle : anyOf(:class:`FontStyle`, :class:`ExprRef`)
    titleFontWeight : anyOf(:class:`FontWeight`, :class:`ExprRef`)
    titleLimit : anyOf(float, :class:`ExprRef`)
    titleLineHeight : anyOf(float, :class:`ExprRef`)
    titleOpacity : anyOf(float, :class:`ExprRef`)
    titlePadding : anyOf(float, :class:`ExprRef`)
    titleX : anyOf(float, :class:`ExprRef`)
    titleY : anyOf(float, :class:`ExprRef`)
    translate : anyOf(float, :class:`ExprRef`)
    values : anyOf(List(float), List(string), List(boolean), List(:class:`DateTime`),
    :class:`ExprRef`)
        Explicitly set the visible axis tick values.
    zindex : float
        A non-negative integer indicating the z-index of the axis. If zindex is 0, axes
        should be drawn behind all chart elements. To put them in front, set ``zindex`` to
        ``1`` or more.

        **Default value:** ``0`` (behind the marks).
    """

    # Location of this definition inside the Vega-Lite JSON schema.
    _schema = {'$ref': '#/definitions/AxisConfig'}

    # Auto-generated constructor: identical shape to Axis.__init__ plus the extra
    # ``disable`` property; every keyword defaults to Undefined and is forwarded
    # verbatim to the superclass constructor.
    def __init__(self, aria=Undefined, bandPosition=Undefined, description=Undefined, disable=Undefined,
                 domain=Undefined, domainCap=Undefined, domainColor=Undefined, domainDash=Undefined,
                 domainDashOffset=Undefined, domainOpacity=Undefined, domainWidth=Undefined,
                 format=Undefined, formatType=Undefined, grid=Undefined, gridCap=Undefined,
                 gridColor=Undefined, gridDash=Undefined, gridDashOffset=Undefined,
                 gridOpacity=Undefined, gridWidth=Undefined, labelAlign=Undefined, labelAngle=Undefined,
                 labelBaseline=Undefined, labelBound=Undefined, labelColor=Undefined,
                 labelExpr=Undefined, labelFlush=Undefined, labelFlushOffset=Undefined,
                 labelFont=Undefined, labelFontSize=Undefined, labelFontStyle=Undefined,
                 labelFontWeight=Undefined, labelLimit=Undefined, labelLineHeight=Undefined,
                 labelOffset=Undefined, labelOpacity=Undefined, labelOverlap=Undefined,
                 labelPadding=Undefined, labelSeparation=Undefined, labels=Undefined,
                 maxExtent=Undefined, minExtent=Undefined, offset=Undefined, orient=Undefined,
                 position=Undefined, style=Undefined, tickBand=Undefined, tickCap=Undefined,
                 tickColor=Undefined, tickCount=Undefined, tickDash=Undefined, tickDashOffset=Undefined,
                 tickExtra=Undefined, tickMinStep=Undefined, tickOffset=Undefined,
                 tickOpacity=Undefined, tickRound=Undefined, tickSize=Undefined, tickWidth=Undefined,
                 ticks=Undefined, title=Undefined, titleAlign=Undefined, titleAnchor=Undefined,
                 titleAngle=Undefined, titleBaseline=Undefined, titleColor=Undefined,
                 titleFont=Undefined, titleFontSize=Undefined, titleFontStyle=Undefined,
                 titleFontWeight=Undefined, titleLimit=Undefined, titleLineHeight=Undefined,
                 titleOpacity=Undefined, titlePadding=Undefined, titleX=Undefined, titleY=Undefined,
                 translate=Undefined, values=Undefined, zindex=Undefined, **kwds):
        super(AxisConfig, self).__init__(aria=aria, bandPosition=bandPosition, description=description,
                                         disable=disable, domain=domain, domainCap=domainCap,
                                         domainColor=domainColor, domainDash=domainDash,
                                         domainDashOffset=domainDashOffset, domainOpacity=domainOpacity,
                                         domainWidth=domainWidth, format=format, formatType=formatType,
                                         grid=grid, gridCap=gridCap, gridColor=gridColor,
                                         gridDash=gridDash, gridDashOffset=gridDashOffset,
                                         gridOpacity=gridOpacity, gridWidth=gridWidth,
                                         labelAlign=labelAlign, labelAngle=labelAngle,
                                         labelBaseline=labelBaseline, labelBound=labelBound,
                                         labelColor=labelColor, labelExpr=labelExpr,
                                         labelFlush=labelFlush, labelFlushOffset=labelFlushOffset,
                                         labelFont=labelFont, labelFontSize=labelFontSize,
                                         labelFontStyle=labelFontStyle, labelFontWeight=labelFontWeight,
                                         labelLimit=labelLimit, labelLineHeight=labelLineHeight,
                                         labelOffset=labelOffset, labelOpacity=labelOpacity,
                                         labelOverlap=labelOverlap, labelPadding=labelPadding,
                                         labelSeparation=labelSeparation, labels=labels,
                                         maxExtent=maxExtent, minExtent=minExtent, offset=offset,
                                         orient=orient, position=position, style=style,
                                         tickBand=tickBand, tickCap=tickCap, tickColor=tickColor,
                                         tickCount=tickCount, tickDash=tickDash,
                                         tickDashOffset=tickDashOffset, tickExtra=tickExtra,
                                         tickMinStep=tickMinStep, tickOffset=tickOffset,
                                         tickOpacity=tickOpacity, tickRound=tickRound,
                                         tickSize=tickSize, tickWidth=tickWidth, ticks=ticks,
                                         title=title, titleAlign=titleAlign, titleAnchor=titleAnchor,
                                         titleAngle=titleAngle, titleBaseline=titleBaseline,
                                         titleColor=titleColor, titleFont=titleFont,
                                         titleFontSize=titleFontSize, titleFontStyle=titleFontStyle,
                                         titleFontWeight=titleFontWeight, titleLimit=titleLimit,
                                         titleLineHeight=titleLineHeight, titleOpacity=titleOpacity,
                                         titlePadding=titlePadding, titleX=titleX, titleY=titleY,
                                         translate=translate, values=values, zindex=zindex, **kwds)
class AxisOrient(VegaLiteSchema):
    """AxisOrient schema wrapper.

    enum('top', 'bottom', 'left', 'right')

    Wraps one of the four axis-orientation values defined by the
    Vega-Lite ``AxisOrient`` schema.
    """

    _schema = {'$ref': '#/definitions/AxisOrient'}

    def __init__(self, *args):
        # Validation against ``_schema`` is performed by the base class.
        super(AxisOrient, self).__init__(*args)
class AxisResolveMap(VegaLiteSchema):
    """AxisResolveMap schema wrapper.

    Mapping(required=[])

    Attributes
    ----------
    x : :class:`ResolveMode`
    y : :class:`ResolveMode`
    """

    _schema = {'$ref': '#/definitions/AxisResolveMap'}

    def __init__(self, x=Undefined, y=Undefined, **kwds):
        # Forward both channel entries plus any additional schema
        # properties to the validating base class.
        super(AxisResolveMap, self).__init__(x=x, y=y, **kwds)
class BarConfig(AnyMarkConfig):
    """BarConfig schema wrapper.

    Mapping(required=[])

    Default configuration for ``bar`` marks.  Every keyword argument
    corresponds to the identically named property of the Vega-Lite
    ``BarConfig`` schema definition and is passed through unchanged; see
    the ``#/definitions/BarConfig`` entry of the schema for the detailed
    semantics of each property (alignment and baseline, bar spacing and
    band sizes, corner radii, color/fill/stroke styling, font and text
    options, arc/radius options, opacity, orientation, tooltip behavior,
    and x/y positioning).  Unrecognized keywords are accepted via
    ``**kwds`` and validated by the base class.
    """

    _schema = {'$ref': '#/definitions/BarConfig'}

    def __init__(self, align=Undefined, angle=Undefined, aria=Undefined, ariaRole=Undefined,
                 ariaRoleDescription=Undefined, aspect=Undefined, baseline=Undefined,
                 binSpacing=Undefined, blend=Undefined, color=Undefined, continuousBandSize=Undefined,
                 cornerRadius=Undefined, cornerRadiusBottomLeft=Undefined,
                 cornerRadiusBottomRight=Undefined, cornerRadiusEnd=Undefined,
                 cornerRadiusTopLeft=Undefined, cornerRadiusTopRight=Undefined, cursor=Undefined,
                 description=Undefined, dir=Undefined, discreteBandSize=Undefined, dx=Undefined,
                 dy=Undefined, ellipsis=Undefined, endAngle=Undefined, fill=Undefined,
                 fillOpacity=Undefined, filled=Undefined, font=Undefined, fontSize=Undefined,
                 fontStyle=Undefined, fontWeight=Undefined, height=Undefined, href=Undefined,
                 innerRadius=Undefined, interpolate=Undefined, invalid=Undefined, limit=Undefined,
                 lineBreak=Undefined, lineHeight=Undefined, opacity=Undefined, order=Undefined,
                 orient=Undefined, outerRadius=Undefined, padAngle=Undefined, radius=Undefined,
                 radius2=Undefined, shape=Undefined, size=Undefined, smooth=Undefined,
                 startAngle=Undefined, stroke=Undefined, strokeCap=Undefined, strokeDash=Undefined,
                 strokeDashOffset=Undefined, strokeJoin=Undefined, strokeMiterLimit=Undefined,
                 strokeOffset=Undefined, strokeOpacity=Undefined, strokeWidth=Undefined,
                 tension=Undefined, text=Undefined, theta=Undefined, theta2=Undefined,
                 timeUnitBand=Undefined, timeUnitBandPosition=Undefined, tooltip=Undefined,
                 url=Undefined, width=Undefined, x=Undefined, x2=Undefined, y=Undefined, y2=Undefined,
                 **kwds):
        # ``locals()`` is snapshotted before any other local name exists,
        # so it holds exactly the bound parameters; forward all of them
        # (plus the extras in ``kwds``) to the schema base class.
        properties = dict(locals())
        properties.pop('self')
        properties.update(properties.pop('kwds'))
        super(BarConfig, self).__init__(**properties)
class BaseTitleNoValueRefs(VegaLiteSchema):
    """BaseTitleNoValueRefs schema wrapper.

    Mapping(required=[])

    Title/subtitle styling options without value references.  Each
    keyword maps one-to-one onto the same-named property of the
    Vega-Lite ``BaseTitleNoValueRefs`` schema definition.

    Attributes
    ----------
    align : :class:`Align`
        Horizontal text alignment: ``"left"``, ``"center"``, or ``"right"``.
    anchor : anyOf(:class:`TitleAnchor`, :class:`ExprRef`)
    angle : anyOf(float, :class:`ExprRef`)
    aria : anyOf(boolean, :class:`ExprRef`)
    baseline : :class:`TextBaseline`
        Vertical text baseline for title and subtitle text
        (``"alphabetic"`` by default).
    color : anyOf(anyOf(None, :class:`Color`), :class:`ExprRef`)
    dx : anyOf(float, :class:`ExprRef`)
    dy : anyOf(float, :class:`ExprRef`)
    font : anyOf(string, :class:`ExprRef`)
    fontSize : anyOf(float, :class:`ExprRef`)
    fontStyle : anyOf(:class:`FontStyle`, :class:`ExprRef`)
    fontWeight : anyOf(:class:`FontWeight`, :class:`ExprRef`)
    frame : anyOf(anyOf(:class:`TitleFrame`, string), :class:`ExprRef`)
    limit : anyOf(float, :class:`ExprRef`)
    lineHeight : anyOf(float, :class:`ExprRef`)
    offset : anyOf(float, :class:`ExprRef`)
    orient : anyOf(:class:`TitleOrient`, :class:`ExprRef`)
    subtitleColor : anyOf(anyOf(None, :class:`Color`), :class:`ExprRef`)
    subtitleFont : anyOf(string, :class:`ExprRef`)
    subtitleFontSize : anyOf(float, :class:`ExprRef`)
    subtitleFontStyle : anyOf(:class:`FontStyle`, :class:`ExprRef`)
    subtitleFontWeight : anyOf(:class:`FontWeight`, :class:`ExprRef`)
    subtitleLineHeight : anyOf(float, :class:`ExprRef`)
    subtitlePadding : anyOf(float, :class:`ExprRef`)
    zindex : anyOf(float, :class:`ExprRef`)
    """

    _schema = {'$ref': '#/definitions/BaseTitleNoValueRefs'}

    def __init__(self, align=Undefined, anchor=Undefined, angle=Undefined, aria=Undefined,
                 baseline=Undefined, color=Undefined, dx=Undefined, dy=Undefined, font=Undefined,
                 fontSize=Undefined, fontStyle=Undefined, fontWeight=Undefined, frame=Undefined,
                 limit=Undefined, lineHeight=Undefined, offset=Undefined, orient=Undefined,
                 subtitleColor=Undefined, subtitleFont=Undefined, subtitleFontSize=Undefined,
                 subtitleFontStyle=Undefined, subtitleFontWeight=Undefined,
                 subtitleLineHeight=Undefined, subtitlePadding=Undefined, zindex=Undefined, **kwds):
        # Snapshot the parameters and hand everything off to the schema
        # base class, which performs validation against ``_schema``.
        properties = dict(locals())
        properties.pop('self')
        properties.update(properties.pop('kwds'))
        super(BaseTitleNoValueRefs, self).__init__(**properties)
class BinExtent(VegaLiteSchema):
    """BinExtent schema wrapper.

    anyOf(List([float, float]), :class:`SelectionExtent`)

    Either a two-element ``[min, max]`` list or a selection-based extent.
    """

    _schema = {'$ref': '#/definitions/BinExtent'}

    def __init__(self, *args, **kwds):
        # The base class validates the supplied value against ``_schema``.
        super(BinExtent, self).__init__(*args, **kwds)
class BinParams(VegaLiteSchema):
    """BinParams schema wrapper.

    Mapping(required=[])

    Binning properties or boolean flag for determining whether to bin
    data or not.

    Attributes
    ----------
    anchor : float
        A value in the binned domain at which to anchor the bins
        (default: the minimum bin extent value).
    base : float
        The number base for automatic bin determination (default ``10``).
    binned : boolean
        When ``true``, the input data is treated as already binned.
    divide : List([float, float])
        Scale factors indicating allowable subdivisions
        (default ``[5, 2]``).
    extent : :class:`BinExtent`
        A ``[min, max]`` array giving the range of desired bin values.
    maxbins : float
        Maximum number of bins (default ``6`` for ``row``, ``column``,
        and ``shape`` channels; ``10`` otherwise).
    minstep : float
        A minimum allowable step size.
    nice : boolean
        If true, snap bin boundaries to human-friendly values such as
        multiples of ten (default ``true``).
    step : float
        An exact step size between bins; if provided, options such as
        ``maxbins`` are ignored.
    steps : List(float)
        An array of allowable step sizes to choose from.
    """

    _schema = {'$ref': '#/definitions/BinParams'}

    def __init__(self, anchor=Undefined, base=Undefined, binned=Undefined, divide=Undefined,
                 extent=Undefined, maxbins=Undefined, minstep=Undefined, nice=Undefined,
                 step=Undefined, steps=Undefined, **kwds):
        super(BinParams, self).__init__(
            anchor=anchor, base=base, binned=binned, divide=divide, extent=extent,
            maxbins=maxbins, minstep=minstep, nice=nice, step=step, steps=steps, **kwds)
class Binding(VegaLiteSchema):
    """Binding schema wrapper.

    anyOf(:class:`BindCheckbox`, :class:`BindRadioSelect`, :class:`BindRange`,
    :class:`InputBinding`)

    Union type covering the supported input-element bindings.
    """

    _schema = {'$ref': '#/definitions/Binding'}

    def __init__(self, *args, **kwds):
        # Delegate validation of the union value to the base class.
        super(Binding, self).__init__(*args, **kwds)
class BindCheckbox(Binding):
    """BindCheckbox schema wrapper.

    Mapping(required=[input])

    Attributes
    ----------
    input : string
    debounce : float
    element : :class:`Element`
    name : string
    type : string
    """

    _schema = {'$ref': '#/definitions/BindCheckbox'}

    def __init__(self, input=Undefined, debounce=Undefined, element=Undefined, name=Undefined,
                 type=Undefined, **kwds):
        # ``input`` and ``type`` intentionally shadow builtins: they are
        # the property names required by the schema.
        super(BindCheckbox, self).__init__(
            input=input, debounce=debounce, element=element, name=name, type=type, **kwds)
class BindRadioSelect(Binding):
    """BindRadioSelect schema wrapper.

    Mapping(required=[input, options])

    Attributes
    ----------
    input : enum('radio', 'select')
    options : List(Any)
    debounce : float
    element : :class:`Element`
    labels : List(string)
    name : string
    type : string
    """

    _schema = {'$ref': '#/definitions/BindRadioSelect'}

    def __init__(self, input=Undefined, options=Undefined, debounce=Undefined, element=Undefined,
                 labels=Undefined, name=Undefined, type=Undefined, **kwds):
        # Pass every property straight through to the validating base class.
        super(BindRadioSelect, self).__init__(
            input=input, options=options, debounce=debounce, element=element,
            labels=labels, name=name, type=type, **kwds)
class BindRange(Binding):
    """BindRange schema wrapper.

    Mapping(required=[input])

    Attributes
    ----------
    input : string
    debounce : float
    element : :class:`Element`
    max : float
    min : float
    name : string
    step : float
    type : string
    """

    _schema = {'$ref': '#/definitions/BindRange'}

    def __init__(self, input=Undefined, debounce=Undefined, element=Undefined, max=Undefined,
                 min=Undefined, name=Undefined, step=Undefined, type=Undefined, **kwds):
        # ``input``/``max``/``min``/``type`` mirror the schema property
        # names, so the builtin shadowing is deliberate.
        super(BindRange, self).__init__(
            input=input, debounce=debounce, element=element, max=max,
            min=min, name=name, step=step, type=type, **kwds)
class Blend(VegaLiteSchema):
    """Blend schema wrapper.

    enum(None, 'multiply', 'screen', 'overlay', 'darken', 'lighten', 'color-dodge',
    'color-burn', 'hard-light', 'soft-light', 'difference', 'exclusion', 'hue',
    'saturation', 'color', 'luminosity')

    One of the CSS-style blend-mode values accepted by the schema.
    """

    _schema = {'$ref': '#/definitions/Blend'}

    def __init__(self, *args):
        # Enum membership is checked by the schema base class.
        super(Blend, self).__init__(*args)
class BoxPlotConfig(VegaLiteSchema):
    """BoxPlotConfig schema wrapper.

    Mapping(required=[])

    Attributes
    ----------
    box : anyOf(boolean, :class:`MarkConfigExprOrSignalRef`)
    extent : anyOf(string, float)
        The extent of the whiskers: ``"min-max"`` for min/max whiskers, or
        a number *k* so the whiskers span *[Q1 - k * IQR, Q3 + k * IQR]*
        (default ``1.5``).
    median : anyOf(boolean, :class:`MarkConfigExprOrSignalRef`)
    outliers : anyOf(boolean, :class:`MarkConfigExprOrSignalRef`)
    rule : anyOf(boolean, :class:`MarkConfigExprOrSignalRef`)
    size : float
        Size of the box and median tick of a box plot.
    ticks : anyOf(boolean, :class:`MarkConfigExprOrSignalRef`)
    """

    _schema = {'$ref': '#/definitions/BoxPlotConfig'}

    def __init__(self, box=Undefined, extent=Undefined, median=Undefined, outliers=Undefined,
                 rule=Undefined, size=Undefined, ticks=Undefined, **kwds):
        super(BoxPlotConfig, self).__init__(
            box=box, extent=extent, median=median, outliers=outliers,
            rule=rule, size=size, ticks=ticks, **kwds)
class BrushConfig(VegaLiteSchema):
    """BrushConfig schema wrapper.

    Mapping(required=[])

    Styling for the interval-selection brush mark.

    Attributes
    ----------
    cursor : :class:`Cursor`
        Mouse cursor shown over the interval mark (any valid CSS cursor).
    fill : :class:`Color`
        Fill color of the interval mark (default ``"#333333"``).
    fillOpacity : float
        Fill opacity between ``0`` and ``1`` (default ``0.125``).
    stroke : :class:`Color`
        Stroke color of the interval mark (default ``"#ffffff"``).
    strokeDash : List(float)
        Alternating stroke/space lengths for dashed or dotted lines.
    strokeDashOffset : float
        Pixel offset at which to begin drawing the stroke dash array.
    strokeOpacity : float
        Stroke opacity between ``0`` and ``1``.
    strokeWidth : float
        Stroke width of the interval mark.
    """

    _schema = {'$ref': '#/definitions/BrushConfig'}

    def __init__(self, cursor=Undefined, fill=Undefined, fillOpacity=Undefined, stroke=Undefined,
                 strokeDash=Undefined, strokeDashOffset=Undefined, strokeOpacity=Undefined,
                 strokeWidth=Undefined, **kwds):
        super(BrushConfig, self).__init__(
            cursor=cursor, fill=fill, fillOpacity=fillOpacity, stroke=stroke,
            strokeDash=strokeDash, strokeDashOffset=strokeDashOffset,
            strokeOpacity=strokeOpacity, strokeWidth=strokeWidth, **kwds)
class Color(VegaLiteSchema):
    """Color schema wrapper.

    anyOf(:class:`ColorName`, :class:`HexColor`, string)

    A color expressed as a named color, a hex color, or a string.
    """

    _schema = {'$ref': '#/definitions/Color'}

    def __init__(self, *args, **kwds):
        # The base class validates the value against ``_schema``.
        super(Color, self).__init__(*args, **kwds)
class ColorDef(VegaLiteSchema):
    """ColorDef schema wrapper.

    anyOf(:class:`FieldOrDatumDefWithConditionMarkPropFieldDefGradientstringnull`,
    :class:`FieldOrDatumDefWithConditionDatumDefGradientstringnull`,
    :class:`ValueDefWithConditionMarkPropFieldOrDatumDefGradientstringnull`)

    Union of the field/datum/value definitions usable for color channels.
    """

    _schema = {'$ref': '#/definitions/ColorDef'}

    def __init__(self, *args, **kwds):
        # Validation of the union value happens in the base class.
        super(ColorDef, self).__init__(*args, **kwds)
class ColorName(Color):
    """ColorName schema wrapper.

    An enum of the recognized CSS/HTML named colors — the basic colors
    (``'black'``, ``'white'``, ``'red'``, ``'blue'``, ...) plus the
    extended names from ``'aliceblue'`` through ``'yellowgreen'`` and
    ``'rebeccapurple'``.  See the ``#/definitions/ColorName`` entry of
    the Vega-Lite schema for the complete list of accepted values.
    """

    _schema = {'$ref': '#/definitions/ColorName'}

    def __init__(self, *args):
        # Enum membership is validated by the schema base class.
        super(ColorName, self).__init__(*args)
class ColorScheme(VegaLiteSchema):
    """ColorScheme schema wrapper.

    anyOf(:class:`Categorical`, :class:`SequentialSingleHue`, :class:`SequentialMultiHue`,
    :class:`Diverging`, :class:`Cyclical`)

    Union of the named color-scheme families.
    """

    _schema = {'$ref': '#/definitions/ColorScheme'}

    def __init__(self, *args, **kwds):
        # Delegate validation of the union value to the base class.
        super(ColorScheme, self).__init__(*args, **kwds)
class Categorical(ColorScheme):
    """Categorical schema wrapper.

    enum('accent', 'category10', 'category20', 'category20b', 'category20c', 'dark2',
    'paired', 'pastel1', 'pastel2', 'set1', 'set2', 'set3', 'tableau10', 'tableau20')

    One of the named categorical color schemes.
    """

    _schema = {'$ref': '#/definitions/Categorical'}

    def __init__(self, *args):
        # Enum membership is validated by the schema base class.
        super(Categorical, self).__init__(*args)
class CompositeMark(AnyMark):
    """CompositeMark schema wrapper.

    anyOf(:class:`BoxPlot`, :class:`ErrorBar`, :class:`ErrorBand`)

    Union of the composite mark types.
    """

    _schema = {'$ref': '#/definitions/CompositeMark'}

    def __init__(self, *args, **kwds):
        # Validation of the union value happens in the base class.
        super(CompositeMark, self).__init__(*args, **kwds)
class BoxPlot(CompositeMark):
    """BoxPlot schema wrapper.

    string

    The ``"boxplot"`` composite mark type string.
    """

    _schema = {'$ref': '#/definitions/BoxPlot'}

    def __init__(self, *args):
        # The base class validates the string against ``_schema``.
        super(BoxPlot, self).__init__(*args)
class CompositeMarkDef(AnyMark):
    """CompositeMarkDef schema wrapper.

    anyOf(:class:`BoxPlotDef`, :class:`ErrorBarDef`, :class:`ErrorBandDef`)

    Union of the composite mark definition objects.
    """

    _schema = {'$ref': '#/definitions/CompositeMarkDef'}

    def __init__(self, *args, **kwds):
        # Delegate validation of the union value to the base class.
        super(CompositeMarkDef, self).__init__(*args, **kwds)
class BoxPlotDef(CompositeMarkDef):
    """BoxPlotDef schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : :class:`BoxPlot`
        The mark type; for this definition, the ``"boxplot"`` composite
        mark type.
    box : anyOf(boolean, :class:`MarkConfigExprOrSignalRef`)
    clip : boolean
        Whether the composite mark is clipped to the enclosing group's
        width and height.
    color : anyOf(:class:`Color`, :class:`Gradient`, :class:`ExprRef`)
        Default color (default ``"#4682b4"``); overridden by ``fill``
        and ``stroke``, and not usable in a style config.
    extent : anyOf(string, float)
        Whisker extent: ``"min-max"`` or a number *k* so whiskers span
        *[Q1 - k * IQR, Q3 + k * IQR]* (default ``1.5``).
    median : anyOf(boolean, :class:`MarkConfigExprOrSignalRef`)
    opacity : float
        The opacity (value between [0,1]) of the mark.
    orient : :class:`Orientation`
        Orientation of the box plot; normally inferred from the x/y
        field types, explicit only when ambiguous (default
        ``"vertical"``).
    outliers : anyOf(boolean, :class:`MarkConfigExprOrSignalRef`)
    rule : anyOf(boolean, :class:`MarkConfigExprOrSignalRef`)
    size : float
        Size of the box and median tick of a box plot.
    ticks : anyOf(boolean, :class:`MarkConfigExprOrSignalRef`)
    """

    _schema = {'$ref': '#/definitions/BoxPlotDef'}

    def __init__(self, type=Undefined, box=Undefined, clip=Undefined, color=Undefined, extent=Undefined,
                 median=Undefined, opacity=Undefined, orient=Undefined, outliers=Undefined,
                 rule=Undefined, size=Undefined, ticks=Undefined, **kwds):
        # ``type`` shadows the builtin on purpose: it is the schema's
        # required property name.
        super(BoxPlotDef, self).__init__(
            type=type, box=box, clip=clip, color=color, extent=extent, median=median,
            opacity=opacity, orient=orient, outliers=outliers, rule=rule, size=size,
            ticks=ticks, **kwds)
class CompositionConfig(VegaLiteSchema):
    """CompositionConfig schema wrapper.

    Mapping(required=[])

    Attributes
    ----------
    columns : float
        Number of columns in the view composition layout.  Default
        ``undefined`` (a single row, equivalent to ``hconcat`` /
        the ``column`` channel); ``1`` is equivalent to ``vconcat`` /
        the ``row`` channel.  Applies only to the wrappable ``concat``
        operator and to ``facet``/``repeat`` with a single
        field/repetition definition.
    spacing : float
        Default spacing in pixels between composed sub-views
        (default ``20``).
    """

    _schema = {'$ref': '#/definitions/CompositionConfig'}

    def __init__(self, columns=Undefined, spacing=Undefined, **kwds):
        # Straight pass-through to the validating base class.
        super(CompositionConfig, self).__init__(columns=columns, spacing=spacing, **kwds)
class ConditionalAxisColor(VegaLiteSchema):
    """ConditionalAxisColor schema wrapper.

    Accepts a mapping requiring ``condition`` and ``value`` keys, or one
    requiring ``condition`` and ``expr`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalAxisColor'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalAxisLabelAlign(VegaLiteSchema):
    """ConditionalAxisLabelAlign schema wrapper.

    Accepts a mapping requiring ``condition`` and ``value`` keys, or one
    requiring ``condition`` and ``expr`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalAxisLabelAlign'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalAxisLabelBaseline(VegaLiteSchema):
    """ConditionalAxisLabelBaseline schema wrapper.

    Accepts a mapping requiring ``condition`` and ``value`` keys, or one
    requiring ``condition`` and ``expr`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalAxisLabelBaseline'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalAxisLabelFontStyle(VegaLiteSchema):
    """ConditionalAxisLabelFontStyle schema wrapper.

    Accepts a mapping requiring ``condition`` and ``value`` keys, or one
    requiring ``condition`` and ``expr`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalAxisLabelFontStyle'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalAxisLabelFontWeight(VegaLiteSchema):
    """ConditionalAxisLabelFontWeight schema wrapper.

    Accepts a mapping requiring ``condition`` and ``value`` keys, or one
    requiring ``condition`` and ``expr`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalAxisLabelFontWeight'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalAxisNumber(VegaLiteSchema):
    """ConditionalAxisNumber schema wrapper.

    Accepts a mapping requiring ``condition`` and ``value`` keys, or one
    requiring ``condition`` and ``expr`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalAxisNumber'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalAxisNumberArray(VegaLiteSchema):
    """ConditionalAxisNumberArray schema wrapper.

    Accepts a mapping requiring ``condition`` and ``value`` keys, or one
    requiring ``condition`` and ``expr`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalAxisNumberArray'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalAxisPropertyAlignnull(VegaLiteSchema):
    """ConditionalAxisPropertyAlignnull schema wrapper.

    Accepts a mapping requiring ``condition`` and ``value`` keys, or one
    requiring ``condition`` and ``expr`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalAxisProperty<(Align|null)>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalAxisPropertyColornull(VegaLiteSchema):
    """ConditionalAxisPropertyColornull schema wrapper.

    Accepts a mapping requiring ``condition`` and ``value`` keys, or one
    requiring ``condition`` and ``expr`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalAxisProperty<(Color|null)>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalAxisPropertyFontStylenull(VegaLiteSchema):
    """ConditionalAxisPropertyFontStylenull schema wrapper.

    Accepts a mapping requiring ``condition`` and ``value`` keys, or one
    requiring ``condition`` and ``expr`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalAxisProperty<(FontStyle|null)>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalAxisPropertyFontWeightnull(VegaLiteSchema):
    """ConditionalAxisPropertyFontWeightnull schema wrapper.

    Accepts a mapping requiring ``condition`` and ``value`` keys, or one
    requiring ``condition`` and ``expr`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalAxisProperty<(FontWeight|null)>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalAxisPropertyTextBaselinenull(VegaLiteSchema):
    """ConditionalAxisPropertyTextBaselinenull schema wrapper.

    Accepts a mapping requiring ``condition`` and ``value`` keys, or one
    requiring ``condition`` and ``expr`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalAxisProperty<(TextBaseline|null)>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalAxisPropertynumberArraynull(VegaLiteSchema):
    """ConditionalAxisPropertynumberArraynull schema wrapper.

    Accepts a mapping requiring ``condition`` and ``value`` keys, or one
    requiring ``condition`` and ``expr`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalAxisProperty<(number[]|null)>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalAxisPropertynumbernull(VegaLiteSchema):
    """ConditionalAxisPropertynumbernull schema wrapper.

    Accepts a mapping requiring ``condition`` and ``value`` keys, or one
    requiring ``condition`` and ``expr`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalAxisProperty<(number|null)>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalAxisPropertystringnull(VegaLiteSchema):
    """ConditionalAxisPropertystringnull schema wrapper.

    Accepts a mapping requiring ``condition`` and ``value`` keys, or one
    requiring ``condition`` and ``expr`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalAxisProperty<(string|null)>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalAxisString(VegaLiteSchema):
    """ConditionalAxisString schema wrapper.

    Accepts a mapping requiring ``condition`` and ``value`` keys, or one
    requiring ``condition`` and ``expr`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalAxisString'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalMarkPropFieldOrDatumDef(VegaLiteSchema):
    """ConditionalMarkPropFieldOrDatumDef schema wrapper.

    One of :class:`ConditionalPredicateMarkPropFieldOrDatumDef` or
    :class:`ConditionalSelectionMarkPropFieldOrDatumDef`.
    """
    _schema = {'$ref': '#/definitions/ConditionalMarkPropFieldOrDatumDef'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalMarkPropFieldOrDatumDefTypeForShape(VegaLiteSchema):
    """ConditionalMarkPropFieldOrDatumDefTypeForShape schema wrapper.

    One of :class:`ConditionalPredicateMarkPropFieldOrDatumDefTypeForShape` or
    :class:`ConditionalSelectionMarkPropFieldOrDatumDefTypeForShape`.
    """
    _schema = {'$ref': '#/definitions/ConditionalMarkPropFieldOrDatumDef<TypeForShape>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalPredicateMarkPropFieldOrDatumDef(ConditionalMarkPropFieldOrDatumDef):
    """ConditionalPredicateMarkPropFieldOrDatumDef schema wrapper.

    Either of two mappings, both requiring a ``test`` key.
    """
    _schema = {'$ref': '#/definitions/ConditionalPredicate<MarkPropFieldOrDatumDef>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalPredicateMarkPropFieldOrDatumDefTypeForShape(ConditionalMarkPropFieldOrDatumDefTypeForShape):
    """ConditionalPredicateMarkPropFieldOrDatumDefTypeForShape schema wrapper.

    Either of two mappings, both requiring a ``test`` key.
    """
    _schema = {'$ref': '#/definitions/ConditionalPredicate<MarkPropFieldOrDatumDef<TypeForShape>>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalPredicateValueDefAlignnullExprRef(VegaLiteSchema):
    """ConditionalPredicateValueDefAlignnullExprRef schema wrapper.

    Accepts a mapping requiring ``test`` and ``value`` keys, or one requiring
    ``expr`` and ``test`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalPredicate<(ValueDef<(Align|null)>|ExprRef)>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalPredicateValueDefColornullExprRef(VegaLiteSchema):
    """ConditionalPredicateValueDefColornullExprRef schema wrapper.

    Accepts a mapping requiring ``test`` and ``value`` keys, or one requiring
    ``expr`` and ``test`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalPredicate<(ValueDef<(Color|null)>|ExprRef)>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalPredicateValueDefFontStylenullExprRef(VegaLiteSchema):
    """ConditionalPredicateValueDefFontStylenullExprRef schema wrapper.

    Accepts a mapping requiring ``test`` and ``value`` keys, or one requiring
    ``expr`` and ``test`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalPredicate<(ValueDef<(FontStyle|null)>|ExprRef)>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalPredicateValueDefFontWeightnullExprRef(VegaLiteSchema):
    """ConditionalPredicateValueDefFontWeightnullExprRef schema wrapper.

    Accepts a mapping requiring ``test`` and ``value`` keys, or one requiring
    ``expr`` and ``test`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalPredicate<(ValueDef<(FontWeight|null)>|ExprRef)>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalPredicateValueDefTextBaselinenullExprRef(VegaLiteSchema):
    """ConditionalPredicateValueDefTextBaselinenullExprRef schema wrapper.

    Accepts a mapping requiring ``test`` and ``value`` keys, or one requiring
    ``expr`` and ``test`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalPredicate<(ValueDef<(TextBaseline|null)>|ExprRef)>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalPredicateValueDefnumberArraynullExprRef(VegaLiteSchema):
    """ConditionalPredicateValueDefnumberArraynullExprRef schema wrapper.

    Accepts a mapping requiring ``test`` and ``value`` keys, or one requiring
    ``expr`` and ``test`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalPredicate<(ValueDef<(number[]|null)>|ExprRef)>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalPredicateValueDefnumbernullExprRef(VegaLiteSchema):
    """ConditionalPredicateValueDefnumbernullExprRef schema wrapper.

    Accepts a mapping requiring ``test`` and ``value`` keys, or one requiring
    ``expr`` and ``test`` keys.
    """
    _schema = {'$ref': '#/definitions/ConditionalPredicate<(ValueDef<(number|null)>|ExprRef)>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalSelectionMarkPropFieldOrDatumDef(ConditionalMarkPropFieldOrDatumDef):
    """ConditionalSelectionMarkPropFieldOrDatumDef schema wrapper.

    Either of two mappings, both requiring a ``selection`` key.
    """
    _schema = {'$ref': '#/definitions/ConditionalSelection<MarkPropFieldOrDatumDef>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalSelectionMarkPropFieldOrDatumDefTypeForShape(ConditionalMarkPropFieldOrDatumDefTypeForShape):
    """ConditionalSelectionMarkPropFieldOrDatumDefTypeForShape schema wrapper.

    Either of two mappings, both requiring a ``selection`` key.
    """
    _schema = {'$ref': '#/definitions/ConditionalSelection<MarkPropFieldOrDatumDef<TypeForShape>>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalStringFieldDef(VegaLiteSchema):
    """ConditionalStringFieldDef schema wrapper.

    One of :class:`ConditionalPredicateStringFieldDef` or
    :class:`ConditionalSelectionStringFieldDef`.
    """
    _schema = {'$ref': '#/definitions/ConditionalStringFieldDef'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class ConditionalPredicateStringFieldDef(ConditionalStringFieldDef):
    """ConditionalPredicateStringFieldDef schema wrapper.

    Mapping(required=[test])

    Attributes
    ----------
    test : :class:`PredicateComposition`
        Predicate for triggering the condition.
    aggregate : :class:`Aggregate`
        Aggregation function for the field (e.g. ``"mean"``, ``"sum"``,
        ``"median"``, ``"min"``, ``"max"``, ``"count"``).
        **Default value:** ``undefined`` (None).
    band : float
        For rect-based marks (``rect``, ``bar``, ``image``), mark size relative
        to the bandwidth of band scales, bins or time units (``1`` = full
        bandwidth/interval, ``0.5`` = half).  For other marks, relative
        position on a band of a stacked, binned, time unit or band scale
        (``0`` = beginning of the band, ``0.5`` = middle).
    bin : anyOf(boolean, :class:`BinParams`, string, None)
        A flag for binning a ``quantitative`` field, an object defining binning
        parameters, or ``"binned"`` to indicate that the data for the ``x`` or
        ``y`` channel are already binned before import (map the bin-start field
        to ``x``/``y`` and the bin-end field to ``x2``/``y2``).
        **Default value:** ``false``.
    field : :class:`Field`
        **Required.** The name of the field from which to pull a data value, or
        an object defining iterated values from the ``repeat`` operator.  Dots
        and brackets access nested objects; escape them with ``\\`` when the
        names are flat.  Not required when ``aggregate`` is ``count``.
    format : anyOf(string, :class:`Dictunknown`)
        With the default ``"number"``/``"time"`` format types, the D3 text
        formatting pattern for guide labels and text marks.  With a custom
        ``formatType``, the value passed as ``format`` alongside
        ``datum.value`` to the registered function.
    formatType : string
        The format type for labels: ``"number"``, ``"time"``, or a registered
        custom format type.  Defaults to ``"time"`` for temporal fields (and
        ordinal/nominal fields with ``timeUnit``), ``"number"`` otherwise.
    labelExpr : string
        Vega expression for customizing label text; the label text and value
        are available via the ``label`` and ``value`` properties of the axis's
        backing ``datum`` object.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit (e.g. ``year``, ``yearmonth``, ``month``, ``hours``) for a
        temporal field, or a temporal field that gets cast as ordinal.
        **Default value:** ``undefined`` (None).
    title : anyOf(:class:`Text`, None)
        A title for the field; ``null`` removes the title.  By default derived
        from the field's name and transformation function; an explicit
        axis/header/legend ``title`` takes precedence.
    type : :class:`StandardType`
        The type of measurement (``"quantitative"``, ``"temporal"``,
        ``"ordinal"``, or ``"nominal"``) for the encoded field or constant
        value (``datum``); may also be ``"geojson"`` for ``geoshape``
        encodings.  Vega-Lite infers the type in many cases; see the Vega-Lite
        type documentation for the inference and casting rules.
    """
    _schema = {'$ref': '#/definitions/ConditionalPredicate<StringFieldDef>'}

    def __init__(self, test=Undefined, aggregate=Undefined, band=Undefined, bin=Undefined,
                 field=Undefined, format=Undefined, formatType=Undefined, labelExpr=Undefined,
                 timeUnit=Undefined, title=Undefined, type=Undefined, **kwds):
        # Forward every field-definition property to the schema base class,
        # which validates the mapping against the JSON schema.
        super().__init__(test=test, aggregate=aggregate, band=band, bin=bin, field=field,
                         format=format, formatType=formatType, labelExpr=labelExpr,
                         timeUnit=timeUnit, title=title, type=type, **kwds)
class ConditionalSelectionStringFieldDef(ConditionalStringFieldDef):
    """ConditionalSelectionStringFieldDef schema wrapper.

    Mapping(required=[selection])

    Attributes
    ----------
    selection : :class:`SelectionComposition`
        A selection name, or a series of composed selections.
    aggregate : :class:`Aggregate`
        Aggregation function for the field (e.g. ``"mean"``, ``"sum"``,
        ``"median"``, ``"min"``, ``"max"``, ``"count"``).
        **Default value:** ``undefined`` (None).
    band : float
        For rect-based marks (``rect``, ``bar``, ``image``), mark size relative
        to the bandwidth of band scales, bins or time units (``1`` = full
        bandwidth/interval, ``0.5`` = half).  For other marks, relative
        position on a band of a stacked, binned, time unit or band scale
        (``0`` = beginning of the band, ``0.5`` = middle).
    bin : anyOf(boolean, :class:`BinParams`, string, None)
        A flag for binning a ``quantitative`` field, an object defining binning
        parameters, or ``"binned"`` to indicate that the data for the ``x`` or
        ``y`` channel are already binned before import (map the bin-start field
        to ``x``/``y`` and the bin-end field to ``x2``/``y2``).
        **Default value:** ``false``.
    field : :class:`Field`
        **Required.** The name of the field from which to pull a data value, or
        an object defining iterated values from the ``repeat`` operator.  Dots
        and brackets access nested objects; escape them with ``\\`` when the
        names are flat.  Not required when ``aggregate`` is ``count``.
    format : anyOf(string, :class:`Dictunknown`)
        With the default ``"number"``/``"time"`` format types, the D3 text
        formatting pattern for guide labels and text marks.  With a custom
        ``formatType``, the value passed as ``format`` alongside
        ``datum.value`` to the registered function.
    formatType : string
        The format type for labels: ``"number"``, ``"time"``, or a registered
        custom format type.  Defaults to ``"time"`` for temporal fields (and
        ordinal/nominal fields with ``timeUnit``), ``"number"`` otherwise.
    labelExpr : string
        Vega expression for customizing label text; the label text and value
        are available via the ``label`` and ``value`` properties of the axis's
        backing ``datum`` object.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit (e.g. ``year``, ``yearmonth``, ``month``, ``hours``) for a
        temporal field, or a temporal field that gets cast as ordinal.
        **Default value:** ``undefined`` (None).
    title : anyOf(:class:`Text`, None)
        A title for the field; ``null`` removes the title.  By default derived
        from the field's name and transformation function; an explicit
        axis/header/legend ``title`` takes precedence.
    type : :class:`StandardType`
        The type of measurement (``"quantitative"``, ``"temporal"``,
        ``"ordinal"``, or ``"nominal"``) for the encoded field or constant
        value (``datum``); may also be ``"geojson"`` for ``geoshape``
        encodings.  Vega-Lite infers the type in many cases; see the Vega-Lite
        type documentation for the inference and casting rules.
    """
    _schema = {'$ref': '#/definitions/ConditionalSelection<StringFieldDef>'}

    def __init__(self, selection=Undefined, aggregate=Undefined, band=Undefined, bin=Undefined,
                 field=Undefined, format=Undefined, formatType=Undefined, labelExpr=Undefined,
                 timeUnit=Undefined, title=Undefined, type=Undefined, **kwds):
        # Forward every field-definition property to the schema base class,
        # which validates the mapping against the JSON schema.
        super().__init__(selection=selection, aggregate=aggregate, band=band, bin=bin,
                         field=field, format=format, formatType=formatType,
                         labelExpr=labelExpr, timeUnit=timeUnit, title=title, type=type,
                         **kwds)
class ConditionalValueDefGradientstringnullExprRef(VegaLiteSchema):
    """ConditionalValueDefGradientstringnullExprRef schema wrapper.

    Union type: anyOf(:class:`ConditionalPredicateValueDefGradientstringnullExprRef`,
    :class:`ConditionalSelectionValueDefGradientstringnullExprRef`)
    """

    _schema = {'$ref': '#/definitions/ConditionalValueDef<(Gradient|string|null|ExprRef)>'}

    def __init__(self, *args, **kwds):
        super().__init__(*args, **kwds)
class ConditionalPredicateValueDefGradientstringnullExprRef(ConditionalValueDefGradientstringnullExprRef):
    """ConditionalPredicateValueDefGradientstringnullExprRef schema wrapper.

    Mapping(required=[test, value])

    Attributes
    ----------
    test : :class:`PredicateComposition`
        Predicate for triggering the condition
    value : anyOf(:class:`Gradient`, string, None, :class:`ExprRef`)
        A constant value in visual domain (e.g., ``"red"`` / ``"#0099ff"`` / `gradient
        definition <https://vega.github.io/vega-lite/docs/types.html#gradient>`__ for
        color, values between ``0`` to ``1`` for opacity).
    """

    _schema = {'$ref': '#/definitions/ConditionalPredicate<ValueDef<(Gradient|string|null|ExprRef)>>'}

    def __init__(self, test=Undefined, value=Undefined, **kwds):
        super().__init__(test=test, value=value, **kwds)
class ConditionalSelectionValueDefGradientstringnullExprRef(ConditionalValueDefGradientstringnullExprRef):
    """ConditionalSelectionValueDefGradientstringnullExprRef schema wrapper.

    Mapping(required=[selection, value])

    Attributes
    ----------
    selection : :class:`SelectionComposition`
        A `selection name <https://vega.github.io/vega-lite/docs/selection.html>`__ or a
        series of `composed selections
        <https://vega.github.io/vega-lite/docs/selection.html#compose>`__.
    value : anyOf(:class:`Gradient`, string, None, :class:`ExprRef`)
        A constant value in visual domain (e.g., ``"red"`` / ``"#0099ff"`` / `gradient
        definition <https://vega.github.io/vega-lite/docs/types.html#gradient>`__ for
        color, values between ``0`` to ``1`` for opacity).
    """

    _schema = {'$ref': '#/definitions/ConditionalSelection<ValueDef<(Gradient|string|null|ExprRef)>>'}

    def __init__(self, selection=Undefined, value=Undefined, **kwds):
        super().__init__(selection=selection, value=value, **kwds)
class ConditionalValueDefTextExprRef(VegaLiteSchema):
    """ConditionalValueDefTextExprRef schema wrapper.

    Union type: anyOf(:class:`ConditionalPredicateValueDefTextExprRef`,
    :class:`ConditionalSelectionValueDefTextExprRef`)
    """

    _schema = {'$ref': '#/definitions/ConditionalValueDef<(Text|ExprRef)>'}

    def __init__(self, *args, **kwds):
        super().__init__(*args, **kwds)
class ConditionalPredicateValueDefTextExprRef(ConditionalValueDefTextExprRef):
    """ConditionalPredicateValueDefTextExprRef schema wrapper.

    Mapping(required=[test, value])

    Attributes
    ----------
    test : :class:`PredicateComposition`
        Predicate for triggering the condition
    value : anyOf(:class:`Text`, :class:`ExprRef`)
        A constant value in visual domain (e.g., ``"red"`` / ``"#0099ff"`` / `gradient
        definition <https://vega.github.io/vega-lite/docs/types.html#gradient>`__ for
        color, values between ``0`` to ``1`` for opacity).
    """

    _schema = {'$ref': '#/definitions/ConditionalPredicate<ValueDef<(Text|ExprRef)>>'}

    def __init__(self, test=Undefined, value=Undefined, **kwds):
        super().__init__(test=test, value=value, **kwds)
class ConditionalSelectionValueDefTextExprRef(ConditionalValueDefTextExprRef):
    """ConditionalSelectionValueDefTextExprRef schema wrapper.

    Mapping(required=[selection, value])

    Attributes
    ----------
    selection : :class:`SelectionComposition`
        A `selection name <https://vega.github.io/vega-lite/docs/selection.html>`__ or a
        series of `composed selections
        <https://vega.github.io/vega-lite/docs/selection.html#compose>`__.
    value : anyOf(:class:`Text`, :class:`ExprRef`)
        A constant value in visual domain (e.g., ``"red"`` / ``"#0099ff"`` / `gradient
        definition <https://vega.github.io/vega-lite/docs/types.html#gradient>`__ for
        color, values between ``0`` to ``1`` for opacity).
    """

    _schema = {'$ref': '#/definitions/ConditionalSelection<ValueDef<(Text|ExprRef)>>'}

    def __init__(self, selection=Undefined, value=Undefined, **kwds):
        super().__init__(selection=selection, value=value, **kwds)
class ConditionalValueDefnumber(VegaLiteSchema):
    """ConditionalValueDefnumber schema wrapper.

    Union type: anyOf(:class:`ConditionalPredicateValueDefnumber`,
    :class:`ConditionalSelectionValueDefnumber`)
    """

    _schema = {'$ref': '#/definitions/ConditionalValueDef<number>'}

    def __init__(self, *args, **kwds):
        super().__init__(*args, **kwds)
class ConditionalPredicateValueDefnumber(ConditionalValueDefnumber):
    """ConditionalPredicateValueDefnumber schema wrapper.

    Mapping(required=[test, value])

    Attributes
    ----------
    test : :class:`PredicateComposition`
        Predicate for triggering the condition
    value : float
        A constant value in visual domain (e.g., ``"red"`` / ``"#0099ff"`` / `gradient
        definition <https://vega.github.io/vega-lite/docs/types.html#gradient>`__ for
        color, values between ``0`` to ``1`` for opacity).
    """

    _schema = {'$ref': '#/definitions/ConditionalPredicate<ValueDef<number>>'}

    def __init__(self, test=Undefined, value=Undefined, **kwds):
        super().__init__(test=test, value=value, **kwds)
class ConditionalSelectionValueDefnumber(ConditionalValueDefnumber):
    """ConditionalSelectionValueDefnumber schema wrapper.

    Mapping(required=[selection, value])

    Attributes
    ----------
    selection : :class:`SelectionComposition`
        A `selection name <https://vega.github.io/vega-lite/docs/selection.html>`__ or a
        series of `composed selections
        <https://vega.github.io/vega-lite/docs/selection.html#compose>`__.
    value : float
        A constant value in visual domain (e.g., ``"red"`` / ``"#0099ff"`` / `gradient
        definition <https://vega.github.io/vega-lite/docs/types.html#gradient>`__ for
        color, values between ``0`` to ``1`` for opacity).
    """

    _schema = {'$ref': '#/definitions/ConditionalSelection<ValueDef<number>>'}

    def __init__(self, selection=Undefined, value=Undefined, **kwds):
        super().__init__(selection=selection, value=value, **kwds)
class ConditionalValueDefnumberArrayExprRef(VegaLiteSchema):
    """ConditionalValueDefnumberArrayExprRef schema wrapper.

    Union type: anyOf(:class:`ConditionalPredicateValueDefnumberArrayExprRef`,
    :class:`ConditionalSelectionValueDefnumberArrayExprRef`)
    """

    _schema = {'$ref': '#/definitions/ConditionalValueDef<(number[]|ExprRef)>'}

    def __init__(self, *args, **kwds):
        super().__init__(*args, **kwds)
class ConditionalPredicateValueDefnumberArrayExprRef(ConditionalValueDefnumberArrayExprRef):
    """ConditionalPredicateValueDefnumberArrayExprRef schema wrapper.

    Mapping(required=[test, value])

    Attributes
    ----------
    test : :class:`PredicateComposition`
        Predicate for triggering the condition
    value : anyOf(List(float), :class:`ExprRef`)
        A constant value in visual domain (e.g., ``"red"`` / ``"#0099ff"`` / `gradient
        definition <https://vega.github.io/vega-lite/docs/types.html#gradient>`__ for
        color, values between ``0`` to ``1`` for opacity).
    """

    _schema = {'$ref': '#/definitions/ConditionalPredicate<ValueDef<(number[]|ExprRef)>>'}

    def __init__(self, test=Undefined, value=Undefined, **kwds):
        super().__init__(test=test, value=value, **kwds)
class ConditionalSelectionValueDefnumberArrayExprRef(ConditionalValueDefnumberArrayExprRef):
    """ConditionalSelectionValueDefnumberArrayExprRef schema wrapper.

    Mapping(required=[selection, value])

    Attributes
    ----------
    selection : :class:`SelectionComposition`
        A `selection name <https://vega.github.io/vega-lite/docs/selection.html>`__ or a
        series of `composed selections
        <https://vega.github.io/vega-lite/docs/selection.html#compose>`__.
    value : anyOf(List(float), :class:`ExprRef`)
        A constant value in visual domain (e.g., ``"red"`` / ``"#0099ff"`` / `gradient
        definition <https://vega.github.io/vega-lite/docs/types.html#gradient>`__ for
        color, values between ``0`` to ``1`` for opacity).
    """

    _schema = {'$ref': '#/definitions/ConditionalSelection<ValueDef<(number[]|ExprRef)>>'}

    def __init__(self, selection=Undefined, value=Undefined, **kwds):
        super().__init__(selection=selection, value=value, **kwds)
class ConditionalValueDefnumberExprRef(VegaLiteSchema):
    """ConditionalValueDefnumberExprRef schema wrapper.

    Union type: anyOf(:class:`ConditionalPredicateValueDefnumberExprRef`,
    :class:`ConditionalSelectionValueDefnumberExprRef`)
    """

    _schema = {'$ref': '#/definitions/ConditionalValueDef<(number|ExprRef)>'}

    def __init__(self, *args, **kwds):
        super().__init__(*args, **kwds)
class ConditionalPredicateValueDefnumberExprRef(ConditionalValueDefnumberExprRef):
    """ConditionalPredicateValueDefnumberExprRef schema wrapper.

    Mapping(required=[test, value])

    Attributes
    ----------
    test : :class:`PredicateComposition`
        Predicate for triggering the condition
    value : anyOf(float, :class:`ExprRef`)
        A constant value in visual domain (e.g., ``"red"`` / ``"#0099ff"`` / `gradient
        definition <https://vega.github.io/vega-lite/docs/types.html#gradient>`__ for
        color, values between ``0`` to ``1`` for opacity).
    """

    _schema = {'$ref': '#/definitions/ConditionalPredicate<ValueDef<(number|ExprRef)>>'}

    def __init__(self, test=Undefined, value=Undefined, **kwds):
        super().__init__(test=test, value=value, **kwds)
class ConditionalSelectionValueDefnumberExprRef(ConditionalValueDefnumberExprRef):
    """ConditionalSelectionValueDefnumberExprRef schema wrapper.

    Mapping(required=[selection, value])

    Attributes
    ----------
    selection : :class:`SelectionComposition`
        A `selection name <https://vega.github.io/vega-lite/docs/selection.html>`__ or a
        series of `composed selections
        <https://vega.github.io/vega-lite/docs/selection.html#compose>`__.
    value : anyOf(float, :class:`ExprRef`)
        A constant value in visual domain (e.g., ``"red"`` / ``"#0099ff"`` / `gradient
        definition <https://vega.github.io/vega-lite/docs/types.html#gradient>`__ for
        color, values between ``0`` to ``1`` for opacity).
    """

    _schema = {'$ref': '#/definitions/ConditionalSelection<ValueDef<(number|ExprRef)>>'}

    def __init__(self, selection=Undefined, value=Undefined, **kwds):
        super().__init__(selection=selection, value=value, **kwds)
class ConditionalValueDefstringExprRef(VegaLiteSchema):
    """ConditionalValueDefstringExprRef schema wrapper.

    Union type: anyOf(:class:`ConditionalPredicateValueDefstringExprRef`,
    :class:`ConditionalSelectionValueDefstringExprRef`)
    """

    _schema = {'$ref': '#/definitions/ConditionalValueDef<(string|ExprRef)>'}

    def __init__(self, *args, **kwds):
        super().__init__(*args, **kwds)
class ConditionalPredicateValueDefstringExprRef(ConditionalValueDefstringExprRef):
    """ConditionalPredicateValueDefstringExprRef schema wrapper.

    Mapping(required=[test, value])

    Attributes
    ----------
    test : :class:`PredicateComposition`
        Predicate for triggering the condition
    value : anyOf(string, :class:`ExprRef`)
        A constant value in visual domain (e.g., ``"red"`` / ``"#0099ff"`` / `gradient
        definition <https://vega.github.io/vega-lite/docs/types.html#gradient>`__ for
        color, values between ``0`` to ``1`` for opacity).
    """

    _schema = {'$ref': '#/definitions/ConditionalPredicate<ValueDef<(string|ExprRef)>>'}

    def __init__(self, test=Undefined, value=Undefined, **kwds):
        super().__init__(test=test, value=value, **kwds)
class ConditionalSelectionValueDefstringExprRef(ConditionalValueDefstringExprRef):
    """ConditionalSelectionValueDefstringExprRef schema wrapper.

    Mapping(required=[selection, value])

    Attributes
    ----------
    selection : :class:`SelectionComposition`
        A `selection name <https://vega.github.io/vega-lite/docs/selection.html>`__ or a
        series of `composed selections
        <https://vega.github.io/vega-lite/docs/selection.html#compose>`__.
    value : anyOf(string, :class:`ExprRef`)
        A constant value in visual domain (e.g., ``"red"`` / ``"#0099ff"`` / `gradient
        definition <https://vega.github.io/vega-lite/docs/types.html#gradient>`__ for
        color, values between ``0`` to ``1`` for opacity).
    """

    _schema = {'$ref': '#/definitions/ConditionalSelection<ValueDef<(string|ExprRef)>>'}

    def __init__(self, selection=Undefined, value=Undefined, **kwds):
        super().__init__(selection=selection, value=value, **kwds)
class ConditionalValueDefstringnullExprRef(VegaLiteSchema):
    """ConditionalValueDefstringnullExprRef schema wrapper.

    Union type: anyOf(:class:`ConditionalPredicateValueDefstringnullExprRef`,
    :class:`ConditionalSelectionValueDefstringnullExprRef`)
    """

    _schema = {'$ref': '#/definitions/ConditionalValueDef<(string|null|ExprRef)>'}

    def __init__(self, *args, **kwds):
        super().__init__(*args, **kwds)
class ConditionalPredicateValueDefstringnullExprRef(ConditionalValueDefstringnullExprRef):
    """ConditionalPredicateValueDefstringnullExprRef schema wrapper.

    Mapping(required=[test, value])

    Attributes
    ----------
    test : :class:`PredicateComposition`
        Predicate for triggering the condition
    value : anyOf(string, None, :class:`ExprRef`)
        A constant value in visual domain (e.g., ``"red"`` / ``"#0099ff"`` / `gradient
        definition <https://vega.github.io/vega-lite/docs/types.html#gradient>`__ for
        color, values between ``0`` to ``1`` for opacity).
    """

    _schema = {'$ref': '#/definitions/ConditionalPredicate<ValueDef<(string|null|ExprRef)>>'}

    def __init__(self, test=Undefined, value=Undefined, **kwds):
        super().__init__(test=test, value=value, **kwds)
class ConditionalSelectionValueDefstringnullExprRef(ConditionalValueDefstringnullExprRef):
    """ConditionalSelectionValueDefstringnullExprRef schema wrapper.

    Mapping(required=[selection, value])

    Attributes
    ----------
    selection : :class:`SelectionComposition`
        A `selection name <https://vega.github.io/vega-lite/docs/selection.html>`__ or a
        series of `composed selections
        <https://vega.github.io/vega-lite/docs/selection.html#compose>`__.
    value : anyOf(string, None, :class:`ExprRef`)
        A constant value in visual domain (e.g., ``"red"`` / ``"#0099ff"`` / `gradient
        definition <https://vega.github.io/vega-lite/docs/types.html#gradient>`__ for
        color, values between ``0`` to ``1`` for opacity).
    """

    _schema = {'$ref': '#/definitions/ConditionalSelection<ValueDef<(string|null|ExprRef)>>'}

    def __init__(self, selection=Undefined, value=Undefined, **kwds):
        super().__init__(selection=selection, value=value, **kwds)
class Config(VegaLiteSchema):
    """Config schema wrapper

    Mapping(required=[])

    Attributes
    ----------
    arc : :class:`RectConfig`
        Arc-specific Config
    area : :class:`AreaConfig`
        Area-Specific Config
    aria : boolean
        A boolean flag indicating if ARIA default attributes should be included for marks
        and guides (SVG output only). If false, the ``"aria-hidden"`` attribute will be set
        for all guides, removing them from the ARIA accessibility tree and Vega-Lite will
        not generate default descriptions for marks.
        **Default value:** ``true``.
    autosize : anyOf(:class:`AutosizeType`, :class:`AutoSizeParams`)
        How the visualization size should be determined. If a string, should be one of
        ``"pad"``, ``"fit"`` or ``"none"``. Object values can additionally specify
        parameters for content sizing and automatic resizing.
        **Default value** : ``pad``
    axis : :class:`AxisConfig`
        Axis configuration, which determines default properties for all ``x`` and ``y``
        `axes <https://vega.github.io/vega-lite/docs/axis.html>`__. For a full list of axis
        configuration options, please see the `corresponding section of the axis
        documentation <https://vega.github.io/vega-lite/docs/axis.html#config>`__.
    axisBand : :class:`AxisConfig`
        Config for axes with "band" scales.
    axisBottom : :class:`AxisConfig`
        Config for x-axis along the bottom edge of the chart.
    axisDiscrete : :class:`AxisConfig`
        Config for axes with "point" or "band" scales.
    axisLeft : :class:`AxisConfig`
        Config for y-axis along the left edge of the chart.
    axisPoint : :class:`AxisConfig`
        Config for axes with "point" scales.
    axisQuantitative : :class:`AxisConfig`
        Config for quantitative axes.
    axisRight : :class:`AxisConfig`
        Config for y-axis along the right edge of the chart.
    axisTemporal : :class:`AxisConfig`
        Config for temporal axes.
    axisTop : :class:`AxisConfig`
        Config for x-axis along the top edge of the chart.
    axisX : :class:`AxisConfig`
        X-axis specific config.
    axisXBand : :class:`AxisConfig`
        Config for x-axes with "band" scales.
    axisXDiscrete : :class:`AxisConfig`
        Config for x-axes with "point" or "band" scales.
    axisXPoint : :class:`AxisConfig`
        Config for x-axes with "point" scales.
    axisXQuantitative : :class:`AxisConfig`
        Config for x-quantitative axes.
    axisXTemporal : :class:`AxisConfig`
        Config for x-temporal axes.
    axisY : :class:`AxisConfig`
        Y-axis specific config.
    axisYBand : :class:`AxisConfig`
        Config for y-axes with "band" scales.
    axisYDiscrete : :class:`AxisConfig`
        Config for y-axes with "point" or "band" scales.
    axisYPoint : :class:`AxisConfig`
        Config for y-axes with "point" scales.
    axisYQuantitative : :class:`AxisConfig`
        Config for y-quantitative axes.
    axisYTemporal : :class:`AxisConfig`
        Config for y-temporal axes.
    background : anyOf(:class:`Color`, :class:`ExprRef`)
        CSS color property to use as the background of the entire view.
        **Default value:** ``"white"``
    bar : :class:`BarConfig`
        Bar-Specific Config
    boxplot : :class:`BoxPlotConfig`
        Box Config
    circle : :class:`MarkConfig`
        Circle-Specific Config
    concat : :class:`CompositionConfig`
        Default configuration for all concatenation and repeat view composition operators (
        ``concat``, ``hconcat``, ``vconcat``, and ``repeat`` )
    countTitle : string
        Default axis and legend title for count fields.
        **Default value:** ``'Count of Records``.
    customFormatTypes : boolean
        Allow the ``formatType`` property for text marks and guides to accept a custom
        formatter function `registered as a Vega expression
        <https://vega.github.io/vega-lite/usage/compile.html#format-type>`__.
    errorband : :class:`ErrorBandConfig`
        ErrorBand Config
    errorbar : :class:`ErrorBarConfig`
        ErrorBar Config
    facet : :class:`CompositionConfig`
        Default configuration for the ``facet`` view composition operator
    fieldTitle : enum('verbal', 'functional', 'plain')
        Defines how Vega-Lite generates title for fields. There are three possible styles: -
        ``"verbal"`` (Default) - displays function in a verbal style (e.g., "Sum of field",
        "Year-month of date", "field (binned)"). - ``"function"`` - displays function using
        parentheses and capitalized texts (e.g., "SUM(field)", "YEARMONTH(date)",
        "BIN(field)"). - ``"plain"`` - displays only the field name without functions (e.g.,
        "field", "date", "field").
    font : string
        Default font for all text marks, titles, and labels.
    geoshape : :class:`MarkConfig`
        Geoshape-Specific Config
    header : :class:`HeaderConfig`
        Header configuration, which determines default properties for all `headers
        <https://vega.github.io/vega-lite/docs/header.html>`__.

        For a full list of header configuration options, please see the `corresponding
        section of in the header documentation
        <https://vega.github.io/vega-lite/docs/header.html#config>`__.
    headerColumn : :class:`HeaderConfig`
        Header configuration, which determines default properties for column `headers
        <https://vega.github.io/vega-lite/docs/header.html>`__.

        For a full list of header configuration options, please see the `corresponding
        section of in the header documentation
        <https://vega.github.io/vega-lite/docs/header.html#config>`__.
    headerFacet : :class:`HeaderConfig`
        Header configuration, which determines default properties for non-row/column facet
        `headers <https://vega.github.io/vega-lite/docs/header.html>`__.

        For a full list of header configuration options, please see the `corresponding
        section of in the header documentation
        <https://vega.github.io/vega-lite/docs/header.html#config>`__.
    headerRow : :class:`HeaderConfig`
        Header configuration, which determines default properties for row `headers
        <https://vega.github.io/vega-lite/docs/header.html>`__.

        For a full list of header configuration options, please see the `corresponding
        section of in the header documentation
        <https://vega.github.io/vega-lite/docs/header.html#config>`__.
    image : :class:`RectConfig`
        Image-specific Config
    legend : :class:`LegendConfig`
        Legend configuration, which determines default properties for all `legends
        <https://vega.github.io/vega-lite/docs/legend.html>`__. For a full list of legend
        configuration options, please see the `corresponding section of in the legend
        documentation <https://vega.github.io/vega-lite/docs/legend.html#config>`__.
    line : :class:`LineConfig`
        Line-Specific Config
    lineBreak : anyOf(string, :class:`ExprRef`)
        A delimiter, such as a newline character, upon which to break text strings into
        multiple lines. This property provides a global default for text marks, which is
        overridden by mark or style config settings, and by the lineBreak mark encoding
        channel. If signal-valued, either string or regular expression (regexp) values are
        valid.
    mark : :class:`MarkConfig`
        Mark Config
    numberFormat : string
        D3 Number format for guide labels and text marks. For example ``"s"`` for SI units.
        Use `D3's number format pattern <https://github.com/d3/d3-format#locale_format>`__.
    padding : anyOf(:class:`Padding`, :class:`ExprRef`)
        The default visualization padding, in pixels, from the edge of the visualization
        canvas to the data rectangle. If a number, specifies padding for all sides. If an
        object, the value should have the format ``{"left": 5, "top": 5, "right": 5,
        "bottom": 5}`` to specify padding for each side of the visualization.
        **Default value** : ``5``
    params : List(:class:`Parameter`)
        Dynamic variables that parameterize a visualization.
    point : :class:`MarkConfig`
        Point-Specific Config
    projection : :class:`ProjectionConfig`
        Projection configuration, which determines default properties for all `projections
        <https://vega.github.io/vega-lite/docs/projection.html>`__. For a full list of
        projection configuration options, please see the `corresponding section of the
        projection documentation
        <https://vega.github.io/vega-lite/docs/projection.html#config>`__.
    range : :class:`RangeConfig`
        An object hash that defines default range arrays or schemes for using with scales.
        For a full list of scale range configuration options, please see the `corresponding
        section of the scale documentation
        <https://vega.github.io/vega-lite/docs/scale.html#config>`__.
    rect : :class:`RectConfig`
        Rect-Specific Config
    rule : :class:`MarkConfig`
        Rule-Specific Config
    scale : :class:`ScaleConfig`
        Scale configuration determines default properties for all `scales
        <https://vega.github.io/vega-lite/docs/scale.html>`__. For a full list of scale
        configuration options, please see the `corresponding section of the scale
        documentation <https://vega.github.io/vega-lite/docs/scale.html#config>`__.
    selection : :class:`SelectionConfig`
        An object hash for defining default properties for each type of selections.
    square : :class:`MarkConfig`
        Square-Specific Config
    style : :class:`StyleConfigIndex`
        An object hash that defines key-value mappings to determine default properties for
        marks with a given `style
        <https://vega.github.io/vega-lite/docs/mark.html#mark-def>`__. The keys represent
        styles names; the values have to be valid `mark configuration objects
        <https://vega.github.io/vega-lite/docs/mark.html#config>`__.
    text : :class:`MarkConfig`
        Text-Specific Config
    tick : :class:`TickConfig`
        Tick-Specific Config
    timeFormat : string
        Default time format for raw time values (without time units) in text marks, legend
        labels and header labels.
        **Default value:** ``"%b %d, %Y"`` **Note:** Axes automatically determine the format
        for each label automatically so this config does not affect axes.
    title : :class:`TitleConfig`
        Title configuration, which determines default properties for all `titles
        <https://vega.github.io/vega-lite/docs/title.html>`__. For a full list of title
        configuration options, please see the `corresponding section of the title
        documentation <https://vega.github.io/vega-lite/docs/title.html#config>`__.
    trail : :class:`LineConfig`
        Trail-Specific Config
    view : :class:`ViewConfig`
        Default properties for `single view plots
        <https://vega.github.io/vega-lite/docs/spec.html#single>`__.
    """
    # Pointer into the Vega-Lite JSON schema definition this wrapper corresponds to.
    _schema = {'$ref': '#/definitions/Config'}

    # Every schema property is an explicit keyword argument defaulting to Undefined;
    # all of them are forwarded unchanged to the superclass initializer, and any
    # extra keywords pass straight through via **kwds.
    def __init__(self, arc=Undefined, area=Undefined, aria=Undefined, autosize=Undefined,
                 axis=Undefined, axisBand=Undefined, axisBottom=Undefined, axisDiscrete=Undefined,
                 axisLeft=Undefined, axisPoint=Undefined, axisQuantitative=Undefined,
                 axisRight=Undefined, axisTemporal=Undefined, axisTop=Undefined, axisX=Undefined,
                 axisXBand=Undefined, axisXDiscrete=Undefined, axisXPoint=Undefined,
                 axisXQuantitative=Undefined, axisXTemporal=Undefined, axisY=Undefined,
                 axisYBand=Undefined, axisYDiscrete=Undefined, axisYPoint=Undefined,
                 axisYQuantitative=Undefined, axisYTemporal=Undefined, background=Undefined,
                 bar=Undefined, boxplot=Undefined, circle=Undefined, concat=Undefined,
                 countTitle=Undefined, customFormatTypes=Undefined, errorband=Undefined,
                 errorbar=Undefined, facet=Undefined, fieldTitle=Undefined, font=Undefined,
                 geoshape=Undefined, header=Undefined, headerColumn=Undefined, headerFacet=Undefined,
                 headerRow=Undefined, image=Undefined, legend=Undefined, line=Undefined,
                 lineBreak=Undefined, mark=Undefined, numberFormat=Undefined, padding=Undefined,
                 params=Undefined, point=Undefined, projection=Undefined, range=Undefined,
                 rect=Undefined, rule=Undefined, scale=Undefined, selection=Undefined, square=Undefined,
                 style=Undefined, text=Undefined, tick=Undefined, timeFormat=Undefined, title=Undefined,
                 trail=Undefined, view=Undefined, **kwds):
        super(Config, self).__init__(arc=arc, area=area, aria=aria, autosize=autosize, axis=axis,
                                     axisBand=axisBand, axisBottom=axisBottom,
                                     axisDiscrete=axisDiscrete, axisLeft=axisLeft, axisPoint=axisPoint,
                                     axisQuantitative=axisQuantitative, axisRight=axisRight,
                                     axisTemporal=axisTemporal, axisTop=axisTop, axisX=axisX,
                                     axisXBand=axisXBand, axisXDiscrete=axisXDiscrete,
                                     axisXPoint=axisXPoint, axisXQuantitative=axisXQuantitative,
                                     axisXTemporal=axisXTemporal, axisY=axisY, axisYBand=axisYBand,
                                     axisYDiscrete=axisYDiscrete, axisYPoint=axisYPoint,
                                     axisYQuantitative=axisYQuantitative, axisYTemporal=axisYTemporal,
                                     background=background, bar=bar, boxplot=boxplot, circle=circle,
                                     concat=concat, countTitle=countTitle,
                                     customFormatTypes=customFormatTypes, errorband=errorband,
                                     errorbar=errorbar, facet=facet, fieldTitle=fieldTitle, font=font,
                                     geoshape=geoshape, header=header, headerColumn=headerColumn,
                                     headerFacet=headerFacet, headerRow=headerRow, image=image,
                                     legend=legend, line=line, lineBreak=lineBreak, mark=mark,
                                     numberFormat=numberFormat, padding=padding, params=params,
                                     point=point, projection=projection, range=range, rect=rect,
                                     rule=rule, scale=scale, selection=selection, square=square,
                                     style=style, text=text, tick=tick, timeFormat=timeFormat,
                                     title=title, trail=trail, view=view, **kwds)
class Cursor(VegaLiteSchema):
    """Cursor schema wrapper.

    enum('auto', 'default', 'none', 'context-menu', 'help', 'pointer', 'progress', 'wait',
    'cell', 'crosshair', 'text', 'vertical-text', 'alias', 'copy', 'move', 'no-drop',
    'not-allowed', 'e-resize', 'n-resize', 'ne-resize', 'nw-resize', 's-resize', 'se-resize',
    'sw-resize', 'w-resize', 'ew-resize', 'ns-resize', 'nesw-resize', 'nwse-resize',
    'col-resize', 'row-resize', 'all-scroll', 'zoom-in', 'zoom-out', 'grab', 'grabbing')
    """

    _schema = {'$ref': '#/definitions/Cursor'}

    def __init__(self, *args):
        super().__init__(*args)
class Cyclical(ColorScheme):
    """Cyclical schema wrapper.

    enum('rainbow', 'sinebow')
    """

    _schema = {'$ref': '#/definitions/Cyclical'}

    def __init__(self, *args):
        super().__init__(*args)
class Data(VegaLiteSchema):
    """Data schema wrapper.

    Union type: anyOf(:class:`DataSource`, :class:`Generator`)
    """

    _schema = {'$ref': '#/definitions/Data'}

    def __init__(self, *args, **kwds):
        super().__init__(*args, **kwds)
class DataFormat(VegaLiteSchema):
    """DataFormat schema wrapper.

    Union type: anyOf(:class:`CsvDataFormat`, :class:`DsvDataFormat`,
    :class:`JsonDataFormat`, :class:`TopoDataFormat`)
    """

    _schema = {'$ref': '#/definitions/DataFormat'}

    def __init__(self, *args, **kwds):
        super().__init__(*args, **kwds)
class CsvDataFormat(DataFormat):
    """CsvDataFormat schema wrapper

    Mapping(required=[])

    Attributes
    ----------
    parse : anyOf(:class:`Parse`, None)
        If set to ``null``, disable type inference based on the spec and only use type
        inference based on the data. Alternatively, a parsing directive object can be
        provided for explicit data types. Each property of the object corresponds to a field
        name, and the value to the desired data type (one of ``"number"``, ``"boolean"``,
        ``"date"``, or null (do not parse the field)). For example, ``"parse":
        {"modified_on": "date"}`` parses the ``modified_on`` field in each input record as a
        Date value.

        For ``"date"``, we parse data based using Javascript's `Date.parse()
        <https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/parse>`__.
        Specific date formats can be provided (e.g., ``{foo: "date:'%m%d%Y'"}`` ), using
        the `d3-time-format syntax <https://github.com/d3/d3-time-format#locale_format>`__.
        UTC date format parsing is supported similarly (e.g., ``{foo: "utc:'%m%d%Y'"}`` ).
        See more about `UTC time
        <https://vega.github.io/vega-lite/docs/timeunit.html#utc>`__
    type : enum('csv', 'tsv')
        Type of input data: ``"json"``, ``"csv"``, ``"tsv"``, ``"dsv"``.
        **Default value:** The default format type is determined by the extension of the
        file URL. If no extension is detected, ``"json"`` will be used by default.
    """
    # Pointer into the Vega-Lite JSON schema definition this wrapper corresponds to.
    _schema = {'$ref': '#/definitions/CsvDataFormat'}

    # Both properties default to Undefined and are forwarded unchanged to the
    # superclass initializer; extra keywords pass through via **kwds.
    def __init__(self, parse=Undefined, type=Undefined, **kwds):
        super(CsvDataFormat, self).__init__(parse=parse, type=type, **kwds)
class DataSource(Data):
    """DataSource schema wrapper.

    Union type: anyOf(:class:`UrlData`, :class:`InlineData`, :class:`NamedData`)
    """

    _schema = {'$ref': '#/definitions/DataSource'}

    def __init__(self, *args, **kwds):
        super().__init__(*args, **kwds)
class Datasets(VegaLiteSchema):
    """Datasets schema wrapper.

    Mapping(required=[])
    """

    _schema = {'$ref': '#/definitions/Datasets'}

    def __init__(self, **kwds):
        super().__init__(**kwds)
class Day(VegaLiteSchema):
    """Day schema wrapper.

    float
    """

    _schema = {'$ref': '#/definitions/Day'}

    def __init__(self, *args):
        super().__init__(*args)
class DictInlineDataset(VegaLiteSchema):
    """DictInlineDataset schema wrapper.

    Mapping(required=[])
    """

    _schema = {'$ref': '#/definitions/Dict<InlineDataset>'}

    def __init__(self, **kwds):
        super().__init__(**kwds)
class DictSelectionInit(VegaLiteSchema):
    """DictSelectionInit schema wrapper.

    Mapping(required=[])
    """

    _schema = {'$ref': '#/definitions/Dict<SelectionInit>'}

    def __init__(self, **kwds):
        super().__init__(**kwds)
class DictSelectionInitInterval(VegaLiteSchema):
    """DictSelectionInitInterval schema wrapper.

    Mapping(required=[])
    """

    _schema = {'$ref': '#/definitions/Dict<SelectionInitInterval>'}

    def __init__(self, **kwds):
        super().__init__(**kwds)
class Dictunknown(VegaLiteSchema):
    """Dictunknown schema wrapper.

    Mapping(required=[]) -- a mapping with unconstrained value types.
    """
    _schema = {'$ref': '#/definitions/Dict<unknown>'}

    def __init__(self, **kwds):
        # Keyword arguments form the mapping entries.
        super().__init__(**kwds)
class Diverging(ColorScheme):
    """Diverging schema wrapper.

    An enum of diverging color-scheme names. The base scheme names are:
    ``'blueorange'``, ``'brownbluegreen'``, ``'purplegreen'``,
    ``'pinkyellowgreen'``, ``'purpleorange'``, ``'redblue'``, ``'redgrey'``,
    ``'redyellowblue'``, ``'redyellowgreen'``, and ``'spectral'``; each may
    also carry a ``-3`` through ``-11`` suffix selecting a fixed number of
    colors (e.g. ``'blueorange-7'``, ``'spectral-11'``).
    """
    _schema = {'$ref': '#/definitions/Diverging'}

    def __init__(self, *args):
        # Wraps a single scheme-name string; delegate to the schema base class.
        super().__init__(*args)
class DomainUnionWith(VegaLiteSchema):
    """DomainUnionWith schema wrapper.

    Mapping(required=[unionWith])

    Attributes
    ----------
    unionWith : anyOf(List(float), List(string), List(boolean), List(:class:`DateTime`))
        Customized domain values to be union with the field's values.
        For *quantitative* fields this may be a min/max pair, a longer array
        for piecewise scales, or the string ``"unaggregated"``; for *temporal*
        fields a min/max pair of timestamps or :class:`DateTime` objects; for
        *ordinal* and *nominal* fields an array of valid input values.
    """
    _schema = {'$ref': '#/definitions/DomainUnionWith'}

    def __init__(self, unionWith=Undefined, **kwds):
        # Forward the single schema property plus any extras.
        super().__init__(unionWith=unionWith, **kwds)
class DsvDataFormat(DataFormat):
    """DsvDataFormat schema wrapper.

    Mapping(required=[delimiter])

    Attributes
    ----------
    delimiter : string
        The delimiter between records; must be a single 16-bit code unit
        (ASCII characters are fine, multi-unit emoji are not).
    parse : anyOf(:class:`Parse`, None)
        ``null`` disables spec-based type inference; otherwise an object
        mapping field names to desired data types ( ``"number"``,
        ``"boolean"``, ``"date"``, or null to skip parsing). Date parsing
        uses JavaScript's ``Date.parse()``; explicit formats may be given
        with d3-time-format syntax, e.g. ``{foo: "date:'%m%d%Y'"}`` or
        ``{foo: "utc:'%m%d%Y'"}`` for UTC.
    type : string
        Type of input data: ``"json"``, ``"csv"``, ``"tsv"``, ``"dsv"``.
        **Default value:** inferred from the file URL's extension, falling
        back to ``"json"``.
    """
    _schema = {'$ref': '#/definitions/DsvDataFormat'}

    def __init__(self, delimiter=Undefined, parse=Undefined, type=Undefined, **kwds):
        # Forward all schema properties to the generic wrapper machinery.
        super().__init__(delimiter=delimiter, parse=parse, type=type, **kwds)
class Element(VegaLiteSchema):
    """Element schema wrapper.

    string -- a DOM element selector.
    """
    _schema = {'$ref': '#/definitions/Element'}

    def __init__(self, *args):
        # Wraps a single string value; delegate to the schema base class.
        super().__init__(*args)
class Encoding(VegaLiteSchema):
    """Encoding schema wrapper.

    Mapping(required=[]) -- the full set of visual encoding channels.

    Attributes
    ----------
    angle : :class:`NumericMarkPropDef`
        Rotation angle of point and text marks.
    color : :class:`ColorDef`
        Color of the marks -- fill or stroke depending on the mark's
        ``filled`` property. ``fill``/``stroke`` take precedence over
        ``color`` when both are given.
    description : anyOf(:class:`StringFieldDefWithCondition`,
    :class:`StringValueDefWithCondition`)
        A text description of this mark for ARIA accessibility (SVG output
        only); written to the ``"aria-label"`` attribute.
    detail : anyOf(:class:`FieldDefWithoutScale`, List(:class:`FieldDefWithoutScale`))
        Additional grouping levels for aggregate views and line/trail/area
        marks, without mapping data to a visual channel.
    fill : :class:`ColorDef`
        Fill color of the marks; overrides ``color`` if both are given.
    fillOpacity : :class:`NumericMarkPropDef`
        Fill opacity of the marks.
    href : anyOf(:class:`StringFieldDefWithCondition`, :class:`StringValueDefWithCondition`)
        A URL to load upon mouse click.
    key : :class:`FieldDefWithoutScale`
        A data field used as a unique key for data binding, enabling object
        constancy for transitions over dynamic data.
    latitude : :class:`LatLongDef`
        Latitude position of geographically projected marks.
    latitude2 : :class:`Position2Def`
        Latitude-2 position for geographically projected ranged marks.
    longitude : :class:`LatLongDef`
        Longitude position of geographically projected marks.
    longitude2 : :class:`Position2Def`
        Longitude-2 position for geographically projected ranged marks.
    opacity : :class:`NumericMarkPropDef`
        Opacity of the marks.
    order : anyOf(:class:`OrderFieldDef`, List(:class:`OrderFieldDef`), :class:`OrderValueDef`)
        Order of the marks: stack order for stacked marks, point order for
        line/trail marks, and layer order otherwise. ``{"value": null}``
        keeps the original data order for line marks.
    radius : :class:`PolarDef`
        The outer radius in pixels of arc marks.
    radius2 : :class:`Position2Def`
        The inner radius in pixels of arc marks.
    shape : :class:`ShapeDef`
        Shape of the mark: a named plotting shape, ``"stroke"``, a
        directional shape, a custom SVG path string for ``point`` marks, or
        a geojson field definition for ``geoshape`` marks.
    size : :class:`NumericMarkPropDef`
        Size of the mark (symbol area, bar/tick size, or font size
        depending on mark type; unsupported for line/area/rect).
    stroke : :class:`ColorDef`
        Stroke color of the marks; overrides ``color`` if both are given.
    strokeDash : :class:`NumericArrayMarkPropDef`
        Stroke dash of the marks. **Default value:** ``[1,0]`` (no dash).
    strokeOpacity : :class:`NumericMarkPropDef`
        Stroke opacity of the marks.
    strokeWidth : :class:`NumericMarkPropDef`
        Stroke width of the marks.
    text : :class:`TextDef`
        Text of the ``text`` mark.
    theta : :class:`PolarDef`
        Arc length (or start angle when ``theta2`` is given) in radians for
        arc marks; polar coordinate angle for text marks. 0 points north,
        increasing clockwise.
    theta2 : :class:`Position2Def`
        The end angle of arc marks in radians.
    tooltip : anyOf(:class:`StringFieldDefWithCondition`, :class:`StringValueDefWithCondition`,
    List(:class:`StringFieldDef`), None)
        The tooltip text shown on mouse hover; overrides the mark
        definition's ``tooltip`` property.
    url : anyOf(:class:`StringFieldDefWithCondition`, :class:`StringValueDefWithCondition`)
        The URL of an image mark.
    x : :class:`PositionDef`
        X coordinates, or width of horizontal bar/area marks; may be a
        number or the string ``"width"``.
    x2 : :class:`Position2Def`
        X2 coordinates for ranged marks; may be a number or ``"width"``.
    xError : anyOf(:class:`SecondaryFieldDef`, :class:`ValueDefnumber`)
        Error value of x coordinates for errorbar/errorband marks.
    xError2 : anyOf(:class:`SecondaryFieldDef`, :class:`ValueDefnumber`)
        Secondary error value of x coordinates for errorbar/errorband marks.
    y : :class:`PositionDef`
        Y coordinates, or height of vertical bar/area marks; may be a
        number or the string ``"height"``.
    y2 : :class:`Position2Def`
        Y2 coordinates for ranged marks; may be a number or ``"height"``.
    yError : anyOf(:class:`SecondaryFieldDef`, :class:`ValueDefnumber`)
        Error value of y coordinates for errorbar/errorband marks.
    yError2 : anyOf(:class:`SecondaryFieldDef`, :class:`ValueDefnumber`)
        Secondary error value of y coordinates for errorbar/errorband marks.
    """
    _schema = {'$ref': '#/definitions/Encoding'}

    def __init__(self, angle=Undefined, color=Undefined, description=Undefined, detail=Undefined,
                 fill=Undefined, fillOpacity=Undefined, href=Undefined, key=Undefined,
                 latitude=Undefined, latitude2=Undefined, longitude=Undefined, longitude2=Undefined,
                 opacity=Undefined, order=Undefined, radius=Undefined, radius2=Undefined,
                 shape=Undefined, size=Undefined, stroke=Undefined, strokeDash=Undefined,
                 strokeOpacity=Undefined, strokeWidth=Undefined, text=Undefined, theta=Undefined,
                 theta2=Undefined, tooltip=Undefined, url=Undefined, x=Undefined, x2=Undefined,
                 xError=Undefined, xError2=Undefined, y=Undefined, y2=Undefined, yError=Undefined,
                 yError2=Undefined, **kwds):
        # Forward every channel keyword, unchanged, to the schema wrapper base.
        super().__init__(
            angle=angle, color=color, description=description, detail=detail,
            fill=fill, fillOpacity=fillOpacity, href=href, key=key,
            latitude=latitude, latitude2=latitude2, longitude=longitude,
            longitude2=longitude2, opacity=opacity, order=order,
            radius=radius, radius2=radius2, shape=shape, size=size,
            stroke=stroke, strokeDash=strokeDash, strokeOpacity=strokeOpacity,
            strokeWidth=strokeWidth, text=text, theta=theta, theta2=theta2,
            tooltip=tooltip, url=url, x=x, x2=x2, xError=xError,
            xError2=xError2, y=y, y2=y2, yError=yError, yError2=yError2,
            **kwds)
class EncodingSortFieldFieldName(VegaLiteSchema):
    """EncodingSortFieldFieldName schema wrapper.

    Mapping(required=[])

    A sort definition for sorting a discrete scale in an encoding field
    definition.

    Attributes
    ----------
    field : :class:`FieldName`
        The data field to sort by. **Default value:** the field specified
        in the outer data reference.
    op : :class:`NonArgAggregateOp`
        An aggregate operation (e.g., ``"count"``, ``"mean"``, ``"median"``)
        applied to the sort field prior to sorting; required when the sort
        field has multiple values per encoded data field.
        **Default value:** ``"sum"`` for stacked plots, otherwise ``"min"``.
    order : anyOf(:class:`SortOrder`, None)
        The sort order: ``"ascending"`` (default), ``"descending"``, or
        ``null`` (no sort).
    """
    _schema = {'$ref': '#/definitions/EncodingSortField<FieldName>'}

    def __init__(self, field=Undefined, op=Undefined, order=Undefined, **kwds):
        # Forward all schema properties to the generic wrapper machinery.
        super().__init__(field=field, op=op, order=order, **kwds)
class ErrorBand(CompositeMark):
    """ErrorBand schema wrapper.

    string -- the ``"errorband"`` composite mark type.
    """
    _schema = {'$ref': '#/definitions/ErrorBand'}

    def __init__(self, *args):
        # Wraps a single string value; delegate to the schema base class.
        super().__init__(*args)
class ErrorBandConfig(VegaLiteSchema):
    """ErrorBandConfig schema wrapper.

    Mapping(required=[])

    Attributes
    ----------
    band : anyOf(boolean, :class:`MarkConfigExprOrSignalRef`)
    borders : anyOf(boolean, :class:`MarkConfigExprOrSignalRef`)
    extent : :class:`ErrorBarExtent`
        The extent of the band: ``"ci"`` (confidence interval of the mean),
        ``"stderr"`` (standard error from the mean), ``"stdev"`` (standard
        deviation from the mean), or ``"iqr"`` (q1 to q3).
        **Default value:** ``"stderr"``.
    interpolate : :class:`Interpolate`
        The line interpolation method for the error band: ``"linear"``,
        ``"linear-closed"``, ``"step"``, ``"step-before"``, ``"step-after"``,
        ``"basis"``, ``"basis-open"``, ``"basis-closed"``, ``"cardinal"``,
        ``"cardinal-open"``, ``"cardinal-closed"``, ``"bundle"``, or
        ``"monotone"``.
    tension : float
        The tension parameter for the interpolation type of the error band.
    """
    _schema = {'$ref': '#/definitions/ErrorBandConfig'}

    def __init__(self, band=Undefined, borders=Undefined, extent=Undefined, interpolate=Undefined,
                 tension=Undefined, **kwds):
        # Forward all schema properties to the generic wrapper machinery.
        super().__init__(band=band, borders=borders, extent=extent,
                         interpolate=interpolate, tension=tension, **kwds)
class ErrorBandDef(CompositeMarkDef):
    """ErrorBandDef schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : :class:`ErrorBand`
        The mark type: a primitive mark type (``"bar"``, ``"circle"``,
        ``"square"``, ``"tick"``, ``"line"``, ``"area"``, ``"point"``,
        ``"geoshape"``, ``"rule"``, ``"text"``) or a composite one
        (``"boxplot"``, ``"errorband"``, ``"errorbar"``).
    band : anyOf(boolean, :class:`MarkConfigExprOrSignalRef`)
    borders : anyOf(boolean, :class:`MarkConfigExprOrSignalRef`)
    clip : boolean
        Whether the composite mark is clipped to the enclosing group's
        width and height.
    color : anyOf(:class:`Color`, :class:`Gradient`, :class:`ExprRef`)
        Default color. **Default value:** ``"#4682b4"``. Cannot be used in
        a style config; ``fill`` and ``stroke`` take precedence.
    extent : :class:`ErrorBarExtent`
        The extent of the band: ``"ci"``, ``"stderr"``, ``"stdev"``, or
        ``"iqr"``. **Default value:** ``"stderr"``.
    interpolate : :class:`Interpolate`
        The line interpolation method for the error band (``"linear"``,
        ``"step"``, ``"basis"``, ``"cardinal"``, ``"bundle"``,
        ``"monotone"``, and their open/closed/before/after variants).
    opacity : float
        The opacity (value between [0,1]) of the mark.
    orient : :class:`Orientation`
        Orientation of the error band; normally auto-determined, but may
        be specified when ambiguous.
    tension : float
        The tension parameter for the interpolation type of the error band.
    """
    _schema = {'$ref': '#/definitions/ErrorBandDef'}

    def __init__(self, type=Undefined, band=Undefined, borders=Undefined, clip=Undefined,
                 color=Undefined, extent=Undefined, interpolate=Undefined, opacity=Undefined,
                 orient=Undefined, tension=Undefined, **kwds):
        # Forward all schema properties to the generic wrapper machinery.
        super().__init__(type=type, band=band, borders=borders, clip=clip,
                         color=color, extent=extent, interpolate=interpolate,
                         opacity=opacity, orient=orient, tension=tension,
                         **kwds)
class ErrorBar(CompositeMark):
    """ErrorBar schema wrapper.

    string -- the ``"errorbar"`` composite mark type.
    """
    _schema = {'$ref': '#/definitions/ErrorBar'}

    def __init__(self, *args):
        # Wraps a single string value; delegate to the schema base class.
        super().__init__(*args)
class ErrorBarConfig(VegaLiteSchema):
    """ErrorBarConfig schema wrapper.

    Mapping(required=[])

    Attributes
    ----------
    extent : :class:`ErrorBarExtent`
        The extent of the rule: ``"ci"`` (confidence interval of the mean),
        ``"stderr"`` (standard error from the mean), ``"stdev"`` (standard
        deviation from the mean), or ``"iqr"`` (q1 to q3).
        **Default value:** ``"stderr"``.
    rule : anyOf(boolean, :class:`MarkConfigExprOrSignalRef`)
    size : float
        Size of the ticks of an error bar
    thickness : float
        Thickness of the ticks and the bar of an error bar
    ticks : anyOf(boolean, :class:`MarkConfigExprOrSignalRef`)
    """
    _schema = {'$ref': '#/definitions/ErrorBarConfig'}

    def __init__(self, extent=Undefined, rule=Undefined, size=Undefined, thickness=Undefined,
                 ticks=Undefined, **kwds):
        # Forward all schema properties to the generic wrapper machinery.
        super().__init__(extent=extent, rule=rule, size=size,
                         thickness=thickness, ticks=ticks, **kwds)
class ErrorBarDef(CompositeMarkDef):
    """ErrorBarDef schema wrapper.

    Mapping(required=[type])

    Attributes
    ----------
    type : :class:`ErrorBar`
        The mark type: a primitive mark type (``"bar"``, ``"circle"``,
        ``"square"``, ``"tick"``, ``"line"``, ``"area"``, ``"point"``,
        ``"geoshape"``, ``"rule"``, ``"text"``) or a composite one
        (``"boxplot"``, ``"errorband"``, ``"errorbar"``).
    clip : boolean
        Whether the composite mark is clipped to the enclosing group's
        width and height.
    color : anyOf(:class:`Color`, :class:`Gradient`, :class:`ExprRef`)
        Default color. **Default value:** ``"#4682b4"``. Cannot be used in
        a style config; ``fill`` and ``stroke`` take precedence.
    extent : :class:`ErrorBarExtent`
        The extent of the rule: ``"ci"``, ``"stderr"``, ``"stdev"``, or
        ``"iqr"``. **Default value:** ``"stderr"``.
    opacity : float
        The opacity (value between [0,1]) of the mark.
    orient : :class:`Orientation`
        Orientation of the error bar; normally auto-determined, but may be
        specified when ambiguous.
    rule : anyOf(boolean, :class:`MarkConfigExprOrSignalRef`)
    size : float
        Size of the ticks of an error bar
    thickness : float
        Thickness of the ticks and the bar of an error bar
    ticks : anyOf(boolean, :class:`MarkConfigExprOrSignalRef`)
    """
    _schema = {'$ref': '#/definitions/ErrorBarDef'}

    def __init__(self, type=Undefined, clip=Undefined, color=Undefined, extent=Undefined,
                 opacity=Undefined, orient=Undefined, rule=Undefined, size=Undefined,
                 thickness=Undefined, ticks=Undefined, **kwds):
        # Forward all schema properties to the generic wrapper machinery.
        super().__init__(type=type, clip=clip, color=color, extent=extent,
                         opacity=opacity, orient=orient, rule=rule, size=size,
                         thickness=thickness, ticks=ticks, **kwds)
class ErrorBarExtent(VegaLiteSchema):
    """ErrorBarExtent schema wrapper.

    enum('ci', 'iqr', 'stderr', 'stdev')
    """
    _schema = {'$ref': '#/definitions/ErrorBarExtent'}

    def __init__(self, *args):
        # Wraps a single enum string; delegate to the schema base class.
        super().__init__(*args)
class Expr(VegaLiteSchema):
    """Expr schema wrapper.

    string -- a Vega expression string.
    """
    _schema = {'$ref': '#/definitions/Expr'}

    def __init__(self, *args):
        # Wraps a single string value; delegate to the schema base class.
        super().__init__(*args)
class ExprOrSignalRef(VegaLiteSchema):
    """ExprOrSignalRef schema wrapper.

    Mapping(required=[expr])

    Attributes
    ----------
    expr : string
        Vega expression (which can refer to Vega-Lite parameters).
    """
    _schema = {'$ref': '#/definitions/ExprOrSignalRef'}

    def __init__(self, expr=Undefined, **kwds):
        # Forward the single schema property plus any extras.
        super().__init__(expr=expr, **kwds)
class ExprRef(VegaLiteSchema):
    """ExprRef schema wrapper.

    Mapping(required=[expr])

    Attributes
    ----------
    expr : string
        Vega expression (which can refer to Vega-Lite parameters).
    """
    _schema = {'$ref': '#/definitions/ExprRef'}

    def __init__(self, expr=Undefined, **kwds):
        # Forward the single schema property plus any extras.
        super().__init__(expr=expr, **kwds)
class FacetEncodingFieldDef(VegaLiteSchema):
"""FacetEncodingFieldDef schema wrapper
Mapping(required=[])
Attributes
----------
aggregate : :class:`Aggregate`
Aggregation function for the field (e.g., ``"mean"``, ``"sum"``, ``"median"``,
``"min"``, ``"max"``, ``"count"`` ).
**Default value:** ``undefined`` (None)
**See also:** `aggregate <https://vega.github.io/vega-lite/docs/aggregate.html>`__
documentation.
align : anyOf(:class:`LayoutAlign`, :class:`RowColLayoutAlign`)
The alignment to apply to grid rows and columns. The supported string values are
``"all"``, ``"each"``, and ``"none"``.
* For ``"none"``, a flow layout will be used, in which adjacent subviews are simply
placed one after the other. - For ``"each"``, subviews will be aligned into a
clean grid structure, but each row or column may be of variable size. - For
``"all"``, subviews will be aligned and each row or column will be sized
identically based on the maximum observed size. String values for this property
will be applied to both grid rows and columns.
Alternatively, an object value of the form ``{"row": string, "column": string}`` can
be used to supply different alignments for rows and columns.
**Default value:** ``"all"``.
band : float
For rect-based marks ( ``rect``, ``bar``, and ``image`` ), mark size relative to
bandwidth of `band scales
<https://vega.github.io/vega-lite/docs/scale.html#band>`__, bins or time units. If
set to ``1``, the mark size is set to the bandwidth, the bin interval, or the time
unit interval. If set to ``0.5``, the mark size is half of the bandwidth or the time
unit interval.
For other marks, relative position on a band of a stacked, binned, time unit or band
scale. If set to ``0``, the marks will be positioned at the beginning of the band.
If set to ``0.5``, the marks will be positioned in the middle of the band.
bin : anyOf(boolean, :class:`BinParams`, None)
A flag for binning a ``quantitative`` field, `an object defining binning parameters
<https://vega.github.io/vega-lite/docs/bin.html#params>`__, or indicating that the
data for ``x`` or ``y`` channel are binned before they are imported into Vega-Lite (
``"binned"`` ).
If ``true``, default `binning parameters
<https://vega.github.io/vega-lite/docs/bin.html>`__ will be applied.
If ``"binned"``, this indicates that the data for the ``x`` (or ``y`` ) channel are
already binned. You can map the bin-start field to ``x`` (or ``y`` ) and the bin-end
field to ``x2`` (or ``y2`` ). The scale and axis will be formatted similar to
binning in Vega-Lite. To adjust the axis ticks based on the bin step, you can also
set the axis's `tickMinStep
<https://vega.github.io/vega-lite/docs/axis.html#ticks>`__ property.
**Default value:** ``false``
**See also:** `bin <https://vega.github.io/vega-lite/docs/bin.html>`__
documentation.
bounds : enum('full', 'flush')
The bounds calculation method to use for determining the extent of a sub-plot. One
of ``full`` (the default) or ``flush``.
* If set to ``full``, the entire calculated bounds (including axes, title, and
legend) will be used. - If set to ``flush``, only the specified width and height
values for the sub-view will be used. The ``flush`` setting can be useful when
attempting to place sub-plots without axes or legends into a uniform grid
structure.
**Default value:** ``"full"``
center : anyOf(boolean, :class:`RowColboolean`)
Boolean flag indicating if subviews should be centered relative to their respective
rows or columns.
An object value of the form ``{"row": boolean, "column": boolean}`` can be used to
supply different centering values for rows and columns.
**Default value:** ``false``
columns : float
The number of columns to include in the view composition layout.
**Default value** : ``undefined`` -- An infinite number of columns (a single row)
will be assumed. This is equivalent to ``hconcat`` (for ``concat`` ) and to using
the ``column`` channel (for ``facet`` and ``repeat`` ).
**Note** :
1) This property is only for: - the general (wrappable) ``concat`` operator (not
``hconcat`` / ``vconcat`` ) - the ``facet`` and ``repeat`` operator with one
field/repetition definition (without row/column nesting)
2) Setting the ``columns`` to ``1`` is equivalent to ``vconcat`` (for ``concat`` )
and to using the ``row`` channel (for ``facet`` and ``repeat`` ).
field : :class:`Field`
**Required.** A string defining the name of the field from which to pull a data
value or an object defining iterated values from the `repeat
<https://vega.github.io/vega-lite/docs/repeat.html>`__ operator.
**See also:** `field <https://vega.github.io/vega-lite/docs/field.html>`__
documentation.
**Notes:** 1) Dots ( ``.`` ) and brackets ( ``[`` and ``]`` ) can be used to access
nested objects (e.g., ``"field": "foo.bar"`` and ``"field": "foo['bar']"`` ). If
field names contain dots or brackets but are not nested, you can use ``\\`` to
escape dots and brackets (e.g., ``"a\\.b"`` and ``"a\\[0\\]"`` ). See more details
about escaping in the `field documentation
<https://vega.github.io/vega-lite/docs/field.html>`__. 2) ``field`` is not required
if ``aggregate`` is ``count``.
header : :class:`Header`
An object defining properties of a facet's header.
sort : anyOf(:class:`SortArray`, :class:`SortOrder`, :class:`EncodingSortField`, None)
Sort order for the encoded field.
For continuous fields (quantitative or temporal), ``sort`` can be either
``"ascending"`` or ``"descending"``.
For discrete fields, ``sort`` can be one of the following: - ``"ascending"`` or
``"descending"`` -- for sorting by the values' natural order in JavaScript. - `A
sort field definition
<https://vega.github.io/vega-lite/docs/sort.html#sort-field>`__ for sorting by
another field. - `An array specifying the field values in preferred order
<https://vega.github.io/vega-lite/docs/sort.html#sort-array>`__. In this case, the
sort order will obey the values in the array, followed by any unspecified values in
their original order. For discrete time field, values in the sort array can be
`date-time definition objects <types#datetime>`__. In addition, for time units
``"month"`` and ``"day"``, the values can be the month or day names (case
insensitive) or their 3-letter initials (e.g., ``"Mon"``, ``"Tue"`` ). - ``null``
indicating no sort.
**Default value:** ``"ascending"``
**Note:** ``null`` is not supported for ``row`` and ``column``.
spacing : anyOf(float, :class:`RowColnumber`)
The spacing in pixels between sub-views of the composition operator. An object of
the form ``{"row": number, "column": number}`` can be used to set different spacing
values for rows and columns.
**Default value** : Depends on ``"spacing"`` property of `the view composition
configuration <https://vega.github.io/vega-lite/docs/config.html#view-config>`__ (
``20`` by default)
timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
Time unit (e.g., ``year``, ``yearmonth``, ``month``, ``hours`` ) for a temporal
field. or `a temporal field that gets casted as ordinal
<https://vega.github.io/vega-lite/docs/type.html#cast>`__.
**Default value:** ``undefined`` (None)
**See also:** `timeUnit <https://vega.github.io/vega-lite/docs/timeunit.html>`__
documentation.
title : anyOf(:class:`Text`, None)
A title for the field. If ``null``, the title will be removed.
**Default value:** derived from the field's name and transformation function (
``aggregate``, ``bin`` and ``timeUnit`` ). If the field has an aggregate function,
the function is displayed as part of the title (e.g., ``"Sum of Profit"`` ). If the
field is binned or has a time unit applied, the applied function is shown in
parentheses (e.g., ``"Profit (binned)"``, ``"Transaction Date (year-month)"`` ).
Otherwise, the title is simply the field name.
**Notes** :
1) You can customize the default field title format by providing the `fieldTitle
<https://vega.github.io/vega-lite/docs/config.html#top-level-config>`__ property in
the `config <https://vega.github.io/vega-lite/docs/config.html>`__ or `fieldTitle
function via the compile function's options
<https://vega.github.io/vega-lite/docs/compile.html#field-title>`__.
2) If both field definition's ``title`` and axis, header, or legend ``title`` are
defined, axis/header/legend title will be used.
type : :class:`StandardType`
The type of measurement ( ``"quantitative"``, ``"temporal"``, ``"ordinal"``, or
``"nominal"`` ) for the encoded field or constant value ( ``datum`` ). It can also
be a ``"geojson"`` type for encoding `'geoshape'
<https://vega.github.io/vega-lite/docs/geoshape.html>`__.
Vega-Lite automatically infers data types in many cases as discussed below. However,
type is required for a field if: (1) the field is not nominal and the field encoding
has no specified ``aggregate`` (except ``argmin`` and ``argmax`` ), ``bin``, scale
type, custom ``sort`` order, nor ``timeUnit`` or (2) if you wish to use an ordinal
scale for a field with ``bin`` or ``timeUnit``.
**Default value:**
1) For a data ``field``, ``"nominal"`` is the default data type unless the field
encoding has ``aggregate``, ``channel``, ``bin``, scale type, ``sort``, or
``timeUnit`` that satisfies the following criteria: - ``"quantitative"`` is the
default type if (1) the encoded field contains ``bin`` or ``aggregate`` except
``"argmin"`` and ``"argmax"``, (2) the encoding channel is ``latitude`` or
``longitude`` channel or (3) if the specified scale type is `a quantitative scale
<https://vega.github.io/vega-lite/docs/scale.html#type>`__. - ``"temporal"`` is the
default type if (1) the encoded field contains ``timeUnit`` or (2) the specified
scale type is a time or utc scale - ``ordinal""`` is the default type if (1) the
encoded field contains a `custom sort order
<https://vega.github.io/vega-lite/docs/sort.html#specifying-custom-sort-order>`__,
(2) the specified scale type is an ordinal/point/band scale, or (3) the encoding
channel is ``order``.
2) For a constant value in data domain ( ``datum`` ): - ``"quantitative"`` if the
datum is a number - ``"nominal"`` if the datum is a string - ``"temporal"`` if the
datum is `a date time object
<https://vega.github.io/vega-lite/docs/datetime.html>`__
**Note:** - Data ``type`` describes the semantics of the data rather than the
primitive data types (number, string, etc.). The same primitive data type can have
different types of measurement. For example, numeric data can represent
quantitative, ordinal, or nominal data. - Data values for a temporal field can be
either a date-time string (e.g., ``"2015-03-07 12:32:17"``, ``"17:01"``,
``"2015-03-16"``. ``"2015"`` ) or a timestamp number (e.g., ``1552199579097`` ). -
When using with `bin <https://vega.github.io/vega-lite/docs/bin.html>`__, the
``type`` property can be either ``"quantitative"`` (for using a linear bin scale) or
`"ordinal" (for using an ordinal bin scale)
<https://vega.github.io/vega-lite/docs/type.html#cast-bin>`__. - When using with
`timeUnit <https://vega.github.io/vega-lite/docs/timeunit.html>`__, the ``type``
property can be either ``"temporal"`` (default, for using a temporal scale) or
`"ordinal" (for using an ordinal scale)
<https://vega.github.io/vega-lite/docs/type.html#cast-bin>`__. - When using with
`aggregate <https://vega.github.io/vega-lite/docs/aggregate.html>`__, the ``type``
property refers to the post-aggregation data type. For example, we can calculate
count ``distinct`` of a categorical field ``"cat"`` using ``{"aggregate":
"distinct", "field": "cat"}``. The ``"type"`` of the aggregate output is
``"quantitative"``. - Secondary channels (e.g., ``x2``, ``y2``, ``xError``,
``yError`` ) do not have ``type`` as they must have exactly the same type as their
primary channels (e.g., ``x``, ``y`` ).
**See also:** `type <https://vega.github.io/vega-lite/docs/type.html>`__
documentation.
"""
_schema = {'$ref': '#/definitions/FacetEncodingFieldDef'}
def __init__(self, aggregate=Undefined, align=Undefined, band=Undefined, bin=Undefined,
             bounds=Undefined, center=Undefined, columns=Undefined, field=Undefined,
             header=Undefined, sort=Undefined, spacing=Undefined, timeUnit=Undefined,
             title=Undefined, type=Undefined, **kwds):
    """Build a ``FacetEncodingFieldDef`` by forwarding every option verbatim
    to the generic schema wrapper; unset options stay ``Undefined``."""
    super(FacetEncodingFieldDef, self).__init__(
        aggregate=aggregate, align=align, band=band, bin=bin, bounds=bounds,
        center=center, columns=columns, field=field, header=header, sort=sort,
        spacing=spacing, timeUnit=timeUnit, title=title, type=type, **kwds)
class FacetFieldDef(VegaLiteSchema):
    """FacetFieldDef schema wrapper.

    Mapping(required=[])

    Defines the data field used to facet (partition) a view into sub-plots,
    together with its optional transformations and presentation options.

    Attributes
    ----------
    aggregate : :class:`Aggregate`
        Aggregation function for the field (e.g. ``"mean"``, ``"sum"``,
        ``"median"``, ``"min"``, ``"max"``, ``"count"``).
    band : float
        For rect-based marks, mark size relative to the bandwidth of band
        scales, bins or time units; for other marks, relative position on a
        band (``0`` = start, ``0.5`` = middle).
    bin : anyOf(boolean, :class:`BinParams`, None)
        Binning behavior: ``True`` applies default binning, a
        :class:`BinParams` object customizes it, and ``"binned"`` marks data
        that was binned before import.
    field : :class:`Field`
        **Required.** The name of the data field to pull values from, or a
        repeat-operator reference.  Dots/brackets address nested objects and
        can be escaped with ``\\``.  Not required when ``aggregate`` is
        ``count``.
    header : :class:`Header`
        Properties of the facet's header.
    sort : anyOf(:class:`SortArray`, :class:`SortOrder`, :class:`EncodingSortField`, None)
        Sort order: ``"ascending"``/``"descending"``, a sort-field
        definition, an explicit array of values in preferred order, or
        ``None`` for no sort.  **Default value:** ``"ascending"``.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit (e.g. ``year``, ``yearmonth``, ``month``, ``hours``) for a
        temporal field, or a temporal field cast as ordinal.
    title : anyOf(:class:`Text`, None)
        Title for the field; ``None`` removes the title.  Defaults to a name
        derived from the field and its transformation functions.
    type : :class:`StandardType`
        Measurement type (``"quantitative"``, ``"temporal"``, ``"ordinal"``
        or ``"nominal"``).  Vega-Lite infers it in many cases; see the
        type documentation for the inference rules.

    See https://vega.github.io/vega-lite/docs/facet.html for the full
    attribute semantics.
    """
    _schema = {'$ref': '#/definitions/FacetFieldDef'}

    def __init__(self, aggregate=Undefined, band=Undefined, bin=Undefined, field=Undefined,
                 header=Undefined, sort=Undefined, timeUnit=Undefined, title=Undefined,
                 type=Undefined, **kwds):
        # Every option is forwarded verbatim to the generic schema wrapper.
        super(FacetFieldDef, self).__init__(
            aggregate=aggregate, band=band, bin=bin, field=field, header=header,
            sort=sort, timeUnit=timeUnit, title=title, type=type, **kwds)
class FacetFieldDefFieldName(VegaLiteSchema):
    """FacetFieldDefFieldName schema wrapper.

    Mapping(required=[])

    Variant of :class:`FacetFieldDef` whose ``field`` must be a plain
    :class:`FieldName` (no repeat reference) and whose ``sort`` uses
    :class:`EncodingSortFieldFieldName`.

    Attributes
    ----------
    aggregate : :class:`Aggregate`
        Aggregation function for the field (e.g. ``"mean"``, ``"sum"``,
        ``"median"``, ``"min"``, ``"max"``, ``"count"``).
    band : float
        For rect-based marks, mark size relative to the bandwidth of band
        scales, bins or time units; for other marks, relative position on a
        band (``0`` = start, ``0.5`` = middle).
    bin : anyOf(boolean, :class:`BinParams`, None)
        Binning behavior: ``True`` applies default binning, a
        :class:`BinParams` object customizes it, and ``"binned"`` marks data
        that was binned before import.
    field : :class:`FieldName`
        **Required.** The name of the data field to pull values from.
        Dots/brackets address nested objects and can be escaped with ``\\``.
        Not required when ``aggregate`` is ``count``.
    header : :class:`Header`
        Properties of the facet's header.
    sort : anyOf(:class:`SortArray`, :class:`SortOrder`, :class:`EncodingSortFieldFieldName`, None)
        Sort order: ``"ascending"``/``"descending"``, a sort-field
        definition, an explicit array of values in preferred order, or
        ``None`` for no sort.  **Default value:** ``"ascending"``.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit (e.g. ``year``, ``yearmonth``, ``month``, ``hours``) for a
        temporal field, or a temporal field cast as ordinal.
    title : anyOf(:class:`Text`, None)
        Title for the field; ``None`` removes the title.  Defaults to a name
        derived from the field and its transformation functions.
    type : :class:`StandardType`
        Measurement type (``"quantitative"``, ``"temporal"``, ``"ordinal"``
        or ``"nominal"``).  Vega-Lite infers it in many cases; see the
        type documentation for the inference rules.

    See https://vega.github.io/vega-lite/docs/facet.html for the full
    attribute semantics.
    """
    _schema = {'$ref': '#/definitions/FacetFieldDef<FieldName>'}

    def __init__(self, aggregate=Undefined, band=Undefined, bin=Undefined, field=Undefined,
                 header=Undefined, sort=Undefined, timeUnit=Undefined, title=Undefined,
                 type=Undefined, **kwds):
        # Every option is forwarded verbatim to the generic schema wrapper.
        super(FacetFieldDefFieldName, self).__init__(
            aggregate=aggregate, band=band, bin=bin, field=field, header=header,
            sort=sort, timeUnit=timeUnit, title=title, type=type, **kwds)
class FacetMapping(VegaLiteSchema):
    """FacetMapping schema wrapper.

    Mapping(required=[])

    Explicit row/column facet channels for trellis plots.

    Attributes
    ----------
    column : :class:`FacetFieldDef`
        Field definition for the horizontal facet of trellis plots.
    row : :class:`FacetFieldDef`
        Field definition for the vertical facet of trellis plots.
    """
    _schema = {'$ref': '#/definitions/FacetMapping'}

    def __init__(self, column=Undefined, row=Undefined, **kwds):
        # Forward both facet channels to the generic schema wrapper.
        super(FacetMapping, self).__init__(column=column, row=row, **kwds)
class FacetMappingFieldName(VegaLiteSchema):
    """FacetMappingFieldName schema wrapper.

    Mapping(required=[])

    Variant of :class:`FacetMapping` whose channels use
    :class:`FacetFieldDefFieldName` (plain field names, no repeat refs).

    Attributes
    ----------
    column : :class:`FacetFieldDefFieldName`
        Field definition for the horizontal facet of trellis plots.
    row : :class:`FacetFieldDefFieldName`
        Field definition for the vertical facet of trellis plots.
    """
    _schema = {'$ref': '#/definitions/FacetMapping<FieldName>'}

    def __init__(self, column=Undefined, row=Undefined, **kwds):
        # Forward both facet channels to the generic schema wrapper.
        super(FacetMappingFieldName, self).__init__(column=column, row=row, **kwds)
class FacetedEncoding(VegaLiteSchema):
    """FacetedEncoding schema wrapper.

    Mapping(required=[])

    The full set of encoding channels available on a (possibly faceted)
    unit specification.  Channels fall into a few groups:

    * **Position**: ``x``, ``y``, ``x2``, ``y2``, ``xError``, ``xError2``,
      ``yError``, ``yError2`` (cartesian), ``theta``, ``theta2``,
      ``radius``, ``radius2`` (polar/arc), ``latitude``, ``latitude2``,
      ``longitude``, ``longitude2`` (geographic projection).
    * **Mark property**: ``angle``, ``color``, ``fill``, ``stroke``,
      ``opacity``, ``fillOpacity``, ``strokeOpacity``, ``strokeDash``,
      ``strokeWidth``, ``shape``, ``size``.  ``fill``/``stroke`` take
      precedence over ``color`` when both are specified.
    * **Text / hyperlink**: ``text`` (text-mark content), ``tooltip``,
      ``description`` (ARIA accessibility), ``href`` (click URL),
      ``url`` (image-mark source).
    * **Facet**: ``facet`` (flexible wrapping facet) and the explicit
      trellis channels ``row`` and ``column``; if ``row`` or ``column`` is
      specified, ``facet`` is ignored.
    * **Other**: ``detail`` (extra grouping without a visual channel),
      ``key`` (unique key for data binding / object constancy) and
      ``order`` (stack order, line point order, or layer order).

    See https://vega.github.io/vega-lite/docs/encoding.html for the precise
    type and semantics of each channel.
    """
    _schema = {'$ref': '#/definitions/FacetedEncoding'}

    def __init__(self, angle=Undefined, color=Undefined, column=Undefined, description=Undefined,
                 detail=Undefined, facet=Undefined, fill=Undefined, fillOpacity=Undefined,
                 href=Undefined, key=Undefined, latitude=Undefined, latitude2=Undefined,
                 longitude=Undefined, longitude2=Undefined, opacity=Undefined, order=Undefined,
                 radius=Undefined, radius2=Undefined, row=Undefined, shape=Undefined,
                 size=Undefined, stroke=Undefined, strokeDash=Undefined, strokeOpacity=Undefined,
                 strokeWidth=Undefined, text=Undefined, theta=Undefined, theta2=Undefined,
                 tooltip=Undefined, url=Undefined, x=Undefined, x2=Undefined, xError=Undefined,
                 xError2=Undefined, y=Undefined, y2=Undefined, yError=Undefined,
                 yError2=Undefined, **kwds):
        # Every channel is forwarded verbatim to the generic schema wrapper;
        # unset channels stay Undefined and are omitted from the spec.
        super(FacetedEncoding, self).__init__(
            angle=angle, color=color, column=column, description=description,
            detail=detail, facet=facet, fill=fill, fillOpacity=fillOpacity,
            href=href, key=key, latitude=latitude, latitude2=latitude2,
            longitude=longitude, longitude2=longitude2, opacity=opacity,
            order=order, radius=radius, radius2=radius2, row=row, shape=shape,
            size=size, stroke=stroke, strokeDash=strokeDash,
            strokeOpacity=strokeOpacity, strokeWidth=strokeWidth, text=text,
            theta=theta, theta2=theta2, tooltip=tooltip, url=url, x=x, x2=x2,
            xError=xError, xError2=xError2, y=y, y2=y2, yError=yError,
            yError2=yError2, **kwds)
class Field(VegaLiteSchema):
    """Field schema wrapper.

    anyOf(:class:`FieldName`, :class:`RepeatRef`)
    """
    # JSON-schema fragment this wrapper validates against.
    _schema = {'$ref': '#/definitions/Field'}

    def __init__(self, *args, **kwds):
        # No local state: defer entirely to the schema base class.
        super().__init__(*args, **kwds)
class FieldDefWithoutScale(VegaLiteSchema):
    """FieldDefWithoutScale schema wrapper.

    Mapping(required=[])

    Definition object for a data field, its type and transformation of an encoding
    channel.

    Attributes
    ----------
    aggregate : :class:`Aggregate`
        Aggregation function for the field (e.g. ``"mean"``, ``"sum"``, ``"median"``,
        ``"min"``, ``"max"``, ``"count"``). **Default value:** ``undefined`` (None).
    band : float
        For rect-based marks ( ``rect``, ``bar``, and ``image`` ), mark size relative
        to the bandwidth of band scales, bins or time units; for other marks, relative
        position on a band of a stacked, binned, time unit or band scale.
    bin : anyOf(boolean, :class:`BinParams`, string, None)
        A flag for binning a ``quantitative`` field, an object defining binning
        parameters, or ``"binned"`` to indicate the data for ``x`` or ``y`` are
        binned before being imported into Vega-Lite. **Default value:** ``false``.
    field : :class:`Field`
        **Required.** The name of the field from which to pull a data value, or an
        object defining iterated values from the `repeat
        <https://vega.github.io/vega-lite/docs/repeat.html>`__ operator. Not required
        if ``aggregate`` is ``count``.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit (e.g. ``year``, ``yearmonth``, ``month``, ``hours``) for a temporal
        field, or a temporal field that gets casted as ordinal.
    title : anyOf(:class:`Text`, None)
        A title for the field; ``null`` removes the title. **Default value:** derived
        from the field's name and transformation function.
    type : :class:`StandardType`
        The type of measurement ( ``"quantitative"``, ``"temporal"``, ``"ordinal"``,
        or ``"nominal"`` ) for the encoded field or constant value ( ``datum`` );
        ``"geojson"`` is also accepted for ``geoshape`` encodings. Secondary channels
        (e.g. ``x2``, ``y2``) do not take a ``type``.

    See the Vega-Lite documentation (https://vega.github.io/vega-lite/docs/) for the
    full description of each property.
    """
    _schema = {'$ref': '#/definitions/FieldDefWithoutScale'}

    def __init__(self, aggregate=Undefined, band=Undefined, bin=Undefined, field=Undefined,
                 timeUnit=Undefined, title=Undefined, type=Undefined, **kwds):
        # Bundle the declared properties, then hand everything to the base class.
        props = dict(aggregate=aggregate, band=band, bin=bin, field=field,
                     timeUnit=timeUnit, title=title, type=type)
        super().__init__(**props, **kwds)
class FieldName(Field):
    """FieldName schema wrapper.

    string
    """
    _schema = {'$ref': '#/definitions/FieldName'}

    def __init__(self, *args):
        # A plain string value; positional-only pass-through to Field.
        super().__init__(*args)
class FieldOrDatumDefWithConditionStringFieldDefstring(VegaLiteSchema):
    """FieldOrDatumDefWithConditionStringFieldDefstring schema wrapper.

    Mapping(required=[])

    A FieldDef with Condition :raw-html:`<ValueDef>` { condition: {value: ...}, field:
    ..., ... }

    Attributes
    ----------
    aggregate : :class:`Aggregate`
        Aggregation function for the field (e.g. ``"mean"``, ``"sum"``, ``"median"``,
        ``"min"``, ``"max"``, ``"count"``).
    band : float
        For rect-based marks, mark size relative to the bandwidth of band scales,
        bins or time units; for other marks, relative position on a band of a
        stacked, binned, time unit or band scale.
    bin : anyOf(boolean, :class:`BinParams`, string, None)
        A flag for binning a ``quantitative`` field, an object defining binning
        parameters, or ``"binned"`` for pre-binned data. **Default value:** ``false``.
    condition : anyOf(:class:`ConditionalValueDefstringExprRef`,
    List(:class:`ConditionalValueDefstringExprRef`))
        One or more value definition(s) with a selection or a test predicate. A field
        definition's ``condition`` may only contain conditional *value* definitions,
        since at most one encoded field is allowed per encoding channel.
    field : :class:`Field`
        **Required.** The name of the field from which to pull a data value, or an
        object defining iterated values from the ``repeat`` operator. Not required if
        ``aggregate`` is ``count``.
    format : anyOf(string, :class:`Dictunknown`)
        Text formatting pattern for guide labels and text marks; a D3 number format
        pattern for ``"number"`` format type, a D3 time format pattern for ``"time"``.
        With a custom ``formatType`` this value is passed to the registered function.
    formatType : string
        The format type for labels: ``"number"``, ``"time"``, or a registered custom
        format type.
    labelExpr : string
        Vega expression for customizing labels text; the label text and value are
        available via the ``label`` and ``value`` properties of the backing ``datum``.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit (e.g. ``year``, ``yearmonth``, ``month``, ``hours``) for a temporal
        field, or a temporal field that gets casted as ordinal.
    title : anyOf(:class:`Text`, None)
        A title for the field; ``null`` removes the title. **Default value:** derived
        from the field's name and transformation function.
    type : :class:`StandardType`
        The type of measurement ( ``"quantitative"``, ``"temporal"``, ``"ordinal"``,
        or ``"nominal"`` ) for the encoded field or constant value ( ``datum`` ).

    See the Vega-Lite documentation (https://vega.github.io/vega-lite/docs/) for the
    full description of each property.
    """
    _schema = {'$ref': '#/definitions/FieldOrDatumDefWithCondition<StringFieldDef,string>'}

    def __init__(self, aggregate=Undefined, band=Undefined, bin=Undefined, condition=Undefined,
                 field=Undefined, format=Undefined, formatType=Undefined, labelExpr=Undefined,
                 timeUnit=Undefined, title=Undefined, type=Undefined, **kwds):
        # Bundle the declared properties, then hand everything to the base class.
        props = dict(aggregate=aggregate, band=band, bin=bin, condition=condition,
                     field=field, format=format, formatType=formatType,
                     labelExpr=labelExpr, timeUnit=timeUnit, title=title, type=type)
        super().__init__(**props, **kwds)
class Fit(VegaLiteSchema):
    """Fit schema wrapper.

    anyOf(:class:`GeoJsonFeature`, :class:`GeoJsonFeatureCollection`,
    List(:class:`GeoJsonFeature`))
    """
    _schema = {'$ref': '#/definitions/Fit'}

    def __init__(self, *args, **kwds):
        # Pure pass-through; the base class does all the work.
        super().__init__(*args, **kwds)
class FontStyle(VegaLiteSchema):
    """FontStyle schema wrapper.

    string
    """
    _schema = {'$ref': '#/definitions/FontStyle'}

    def __init__(self, *args):
        # A plain string value; positional-only pass-through.
        super().__init__(*args)
class FontWeight(VegaLiteSchema):
    """FontWeight schema wrapper.

    enum('normal', 'bold', 'lighter', 'bolder', 100, 200, 300, 400, 500, 600, 700, 800, 900)
    """
    _schema = {'$ref': '#/definitions/FontWeight'}

    def __init__(self, *args):
        # An enum value; positional-only pass-through.
        super().__init__(*args)
class Generator(Data):
    """Generator schema wrapper.

    anyOf(:class:`SequenceGenerator`, :class:`SphereGenerator`, :class:`GraticuleGenerator`)
    """
    _schema = {'$ref': '#/definitions/Generator'}

    def __init__(self, *args, **kwds):
        # Pure pass-through to the Data base class.
        super().__init__(*args, **kwds)
class GeoJsonFeature(Fit):
    """GeoJsonFeature schema wrapper.

    Any
    """
    _schema = {'$ref': '#/definitions/GeoJsonFeature'}

    def __init__(self, *args, **kwds):
        # Accepts any value; defer to the Fit base class.
        super().__init__(*args, **kwds)
class GeoJsonFeatureCollection(Fit):
    """GeoJsonFeatureCollection schema wrapper.

    Any
    """
    _schema = {'$ref': '#/definitions/GeoJsonFeatureCollection'}

    def __init__(self, *args, **kwds):
        # Accepts any value; defer to the Fit base class.
        super().__init__(*args, **kwds)
class Gradient(VegaLiteSchema):
    """Gradient schema wrapper.

    anyOf(:class:`LinearGradient`, :class:`RadialGradient`)
    """
    _schema = {'$ref': '#/definitions/Gradient'}

    def __init__(self, *args, **kwds):
        # Pure pass-through; the base class does all the work.
        super().__init__(*args, **kwds)
class GradientStop(VegaLiteSchema):
    """GradientStop schema wrapper.

    Mapping(required=[offset, color])

    Attributes
    ----------
    color : :class:`Color`
        The color value at this point in the gradient.
    offset : float
        The offset fraction for the color stop, indicating its position within the
        gradient.
    """
    _schema = {'$ref': '#/definitions/GradientStop'}

    def __init__(self, color=Undefined, offset=Undefined, **kwds):
        # Bundle the two declared stop properties, then defer to the base class.
        stop = dict(color=color, offset=offset)
        super().__init__(**stop, **kwds)
class GraticuleGenerator(Generator):
    """GraticuleGenerator schema wrapper.

    Mapping(required=[graticule])

    Attributes
    ----------
    graticule : anyOf(boolean, :class:`GraticuleParams`)
        Generate graticule GeoJSON data for geographic reference lines.
    name : string
        Provide a placeholder name and bind data at runtime.
    """
    _schema = {'$ref': '#/definitions/GraticuleGenerator'}

    def __init__(self, graticule=Undefined, name=Undefined, **kwds):
        # Bundle the declared properties, then defer to the Generator base class.
        params = dict(graticule=graticule, name=name)
        super().__init__(**params, **kwds)
class GraticuleParams(VegaLiteSchema):
    """GraticuleParams schema wrapper.

    Mapping(required=[])

    Attributes
    ----------
    extent : :class:`Vector2Vector2number`
        Sets both the major and minor extents to the same values.
    extentMajor : :class:`Vector2Vector2number`
        The major extent of the graticule as a two-element array of coordinates.
    extentMinor : :class:`Vector2Vector2number`
        The minor extent of the graticule as a two-element array of coordinates.
    precision : float
        The precision of the graticule in degrees. **Default value:** ``2.5``
    step : :class:`Vector2number`
        Sets both the major and minor step angles to the same values.
    stepMajor : :class:`Vector2number`
        The major step angles of the graticule. **Default value:** ``[90, 360]``
    stepMinor : :class:`Vector2number`
        The minor step angles of the graticule. **Default value:** ``[10, 10]``
    """
    _schema = {'$ref': '#/definitions/GraticuleParams'}

    def __init__(self, extent=Undefined, extentMajor=Undefined, extentMinor=Undefined,
                 precision=Undefined, step=Undefined, stepMajor=Undefined, stepMinor=Undefined, **kwds):
        # Bundle the declared properties, then defer to the base class.
        params = dict(extent=extent, extentMajor=extentMajor, extentMinor=extentMinor,
                      precision=precision, step=step, stepMajor=stepMajor,
                      stepMinor=stepMinor)
        super().__init__(**params, **kwds)
class Header(VegaLiteSchema):
    """Header schema wrapper.

    Mapping(required=[])

    Headers of row / column channels for faceted plots.

    Attributes
    ----------
    format : anyOf(string, :class:`Dictunknown`)
        Text formatting pattern for guide labels and text marks; a D3 number format
        pattern for ``"number"`` format type, a D3 time format pattern for ``"time"``.
    formatType : string
        The format type for labels: ``"number"``, ``"time"``, or a registered custom
        format type.
    labelAlign : anyOf(:class:`Align`, :class:`ExprRef`)
        Horizontal text alignment of header labels: ``"left"``, ``"center"``, or
        ``"right"``.
    labelAnchor : :class:`TitleAnchor`
        The anchor position for placing the labels: ``"start"``, ``"middle"``, or
        ``"end"``.
    labelAngle : float
        The rotation angle of the header labels. **Default value:** ``0`` for column
        header, ``-90`` for row header.
    labelBaseline : anyOf(:class:`TextBaseline`, :class:`ExprRef`)
        The vertical text baseline for the header labels: ``"alphabetic"`` (default),
        ``"top"``, ``"middle"``, ``"bottom"``, ``"line-top"``, or ``"line-bottom"``.
    labelColor : anyOf(:class:`Color`, :class:`ExprRef`)
        The color of the header label (hex code or regular color name).
    labelExpr : string
        Vega expression for customizing labels; label text and value are available
        via the ``label`` and ``value`` properties of the header's backing ``datum``.
    labelFont : anyOf(string, :class:`ExprRef`)
        The font of the header label.
    labelFontSize : anyOf(float, :class:`ExprRef`)
        The font size of the header label, in pixels.
    labelFontStyle : anyOf(:class:`FontStyle`, :class:`ExprRef`)
        The font style of the header label.
    labelFontWeight : anyOf(:class:`FontWeight`, :class:`ExprRef`)
        The font weight of the header label.
    labelLimit : anyOf(float, :class:`ExprRef`)
        The maximum length of the header label in pixels; text is truncated past it.
        **Default value:** ``0`` (no limit).
    labelLineHeight : anyOf(float, :class:`ExprRef`)
        Line height in pixels for multi-line header labels or title text with
        ``"line-top"`` or ``"line-bottom"`` baseline.
    labelOrient : :class:`Orient`
        The orientation of the header label: ``"top"``, ``"bottom"``, ``"left"`` or
        ``"right"``.
    labelPadding : anyOf(float, :class:`ExprRef`)
        The padding, in pixels, between facet header's label and the plot.
        **Default value:** ``10``
    labels : boolean
        Whether labels should be included as part of the header.
        **Default value:** ``true``.
    orient : :class:`Orient`
        Shortcut for setting both labelOrient and titleOrient.
    title : anyOf(:class:`Text`, None)
        A title for the field; ``null`` removes the title. **Default value:** derived
        from the field's name and transformation function.
    titleAlign : anyOf(:class:`Align`, :class:`ExprRef`)
        Horizontal text alignment (to the anchor) of header titles.
    titleAnchor : :class:`TitleAnchor`
        The anchor position for placing the title: ``"start"``, ``"middle"``, or
        ``"end"``.
    titleAngle : float
        The rotation angle of the header title. **Default value:** ``0``.
    titleBaseline : anyOf(:class:`TextBaseline`, :class:`ExprRef`)
        The vertical text baseline for the header title: ``"alphabetic"`` (default),
        ``"top"``, ``"middle"``, ``"bottom"``, ``"line-top"``, or ``"line-bottom"``.
        **Default value:** ``"middle"``
    titleColor : anyOf(:class:`Color`, :class:`ExprRef`)
        Color of the header title (hex code or regular color name).
    titleFont : anyOf(string, :class:`ExprRef`)
        Font of the header title (e.g. ``"Helvetica Neue"``).
    titleFontSize : anyOf(float, :class:`ExprRef`)
        Font size of the header title.
    titleFontStyle : anyOf(:class:`FontStyle`, :class:`ExprRef`)
        The font style of the header title.
    titleFontWeight : anyOf(:class:`FontWeight`, :class:`ExprRef`)
        Font weight of the header title; a string (e.g. ``"bold"``, ``"normal"``) or
        a number ( ``100``–``900`` where ``"normal"`` = ``400``, ``"bold"`` = ``700`` ).
    titleLimit : anyOf(float, :class:`ExprRef`)
        The maximum length of the header title in pixels; text is truncated past it.
        **Default value:** ``0`` (no limit).
    titleLineHeight : anyOf(float, :class:`ExprRef`)
        Line height in pixels for multi-line header title text or title text with
        ``"line-top"`` or ``"line-bottom"`` baseline.
    titleOrient : :class:`Orient`
        The orientation of the header title: ``"top"``, ``"bottom"``, ``"left"`` or
        ``"right"``.
    titlePadding : anyOf(float, :class:`ExprRef`)
        The padding, in pixels, between facet header's title and the label.
        **Default value:** ``10``

    See https://vega.github.io/vega-lite/docs/facet.html#header for full details.
    """
    _schema = {'$ref': '#/definitions/Header'}

    def __init__(self, format=Undefined, formatType=Undefined, labelAlign=Undefined,
                 labelAnchor=Undefined, labelAngle=Undefined, labelBaseline=Undefined,
                 labelColor=Undefined, labelExpr=Undefined, labelFont=Undefined,
                 labelFontSize=Undefined, labelFontStyle=Undefined, labelFontWeight=Undefined,
                 labelLimit=Undefined, labelLineHeight=Undefined, labelOrient=Undefined,
                 labelPadding=Undefined, labels=Undefined, orient=Undefined, title=Undefined,
                 titleAlign=Undefined, titleAnchor=Undefined, titleAngle=Undefined,
                 titleBaseline=Undefined, titleColor=Undefined, titleFont=Undefined,
                 titleFontSize=Undefined, titleFontStyle=Undefined, titleFontWeight=Undefined,
                 titleLimit=Undefined, titleLineHeight=Undefined, titleOrient=Undefined,
                 titlePadding=Undefined, **kwds):
        # Bundle the declared header properties, then defer to the base class.
        props = dict(
            format=format, formatType=formatType, labelAlign=labelAlign,
            labelAnchor=labelAnchor, labelAngle=labelAngle,
            labelBaseline=labelBaseline, labelColor=labelColor,
            labelExpr=labelExpr, labelFont=labelFont, labelFontSize=labelFontSize,
            labelFontStyle=labelFontStyle, labelFontWeight=labelFontWeight,
            labelLimit=labelLimit, labelLineHeight=labelLineHeight,
            labelOrient=labelOrient, labelPadding=labelPadding, labels=labels,
            orient=orient, title=title, titleAlign=titleAlign,
            titleAnchor=titleAnchor, titleAngle=titleAngle,
            titleBaseline=titleBaseline, titleColor=titleColor,
            titleFont=titleFont, titleFontSize=titleFontSize,
            titleFontStyle=titleFontStyle, titleFontWeight=titleFontWeight,
            titleLimit=titleLimit, titleLineHeight=titleLineHeight,
            titleOrient=titleOrient, titlePadding=titlePadding,
        )
        super().__init__(**props, **kwds)
class HeaderConfig(VegaLiteSchema):
    """HeaderConfig schema wrapper

    Mapping(required=[])

    Configuration for facet headers.  The properties fall into three groups:

    * ``format`` / ``formatType`` -- a d3 number/time format pattern (or a
      registered custom format type) applied to header labels.
    * ``label*`` properties -- alignment, anchor, angle, baseline, color,
      expression, font, font size, font style, font weight, limit, line
      height, orient and padding of the header labels, plus the ``labels``
      boolean flag toggling label display (default ``true``).
    * ``title*`` properties -- the corresponding settings for the header
      title, plus ``title: None`` to disable the title entirely.

    ``orient`` is a shortcut that sets both ``labelOrient`` and
    ``titleOrient``.  See the Vega-Lite header documentation
    (https://vega.github.io/vega-lite/docs/header.html) for the full
    description of each property.
    """
    _schema = {'$ref': '#/definitions/HeaderConfig'}

    def __init__(self, format=Undefined, formatType=Undefined, labelAlign=Undefined,
                 labelAnchor=Undefined, labelAngle=Undefined, labelBaseline=Undefined,
                 labelColor=Undefined, labelExpr=Undefined, labelFont=Undefined,
                 labelFontSize=Undefined, labelFontStyle=Undefined, labelFontWeight=Undefined,
                 labelLimit=Undefined, labelLineHeight=Undefined, labelOrient=Undefined,
                 labelPadding=Undefined, labels=Undefined, orient=Undefined, title=Undefined,
                 titleAlign=Undefined, titleAnchor=Undefined, titleAngle=Undefined,
                 titleBaseline=Undefined, titleColor=Undefined, titleFont=Undefined,
                 titleFontSize=Undefined, titleFontStyle=Undefined, titleFontWeight=Undefined,
                 titleLimit=Undefined, titleLineHeight=Undefined, titleOrient=Undefined,
                 titlePadding=Undefined, **kwds):
        # Collect the named schema properties and any extra keywords into one
        # mapping, then hand everything to the schema base class.
        params = dict(format=format, formatType=formatType, labelAlign=labelAlign,
                      labelAnchor=labelAnchor, labelAngle=labelAngle,
                      labelBaseline=labelBaseline, labelColor=labelColor,
                      labelExpr=labelExpr, labelFont=labelFont, labelFontSize=labelFontSize,
                      labelFontStyle=labelFontStyle, labelFontWeight=labelFontWeight,
                      labelLimit=labelLimit, labelLineHeight=labelLineHeight,
                      labelOrient=labelOrient, labelPadding=labelPadding, labels=labels,
                      orient=orient, title=title, titleAlign=titleAlign,
                      titleAnchor=titleAnchor, titleAngle=titleAngle,
                      titleBaseline=titleBaseline, titleColor=titleColor,
                      titleFont=titleFont, titleFontSize=titleFontSize,
                      titleFontStyle=titleFontStyle, titleFontWeight=titleFontWeight,
                      titleLimit=titleLimit, titleLineHeight=titleLineHeight,
                      titleOrient=titleOrient, titlePadding=titlePadding, **kwds)
        super(HeaderConfig, self).__init__(**params)
class HexColor(Color):
    """HexColor schema wrapper

    string

    A color value expressed as a hexadecimal string (e.g. ``"#ff0000"``).
    """
    _schema = {'$ref': '#/definitions/HexColor'}
    def __init__(self, *args):
        super(HexColor, self).__init__(*args)
class ImputeMethod(VegaLiteSchema):
    """ImputeMethod schema wrapper

    enum('value', 'median', 'max', 'min', 'mean')

    The method used to compute the field value of imputed data objects
    (see :class:`ImputeParams`).
    """
    _schema = {'$ref': '#/definitions/ImputeMethod'}
    def __init__(self, *args):
        super(ImputeMethod, self).__init__(*args)
class ImputeParams(VegaLiteSchema):
    """ImputeParams schema wrapper

    Mapping(required=[])

    Parameters controlling value imputation.

    Attributes
    ----------
    frame : List([anyOf(None, float), anyOf(None, float)])
        Two-element window specification controlling which data objects the
        imputation ``method`` is applied over.  Entries are offsets from the
        current data object, or ``null`` for an unbounded window on that
        side; e.g. ``[-5, 5]`` includes five objects before and after.
        **Default value:** ``[null, null]`` (all objects).
    keyvals : anyOf(List(Any), :class:`ImputeSequence`)
        Key values considered for imputation, given as an explicit array or a
        number-sequence definition.  Used in addition to the key values
        observed in the input data; if omitted, values are derived from all
        unique values of the key field.  Required when there is no impute
        grouping.
    method : :class:`ImputeMethod`
        One of ``"value"``, ``"mean"``, ``"median"``, ``"max"`` or ``"min"``.
        **Default value:** ``"value"``
    value : Any
        The field value to use when ``method`` is ``"value"``.
    """
    _schema = {'$ref': '#/definitions/ImputeParams'}

    def __init__(self, frame=Undefined, keyvals=Undefined, method=Undefined, value=Undefined, **kwds):
        # Merge the named schema properties with any extra keywords before
        # delegating to the schema base class.
        params = dict(frame=frame, keyvals=keyvals, method=method, value=value, **kwds)
        super(ImputeParams, self).__init__(**params)
class ImputeSequence(VegaLiteSchema):
    """ImputeSequence schema wrapper

    Mapping(required=[stop])

    A number-sequence definition used to generate imputation key values.

    Attributes
    ----------
    stop : float
        Ending value of the sequence (exclusive).
    start : float
        Starting value of the sequence.  **Default value:** ``0``
    step : float
        Step between sequence entries.  **Default value:** ``1``, or ``-1``
        when ``stop < start``
    """
    _schema = {'$ref': '#/definitions/ImputeSequence'}

    def __init__(self, stop=Undefined, start=Undefined, step=Undefined, **kwds):
        # Merge named properties with extra keywords and delegate upward.
        params = dict(stop=stop, start=start, step=step, **kwds)
        super(ImputeSequence, self).__init__(**params)
class InlineData(DataSource):
    """InlineData schema wrapper

    Mapping(required=[values])

    Attributes
    ----------
    values : :class:`InlineDataset`
        The full data set, included inline: an array of objects or primitive
        values, an object, or a string.  Arrays of primitive values are
        ingested as objects with a ``data`` property; strings are parsed
        according to the specified format type.
    format : :class:`DataFormat`
        An object specifying the format used to parse the data.
    name : string
        A placeholder name, allowing data to be bound at runtime.
    """
    _schema = {'$ref': '#/definitions/InlineData'}

    def __init__(self, values=Undefined, format=Undefined, name=Undefined, **kwds):
        # Merge named properties with extra keywords and delegate upward.
        params = dict(values=values, format=format, name=name, **kwds)
        super(InlineData, self).__init__(**params)
class InlineDataset(VegaLiteSchema):
    """InlineDataset schema wrapper

    anyOf(List(float), List(string), List(boolean), List(Mapping(required=[])), string,
    Mapping(required=[]))

    The shapes of data that may be supplied inline via the ``values``
    property of :class:`InlineData`.
    """
    _schema = {'$ref': '#/definitions/InlineDataset'}
    def __init__(self, *args, **kwds):
        super(InlineDataset, self).__init__(*args, **kwds)
class InputBinding(Binding):
    """InputBinding schema wrapper

    Mapping(required=[])

    An input-element binding.  The attribute names mirror common HTML input
    attributes (presumably forwarded to the bound element -- the schema does
    not document them individually).

    Attributes
    ----------
    autocomplete : string
    debounce : float
    element : :class:`Element`
    input : string
    name : string
    placeholder : string
    type : string
    """
    _schema = {'$ref': '#/definitions/InputBinding'}

    def __init__(self, autocomplete=Undefined, debounce=Undefined, element=Undefined, input=Undefined,
                 name=Undefined, placeholder=Undefined, type=Undefined, **kwds):
        # Merge named properties with extra keywords and delegate upward.
        params = dict(autocomplete=autocomplete, debounce=debounce, element=element,
                      input=input, name=name, placeholder=placeholder, type=type, **kwds)
        super(InputBinding, self).__init__(**params)
class Interpolate(VegaLiteSchema):
    """Interpolate schema wrapper

    enum('basis', 'basis-open', 'basis-closed', 'bundle', 'cardinal', 'cardinal-open',
    'cardinal-closed', 'catmull-rom', 'linear', 'linear-closed', 'monotone', 'natural', 'step',
    'step-before', 'step-after')

    Interpolation method names; see the Vega-Lite mark documentation for the
    effect of each value.
    """
    _schema = {'$ref': '#/definitions/Interpolate'}
    def __init__(self, *args):
        super(Interpolate, self).__init__(*args)
class IntervalSelectionConfig(VegaLiteSchema):
    """IntervalSelectionConfig schema wrapper

    Mapping(required=[])

    Configuration for interval (brush) selections.

    Attributes
    ----------
    bind : string
        Establishes a two-way binding between the interval selection and the
        scales used within the same view, allowing interactive pan and zoom.
    clear : anyOf(:class:`Stream`, string, boolean)
        Event stream that clears the selection, or ``false`` to disable
        clearing.  **Default value:** ``dblclick``.
    empty : enum('all', 'none')
        Whether ``all`` data values (the default) or ``none`` lie within an
        empty selection.
    encodings : List(:class:`SingleDefUnitChannel`)
        Encoding channels whose data field values must match for a data tuple
        to fall within the selection.
    fields : List(:class:`FieldName`)
        Field names whose values must match for a data tuple to fall within
        the selection.
    init : :class:`SelectionInitIntervalMapping`
        Initial selection: a mapping between projected channels or field
        names and arrays of initial values.
    mark : :class:`BrushConfig`
        Appearance of the rectangle mark that depicts the interval extents.
    on : anyOf(:class:`Stream`, string)
        Vega event stream (object or selector) that triggers the selection;
        for intervals it must specify a start and end.
    resolve : :class:`SelectionResolution`
        Strategy for resolving selections' data queries across layered and
        multi-view displays.
    translate : anyOf(string, boolean)
        Whether (``true``/``false``) or via which event stream the interval
        may be moved back-and-forth.  **Default value:** ``true``
        (``[mousedown, window:mouseup] > window:mousemove!``).
    zoom : anyOf(string, boolean)
        Whether (``true``/``false``) or via which event stream the interval
        may be resized; only ``wheel`` events are currently supported.
        **Default value:** ``true`` (``wheel!``).

    See https://vega.github.io/vega-lite/docs/selection.html for details.
    """
    _schema = {'$ref': '#/definitions/IntervalSelectionConfig'}

    def __init__(self, bind=Undefined, clear=Undefined, empty=Undefined, encodings=Undefined,
                 fields=Undefined, init=Undefined, mark=Undefined, on=Undefined, resolve=Undefined,
                 translate=Undefined, zoom=Undefined, **kwds):
        # Merge named properties with extra keywords and delegate upward.
        params = dict(bind=bind, clear=clear, empty=empty, encodings=encodings,
                      fields=fields, init=init, mark=mark, on=on, resolve=resolve,
                      translate=translate, zoom=zoom, **kwds)
        super(IntervalSelectionConfig, self).__init__(**params)
class JoinAggregateFieldDef(VegaLiteSchema):
    """JoinAggregateFieldDef schema wrapper

    Mapping(required=[op, as])

    Attributes
    ----------
    op : :class:`AggregateOp`
        The aggregation operation to apply (e.g. ``"sum"``, ``"average"``,
        ``"count"``).
    field : :class:`FieldName`
        The data field to compute the aggregate over; may be omitted for
        operations such as ``"count"`` that do not operate over a field.
    as : :class:`FieldName`
        The output name for the join aggregate operation.  ``as`` is a
        reserved word in Python, so pass it through ``**kwds``, e.g.
        ``JoinAggregateFieldDef(op=..., **{'as': 'name'})``.
    """
    _schema = {'$ref': '#/definitions/JoinAggregateFieldDef'}

    def __init__(self, op=Undefined, field=Undefined, **kwds):
        # Merge named properties with extra keywords (including 'as') and
        # delegate upward.
        params = dict(op=op, field=field, **kwds)
        super(JoinAggregateFieldDef, self).__init__(**params)
class JsonDataFormat(DataFormat):
    """JsonDataFormat schema wrapper

    Mapping(required=[])

    Attributes
    ----------
    parse : anyOf(:class:`Parse`, None)
        If ``null``, disables spec-based type inference and relies on the
        data alone.  Otherwise, a parsing directive object mapping each field
        name to the desired data type: ``"number"``, ``"boolean"``,
        ``"date"`` (optionally with a d3-time-format pattern, e.g.
        ``"date:'%m%d%Y'"`` or ``"utc:'%m%d%Y'"`` for UTC), or ``null`` to
        leave the field unparsed.  ``"date"`` values are parsed with
        JavaScript's ``Date.parse()``.
    property : string
        The JSON property containing the desired data, for loaded files with
        surrounding structure or metadata.  For example ``"property":
        "values.features"`` retrieves ``json.values.features`` from the
        loaded JSON object.
    type : string
        Type of input data: ``"json"``, ``"csv"``, ``"tsv"``, ``"dsv"``.
        **Default value:** determined by the file extension, falling back to
        ``"json"`` when none is detected.
    """
    _schema = {'$ref': '#/definitions/JsonDataFormat'}

    def __init__(self, parse=Undefined, property=Undefined, type=Undefined, **kwds):
        # Merge named properties with extra keywords and delegate upward.
        params = dict(parse=parse, property=property, type=type, **kwds)
        super(JsonDataFormat, self).__init__(**params)
class LabelOverlap(VegaLiteSchema):
    """LabelOverlap schema wrapper

    anyOf(boolean, string, string)

    Strategy for handling overlapping labels: a boolean, or one of two
    schema-defined string literals (not enumerated in this wrapper).
    """
    _schema = {'$ref': '#/definitions/LabelOverlap'}
    def __init__(self, *args, **kwds):
        super(LabelOverlap, self).__init__(*args, **kwds)
class LatLongDef(VegaLiteSchema):
    """LatLongDef schema wrapper

    anyOf(:class:`LatLongFieldDef`, :class:`DatumDef`, :class:`NumericValueDef`)

    A latitude/longitude channel definition: a field definition, a datum
    definition, or a numeric value definition.
    """
    _schema = {'$ref': '#/definitions/LatLongDef'}
    def __init__(self, *args, **kwds):
        super(LatLongDef, self).__init__(*args, **kwds)
class LatLongFieldDef(LatLongDef):
    """LatLongFieldDef schema wrapper

    Mapping(required=[])

    A field definition for latitude/longitude channels.

    Attributes
    ----------
    aggregate : :class:`Aggregate`
        Aggregation function for the field (e.g. ``"mean"``, ``"sum"``,
        ``"median"``, ``"min"``, ``"max"``, ``"count"``).
        **Default value:** ``undefined`` (None)
    band : float
        For rect-based marks (``rect``, ``bar``, ``image``), mark size
        relative to the band/bin/time-unit interval (``1`` = full interval,
        ``0.5`` = half).  For other marks, relative position on a band
        (``0`` = start, ``0.5`` = middle).
    bin : None
        Per this definition's schema only ``null`` is accepted here.
    field : :class:`Field`
        **Required.** The name of the field from which to pull a data value,
        or an object defining iterated values from the ``repeat`` operator.
        Dots and brackets access nested objects (e.g. ``"foo.bar"``,
        ``"foo['bar']"``); escape them with a backslash when they are literal
        characters in a flat field name.  Not required when ``aggregate`` is
        ``count``.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit (e.g. ``year``, ``yearmonth``, ``month``, ``hours``) for a
        temporal field.  **Default value:** ``undefined`` (None)
    title : anyOf(:class:`Text`, None)
        A title for the field; ``null`` removes the title.  Defaults to a
        title derived from the field name and any transformation function
        (``aggregate``, ``bin``, ``timeUnit``); axis/header/legend titles
        take precedence when both are defined.
    type : string
        The type of measurement (``"quantitative"``, ``"temporal"``,
        ``"ordinal"`` or ``"nominal"``); latitude/longitude channels default
        to ``"quantitative"``.  See
        https://vega.github.io/vega-lite/docs/type.html for the full
        inference rules.
    """
    _schema = {'$ref': '#/definitions/LatLongFieldDef'}

    def __init__(self, aggregate=Undefined, band=Undefined, bin=Undefined, field=Undefined,
                 timeUnit=Undefined, title=Undefined, type=Undefined, **kwds):
        # Merge named properties with extra keywords and delegate upward.
        params = dict(aggregate=aggregate, band=band, bin=bin, field=field,
                      timeUnit=timeUnit, title=title, type=type, **kwds)
        super(LatLongFieldDef, self).__init__(**params)
class LayerRepeatMapping(VegaLiteSchema):
    """LayerRepeatMapping schema wrapper

    Mapping(required=[layer])

    Attributes
    ----------
    layer : List(string)
        Fields to be repeated as layers.
    column : List(string)
        Fields to be repeated horizontally.
    row : List(string)
        Fields to be repeated vertically.
    """
    _schema = {'$ref': '#/definitions/LayerRepeatMapping'}

    def __init__(self, layer=Undefined, column=Undefined, row=Undefined, **kwds):
        # Merge named properties with extra keywords and delegate upward.
        params = dict(layer=layer, column=column, row=row, **kwds)
        super(LayerRepeatMapping, self).__init__(**params)
class LayoutAlign(VegaLiteSchema):
    """LayoutAlign schema wrapper

    enum('all', 'each', 'none')

    An alignment mode used by composed-view layout (see the Vega-Lite
    composition documentation for the meaning of each value).
    """
    _schema = {'$ref': '#/definitions/LayoutAlign'}
    def __init__(self, *args):
        super(LayoutAlign, self).__init__(*args)
class Legend(VegaLiteSchema):
    """Legend schema wrapper.

    Mapping(required=[])

    Properties of a legend, or a boolean flag for determining whether to show
    it.  Every keyword parameter corresponds one-to-one to a property of the
    Vega-Lite ``Legend`` definition; for the authoritative description of each
    property see the `Vega-Lite legend documentation
    <https://vega.github.io/vega-lite/docs/legend.html>`__.

    Commonly used properties:

    direction : :class:`Orientation`
        The direction of the legend, one of ``"vertical"`` or
        ``"horizontal"``.  Defaults depend on ``orient``.
    orient : :class:`LegendOrient`
        Where the legend is positioned within the scene, one of ``"left"``,
        ``"right"``, ``"top"``, ``"bottom"``, the four corner variants, or
        ``"none"``.  **Default value:** ``"right"``
    format : anyOf(string, :class:`Dictunknown`)
        D3 number/time format pattern for guide labels (or, with a custom
        ``formatType``, the value passed to the registered formatter).
    formatType : string
        ``"number"``, ``"time"``, or a registered custom format type.
    tickMinStep : anyOf(float, :class:`ExprRef`)
        Minimum desired step between legend ticks in domain units; adjusts
        ``tickCount`` if necessary.
    title : anyOf(:class:`Text`, None)
        A title for the field; ``None`` removes the title.
    type : enum('symbol', 'gradient')
        ``"symbol"`` builds a discrete legend, ``"gradient"`` a continuous
        color gradient.
    values : anyOf(List(float), List(string), List(boolean),
            List(:class:`DateTime`), :class:`ExprRef`)
        Explicitly set the visible legend values.
    zindex : float
        Non-negative z-index; ``0`` draws the legend behind all chart
        elements, ``1`` in front.

    The remaining ``label*``, ``title*``, ``symbol*``, ``gradient*``,
    padding/offset and color properties are forwarded unchanged to the
    schema.
    """
    _schema = {'$ref': '#/definitions/Legend'}

    def __init__(self, aria=Undefined, clipHeight=Undefined, columnPadding=Undefined, columns=Undefined,
                 cornerRadius=Undefined, description=Undefined, direction=Undefined,
                 fillColor=Undefined, format=Undefined, formatType=Undefined, gradientLength=Undefined,
                 gradientOpacity=Undefined, gradientStrokeColor=Undefined,
                 gradientStrokeWidth=Undefined, gradientThickness=Undefined, gridAlign=Undefined,
                 labelAlign=Undefined, labelBaseline=Undefined, labelColor=Undefined,
                 labelExpr=Undefined, labelFont=Undefined, labelFontSize=Undefined,
                 labelFontStyle=Undefined, labelFontWeight=Undefined, labelLimit=Undefined,
                 labelOffset=Undefined, labelOpacity=Undefined, labelOverlap=Undefined,
                 labelPadding=Undefined, labelSeparation=Undefined, legendX=Undefined,
                 legendY=Undefined, offset=Undefined, orient=Undefined, padding=Undefined,
                 rowPadding=Undefined, strokeColor=Undefined, symbolDash=Undefined,
                 symbolDashOffset=Undefined, symbolFillColor=Undefined, symbolLimit=Undefined,
                 symbolOffset=Undefined, symbolOpacity=Undefined, symbolSize=Undefined,
                 symbolStrokeColor=Undefined, symbolStrokeWidth=Undefined, symbolType=Undefined,
                 tickCount=Undefined, tickMinStep=Undefined, title=Undefined, titleAlign=Undefined,
                 titleAnchor=Undefined, titleBaseline=Undefined, titleColor=Undefined,
                 titleFont=Undefined, titleFontSize=Undefined, titleFontStyle=Undefined,
                 titleFontWeight=Undefined, titleLimit=Undefined, titleLineHeight=Undefined,
                 titleOpacity=Undefined, titleOrient=Undefined, titlePadding=Undefined, type=Undefined,
                 values=Undefined, zindex=Undefined, **kwds):
        # ``locals()`` is captured before any other local name is bound, so it
        # contains exactly ``self``, the declared keyword parameters (in
        # declaration order) and ``kwds`` -- forward them all unchanged.
        properties = dict(locals())
        properties.pop('self')
        extra = properties.pop('kwds')
        super(Legend, self).__init__(**properties, **extra)
class LegendBinding(VegaLiteSchema):
    """LegendBinding schema wrapper.

    anyOf(string, :class:`LegendStreamBinding`)
    """
    _schema = {'$ref': '#/definitions/LegendBinding'}

    def __init__(self, *args, **kwds):
        # Union wrapper: pass everything straight through for validation.
        super().__init__(*args, **kwds)
class LegendConfig(VegaLiteSchema):
    """LegendConfig schema wrapper.

    Mapping(required=[])

    Default configuration applied to all legends in a chart.  Every keyword
    parameter corresponds one-to-one to a property of the Vega-Lite
    ``LegendConfig`` definition; see the `Vega-Lite legend documentation
    <https://vega.github.io/vega-lite/docs/legend.html#config>`__ for the
    authoritative description of each property.

    Notable properties:

    disable : boolean
        Disable legends by default.
    direction : :class:`Orientation`
        ``"vertical"`` or ``"horizontal"``; defaults depend on ``orient``.
    gradientHorizontalMaxLength / gradientHorizontalMinLength : float
        Max/min length for a horizontal gradient when
        ``config.legend.gradientLength`` is undefined.
        **Default values:** ``200`` / ``100``
    gradientVerticalMaxLength / gradientVerticalMinLength : float
        Max/min length for a vertical gradient when
        ``config.legend.gradientLength`` is undefined.
        **Default values:** ``200`` / ``100``
    labelOverlap : anyOf(:class:`LabelOverlap`, :class:`ExprRef`)
        Overlap-resolution strategy for gradient-legend labels: ``False``
        disables it, ``True``/``"parity"`` drops every other label,
        ``"greedy"`` performs a linear scan removing labels that overlap the
        last visible one.  **Default value:** ``"greedy"`` for log scales,
        ``True`` otherwise.
    orient : :class:`LegendOrient`
        Where legends are positioned.  **Default value:** ``"right"``
    title : None
        Set to ``None`` to disable titles for axis, legend, or header.
    unselectedOpacity : float
        Opacity of unselected legend entries.  **Default value:** ``0.35``

    The remaining ``label*``, ``title*``, ``symbol*``, ``gradient*``,
    stroke/padding/offset properties are forwarded unchanged to the schema.
    """
    _schema = {'$ref': '#/definitions/LegendConfig'}

    def __init__(self, aria=Undefined, clipHeight=Undefined, columnPadding=Undefined, columns=Undefined,
                 cornerRadius=Undefined, description=Undefined, direction=Undefined, disable=Undefined,
                 fillColor=Undefined, gradientDirection=Undefined,
                 gradientHorizontalMaxLength=Undefined, gradientHorizontalMinLength=Undefined,
                 gradientLabelLimit=Undefined, gradientLabelOffset=Undefined, gradientLength=Undefined,
                 gradientOpacity=Undefined, gradientStrokeColor=Undefined,
                 gradientStrokeWidth=Undefined, gradientThickness=Undefined,
                 gradientVerticalMaxLength=Undefined, gradientVerticalMinLength=Undefined,
                 gridAlign=Undefined, labelAlign=Undefined, labelBaseline=Undefined,
                 labelColor=Undefined, labelFont=Undefined, labelFontSize=Undefined,
                 labelFontStyle=Undefined, labelFontWeight=Undefined, labelLimit=Undefined,
                 labelOffset=Undefined, labelOpacity=Undefined, labelOverlap=Undefined,
                 labelPadding=Undefined, labelSeparation=Undefined, layout=Undefined, legendX=Undefined,
                 legendY=Undefined, offset=Undefined, orient=Undefined, padding=Undefined,
                 rowPadding=Undefined, strokeColor=Undefined, strokeDash=Undefined,
                 strokeWidth=Undefined, symbolBaseFillColor=Undefined, symbolBaseStrokeColor=Undefined,
                 symbolDash=Undefined, symbolDashOffset=Undefined, symbolDirection=Undefined,
                 symbolFillColor=Undefined, symbolLimit=Undefined, symbolOffset=Undefined,
                 symbolOpacity=Undefined, symbolSize=Undefined, symbolStrokeColor=Undefined,
                 symbolStrokeWidth=Undefined, symbolType=Undefined, tickCount=Undefined,
                 title=Undefined, titleAlign=Undefined, titleAnchor=Undefined, titleBaseline=Undefined,
                 titleColor=Undefined, titleFont=Undefined, titleFontSize=Undefined,
                 titleFontStyle=Undefined, titleFontWeight=Undefined, titleLimit=Undefined,
                 titleLineHeight=Undefined, titleOpacity=Undefined, titleOrient=Undefined,
                 titlePadding=Undefined, unselectedOpacity=Undefined, zindex=Undefined, **kwds):
        # ``locals()`` is captured before any other local name is bound, so it
        # contains exactly ``self``, the declared keyword parameters (in
        # declaration order) and ``kwds`` -- forward them all unchanged.
        properties = dict(locals())
        properties.pop('self')
        extra = properties.pop('kwds')
        super(LegendConfig, self).__init__(**properties, **extra)
class LegendOrient(VegaLiteSchema):
    """LegendOrient schema wrapper.

    enum('none', 'left', 'right', 'top', 'bottom', 'top-left', 'top-right',
    'bottom-left', 'bottom-right')
    """
    _schema = {'$ref': '#/definitions/LegendOrient'}

    def __init__(self, *args):
        # Enum wrapper: the positional value is validated against the schema
        # by the base class.
        super().__init__(*args)
class LegendResolveMap(VegaLiteSchema):
    """LegendResolveMap schema wrapper.

    Mapping(required=[])

    Per-channel legend resolution modes; each attribute is a
    :class:`ResolveMode` for the corresponding encoding channel: ``angle``,
    ``color``, ``fill``, ``fillOpacity``, ``opacity``, ``shape``, ``size``,
    ``stroke``, ``strokeDash``, ``strokeOpacity``, ``strokeWidth``.
    """
    _schema = {'$ref': '#/definitions/LegendResolveMap'}

    def __init__(self, angle=Undefined, color=Undefined, fill=Undefined, fillOpacity=Undefined,
                 opacity=Undefined, shape=Undefined, size=Undefined, stroke=Undefined,
                 strokeDash=Undefined, strokeOpacity=Undefined, strokeWidth=Undefined, **kwds):
        # ``locals()`` is captured before any other local name is bound, so it
        # contains exactly ``self``, the channel parameters (in declaration
        # order) and ``kwds`` -- forward them all unchanged.
        channels = dict(locals())
        channels.pop('self')
        extra = channels.pop('kwds')
        super(LegendResolveMap, self).__init__(**channels, **extra)
class LegendStreamBinding(LegendBinding):
    """LegendStreamBinding schema wrapper.

    Mapping(required=[legend])

    Attributes
    ----------
    legend : anyOf(string, :class:`Stream`)
    """
    _schema = {'$ref': '#/definitions/LegendStreamBinding'}

    def __init__(self, legend=Undefined, **kwds):
        # Forward the required ``legend`` property (plus any extras) to the
        # schema base class for validation.
        super().__init__(legend=legend, **kwds)
class LineConfig(AnyMarkConfig):
"""LineConfig schema wrapper
Mapping(required=[])
Attributes
----------
align : anyOf(:class:`Align`, :class:`ExprRef`)
The horizontal alignment of the text or ranged marks (area, bar, image, rect, rule).
One of ``"left"``, ``"right"``, ``"center"``.
**Note:** Expression reference is *not* supported for range marks.
angle : anyOf(float, :class:`ExprRef`)
aria : anyOf(boolean, :class:`ExprRef`)
ariaRole : anyOf(string, :class:`ExprRef`)
ariaRoleDescription : anyOf(string, :class:`ExprRef`)
aspect : anyOf(boolean, :class:`ExprRef`)
baseline : anyOf(:class:`TextBaseline`, :class:`ExprRef`)
For text marks, the vertical text baseline. One of ``"alphabetic"`` (default),
``"top"``, ``"middle"``, ``"bottom"``, ``"line-top"``, ``"line-bottom"``, or an
expression reference that provides one of the valid values. The ``"line-top"`` and
``"line-bottom"`` values operate similarly to ``"top"`` and ``"bottom"``, but are
calculated relative to the ``lineHeight`` rather than ``fontSize`` alone.
For range marks, the vertical alignment of the marks. One of ``"top"``,
``"middle"``, ``"bottom"``.
**Note:** Expression reference is *not* supported for range marks.
blend : anyOf(:class:`Blend`, :class:`ExprRef`)
color : anyOf(:class:`Color`, :class:`Gradient`, :class:`ExprRef`)
Default color.
**Default value:** :raw-html:`<span style="color: #4682b4;">■</span>`
``"#4682b4"``
**Note:** - This property cannot be used in a `style config
<https://vega.github.io/vega-lite/docs/mark.html#style-config>`__. - The ``fill``
and ``stroke`` properties have higher precedence than ``color`` and will override
``color``.
cornerRadius : anyOf(float, :class:`ExprRef`)
cornerRadiusBottomLeft : anyOf(float, :class:`ExprRef`)
cornerRadiusBottomRight : anyOf(float, :class:`ExprRef`)
cornerRadiusTopLeft : anyOf(float, :class:`ExprRef`)
cornerRadiusTopRight : anyOf(float, :class:`ExprRef`)
cursor : anyOf(:class:`Cursor`, :class:`ExprRef`)
description : anyOf(string, :class:`ExprRef`)
dir : anyOf(:class:`TextDirection`, :class:`ExprRef`)
dx : anyOf(float, :class:`ExprRef`)
dy : anyOf(float, :class:`ExprRef`)
ellipsis : anyOf(string, :class:`ExprRef`)
endAngle : anyOf(float, :class:`ExprRef`)
fill : anyOf(:class:`Color`, :class:`Gradient`, None, :class:`ExprRef`)
Default fill color. This property has higher precedence than ``config.color``. Set
to ``null`` to remove fill.
**Default value:** (None)
fillOpacity : anyOf(float, :class:`ExprRef`)
filled : boolean
Whether the mark's color should be used as fill color instead of stroke color.
**Default value:** ``false`` for all ``point``, ``line``, and ``rule`` marks as well
as ``geoshape`` marks for `graticule
<https://vega.github.io/vega-lite/docs/data.html#graticule>`__ data sources;
otherwise, ``true``.
**Note:** This property cannot be used in a `style config
<https://vega.github.io/vega-lite/docs/mark.html#style-config>`__.
font : anyOf(string, :class:`ExprRef`)
fontSize : anyOf(float, :class:`ExprRef`)
fontStyle : anyOf(:class:`FontStyle`, :class:`ExprRef`)
fontWeight : anyOf(:class:`FontWeight`, :class:`ExprRef`)
height : anyOf(float, :class:`ExprRef`)
href : anyOf(:class:`URI`, :class:`ExprRef`)
innerRadius : anyOf(float, :class:`ExprRef`)
The inner radius in pixels of arc marks. ``innerRadius`` is an alias for
``radius2``.
interpolate : anyOf(:class:`Interpolate`, :class:`ExprRef`)
invalid : enum('filter', None)
Defines how Vega-Lite should handle marks for invalid values ( ``null`` and ``NaN``
). - If set to ``"filter"`` (default), all data items with null values will be
skipped (for line, trail, and area marks) or filtered (for other marks). - If
``null``, all data items are included. In this case, invalid values will be
interpreted as zeroes.
limit : anyOf(float, :class:`ExprRef`)
lineBreak : anyOf(string, :class:`ExprRef`)
lineHeight : anyOf(float, :class:`ExprRef`)
opacity : anyOf(float, :class:`ExprRef`)
The overall opacity (value between [0,1]).
**Default value:** ``0.7`` for non-aggregate plots with ``point``, ``tick``,
``circle``, or ``square`` marks or layered ``bar`` charts and ``1`` otherwise.
order : anyOf(None, boolean)
For line and trail marks, this ``order`` property can be set to ``null`` or
``false`` to make the lines use the original order in the data sources.
orient : :class:`Orientation`
The orientation of a non-stacked bar, tick, area, and line charts. The value is
either horizontal (default) or vertical. - For bar, rule and tick, this determines
whether the size of the bar and tick should be applied to x or y dimension. - For
area, this property determines the orient property of the Vega output. - For line
and trail marks, this property determines the sort order of the points in the line
if ``config.sortLineBy`` is not specified. For stacked charts, this is always
determined by the orientation of the stack; therefore explicitly specified value
will be ignored.
outerRadius : anyOf(float, :class:`ExprRef`)
The outer radius in pixels of arc marks. ``outerRadius`` is an alias for ``radius``.
padAngle : anyOf(float, :class:`ExprRef`)
point : anyOf(boolean, :class:`OverlayMarkDef`, string)
A flag for overlaying points on top of line or area marks, or an object defining the
properties of the overlayed points.
If this property is ``"transparent"``, transparent points will be used (for
enhancing tooltips and selections).
If this property is an empty object ( ``{}`` ) or ``true``, filled points with
default properties will be used.
If this property is ``false``, no points would be automatically added to line or
area marks.
**Default value:** ``false``.
radius : anyOf(float, :class:`ExprRef`)
For arc mark, the primary (outer) radius in pixels.
For text marks, polar coordinate radial offset, in pixels, of the text from the
origin determined by the ``x`` and ``y`` properties.
radius2 : anyOf(float, :class:`ExprRef`)
The secondary (inner) radius in pixels of arc marks.
shape : anyOf(anyOf(:class:`SymbolShape`, string), :class:`ExprRef`)
size : anyOf(float, :class:`ExprRef`)
Default size for marks. - For ``point`` / ``circle`` / ``square``, this represents
the pixel area of the marks. Note that this value sets the area of the symbol; the
side lengths will increase with the square root of this value. - For ``bar``, this
represents the band size of the bar, in pixels. - For ``text``, this represents the
font size, in pixels.
**Default value:** - ``30`` for point, circle, square marks; width/height's ``step``
- ``2`` for bar marks with discrete dimensions; - ``5`` for bar marks with
continuous dimensions; - ``11`` for text marks.
smooth : anyOf(boolean, :class:`ExprRef`)
startAngle : anyOf(float, :class:`ExprRef`)
stroke : anyOf(:class:`Color`, :class:`Gradient`, None, :class:`ExprRef`)
Default stroke color. This property has higher precedence than ``config.color``. Set
to ``null`` to remove stroke.
**Default value:** (None)
strokeCap : anyOf(:class:`StrokeCap`, :class:`ExprRef`)
strokeDash : anyOf(List(float), :class:`ExprRef`)
strokeDashOffset : anyOf(float, :class:`ExprRef`)
strokeJoin : anyOf(:class:`StrokeJoin`, :class:`ExprRef`)
strokeMiterLimit : anyOf(float, :class:`ExprRef`)
strokeOffset : anyOf(float, :class:`ExprRef`)
strokeOpacity : anyOf(float, :class:`ExprRef`)
strokeWidth : anyOf(float, :class:`ExprRef`)
tension : anyOf(float, :class:`ExprRef`)
text : anyOf(:class:`Text`, :class:`ExprRef`)
theta : anyOf(float, :class:`ExprRef`)
For arc marks, the arc length in radians if theta2 is not specified, otherwise the
start arc angle. (A value of 0 indicates up or “north”, increasing values proceed
clockwise.)
For text marks, polar coordinate angle in radians.
theta2 : anyOf(float, :class:`ExprRef`)
The end angle of arc marks in radians. A value of 0 indicates up or “north”,
increasing values proceed clockwise.
timeUnitBand : float
Default relative band size for a time unit. If set to ``1``, the bandwidth of the
marks will be equal to the time unit band step. If set to ``0.5``, bandwidth of the
marks will be half of the time unit band step.
timeUnitBandPosition : float
Default relative band position for a time unit. If set to ``0``, the marks will be
positioned at the beginning of the time unit band step. If set to ``0.5``, the marks
will be positioned in the middle of the time unit band step.
tooltip : anyOf(float, string, boolean, :class:`TooltipContent`, :class:`ExprRef`, None)
The tooltip text string to show upon mouse hover or an object defining which fields
should the tooltip be derived from.
* If ``tooltip`` is ``true`` or ``{"content": "encoding"}``, then all fields from
``encoding`` will be used. - If ``tooltip`` is ``{"content": "data"}``, then all
fields that appear in the highlighted data point will be used. - If set to
``null`` or ``false``, then no tooltip will be used.
See the `tooltip <https://vega.github.io/vega-lite/docs/tooltip.html>`__
documentation for a detailed discussion about tooltip in Vega-Lite.
**Default value:** ``null``
url : anyOf(:class:`URI`, :class:`ExprRef`)
width : anyOf(float, :class:`ExprRef`)
x : anyOf(float, string, :class:`ExprRef`)
X coordinates of the marks, or width of horizontal ``"bar"`` and ``"area"`` without
specified ``x2`` or ``width``.
The ``value`` of this channel can be a number or a string ``"width"`` for the width
of the plot.
x2 : anyOf(float, string, :class:`ExprRef`)
X2 coordinates for ranged ``"area"``, ``"bar"``, ``"rect"``, and ``"rule"``.
The ``value`` of this channel can be a number or a string ``"width"`` for the width
of the plot.
y : anyOf(float, string, :class:`ExprRef`)
Y coordinates of the marks, or height of vertical ``"bar"`` and ``"area"`` without
specified ``y2`` or ``height``.
The ``value`` of this channel can be a number or a string ``"height"`` for the
height of the plot.
y2 : anyOf(float, string, :class:`ExprRef`)
Y2 coordinates for ranged ``"area"``, ``"bar"``, ``"rect"``, and ``"rule"``.
The ``value`` of this channel can be a number or a string ``"height"`` for the
height of the plot.
"""
_schema = {'$ref': '#/definitions/LineConfig'}
def __init__(self, align=Undefined, angle=Undefined, aria=Undefined, ariaRole=Undefined,
ariaRoleDescription=Undefined, aspect=Undefined, baseline=Undefined, blend=Undefined,
color=Undefined, cornerRadius=Undefined, cornerRadiusBottomLeft=Undefined,
cornerRadiusBottomRight=Undefined, cornerRadiusTopLeft=Undefined,
cornerRadiusTopRight=Undefined, cursor=Undefined, description=Undefined, dir=Undefined,
dx=Undefined, dy=Undefined, ellipsis=Undefined, endAngle=Undefined, fill=Undefined,
fillOpacity=Undefined, filled=Undefined, font=Undefined, fontSize=Undefined,
fontStyle=Undefined, fontWeight=Undefined, height=Undefined, href=Undefined,
innerRadius=Undefined, interpolate=Undefined, invalid=Undefined, limit=Undefined,
lineBreak=Undefined, lineHeight=Undefined, opacity=Undefined, order=Undefined,
orient=Undefined, outerRadius=Undefined, padAngle=Undefined, point=Undefined,
radius=Undefined, radius2=Undefined, shape=Undefined, size=Undefined, smooth=Undefined,
startAngle=Undefined, stroke=Undefined, strokeCap=Undefined, strokeDash=Undefined,
strokeDashOffset=Undefined, strokeJoin=Undefined, strokeMiterLimit=Undefined,
strokeOffset=Undefined, strokeOpacity=Undefined, strokeWidth=Undefined,
tension=Undefined, text=Undefined, theta=Undefined, theta2=Undefined,
timeUnitBand=Undefined, timeUnitBandPosition=Undefined, tooltip=Undefined,
url=Undefined, width=Undefined, x=Undefined, x2=Undefined, y=Undefined, y2=Undefined,
**kwds):
super(LineConfig, self).__init__(align=align, angle=angle, aria=aria, ariaRole=ariaRole,
ariaRoleDescription=ariaRoleDescription, aspect=aspect,
baseline=baseline, blend=blend, color=color,
cornerRadius=cornerRadius,
cornerRadiusBottomLeft=cornerRadiusBottomLeft,
cornerRadiusBottomRight=cornerRadiusBottomRight,
cornerRadiusTopLeft=cornerRadiusTopLeft,
cornerRadiusTopRight=cornerRadiusTopRight, cursor=cursor,
description=description, dir=dir, dx=dx, dy=dy,
ellipsis=ellipsis, endAngle=endAngle, fill=fill,
fillOpacity=fillOpacity, filled=filled, font=font,
fontSize=fontSize, fontStyle=fontStyle, fontWeight=fontWeight,
height=height, href=href, innerRadius=innerRadius,
interpolate=interpolate, invalid=invalid, limit=limit,
lineBreak=lineBreak, lineHeight=lineHeight, opacity=opacity,
order=order, orient=orient, outerRadius=outerRadius,
padAngle=padAngle, point=point, radius=radius, radius2=radius2,
shape=shape, size=size, smooth=smooth, startAngle=startAngle,
stroke=stroke, strokeCap=strokeCap, strokeDash=strokeDash,
strokeDashOffset=strokeDashOffset, strokeJoin=strokeJoin,
strokeMiterLimit=strokeMiterLimit, strokeOffset=strokeOffset,
strokeOpacity=strokeOpacity, strokeWidth=strokeWidth,
tension=tension, text=text, theta=theta, theta2=theta2,
timeUnitBand=timeUnitBand,
timeUnitBandPosition=timeUnitBandPosition, tooltip=tooltip,
url=url, width=width, x=x, x2=x2, y=y, y2=y2, **kwds)
class LinearGradient(Gradient):
    """LinearGradient schema wrapper

    Mapping(required=[gradient, stops])

    Attributes
    ----------
    gradient : string
        The type of gradient. Use ``"linear"`` for a linear gradient.
    stops : List(:class:`GradientStop`)
        An array of gradient stops defining the gradient color sequence.
    id : string
    x1 : float
        The starting x-coordinate of the linear gradient, in normalized
        [0, 1] coordinates.  **Default value:** ``0``
    x2 : float
        The ending x-coordinate of the linear gradient, in normalized
        [0, 1] coordinates.  **Default value:** ``1``
    y1 : float
        The starting y-coordinate of the linear gradient, in normalized
        [0, 1] coordinates.  **Default value:** ``0``
    y2 : float
        The ending y-coordinate of the linear gradient, in normalized
        [0, 1] coordinates.  **Default value:** ``0``
    """
    _schema = {'$ref': '#/definitions/LinearGradient'}

    def __init__(self, gradient=Undefined, stops=Undefined, id=Undefined, x1=Undefined, x2=Undefined,
                 y1=Undefined, y2=Undefined, **kwds):
        # Collect the bound parameters and hand them to the base wrapper; the
        # snapshot is taken before any other local is created.
        args = dict(locals())
        args.pop('self')
        args.update(args.pop('kwds'))
        super(LinearGradient, self).__init__(**args)
class LookupData(VegaLiteSchema):
    """LookupData schema wrapper

    Mapping(required=[data, key])

    Attributes
    ----------
    data : :class:`Data`
        Secondary data source to lookup in.
    key : :class:`FieldName`
        Key in data to lookup.
    fields : List(:class:`FieldName`)
        Fields in foreign data or selection to lookup. If not specified, the
        entire object is queried.
    """
    _schema = {'$ref': '#/definitions/LookupData'}

    def __init__(self, data=Undefined, key=Undefined, fields=Undefined, **kwds):
        # Pass the three schema properties plus any extra keywords through to
        # the base schema wrapper.
        super(LookupData, self).__init__(data=data, key=key, fields=fields,
                                         **kwds)
class LookupSelection(VegaLiteSchema):
    """LookupSelection schema wrapper

    Mapping(required=[key, selection])

    Attributes
    ----------
    key : :class:`FieldName`
        Key in data to lookup.
    selection : string
        Selection name to look up.
    fields : List(:class:`FieldName`)
        Fields in foreign data or selection to lookup. If not specified, the
        entire object is queried.
    """
    _schema = {'$ref': '#/definitions/LookupSelection'}

    def __init__(self, key=Undefined, selection=Undefined, fields=Undefined, **kwds):
        # Pass the three schema properties plus any extra keywords through to
        # the base schema wrapper.
        super(LookupSelection, self).__init__(key=key, selection=selection,
                                              fields=fields, **kwds)
class Mark(AnyMark):
    """Mark schema wrapper

    enum('arc', 'area', 'bar', 'image', 'line', 'point', 'rect', 'rule',
    'text', 'tick', 'trail', 'circle', 'square', 'geoshape')

    All types of primitive marks.
    """
    _schema = {'$ref': '#/definitions/Mark'}

    def __init__(self, *args):
        # Plain enum wrapper: the positional value is validated against the
        # schema by the base class.
        super(Mark, self).__init__(*args)
class MarkConfig(AnyMarkConfig):
    """MarkConfig schema wrapper

    Mapping(required=[])

    Shared configuration options for all mark types.  Every property is
    optional, and most also accept an :class:`ExprRef` expression reference.
    See the Vega-Lite mark documentation
    (https://vega.github.io/vega-lite/docs/mark.html) for the full semantics
    of each property.

    The properties fall into a few broad groups:

    * accessibility / metadata : ``aria``, ``ariaRole``,
      ``ariaRoleDescription``, ``description``, ``href``, ``url``, ``tooltip``
    * color and opacity : ``color``, ``fill``, ``stroke``, ``fillOpacity``,
      ``strokeOpacity``, ``opacity``, ``blend``, ``filled``
    * stroke styling : ``strokeCap``, ``strokeDash``, ``strokeDashOffset``,
      ``strokeJoin``, ``strokeMiterLimit``, ``strokeOffset``, ``strokeWidth``
    * geometry and position : ``x``, ``x2``, ``y``, ``y2``, ``width``,
      ``height``, ``size``, ``shape``, ``orient``, ``aspect``, ``smooth``,
      ``interpolate``, ``tension``, ``order``, ``invalid``, ``cursor``
    * corners and arcs : ``cornerRadius`` (plus the four per-corner
      variants), ``innerRadius``, ``outerRadius``, ``radius``, ``radius2``,
      ``theta``, ``theta2``, ``startAngle``, ``endAngle``, ``padAngle``
    * text : ``align``, ``angle``, ``baseline``, ``dir``, ``dx``, ``dy``,
      ``ellipsis``, ``font``, ``fontSize``, ``fontStyle``, ``fontWeight``,
      ``limit``, ``lineBreak``, ``lineHeight``, ``text``
    * time-unit bands : ``timeUnitBand``, ``timeUnitBandPosition``
    """
    _schema = {'$ref': '#/definitions/MarkConfig'}

    def __init__(self, align=Undefined, angle=Undefined, aria=Undefined, ariaRole=Undefined,
                 ariaRoleDescription=Undefined, aspect=Undefined, baseline=Undefined, blend=Undefined,
                 color=Undefined, cornerRadius=Undefined, cornerRadiusBottomLeft=Undefined,
                 cornerRadiusBottomRight=Undefined, cornerRadiusTopLeft=Undefined,
                 cornerRadiusTopRight=Undefined, cursor=Undefined, description=Undefined, dir=Undefined,
                 dx=Undefined, dy=Undefined, ellipsis=Undefined, endAngle=Undefined, fill=Undefined,
                 fillOpacity=Undefined, filled=Undefined, font=Undefined, fontSize=Undefined,
                 fontStyle=Undefined, fontWeight=Undefined, height=Undefined, href=Undefined,
                 innerRadius=Undefined, interpolate=Undefined, invalid=Undefined, limit=Undefined,
                 lineBreak=Undefined, lineHeight=Undefined, opacity=Undefined, order=Undefined,
                 orient=Undefined, outerRadius=Undefined, padAngle=Undefined, radius=Undefined,
                 radius2=Undefined, shape=Undefined, size=Undefined, smooth=Undefined,
                 startAngle=Undefined, stroke=Undefined, strokeCap=Undefined, strokeDash=Undefined,
                 strokeDashOffset=Undefined, strokeJoin=Undefined, strokeMiterLimit=Undefined,
                 strokeOffset=Undefined, strokeOpacity=Undefined, strokeWidth=Undefined,
                 tension=Undefined, text=Undefined, theta=Undefined, theta2=Undefined,
                 timeUnitBand=Undefined, timeUnitBandPosition=Undefined, tooltip=Undefined,
                 url=Undefined, width=Undefined, x=Undefined, x2=Undefined, y=Undefined, y2=Undefined,
                 **kwds):
        """Forward every schema property to the base schema-wrapper constructor."""
        # Snapshot the bound arguments instead of re-listing ~65 keywords in
        # the super() call. At this point locals() holds exactly: self, kwds,
        # and the named schema-property parameters.
        args = dict(locals())
        args.pop('self')
        # 'kwds' can never collide with a named parameter (Python would have
        # bound such a key to the parameter itself), so a plain update is safe.
        args.update(args.pop('kwds'))
        super(MarkConfig, self).__init__(**args)
class MarkConfigExprOrSignalRef(VegaLiteSchema):
    """MarkConfigExprOrSignalRef schema wrapper

    Mapping(required=[])

    Variant of :class:`MarkConfig` in which expression-capable properties
    accept an :class:`ExprOrSignalRef` (rather than an :class:`ExprRef`).
    Every property is optional.  See the Vega-Lite mark documentation
    (https://vega.github.io/vega-lite/docs/mark.html) for the full semantics
    of each property.

    The properties fall into a few broad groups:

    * accessibility / metadata : ``aria``, ``ariaRole``,
      ``ariaRoleDescription``, ``description``, ``href``, ``url``, ``tooltip``
    * color and opacity : ``color``, ``fill``, ``stroke``, ``fillOpacity``,
      ``strokeOpacity``, ``opacity``, ``blend``, ``filled``
    * stroke styling : ``strokeCap``, ``strokeDash``, ``strokeDashOffset``,
      ``strokeJoin``, ``strokeMiterLimit``, ``strokeOffset``, ``strokeWidth``
    * geometry and position : ``x``, ``x2``, ``y``, ``y2``, ``width``,
      ``height``, ``size``, ``shape``, ``orient``, ``aspect``, ``smooth``,
      ``interpolate``, ``tension``, ``order``, ``invalid``, ``cursor``
    * corners and arcs : ``cornerRadius`` (plus the four per-corner
      variants), ``innerRadius``, ``outerRadius``, ``radius``, ``radius2``,
      ``theta``, ``theta2``, ``startAngle``, ``endAngle``, ``padAngle``
    * text : ``align``, ``angle``, ``baseline``, ``dir``, ``dx``, ``dy``,
      ``ellipsis``, ``font``, ``fontSize``, ``fontStyle``, ``fontWeight``,
      ``limit``, ``lineBreak``, ``lineHeight``, ``text``
    * time-unit bands : ``timeUnitBand``, ``timeUnitBandPosition``
    """
    _schema = {'$ref': '#/definitions/MarkConfig<ExprOrSignalRef>'}

    def __init__(self, align=Undefined, angle=Undefined, aria=Undefined, ariaRole=Undefined,
                 ariaRoleDescription=Undefined, aspect=Undefined, baseline=Undefined, blend=Undefined,
                 color=Undefined, cornerRadius=Undefined, cornerRadiusBottomLeft=Undefined,
                 cornerRadiusBottomRight=Undefined, cornerRadiusTopLeft=Undefined,
                 cornerRadiusTopRight=Undefined, cursor=Undefined, description=Undefined, dir=Undefined,
                 dx=Undefined, dy=Undefined, ellipsis=Undefined, endAngle=Undefined, fill=Undefined,
                 fillOpacity=Undefined, filled=Undefined, font=Undefined, fontSize=Undefined,
                 fontStyle=Undefined, fontWeight=Undefined, height=Undefined, href=Undefined,
                 innerRadius=Undefined, interpolate=Undefined, invalid=Undefined, limit=Undefined,
                 lineBreak=Undefined, lineHeight=Undefined, opacity=Undefined, order=Undefined,
                 orient=Undefined, outerRadius=Undefined, padAngle=Undefined, radius=Undefined,
                 radius2=Undefined, shape=Undefined, size=Undefined, smooth=Undefined,
                 startAngle=Undefined, stroke=Undefined, strokeCap=Undefined, strokeDash=Undefined,
                 strokeDashOffset=Undefined, strokeJoin=Undefined, strokeMiterLimit=Undefined,
                 strokeOffset=Undefined, strokeOpacity=Undefined, strokeWidth=Undefined,
                 tension=Undefined, text=Undefined, theta=Undefined, theta2=Undefined,
                 timeUnitBand=Undefined, timeUnitBandPosition=Undefined, tooltip=Undefined,
                 url=Undefined, width=Undefined, x=Undefined, x2=Undefined, y=Undefined, y2=Undefined,
                 **kwds):
        """Forward every schema property to the base schema-wrapper constructor."""
        # Snapshot the bound arguments instead of re-listing ~65 keywords in
        # the super() call. At this point locals() holds exactly: self, kwds,
        # and the named schema-property parameters.
        args = dict(locals())
        args.pop('self')
        # 'kwds' can never collide with a named parameter (Python would have
        # bound such a key to the parameter itself), so a plain update is safe.
        args.update(args.pop('kwds'))
        super(MarkConfigExprOrSignalRef, self).__init__(**args)
class MarkDef(AnyMark):
    """MarkDef schema wrapper

    Mapping(required=[type])

    A full mark definition.  ``type`` is the only required property and names
    either a primitive mark type (one of ``"bar"``, ``"circle"``, ``"square"``,
    ``"tick"``, ``"line"``, ``"area"``, ``"point"``, ``"geoshape"``, ``"rule"``,
    and ``"text"``) or a composite mark type (``"boxplot"``, ``"errorband"``,
    ``"errorbar"``).

    Every other keyword argument maps one-to-one onto a property of the
    Vega-Lite ``MarkDef`` schema: alignment and text layout (``align``,
    ``baseline``, ``dx``, ``dy``, ``font*``, ``lineBreak``, ``lineHeight``),
    fill/stroke styling (``color``, ``fill``, ``stroke``, ``opacity``,
    ``stroke*``, ``cornerRadius*``), arc geometry (``theta``, ``theta2``,
    ``radius``, ``radius2``, ``innerRadius``, ``outerRadius``, ``padAngle``),
    bar sizing (``bandSize``, ``binSpacing``, ``continuousBandSize``,
    ``discreteBandSize``, ``thickness``), positioning (``x``, ``x2``, ``y``,
    ``y2`` and their ``*Offset`` variants), overlays (``line``, ``point``),
    plus ``aria*``, ``clip``, ``cursor``, ``href``, ``invalid``, ``orient``,
    ``style``, ``tooltip``, ``url`` and the ``timeUnitBand*`` properties.
    Many accept an :class:`ExprRef` in place of a literal value.

    See https://vega.github.io/vega-lite/docs/mark.html for the meaning and
    default value of each individual property.
    """
    _schema = {'$ref': '#/definitions/MarkDef'}

    def __init__(self, type=Undefined, align=Undefined, angle=Undefined, aria=Undefined,
                 ariaRole=Undefined, ariaRoleDescription=Undefined, aspect=Undefined,
                 bandSize=Undefined, baseline=Undefined, binSpacing=Undefined, blend=Undefined,
                 clip=Undefined, color=Undefined, continuousBandSize=Undefined, cornerRadius=Undefined,
                 cornerRadiusBottomLeft=Undefined, cornerRadiusBottomRight=Undefined,
                 cornerRadiusEnd=Undefined, cornerRadiusTopLeft=Undefined,
                 cornerRadiusTopRight=Undefined, cursor=Undefined, description=Undefined, dir=Undefined,
                 discreteBandSize=Undefined, dx=Undefined, dy=Undefined, ellipsis=Undefined,
                 fill=Undefined, fillOpacity=Undefined, filled=Undefined, font=Undefined,
                 fontSize=Undefined, fontStyle=Undefined, fontWeight=Undefined, height=Undefined,
                 href=Undefined, innerRadius=Undefined, interpolate=Undefined, invalid=Undefined,
                 limit=Undefined, line=Undefined, lineBreak=Undefined, lineHeight=Undefined,
                 opacity=Undefined, order=Undefined, orient=Undefined, outerRadius=Undefined,
                 padAngle=Undefined, point=Undefined, radius=Undefined, radius2=Undefined,
                 radius2Offset=Undefined, radiusOffset=Undefined, shape=Undefined, size=Undefined,
                 smooth=Undefined, stroke=Undefined, strokeCap=Undefined, strokeDash=Undefined,
                 strokeDashOffset=Undefined, strokeJoin=Undefined, strokeMiterLimit=Undefined,
                 strokeOffset=Undefined, strokeOpacity=Undefined, strokeWidth=Undefined,
                 style=Undefined, tension=Undefined, text=Undefined, theta=Undefined, theta2=Undefined,
                 theta2Offset=Undefined, thetaOffset=Undefined, thickness=Undefined,
                 timeUnitBand=Undefined, timeUnitBandPosition=Undefined, tooltip=Undefined,
                 url=Undefined, width=Undefined, x=Undefined, x2=Undefined, x2Offset=Undefined,
                 xOffset=Undefined, y=Undefined, y2=Undefined, y2Offset=Undefined, yOffset=Undefined,
                 **kwds):
        # Snapshot the declared keyword parameters before any other local
        # name is introduced, then forward them all to the schema base class.
        # Using two separate ** expansions (rather than merging the dicts)
        # preserves the original behavior of raising TypeError if ``kwds``
        # duplicates one of the named parameters.
        properties = dict(locals())
        del properties['self']
        del properties['kwds']
        super(MarkDef, self).__init__(**properties, **kwds)
class MarkPropDefGradientstringnull(VegaLiteSchema):
    """MarkPropDefGradientstringnull schema wrapper

    A union ("anyOf") type accepting any one of:
    :class:`FieldOrDatumDefWithConditionMarkPropFieldDefGradientstringnull`,
    :class:`FieldOrDatumDefWithConditionDatumDefGradientstringnull`, or
    :class:`ValueDefWithConditionMarkPropFieldOrDatumDefGradientstringnull`.
    """
    _schema = {'$ref': '#/definitions/MarkPropDef<(Gradient|string|null)>'}

    def __init__(self, *args, **kwds):
        # Validation against the anyOf schema is handled by VegaLiteSchema.
        super(MarkPropDefGradientstringnull, self).__init__(*args, **kwds)
class FieldOrDatumDefWithConditionDatumDefGradientstringnull(ColorDef, MarkPropDefGradientstringnull):
    """FieldOrDatumDefWithConditionDatumDefGradientstringnull schema wrapper

    Mapping(required=[])

    A datum definition (a constant value in the data domain) that may also
    carry conditional value definitions::

        {condition: {value: ...}, field: ..., ...}

    Parameters
    ----------
    band : float
        For rect-based marks (``rect``, ``bar``, ``image``), mark size
        relative to the bandwidth of band scales, bins or time units; for
        other marks, the relative position on such a band (``0`` = start,
        ``0.5`` = middle, ``1`` = full band/interval).
    condition : anyOf(:class:`ConditionalValueDefGradientstringnullExprRef`,
                      List(:class:`ConditionalValueDefGradientstringnullExprRef`))
        One or more value definition(s) with a selection or test predicate.
        Only conditional *value* definitions are allowed here, since
        Vega-Lite permits at most one encoded field per encoding channel.
    datum : anyOf(:class:`PrimitiveValue`, :class:`DateTime`, :class:`ExprRef`,
                  :class:`RepeatRef`)
        A constant value in the data domain.
    type : :class:`Type`
        The type of measurement (``"quantitative"``, ``"temporal"``,
        ``"ordinal"``, ``"nominal"``, or ``"geojson"``) for the encoded field
        or constant value.  See
        https://vega.github.io/vega-lite/docs/type.html for inference rules
        and defaults.
    """
    _schema = {'$ref': '#/definitions/FieldOrDatumDefWithCondition<DatumDef,(Gradient|string|null)>'}

    def __init__(self, band=Undefined, condition=Undefined, datum=Undefined, type=Undefined, **kwds):
        super(FieldOrDatumDefWithConditionDatumDefGradientstringnull, self).__init__(
            band=band, condition=condition, datum=datum, type=type, **kwds)
class FieldOrDatumDefWithConditionMarkPropFieldDefGradientstringnull(ColorDef, MarkPropDefGradientstringnull):
    """FieldOrDatumDefWithConditionMarkPropFieldDefGradientstringnull schema wrapper

    Mapping(required=[])

    A field definition that may also carry conditional value definitions::

        {condition: {value: ...}, field: ..., ...}

    Parameters
    ----------
    aggregate : :class:`Aggregate`
        Aggregation function for the field (e.g. ``"mean"``, ``"sum"``,
        ``"median"``, ``"min"``, ``"max"``, ``"count"``).
    band : float
        For rect-based marks, mark size relative to the bandwidth of band
        scales, bins or time units; for other marks, the relative position
        on such a band (``0`` = start, ``0.5`` = middle, ``1`` = full band).
    bin : anyOf(boolean, :class:`BinParams`, None)
        ``True`` to bin a quantitative field with default parameters, a
        :class:`BinParams` object for custom binning, or ``"binned"`` to
        indicate the data were binned before being imported into Vega-Lite.
    condition : anyOf(:class:`ConditionalValueDefGradientstringnullExprRef`,
                      List(:class:`ConditionalValueDefGradientstringnullExprRef`))
        One or more conditional *value* definition(s) with a selection or
        test predicate (at most one encoded field per channel is allowed).
    field : :class:`Field`
        Name of the field to pull a data value from, or an object defining
        iterated values from the ``repeat`` operator.  Not required when
        ``aggregate`` is ``count``.
    legend : anyOf(:class:`Legend`, None)
        Legend properties for the encoding channel; ``None`` removes the
        legend.
    scale : anyOf(:class:`Scale`, None)
        Scale properties for the channel; ``None`` disables the scale so the
        data value is encoded directly.
    sort : :class:`Sort`
        Sort order for the encoded field (``"ascending"``/``"descending"``,
        a sort-by-encoding string such as ``"-x"``, a sort field definition,
        an explicit value array, or ``None`` for no sort).
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit (e.g. ``year``, ``yearmonth``, ``month``, ``hours``) for a
        temporal field.
    title : anyOf(:class:`Text`, None)
        Title for the field; ``None`` removes the title.  Defaults to a
        title derived from the field name and transformation function.
    type : :class:`StandardType`
        The type of measurement (``"quantitative"``, ``"temporal"``,
        ``"ordinal"``, or ``"nominal"``) for the encoded field.  See
        https://vega.github.io/vega-lite/docs/type.html for inference rules
        and defaults.
    """
    _schema = {'$ref': '#/definitions/FieldOrDatumDefWithCondition<MarkPropFieldDef,(Gradient|string|null)>'}

    def __init__(self, aggregate=Undefined, band=Undefined, bin=Undefined, condition=Undefined,
                 field=Undefined, legend=Undefined, scale=Undefined, sort=Undefined,
                 timeUnit=Undefined, title=Undefined, type=Undefined, **kwds):
        super(FieldOrDatumDefWithConditionMarkPropFieldDefGradientstringnull, self).__init__(
            aggregate=aggregate, band=band, bin=bin, condition=condition, field=field,
            legend=legend, scale=scale, sort=sort, timeUnit=timeUnit, title=title,
            type=type, **kwds)
class MarkPropDefnumber(VegaLiteSchema):
    """MarkPropDefnumber schema wrapper.

    Accepts any of :class:`FieldOrDatumDefWithConditionMarkPropFieldDefnumber`,
    :class:`FieldOrDatumDefWithConditionDatumDefnumber`, or
    :class:`ValueDefWithConditionMarkPropFieldOrDatumDefnumber`.
    """
    _schema = {'$ref': '#/definitions/MarkPropDef<number>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class MarkPropDefnumberArray(VegaLiteSchema):
    """MarkPropDefnumberArray schema wrapper.

    Accepts any of :class:`FieldOrDatumDefWithConditionMarkPropFieldDefnumberArray`,
    :class:`FieldOrDatumDefWithConditionDatumDefnumberArray`, or
    :class:`ValueDefWithConditionMarkPropFieldOrDatumDefnumberArray`.
    """
    _schema = {'$ref': '#/definitions/MarkPropDef<number[]>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class MarkPropDefstringnullTypeForShape(VegaLiteSchema):
    """MarkPropDefstringnullTypeForShape schema wrapper.

    Accepts any of
    :class:`FieldOrDatumDefWithConditionMarkPropFieldDefTypeForShapestringnull`,
    :class:`FieldOrDatumDefWithConditionDatumDefstringnull`, or
    :class:`ValueDefWithConditionMarkPropFieldOrDatumDefTypeForShapestringnull`.
    """
    _schema = {'$ref': '#/definitions/MarkPropDef<(string|null),TypeForShape>'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class MarkType(VegaLiteSchema):
    """MarkType schema wrapper.

    One of: 'arc', 'area', 'image', 'group', 'line', 'path', 'rect', 'rule',
    'shape', 'symbol', 'text', 'trail'.
    """
    _schema = {'$ref': '#/definitions/MarkType'}

    def __init__(self, *args):
        # Enum wrapper: the single positional value is validated by the base.
        super().__init__(*args)
class Month(VegaLiteSchema):
    """Month schema wrapper.

    A float month value.
    """
    _schema = {'$ref': '#/definitions/Month'}

    def __init__(self, *args):
        # Scalar wrapper: the value is validated by the schema base class.
        super().__init__(*args)
class MultiSelectionConfig(VegaLiteSchema):
    """MultiSelectionConfig schema wrapper.

    Mapping(required=[])

    Attributes
    ----------
    bind : :class:`LegendBinding`
        When set, the selection is populated by interacting with the
        corresponding legend (direct manipulation is then disabled unless the
        selection's ``on`` property re-enables it). Restricted to selections
        over a single field or encoding.
    clear : anyOf(:class:`Stream`, string, boolean)
        Event stream that empties the selection, or ``False`` to disable
        clearing. Default: ``dblclick``.
    empty : enum('all', 'none')
        Whether an empty selection contains ``'all'`` data values (the
        default) or ``'none'``.
    encodings : List(:class:`SingleDefUnitChannel`)
        Encoding channels whose field values must match for a data tuple to
        fall within the selection.
    fields : List(:class:`FieldName`)
        Field names whose values must match for a data tuple to fall within
        the selection.
    init : List(:class:`SelectionInitMapping`)
        Initial value(s): mappings between projected channels/field names and
        an initial value (or array of values).
    nearest : boolean
        When true, an invisible voronoi diagram accelerates discrete
        selection; the data value nearest the cursor is selected.
    on : anyOf(:class:`Stream`, string)
        Vega event stream (object or selector) that triggers the selection.
    resolve : :class:`SelectionResolution`
        Strategy for resolving selections' data queries across layered and
        multi-view displays.
    toggle : anyOf(string, boolean)
        Whether values are toggled (``True``, the default, corresponding to
        ``event.shiftKey``) or only ever inserted (``False``); may also be a
        Vega expression string.
    """
    _schema = {'$ref': '#/definitions/MultiSelectionConfig'}

    def __init__(self, bind=Undefined, clear=Undefined, empty=Undefined, encodings=Undefined,
                 fields=Undefined, init=Undefined, nearest=Undefined, on=Undefined, resolve=Undefined,
                 toggle=Undefined, **kwds):
        # Forward every keyword unchanged; the base class performs validation.
        super().__init__(
            bind=bind,
            clear=clear,
            empty=empty,
            encodings=encodings,
            fields=fields,
            init=init,
            nearest=nearest,
            on=on,
            resolve=resolve,
            toggle=toggle,
            **kwds,
        )
class NamedData(DataSource):
    """NamedData schema wrapper.

    Mapping(required=[name])

    Attributes
    ----------
    name : string
        Placeholder name under which data is bound at runtime.
    format : :class:`DataFormat`
        Object specifying the format used when parsing the data.
    """
    _schema = {'$ref': '#/definitions/NamedData'}

    def __init__(self, name=Undefined, format=Undefined, **kwds):
        # Forward keywords unchanged; the base class performs validation.
        super().__init__(name=name, format=format, **kwds)
class NonArgAggregateOp(Aggregate):
    """NonArgAggregateOp schema wrapper.

    One of: 'average', 'count', 'distinct', 'max', 'mean', 'median', 'min',
    'missing', 'product', 'q1', 'q3', 'ci0', 'ci1', 'stderr', 'stdev',
    'stdevp', 'sum', 'valid', 'values', 'variance', 'variancep'.
    """
    _schema = {'$ref': '#/definitions/NonArgAggregateOp'}

    def __init__(self, *args):
        # Enum wrapper: the single positional value is validated by the base.
        super().__init__(*args)
class NormalizedSpec(VegaLiteSchema):
    """NormalizedSpec schema wrapper.

    Any specification in Vega-Lite: one of :class:`FacetedUnitSpec`,
    :class:`LayerSpec`, :class:`RepeatSpec`, :class:`NormalizedFacetSpec`,
    :class:`NormalizedConcatSpecGenericSpec`,
    :class:`NormalizedVConcatSpecGenericSpec`, or
    :class:`NormalizedHConcatSpecGenericSpec`.
    """
    _schema = {'$ref': '#/definitions/NormalizedSpec'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class NormalizedConcatSpecGenericSpec(NormalizedSpec):
    """NormalizedConcatSpecGenericSpec schema wrapper.

    Mapping(required=[concat])

    Base interface for a generalized (wrappable) concatenation specification.

    Attributes
    ----------
    concat : List(:class:`NormalizedSpec`)
        The views to be concatenated.
    align : anyOf(:class:`LayoutAlign`, :class:`RowColLayoutAlign`)
        Grid alignment for rows and columns: ``"all"`` (default), ``"each"``,
        or ``"none"``; an object of the form ``{"row": ..., "column": ...}``
        supplies per-axis values.
    bounds : enum('full', 'flush')
        Bounds calculation per sub-plot: ``"full"`` (default; includes axes,
        title and legend) or ``"flush"`` (specified width/height only).
    center : anyOf(boolean, :class:`RowColboolean`)
        Whether subviews are centered relative to their rows/columns
        (default ``False``); object form for per-axis values.
    columns : float
        Number of columns in the layout; ``undefined`` means an infinite
        number of columns (a single row).
    data : anyOf(:class:`Data`, None)
        The data source; ``None`` ignores the parent's data source. If unset,
        data is derived from the parent.
    description : string
        Description of this mark for commenting purposes.
    name : string
        Name of the visualization for later reference.
    resolve : :class:`Resolve`
        Scale, axis, and legend resolutions for the composition.
    spacing : anyOf(float, :class:`RowColnumber`)
        Pixel spacing between sub-views; defaults to the view composition
        config's ``"spacing"`` (``20``).
    title : anyOf(:class:`Text`, :class:`TitleParams`)
        Title for the plot.
    transform : List(:class:`Transform`)
        Data transformations such as filters and new field calculations.
    """
    _schema = {'$ref': '#/definitions/NormalizedConcatSpec<GenericSpec>'}

    def __init__(self, concat=Undefined, align=Undefined, bounds=Undefined, center=Undefined,
                 columns=Undefined, data=Undefined, description=Undefined, name=Undefined,
                 resolve=Undefined, spacing=Undefined, title=Undefined, transform=Undefined, **kwds):
        # Forward every keyword unchanged; the base class performs validation.
        super().__init__(
            concat=concat,
            align=align,
            bounds=bounds,
            center=center,
            columns=columns,
            data=data,
            description=description,
            name=name,
            resolve=resolve,
            spacing=spacing,
            title=title,
            transform=transform,
            **kwds,
        )
class NormalizedFacetSpec(NormalizedSpec):
    """NormalizedFacetSpec schema wrapper.

    Mapping(required=[facet, spec])

    Base interface for a facet specification.

    Attributes
    ----------
    facet : anyOf(:class:`FacetFieldDef`, :class:`FacetMapping`)
        How to facet the data: a field definition for faceting by one field,
        or an object mapping row/column channels to field definitions.
    spec : anyOf(:class:`LayerSpec`, :class:`FacetedUnitSpec`)
        The view specification that gets faceted.
    align : anyOf(:class:`LayoutAlign`, :class:`RowColLayoutAlign`)
        Grid alignment for rows and columns: ``"all"`` (default), ``"each"``,
        or ``"none"``; object form supplies per-axis values.
    bounds : enum('full', 'flush')
        Bounds calculation per sub-plot: ``"full"`` (default; includes axes,
        title and legend) or ``"flush"`` (specified width/height only).
    center : anyOf(boolean, :class:`RowColboolean`)
        Whether subviews are centered relative to their rows/columns
        (default ``False``); object form for per-axis values.
    columns : float
        Number of columns in the layout; ``undefined`` means an infinite
        number of columns (a single row).
    data : anyOf(:class:`Data`, None)
        The data source; ``None`` ignores the parent's data source. If unset,
        data is derived from the parent.
    description : string
        Description of this mark for commenting purposes.
    name : string
        Name of the visualization for later reference.
    resolve : :class:`Resolve`
        Scale, axis, and legend resolutions for the composition.
    spacing : anyOf(float, :class:`RowColnumber`)
        Pixel spacing between sub-views; defaults to the view composition
        config's ``"spacing"`` (``20``).
    title : anyOf(:class:`Text`, :class:`TitleParams`)
        Title for the plot.
    transform : List(:class:`Transform`)
        Data transformations such as filters and new field calculations.
    """
    _schema = {'$ref': '#/definitions/NormalizedFacetSpec'}

    def __init__(self, facet=Undefined, spec=Undefined, align=Undefined, bounds=Undefined,
                 center=Undefined, columns=Undefined, data=Undefined, description=Undefined,
                 name=Undefined, resolve=Undefined, spacing=Undefined, title=Undefined,
                 transform=Undefined, **kwds):
        # Forward every keyword unchanged; the base class performs validation.
        super().__init__(
            facet=facet,
            spec=spec,
            align=align,
            bounds=bounds,
            center=center,
            columns=columns,
            data=data,
            description=description,
            name=name,
            resolve=resolve,
            spacing=spacing,
            title=title,
            transform=transform,
            **kwds,
        )
class NormalizedHConcatSpecGenericSpec(NormalizedSpec):
    """NormalizedHConcatSpecGenericSpec schema wrapper.

    Mapping(required=[hconcat])

    Base interface for a horizontal concatenation specification.

    Attributes
    ----------
    hconcat : List(:class:`NormalizedSpec`)
        The views to be concatenated and put into a row.
    bounds : enum('full', 'flush')
        Bounds calculation per sub-plot: ``"full"`` (default; includes axes,
        title and legend) or ``"flush"`` (specified width/height only).
    center : boolean
        Whether subviews are centered relative to their rows/columns
        (default ``False``).
    data : anyOf(:class:`Data`, None)
        The data source; ``None`` ignores the parent's data source. If unset,
        data is derived from the parent.
    description : string
        Description of this mark for commenting purposes.
    name : string
        Name of the visualization for later reference.
    resolve : :class:`Resolve`
        Scale, axis, and legend resolutions for the composition.
    spacing : float
        Pixel spacing between sub-views of the concat operator
        (default ``10``).
    title : anyOf(:class:`Text`, :class:`TitleParams`)
        Title for the plot.
    transform : List(:class:`Transform`)
        Data transformations such as filters and new field calculations.
    """
    _schema = {'$ref': '#/definitions/NormalizedHConcatSpec<GenericSpec>'}

    def __init__(self, hconcat=Undefined, bounds=Undefined, center=Undefined, data=Undefined,
                 description=Undefined, name=Undefined, resolve=Undefined, spacing=Undefined,
                 title=Undefined, transform=Undefined, **kwds):
        # Forward every keyword unchanged; the base class performs validation.
        super().__init__(
            hconcat=hconcat,
            bounds=bounds,
            center=center,
            data=data,
            description=description,
            name=name,
            resolve=resolve,
            spacing=spacing,
            title=title,
            transform=transform,
            **kwds,
        )
class NormalizedVConcatSpecGenericSpec(NormalizedSpec):
    """NormalizedVConcatSpecGenericSpec schema wrapper.

    Mapping(required=[vconcat])

    Base interface for a vertical concatenation specification.

    Attributes
    ----------
    vconcat : List(:class:`NormalizedSpec`)
        The views to be concatenated and put into a column.
    bounds : enum('full', 'flush')
        Bounds calculation per sub-plot: ``"full"`` (default; includes axes,
        title and legend) or ``"flush"`` (specified width/height only).
    center : boolean
        Whether subviews are centered relative to their rows/columns
        (default ``False``).
    data : anyOf(:class:`Data`, None)
        The data source; ``None`` ignores the parent's data source. If unset,
        data is derived from the parent.
    description : string
        Description of this mark for commenting purposes.
    name : string
        Name of the visualization for later reference.
    resolve : :class:`Resolve`
        Scale, axis, and legend resolutions for the composition.
    spacing : float
        Pixel spacing between sub-views of the concat operator
        (default ``10``).
    title : anyOf(:class:`Text`, :class:`TitleParams`)
        Title for the plot.
    transform : List(:class:`Transform`)
        Data transformations such as filters and new field calculations.
    """
    _schema = {'$ref': '#/definitions/NormalizedVConcatSpec<GenericSpec>'}

    def __init__(self, vconcat=Undefined, bounds=Undefined, center=Undefined, data=Undefined,
                 description=Undefined, name=Undefined, resolve=Undefined, spacing=Undefined,
                 title=Undefined, transform=Undefined, **kwds):
        # Forward every keyword unchanged; the base class performs validation.
        super().__init__(
            vconcat=vconcat,
            bounds=bounds,
            center=center,
            data=data,
            description=description,
            name=name,
            resolve=resolve,
            spacing=spacing,
            title=title,
            transform=transform,
            **kwds,
        )
class NumericArrayMarkPropDef(VegaLiteSchema):
    """NumericArrayMarkPropDef schema wrapper.

    Accepts any of :class:`FieldOrDatumDefWithConditionMarkPropFieldDefnumberArray`,
    :class:`FieldOrDatumDefWithConditionDatumDefnumberArray`, or
    :class:`ValueDefWithConditionMarkPropFieldOrDatumDefnumberArray`.
    """
    _schema = {'$ref': '#/definitions/NumericArrayMarkPropDef'}

    def __init__(self, *args, **kwds):
        # All validation is delegated to the schema base class.
        super().__init__(*args, **kwds)
class FieldOrDatumDefWithConditionDatumDefnumberArray(MarkPropDefnumberArray, NumericArrayMarkPropDef):
    """FieldOrDatumDefWithConditionDatumDefnumberArray schema wrapper.

    Mapping(required=[])

    A datum definition combined with an optional value-based condition:
    ``{condition: {value: ...}, datum: ..., ...}``.

    Attributes
    ----------
    band : float
        For rect-based marks (``rect``, ``bar``, ``image``), mark size
        relative to the bandwidth of band scales, bins, or time units
        (``1`` = full band, ``0.5`` = half). For other marks, relative
        position on a band (``0`` = start, ``0.5`` = middle).
    condition : anyOf(:class:`ConditionalValueDefnumberArrayExprRef`,
    List(:class:`ConditionalValueDefnumberArrayExprRef`))
        One or more value definition(s) with a selection or test predicate.
        A field definition's ``condition`` may only contain conditional
        *value* definitions (at most one encoded field per channel).
    datum : anyOf(:class:`PrimitiveValue`, :class:`DateTime`, :class:`ExprRef`,
    :class:`RepeatRef`)
        A constant value in the data domain.
    type : :class:`Type`
        Measurement type (``"quantitative"``, ``"temporal"``, ``"ordinal"``,
        or ``"nominal"``; also ``"geojson"`` for geoshape encoding). For a
        constant ``datum`` the default is ``"quantitative"`` for numbers,
        ``"nominal"`` for strings, and ``"temporal"`` for date-time objects.
    """
    _schema = {'$ref': '#/definitions/FieldOrDatumDefWithCondition<DatumDef,number[]>'}

    def __init__(self, band=Undefined, condition=Undefined, datum=Undefined, type=Undefined, **kwds):
        # Forward every keyword unchanged; the base class performs validation.
        super().__init__(band=band, condition=condition, datum=datum, type=type, **kwds)
class FieldOrDatumDefWithConditionMarkPropFieldDefnumberArray(MarkPropDefnumberArray, NumericArrayMarkPropDef):
"""FieldOrDatumDefWithConditionMarkPropFieldDefnumberArray schema wrapper
Mapping(required=[])
A FieldDef with Condition :raw-html:`<ValueDef>` { condition: {value: ...}, field:
..., ... }
Attributes
----------
aggregate : :class:`Aggregate`
Aggregation function for the field (e.g., ``"mean"``, ``"sum"``, ``"median"``,
``"min"``, ``"max"``, ``"count"`` ).
**Default value:** ``undefined`` (None)
**See also:** `aggregate <https://vega.github.io/vega-lite/docs/aggregate.html>`__
documentation.
band : float
For rect-based marks ( ``rect``, ``bar``, and ``image`` ), mark size relative to
bandwidth of `band scales
<https://vega.github.io/vega-lite/docs/scale.html#band>`__, bins or time units. If
set to ``1``, the mark size is set to the bandwidth, the bin interval, or the time
unit interval. If set to ``0.5``, the mark size is half of the bandwidth or the time
unit interval.
For other marks, relative position on a band of a stacked, binned, time unit or band
scale. If set to ``0``, the marks will be positioned at the beginning of the band.
If set to ``0.5``, the marks will be positioned in the middle of the band.
bin : anyOf(boolean, :class:`BinParams`, None)
A flag for binning a ``quantitative`` field, `an object defining binning parameters
<https://vega.github.io/vega-lite/docs/bin.html#params>`__, or indicating that the
data for ``x`` or ``y`` channel are binned before they are imported into Vega-Lite (
``"binned"`` ).
If ``true``, default `binning parameters
<https://vega.github.io/vega-lite/docs/bin.html>`__ will be applied.
If ``"binned"``, this indicates that the data for the ``x`` (or ``y`` ) channel are
already binned. You can map the bin-start field to ``x`` (or ``y`` ) and the bin-end
field to ``x2`` (or ``y2`` ). The scale and axis will be formatted similar to
binning in Vega-Lite. To adjust the axis ticks based on the bin step, you can also
set the axis's `tickMinStep
<https://vega.github.io/vega-lite/docs/axis.html#ticks>`__ property.
**Default value:** ``false``
**See also:** `bin <https://vega.github.io/vega-lite/docs/bin.html>`__
documentation.
condition : anyOf(:class:`ConditionalValueDefnumberArrayExprRef`,
List(:class:`ConditionalValueDefnumberArrayExprRef`))
One or more value definition(s) with `a selection or a test predicate
<https://vega.github.io/vega-lite/docs/condition.html>`__.
**Note:** A field definition's ``condition`` property can only contain `conditional
value definitions <https://vega.github.io/vega-lite/docs/condition.html#value>`__
since Vega-Lite only allows at most one encoded field per encoding channel.
field : :class:`Field`
**Required.** A string defining the name of the field from which to pull a data
value or an object defining iterated values from the `repeat
<https://vega.github.io/vega-lite/docs/repeat.html>`__ operator.
**See also:** `field <https://vega.github.io/vega-lite/docs/field.html>`__
documentation.
**Notes:** 1) Dots ( ``.`` ) and brackets ( ``[`` and ``]`` ) can be used to access
nested objects (e.g., ``"field": "foo.bar"`` and ``"field": "foo['bar']"`` ). If
field names contain dots or brackets but are not nested, you can use ``\\`` to
escape dots and brackets (e.g., ``"a\\.b"`` and ``"a\\[0\\]"`` ). See more details
about escaping in the `field documentation
<https://vega.github.io/vega-lite/docs/field.html>`__. 2) ``field`` is not required
if ``aggregate`` is ``count``.
legend : anyOf(:class:`Legend`, None)
An object defining properties of the legend. If ``null``, the legend for the
encoding channel will be removed.
**Default value:** If undefined, default `legend properties
<https://vega.github.io/vega-lite/docs/legend.html>`__ are applied.
**See also:** `legend <https://vega.github.io/vega-lite/docs/legend.html>`__
documentation.
scale : anyOf(:class:`Scale`, None)
An object defining properties of the channel's scale, which is the function that
transforms values in the data domain (numbers, dates, strings, etc) to visual values
(pixels, colors, sizes) of the encoding channels.
If ``null``, the scale will be `disabled and the data value will be directly encoded
<https://vega.github.io/vega-lite/docs/scale.html#disable>`__.
**Default value:** If undefined, default `scale properties
<https://vega.github.io/vega-lite/docs/scale.html>`__ are applied.
**See also:** `scale <https://vega.github.io/vega-lite/docs/scale.html>`__
documentation.
sort : :class:`Sort`
Sort order for the encoded field.
For continuous fields (quantitative or temporal), ``sort`` can be either
``"ascending"`` or ``"descending"``.
For discrete fields, ``sort`` can be one of the following: - ``"ascending"`` or
``"descending"`` -- for sorting by the values' natural order in JavaScript. - `A
string indicating an encoding channel name to sort by
<https://vega.github.io/vega-lite/docs/sort.html#sort-by-encoding>`__ (e.g., ``"x"``
or ``"y"`` ) with an optional minus prefix for descending sort (e.g., ``"-x"`` to
sort by x-field, descending). This channel string is short-form of `a
sort-by-encoding definition
<https://vega.github.io/vega-lite/docs/sort.html#sort-by-encoding>`__. For example,
``"sort": "-x"`` is equivalent to ``"sort": {"encoding": "x", "order":
"descending"}``. - `A sort field definition
<https://vega.github.io/vega-lite/docs/sort.html#sort-field>`__ for sorting by
another field. - `An array specifying the field values in preferred order
<https://vega.github.io/vega-lite/docs/sort.html#sort-array>`__. In this case, the
sort order will obey the values in the array, followed by any unspecified values in
their original order. For discrete time field, values in the sort array can be
`date-time definition objects <types#datetime>`__. In addition, for time units
``"month"`` and ``"day"``, the values can be the month or day names (case
insensitive) or their 3-letter initials (e.g., ``"Mon"``, ``"Tue"`` ). - ``null``
indicating no sort.
**Default value:** ``"ascending"``
**Note:** ``null`` and sorting by another channel is not supported for ``row`` and
``column``.
**See also:** `sort <https://vega.github.io/vega-lite/docs/sort.html>`__
documentation.
timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
Time unit (e.g., ``year``, ``yearmonth``, ``month``, ``hours`` ) for a temporal
field. or `a temporal field that gets casted as ordinal
<https://vega.github.io/vega-lite/docs/type.html#cast>`__.
**Default value:** ``undefined`` (None)
**See also:** `timeUnit <https://vega.github.io/vega-lite/docs/timeunit.html>`__
documentation.
title : anyOf(:class:`Text`, None)
A title for the field. If ``null``, the title will be removed.
**Default value:** derived from the field's name and transformation function (
``aggregate``, ``bin`` and ``timeUnit`` ). If the field has an aggregate function,
the function is displayed as part of the title (e.g., ``"Sum of Profit"`` ). If the
field is binned or has a time unit applied, the applied function is shown in
parentheses (e.g., ``"Profit (binned)"``, ``"Transaction Date (year-month)"`` ).
Otherwise, the title is simply the field name.
**Notes** :
1) You can customize the default field title format by providing the `fieldTitle
<https://vega.github.io/vega-lite/docs/config.html#top-level-config>`__ property in
the `config <https://vega.github.io/vega-lite/docs/config.html>`__ or `fieldTitle
function via the compile function's options
<https://vega.github.io/vega-lite/docs/compile.html#field-title>`__.
2) If both field definition's ``title`` and axis, header, or legend ``title`` are
defined, axis/header/legend title will be used.
type : :class:`StandardType`
The type of measurement ( ``"quantitative"``, ``"temporal"``, ``"ordinal"``, or
``"nominal"`` ) for the encoded field or constant value ( ``datum`` ). It can also
be a ``"geojson"`` type for encoding `'geoshape'
<https://vega.github.io/vega-lite/docs/geoshape.html>`__.
Vega-Lite automatically infers data types in many cases as discussed below. However,
type is required for a field if: (1) the field is not nominal and the field encoding
has no specified ``aggregate`` (except ``argmin`` and ``argmax`` ), ``bin``, scale
type, custom ``sort`` order, nor ``timeUnit`` or (2) if you wish to use an ordinal
scale for a field with ``bin`` or ``timeUnit``.
**Default value:**
1) For a data ``field``, ``"nominal"`` is the default data type unless the field
encoding has ``aggregate``, ``channel``, ``bin``, scale type, ``sort``, or
``timeUnit`` that satisfies the following criteria: - ``"quantitative"`` is the
default type if (1) the encoded field contains ``bin`` or ``aggregate`` except
``"argmin"`` and ``"argmax"``, (2) the encoding channel is ``latitude`` or
``longitude`` channel or (3) if the specified scale type is `a quantitative scale
<https://vega.github.io/vega-lite/docs/scale.html#type>`__. - ``"temporal"`` is the
default type if (1) the encoded field contains ``timeUnit`` or (2) the specified
scale type is a time or utc scale - ``ordinal""`` is the default type if (1) the
encoded field contains a `custom sort order
<https://vega.github.io/vega-lite/docs/sort.html#specifying-custom-sort-order>`__,
(2) the specified scale type is an ordinal/point/band scale, or (3) the encoding
channel is ``order``.
2) For a constant value in data domain ( ``datum`` ): - ``"quantitative"`` if the
datum is a number - ``"nominal"`` if the datum is a string - ``"temporal"`` if the
datum is `a date time object
<https://vega.github.io/vega-lite/docs/datetime.html>`__
**Note:** - Data ``type`` describes the semantics of the data rather than the
primitive data types (number, string, etc.). The same primitive data type can have
different types of measurement. For example, numeric data can represent
quantitative, ordinal, or nominal data. - Data values for a temporal field can be
either a date-time string (e.g., ``"2015-03-07 12:32:17"``, ``"17:01"``,
``"2015-03-16"``. ``"2015"`` ) or a timestamp number (e.g., ``1552199579097`` ). -
When using with `bin <https://vega.github.io/vega-lite/docs/bin.html>`__, the
``type`` property can be either ``"quantitative"`` (for using a linear bin scale) or
`"ordinal" (for using an ordinal bin scale)
<https://vega.github.io/vega-lite/docs/type.html#cast-bin>`__. - When using with
`timeUnit <https://vega.github.io/vega-lite/docs/timeunit.html>`__, the ``type``
property can be either ``"temporal"`` (default, for using a temporal scale) or
`"ordinal" (for using an ordinal scale)
<https://vega.github.io/vega-lite/docs/type.html#cast-bin>`__. - When using with
`aggregate <https://vega.github.io/vega-lite/docs/aggregate.html>`__, the ``type``
property refers to the post-aggregation data type. For example, we can calculate
count ``distinct`` of a categorical field ``"cat"`` using ``{"aggregate":
"distinct", "field": "cat"}``. The ``"type"`` of the aggregate output is
``"quantitative"``. - Secondary channels (e.g., ``x2``, ``y2``, ``xError``,
``yError`` ) do not have ``type`` as they must have exactly the same type as their
primary channels (e.g., ``x``, ``y`` ).
**See also:** `type <https://vega.github.io/vega-lite/docs/type.html>`__
documentation.
"""
_schema = {'$ref': '#/definitions/FieldOrDatumDefWithCondition<MarkPropFieldDef,number[]>'}
def __init__(self, aggregate=Undefined, band=Undefined, bin=Undefined, condition=Undefined,
field=Undefined, legend=Undefined, scale=Undefined, sort=Undefined, timeUnit=Undefined,
title=Undefined, type=Undefined, **kwds):
super(FieldOrDatumDefWithConditionMarkPropFieldDefnumberArray, self).__init__(aggregate=aggregate,
band=band,
bin=bin,
condition=condition,
field=field,
legend=legend,
scale=scale,
sort=sort,
timeUnit=timeUnit,
title=title,
type=type, **kwds)
class NumericMarkPropDef(VegaLiteSchema):
    """NumericMarkPropDef schema wrapper

    anyOf(:class:`FieldOrDatumDefWithConditionMarkPropFieldDefnumber`,
    :class:`FieldOrDatumDefWithConditionDatumDefnumber`,
    :class:`ValueDefWithConditionMarkPropFieldOrDatumDefnumber`)

    Union wrapper: any of the numeric mark-property definitions above is
    accepted; validation is delegated entirely to the referenced schema.
    """
    _schema = {'$ref': '#/definitions/NumericMarkPropDef'}

    def __init__(self, *args, **kwds):
        # Pass everything through untouched; the base class validates
        # against ``_schema``.
        super().__init__(*args, **kwds)
class FieldOrDatumDefWithConditionDatumDefnumber(MarkPropDefnumber, NumericMarkPropDef):
    """FieldOrDatumDefWithConditionDatumDefnumber schema wrapper

    Mapping(required=[])

    A datum definition that may also carry conditional *value* definitions:
    ``{"condition": {"value": ...}, "datum": ..., ...}``.

    Attributes
    ----------
    band : float
        For rect-based marks, mark size relative to the bandwidth of `band
        scales <https://vega.github.io/vega-lite/docs/scale.html#band>`__,
        bins or time units (``1`` = full band, ``0.5`` = half).  For other
        marks, relative position on the band (``0`` = start, ``0.5`` =
        middle).
    condition : anyOf(:class:`ConditionalValueDefnumberExprRef`,
    List(:class:`ConditionalValueDefnumberExprRef`))
        One or more value definition(s) gated by `a selection or a test
        predicate <https://vega.github.io/vega-lite/docs/condition.html>`__.
        Only conditional *value* definitions are allowed here, since
        Vega-Lite permits at most one encoded field per channel.
    datum : anyOf(:class:`PrimitiveValue`, :class:`DateTime`, :class:`ExprRef`,
    :class:`RepeatRef`)
        A constant value in the data domain.
    type : :class:`Type`
        The type of measurement (``"quantitative"``, ``"temporal"``,
        ``"ordinal"``, ``"nominal"``, or ``"geojson"``).  See the `type
        documentation <https://vega.github.io/vega-lite/docs/type.html>`__
        for the default-inference rules and casting notes.
    """
    _schema = {'$ref': '#/definitions/FieldOrDatumDefWithCondition<DatumDef,number>'}

    def __init__(self, band=Undefined, condition=Undefined, datum=Undefined, type=Undefined, **kwds):
        # Forward each schema property by keyword; base class validates.
        super().__init__(
            band=band,
            condition=condition,
            datum=datum,
            type=type,
            **kwds,
        )
class FieldOrDatumDefWithConditionMarkPropFieldDefnumber(MarkPropDefnumber, NumericMarkPropDef):
    """FieldOrDatumDefWithConditionMarkPropFieldDefnumber schema wrapper

    Mapping(required=[])

    A field definition for a numeric mark property that may also carry
    conditional *value* definitions:
    ``{"condition": {"value": ...}, "field": ..., ...}``.

    Attributes
    ----------
    aggregate : :class:`Aggregate`
        Aggregation function for the field (e.g. ``"mean"``, ``"sum"``,
        ``"median"``, ``"min"``, ``"max"``, ``"count"``).  **Default:**
        ``undefined`` (None).  See the `aggregate documentation
        <https://vega.github.io/vega-lite/docs/aggregate.html>`__.
    band : float
        For rect-based marks, mark size relative to the bandwidth of `band
        scales <https://vega.github.io/vega-lite/docs/scale.html#band>`__,
        bins or time units; for other marks, relative position on the band
        (``0`` = start, ``0.5`` = middle).
    bin : anyOf(boolean, :class:`BinParams`, None)
        A flag for binning a ``quantitative`` field, an object of `binning
        parameters <https://vega.github.io/vega-lite/docs/bin.html#params>`__,
        or ``"binned"`` to indicate the data are pre-binned.  **Default:**
        ``false``.
    condition : anyOf(:class:`ConditionalValueDefnumberExprRef`,
    List(:class:`ConditionalValueDefnumberExprRef`))
        One or more value definition(s) gated by `a selection or a test
        predicate <https://vega.github.io/vega-lite/docs/condition.html>`__.
        Only conditional *value* definitions are allowed here.
    field : :class:`Field`
        **Required.** Name of the field to pull a data value from, or a
        `repeat <https://vega.github.io/vega-lite/docs/repeat.html>`__
        reference.  Dots/brackets access nested objects; escape with ``\\``
        when literal.  Not required when ``aggregate`` is ``count``.
    legend : anyOf(:class:`Legend`, None)
        Legend properties for the encoding channel; ``null`` removes the
        legend.  **Default:** standard `legend properties
        <https://vega.github.io/vega-lite/docs/legend.html>`__.
    scale : anyOf(:class:`Scale`, None)
        Scale properties mapping data-domain values to visual values;
        ``null`` `disables the scale
        <https://vega.github.io/vega-lite/docs/scale.html#disable>`__ so the
        data value is encoded directly.
    sort : :class:`Sort`
        Sort order for the encoded field: ``"ascending"``/``"descending"``,
        an encoding-channel shorthand (e.g. ``"-x"``), a `sort field
        definition
        <https://vega.github.io/vega-lite/docs/sort.html#sort-field>`__, an
        explicit `value array
        <https://vega.github.io/vega-lite/docs/sort.html#sort-array>`__, or
        ``null`` for no sort.  **Default:** ``"ascending"``.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit (e.g. ``year``, ``yearmonth``, ``month``, ``hours``) for a
        temporal field, or a temporal field `cast as ordinal
        <https://vega.github.io/vega-lite/docs/type.html#cast>`__.
    title : anyOf(:class:`Text`, None)
        Title for the field; ``null`` removes it.  **Default:** derived from
        the field name and any transformation function.  An axis/header/
        legend ``title`` takes precedence when both are defined.
    type : :class:`StandardType`
        The type of measurement (``"quantitative"``, ``"temporal"``,
        ``"ordinal"``, ``"nominal"``, or ``"geojson"``).  See the `type
        documentation <https://vega.github.io/vega-lite/docs/type.html>`__
        for the default-inference rules and casting notes.
    """
    _schema = {'$ref': '#/definitions/FieldOrDatumDefWithCondition<MarkPropFieldDef,number>'}

    def __init__(self, aggregate=Undefined, band=Undefined, bin=Undefined, condition=Undefined,
                 field=Undefined, legend=Undefined, scale=Undefined, sort=Undefined, timeUnit=Undefined,
                 title=Undefined, type=Undefined, **kwds):
        # Forward each schema property by keyword; base class validates.
        super().__init__(
            aggregate=aggregate,
            band=band,
            bin=bin,
            condition=condition,
            field=field,
            legend=legend,
            scale=scale,
            sort=sort,
            timeUnit=timeUnit,
            title=title,
            type=type,
            **kwds,
        )
class NumericValueDef(LatLongDef):
    """NumericValueDef schema wrapper

    Mapping(required=[value])

    Definition object for a constant value (primitive value or gradient
    definition) of an encoding channel.

    Attributes
    ----------
    value : anyOf(float, :class:`ExprRef`)
        A constant value in the visual domain (e.g. ``"red"`` /
        ``"#0099ff"`` / a `gradient definition
        <https://vega.github.io/vega-lite/docs/types.html#gradient>`__ for
        color, or a number between ``0`` and ``1`` for opacity).
    """
    _schema = {'$ref': '#/definitions/NumericValueDef'}

    def __init__(self, value=Undefined, **kwds):
        # Single schema property; base class validates against ``_schema``.
        super().__init__(value=value, **kwds)
class OrderFieldDef(VegaLiteSchema):
    """OrderFieldDef schema wrapper

    Mapping(required=[])

    Field definition for the ``order`` encoding channel.

    Attributes
    ----------
    aggregate : :class:`Aggregate`
        Aggregation function for the field (e.g. ``"mean"``, ``"sum"``,
        ``"median"``, ``"min"``, ``"max"``, ``"count"``).  **Default:**
        ``undefined`` (None).  See the `aggregate documentation
        <https://vega.github.io/vega-lite/docs/aggregate.html>`__.
    band : float
        For rect-based marks, mark size relative to the bandwidth of `band
        scales <https://vega.github.io/vega-lite/docs/scale.html#band>`__,
        bins or time units; for other marks, relative position on the band
        (``0`` = start, ``0.5`` = middle).
    bin : anyOf(boolean, :class:`BinParams`, string, None)
        A flag for binning a ``quantitative`` field, an object of `binning
        parameters <https://vega.github.io/vega-lite/docs/bin.html#params>`__,
        or ``"binned"`` to indicate the data are pre-binned.  **Default:**
        ``false``.  See the `bin documentation
        <https://vega.github.io/vega-lite/docs/bin.html>`__.
    field : :class:`Field`
        **Required.** Name of the field to pull a data value from, or a
        `repeat <https://vega.github.io/vega-lite/docs/repeat.html>`__
        reference.  Dots/brackets access nested objects; escape with ``\\``
        when literal.  Not required when ``aggregate`` is ``count``.
    sort : :class:`SortOrder`
        The sort order. One of ``"ascending"`` (default) or ``"descending"``.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit (e.g. ``year``, ``yearmonth``, ``month``, ``hours``) for a
        temporal field, or a temporal field `cast as ordinal
        <https://vega.github.io/vega-lite/docs/type.html#cast>`__.
    title : anyOf(:class:`Text`, None)
        Title for the field; ``null`` removes it.  **Default:** derived from
        the field name and any transformation function.  An axis/header/
        legend ``title`` takes precedence when both are defined.
    type : :class:`StandardType`
        The type of measurement (``"quantitative"``, ``"temporal"``,
        ``"ordinal"``, ``"nominal"``, or ``"geojson"``).  See the `type
        documentation <https://vega.github.io/vega-lite/docs/type.html>`__
        for the default-inference rules and casting notes.
    """
    _schema = {'$ref': '#/definitions/OrderFieldDef'}

    def __init__(self, aggregate=Undefined, band=Undefined, bin=Undefined, field=Undefined,
                 sort=Undefined, timeUnit=Undefined, title=Undefined, type=Undefined, **kwds):
        # Forward each schema property by keyword; base class validates.
        super().__init__(
            aggregate=aggregate,
            band=band,
            bin=bin,
            field=field,
            sort=sort,
            timeUnit=timeUnit,
            title=title,
            type=type,
            **kwds,
        )
class OrderValueDef(VegaLiteSchema):
    """OrderValueDef schema wrapper

    Mapping(required=[value])

    Constant-value definition for the ``order`` channel, optionally gated by
    conditions.

    Attributes
    ----------
    value : anyOf(float, :class:`ExprRef`)
        A constant value in the visual domain (e.g. ``"red"`` /
        ``"#0099ff"`` / a `gradient definition
        <https://vega.github.io/vega-lite/docs/types.html#gradient>`__ for
        color, or a number between ``0`` and ``1`` for opacity).
    condition : anyOf(:class:`ConditionalValueDefnumber`,
    List(:class:`ConditionalValueDefnumber`))
        One or more value definition(s) gated by `a selection or a test
        predicate <https://vega.github.io/vega-lite/docs/condition.html>`__.
        Only conditional *value* definitions are allowed here, since
        Vega-Lite permits at most one encoded field per channel.
    """
    _schema = {'$ref': '#/definitions/OrderValueDef'}

    def __init__(self, value=Undefined, condition=Undefined, **kwds):
        # Forward schema properties by keyword; base class validates.
        super().__init__(value=value, condition=condition, **kwds)
class Orient(VegaLiteSchema):
    """Orient schema wrapper

    enum('left', 'right', 'top', 'bottom')

    Simple enum wrapper; the single positional value is validated against
    the schema by the base class.
    """
    _schema = {'$ref': '#/definitions/Orient'}

    def __init__(self, *args):
        super().__init__(*args)
class Orientation(VegaLiteSchema):
    """Orientation schema wrapper

    enum('horizontal', 'vertical')

    Either ``'horizontal'`` or ``'vertical'``.
    """

    # JSON-schema reference resolved against the bundled Vega-Lite definitions.
    _schema = {'$ref': '#/definitions/Orientation'}

    def __init__(self, *args):
        # Positional arguments are passed straight through to schema validation.
        super().__init__(*args)
class OverlayMarkDef(VegaLiteSchema):
    """OverlayMarkDef schema wrapper

    Mapping(required=[])

    Mark-property definition used for overlays.  Every argument defaults to
    ``Undefined`` so that only explicitly supplied properties are serialized
    into the specification.

    Attributes (grouped; see the Vega-Lite mark documentation at
    https://vega.github.io/vega-lite/docs/mark.html for the meaning and
    accepted values of each property)
    ----------
    Position / geometry :
        ``x``, ``x2``, ``y``, ``y2``, ``xOffset``, ``x2Offset``, ``yOffset``,
        ``y2Offset``, ``width``, ``height``, ``theta``, ``theta2``,
        ``thetaOffset``, ``theta2Offset``, ``radius``, ``radius2``,
        ``radiusOffset``, ``radius2Offset``, ``innerRadius``, ``outerRadius``,
        ``startAngle``, ``endAngle``, ``padAngle``, ``orient``, ``aspect``
    Fill / stroke styling :
        ``color``, ``fill``, ``fillOpacity``, ``filled``, ``opacity``,
        ``stroke``, ``strokeCap``, ``strokeDash``, ``strokeDashOffset``,
        ``strokeJoin``, ``strokeMiterLimit``, ``strokeOffset``,
        ``strokeOpacity``, ``strokeWidth``, ``blend``, ``shape``, ``size``,
        ``cornerRadius``, ``cornerRadiusBottomLeft``,
        ``cornerRadiusBottomRight``, ``cornerRadiusTopLeft``,
        ``cornerRadiusTopRight``, ``interpolate``, ``tension``, ``smooth``,
        ``order``
    Text :
        ``text``, ``align``, ``angle``, ``baseline``, ``dir``, ``dx``, ``dy``,
        ``ellipsis``, ``font``, ``fontSize``, ``fontStyle``, ``fontWeight``,
        ``limit``, ``lineBreak``, ``lineHeight``
    Accessibility / interaction / misc :
        ``aria``, ``ariaRole``, ``ariaRoleDescription``, ``description``,
        ``cursor``, ``href``, ``url``, ``tooltip``, ``clip``, ``invalid``,
        ``style``, ``timeUnitBand``, ``timeUnitBandPosition``

    Any additional keyword arguments are forwarded unchanged and validated
    against the JSON schema.
    """

    # JSON-schema reference resolved against the bundled Vega-Lite definitions.
    _schema = {'$ref': '#/definitions/OverlayMarkDef'}

    def __init__(self, align=Undefined, angle=Undefined, aria=Undefined, ariaRole=Undefined,
                 ariaRoleDescription=Undefined, aspect=Undefined, baseline=Undefined, blend=Undefined,
                 clip=Undefined, color=Undefined, cornerRadius=Undefined,
                 cornerRadiusBottomLeft=Undefined, cornerRadiusBottomRight=Undefined,
                 cornerRadiusTopLeft=Undefined, cornerRadiusTopRight=Undefined, cursor=Undefined,
                 description=Undefined, dir=Undefined, dx=Undefined, dy=Undefined, ellipsis=Undefined,
                 endAngle=Undefined, fill=Undefined, fillOpacity=Undefined, filled=Undefined,
                 font=Undefined, fontSize=Undefined, fontStyle=Undefined, fontWeight=Undefined,
                 height=Undefined, href=Undefined, innerRadius=Undefined, interpolate=Undefined,
                 invalid=Undefined, limit=Undefined, lineBreak=Undefined, lineHeight=Undefined,
                 opacity=Undefined, order=Undefined, orient=Undefined, outerRadius=Undefined,
                 padAngle=Undefined, radius=Undefined, radius2=Undefined, radius2Offset=Undefined,
                 radiusOffset=Undefined, shape=Undefined, size=Undefined, smooth=Undefined,
                 startAngle=Undefined, stroke=Undefined, strokeCap=Undefined, strokeDash=Undefined,
                 strokeDashOffset=Undefined, strokeJoin=Undefined, strokeMiterLimit=Undefined,
                 strokeOffset=Undefined, strokeOpacity=Undefined, strokeWidth=Undefined,
                 style=Undefined, tension=Undefined, text=Undefined, theta=Undefined, theta2=Undefined,
                 theta2Offset=Undefined, thetaOffset=Undefined, timeUnitBand=Undefined,
                 timeUnitBandPosition=Undefined, tooltip=Undefined, url=Undefined, width=Undefined,
                 x=Undefined, x2=Undefined, x2Offset=Undefined, xOffset=Undefined, y=Undefined,
                 y2=Undefined, y2Offset=Undefined, yOffset=Undefined, **kwds):
        # Collect the explicitly named properties once, then forward them
        # together with any extra keywords.  Unpacking two mappings in the
        # same call (PEP 448) raises TypeError on a duplicate key, matching
        # the behavior of passing the keywords explicitly.
        named = dict(
            align=align, angle=angle, aria=aria, ariaRole=ariaRole,
            ariaRoleDescription=ariaRoleDescription, aspect=aspect, baseline=baseline,
            blend=blend, clip=clip, color=color, cornerRadius=cornerRadius,
            cornerRadiusBottomLeft=cornerRadiusBottomLeft,
            cornerRadiusBottomRight=cornerRadiusBottomRight,
            cornerRadiusTopLeft=cornerRadiusTopLeft,
            cornerRadiusTopRight=cornerRadiusTopRight, cursor=cursor,
            description=description, dir=dir, dx=dx, dy=dy, ellipsis=ellipsis,
            endAngle=endAngle, fill=fill, fillOpacity=fillOpacity, filled=filled,
            font=font, fontSize=fontSize, fontStyle=fontStyle, fontWeight=fontWeight,
            height=height, href=href, innerRadius=innerRadius, interpolate=interpolate,
            invalid=invalid, limit=limit, lineBreak=lineBreak, lineHeight=lineHeight,
            opacity=opacity, order=order, orient=orient, outerRadius=outerRadius,
            padAngle=padAngle, radius=radius, radius2=radius2,
            radius2Offset=radius2Offset, radiusOffset=radiusOffset, shape=shape,
            size=size, smooth=smooth, startAngle=startAngle, stroke=stroke,
            strokeCap=strokeCap, strokeDash=strokeDash,
            strokeDashOffset=strokeDashOffset, strokeJoin=strokeJoin,
            strokeMiterLimit=strokeMiterLimit, strokeOffset=strokeOffset,
            strokeOpacity=strokeOpacity, strokeWidth=strokeWidth, style=style,
            tension=tension, text=text, theta=theta, theta2=theta2,
            theta2Offset=theta2Offset, thetaOffset=thetaOffset,
            timeUnitBand=timeUnitBand, timeUnitBandPosition=timeUnitBandPosition,
            tooltip=tooltip, url=url, width=width, x=x, x2=x2, x2Offset=x2Offset,
            xOffset=xOffset, y=y, y2=y2, y2Offset=y2Offset, yOffset=yOffset,
        )
        super().__init__(**named, **kwds)
class Padding(VegaLiteSchema):
    """Padding schema wrapper

    anyOf(float, Mapping(required=[]))

    Either a single number or a mapping of padding properties.
    """

    # JSON-schema reference resolved against the bundled Vega-Lite definitions.
    _schema = {'$ref': '#/definitions/Padding'}

    def __init__(self, *args, **kwds):
        # All arguments are forwarded unchanged for schema validation.
        super().__init__(*args, **kwds)
class Parameter(VegaLiteSchema):
    """Parameter schema wrapper

    Mapping(required=[name])

    Attributes
    ----------
    name : string
        Required. A unique name for the parameter. Parameter names should be valid
        JavaScript identifiers: only alphanumeric characters (or "$", or "_") and may
        not start with a digit. Reserved keywords that may not be used as parameter
        names are "datum", "event", "item", and "parent".
    bind : :class:`Binding`
        Binds the parameter to an external input element such as a slider, selection
        list or radio button group.
    description : string
        A text description of the parameter, useful for inline documentation.
    expr : :class:`Expr`
        An expression for the value of the parameter. The expression may reference
        other parameters, in which case this parameter updates automatically when
        its upstream parameters change.
    value : Any
        The initial value of the parameter.
        **Default value:** ``undefined``
    """

    # JSON-schema reference resolved against the bundled Vega-Lite definitions.
    _schema = {'$ref': '#/definitions/Parameter'}

    def __init__(self, name=Undefined, bind=Undefined, description=Undefined, expr=Undefined,
                 value=Undefined, **kwds):
        # Forward all properties to the generic schema constructor for validation.
        super().__init__(name=name, bind=bind, description=description, expr=expr,
                         value=value, **kwds)
class Parse(VegaLiteSchema):
    """Parse schema wrapper

    Mapping(required=[])

    A mapping of field names to parse instructions.
    """

    # JSON-schema reference resolved against the bundled Vega-Lite definitions.
    _schema = {'$ref': '#/definitions/Parse'}

    def __init__(self, **kwds):
        # Keyword arguments become the mapping's entries; validated by the schema.
        super().__init__(**kwds)
class ParseValue(VegaLiteSchema):
    """ParseValue schema wrapper

    anyOf(None, string, string, string, string, string)
    """

    # JSON-schema reference resolved against the bundled Vega-Lite definitions.
    _schema = {'$ref': '#/definitions/ParseValue'}

    def __init__(self, *args, **kwds):
        # All arguments are forwarded unchanged for schema validation.
        super().__init__(*args, **kwds)
class PolarDef(VegaLiteSchema):
    """PolarDef schema wrapper

    anyOf(:class:`PositionFieldDefBase`, :class:`PositionDatumDefBase`,
    :class:`PositionValueDef`)

    Union type for polar position channel definitions.
    """

    # JSON-schema reference resolved against the bundled Vega-Lite definitions.
    _schema = {'$ref': '#/definitions/PolarDef'}

    def __init__(self, *args, **kwds):
        # All arguments are forwarded unchanged for schema validation.
        super().__init__(*args, **kwds)
class Position2Def(VegaLiteSchema):
    """Position2Def schema wrapper

    anyOf(:class:`SecondaryFieldDef`, :class:`DatumDef`, :class:`PositionValueDef`)

    Union type for secondary position channel definitions.
    """

    # JSON-schema reference resolved against the bundled Vega-Lite definitions.
    _schema = {'$ref': '#/definitions/Position2Def'}

    def __init__(self, *args, **kwds):
        # All arguments are forwarded unchanged for schema validation.
        super().__init__(*args, **kwds)
class DatumDef(LatLongDef, Position2Def):
    """DatumDef schema wrapper

    Mapping(required=[])

    Channel definition encoding a constant data-domain value (``datum``).

    Attributes
    ----------
    band : float
        For rect-based marks (``rect``, ``bar``, and ``image``), mark size relative to
        the bandwidth of `band scales
        <https://vega.github.io/vega-lite/docs/scale.html#band>`__, bins or time units
        (``1`` = full bandwidth / interval, ``0.5`` = half).
        For other marks, relative position on a band of a stacked, binned, time unit
        or band scale (``0`` = beginning of the band, ``0.5`` = middle).
    datum : anyOf(:class:`PrimitiveValue`, :class:`DateTime`, :class:`ExprRef`, :class:`RepeatRef`)
        A constant value in data domain.
    type : :class:`Type`
        The type of measurement ( ``"quantitative"``, ``"temporal"``, ``"ordinal"``, or
        ``"nominal"`` ) for the encoded field or constant value, or ``"geojson"`` for
        encoding ``'geoshape'``.  Vega-Lite infers the type in many cases; for a
        ``datum`` the default is ``"quantitative"`` for numbers, ``"nominal"`` for
        strings and ``"temporal"`` for date-time objects.
        **See also:** `type <https://vega.github.io/vega-lite/docs/type.html>`__
        documentation for the full inference rules.
    """

    # JSON-schema reference resolved against the bundled Vega-Lite definitions.
    _schema = {'$ref': '#/definitions/DatumDef'}

    def __init__(self, band=Undefined, datum=Undefined, type=Undefined, **kwds):
        # Forward all properties to the generic schema constructor for validation.
        super().__init__(band=band, datum=datum, type=type, **kwds)
class PositionDatumDefBase(PolarDef):
    """PositionDatumDefBase schema wrapper

    Mapping(required=[])

    Base definition for a position channel encoding a constant data-domain
    value (``datum``), with scale and stacking options.

    Attributes
    ----------
    band : float
        For rect-based marks (``rect``, ``bar``, and ``image``), mark size relative to
        the bandwidth of `band scales
        <https://vega.github.io/vega-lite/docs/scale.html#band>`__, bins or time units
        (``1`` = full bandwidth / interval, ``0.5`` = half).
        For other marks, relative position on a band of a stacked, binned, time unit
        or band scale (``0`` = beginning of the band, ``0.5`` = middle).
    datum : anyOf(:class:`PrimitiveValue`, :class:`DateTime`, :class:`ExprRef`, :class:`RepeatRef`)
        A constant value in data domain.
    scale : anyOf(:class:`Scale`, None)
        An object defining properties of the channel's scale — the function mapping
        data-domain values (numbers, dates, strings, etc) to visual values (pixels,
        colors, sizes).  If ``null``, the scale is disabled and the data value is
        encoded directly.
        **Default value:** if undefined, default `scale properties
        <https://vega.github.io/vega-lite/docs/scale.html>`__ are applied.
    stack : anyOf(:class:`StackOffset`, None, boolean)
        Type of stacking offset, applicable only to ``x``, ``y``, ``theta``, and
        ``radius`` channels with continuous domains.  One of ``"zero"`` / ``true``
        (baseline at zero), ``"normalize"`` (normalized domain), ``"center"``
        (center baseline, e.g. streamgraph), or ``null`` / ``false`` (no stacking,
        layered marks).
        **Default value:** ``zero`` when the mark is ``bar``, ``area``, or ``arc``,
        the stacked measure channel has a linear scale, and at least one
        non-position channel maps to an unaggregated field different from x and y;
        otherwise ``null``.
        **See also:** `stack <https://vega.github.io/vega-lite/docs/stack.html>`__
        documentation.
    type : :class:`Type`
        The type of measurement ( ``"quantitative"``, ``"temporal"``, ``"ordinal"``, or
        ``"nominal"`` ) for the encoded field or constant value, or ``"geojson"``.
        For a ``datum`` the default is ``"quantitative"`` for numbers, ``"nominal"``
        for strings and ``"temporal"`` for date-time objects.
        **See also:** `type <https://vega.github.io/vega-lite/docs/type.html>`__
        documentation for the full inference rules.
    """

    # JSON-schema reference resolved against the bundled Vega-Lite definitions.
    _schema = {'$ref': '#/definitions/PositionDatumDefBase'}

    def __init__(self, band=Undefined, datum=Undefined, scale=Undefined, stack=Undefined,
                 type=Undefined, **kwds):
        # Forward all properties to the generic schema constructor for validation.
        super().__init__(band=band, datum=datum, scale=scale, stack=stack,
                         type=type, **kwds)
class PositionDef(VegaLiteSchema):
    """PositionDef schema wrapper

    anyOf(:class:`PositionFieldDef`, :class:`PositionDatumDef`, :class:`PositionValueDef`)

    Union type for position channel definitions.
    """

    # JSON-schema reference resolved against the bundled Vega-Lite definitions.
    _schema = {'$ref': '#/definitions/PositionDef'}

    def __init__(self, *args, **kwds):
        # All arguments are forwarded unchanged for schema validation.
        super().__init__(*args, **kwds)
class PositionDatumDef(PositionDef):
"""PositionDatumDef schema wrapper
Mapping(required=[])
Attributes
----------
axis : anyOf(:class:`Axis`, None)
An object defining properties of axis's gridlines, ticks and labels. If ``null``,
the axis for the encoding channel will be removed.
**Default value:** If undefined, default `axis properties
<https://vega.github.io/vega-lite/docs/axis.html>`__ are applied.
**See also:** `axis <https://vega.github.io/vega-lite/docs/axis.html>`__
documentation.
band : float
For rect-based marks ( ``rect``, ``bar``, and ``image`` ), mark size relative to
bandwidth of `band scales
<https://vega.github.io/vega-lite/docs/scale.html#band>`__, bins or time units. If
set to ``1``, the mark size is set to the bandwidth, the bin interval, or the time
unit interval. If set to ``0.5``, the mark size is half of the bandwidth or the time
unit interval.
For other marks, relative position on a band of a stacked, binned, time unit or band
scale. If set to ``0``, the marks will be positioned at the beginning of the band.
If set to ``0.5``, the marks will be positioned in the middle of the band.
datum : anyOf(:class:`PrimitiveValue`, :class:`DateTime`, :class:`ExprRef`,
:class:`RepeatRef`)
A constant value in data domain.
impute : anyOf(:class:`ImputeParams`, None)
An object defining the properties of the Impute Operation to be applied. The field
value of the other positional channel is taken as ``key`` of the ``Impute``
Operation. The field of the ``color`` channel if specified is used as ``groupby`` of
the ``Impute`` Operation.
**See also:** `impute <https://vega.github.io/vega-lite/docs/impute.html>`__
documentation.
scale : anyOf(:class:`Scale`, None)
An object defining properties of the channel's scale, which is the function that
transforms values in the data domain (numbers, dates, strings, etc) to visual values
(pixels, colors, sizes) of the encoding channels.
If ``null``, the scale will be `disabled and the data value will be directly encoded
<https://vega.github.io/vega-lite/docs/scale.html#disable>`__.
**Default value:** If undefined, default `scale properties
<https://vega.github.io/vega-lite/docs/scale.html>`__ are applied.
**See also:** `scale <https://vega.github.io/vega-lite/docs/scale.html>`__
documentation.
stack : anyOf(:class:`StackOffset`, None, boolean)
Type of stacking offset if the field should be stacked. ``stack`` is only applicable
for ``x``, ``y``, ``theta``, and ``radius`` channels with continuous domains. For
example, ``stack`` of ``y`` can be used to customize stacking for a vertical bar
chart.
``stack`` can be one of the following values: - ``"zero"`` or `true`: stacking with
baseline offset at zero value of the scale (for creating typical stacked
[bar](https://vega.github.io/vega-lite/docs/stack.html#bar) and `area
<https://vega.github.io/vega-lite/docs/stack.html#area>`__ chart). - ``"normalize"``
- stacking with normalized domain (for creating `normalized stacked bar and area
charts <https://vega.github.io/vega-lite/docs/stack.html#normalized>`__.
:raw-html:`<br/>` - ``"center"`` - stacking with center baseline (for `streamgraph
<https://vega.github.io/vega-lite/docs/stack.html#streamgraph>`__ ). - ``null`` or
``false`` - No-stacking. This will produce layered `bar
<https://vega.github.io/vega-lite/docs/stack.html#layered-bar-chart>`__ and area
chart.
**Default value:** ``zero`` for plots with all of the following conditions are true:
(1) the mark is ``bar``, ``area``, or ``arc`` ; (2) the stacked measure channel (x
or y) has a linear scale; (3) At least one of non-position channels mapped to an
unaggregated field that is different from x and y. Otherwise, ``null`` by default.
**See also:** `stack <https://vega.github.io/vega-lite/docs/stack.html>`__
documentation.
type : :class:`Type`
The type of measurement ( ``"quantitative"``, ``"temporal"``, ``"ordinal"``, or
``"nominal"`` ) for the encoded field or constant value ( ``datum`` ). It can also
be a ``"geojson"`` type for encoding `'geoshape'
<https://vega.github.io/vega-lite/docs/geoshape.html>`__.
Vega-Lite automatically infers data types in many cases as discussed below. However,
type is required for a field if: (1) the field is not nominal and the field encoding
has no specified ``aggregate`` (except ``argmin`` and ``argmax`` ), ``bin``, scale
type, custom ``sort`` order, nor ``timeUnit`` or (2) if you wish to use an ordinal
scale for a field with ``bin`` or ``timeUnit``.
**Default value:**
1) For a data ``field``, ``"nominal"`` is the default data type unless the field
encoding has ``aggregate``, ``channel``, ``bin``, scale type, ``sort``, or
``timeUnit`` that satisfies the following criteria: - ``"quantitative"`` is the
default type if (1) the encoded field contains ``bin`` or ``aggregate`` except
``"argmin"`` and ``"argmax"``, (2) the encoding channel is ``latitude`` or
``longitude`` channel or (3) if the specified scale type is `a quantitative scale
<https://vega.github.io/vega-lite/docs/scale.html#type>`__. - ``"temporal"`` is the
default type if (1) the encoded field contains ``timeUnit`` or (2) the specified
scale type is a time or utc scale - ``ordinal""`` is the default type if (1) the
encoded field contains a `custom sort order
<https://vega.github.io/vega-lite/docs/sort.html#specifying-custom-sort-order>`__,
(2) the specified scale type is an ordinal/point/band scale, or (3) the encoding
channel is ``order``.
2) For a constant value in data domain ( ``datum`` ): - ``"quantitative"`` if the
datum is a number - ``"nominal"`` if the datum is a string - ``"temporal"`` if the
datum is `a date time object
<https://vega.github.io/vega-lite/docs/datetime.html>`__
**Note:** - Data ``type`` describes the semantics of the data rather than the
primitive data types (number, string, etc.). The same primitive data type can have
different types of measurement. For example, numeric data can represent
quantitative, ordinal, or nominal data. - Data values for a temporal field can be
either a date-time string (e.g., ``"2015-03-07 12:32:17"``, ``"17:01"``,
``"2015-03-16"``. ``"2015"`` ) or a timestamp number (e.g., ``1552199579097`` ). -
When using with `bin <https://vega.github.io/vega-lite/docs/bin.html>`__, the
``type`` property can be either ``"quantitative"`` (for using a linear bin scale) or
`"ordinal" (for using an ordinal bin scale)
<https://vega.github.io/vega-lite/docs/type.html#cast-bin>`__. - When using with
`timeUnit <https://vega.github.io/vega-lite/docs/timeunit.html>`__, the ``type``
property can be either ``"temporal"`` (default, for using a temporal scale) or
`"ordinal" (for using an ordinal scale)
<https://vega.github.io/vega-lite/docs/type.html#cast-bin>`__. - When using with
`aggregate <https://vega.github.io/vega-lite/docs/aggregate.html>`__, the ``type``
property refers to the post-aggregation data type. For example, we can calculate
count ``distinct`` of a categorical field ``"cat"`` using ``{"aggregate":
"distinct", "field": "cat"}``. The ``"type"`` of the aggregate output is
``"quantitative"``. - Secondary channels (e.g., ``x2``, ``y2``, ``xError``,
``yError`` ) do not have ``type`` as they must have exactly the same type as their
primary channels (e.g., ``x``, ``y`` ).
**See also:** `type <https://vega.github.io/vega-lite/docs/type.html>`__
documentation.
"""
_schema = {'$ref': '#/definitions/PositionDatumDef'}
def __init__(self, axis=Undefined, band=Undefined, datum=Undefined, impute=Undefined,
             scale=Undefined, stack=Undefined, type=Undefined, **kwds):
    """Build a ``PositionDatumDef``; any extra schema keys pass through ``**kwds``."""
    super(PositionDatumDef, self).__init__(
        axis=axis,
        band=band,
        datum=datum,
        impute=impute,
        scale=scale,
        stack=stack,
        type=type,
        **kwds,
    )
class PositionFieldDef(PositionDef):
    """PositionFieldDef schema wrapper.

    Mapping(required=[])

    Field definition for a position encoding channel. See the Vega-Lite
    documentation linked per attribute for full semantics.

    Attributes
    ----------
    aggregate : :class:`Aggregate`
        Aggregation function for the field (e.g. ``"mean"``, ``"sum"``,
        ``"median"``, ``"min"``, ``"max"``, ``"count"``). Default:
        ``undefined`` (no aggregation). See
        `aggregate <https://vega.github.io/vega-lite/docs/aggregate.html>`__.
    axis : anyOf(:class:`Axis`, None)
        Axis properties (gridlines, ticks, labels) for the channel; ``null``
        removes the axis. Defaults from
        `axis <https://vega.github.io/vega-lite/docs/axis.html>`__ apply when
        undefined.
    band : float
        For rect-based marks (``rect``, ``bar``, ``image``): mark size
        relative to the bandwidth of `band scales
        <https://vega.github.io/vega-lite/docs/scale.html#band>`__, bins, or
        time units (``1`` = full interval, ``0.5`` = half). For other marks:
        relative position within the band (``0`` = start, ``0.5`` = middle).
    bin : anyOf(boolean, :class:`BinParams`, string, None)
        Binning control: ``true`` applies default `binning parameters
        <https://vega.github.io/vega-lite/docs/bin.html>`__, an object gives
        explicit parameters, and ``"binned"`` marks data that was binned
        before import (map bin-start to ``x``/``y`` and bin-end to
        ``x2``/``y2``). Default: ``false``.
    field : :class:`Field`
        **Required.** Name of the data field to encode, or an object of
        iterated values from the `repeat
        <https://vega.github.io/vega-lite/docs/repeat.html>`__ operator.
        Dots and brackets access nested objects; escape them with ``\\``
        when they are literal characters (see `field
        <https://vega.github.io/vega-lite/docs/field.html>`__). Not required
        when ``aggregate`` is ``count``.
    impute : anyOf(:class:`ImputeParams`, None)
        Impute-operation properties. The other positional channel's field is
        the ``key``; the ``color`` channel's field, if any, is the
        ``groupby``. See
        `impute <https://vega.github.io/vega-lite/docs/impute.html>`__.
    scale : anyOf(:class:`Scale`, None)
        Scale properties mapping data values to visual values; ``null``
        `disables the scale
        <https://vega.github.io/vega-lite/docs/scale.html#disable>`__ so the
        data value is encoded directly.
    sort : :class:`Sort`
        Sort order for the encoded field: ``"ascending"``/``"descending"``,
        an encoding-channel name (optionally ``-``-prefixed for descending,
        e.g. ``"-x"``), a `sort field definition
        <https://vega.github.io/vega-lite/docs/sort.html#sort-field>`__, an
        explicit `value array
        <https://vega.github.io/vega-lite/docs/sort.html#sort-array>`__, or
        ``null`` for no sort. Default: ``"ascending"``. ``null`` and
        sort-by-channel are not supported for ``row``/``column``.
    stack : anyOf(:class:`StackOffset`, None, boolean)
        Stacking offset, applicable to ``x``, ``y``, ``theta`` and
        ``radius`` channels with continuous domains: ``"zero"``/``true``
        (baseline at zero), ``"normalize"`` (normalized domain),
        ``"center"`` (streamgraph), or ``null``/``false`` (layered, no
        stacking). Defaults to ``zero`` for bar/area/arc marks with a
        linearly scaled measure channel and at least one unaggregated
        non-position field; otherwise ``null``. See
        `stack <https://vega.github.io/vega-lite/docs/stack.html>`__.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit (e.g. ``year``, ``yearmonth``, ``month``, ``hours``) for a
        temporal field, or a temporal field `cast as ordinal
        <https://vega.github.io/vega-lite/docs/type.html#cast>`__. Default:
        ``undefined``.
    title : anyOf(:class:`Text`, None)
        Title for the field; ``null`` removes it. Default derives from the
        field name and any ``aggregate``/``bin``/``timeUnit`` function
        (e.g. ``"Sum of Profit"``, ``"Profit (binned)"``). An axis, header,
        or legend ``title`` takes precedence over this one.
    type : :class:`StandardType`
        Measurement type (``"quantitative"``, ``"temporal"``,
        ``"ordinal"``, ``"nominal"``, or ``"geojson"`` for geoshape).
        Vega-Lite infers the type in many cases (see
        `type <https://vega.github.io/vega-lite/docs/type.html>`__); it is
        required when the field is non-nominal with no ``aggregate``
        (except ``argmin``/``argmax``), ``bin``, scale type, custom
        ``sort``, or ``timeUnit``, or to force an ordinal scale with
        ``bin``/``timeUnit``. With ``aggregate``, ``type`` refers to the
        post-aggregation data type. Secondary channels (``x2``, ``y2``,
        ``xError``, ``yError``) take no ``type`` of their own.
    """
    _schema = {'$ref': '#/definitions/PositionFieldDef'}

    def __init__(self, aggregate=Undefined, axis=Undefined, band=Undefined, bin=Undefined,
                 field=Undefined, impute=Undefined, scale=Undefined, sort=Undefined, stack=Undefined,
                 timeUnit=Undefined, title=Undefined, type=Undefined, **kwds):
        """Build a ``PositionFieldDef``; extra schema keys pass through ``**kwds``."""
        super(PositionFieldDef, self).__init__(
            aggregate=aggregate,
            axis=axis,
            band=band,
            bin=bin,
            field=field,
            impute=impute,
            scale=scale,
            sort=sort,
            stack=stack,
            timeUnit=timeUnit,
            title=title,
            type=type,
            **kwds,
        )
class PositionFieldDefBase(PolarDef):
    """PositionFieldDefBase schema wrapper.

    Mapping(required=[])

    Base field definition for polar position channels. Same attributes as
    :class:`PositionFieldDef` minus ``axis`` and ``impute``.

    Attributes
    ----------
    aggregate : :class:`Aggregate`
        Aggregation function for the field (e.g. ``"mean"``, ``"sum"``,
        ``"median"``, ``"min"``, ``"max"``, ``"count"``). Default:
        ``undefined`` (no aggregation). See
        `aggregate <https://vega.github.io/vega-lite/docs/aggregate.html>`__.
    band : float
        For rect-based marks (``rect``, ``bar``, ``image``): mark size
        relative to the bandwidth of `band scales
        <https://vega.github.io/vega-lite/docs/scale.html#band>`__, bins, or
        time units (``1`` = full interval, ``0.5`` = half). For other marks:
        relative position within the band (``0`` = start, ``0.5`` = middle).
    bin : anyOf(boolean, :class:`BinParams`, string, None)
        Binning control: ``true`` applies default `binning parameters
        <https://vega.github.io/vega-lite/docs/bin.html>`__, an object gives
        explicit parameters, and ``"binned"`` marks data that was binned
        before import (map bin-start to ``x``/``y`` and bin-end to
        ``x2``/``y2``). Default: ``false``.
    field : :class:`Field`
        **Required.** Name of the data field to encode, or an object of
        iterated values from the `repeat
        <https://vega.github.io/vega-lite/docs/repeat.html>`__ operator.
        Dots and brackets access nested objects; escape them with ``\\``
        when they are literal characters (see `field
        <https://vega.github.io/vega-lite/docs/field.html>`__). Not required
        when ``aggregate`` is ``count``.
    scale : anyOf(:class:`Scale`, None)
        Scale properties mapping data values to visual values; ``null``
        `disables the scale
        <https://vega.github.io/vega-lite/docs/scale.html#disable>`__ so the
        data value is encoded directly.
    sort : :class:`Sort`
        Sort order for the encoded field: ``"ascending"``/``"descending"``,
        an encoding-channel name (optionally ``-``-prefixed for descending,
        e.g. ``"-x"``), a `sort field definition
        <https://vega.github.io/vega-lite/docs/sort.html#sort-field>`__, an
        explicit `value array
        <https://vega.github.io/vega-lite/docs/sort.html#sort-array>`__, or
        ``null`` for no sort. Default: ``"ascending"``. ``null`` and
        sort-by-channel are not supported for ``row``/``column``.
    stack : anyOf(:class:`StackOffset`, None, boolean)
        Stacking offset, applicable to ``x``, ``y``, ``theta`` and
        ``radius`` channels with continuous domains: ``"zero"``/``true``
        (baseline at zero), ``"normalize"`` (normalized domain),
        ``"center"`` (streamgraph), or ``null``/``false`` (layered, no
        stacking). Defaults to ``zero`` for bar/area/arc marks with a
        linearly scaled measure channel and at least one unaggregated
        non-position field; otherwise ``null``. See
        `stack <https://vega.github.io/vega-lite/docs/stack.html>`__.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit (e.g. ``year``, ``yearmonth``, ``month``, ``hours``) for a
        temporal field, or a temporal field `cast as ordinal
        <https://vega.github.io/vega-lite/docs/type.html#cast>`__. Default:
        ``undefined``.
    title : anyOf(:class:`Text`, None)
        Title for the field; ``null`` removes it. Default derives from the
        field name and any ``aggregate``/``bin``/``timeUnit`` function
        (e.g. ``"Sum of Profit"``, ``"Profit (binned)"``). An axis, header,
        or legend ``title`` takes precedence over this one.
    type : :class:`StandardType`
        Measurement type (``"quantitative"``, ``"temporal"``,
        ``"ordinal"``, ``"nominal"``, or ``"geojson"`` for geoshape).
        Vega-Lite infers the type in many cases (see
        `type <https://vega.github.io/vega-lite/docs/type.html>`__); it is
        required when the field is non-nominal with no ``aggregate``
        (except ``argmin``/``argmax``), ``bin``, scale type, custom
        ``sort``, or ``timeUnit``, or to force an ordinal scale with
        ``bin``/``timeUnit``. With ``aggregate``, ``type`` refers to the
        post-aggregation data type. Secondary channels (``x2``, ``y2``,
        ``xError``, ``yError``) take no ``type`` of their own.
    """
    _schema = {'$ref': '#/definitions/PositionFieldDefBase'}

    def __init__(self, aggregate=Undefined, band=Undefined, bin=Undefined, field=Undefined,
                 scale=Undefined, sort=Undefined, stack=Undefined, timeUnit=Undefined, title=Undefined,
                 type=Undefined, **kwds):
        """Build a ``PositionFieldDefBase``; extra schema keys pass through ``**kwds``."""
        super(PositionFieldDefBase, self).__init__(
            aggregate=aggregate,
            band=band,
            bin=bin,
            field=field,
            scale=scale,
            sort=sort,
            stack=stack,
            timeUnit=timeUnit,
            title=title,
            type=type,
            **kwds,
        )
class PositionValueDef(PolarDef, Position2Def, PositionDef):
    """PositionValueDef schema wrapper.

    Mapping(required=[value])

    Definition object for a constant value (primitive value or gradient
    definition) of an encoding channel.

    Attributes
    ----------
    value : anyOf(float, string, string, :class:`ExprRef`)
        A constant value in the visual domain — e.g. ``"red"``,
        ``"#0099ff"``, a `gradient definition
        <https://vega.github.io/vega-lite/docs/types.html#gradient>`__ for
        color, or a number between ``0`` and ``1`` for opacity.
    """
    _schema = {'$ref': '#/definitions/PositionValueDef'}

    def __init__(self, value=Undefined, **kwds):
        """Build a ``PositionValueDef`` wrapping a constant visual value."""
        super(PositionValueDef, self).__init__(value=value, **kwds)
class PredicateComposition(VegaLiteSchema):
    """PredicateComposition schema wrapper.

    anyOf(:class:`LogicalNotPredicate`, :class:`LogicalAndPredicate`,
    :class:`LogicalOrPredicate`, :class:`Predicate`)

    Union type: a predicate, or a logical combination of predicates.
    """
    _schema = {'$ref': '#/definitions/PredicateComposition'}

    def __init__(self, *args, **kwds):
        """Forward all positional and keyword arguments to the schema base."""
        super(PredicateComposition, self).__init__(*args, **kwds)
class LogicalAndPredicate(PredicateComposition):
    """LogicalAndPredicate schema wrapper.

    Mapping(required=[and])

    Conjunction of predicates. The required ``and`` key is a Python keyword,
    so it must be supplied through ``**kwds`` (e.g. ``**{'and': [...]}``).

    Attributes
    ----------
    and : List(:class:`PredicateComposition`)
    """
    _schema = {'$ref': '#/definitions/LogicalAnd<Predicate>'}

    def __init__(self, **kwds):
        """Forward keyword arguments (including the ``and`` key) to the base."""
        super(LogicalAndPredicate, self).__init__(**kwds)
class LogicalNotPredicate(PredicateComposition):
    """LogicalNotPredicate schema wrapper.

    Mapping(required=[not])

    Negation of a predicate. The required ``not`` key is a Python keyword,
    so it must be supplied through ``**kwds`` (e.g. ``**{'not': ...}``).

    Attributes
    ----------
    not : :class:`PredicateComposition`
    """
    _schema = {'$ref': '#/definitions/LogicalNot<Predicate>'}

    def __init__(self, **kwds):
        """Forward keyword arguments (including the ``not`` key) to the base."""
        super(LogicalNotPredicate, self).__init__(**kwds)
class LogicalOrPredicate(PredicateComposition):
    """LogicalOrPredicate schema wrapper.

    Mapping(required=[or])

    Disjunction of predicates. The required ``or`` key is a Python keyword,
    so it must be supplied through ``**kwds`` (e.g. ``**{'or': [...]}``).

    Attributes
    ----------
    or : List(:class:`PredicateComposition`)
    """
    _schema = {'$ref': '#/definitions/LogicalOr<Predicate>'}

    def __init__(self, **kwds):
        """Forward keyword arguments (including the ``or`` key) to the base."""
        super(LogicalOrPredicate, self).__init__(**kwds)
class Predicate(PredicateComposition):
    """Predicate schema wrapper.

    anyOf(:class:`FieldEqualPredicate`, :class:`FieldRangePredicate`,
    :class:`FieldOneOfPredicate`, :class:`FieldLTPredicate`,
    :class:`FieldGTPredicate`, :class:`FieldLTEPredicate`,
    :class:`FieldGTEPredicate`, :class:`FieldValidPredicate`,
    :class:`SelectionPredicate`, string)

    Union type: any single (non-composed) predicate form.
    """
    _schema = {'$ref': '#/definitions/Predicate'}

    def __init__(self, *args, **kwds):
        """Forward all positional and keyword arguments to the schema base."""
        super(Predicate, self).__init__(*args, **kwds)
class FieldEqualPredicate(Predicate):
    """FieldEqualPredicate schema wrapper.

    Mapping(required=[equal, field])

    Attributes
    ----------
    equal : anyOf(string, float, boolean, :class:`DateTime`, :class:`ExprRef`)
        The value that the field should be equal to.
    field : :class:`FieldName`
        Field to be tested.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit for the field to be tested.
    """
    _schema = {'$ref': '#/definitions/FieldEqualPredicate'}

    def __init__(self, equal=Undefined, field=Undefined, timeUnit=Undefined, **kwds):
        """Build an equality predicate; extra schema keys pass through ``**kwds``."""
        super(FieldEqualPredicate, self).__init__(
            equal=equal, field=field, timeUnit=timeUnit, **kwds)
class FieldGTEPredicate(Predicate):
    """FieldGTEPredicate schema wrapper.

    Mapping(required=[field, gte])

    Attributes
    ----------
    field : :class:`FieldName`
        Field to be tested.
    gte : anyOf(string, float, :class:`DateTime`, :class:`ExprRef`)
        The value that the field should be greater than or equals to.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit for the field to be tested.
    """

    _schema = {'$ref': '#/definitions/FieldGTEPredicate'}

    def __init__(self, field=Undefined, gte=Undefined, timeUnit=Undefined, **kwds):
        # Named schema properties are passed explicitly; anything extra rides in **kwds.
        super().__init__(field=field, gte=gte, timeUnit=timeUnit, **kwds)
class FieldGTPredicate(Predicate):
    """FieldGTPredicate schema wrapper.

    Mapping(required=[field, gt])

    Attributes
    ----------
    field : :class:`FieldName`
        Field to be tested.
    gt : anyOf(string, float, :class:`DateTime`, :class:`ExprRef`)
        The value that the field should be greater than.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit for the field to be tested.
    """

    _schema = {'$ref': '#/definitions/FieldGTPredicate'}

    def __init__(self, field=Undefined, gt=Undefined, timeUnit=Undefined, **kwds):
        # Named schema properties are passed explicitly; anything extra rides in **kwds.
        super().__init__(field=field, gt=gt, timeUnit=timeUnit, **kwds)
class FieldLTEPredicate(Predicate):
    """FieldLTEPredicate schema wrapper.

    Mapping(required=[field, lte])

    Attributes
    ----------
    field : :class:`FieldName`
        Field to be tested.
    lte : anyOf(string, float, :class:`DateTime`, :class:`ExprRef`)
        The value that the field should be less than or equals to.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit for the field to be tested.
    """

    _schema = {'$ref': '#/definitions/FieldLTEPredicate'}

    def __init__(self, field=Undefined, lte=Undefined, timeUnit=Undefined, **kwds):
        # Named schema properties are passed explicitly; anything extra rides in **kwds.
        super().__init__(field=field, lte=lte, timeUnit=timeUnit, **kwds)
class FieldLTPredicate(Predicate):
    """FieldLTPredicate schema wrapper.

    Mapping(required=[field, lt])

    Attributes
    ----------
    field : :class:`FieldName`
        Field to be tested.
    lt : anyOf(string, float, :class:`DateTime`, :class:`ExprRef`)
        The value that the field should be less than.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit for the field to be tested.
    """

    _schema = {'$ref': '#/definitions/FieldLTPredicate'}

    def __init__(self, field=Undefined, lt=Undefined, timeUnit=Undefined, **kwds):
        # Named schema properties are passed explicitly; anything extra rides in **kwds.
        super().__init__(field=field, lt=lt, timeUnit=timeUnit, **kwds)
class FieldOneOfPredicate(Predicate):
    """FieldOneOfPredicate schema wrapper.

    Mapping(required=[field, oneOf])

    Attributes
    ----------
    field : :class:`FieldName`
        Field to be tested.
    oneOf : anyOf(List(string), List(float), List(boolean), List(:class:`DateTime`))
        A set of values that the ``field`` 's value should be a member of, for a data item
        included in the filtered data.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit for the field to be tested.
    """

    _schema = {'$ref': '#/definitions/FieldOneOfPredicate'}

    def __init__(self, field=Undefined, oneOf=Undefined, timeUnit=Undefined, **kwds):
        # Named schema properties are passed explicitly; anything extra rides in **kwds.
        super().__init__(field=field, oneOf=oneOf, timeUnit=timeUnit, **kwds)
class FieldRangePredicate(Predicate):
    """FieldRangePredicate schema wrapper.

    Mapping(required=[field, range])

    Attributes
    ----------
    field : :class:`FieldName`
        Field to be tested.
    range : anyOf(List(anyOf(float, :class:`DateTime`, None, :class:`ExprRef`)),
    :class:`ExprRef`)
        An array of inclusive minimum and maximum values for a field value of a data item to
        be included in the filtered data.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit for the field to be tested.
    """

    _schema = {'$ref': '#/definitions/FieldRangePredicate'}

    def __init__(self, field=Undefined, range=Undefined, timeUnit=Undefined, **kwds):
        # ``range`` shadows the builtin here only because the schema property is named that way.
        super().__init__(field=field, range=range, timeUnit=timeUnit, **kwds)
class FieldValidPredicate(Predicate):
    """FieldValidPredicate schema wrapper.

    Mapping(required=[field, valid])

    Attributes
    ----------
    field : :class:`FieldName`
        Field to be tested.
    valid : boolean
        If set to true the field's value has to be valid, meaning both not ``null`` and not
        `NaN
        <https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/NaN>`__.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit for the field to be tested.
    """

    _schema = {'$ref': '#/definitions/FieldValidPredicate'}

    def __init__(self, field=Undefined, valid=Undefined, timeUnit=Undefined, **kwds):
        # Named schema properties are passed explicitly; anything extra rides in **kwds.
        super().__init__(field=field, valid=valid, timeUnit=timeUnit, **kwds)
class Projection(VegaLiteSchema):
    """Projection schema wrapper.

    Mapping(required=[])

    Attributes
    ----------
    center : :class:`Vector2number`
        The projection's center, a two-element array of longitude and latitude in degrees.
        **Default value:** ``[0, 0]``
    clipAngle : float
        The projection's clipping circle radius to the specified angle in degrees. If
        ``null``, switches to `antimeridian <http://bl.ocks.org/mbostock/3788999>`__ cutting
        rather than small-circle clipping.
    clipExtent : :class:`Vector2Vector2number`
        The projection's viewport clip extent to the specified bounds in pixels. The extent
        bounds are specified as an array ``[[x0, y0], [x1, y1]]``, where ``x0`` is the
        left-side of the viewport, ``y0`` is the top, ``x1`` is the right and ``y1`` is the
        bottom. If ``null``, no viewport clipping is performed.
    coefficient : float
    distance : float
    extent : :class:`Vector2Vector2number`
    fit : anyOf(:class:`Fit`, List(:class:`Fit`))
    fraction : float
    lobes : float
    parallel : float
    parallels : List(float)
        For conic projections, the `two standard parallels
        <https://en.wikipedia.org/wiki/Map_projection#Conic>`__ that define the map layout.
        The default depends on the specific conic projection used.
    pointRadius : float
        The default radius (in pixels) to use when drawing GeoJSON ``Point`` and
        ``MultiPoint`` geometries. This parameter sets a constant default value. To modify
        the point radius in response to data, see the corresponding parameter of the GeoPath
        and GeoShape transforms.
        **Default value:** ``4.5``
    precision : float
        The threshold for the projection's `adaptive resampling
        <http://bl.ocks.org/mbostock/3795544>`__ to the specified value in pixels. This
        value corresponds to the `Douglas–Peucker distance
        <http://en.wikipedia.org/wiki/Ramer%E2%80%93Douglas%E2%80%93Peucker_algorithm>`__.
        If precision is not specified, returns the projection's current resampling precision
        which defaults to ``√0.5 ≅ 0.70710…``.
    radius : float
    ratio : float
    reflectX : boolean
    reflectY : boolean
    rotate : anyOf(:class:`Vector2number`, :class:`Vector3number`)
        The projection's three-axis rotation to the specified angles, which must be a two-
        or three-element array of numbers [ ``lambda``, ``phi``, ``gamma`` ] specifying the
        rotation angles in degrees about each spherical axis. (These correspond to yaw,
        pitch and roll.)
        **Default value:** ``[0, 0, 0]``
    scale : float
        The projection’s scale (zoom) factor, overriding automatic fitting. The default
        scale is projection-specific. The scale factor corresponds linearly to the distance
        between projected points; however, scale factor values are not equivalent across
        projections.
    size : :class:`Vector2number`
    spacing : float
    tilt : float
    translate : :class:`Vector2number`
        The projection’s translation offset as a two-element array ``[tx, ty]``.
    type : :class:`ProjectionType`
        The cartographic projection to use. This value is case-insensitive, for example
        ``"albers"`` and ``"Albers"`` indicate the same projection type. You can find all
        valid projection types `in the documentation
        <https://vega.github.io/vega-lite/docs/projection.html#projection-types>`__.
        **Default value:** ``mercator``
    """

    _schema = {'$ref': '#/definitions/Projection'}

    def __init__(self, center=Undefined, clipAngle=Undefined, clipExtent=Undefined,
                 coefficient=Undefined, distance=Undefined, extent=Undefined, fit=Undefined,
                 fraction=Undefined, lobes=Undefined, parallel=Undefined, parallels=Undefined,
                 pointRadius=Undefined, precision=Undefined, radius=Undefined, ratio=Undefined,
                 reflectX=Undefined, reflectY=Undefined, rotate=Undefined, scale=Undefined,
                 size=Undefined, spacing=Undefined, tilt=Undefined, translate=Undefined,
                 type=Undefined, **kwds):
        # Every schema property is forwarded by name; unrecognized keywords travel in **kwds.
        super().__init__(
            center=center,
            clipAngle=clipAngle,
            clipExtent=clipExtent,
            coefficient=coefficient,
            distance=distance,
            extent=extent,
            fit=fit,
            fraction=fraction,
            lobes=lobes,
            parallel=parallel,
            parallels=parallels,
            pointRadius=pointRadius,
            precision=precision,
            radius=radius,
            ratio=ratio,
            reflectX=reflectX,
            reflectY=reflectY,
            rotate=rotate,
            scale=scale,
            size=size,
            spacing=spacing,
            tilt=tilt,
            translate=translate,
            type=type,
            **kwds)
class ProjectionConfig(VegaLiteSchema):
    """ProjectionConfig schema wrapper.

    Mapping(required=[])

    Attributes
    ----------
    center : :class:`Vector2number`
        The projection's center, a two-element array of longitude and latitude in degrees.
        **Default value:** ``[0, 0]``
    clipAngle : float
        The projection's clipping circle radius to the specified angle in degrees. If
        ``null``, switches to `antimeridian <http://bl.ocks.org/mbostock/3788999>`__ cutting
        rather than small-circle clipping.
    clipExtent : :class:`Vector2Vector2number`
        The projection's viewport clip extent to the specified bounds in pixels. The extent
        bounds are specified as an array ``[[x0, y0], [x1, y1]]``, where ``x0`` is the
        left-side of the viewport, ``y0`` is the top, ``x1`` is the right and ``y1`` is the
        bottom. If ``null``, no viewport clipping is performed.
    coefficient : float
    distance : float
    extent : :class:`Vector2Vector2number`
    fit : anyOf(:class:`Fit`, List(:class:`Fit`))
    fraction : float
    lobes : float
    parallel : float
    parallels : List(float)
        For conic projections, the `two standard parallels
        <https://en.wikipedia.org/wiki/Map_projection#Conic>`__ that define the map layout.
        The default depends on the specific conic projection used.
    pointRadius : float
        The default radius (in pixels) to use when drawing GeoJSON ``Point`` and
        ``MultiPoint`` geometries. This parameter sets a constant default value. To modify
        the point radius in response to data, see the corresponding parameter of the GeoPath
        and GeoShape transforms.
        **Default value:** ``4.5``
    precision : float
        The threshold for the projection's `adaptive resampling
        <http://bl.ocks.org/mbostock/3795544>`__ to the specified value in pixels. This
        value corresponds to the `Douglas–Peucker distance
        <http://en.wikipedia.org/wiki/Ramer%E2%80%93Douglas%E2%80%93Peucker_algorithm>`__.
        If precision is not specified, returns the projection's current resampling precision
        which defaults to ``√0.5 ≅ 0.70710…``.
    radius : float
    ratio : float
    reflectX : boolean
    reflectY : boolean
    rotate : anyOf(:class:`Vector2number`, :class:`Vector3number`)
        The projection's three-axis rotation to the specified angles, which must be a two-
        or three-element array of numbers [ ``lambda``, ``phi``, ``gamma`` ] specifying the
        rotation angles in degrees about each spherical axis. (These correspond to yaw,
        pitch and roll.)
        **Default value:** ``[0, 0, 0]``
    scale : float
        The projection’s scale (zoom) factor, overriding automatic fitting. The default
        scale is projection-specific. The scale factor corresponds linearly to the distance
        between projected points; however, scale factor values are not equivalent across
        projections.
    size : :class:`Vector2number`
    spacing : float
    tilt : float
    translate : :class:`Vector2number`
        The projection’s translation offset as a two-element array ``[tx, ty]``.
    type : :class:`ProjectionType`
        The cartographic projection to use. This value is case-insensitive, for example
        ``"albers"`` and ``"Albers"`` indicate the same projection type. You can find all
        valid projection types `in the documentation
        <https://vega.github.io/vega-lite/docs/projection.html#projection-types>`__.
        **Default value:** ``mercator``
    """

    _schema = {'$ref': '#/definitions/ProjectionConfig'}

    def __init__(self, center=Undefined, clipAngle=Undefined, clipExtent=Undefined,
                 coefficient=Undefined, distance=Undefined, extent=Undefined, fit=Undefined,
                 fraction=Undefined, lobes=Undefined, parallel=Undefined, parallels=Undefined,
                 pointRadius=Undefined, precision=Undefined, radius=Undefined, ratio=Undefined,
                 reflectX=Undefined, reflectY=Undefined, rotate=Undefined, scale=Undefined,
                 size=Undefined, spacing=Undefined, tilt=Undefined, translate=Undefined,
                 type=Undefined, **kwds):
        # Every schema property is forwarded by name; unrecognized keywords travel in **kwds.
        super().__init__(
            center=center,
            clipAngle=clipAngle,
            clipExtent=clipExtent,
            coefficient=coefficient,
            distance=distance,
            extent=extent,
            fit=fit,
            fraction=fraction,
            lobes=lobes,
            parallel=parallel,
            parallels=parallels,
            pointRadius=pointRadius,
            precision=precision,
            radius=radius,
            ratio=ratio,
            reflectX=reflectX,
            reflectY=reflectY,
            rotate=rotate,
            scale=scale,
            size=size,
            spacing=spacing,
            tilt=tilt,
            translate=translate,
            type=type,
            **kwds)
class ProjectionType(VegaLiteSchema):
    """ProjectionType schema wrapper.

    enum('albers', 'albersUsa', 'azimuthalEqualArea', 'azimuthalEquidistant', 'conicConformal',
    'conicEqualArea', 'conicEquidistant', 'equalEarth', 'equirectangular', 'gnomonic',
    'identity', 'mercator', 'naturalEarth1', 'orthographic', 'stereographic',
    'transverseMercator')
    """

    _schema = {'$ref': '#/definitions/ProjectionType'}

    def __init__(self, *args):
        # The enum value is carried positionally; validation happens in the base class.
        super().__init__(*args)
class RadialGradient(Gradient):
    """RadialGradient schema wrapper.

    Mapping(required=[gradient, stops])

    Attributes
    ----------
    gradient : string
        The type of gradient. Use ``"radial"`` for a radial gradient.
    stops : List(:class:`GradientStop`)
        An array of gradient stops defining the gradient color sequence.
    id : string
    r1 : float
        The radius length, in normalized [0, 1] coordinates, of the inner circle for the
        gradient.
        **Default value:** ``0``
    r2 : float
        The radius length, in normalized [0, 1] coordinates, of the outer circle for the
        gradient.
        **Default value:** ``0.5``
    x1 : float
        The x-coordinate, in normalized [0, 1] coordinates, for the center of the inner
        circle for the gradient.
        **Default value:** ``0.5``
    x2 : float
        The x-coordinate, in normalized [0, 1] coordinates, for the center of the outer
        circle for the gradient.
        **Default value:** ``0.5``
    y1 : float
        The y-coordinate, in normalized [0, 1] coordinates, for the center of the inner
        circle for the gradient.
        **Default value:** ``0.5``
    y2 : float
        The y-coordinate, in normalized [0, 1] coordinates, for the center of the outer
        circle for the gradient.
        **Default value:** ``0.5``
    """

    _schema = {'$ref': '#/definitions/RadialGradient'}

    def __init__(self, gradient=Undefined, stops=Undefined, id=Undefined, r1=Undefined,
                 r2=Undefined, x1=Undefined, x2=Undefined, y1=Undefined, y2=Undefined, **kwds):
        # Every schema property is forwarded by name; unrecognized keywords travel in **kwds.
        super().__init__(
            gradient=gradient,
            stops=stops,
            id=id,
            r1=r1,
            r2=r2,
            x1=x1,
            x2=x2,
            y1=y1,
            y2=y2,
            **kwds)
class RangeConfig(VegaLiteSchema):
    """RangeConfig schema wrapper.

    Mapping(required=[])

    Attributes
    ----------
    category : anyOf(:class:`RangeScheme`, List(:class:`Color`))
        Default `color scheme <https://vega.github.io/vega/docs/schemes/>`__ for categorical
        data.
    diverging : anyOf(:class:`RangeScheme`, List(:class:`Color`))
        Default `color scheme <https://vega.github.io/vega/docs/schemes/>`__ for diverging
        quantitative ramps.
    heatmap : anyOf(:class:`RangeScheme`, List(:class:`Color`))
        Default `color scheme <https://vega.github.io/vega/docs/schemes/>`__ for
        quantitative heatmaps.
    ordinal : anyOf(:class:`RangeScheme`, List(:class:`Color`))
        Default `color scheme <https://vega.github.io/vega/docs/schemes/>`__ for
        rank-ordered data.
    ramp : anyOf(:class:`RangeScheme`, List(:class:`Color`))
        Default `color scheme <https://vega.github.io/vega/docs/schemes/>`__ for sequential
        quantitative ramps.
    symbol : List(:class:`SymbolShape`)
        Array of `symbol <https://vega.github.io/vega/docs/marks/symbol/>`__ names or paths
        for the default shape palette.
    """

    _schema = {'$ref': '#/definitions/RangeConfig'}

    def __init__(self, category=Undefined, diverging=Undefined, heatmap=Undefined,
                 ordinal=Undefined, ramp=Undefined, symbol=Undefined, **kwds):
        # Every schema property is forwarded by name; unrecognized keywords travel in **kwds.
        super().__init__(
            category=category,
            diverging=diverging,
            heatmap=heatmap,
            ordinal=ordinal,
            ramp=ramp,
            symbol=symbol,
            **kwds)
class RangeRawArray(VegaLiteSchema):
    """RangeRawArray schema wrapper.

    List(float)
    """

    _schema = {'$ref': '#/definitions/RangeRawArray'}

    def __init__(self, *args):
        # The array value is carried positionally; validation happens in the base class.
        super().__init__(*args)
class RangeScheme(VegaLiteSchema):
    """RangeScheme schema wrapper.

    anyOf(:class:`RangeEnum`, :class:`RangeRaw`, Mapping(required=[scheme]))
    """

    _schema = {'$ref': '#/definitions/RangeScheme'}

    def __init__(self, *args, **kwds):
        # Positional and keyword arguments alike are handed to the validating base class.
        super().__init__(*args, **kwds)
class RangeEnum(RangeScheme):
    """RangeEnum schema wrapper.

    enum('width', 'height', 'symbol', 'category', 'ordinal', 'ramp', 'diverging', 'heatmap')
    """

    _schema = {'$ref': '#/definitions/RangeEnum'}

    def __init__(self, *args):
        # The enum value is carried positionally; validation happens in the base class.
        super().__init__(*args)
class RangeRaw(RangeScheme):
    """RangeRaw schema wrapper.

    List(anyOf(None, boolean, string, float, :class:`RangeRawArray`))
    """

    _schema = {'$ref': '#/definitions/RangeRaw'}

    def __init__(self, *args):
        # The raw range value is carried positionally; validation happens in the base class.
        super().__init__(*args)
class RectConfig(AnyMarkConfig):
    """RectConfig schema wrapper.

    Mapping(required=[])

    Attributes
    ----------
    align : anyOf(:class:`Align`, :class:`ExprRef`)
        The horizontal alignment of the text or ranged marks (area, bar, image, rect, rule).
        One of ``"left"``, ``"right"``, ``"center"``.
        **Note:** Expression reference is *not* supported for range marks.
    angle : anyOf(float, :class:`ExprRef`)
    aria : anyOf(boolean, :class:`ExprRef`)
    ariaRole : anyOf(string, :class:`ExprRef`)
    ariaRoleDescription : anyOf(string, :class:`ExprRef`)
    aspect : anyOf(boolean, :class:`ExprRef`)
    baseline : anyOf(:class:`TextBaseline`, :class:`ExprRef`)
        For text marks, the vertical text baseline. One of ``"alphabetic"`` (default),
        ``"top"``, ``"middle"``, ``"bottom"``, ``"line-top"``, ``"line-bottom"``, or an
        expression reference that provides one of the valid values. The ``"line-top"`` and
        ``"line-bottom"`` values operate similarly to ``"top"`` and ``"bottom"``, but are
        calculated relative to the ``lineHeight`` rather than ``fontSize`` alone.
        For range marks, the vertical alignment of the marks. One of ``"top"``,
        ``"middle"``, ``"bottom"``.
        **Note:** Expression reference is *not* supported for range marks.
    binSpacing : float
        Offset between bars for binned field. The ideal value for this is either 0
        (preferred by statisticians) or 1 (Vega-Lite default, D3 example style).
        **Default value:** ``1``
    blend : anyOf(:class:`Blend`, :class:`ExprRef`)
    color : anyOf(:class:`Color`, :class:`Gradient`, :class:`ExprRef`)
        Default color.
        **Default value:** :raw-html:`<span style="color: #4682b4;">&#9632;</span>`
        ``"#4682b4"``
        **Note:** - This property cannot be used in a `style config
        <https://vega.github.io/vega-lite/docs/mark.html#style-config>`__. - The ``fill``
        and ``stroke`` properties have higher precedence than ``color`` and will override
        ``color``.
    continuousBandSize : float
        The default size of the bars on continuous scales.
        **Default value:** ``5``
    cornerRadius : anyOf(float, :class:`ExprRef`)
    cornerRadiusBottomLeft : anyOf(float, :class:`ExprRef`)
    cornerRadiusBottomRight : anyOf(float, :class:`ExprRef`)
    cornerRadiusTopLeft : anyOf(float, :class:`ExprRef`)
    cornerRadiusTopRight : anyOf(float, :class:`ExprRef`)
    cursor : anyOf(:class:`Cursor`, :class:`ExprRef`)
    description : anyOf(string, :class:`ExprRef`)
    dir : anyOf(:class:`TextDirection`, :class:`ExprRef`)
    discreteBandSize : float
        The default size of the bars with discrete dimensions. If unspecified, the default
        size is ``step-2``, which provides 2 pixel offset between bars.
    dx : anyOf(float, :class:`ExprRef`)
    dy : anyOf(float, :class:`ExprRef`)
    ellipsis : anyOf(string, :class:`ExprRef`)
    endAngle : anyOf(float, :class:`ExprRef`)
    fill : anyOf(:class:`Color`, :class:`Gradient`, None, :class:`ExprRef`)
        Default fill color. This property has higher precedence than ``config.color``. Set
        to ``null`` to remove fill.
        **Default value:** (None)
    fillOpacity : anyOf(float, :class:`ExprRef`)
    filled : boolean
        Whether the mark's color should be used as fill color instead of stroke color.
        **Default value:** ``false`` for all ``point``, ``line``, and ``rule`` marks as well
        as ``geoshape`` marks for `graticule
        <https://vega.github.io/vega-lite/docs/data.html#graticule>`__ data sources;
        otherwise, ``true``.
        **Note:** This property cannot be used in a `style config
        <https://vega.github.io/vega-lite/docs/mark.html#style-config>`__.
    font : anyOf(string, :class:`ExprRef`)
    fontSize : anyOf(float, :class:`ExprRef`)
    fontStyle : anyOf(:class:`FontStyle`, :class:`ExprRef`)
    fontWeight : anyOf(:class:`FontWeight`, :class:`ExprRef`)
    height : anyOf(float, :class:`ExprRef`)
    href : anyOf(:class:`URI`, :class:`ExprRef`)
    innerRadius : anyOf(float, :class:`ExprRef`)
        The inner radius in pixels of arc marks. ``innerRadius`` is an alias for
        ``radius2``.
    interpolate : anyOf(:class:`Interpolate`, :class:`ExprRef`)
    invalid : enum('filter', None)
        Defines how Vega-Lite should handle marks for invalid values ( ``null`` and ``NaN``
        ). - If set to ``"filter"`` (default), all data items with null values will be
        skipped (for line, trail, and area marks) or filtered (for other marks). - If
        ``null``, all data items are included. In this case, invalid values will be
        interpreted as zeroes.
    limit : anyOf(float, :class:`ExprRef`)
    lineBreak : anyOf(string, :class:`ExprRef`)
    lineHeight : anyOf(float, :class:`ExprRef`)
    opacity : anyOf(float, :class:`ExprRef`)
        The overall opacity (value between [0,1]).
        **Default value:** ``0.7`` for non-aggregate plots with ``point``, ``tick``,
        ``circle``, or ``square`` marks or layered ``bar`` charts and ``1`` otherwise.
    order : anyOf(None, boolean)
        For line and trail marks, this ``order`` property can be set to ``null`` or
        ``false`` to make the lines use the original order in the data sources.
    orient : :class:`Orientation`
        The orientation of a non-stacked bar, tick, area, and line charts. The value is
        either horizontal (default) or vertical. - For bar, rule and tick, this determines
        whether the size of the bar and tick should be applied to x or y dimension. - For
        area, this property determines the orient property of the Vega output. - For line
        and trail marks, this property determines the sort order of the points in the line
        if ``config.sortLineBy`` is not specified. For stacked charts, this is always
        determined by the orientation of the stack; therefore explicitly specified value
        will be ignored.
    outerRadius : anyOf(float, :class:`ExprRef`)
        The outer radius in pixels of arc marks. ``outerRadius`` is an alias for ``radius``.
    padAngle : anyOf(float, :class:`ExprRef`)
    radius : anyOf(float, :class:`ExprRef`)
        For arc mark, the primary (outer) radius in pixels.
        For text marks, polar coordinate radial offset, in pixels, of the text from the
        origin determined by the ``x`` and ``y`` properties.
    radius2 : anyOf(float, :class:`ExprRef`)
        The secondary (inner) radius in pixels of arc marks.
    shape : anyOf(anyOf(:class:`SymbolShape`, string), :class:`ExprRef`)
    size : anyOf(float, :class:`ExprRef`)
        Default size for marks. - For ``point`` / ``circle`` / ``square``, this represents
        the pixel area of the marks. Note that this value sets the area of the symbol; the
        side lengths will increase with the square root of this value. - For ``bar``, this
        represents the band size of the bar, in pixels. - For ``text``, this represents the
        font size, in pixels.
        **Default value:** - ``30`` for point, circle, square marks; width/height's ``step``
        - ``2`` for bar marks with discrete dimensions; - ``5`` for bar marks with
        continuous dimensions; - ``11`` for text marks.
    smooth : anyOf(boolean, :class:`ExprRef`)
    startAngle : anyOf(float, :class:`ExprRef`)
    stroke : anyOf(:class:`Color`, :class:`Gradient`, None, :class:`ExprRef`)
        Default stroke color. This property has higher precedence than ``config.color``. Set
        to ``null`` to remove stroke.
        **Default value:** (None)
    strokeCap : anyOf(:class:`StrokeCap`, :class:`ExprRef`)
    strokeDash : anyOf(List(float), :class:`ExprRef`)
    strokeDashOffset : anyOf(float, :class:`ExprRef`)
    strokeJoin : anyOf(:class:`StrokeJoin`, :class:`ExprRef`)
    strokeMiterLimit : anyOf(float, :class:`ExprRef`)
    strokeOffset : anyOf(float, :class:`ExprRef`)
    strokeOpacity : anyOf(float, :class:`ExprRef`)
    strokeWidth : anyOf(float, :class:`ExprRef`)
    tension : anyOf(float, :class:`ExprRef`)
    text : anyOf(:class:`Text`, :class:`ExprRef`)
    theta : anyOf(float, :class:`ExprRef`)
        For arc marks, the arc length in radians if theta2 is not specified, otherwise the
        start arc angle. (A value of 0 indicates up or “north”, increasing values proceed
        clockwise.)
        For text marks, polar coordinate angle in radians.
    theta2 : anyOf(float, :class:`ExprRef`)
        The end angle of arc marks in radians. A value of 0 indicates up or “north”,
        increasing values proceed clockwise.
    timeUnitBand : float
        Default relative band size for a time unit. If set to ``1``, the bandwidth of the
        marks will be equal to the time unit band step. If set to ``0.5``, bandwidth of the
        marks will be half of the time unit band step.
    timeUnitBandPosition : float
        Default relative band position for a time unit. If set to ``0``, the marks will be
        positioned at the beginning of the time unit band step. If set to ``0.5``, the marks
        will be positioned in the middle of the time unit band step.
    tooltip : anyOf(float, string, boolean, :class:`TooltipContent`, :class:`ExprRef`, None)
        The tooltip text string to show upon mouse hover or an object defining which fields
        should the tooltip be derived from.
        * If ``tooltip`` is ``true`` or ``{"content": "encoding"}``, then all fields from
        ``encoding`` will be used. - If ``tooltip`` is ``{"content": "data"}``, then all
        fields that appear in the highlighted data point will be used. - If set to
        ``null`` or ``false``, then no tooltip will be used.
        See the `tooltip <https://vega.github.io/vega-lite/docs/tooltip.html>`__
        documentation for a detailed discussion about tooltip in Vega-Lite.
        **Default value:** ``null``
    url : anyOf(:class:`URI`, :class:`ExprRef`)
    width : anyOf(float, :class:`ExprRef`)
    x : anyOf(float, string, :class:`ExprRef`)
        X coordinates of the marks, or width of horizontal ``"bar"`` and ``"area"`` without
        specified ``x2`` or ``width``.
        The ``value`` of this channel can be a number or a string ``"width"`` for the width
        of the plot.
    x2 : anyOf(float, string, :class:`ExprRef`)
        X2 coordinates for ranged ``"area"``, ``"bar"``, ``"rect"``, and ``"rule"``.
        The ``value`` of this channel can be a number or a string ``"width"`` for the width
        of the plot.
    y : anyOf(float, string, :class:`ExprRef`)
        Y coordinates of the marks, or height of vertical ``"bar"`` and ``"area"`` without
        specified ``y2`` or ``height``.
        The ``value`` of this channel can be a number or a string ``"height"`` for the
        height of the plot.
    y2 : anyOf(float, string, :class:`ExprRef`)
        Y2 coordinates for ranged ``"area"``, ``"bar"``, ``"rect"``, and ``"rule"``.
        The ``value`` of this channel can be a number or a string ``"height"`` for the
        height of the plot.
    """

    _schema = {'$ref': '#/definitions/RectConfig'}

    def __init__(self, align=Undefined, angle=Undefined, aria=Undefined, ariaRole=Undefined,
                 ariaRoleDescription=Undefined, aspect=Undefined, baseline=Undefined,
                 binSpacing=Undefined, blend=Undefined, color=Undefined,
                 continuousBandSize=Undefined, cornerRadius=Undefined,
                 cornerRadiusBottomLeft=Undefined, cornerRadiusBottomRight=Undefined,
                 cornerRadiusTopLeft=Undefined, cornerRadiusTopRight=Undefined, cursor=Undefined,
                 description=Undefined, dir=Undefined, discreteBandSize=Undefined, dx=Undefined,
                 dy=Undefined, ellipsis=Undefined, endAngle=Undefined, fill=Undefined,
                 fillOpacity=Undefined, filled=Undefined, font=Undefined, fontSize=Undefined,
                 fontStyle=Undefined, fontWeight=Undefined, height=Undefined, href=Undefined,
                 innerRadius=Undefined, interpolate=Undefined, invalid=Undefined, limit=Undefined,
                 lineBreak=Undefined, lineHeight=Undefined, opacity=Undefined, order=Undefined,
                 orient=Undefined, outerRadius=Undefined, padAngle=Undefined, radius=Undefined,
                 radius2=Undefined, shape=Undefined, size=Undefined, smooth=Undefined,
                 startAngle=Undefined, stroke=Undefined, strokeCap=Undefined, strokeDash=Undefined,
                 strokeDashOffset=Undefined, strokeJoin=Undefined, strokeMiterLimit=Undefined,
                 strokeOffset=Undefined, strokeOpacity=Undefined, strokeWidth=Undefined,
                 tension=Undefined, text=Undefined, theta=Undefined, theta2=Undefined,
                 timeUnitBand=Undefined, timeUnitBandPosition=Undefined, tooltip=Undefined,
                 url=Undefined, width=Undefined, x=Undefined, x2=Undefined, y=Undefined,
                 y2=Undefined, **kwds):
        # Every schema property is forwarded by name; unrecognized keywords travel in **kwds.
        super().__init__(
            align=align,
            angle=angle,
            aria=aria,
            ariaRole=ariaRole,
            ariaRoleDescription=ariaRoleDescription,
            aspect=aspect,
            baseline=baseline,
            binSpacing=binSpacing,
            blend=blend,
            color=color,
            continuousBandSize=continuousBandSize,
            cornerRadius=cornerRadius,
            cornerRadiusBottomLeft=cornerRadiusBottomLeft,
            cornerRadiusBottomRight=cornerRadiusBottomRight,
            cornerRadiusTopLeft=cornerRadiusTopLeft,
            cornerRadiusTopRight=cornerRadiusTopRight,
            cursor=cursor,
            description=description,
            dir=dir,
            discreteBandSize=discreteBandSize,
            dx=dx,
            dy=dy,
            ellipsis=ellipsis,
            endAngle=endAngle,
            fill=fill,
            fillOpacity=fillOpacity,
            filled=filled,
            font=font,
            fontSize=fontSize,
            fontStyle=fontStyle,
            fontWeight=fontWeight,
            height=height,
            href=href,
            innerRadius=innerRadius,
            interpolate=interpolate,
            invalid=invalid,
            limit=limit,
            lineBreak=lineBreak,
            lineHeight=lineHeight,
            opacity=opacity,
            order=order,
            orient=orient,
            outerRadius=outerRadius,
            padAngle=padAngle,
            radius=radius,
            radius2=radius2,
            shape=shape,
            size=size,
            smooth=smooth,
            startAngle=startAngle,
            stroke=stroke,
            strokeCap=strokeCap,
            strokeDash=strokeDash,
            strokeDashOffset=strokeDashOffset,
            strokeJoin=strokeJoin,
            strokeMiterLimit=strokeMiterLimit,
            strokeOffset=strokeOffset,
            strokeOpacity=strokeOpacity,
            strokeWidth=strokeWidth,
            tension=tension,
            text=text,
            theta=theta,
            theta2=theta2,
            timeUnitBand=timeUnitBand,
            timeUnitBandPosition=timeUnitBandPosition,
            tooltip=tooltip,
            url=url,
            width=width,
            x=x,
            x2=x2,
            y=y,
            y2=y2,
            **kwds)
class RepeatMapping(VegaLiteSchema):
    """RepeatMapping schema wrapper.

    Mapping(required=[])

    Attributes
    ----------
    column : List(string)
        An array of fields to be repeated horizontally.
    row : List(string)
        An array of fields to be repeated vertically.
    """

    _schema = {'$ref': '#/definitions/RepeatMapping'}

    def __init__(self, column=Undefined, row=Undefined, **kwds):
        # Named schema properties are passed explicitly; anything extra rides in **kwds.
        super().__init__(column=column, row=row, **kwds)
class RepeatRef(Field):
    """RepeatRef schema wrapper.

    Mapping(required=[repeat])

    Reference to a repeated value.

    Attributes
    ----------
    repeat : enum('row', 'column', 'repeat', 'layer')
    """

    _schema = {'$ref': '#/definitions/RepeatRef'}

    def __init__(self, repeat=Undefined, **kwds):
        # Named schema properties are passed explicitly; anything extra rides in **kwds.
        super().__init__(repeat=repeat, **kwds)
class Resolve(VegaLiteSchema):
    """Resolve schema wrapper.

    Mapping(required=[])

    Defines how scales, axes, and legends from different specs should be combined. Resolve is a
    mapping from ``scale``, ``axis``, and ``legend`` to a mapping from channels to resolutions.
    Scales and guides can be resolved to be ``"independent"`` or ``"shared"``.

    Attributes
    ----------
    axis : :class:`AxisResolveMap`
    legend : :class:`LegendResolveMap`
    scale : :class:`ScaleResolveMap`
    """

    _schema = {'$ref': '#/definitions/Resolve'}

    def __init__(self, axis=Undefined, legend=Undefined, scale=Undefined, **kwds):
        # Named schema properties are passed explicitly; anything extra rides in **kwds.
        super().__init__(axis=axis, legend=legend, scale=scale, **kwds)
class ResolveMode(VegaLiteSchema):
    """ResolveMode schema wrapper.

    enum('independent', 'shared')
    """

    _schema = {'$ref': '#/definitions/ResolveMode'}

    def __init__(self, *args):
        # The enum value is carried positionally; validation happens in the base class.
        super().__init__(*args)
class RowColLayoutAlign(VegaLiteSchema):
    """RowColLayoutAlign schema wrapper.

    Mapping(required=[])

    Attributes
    ----------
    column : :class:`LayoutAlign`
    row : :class:`LayoutAlign`
    """

    _schema = {'$ref': '#/definitions/RowCol<LayoutAlign>'}

    def __init__(self, column=Undefined, row=Undefined, **kwds):
        # Named schema properties are passed explicitly; anything extra rides in **kwds.
        super().__init__(column=column, row=row, **kwds)
class RowColboolean(VegaLiteSchema):
    """RowColboolean schema wrapper.

    Mapping(required=[])

    Attributes
    ----------
    column : boolean
    row : boolean
    """
    _schema = {"$ref": "#/definitions/RowCol<boolean>"}

    def __init__(self, column=Undefined, row=Undefined, **kwds):
        params = dict(column=column, row=row)
        params.update(kwds)
        super(RowColboolean, self).__init__(**params)
class RowColnumber(VegaLiteSchema):
    """RowColnumber schema wrapper.

    Mapping(required=[])

    Attributes
    ----------
    column : float
    row : float
    """
    _schema = {"$ref": "#/definitions/RowCol<number>"}

    def __init__(self, column=Undefined, row=Undefined, **kwds):
        params = dict(column=column, row=row)
        params.update(kwds)
        super(RowColnumber, self).__init__(**params)
class RowColumnEncodingFieldDef(VegaLiteSchema):
    """RowColumnEncodingFieldDef schema wrapper

    Mapping(required=[])

    Attributes
    ----------
    aggregate : :class:`Aggregate`
        Aggregation function for the field (e.g., ``"mean"``, ``"sum"``, ``"median"``,
        ``"min"``, ``"max"``, ``"count"`` ).

        **Default value:** ``undefined`` (None)

        **See also:** `aggregate <https://vega.github.io/vega-lite/docs/aggregate.html>`__
        documentation.
    align : :class:`LayoutAlign`
        The alignment to apply to row/column facet's subplot. The supported string values
        are ``"all"``, ``"each"``, and ``"none"``.

        - For ``"none"``, a flow layout will be used, in which adjacent subviews are simply
          placed one after the other.
        - For ``"each"``, subviews will be aligned into a clean grid structure, but each row
          or column may be of variable size.
        - For ``"all"``, subviews will be aligned and each row or column will be sized
          identically based on the maximum observed size. String values for this property
          will be applied to both grid rows and columns.

        **Default value:** ``"all"``.
    band : float
        For rect-based marks ( ``rect``, ``bar``, and ``image`` ), mark size relative to
        bandwidth of `band scales
        <https://vega.github.io/vega-lite/docs/scale.html#band>`__, bins or time units. If
        set to ``1``, the mark size is set to the bandwidth, the bin interval, or the time
        unit interval. If set to ``0.5``, the mark size is half of the bandwidth or the time
        unit interval.

        For other marks, relative position on a band of a stacked, binned, time unit or band
        scale. If set to ``0``, the marks will be positioned at the beginning of the band.
        If set to ``0.5``, the marks will be positioned in the middle of the band.
    bin : anyOf(boolean, :class:`BinParams`, None)
        A flag for binning a ``quantitative`` field, `an object defining binning parameters
        <https://vega.github.io/vega-lite/docs/bin.html#params>`__, or indicating that the
        data for ``x`` or ``y`` channel are binned before they are imported into Vega-Lite (
        ``"binned"`` ).

        If ``true``, default `binning parameters
        <https://vega.github.io/vega-lite/docs/bin.html>`__ will be applied.

        If ``"binned"``, this indicates that the data for the ``x`` (or ``y`` ) channel are
        already binned. You can map the bin-start field to ``x`` (or ``y`` ) and the bin-end
        field to ``x2`` (or ``y2`` ). The scale and axis will be formatted similar to
        binning in Vega-Lite. To adjust the axis ticks based on the bin step, you can also
        set the axis's `tickMinStep
        <https://vega.github.io/vega-lite/docs/axis.html#ticks>`__ property.

        **Default value:** ``false``

        **See also:** `bin <https://vega.github.io/vega-lite/docs/bin.html>`__
        documentation.
    center : boolean
        Boolean flag indicating if facet's subviews should be centered relative to their
        respective rows or columns.

        **Default value:** ``false``
    field : :class:`Field`
        **Required.** A string defining the name of the field from which to pull a data
        value or an object defining iterated values from the `repeat
        <https://vega.github.io/vega-lite/docs/repeat.html>`__ operator.

        **See also:** `field <https://vega.github.io/vega-lite/docs/field.html>`__
        documentation.

        **Notes:** 1) Dots ( ``.`` ) and brackets ( ``[`` and ``]`` ) can be used to access
        nested objects (e.g., ``"field": "foo.bar"`` and ``"field": "foo['bar']"`` ). If
        field names contain dots or brackets but are not nested, you can use ``\\`` to
        escape dots and brackets (e.g., ``"a\\.b"`` and ``"a\\[0\\]"`` ). See more details
        about escaping in the `field documentation
        <https://vega.github.io/vega-lite/docs/field.html>`__. 2) ``field`` is not required
        if ``aggregate`` is ``count``.
    header : :class:`Header`
        An object defining properties of a facet's header.
    sort : anyOf(:class:`SortArray`, :class:`SortOrder`, :class:`EncodingSortField`, None)
        Sort order for the encoded field.

        For continuous fields (quantitative or temporal), ``sort`` can be either
        ``"ascending"`` or ``"descending"``.

        For discrete fields, ``sort`` can be one of the following:

        - ``"ascending"`` or ``"descending"`` -- for sorting by the values' natural order in
          JavaScript.
        - `A sort field definition
          <https://vega.github.io/vega-lite/docs/sort.html#sort-field>`__ for sorting by
          another field.
        - `An array specifying the field values in preferred order
          <https://vega.github.io/vega-lite/docs/sort.html#sort-array>`__. In this case, the
          sort order will obey the values in the array, followed by any unspecified values in
          their original order. For discrete time field, values in the sort array can be
          `date-time definition objects <types#datetime>`__. In addition, for time units
          ``"month"`` and ``"day"``, the values can be the month or day names (case
          insensitive) or their 3-letter initials (e.g., ``"Mon"``, ``"Tue"`` ).
        - ``null`` indicating no sort.

        **Default value:** ``"ascending"``

        **Note:** ``null`` is not supported for ``row`` and ``column``.
    spacing : float
        The spacing in pixels between facet's sub-views.

        **Default value** : Depends on ``"spacing"`` property of `the view composition
        configuration <https://vega.github.io/vega-lite/docs/config.html#view-config>`__ (
        ``20`` by default)
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit (e.g., ``year``, ``yearmonth``, ``month``, ``hours`` ) for a temporal
        field. or `a temporal field that gets casted as ordinal
        <https://vega.github.io/vega-lite/docs/type.html#cast>`__.

        **Default value:** ``undefined`` (None)

        **See also:** `timeUnit <https://vega.github.io/vega-lite/docs/timeunit.html>`__
        documentation.
    title : anyOf(:class:`Text`, None)
        A title for the field. If ``null``, the title will be removed.

        **Default value:** derived from the field's name and transformation function (
        ``aggregate``, ``bin`` and ``timeUnit`` ). If the field has an aggregate function,
        the function is displayed as part of the title (e.g., ``"Sum of Profit"`` ). If the
        field is binned or has a time unit applied, the applied function is shown in
        parentheses (e.g., ``"Profit (binned)"``, ``"Transaction Date (year-month)"`` ).
        Otherwise, the title is simply the field name.

        **Notes** :

        1) You can customize the default field title format by providing the `fieldTitle
        <https://vega.github.io/vega-lite/docs/config.html#top-level-config>`__ property in
        the `config <https://vega.github.io/vega-lite/docs/config.html>`__ or `fieldTitle
        function via the compile function's options
        <https://vega.github.io/vega-lite/docs/compile.html#field-title>`__.

        2) If both field definition's ``title`` and axis, header, or legend ``title`` are
        defined, axis/header/legend title will be used.
    type : :class:`StandardType`
        The type of measurement ( ``"quantitative"``, ``"temporal"``, ``"ordinal"``, or
        ``"nominal"`` ) for the encoded field or constant value ( ``datum`` ). It can also
        be a ``"geojson"`` type for encoding `'geoshape'
        <https://vega.github.io/vega-lite/docs/geoshape.html>`__.

        Vega-Lite automatically infers data types in many cases as discussed below. However,
        type is required for a field if: (1) the field is not nominal and the field encoding
        has no specified ``aggregate`` (except ``argmin`` and ``argmax`` ), ``bin``, scale
        type, custom ``sort`` order, nor ``timeUnit`` or (2) if you wish to use an ordinal
        scale for a field with ``bin`` or ``timeUnit``.

        **Default value:**

        1) For a data ``field``, ``"nominal"`` is the default data type unless the field
        encoding has ``aggregate``, ``channel``, ``bin``, scale type, ``sort``, or
        ``timeUnit`` that satisfies the following criteria:

        - ``"quantitative"`` is the default type if (1) the encoded field contains ``bin``
          or ``aggregate`` except ``"argmin"`` and ``"argmax"``, (2) the encoding channel is
          ``latitude`` or ``longitude`` channel or (3) if the specified scale type is `a
          quantitative scale <https://vega.github.io/vega-lite/docs/scale.html#type>`__.
        - ``"temporal"`` is the default type if (1) the encoded field contains ``timeUnit``
          or (2) the specified scale type is a time or utc scale
        - ``"ordinal"`` is the default type if (1) the encoded field contains a `custom sort
          order
          <https://vega.github.io/vega-lite/docs/sort.html#specifying-custom-sort-order>`__,
          (2) the specified scale type is an ordinal/point/band scale, or (3) the encoding
          channel is ``order``.

        2) For a constant value in data domain ( ``datum`` ):

        - ``"quantitative"`` if the datum is a number
        - ``"nominal"`` if the datum is a string
        - ``"temporal"`` if the datum is `a date time object
          <https://vega.github.io/vega-lite/docs/datetime.html>`__

        **Note:**

        - Data ``type`` describes the semantics of the data rather than the primitive data
          types (number, string, etc.). The same primitive data type can have different
          types of measurement. For example, numeric data can represent quantitative,
          ordinal, or nominal data.
        - Data values for a temporal field can be either a date-time string (e.g.,
          ``"2015-03-07 12:32:17"``, ``"17:01"``, ``"2015-03-16"``. ``"2015"`` ) or a
          timestamp number (e.g., ``1552199579097`` ).
        - When using with `bin <https://vega.github.io/vega-lite/docs/bin.html>`__, the
          ``type`` property can be either ``"quantitative"`` (for using a linear bin scale)
          or `"ordinal" (for using an ordinal bin scale)
          <https://vega.github.io/vega-lite/docs/type.html#cast-bin>`__.
        - When using with `timeUnit
          <https://vega.github.io/vega-lite/docs/timeunit.html>`__, the ``type`` property
          can be either ``"temporal"`` (default, for using a temporal scale) or `"ordinal"
          (for using an ordinal scale)
          <https://vega.github.io/vega-lite/docs/type.html#cast-bin>`__.
        - When using with `aggregate
          <https://vega.github.io/vega-lite/docs/aggregate.html>`__, the ``type`` property
          refers to the post-aggregation data type. For example, we can calculate count
          ``distinct`` of a categorical field ``"cat"`` using ``{"aggregate": "distinct",
          "field": "cat"}``. The ``"type"`` of the aggregate output is ``"quantitative"``.
        - Secondary channels (e.g., ``x2``, ``y2``, ``xError``, ``yError`` ) do not have
          ``type`` as they must have exactly the same type as their primary channels (e.g.,
          ``x``, ``y`` ).

        **See also:** `type <https://vega.github.io/vega-lite/docs/type.html>`__
        documentation.
    """
    _schema = {'$ref': '#/definitions/RowColumnEncodingFieldDef'}

    def __init__(self, aggregate=Undefined, align=Undefined, band=Undefined, bin=Undefined,
                 center=Undefined, field=Undefined, header=Undefined, sort=Undefined, spacing=Undefined,
                 timeUnit=Undefined, title=Undefined, type=Undefined, **kwds):
        super(RowColumnEncodingFieldDef, self).__init__(aggregate=aggregate, align=align, band=band,
                                                        bin=bin, center=center, field=field,
                                                        header=header, sort=sort, spacing=spacing,
                                                        timeUnit=timeUnit, title=title, type=type,
                                                        **kwds)
class Scale(VegaLiteSchema):
    """Scale schema wrapper

    Mapping(required=[])

    Attributes
    ----------
    align : anyOf(float, :class:`ExprRef`)
        The alignment of the steps within the scale range.

        This value must lie in the range ``[0,1]``. A value of ``0.5`` indicates that the
        steps should be centered within the range. A value of ``0`` or ``1`` may be used to
        shift the bands to one side, say to position them adjacent to an axis.

        **Default value:** ``0.5``
    base : anyOf(float, :class:`ExprRef`)
        The logarithm base of the ``log`` scale (default ``10`` ).
    bins : :class:`ScaleBins`
        Bin boundaries can be provided to scales as either an explicit array of bin
        boundaries or as a bin specification object. The legal values are:

        - An `array <../types/#Array>`__ literal of bin boundary values. For example,
          ``[0, 5, 10, 15, 20]``. The array must include both starting and ending
          boundaries. The previous example uses five values to indicate a total of four bin
          intervals: [0-5), [5-10), [10-15), [15-20]. Array literals may include signal
          references as elements.
        - A `bin specification object
          <https://vega.github.io/vega-lite/docs/scale.html#bins>`__ that indicates the bin
          *step* size, and optionally the *start* and *stop* boundaries.
        - An array of bin boundaries over the scale domain. If provided, axes and legends
          will use the bin boundaries to inform the choice of tick marks and text labels.
    clamp : anyOf(boolean, :class:`ExprRef`)
        If ``true``, values that exceed the data domain are clamped to either the minimum or
        maximum range value

        **Default value:** derived from the `scale config
        <https://vega.github.io/vega-lite/docs/config.html#scale-config>`__ 's ``clamp`` (
        ``true`` by default).
    constant : anyOf(float, :class:`ExprRef`)
        A constant determining the slope of the symlog function around zero. Only used for
        ``symlog`` scales.

        **Default value:** ``1``
    domain : anyOf(List(anyOf(None, string, float, boolean, :class:`DateTime`,
    :class:`ExprRef`)), string, :class:`SelectionExtent`, :class:`DomainUnionWith`,
    :class:`ExprRef`)
        Customized domain values in the form of constant values or dynamic values driven by
        a selection.

        1) Constant ``domain`` for *quantitative* fields can take one of the following
        forms:

        - A two-element array with minimum and maximum values. To create a diverging scale,
          this two-element array can be combined with the ``domainMid`` property.
        - An array with more than two entries, for `Piecewise quantitative scales
          <https://vega.github.io/vega-lite/docs/scale.html#piecewise>`__.
        - A string value ``"unaggregated"``, if the input field is aggregated, to indicate
          that the domain should include the raw data values prior to the aggregation.

        2) Constant ``domain`` for *temporal* fields can be a two-element array with minimum
        and maximum values, in the form of either timestamps or the `DateTime definition
        objects <https://vega.github.io/vega-lite/docs/types.html#datetime>`__.

        3) Constant ``domain`` for *ordinal* and *nominal* fields can be an array that lists
        valid input values.

        4) To combine (union) specified constant domain with the field's values, ``domain``
        can be an object with a ``unionWith`` property that specify constant domain to be
        combined. For example, ``domain: {unionWith: [0, 100]}`` for a quantitative scale
        means that the scale domain always includes ``[0, 100]``, but will include other
        values in the fields beyond ``[0, 100]``.

        5) Domain can also takes an object defining a field or encoding of a selection that
        `interactively determines
        <https://vega.github.io/vega-lite/docs/selection.html#scale-domains>`__ the scale
        domain.
    domainMax : anyOf(float, :class:`DateTime`, :class:`ExprRef`)
        Sets the maximum value in the scale domain, overriding the ``domain`` property. This
        property is only intended for use with scales having continuous domains.
    domainMid : anyOf(float, :class:`ExprRef`)
        Inserts a single mid-point value into a two-element domain. The mid-point value must
        lie between the domain minimum and maximum values. This property can be useful for
        setting a midpoint for `diverging color scales
        <https://vega.github.io/vega-lite/docs/scale.html#piecewise>`__. The domainMid
        property is only intended for use with scales supporting continuous, piecewise
        domains.
    domainMin : anyOf(float, :class:`DateTime`, :class:`ExprRef`)
        Sets the minimum value in the scale domain, overriding the domain property. This
        property is only intended for use with scales having continuous domains.
    exponent : anyOf(float, :class:`ExprRef`)
        The exponent of the ``pow`` scale.
    interpolate : anyOf(:class:`ScaleInterpolateEnum`, :class:`ExprRef`,
    :class:`ScaleInterpolateParams`)
        The interpolation method for range values. By default, a general interpolator for
        numbers, dates, strings and colors (in HCL space) is used. For color ranges, this
        property allows interpolation in alternative color spaces. Legal values include
        ``rgb``, ``hsl``, ``hsl-long``, ``lab``, ``hcl``, ``hcl-long``, ``cubehelix`` and
        ``cubehelix-long`` ('-long' variants use longer paths in polar coordinate spaces).
        If object-valued, this property accepts an object with a string-valued *type*
        property and an optional numeric *gamma* property applicable to rgb and cubehelix
        interpolators. For more, see the `d3-interpolate documentation
        <https://github.com/d3/d3-interpolate>`__.

        **Default value:** ``hcl``
    nice : anyOf(boolean, float, :class:`TimeInterval`, :class:`TimeIntervalStep`,
    :class:`ExprRef`)
        Extending the domain so that it starts and ends on nice round values. This method
        typically modifies the scale’s domain, and may only extend the bounds to the nearest
        round value. Nicing is useful if the domain is computed from data and may be
        irregular. For example, for a domain of *[0.201479…, 0.996679…]*, a nice domain
        might be *[0.2, 1.0]*.

        For quantitative scales such as linear, ``nice`` can be either a boolean flag or a
        number. If ``nice`` is a number, it will represent a desired tick count. This allows
        greater control over the step size used to extend the bounds, guaranteeing that the
        returned ticks will exactly cover the domain.

        For temporal fields with time and utc scales, the ``nice`` value can be a string
        indicating the desired time interval. Legal values are ``"millisecond"``,
        ``"second"``, ``"minute"``, ``"hour"``, ``"day"``, ``"week"``, ``"month"``, and
        ``"year"``. Alternatively, ``time`` and ``utc`` scales can accept an object-valued
        interval specifier of the form ``{"interval": "month", "step": 3}``, which includes
        a desired number of interval steps. Here, the domain would snap to quarter (Jan,
        Apr, Jul, Oct) boundaries.

        **Default value:** ``true`` for unbinned *quantitative* fields; ``false`` otherwise.
    padding : anyOf(float, :class:`ExprRef`)
        For `continuous <https://vega.github.io/vega-lite/docs/scale.html#continuous>`__
        scales, expands the scale domain to accommodate the specified number of pixels on
        each of the scale range. The scale range must represent pixels for this parameter to
        function as intended. Padding adjustment is performed prior to all other
        adjustments, including the effects of the ``zero``, ``nice``, ``domainMin``, and
        ``domainMax`` properties.

        For `band <https://vega.github.io/vega-lite/docs/scale.html#band>`__ scales,
        shortcut for setting ``paddingInner`` and ``paddingOuter`` to the same value.

        For `point <https://vega.github.io/vega-lite/docs/scale.html#point>`__ scales,
        alias for ``paddingOuter``.

        **Default value:** For *continuous* scales, derived from the `scale config
        <https://vega.github.io/vega-lite/docs/scale.html#config>`__ 's
        ``continuousPadding``. For *band and point* scales, see ``paddingInner`` and
        ``paddingOuter``. By default, Vega-Lite sets padding such that *width/height =
        number of unique values * step*.
    paddingInner : anyOf(float, :class:`ExprRef`)
        The inner padding (spacing) within each band step of band scales, as a fraction of
        the step size. This value must lie in the range [0,1].

        For point scale, this property is invalid as point scales do not have internal band
        widths (only step sizes between bands).

        **Default value:** derived from the `scale config
        <https://vega.github.io/vega-lite/docs/scale.html#config>`__ 's
        ``bandPaddingInner``.
    paddingOuter : anyOf(float, :class:`ExprRef`)
        The outer padding (spacing) at the ends of the range of band and point scales, as a
        fraction of the step size. This value must lie in the range [0,1].

        **Default value:** derived from the `scale config
        <https://vega.github.io/vega-lite/docs/scale.html#config>`__ 's ``bandPaddingOuter``
        for band scales and ``pointPadding`` for point scales. By default, Vega-Lite sets
        outer padding such that *width/height = number of unique values * step*.
    range : anyOf(:class:`RangeEnum`, List(anyOf(float, string, List(float), :class:`ExprRef`)),
    Mapping(required=[field]))
        The range of the scale. One of:

        A string indicating a `pre-defined named scale range
        <https://vega.github.io/vega-lite/docs/scale.html#range-config>`__ (e.g., example,
        ``"symbol"``, or ``"diverging"`` ).

        For `continuous scales
        <https://vega.github.io/vega-lite/docs/scale.html#continuous>`__, two-element array
        indicating minimum and maximum values, or an array with more than two entries for
        specifying a `piecewise scale
        <https://vega.github.io/vega-lite/docs/scale.html#piecewise>`__.

        For `discrete <https://vega.github.io/vega-lite/docs/scale.html#discrete>`__ and
        `discretizing <https://vega.github.io/vega-lite/docs/scale.html#discretizing>`__
        scales, an array of desired output values or an object with a ``field`` property
        representing the range values. For example, if a field ``color`` contains CSS color
        names, we can set ``range`` to ``{field: "color"}``.

        **Notes:**

        1) For color scales you can also specify a color `scheme
        <https://vega.github.io/vega-lite/docs/scale.html#scheme>`__ instead of ``range``.

        2) Any directly specified ``range`` for ``x`` and ``y`` channels will be ignored.
        Range can be customized via the view's corresponding `size
        <https://vega.github.io/vega-lite/docs/size.html>`__ ( ``width`` and ``height`` ).
    rangeMax : anyOf(float, string, :class:`ExprRef`)
        Sets the maximum value in the scale range, overriding the ``range`` property or the
        default range. This property is only intended for use with scales having continuous
        ranges.
    rangeMin : anyOf(float, string, :class:`ExprRef`)
        Sets the minimum value in the scale range, overriding the ``range`` property or the
        default range. This property is only intended for use with scales having continuous
        ranges.
    reverse : anyOf(boolean, :class:`ExprRef`)
        If true, reverses the order of the scale range. **Default value:** ``false``.
    round : anyOf(boolean, :class:`ExprRef`)
        If ``true``, rounds numeric output values to integers. This can be helpful for
        snapping to the pixel grid.

        **Default value:** ``false``.
    scheme : anyOf(string, :class:`SchemeParams`, :class:`ExprRef`)
        A string indicating a color `scheme
        <https://vega.github.io/vega-lite/docs/scale.html#scheme>`__ name (e.g.,
        ``"category10"`` or ``"blues"`` ) or a `scheme parameter object
        <https://vega.github.io/vega-lite/docs/scale.html#scheme-params>`__.

        Discrete color schemes may be used with `discrete
        <https://vega.github.io/vega-lite/docs/scale.html#discrete>`__ or `discretizing
        <https://vega.github.io/vega-lite/docs/scale.html#discretizing>`__ scales.
        Continuous color schemes are intended for use with color scales.

        For the full list of supported schemes, please refer to the `Vega Scheme
        <https://vega.github.io/vega/docs/schemes/#reference>`__ reference.
    type : :class:`ScaleType`
        The type of scale. Vega-Lite supports the following categories of scale types:

        1) `Continuous Scales
        <https://vega.github.io/vega-lite/docs/scale.html#continuous>`__ -- mapping
        continuous domains to continuous output ranges ( `"linear"
        <https://vega.github.io/vega-lite/docs/scale.html#linear>`__, `"pow"
        <https://vega.github.io/vega-lite/docs/scale.html#pow>`__, `"sqrt"
        <https://vega.github.io/vega-lite/docs/scale.html#sqrt>`__, `"symlog"
        <https://vega.github.io/vega-lite/docs/scale.html#symlog>`__, `"log"
        <https://vega.github.io/vega-lite/docs/scale.html#log>`__, `"time"
        <https://vega.github.io/vega-lite/docs/scale.html#time>`__, `"utc"
        <https://vega.github.io/vega-lite/docs/scale.html#utc>`__.

        2) `Discrete Scales <https://vega.github.io/vega-lite/docs/scale.html#discrete>`__
        -- mapping discrete domains to discrete ( `"ordinal"
        <https://vega.github.io/vega-lite/docs/scale.html#ordinal>`__ ) or continuous (
        `"band" <https://vega.github.io/vega-lite/docs/scale.html#band>`__ and `"point"
        <https://vega.github.io/vega-lite/docs/scale.html#point>`__ ) output ranges.

        3) `Discretizing Scales
        <https://vega.github.io/vega-lite/docs/scale.html#discretizing>`__ -- mapping
        continuous domains to discrete output ranges `"bin-ordinal"
        <https://vega.github.io/vega-lite/docs/scale.html#bin-ordinal>`__, `"quantile"
        <https://vega.github.io/vega-lite/docs/scale.html#quantile>`__, `"quantize"
        <https://vega.github.io/vega-lite/docs/scale.html#quantize>`__ and `"threshold"
        <https://vega.github.io/vega-lite/docs/scale.html#threshold>`__.

        **Default value:** please see the `scale type table
        <https://vega.github.io/vega-lite/docs/scale.html#type>`__.
    zero : anyOf(boolean, :class:`ExprRef`)
        If ``true``, ensures that a zero baseline value is included in the scale domain.

        **Default value:** ``true`` for x and y channels if the quantitative field is not
        binned and no custom ``domain`` is provided; ``false`` otherwise.

        **Note:** Log, time, and utc scales do not support ``zero``.
    """
    _schema = {'$ref': '#/definitions/Scale'}

    def __init__(self, align=Undefined, base=Undefined, bins=Undefined, clamp=Undefined,
                 constant=Undefined, domain=Undefined, domainMax=Undefined, domainMid=Undefined,
                 domainMin=Undefined, exponent=Undefined, interpolate=Undefined, nice=Undefined,
                 padding=Undefined, paddingInner=Undefined, paddingOuter=Undefined, range=Undefined,
                 rangeMax=Undefined, rangeMin=Undefined, reverse=Undefined, round=Undefined,
                 scheme=Undefined, type=Undefined, zero=Undefined, **kwds):
        super(Scale, self).__init__(align=align, base=base, bins=bins, clamp=clamp, constant=constant,
                                    domain=domain, domainMax=domainMax, domainMid=domainMid,
                                    domainMin=domainMin, exponent=exponent, interpolate=interpolate,
                                    nice=nice, padding=padding, paddingInner=paddingInner,
                                    paddingOuter=paddingOuter, range=range, rangeMax=rangeMax,
                                    rangeMin=rangeMin, reverse=reverse, round=round, scheme=scheme,
                                    type=type, zero=zero, **kwds)
class ScaleBins(VegaLiteSchema):
    """ScaleBins schema wrapper.

    anyOf(List(float), :class:`ScaleBinParams`)
    """
    _schema = {"$ref": "#/definitions/ScaleBins"}

    def __init__(self, *args, **kwds):
        # Union-typed schema: positional and keyword values are forwarded
        # unchanged for the base class to validate against either variant.
        super(ScaleBins, self).__init__(*args, **kwds)
class ScaleBinParams(ScaleBins):
    """ScaleBinParams schema wrapper.

    Mapping(required=[step])

    Attributes
    ----------
    step : float
        The step size defining the bin interval width.
    start : float
        The starting (lowest-valued) bin boundary.

        **Default value:** The lowest value of the scale domain will be used.
    stop : float
        The stopping (highest-valued) bin boundary.

        **Default value:** The highest value of the scale domain will be used.
    """
    _schema = {"$ref": "#/definitions/ScaleBinParams"}

    def __init__(self, step=Undefined, start=Undefined, stop=Undefined, **kwds):
        params = dict(step=step, start=start, stop=stop)
        params.update(kwds)
        super(ScaleBinParams, self).__init__(**params)
class ScaleConfig(VegaLiteSchema):
    """ScaleConfig schema wrapper

    Mapping(required=[])

    Attributes
    ----------
    bandPaddingInner : anyOf(float, :class:`ExprRef`)
        Default inner padding for ``x`` and ``y`` band-ordinal scales.

        **Default value:**

        - ``barBandPaddingInner`` for bar marks ( ``0.1`` by default)
        - ``rectBandPaddingInner`` for rect and other marks ( ``0`` by default)
    bandPaddingOuter : anyOf(float, :class:`ExprRef`)
        Default outer padding for ``x`` and ``y`` band-ordinal scales.

        **Default value:** ``paddingInner/2`` (which makes *width/height = number of unique
        values * step* )
    barBandPaddingInner : anyOf(float, :class:`ExprRef`)
        Default inner padding for ``x`` and ``y`` band-ordinal scales of ``"bar"`` marks.

        **Default value:** ``0.1``
    clamp : anyOf(boolean, :class:`ExprRef`)
        If true, values that exceed the data domain are clamped to either the minimum or
        maximum range value
    continuousPadding : anyOf(float, :class:`ExprRef`)
        Default padding for continuous scales.

        **Default:** ``5`` for continuous x-scale of a vertical bar and continuous y-scale
        of a horizontal bar.; ``0`` otherwise.
    maxBandSize : float
        The default max value for mapping quantitative fields to bar's size/bandSize.

        If undefined (default), we will use the axis's size (width or height) - 1.
    maxFontSize : float
        The default max value for mapping quantitative fields to text's size/fontSize.

        **Default value:** ``40``
    maxOpacity : float
        Default max opacity for mapping a field to opacity.

        **Default value:** ``0.8``
    maxSize : float
        Default max value for point size scale.
    maxStrokeWidth : float
        Default max strokeWidth for the scale of strokeWidth for rule and line marks and of
        size for trail marks.

        **Default value:** ``4``
    minBandSize : float
        The default min value for mapping quantitative fields to bar and tick's
        size/bandSize scale with zero=false.

        **Default value:** ``2``
    minFontSize : float
        The default min value for mapping quantitative fields to tick's size/fontSize scale
        with zero=false

        **Default value:** ``8``
    minOpacity : float
        Default minimum opacity for mapping a field to opacity.

        **Default value:** ``0.3``
    minSize : float
        Default minimum value for point size scale with zero=false.

        **Default value:** ``9``
    minStrokeWidth : float
        Default minimum strokeWidth for the scale of strokeWidth for rule and line marks and
        of size for trail marks with zero=false.

        **Default value:** ``1``
    pointPadding : anyOf(float, :class:`ExprRef`)
        Default outer padding for ``x`` and ``y`` point-ordinal scales.

        **Default value:** ``0.5`` (which makes *width/height = number of unique values *
        step* )
    quantileCount : float
        Default range cardinality for `quantile
        <https://vega.github.io/vega-lite/docs/scale.html#quantile>`__ scale.

        **Default value:** ``4``
    quantizeCount : float
        Default range cardinality for `quantize
        <https://vega.github.io/vega-lite/docs/scale.html#quantize>`__ scale.

        **Default value:** ``4``
    rectBandPaddingInner : anyOf(float, :class:`ExprRef`)
        Default inner padding for ``x`` and ``y`` band-ordinal scales of ``"rect"`` marks.

        **Default value:** ``0``
    round : anyOf(boolean, :class:`ExprRef`)
        If true, rounds numeric output values to integers. This can be helpful for snapping
        to the pixel grid. (Only available for ``x``, ``y``, and ``size`` scales.)
    useUnaggregatedDomain : boolean
        Use the source data range before aggregation as scale domain instead of aggregated
        data for aggregate axis.

        This is equivalent to setting ``domain`` to ``"unaggregated"`` for aggregated
        *quantitative* fields by default.

        This property only works with aggregate functions that produce values within the raw
        data domain ( ``"mean"``, ``"average"``, ``"median"``, ``"q1"``, ``"q3"``,
        ``"min"``, ``"max"`` ). For other aggregations that produce values outside of the
        raw data domain (e.g. ``"count"``, ``"sum"`` ), this property is ignored.

        **Default value:** ``false``
    xReverse : anyOf(boolean, :class:`ExprRef`)
        Reverse x-scale by default (useful for right-to-left charts).
    """
    _schema = {'$ref': '#/definitions/ScaleConfig'}

    def __init__(self, bandPaddingInner=Undefined, bandPaddingOuter=Undefined,
                 barBandPaddingInner=Undefined, clamp=Undefined, continuousPadding=Undefined,
                 maxBandSize=Undefined, maxFontSize=Undefined, maxOpacity=Undefined, maxSize=Undefined,
                 maxStrokeWidth=Undefined, minBandSize=Undefined, minFontSize=Undefined,
                 minOpacity=Undefined, minSize=Undefined, minStrokeWidth=Undefined,
                 pointPadding=Undefined, quantileCount=Undefined, quantizeCount=Undefined,
                 rectBandPaddingInner=Undefined, round=Undefined, useUnaggregatedDomain=Undefined,
                 xReverse=Undefined, **kwds):
        super(ScaleConfig, self).__init__(bandPaddingInner=bandPaddingInner,
                                          bandPaddingOuter=bandPaddingOuter,
                                          barBandPaddingInner=barBandPaddingInner, clamp=clamp,
                                          continuousPadding=continuousPadding, maxBandSize=maxBandSize,
                                          maxFontSize=maxFontSize, maxOpacity=maxOpacity,
                                          maxSize=maxSize, maxStrokeWidth=maxStrokeWidth,
                                          minBandSize=minBandSize, minFontSize=minFontSize,
                                          minOpacity=minOpacity, minSize=minSize,
                                          minStrokeWidth=minStrokeWidth, pointPadding=pointPadding,
                                          quantileCount=quantileCount, quantizeCount=quantizeCount,
                                          rectBandPaddingInner=rectBandPaddingInner, round=round,
                                          useUnaggregatedDomain=useUnaggregatedDomain,
                                          xReverse=xReverse, **kwds)
class ScaleInterpolateEnum(VegaLiteSchema):
    """ScaleInterpolateEnum schema wrapper

    Enumerated interpolation method names for continuous scales:
    enum('rgb', 'lab', 'hcl', 'hsl', 'hsl-long', 'hcl-long', 'cubehelix', 'cubehelix-long')
    """
    # JSON-schema pointer used by the schema machinery for validation.
    _schema = {'$ref': '#/definitions/ScaleInterpolateEnum'}
    def __init__(self, *args):
        # The enum value is validated against _schema by the superclass.
        super(ScaleInterpolateEnum, self).__init__(*args)
class ScaleInterpolateParams(VegaLiteSchema):
    """ScaleInterpolateParams schema wrapper

    Mapping(required=[type])

    Attributes
    ----------
    type : enum('rgb', 'cubehelix', 'cubehelix-long')
    gamma : float
    """
    _schema = {'$ref': '#/definitions/ScaleInterpolateParams'}

    def __init__(self, type=Undefined, gamma=Undefined, **kwds):
        # Known schema properties are collected first, then any extra keywords,
        # so the property order matches the generated keyword order exactly.
        props = dict(type=type, gamma=gamma)
        props.update(kwds)
        super(ScaleInterpolateParams, self).__init__(**props)
class ScaleResolveMap(VegaLiteSchema):
    """ScaleResolveMap schema wrapper

    Mapping(required=[])

    Per-channel scale resolution. Every attribute accepts a
    :class:`ResolveMode`: ``angle``, ``color``, ``fill``, ``fillOpacity``,
    ``opacity``, ``radius``, ``shape``, ``size``, ``stroke``, ``strokeDash``,
    ``strokeOpacity``, ``strokeWidth``, ``theta``, ``x``, ``y``.
    """
    _schema = {'$ref': '#/definitions/ScaleResolveMap'}

    def __init__(self, angle=Undefined, color=Undefined, fill=Undefined, fillOpacity=Undefined,
                 opacity=Undefined, radius=Undefined, shape=Undefined, size=Undefined, stroke=Undefined,
                 strokeDash=Undefined, strokeOpacity=Undefined, strokeWidth=Undefined, theta=Undefined,
                 x=Undefined, y=Undefined, **kwds):
        # Build the property mapping in declared channel order, then append any
        # extra keywords; equivalent to forwarding each keyword explicitly.
        props = dict(angle=angle, color=color, fill=fill, fillOpacity=fillOpacity,
                     opacity=opacity, radius=radius, shape=shape, size=size,
                     stroke=stroke, strokeDash=strokeDash, strokeOpacity=strokeOpacity,
                     strokeWidth=strokeWidth, theta=theta, x=x, y=y)
        props.update(kwds)
        super(ScaleResolveMap, self).__init__(**props)
class ScaleType(VegaLiteSchema):
    """ScaleType schema wrapper

    Enumerated scale type names:
    enum('linear', 'log', 'pow', 'sqrt', 'symlog', 'identity', 'sequential', 'time', 'utc',
    'quantile', 'quantize', 'threshold', 'bin-ordinal', 'ordinal', 'point', 'band')
    """
    # JSON-schema pointer used by the schema machinery for validation.
    _schema = {'$ref': '#/definitions/ScaleType'}
    def __init__(self, *args):
        # The enum value is validated against _schema by the superclass.
        super(ScaleType, self).__init__(*args)
class SchemeParams(VegaLiteSchema):
    """SchemeParams schema wrapper

    Mapping(required=[name])

    Attributes
    ----------
    name : string
        A color scheme name for ordinal scales (e.g., ``"category10"`` or ``"blues"``).
        For the full list of supported schemes, see the `Vega Scheme
        <https://vega.github.io/vega/docs/schemes/#reference>`__ reference.
    count : float
        The number of colors to use in the scheme; useful for scale types such as
        ``"quantize"`` that derive the number of discrete domain bins from the
        length of the scale range.
    extent : List(float)
        The extent of the color range to use. For example ``[0.2, 1]`` rescales
        the scheme so color values in the range *[0, 0.2)* are excluded.
    """
    _schema = {'$ref': '#/definitions/SchemeParams'}

    def __init__(self, name=Undefined, count=Undefined, extent=Undefined, **kwds):
        # Declared properties first, extra keywords second — same effective
        # keyword order as forwarding each argument inline.
        props = dict(name=name, count=count, extent=extent)
        props.update(kwds)
        super(SchemeParams, self).__init__(**props)
class SecondaryFieldDef(Position2Def):
    """SecondaryFieldDef schema wrapper

    Mapping(required=[])

    A field definition of a secondary channel that shares a scale with another
    primary channel. For example, ``x2``, ``xError`` and ``xError2`` share the
    same scale with ``x``.

    Attributes
    ----------
    aggregate : :class:`Aggregate`
        Aggregation function for the field (e.g., ``"mean"``, ``"sum"``,
        ``"median"``, ``"min"``, ``"max"``, ``"count"`` ).
        **Default value:** ``undefined`` (None). See the `aggregate
        <https://vega.github.io/vega-lite/docs/aggregate.html>`__ documentation.
    band : float
        For rect-based marks ( ``rect``, ``bar``, and ``image`` ), mark size
        relative to the bandwidth of `band scales
        <https://vega.github.io/vega-lite/docs/scale.html#band>`__, bins or time
        units ( ``1`` = full bandwidth/interval, ``0.5`` = half). For other
        marks, relative position on a band of a stacked, binned, time unit or
        band scale ( ``0`` = beginning of the band, ``0.5`` = middle).
    bin : None
        A flag for binning a ``quantitative`` field, `an object defining binning
        parameters <https://vega.github.io/vega-lite/docs/bin.html#params>`__,
        or ``"binned"`` to indicate the data for ``x`` or ``y`` are binned
        before import into Vega-Lite. With ``"binned"``, map the bin-start field
        to ``x`` (or ``y`` ) and the bin-end field to ``x2`` (or ``y2`` ); the
        axis's ``tickMinStep`` property can adjust ticks to the bin step.
        **Default value:** ``false``. See the `bin
        <https://vega.github.io/vega-lite/docs/bin.html>`__ documentation.
    field : :class:`Field`
        **Required.** A string defining the name of the field from which to pull
        a data value, or an object defining iterated values from the `repeat
        <https://vega.github.io/vega-lite/docs/repeat.html>`__ operator. Dots
        ( ``.`` ) and brackets ( ``[`` and ``]`` ) access nested objects and can
        be escaped with ``\\`` (e.g., ``"a\\.b"`` ); ``field`` is not required
        if ``aggregate`` is ``count``. See the `field
        <https://vega.github.io/vega-lite/docs/field.html>`__ documentation.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit (e.g., ``year``, ``yearmonth``, ``month``, ``hours`` ) for a
        temporal field, or `a temporal field that gets casted as ordinal
        <https://vega.github.io/vega-lite/docs/type.html#cast>`__.
        **Default value:** ``undefined`` (None). See the `timeUnit
        <https://vega.github.io/vega-lite/docs/timeunit.html>`__ documentation.
    title : anyOf(:class:`Text`, None)
        A title for the field; ``null`` removes the title. Defaults to a title
        derived from the field's name and transformation function ( ``aggregate``,
        ``bin`` and ``timeUnit`` ), e.g. ``"Sum of Profit"`` or
        ``"Profit (binned)"``. Axis/header/legend ``title`` takes precedence
        when both are defined.
    """
    _schema = {'$ref': '#/definitions/SecondaryFieldDef'}

    def __init__(self, aggregate=Undefined, band=Undefined, bin=Undefined, field=Undefined,
                 timeUnit=Undefined, title=Undefined, **kwds):
        # Declared properties first, then extra keywords — identical keyword
        # order to forwarding every argument inline in the super() call.
        props = dict(aggregate=aggregate, band=band, bin=bin, field=field,
                     timeUnit=timeUnit, title=title)
        props.update(kwds)
        super(SecondaryFieldDef, self).__init__(**props)
class SelectionComposition(VegaLiteSchema):
    """SelectionComposition schema wrapper

    A selection name or a logical composition of selections:
    anyOf(:class:`SelectionNot`, :class:`SelectionAnd`, :class:`SelectionOr`, string)
    """
    # JSON-schema pointer used by the schema machinery for validation.
    _schema = {'$ref': '#/definitions/SelectionComposition'}
    def __init__(self, *args, **kwds):
        super(SelectionComposition, self).__init__(*args, **kwds)
class SelectionAnd(SelectionComposition):
    """SelectionAnd schema wrapper

    Mapping(required=[and])

    Attributes
    ----------
    and : List(:class:`SelectionComposition`)
    """
    _schema = {'$ref': '#/definitions/SelectionAnd'}
    def __init__(self, **kwds):
        # ``and`` is a Python reserved word, so it cannot be a named parameter;
        # pass it via **kwds, e.g. SelectionAnd(**{'and': [...]}).
        super(SelectionAnd, self).__init__(**kwds)
class SelectionConfig(VegaLiteSchema):
    """SelectionConfig schema wrapper

    Mapping(required=[])

    Attributes
    ----------
    interval : :class:`IntervalSelectionConfig`
        The default definition for an `interval
        <https://vega.github.io/vega-lite/docs/selection.html#type>`__ selection.
        All properties and transformations for an interval selection definition
        (except ``type`` ) may be specified here; e.g. ``{"translate": false}``
        disables moving interval selections by default.
    multi : :class:`MultiSelectionConfig`
        The default definition for a `multi
        <https://vega.github.io/vega-lite/docs/selection.html#type>`__ selection.
        All properties and transformations for a multi selection definition
        (except ``type`` ) may be specified here; e.g.
        ``{"toggle": "event.altKey"}`` toggles values on alt-click by default.
    single : :class:`SingleSelectionConfig`
        The default definition for a `single
        <https://vega.github.io/vega-lite/docs/selection.html#type>`__ selection.
        All properties and transformations for a single selection definition
        (except ``type`` ) may be specified here; e.g. ``{"on": "dblclick"}``
        populates single selections on double-click by default.
    """
    _schema = {'$ref': '#/definitions/SelectionConfig'}

    def __init__(self, interval=Undefined, multi=Undefined, single=Undefined, **kwds):
        # Declared properties first, extra keywords second — same effective
        # keyword order as the inline forwarding form.
        props = dict(interval=interval, multi=multi, single=single)
        props.update(kwds)
        super(SelectionConfig, self).__init__(**props)
class SelectionDef(VegaLiteSchema):
    """SelectionDef schema wrapper

    Union of the three concrete selection definitions:
    anyOf(:class:`SingleSelection`, :class:`MultiSelection`, :class:`IntervalSelection`)
    """
    # JSON-schema pointer used by the schema machinery for validation.
    _schema = {'$ref': '#/definitions/SelectionDef'}
    def __init__(self, *args, **kwds):
        super(SelectionDef, self).__init__(*args, **kwds)
class IntervalSelection(SelectionDef):
    """IntervalSelection schema wrapper

    Mapping(required=[type])

    Attributes
    ----------
    type : string
        Determines the default event processing and data query for the
        selection. Vega-Lite currently supports three selection types:
        ``"single"`` (select one discrete value on ``click`` ), ``"multi"``
        (select multiple discrete values, toggled on shift- ``click`` ), and
        ``"interval"`` (select a continuous range of values on ``drag`` ).
    bind : string
        Establishes a two-way binding between the interval selection and the
        scales used within the same view, allowing interactive pan and zoom.
        See the `bind <https://vega.github.io/vega-lite/docs/bind.html>`__
        documentation.
    clear : anyOf(:class:`Stream`, string, boolean)
        Clears the selection, emptying it of all values. Can be an `Event Stream
        <https://vega.github.io/vega/docs/event-streams/>`__ or ``false`` to
        disable. **Default value:** ``dblclick``.
    empty : enum('all', 'none')
        By default, ``all`` data values are considered to lie within an empty
        selection; ``none`` makes empty selections contain no data values.
    encodings : List(:class:`SingleDefUnitChannel`)
        An array of encoding channels whose data field values must match for a
        data tuple to fall within the selection.
    fields : List(:class:`FieldName`)
        An array of field names whose values must match for a data tuple to
        fall within the selection.
    init : :class:`SelectionInitIntervalMapping`
        Initialize the selection with a mapping between `projected channels or
        field names <https://vega.github.io/vega-lite/docs/project.html>`__ and
        arrays of initial values.
    mark : :class:`BrushConfig`
        Customizes the rectangle mark that depicts the extents of the interval.
    on : anyOf(:class:`Stream`, string)
        A `Vega event stream <https://vega.github.io/vega/docs/event-streams/>`__
        (object or selector) that triggers the selection; for interval
        selections it must specify a start and end.
    resolve : :class:`SelectionResolution`
        With layered and multi-view displays, the strategy for resolving the
        selection's data queries in filter transforms, conditional encoding
        rules, or scale domains.
    translate : anyOf(string, boolean)
        When truthy, allows interactively moving the interval selection. Can be
        ``true``, ``false`` (disable panning), or a Vega event stream definition
        with a start and end event. **Default value:** ``true`` ( ``[mousedown,
        window:mouseup] > window:mousemove!`` ).
    zoom : anyOf(string, boolean)
        When truthy, allows interactively resizing the interval selection. Can
        be ``true``, ``false`` (disable zooming), or a Vega event stream
        definition; only ``wheel`` events are supported.
        **Default value:** ``true`` ( ``wheel!`` ).
    """
    _schema = {'$ref': '#/definitions/IntervalSelection'}

    def __init__(self, type=Undefined, bind=Undefined, clear=Undefined, empty=Undefined,
                 encodings=Undefined, fields=Undefined, init=Undefined, mark=Undefined, on=Undefined,
                 resolve=Undefined, translate=Undefined, zoom=Undefined, **kwds):
        # Declared properties first, extra keywords second — identical keyword
        # order to forwarding every argument inline.
        props = dict(type=type, bind=bind, clear=clear, empty=empty,
                     encodings=encodings, fields=fields, init=init, mark=mark,
                     on=on, resolve=resolve, translate=translate, zoom=zoom)
        props.update(kwds)
        super(IntervalSelection, self).__init__(**props)
class MultiSelection(SelectionDef):
    """MultiSelection schema wrapper

    Mapping(required=[type])

    Attributes
    ----------
    type : string
        Determines the default event processing and data query for the
        selection. Vega-Lite currently supports three selection types:
        ``"single"`` (select one discrete value on ``click`` ), ``"multi"``
        (select multiple discrete values, toggled on shift- ``click`` ), and
        ``"interval"`` (select a continuous range of values on ``drag`` ).
    bind : :class:`LegendBinding`
        When set, the selection is populated by interacting with the
        corresponding legend. Direct manipulation interaction is disabled by
        default; re-enable it via the selection's ``on`` property. Legend
        bindings are restricted to selections that specify a single field or
        encoding.
    clear : anyOf(:class:`Stream`, string, boolean)
        Clears the selection, emptying it of all values. Can be an `Event Stream
        <https://vega.github.io/vega/docs/event-streams/>`__ or ``false`` to
        disable. **Default value:** ``dblclick``.
    empty : enum('all', 'none')
        By default, ``all`` data values are considered to lie within an empty
        selection; ``none`` makes empty selections contain no data values.
    encodings : List(:class:`SingleDefUnitChannel`)
        An array of encoding channels whose data field values must match for a
        data tuple to fall within the selection.
    fields : List(:class:`FieldName`)
        An array of field names whose values must match for a data tuple to
        fall within the selection.
    init : List(:class:`SelectionInitMapping`)
        Initialize the selection with a mapping between `projected channels or
        field names <https://vega.github.io/vega-lite/docs/project.html>`__ and
        an initial value (or array of values).
    nearest : boolean
        When true, an invisible voronoi diagram is computed to accelerate
        discrete selection; the data value *nearest* the mouse cursor is added
        to the selection.
    on : anyOf(:class:`Stream`, string)
        A `Vega event stream <https://vega.github.io/vega/docs/event-streams/>`__
        (object or selector) that triggers the selection.
    resolve : :class:`SelectionResolution`
        With layered and multi-view displays, the strategy for resolving the
        selection's data queries in filter transforms, conditional encoding
        rules, or scale domains.
    toggle : anyOf(string, boolean)
        Controls whether data values should be toggled or only ever inserted
        into multi selections. Can be ``true``, ``false`` (insertion only), or a
        `Vega expression <https://vega.github.io/vega/docs/expressions/>`__.
        **Default value:** ``true``, which corresponds to ``event.shiftKey``
        (values toggle when interacting with the shift-key pressed); the Vega
        expression ``"true"`` toggles without the shift-key.
    """
    _schema = {'$ref': '#/definitions/MultiSelection'}

    def __init__(self, type=Undefined, bind=Undefined, clear=Undefined, empty=Undefined,
                 encodings=Undefined, fields=Undefined, init=Undefined, nearest=Undefined, on=Undefined,
                 resolve=Undefined, toggle=Undefined, **kwds):
        # Declared properties first, extra keywords second — identical keyword
        # order to forwarding every argument inline.
        props = dict(type=type, bind=bind, clear=clear, empty=empty,
                     encodings=encodings, fields=fields, init=init,
                     nearest=nearest, on=on, resolve=resolve, toggle=toggle)
        props.update(kwds)
        super(MultiSelection, self).__init__(**props)
class SelectionExtent(BinExtent):
    """SelectionExtent schema wrapper

    A bin extent driven by a selection:
    anyOf(Mapping(required=[selection]), Mapping(required=[selection]))
    """
    # JSON-schema pointer used by the schema machinery for validation.
    _schema = {'$ref': '#/definitions/SelectionExtent'}
    def __init__(self, *args, **kwds):
        super(SelectionExtent, self).__init__(*args, **kwds)
class SelectionInit(VegaLiteSchema):
    """SelectionInit schema wrapper

    An initial value for a selection:
    anyOf(:class:`PrimitiveValue`, :class:`DateTime`)
    """
    # JSON-schema pointer used by the schema machinery for validation.
    _schema = {'$ref': '#/definitions/SelectionInit'}
    def __init__(self, *args, **kwds):
        super(SelectionInit, self).__init__(*args, **kwds)
class DateTime(SelectionInit):
    """DateTime schema wrapper

    Mapping(required=[])

    Object for defining datetime in Vega-Lite Filter. If both month and quarter
    are provided, month has higher precedence. ``day`` cannot be combined with
    other date. We accept string for month and day names.

    Attributes
    ----------
    date : float
        Integer value representing the date (day of the month) from 1-31.
    day : anyOf(:class:`Day`, string)
        Value representing the day of a week: (1) integer value -- ``1``
        represents Monday; (2) case-insensitive day name (e.g., ``"Monday"`` );
        (3) case-insensitive, 3-character short day name (e.g., ``"Mon"`` ).
        **Warning:** a DateTime definition object with ``day`` should not be
        combined with ``year``, ``quarter``, ``month``, or ``date``.
    hours : float
        Integer value representing the hour of a day from 0-23.
    milliseconds : float
        Integer value representing the millisecond segment of time.
    minutes : float
        Integer value representing the minute segment of time from 0-59.
    month : anyOf(:class:`Month`, string)
        One of: (1) integer value representing the month from ``1`` - ``12``
        ( ``1`` is January); (2) case-insensitive month name (e.g.,
        ``"January"`` ); (3) case-insensitive, 3-character short month name
        (e.g., ``"Jan"`` ).
    quarter : float
        Integer value representing the quarter of the year (from 1-4).
    seconds : float
        Integer value representing the second segment (0-59) of a time value.
    utc : boolean
        A boolean flag indicating if date time is in utc time. If false, the
        date time is in local time.
    year : float
        Integer value representing the year.
    """
    _schema = {'$ref': '#/definitions/DateTime'}

    def __init__(self, date=Undefined, day=Undefined, hours=Undefined, milliseconds=Undefined,
                 minutes=Undefined, month=Undefined, quarter=Undefined, seconds=Undefined,
                 utc=Undefined, year=Undefined, **kwds):
        # Declared properties first, extra keywords second — identical keyword
        # order to forwarding every argument inline.
        props = dict(date=date, day=day, hours=hours, milliseconds=milliseconds,
                     minutes=minutes, month=month, quarter=quarter,
                     seconds=seconds, utc=utc, year=year)
        props.update(kwds)
        super(DateTime, self).__init__(**props)
class PrimitiveValue(SelectionInit):
    """PrimitiveValue schema wrapper

    A scalar JSON value:
    anyOf(float, string, boolean, None)
    """
    # JSON-schema pointer used by the schema machinery for validation.
    _schema = {'$ref': '#/definitions/PrimitiveValue'}
    def __init__(self, *args):
        super(PrimitiveValue, self).__init__(*args)
class SelectionInitInterval(VegaLiteSchema):
    """SelectionInitInterval schema wrapper

    A two-element initial extent for an interval selection:
    anyOf(:class:`Vector2boolean`, :class:`Vector2number`, :class:`Vector2string`,
    :class:`Vector2DateTime`)
    """
    # JSON-schema pointer used by the schema machinery for validation.
    _schema = {'$ref': '#/definitions/SelectionInitInterval'}
    def __init__(self, *args, **kwds):
        super(SelectionInitInterval, self).__init__(*args, **kwds)
class SelectionInitIntervalMapping(VegaLiteSchema):
    """SelectionInitIntervalMapping schema wrapper

    Mapping(required=[])

    Free-form mapping; keys are channel or field names, passed via ``**kwds``.
    """
    # JSON-schema pointer used by the schema machinery for validation.
    _schema = {'$ref': '#/definitions/SelectionInitIntervalMapping'}
    def __init__(self, **kwds):
        super(SelectionInitIntervalMapping, self).__init__(**kwds)
class SelectionInitMapping(VegaLiteSchema):
    """SelectionInitMapping schema wrapper

    Mapping(required=[])

    Free-form mapping; keys are channel or field names, passed via ``**kwds``.
    """
    # JSON-schema pointer used by the schema machinery for validation.
    _schema = {'$ref': '#/definitions/SelectionInitMapping'}
    def __init__(self, **kwds):
        super(SelectionInitMapping, self).__init__(**kwds)
class SelectionNot(SelectionComposition):
    """SelectionNot schema wrapper

    Mapping(required=[not])

    Attributes
    ----------
    not : :class:`SelectionComposition`
    """
    _schema = {'$ref': '#/definitions/SelectionNot'}
    def __init__(self, **kwds):
        # ``not`` is a Python reserved word, so it cannot be a named parameter;
        # pass it via **kwds, e.g. SelectionNot(**{'not': ...}).
        super(SelectionNot, self).__init__(**kwds)
class SelectionOr(SelectionComposition):
    """SelectionOr schema wrapper

    Mapping(required=[or])

    Attributes
    ----------
    or : List(:class:`SelectionComposition`)
    """
    _schema = {'$ref': '#/definitions/SelectionOr'}
    def __init__(self, **kwds):
        # ``or`` is a Python reserved word, so it cannot be a named parameter;
        # pass it via **kwds, e.g. SelectionOr(**{'or': [...]}).
        super(SelectionOr, self).__init__(**kwds)
class SelectionPredicate(Predicate):
    """SelectionPredicate schema wrapper

    Mapping(required=[selection])

    Attributes
    ----------
    selection : :class:`SelectionComposition`
        Filter using a selection name or a logical composition of selection names.
    """
    _schema = {'$ref': '#/definitions/SelectionPredicate'}

    def __init__(self, selection=Undefined, **kwds):
        # Declared property first, extra keywords second — same effective
        # keyword order as the inline forwarding form.
        props = dict(selection=selection)
        props.update(kwds)
        super(SelectionPredicate, self).__init__(**props)
class SelectionResolution(VegaLiteSchema):
    """SelectionResolution schema wrapper

    Enumerated selection-resolution strategy names:
    enum('global', 'union', 'intersect')
    """
    # JSON-schema pointer used by the schema machinery for validation.
    _schema = {'$ref': '#/definitions/SelectionResolution'}
    def __init__(self, *args):
        # The enum value is validated against _schema by the superclass.
        super(SelectionResolution, self).__init__(*args)
class SequenceGenerator(Generator):
    """SequenceGenerator schema wrapper

    Mapping(required=[sequence])

    Attributes
    ----------
    sequence : :class:`SequenceParams`
        Generate a sequence of numbers.
    name : string
        Provide a placeholder name and bind data at runtime.
    """
    _schema = {'$ref': '#/definitions/SequenceGenerator'}

    def __init__(self, sequence=Undefined, name=Undefined, **kwds):
        # Declared properties first, extra keywords second — same effective
        # keyword order as the inline forwarding form.
        props = dict(sequence=sequence, name=name)
        props.update(kwds)
        super(SequenceGenerator, self).__init__(**props)
class SequenceParams(VegaLiteSchema):
    """SequenceParams schema wrapper

    Mapping(required=[start, stop])

    Attributes
    ----------
    start : float
        The starting value of the sequence (inclusive).
    stop : float
        The ending value of the sequence (exclusive).
    step : float
        The step value between sequence entries. **Default value:** ``1``
    as : :class:`FieldName`
        The name of the generated sequence field. **Default value:** ``"data"``
    """
    _schema = {'$ref': '#/definitions/SequenceParams'}

    def __init__(self, start=Undefined, stop=Undefined, step=Undefined, **kwds):
        # ``as`` is a Python reserved word and therefore cannot be a named
        # parameter; supply it through **kwds, e.g. SequenceParams(**{'as': 'f'}).
        props = dict(start=start, stop=stop, step=step)
        props.update(kwds)
        super(SequenceParams, self).__init__(**props)
class SequentialMultiHue(ColorScheme):
    """SequentialMultiHue schema wrapper

    Enumerated sequential multi-hue color scheme names (base names plus
    size-suffixed variants, e.g. ``'bluegreen-3'`` .. ``'bluegreen-9'``):
    enum('turbo', 'viridis', 'inferno', 'magma', 'plasma', 'cividis', 'bluegreen',
    'bluegreen-3', 'bluegreen-4', 'bluegreen-5', 'bluegreen-6', 'bluegreen-7', 'bluegreen-8',
    'bluegreen-9', 'bluepurple', 'bluepurple-3', 'bluepurple-4', 'bluepurple-5', 'bluepurple-6',
    'bluepurple-7', 'bluepurple-8', 'bluepurple-9', 'goldgreen', 'goldgreen-3', 'goldgreen-4',
    'goldgreen-5', 'goldgreen-6', 'goldgreen-7', 'goldgreen-8', 'goldgreen-9', 'goldorange',
    'goldorange-3', 'goldorange-4', 'goldorange-5', 'goldorange-6', 'goldorange-7',
    'goldorange-8', 'goldorange-9', 'goldred', 'goldred-3', 'goldred-4', 'goldred-5',
    'goldred-6', 'goldred-7', 'goldred-8', 'goldred-9', 'greenblue', 'greenblue-3',
    'greenblue-4', 'greenblue-5', 'greenblue-6', 'greenblue-7', 'greenblue-8', 'greenblue-9',
    'orangered', 'orangered-3', 'orangered-4', 'orangered-5', 'orangered-6', 'orangered-7',
    'orangered-8', 'orangered-9', 'purplebluegreen', 'purplebluegreen-3', 'purplebluegreen-4',
    'purplebluegreen-5', 'purplebluegreen-6', 'purplebluegreen-7', 'purplebluegreen-8',
    'purplebluegreen-9', 'purpleblue', 'purpleblue-3', 'purpleblue-4', 'purpleblue-5',
    'purpleblue-6', 'purpleblue-7', 'purpleblue-8', 'purpleblue-9', 'purplered', 'purplered-3',
    'purplered-4', 'purplered-5', 'purplered-6', 'purplered-7', 'purplered-8', 'purplered-9',
    'redpurple', 'redpurple-3', 'redpurple-4', 'redpurple-5', 'redpurple-6', 'redpurple-7',
    'redpurple-8', 'redpurple-9', 'yellowgreenblue', 'yellowgreenblue-3', 'yellowgreenblue-4',
    'yellowgreenblue-5', 'yellowgreenblue-6', 'yellowgreenblue-7', 'yellowgreenblue-8',
    'yellowgreenblue-9', 'yellowgreen', 'yellowgreen-3', 'yellowgreen-4', 'yellowgreen-5',
    'yellowgreen-6', 'yellowgreen-7', 'yellowgreen-8', 'yellowgreen-9', 'yelloworangebrown',
    'yelloworangebrown-3', 'yelloworangebrown-4', 'yelloworangebrown-5', 'yelloworangebrown-6',
    'yelloworangebrown-7', 'yelloworangebrown-8', 'yelloworangebrown-9', 'yelloworangered',
    'yelloworangered-3', 'yelloworangered-4', 'yelloworangered-5', 'yelloworangered-6',
    'yelloworangered-7', 'yelloworangered-8', 'yelloworangered-9', 'darkblue', 'darkblue-3',
    'darkblue-4', 'darkblue-5', 'darkblue-6', 'darkblue-7', 'darkblue-8', 'darkblue-9',
    'darkgold', 'darkgold-3', 'darkgold-4', 'darkgold-5', 'darkgold-6', 'darkgold-7',
    'darkgold-8', 'darkgold-9', 'darkgreen', 'darkgreen-3', 'darkgreen-4', 'darkgreen-5',
    'darkgreen-6', 'darkgreen-7', 'darkgreen-8', 'darkgreen-9', 'darkmulti', 'darkmulti-3',
    'darkmulti-4', 'darkmulti-5', 'darkmulti-6', 'darkmulti-7', 'darkmulti-8', 'darkmulti-9',
    'darkred', 'darkred-3', 'darkred-4', 'darkred-5', 'darkred-6', 'darkred-7', 'darkred-8',
    'darkred-9', 'lightgreyred', 'lightgreyred-3', 'lightgreyred-4', 'lightgreyred-5',
    'lightgreyred-6', 'lightgreyred-7', 'lightgreyred-8', 'lightgreyred-9', 'lightgreyteal',
    'lightgreyteal-3', 'lightgreyteal-4', 'lightgreyteal-5', 'lightgreyteal-6',
    'lightgreyteal-7', 'lightgreyteal-8', 'lightgreyteal-9', 'lightmulti', 'lightmulti-3',
    'lightmulti-4', 'lightmulti-5', 'lightmulti-6', 'lightmulti-7', 'lightmulti-8',
    'lightmulti-9', 'lightorange', 'lightorange-3', 'lightorange-4', 'lightorange-5',
    'lightorange-6', 'lightorange-7', 'lightorange-8', 'lightorange-9', 'lighttealblue',
    'lighttealblue-3', 'lighttealblue-4', 'lighttealblue-5', 'lighttealblue-6',
    'lighttealblue-7', 'lighttealblue-8', 'lighttealblue-9')
    """
    # JSON-schema pointer used by the schema machinery for validation.
    _schema = {'$ref': '#/definitions/SequentialMultiHue'}
    def __init__(self, *args):
        # The enum value is validated against _schema by the superclass.
        super(SequentialMultiHue, self).__init__(*args)
class SequentialSingleHue(ColorScheme):
    """SequentialSingleHue schema wrapper

    Enumerated sequential single-hue color scheme names:
    enum('blues', 'tealblues', 'teals', 'greens', 'browns', 'greys', 'purples', 'warmgreys',
    'reds', 'oranges')
    """
    # JSON-schema pointer used by the schema machinery for validation.
    _schema = {'$ref': '#/definitions/SequentialSingleHue'}
    def __init__(self, *args):
        # The enum value is validated against _schema by the superclass.
        super(SequentialSingleHue, self).__init__(*args)
class ShapeDef(VegaLiteSchema):
    """ShapeDef schema wrapper

    Union of shape-channel definitions:
    anyOf(:class:`FieldOrDatumDefWithConditionMarkPropFieldDefTypeForShapestringnull`,
    :class:`FieldOrDatumDefWithConditionDatumDefstringnull`,
    :class:`ValueDefWithConditionMarkPropFieldOrDatumDefTypeForShapestringnull`)
    """
    # JSON-schema pointer used by the schema machinery for validation.
    _schema = {'$ref': '#/definitions/ShapeDef'}
    def __init__(self, *args, **kwds):
        super(ShapeDef, self).__init__(*args, **kwds)
class FieldOrDatumDefWithConditionDatumDefstringnull(MarkPropDefstringnullTypeForShape, ShapeDef):
    """FieldOrDatumDefWithConditionDatumDefstringnull schema wrapper

    Mapping(required=[])

    A datum definition with an optional condition:
    ``{condition: {value: ...}, datum: ..., ...}``.

    Parameters
    ----------
    band : float
        For rect-based marks (``rect``, ``bar``, ``image``), mark size
        relative to the bandwidth of a band scale, bin, or time unit; for
        other marks, relative position on a band (0 = start, 0.5 = middle).
    condition : :class:`ConditionalValueDefstringnullExprRef` or a list of them
        One or more value definitions with a selection or test predicate.
        A field definition's condition may only contain conditional *value*
        definitions, since at most one field may be encoded per channel.
    datum : :class:`PrimitiveValue`, :class:`DateTime`, :class:`ExprRef`, or :class:`RepeatRef`
        A constant value in the data domain.
    type : :class:`Type`
        Type of measurement ("quantitative", "temporal", "ordinal",
        "nominal", or "geojson") for the encoded constant value.
    """
    _schema = {'$ref': '#/definitions/FieldOrDatumDefWithCondition<DatumDef,(string|null)>'}

    def __init__(self, band=Undefined, condition=Undefined, datum=Undefined, type=Undefined, **kwds):
        # The named parameters can never already appear in ``kwds`` (Python
        # binds them first), so merging them in is collision-free.
        kwds.update(band=band, condition=condition, datum=datum, type=type)
        super(FieldOrDatumDefWithConditionDatumDefstringnull, self).__init__(**kwds)
class FieldOrDatumDefWithConditionMarkPropFieldDefTypeForShapestringnull(MarkPropDefstringnullTypeForShape, ShapeDef):
    """FieldOrDatumDefWithConditionMarkPropFieldDefTypeForShapestringnull schema wrapper

    Mapping(required=[])

    A field definition with an optional condition:
    ``{condition: {value: ...}, field: ..., ...}``.

    Parameters
    ----------
    aggregate : :class:`Aggregate`
        Aggregation function for the field (e.g. "mean", "sum", "count").
    band : float
        For rect-based marks, mark size relative to the bandwidth of a band
        scale, bin, or time unit; for other marks, relative position on a
        band (0 = start, 0.5 = middle).
    bin : bool, :class:`BinParams`, or None
        Binning flag or parameters for a quantitative field, or "binned" to
        indicate the data are binned before being imported into Vega-Lite.
    condition : :class:`ConditionalValueDefstringnullExprRef` or a list of them
        One or more value definitions with a selection or test predicate.
        Only conditional *value* definitions are allowed here.
    field : :class:`Field`
        Name of the data field to pull a value from, or an object defining
        iterated values from the ``repeat`` operator.  Not required if
        ``aggregate`` is ``count``.
    legend : :class:`Legend` or None
        Legend properties; ``None`` removes the legend for this channel.
    scale : :class:`Scale` or None
        Scale properties; ``None`` disables the scale so data values are
        encoded directly.
    sort : :class:`Sort`
        Sort order for the encoded field ("ascending"/"descending", a
        channel name, a sort-field definition, an explicit value array, or
        ``null`` for no sort).
    timeUnit : :class:`TimeUnit` or :class:`TimeUnitParams`
        Time unit (e.g. "year", "yearmonth", "hours") for a temporal field.
    title : :class:`Text` or None
        Field title; ``None`` removes it.  By default derived from the field
        name and any aggregate/bin/timeUnit transformation.
    type : :class:`TypeForShape`
        Type of measurement ("quantitative", "temporal", "ordinal",
        "nominal", or "geojson") for the encoded field.
    """
    _schema = {'$ref': '#/definitions/FieldOrDatumDefWithCondition<MarkPropFieldDef<TypeForShape>,(string|null)>'}

    def __init__(self, aggregate=Undefined, band=Undefined, bin=Undefined, condition=Undefined,
                 field=Undefined, legend=Undefined, scale=Undefined, sort=Undefined,
                 timeUnit=Undefined, title=Undefined, type=Undefined, **kwds):
        # Named keywords can never collide with keys already in ``kwds``, so
        # a plain update is safe before delegating to the schema base class.
        kwds.update(aggregate=aggregate, band=band, bin=bin, condition=condition, field=field,
                    legend=legend, scale=scale, sort=sort, timeUnit=timeUnit, title=title,
                    type=type)
        super(FieldOrDatumDefWithConditionMarkPropFieldDefTypeForShapestringnull, self).__init__(**kwds)
class SharedEncoding(VegaLiteSchema):
    """SharedEncoding schema wrapper

    Mapping(required=[])

    Encoding channels shared across views.  Most channels accept an empty
    mapping; the structured exceptions are documented below.

    Parameters
    ----------
    detail : :class:`FieldDefWithoutScale` or a list of them
        Additional levels of detail for grouping data in aggregate views and
        in line, trail, and area marks without mapping data to a specific
        visual channel.
    order : :class:`OrderFieldDef`, list of :class:`OrderFieldDef`, or :class:`OrderValueDef`
        Order of the marks: stack order for stacked marks, data-point order
        for line and trail marks, otherwise layer order.  In aggregate plots
        the order field should itself be aggregated to avoid extra grouping.
    tooltip : :class:`StringFieldDefWithCondition`, :class:`StringValueDefWithCondition`, list of :class:`StringFieldDef`, or None
        Tooltip text shown on hover; overrides the ``tooltip`` property in
        the mark definition.
    angle, color, description, fill, fillOpacity, href, key, latitude, \
    latitude2, longitude, longitude2, opacity, radius, radius2, shape, size, \
    stroke, strokeDash, strokeOpacity, strokeWidth, text, theta, theta2, url, \
    x, x2, xError, xError2, y, y2, yError, yError2 : Mapping(required=[])
        Per-channel encoding mappings.
    """
    _schema = {'$ref': '#/definitions/SharedEncoding'}

    def __init__(self, angle=Undefined, color=Undefined, description=Undefined, detail=Undefined,
                 fill=Undefined, fillOpacity=Undefined, href=Undefined, key=Undefined,
                 latitude=Undefined, latitude2=Undefined, longitude=Undefined, longitude2=Undefined,
                 opacity=Undefined, order=Undefined, radius=Undefined, radius2=Undefined,
                 shape=Undefined, size=Undefined, stroke=Undefined, strokeDash=Undefined,
                 strokeOpacity=Undefined, strokeWidth=Undefined, text=Undefined, theta=Undefined,
                 theta2=Undefined, tooltip=Undefined, url=Undefined, x=Undefined, x2=Undefined,
                 xError=Undefined, xError2=Undefined, y=Undefined, y2=Undefined, yError=Undefined,
                 yError2=Undefined, **kwds):
        # Explicit keywords cannot already appear in ``kwds``; merge them and
        # forward everything to the schema base class in a single call.
        kwds.update(angle=angle, color=color, description=description, detail=detail, fill=fill,
                    fillOpacity=fillOpacity, href=href, key=key, latitude=latitude,
                    latitude2=latitude2, longitude=longitude, longitude2=longitude2,
                    opacity=opacity, order=order, radius=radius, radius2=radius2, shape=shape,
                    size=size, stroke=stroke, strokeDash=strokeDash, strokeOpacity=strokeOpacity,
                    strokeWidth=strokeWidth, text=text, theta=theta, theta2=theta2,
                    tooltip=tooltip, url=url, x=x, x2=x2, xError=xError, xError2=xError2,
                    y=y, y2=y2, yError=yError, yError2=yError2)
        super(SharedEncoding, self).__init__(**kwds)
class SingleDefUnitChannel(VegaLiteSchema):
    """SingleDefUnitChannel schema wrapper

    Enumeration of encoding channels that take a single definition:

    enum('x', 'y', 'x2', 'y2', 'longitude', 'latitude', 'longitude2', 'latitude2', 'theta',
    'theta2', 'radius', 'radius2', 'color', 'fill', 'stroke', 'opacity', 'fillOpacity',
    'strokeOpacity', 'strokeWidth', 'strokeDash', 'size', 'angle', 'shape', 'key', 'text',
    'href', 'url', 'description')
    """
    _schema = {'$ref': '#/definitions/SingleDefUnitChannel'}

    def __init__(self, *args):
        # Enum schemas are constructed from a single positional value.
        super(SingleDefUnitChannel, self).__init__(*args)
class SingleSelection(SelectionDef):
    """SingleSelection schema wrapper

    Mapping(required=[type])

    Parameters
    ----------
    type : string
        Default event processing and data query for the selection.
        ``"single"`` selects one discrete value on click; the other
        Vega-Lite selection types are ``"multi"`` (shift-click to toggle
        additional values) and ``"interval"`` (drag a continuous range).
    bind : :class:`Binding`, Mapping, or :class:`LegendBinding`
        Populates the selection from input elements (dynamic query widgets)
        or from the corresponding legend; direct manipulation is then
        disabled unless re-enabled via the ``on`` property.  Legend bindings
        require a single field or encoding.
    clear : :class:`Stream`, string, or bool
        Event stream that empties the selection (default ``dblclick``);
        ``False`` disables clearing.
    empty : 'all' or 'none'
        Whether an empty selection contains all data values (default) or
        none.
    encodings : list of :class:`SingleDefUnitChannel`
        Encoding channels whose data field values must match for a tuple to
        fall within the selection.
    fields : list of :class:`FieldName`
        Field names whose values must match for a tuple to fall within the
        selection.
    init : :class:`SelectionInitMapping`
        Initial values, keyed by projected channel or field name.
    nearest : bool
        When True, an invisible voronoi diagram accelerates discrete
        selection of the value nearest the cursor.
    on : :class:`Stream` or string
        Vega event stream (object or selector) that triggers the selection.
    resolve : :class:`SelectionResolution`
        Strategy for resolving selections' data queries across layered and
        multi-view displays.
    """
    _schema = {'$ref': '#/definitions/SingleSelection'}

    def __init__(self, type=Undefined, bind=Undefined, clear=Undefined, empty=Undefined,
                 encodings=Undefined, fields=Undefined, init=Undefined, nearest=Undefined,
                 on=Undefined, resolve=Undefined, **kwds):
        # Named keywords can never collide with keys already in ``kwds``;
        # merge them and delegate to the schema base class.
        kwds.update(type=type, bind=bind, clear=clear, empty=empty, encodings=encodings,
                    fields=fields, init=init, nearest=nearest, on=on, resolve=resolve)
        super(SingleSelection, self).__init__(**kwds)
class SingleSelectionConfig(VegaLiteSchema):
    """SingleSelectionConfig schema wrapper

    Mapping(required=[])

    Configuration defaults for single selections.

    Parameters
    ----------
    bind : :class:`Binding`, Mapping, or :class:`LegendBinding`
        Populates the selection from input elements (dynamic query widgets)
        or from the corresponding legend; direct manipulation is then
        disabled unless re-enabled via the ``on`` property.  Legend bindings
        require a single field or encoding.
    clear : :class:`Stream`, string, or bool
        Event stream that empties the selection (default ``dblclick``);
        ``False`` disables clearing.
    empty : 'all' or 'none'
        Whether an empty selection contains all data values (default) or
        none.
    encodings : list of :class:`SingleDefUnitChannel`
        Encoding channels whose data field values must match for a tuple to
        fall within the selection.
    fields : list of :class:`FieldName`
        Field names whose values must match for a tuple to fall within the
        selection.
    init : :class:`SelectionInitMapping`
        Initial values, keyed by projected channel or field name.
    nearest : bool
        When True, an invisible voronoi diagram accelerates discrete
        selection of the value nearest the cursor.
    on : :class:`Stream` or string
        Vega event stream (object or selector) that triggers the selection.
    resolve : :class:`SelectionResolution`
        Strategy for resolving selections' data queries across layered and
        multi-view displays.
    """
    _schema = {'$ref': '#/definitions/SingleSelectionConfig'}

    def __init__(self, bind=Undefined, clear=Undefined, empty=Undefined, encodings=Undefined,
                 fields=Undefined, init=Undefined, nearest=Undefined, on=Undefined,
                 resolve=Undefined, **kwds):
        # Named keywords can never collide with keys already in ``kwds``;
        # merge them and delegate to the schema base class.
        kwds.update(bind=bind, clear=clear, empty=empty, encodings=encodings, fields=fields,
                    init=init, nearest=nearest, on=on, resolve=resolve)
        super(SingleSelectionConfig, self).__init__(**kwds)
class Sort(VegaLiteSchema):
    """Sort schema wrapper

    anyOf(:class:`SortArray`, :class:`AllSortString`, :class:`EncodingSortField`,
    :class:`SortByEncoding`, None)

    Union schema: a sort specification may be an explicit value array, a
    sort string, an encoding sort-field definition, a sort-by-encoding
    definition, or ``None`` (no sort).
    """
    _schema = {'$ref': '#/definitions/Sort'}

    def __init__(self, *args, **kwds):
        # anyOf schemas accept either a positional value or keyword mappings.
        super(Sort, self).__init__(*args, **kwds)
class AllSortString(Sort):
    """AllSortString schema wrapper

    anyOf(:class:`SortOrder`, :class:`SortByChannel`, :class:`SortByChannelDesc`)

    Union of all string-valued sort specifications.
    """
    _schema = {'$ref': '#/definitions/AllSortString'}

    def __init__(self, *args, **kwds):
        # anyOf schemas accept either a positional value or keyword mappings.
        super(AllSortString, self).__init__(*args, **kwds)
class EncodingSortField(Sort):
    """EncodingSortField schema wrapper

    Mapping(required=[])

    A sort definition for sorting a discrete scale in an encoding field
    definition.

    Parameters
    ----------
    field : :class:`Field`
        The data field to sort by.  If unspecified, defaults to the field
        specified in the outer data reference.
    op : :class:`NonArgAggregateOp`
        Aggregate operation applied to the field prior to sorting (e.g.
        "count", "mean", "median").  Required when there are multiple values
        of the sort field per encoded data field; defaults to "sum" for
        stacked plots, otherwise "min".
    order : :class:`SortOrder` or None
        "ascending" (default), "descending", or ``None`` for no sort.
    """
    _schema = {'$ref': '#/definitions/EncodingSortField'}

    def __init__(self, field=Undefined, op=Undefined, order=Undefined, **kwds):
        # Explicit keywords cannot already be in ``kwds``; merge and forward.
        kwds.update(field=field, op=op, order=order)
        super(EncodingSortField, self).__init__(**kwds)
class SortArray(Sort):
    """SortArray schema wrapper

    anyOf(List(float), List(string), List(boolean), List(:class:`DateTime`))

    An explicit array of values in preferred sort order.
    """
    _schema = {'$ref': '#/definitions/SortArray'}

    def __init__(self, *args, **kwds):
        # anyOf schemas accept either a positional value or keyword mappings.
        super(SortArray, self).__init__(*args, **kwds)
class SortByChannel(AllSortString):
    """SortByChannel schema wrapper

    Enumeration of encoding channel names usable for ascending sort:

    enum('x', 'y', 'color', 'fill', 'stroke', 'strokeWidth', 'size', 'shape', 'fillOpacity',
    'strokeOpacity', 'opacity', 'text')
    """
    _schema = {'$ref': '#/definitions/SortByChannel'}

    def __init__(self, *args):
        # Enum schemas are constructed from a single positional value.
        super(SortByChannel, self).__init__(*args)
class SortByChannelDesc(AllSortString):
    """SortByChannelDesc schema wrapper

    Enumeration of minus-prefixed channel names for descending sort:

    enum('-x', '-y', '-color', '-fill', '-stroke', '-strokeWidth', '-size', '-shape',
    '-fillOpacity', '-strokeOpacity', '-opacity', '-text')
    """
    _schema = {'$ref': '#/definitions/SortByChannelDesc'}

    def __init__(self, *args):
        # Enum schemas are constructed from a single positional value.
        super(SortByChannelDesc, self).__init__(*args)
class SortByEncoding(Sort):
    """SortByEncoding schema wrapper

    Mapping(required=[encoding])

    Parameters
    ----------
    encoding : :class:`SortByChannel`
        The encoding channel to sort by (e.g. "x", "y").
    order : :class:`SortOrder` or None
        "ascending" (default), "descending", or ``None`` for no sort.
    """
    _schema = {'$ref': '#/definitions/SortByEncoding'}

    def __init__(self, encoding=Undefined, order=Undefined, **kwds):
        # Explicit keywords cannot already be in ``kwds``; merge and forward.
        kwds.update(encoding=encoding, order=order)
        super(SortByEncoding, self).__init__(**kwds)
class SortField(VegaLiteSchema):
    """SortField schema wrapper

    Mapping(required=[field])

    A sort definition for a transform.

    Parameters
    ----------
    field : :class:`FieldName`
        The name of the field to sort.
    order : :class:`SortOrder` or None
        "ascending" (default), "descending", or ``None`` for no sort.
    """
    _schema = {'$ref': '#/definitions/SortField'}

    def __init__(self, field=Undefined, order=Undefined, **kwds):
        # Explicit keywords cannot already be in ``kwds``; merge and forward.
        kwds.update(field=field, order=order)
        super(SortField, self).__init__(**kwds)
class SortOrder(AllSortString):
    """SortOrder schema wrapper

    enum('ascending', 'descending')
    """
    _schema = {'$ref': '#/definitions/SortOrder'}

    def __init__(self, *args):
        # Enum schemas are constructed from a single positional value.
        super(SortOrder, self).__init__(*args)
class Spec(VegaLiteSchema):
    """Spec schema wrapper

    anyOf(:class:`FacetedUnitSpec`, :class:`LayerSpec`, :class:`RepeatSpec`, :class:`FacetSpec`,
    :class:`ConcatSpecGenericSpec`, :class:`VConcatSpecGenericSpec`,
    :class:`HConcatSpecGenericSpec`)

    Any specification in Vega-Lite: a single (possibly faceted) unit view,
    a layered view, or any of the view-composition operators.
    """
    _schema = {'$ref': '#/definitions/Spec'}

    def __init__(self, *args, **kwds):
        # anyOf schemas accept either a positional value or keyword mappings.
        super(Spec, self).__init__(*args, **kwds)
class ConcatSpecGenericSpec(Spec):
    """ConcatSpecGenericSpec schema wrapper

    Mapping(required=[concat])

    Base interface for a generalized concatenation specification.

    Attributes
    ----------
    concat : List(:class:`Spec`)
        A list of views to be concatenated.
    align : anyOf(:class:`LayoutAlign`, :class:`RowColLayoutAlign`)
        The alignment to apply to grid rows and columns. The supported string values are
        ``"all"``, ``"each"``, and ``"none"``.

        * For ``"none"``, a flow layout will be used, in which adjacent subviews are simply
          placed one after the other.
        * For ``"each"``, subviews will be aligned into a clean grid structure, but each row
          or column may be of variable size.
        * For ``"all"``, subviews will be aligned and each row or column will be sized
          identically based on the maximum observed size.

        String values for this property will be applied to both grid rows and columns.
        Alternatively, an object value of the form ``{"row": string, "column": string}`` can
        be used to supply different alignments for rows and columns.

        **Default value:** ``"all"``.
    bounds : enum('full', 'flush')
        The bounds calculation method to use for determining the extent of a sub-plot. One
        of ``full`` (the default) or ``flush``.

        * If set to ``full``, the entire calculated bounds (including axes, title, and
          legend) will be used.
        * If set to ``flush``, only the specified width and height values for the sub-view
          will be used. The ``flush`` setting can be useful when attempting to place
          sub-plots without axes or legends into a uniform grid structure.

        **Default value:** ``"full"``
    center : anyOf(boolean, :class:`RowColboolean`)
        Boolean flag indicating if subviews should be centered relative to their respective
        rows or columns.

        An object value of the form ``{"row": boolean, "column": boolean}`` can be used to
        supply different centering values for rows and columns.

        **Default value:** ``false``
    columns : float
        The number of columns to include in the view composition layout.

        **Default value**: ``undefined`` -- An infinite number of columns (a single row)
        will be assumed. This is equivalent to ``hconcat`` (for ``concat``) and to using
        the ``column`` channel (for ``facet`` and ``repeat``).

        **Note**:

        1) This property is only for: the general (wrappable) ``concat`` operator (not
           ``hconcat`` / ``vconcat``), and the ``facet`` and ``repeat`` operator with one
           field/repetition definition (without row/column nesting).

        2) Setting the ``columns`` to ``1`` is equivalent to ``vconcat`` (for ``concat``)
           and to using the ``row`` channel (for ``facet`` and ``repeat``).
    data : anyOf(:class:`Data`, None)
        An object describing the data source. Set to ``null`` to ignore the parent's data
        source. If no data is set, it is derived from the parent.
    description : string
        Description of this mark for commenting purposes.
    name : string
        Name of the visualization for later reference.
    resolve : :class:`Resolve`
        Scale, axis, and legend resolutions for view composition specifications.
    spacing : anyOf(float, :class:`RowColnumber`)
        The spacing in pixels between sub-views of the composition operator. An object of
        the form ``{"row": number, "column": number}`` can be used to set different spacing
        values for rows and columns.

        **Default value**: Depends on ``"spacing"`` property of `the view composition
        configuration <https://vega.github.io/vega-lite/docs/config.html#view-config>`__
        (``20`` by default)
    title : anyOf(:class:`Text`, :class:`TitleParams`)
        Title for the plot.
    transform : List(:class:`Transform`)
        An array of data transformations such as filter and new field calculation.
    """
    _schema = {'$ref': '#/definitions/ConcatSpec<GenericSpec>'}

    def __init__(self, concat=Undefined, align=Undefined, bounds=Undefined, center=Undefined,
                 columns=Undefined, data=Undefined, description=Undefined, name=Undefined,
                 resolve=Undefined, spacing=Undefined, title=Undefined, transform=Undefined,
                 **kwds):
        # Fold the named schema properties into ``kwds`` and delegate
        # construction to the schema base class.
        kwds.update(concat=concat, align=align, bounds=bounds, center=center, columns=columns,
                    data=data, description=description, name=name, resolve=resolve,
                    spacing=spacing, title=title, transform=transform)
        super(ConcatSpecGenericSpec, self).__init__(**kwds)
class FacetSpec(Spec):
    """FacetSpec schema wrapper

    Mapping(required=[facet, spec])

    Base interface for a facet specification.

    Attributes
    ----------
    facet : anyOf(:class:`FacetFieldDefFieldName`, :class:`FacetMappingFieldName`)
        Definition for how to facet the data. One of:

        1) `a field definition for faceting the plot by one field
           <https://vega.github.io/vega-lite/docs/facet.html#field-def>`__
        2) `An object that maps row and column channels to their field definitions
           <https://vega.github.io/vega-lite/docs/facet.html#mapping>`__
    spec : anyOf(:class:`LayerSpec`, :class:`FacetedUnitSpec`)
        A specification of the view that gets faceted.
    align : anyOf(:class:`LayoutAlign`, :class:`RowColLayoutAlign`)
        The alignment to apply to grid rows and columns. The supported string values are
        ``"all"``, ``"each"``, and ``"none"``.

        * For ``"none"``, a flow layout will be used, in which adjacent subviews are simply
          placed one after the other.
        * For ``"each"``, subviews will be aligned into a clean grid structure, but each row
          or column may be of variable size.
        * For ``"all"``, subviews will be aligned and each row or column will be sized
          identically based on the maximum observed size.

        String values for this property will be applied to both grid rows and columns.
        Alternatively, an object value of the form ``{"row": string, "column": string}`` can
        be used to supply different alignments for rows and columns.

        **Default value:** ``"all"``.
    bounds : enum('full', 'flush')
        The bounds calculation method to use for determining the extent of a sub-plot. One
        of ``full`` (the default) or ``flush``.

        * If set to ``full``, the entire calculated bounds (including axes, title, and
          legend) will be used.
        * If set to ``flush``, only the specified width and height values for the sub-view
          will be used. The ``flush`` setting can be useful when attempting to place
          sub-plots without axes or legends into a uniform grid structure.

        **Default value:** ``"full"``
    center : anyOf(boolean, :class:`RowColboolean`)
        Boolean flag indicating if subviews should be centered relative to their respective
        rows or columns.

        An object value of the form ``{"row": boolean, "column": boolean}`` can be used to
        supply different centering values for rows and columns.

        **Default value:** ``false``
    columns : float
        The number of columns to include in the view composition layout.

        **Default value**: ``undefined`` -- An infinite number of columns (a single row)
        will be assumed. This is equivalent to ``hconcat`` (for ``concat``) and to using
        the ``column`` channel (for ``facet`` and ``repeat``).

        **Note**:

        1) This property is only for: the general (wrappable) ``concat`` operator (not
           ``hconcat`` / ``vconcat``), and the ``facet`` and ``repeat`` operator with one
           field/repetition definition (without row/column nesting).

        2) Setting the ``columns`` to ``1`` is equivalent to ``vconcat`` (for ``concat``)
           and to using the ``row`` channel (for ``facet`` and ``repeat``).
    data : anyOf(:class:`Data`, None)
        An object describing the data source. Set to ``null`` to ignore the parent's data
        source. If no data is set, it is derived from the parent.
    description : string
        Description of this mark for commenting purposes.
    name : string
        Name of the visualization for later reference.
    resolve : :class:`Resolve`
        Scale, axis, and legend resolutions for view composition specifications.
    spacing : anyOf(float, :class:`RowColnumber`)
        The spacing in pixels between sub-views of the composition operator. An object of
        the form ``{"row": number, "column": number}`` can be used to set different spacing
        values for rows and columns.

        **Default value**: Depends on ``"spacing"`` property of `the view composition
        configuration <https://vega.github.io/vega-lite/docs/config.html#view-config>`__
        (``20`` by default)
    title : anyOf(:class:`Text`, :class:`TitleParams`)
        Title for the plot.
    transform : List(:class:`Transform`)
        An array of data transformations such as filter and new field calculation.
    """
    _schema = {'$ref': '#/definitions/FacetSpec'}

    def __init__(self, facet=Undefined, spec=Undefined, align=Undefined, bounds=Undefined,
                 center=Undefined, columns=Undefined, data=Undefined, description=Undefined,
                 name=Undefined, resolve=Undefined, spacing=Undefined, title=Undefined,
                 transform=Undefined, **kwds):
        # Fold the named schema properties into ``kwds`` and delegate
        # construction to the schema base class.
        kwds.update(facet=facet, spec=spec, align=align, bounds=bounds, center=center,
                    columns=columns, data=data, description=description, name=name,
                    resolve=resolve, spacing=spacing, title=title, transform=transform)
        super(FacetSpec, self).__init__(**kwds)
class FacetedUnitSpec(NormalizedSpec, Spec):
    """FacetedUnitSpec schema wrapper

    Mapping(required=[mark])

    Unit spec that can have a composite mark and row or column channels (shorthand for a
    facet spec).

    Attributes
    ----------
    mark : :class:`AnyMark`
        A string describing the mark type (one of ``"bar"``, ``"circle"``, ``"square"``,
        ``"tick"``, ``"line"``, ``"area"``, ``"point"``, ``"rule"``, ``"geoshape"``, and
        ``"text"``) or a `mark definition object
        <https://vega.github.io/vega-lite/docs/mark.html#mark-def>`__.
    align : anyOf(:class:`LayoutAlign`, :class:`RowColLayoutAlign`)
        The alignment to apply to grid rows and columns. The supported string values are
        ``"all"``, ``"each"``, and ``"none"``.

        * For ``"none"``, a flow layout will be used, in which adjacent subviews are simply
          placed one after the other.
        * For ``"each"``, subviews will be aligned into a clean grid structure, but each row
          or column may be of variable size.
        * For ``"all"``, subviews will be aligned and each row or column will be sized
          identically based on the maximum observed size.

        String values for this property will be applied to both grid rows and columns.
        Alternatively, an object value of the form ``{"row": string, "column": string}`` can
        be used to supply different alignments for rows and columns.

        **Default value:** ``"all"``.
    bounds : enum('full', 'flush')
        The bounds calculation method to use for determining the extent of a sub-plot. One
        of ``full`` (the default) or ``flush``.

        * If set to ``full``, the entire calculated bounds (including axes, title, and
          legend) will be used.
        * If set to ``flush``, only the specified width and height values for the sub-view
          will be used. The ``flush`` setting can be useful when attempting to place
          sub-plots without axes or legends into a uniform grid structure.

        **Default value:** ``"full"``
    center : anyOf(boolean, :class:`RowColboolean`)
        Boolean flag indicating if subviews should be centered relative to their respective
        rows or columns.

        An object value of the form ``{"row": boolean, "column": boolean}`` can be used to
        supply different centering values for rows and columns.

        **Default value:** ``false``
    data : anyOf(:class:`Data`, None)
        An object describing the data source. Set to ``null`` to ignore the parent's data
        source. If no data is set, it is derived from the parent.
    description : string
        Description of this mark for commenting purposes.
    encoding : :class:`FacetedEncoding`
        A key-value mapping between encoding channels and definition of fields.
    height : anyOf(float, string, :class:`Step`)
        The height of a visualization.

        * For a plot with a continuous y-field, height should be a number.
        * For a plot with either a discrete y-field or no y-field, height can be either a
          number indicating a fixed height or an object in the form of ``{step: number}``
          defining the height per discrete step. (No y-field is equivalent to having one
          discrete step.)
        * To enable responsive sizing on height, it should be set to ``"container"``.

        **Default value:** Based on ``config.view.continuousHeight`` for a plot with a
        continuous y-field and ``config.view.discreteHeight`` otherwise.

        **Note:** For plots with `row and column channels
        <https://vega.github.io/vega-lite/docs/encoding.html#facet>`__, this represents the
        height of a single view and the ``"container"`` option cannot be used.

        **See also:** `height <https://vega.github.io/vega-lite/docs/size.html>`__
        documentation.
    name : string
        Name of the visualization for later reference.
    projection : :class:`Projection`
        An object defining properties of geographic projection, which will be applied to
        ``shape`` path for ``"geoshape"`` marks and to ``latitude`` and ``"longitude"``
        channels for other marks.
    resolve : :class:`Resolve`
        Scale, axis, and legend resolutions for view composition specifications.
    selection : Mapping(required=[])
        A key-value mapping between selection names and definitions.
    spacing : anyOf(float, :class:`RowColnumber`)
        The spacing in pixels between sub-views of the composition operator. An object of
        the form ``{"row": number, "column": number}`` can be used to set different spacing
        values for rows and columns.

        **Default value**: Depends on ``"spacing"`` property of `the view composition
        configuration <https://vega.github.io/vega-lite/docs/config.html#view-config>`__
        (``20`` by default)
    title : anyOf(:class:`Text`, :class:`TitleParams`)
        Title for the plot.
    transform : List(:class:`Transform`)
        An array of data transformations such as filter and new field calculation.
    view : :class:`ViewBackground`
        An object defining the view background's fill and stroke.

        **Default value:** none (transparent)
    width : anyOf(float, string, :class:`Step`)
        The width of a visualization.

        * For a plot with a continuous x-field, width should be a number.
        * For a plot with either a discrete x-field or no x-field, width can be either a
          number indicating a fixed width or an object in the form of ``{step: number}``
          defining the width per discrete step. (No x-field is equivalent to having one
          discrete step.)
        * To enable responsive sizing on width, it should be set to ``"container"``.

        **Default value:** Based on ``config.view.continuousWidth`` for a plot with a
        continuous x-field and ``config.view.discreteWidth`` otherwise.

        **Note:** For plots with `row and column channels
        <https://vega.github.io/vega-lite/docs/encoding.html#facet>`__, this represents the
        width of a single view and the ``"container"`` option cannot be used.

        **See also:** `width <https://vega.github.io/vega-lite/docs/size.html>`__
        documentation.
    """
    _schema = {'$ref': '#/definitions/FacetedUnitSpec'}

    def __init__(self, mark=Undefined, align=Undefined, bounds=Undefined, center=Undefined,
                 data=Undefined, description=Undefined, encoding=Undefined, height=Undefined,
                 name=Undefined, projection=Undefined, resolve=Undefined, selection=Undefined,
                 spacing=Undefined, title=Undefined, transform=Undefined, view=Undefined,
                 width=Undefined, **kwds):
        # Fold the named schema properties into ``kwds`` and delegate
        # construction to the schema base class.
        kwds.update(mark=mark, align=align, bounds=bounds, center=center, data=data,
                    description=description, encoding=encoding, height=height, name=name,
                    projection=projection, resolve=resolve, selection=selection,
                    spacing=spacing, title=title, transform=transform, view=view, width=width)
        super(FacetedUnitSpec, self).__init__(**kwds)
class HConcatSpecGenericSpec(Spec):
    """HConcatSpecGenericSpec schema wrapper

    Mapping(required=[hconcat])

    Base interface for a horizontal concatenation specification.

    Attributes
    ----------
    hconcat : List(:class:`Spec`)
        A list of views to be concatenated and put into a row.
    bounds : enum('full', 'flush')
        The bounds calculation method to use for determining the extent of a sub-plot. One
        of ``full`` (the default) or ``flush``.

        * If set to ``full``, the entire calculated bounds (including axes, title, and
          legend) will be used.
        * If set to ``flush``, only the specified width and height values for the sub-view
          will be used. The ``flush`` setting can be useful when attempting to place
          sub-plots without axes or legends into a uniform grid structure.

        **Default value:** ``"full"``
    center : boolean
        Boolean flag indicating if subviews should be centered relative to their respective
        rows or columns.

        **Default value:** ``false``
    data : anyOf(:class:`Data`, None)
        An object describing the data source. Set to ``null`` to ignore the parent's data
        source. If no data is set, it is derived from the parent.
    description : string
        Description of this mark for commenting purposes.
    name : string
        Name of the visualization for later reference.
    resolve : :class:`Resolve`
        Scale, axis, and legend resolutions for view composition specifications.
    spacing : float
        The spacing in pixels between sub-views of the concat operator.

        **Default value**: ``10``
    title : anyOf(:class:`Text`, :class:`TitleParams`)
        Title for the plot.
    transform : List(:class:`Transform`)
        An array of data transformations such as filter and new field calculation.
    """
    _schema = {'$ref': '#/definitions/HConcatSpec<GenericSpec>'}

    def __init__(self, hconcat=Undefined, bounds=Undefined, center=Undefined, data=Undefined,
                 description=Undefined, name=Undefined, resolve=Undefined, spacing=Undefined,
                 title=Undefined, transform=Undefined, **kwds):
        # Fold the named schema properties into ``kwds`` and delegate
        # construction to the schema base class.
        kwds.update(hconcat=hconcat, bounds=bounds, center=center, data=data,
                    description=description, name=name, resolve=resolve, spacing=spacing,
                    title=title, transform=transform)
        super(HConcatSpecGenericSpec, self).__init__(**kwds)
class LayerSpec(NormalizedSpec, Spec):
    """LayerSpec schema wrapper

    Mapping(required=[layer])

    A full layered plot specification, which may contains ``encoding`` and ``projection``
    properties that will be applied to underlying unit (single-view) specifications.

    Attributes
    ----------
    layer : List(anyOf(:class:`LayerSpec`, :class:`UnitSpec`))
        Layer or single view specifications to be layered.

        **Note**: Specifications inside ``layer`` cannot use ``row`` and ``column``
        channels as layering facet specifications is not allowed. Instead, use the `facet
        operator <https://vega.github.io/vega-lite/docs/facet.html>`__ and place a layer
        inside a facet.
    data : anyOf(:class:`Data`, None)
        An object describing the data source. Set to ``null`` to ignore the parent's data
        source. If no data is set, it is derived from the parent.
    description : string
        Description of this mark for commenting purposes.
    encoding : :class:`SharedEncoding`
        A shared key-value mapping between encoding channels and definition of fields in the
        underlying layers.
    height : anyOf(float, string, :class:`Step`)
        The height of a visualization.

        * For a plot with a continuous y-field, height should be a number.
        * For a plot with either a discrete y-field or no y-field, height can be either a
          number indicating a fixed height or an object in the form of ``{step: number}``
          defining the height per discrete step. (No y-field is equivalent to having one
          discrete step.)
        * To enable responsive sizing on height, it should be set to ``"container"``.

        **Default value:** Based on ``config.view.continuousHeight`` for a plot with a
        continuous y-field and ``config.view.discreteHeight`` otherwise.

        **Note:** For plots with `row and column channels
        <https://vega.github.io/vega-lite/docs/encoding.html#facet>`__, this represents the
        height of a single view and the ``"container"`` option cannot be used.

        **See also:** `height <https://vega.github.io/vega-lite/docs/size.html>`__
        documentation.
    name : string
        Name of the visualization for later reference.
    projection : :class:`Projection`
        An object defining properties of the geographic projection shared by underlying
        layers.
    resolve : :class:`Resolve`
        Scale, axis, and legend resolutions for view composition specifications.
    title : anyOf(:class:`Text`, :class:`TitleParams`)
        Title for the plot.
    transform : List(:class:`Transform`)
        An array of data transformations such as filter and new field calculation.
    view : :class:`ViewBackground`
        An object defining the view background's fill and stroke.

        **Default value:** none (transparent)
    width : anyOf(float, string, :class:`Step`)
        The width of a visualization.

        * For a plot with a continuous x-field, width should be a number.
        * For a plot with either a discrete x-field or no x-field, width can be either a
          number indicating a fixed width or an object in the form of ``{step: number}``
          defining the width per discrete step. (No x-field is equivalent to having one
          discrete step.)
        * To enable responsive sizing on width, it should be set to ``"container"``.

        **Default value:** Based on ``config.view.continuousWidth`` for a plot with a
        continuous x-field and ``config.view.discreteWidth`` otherwise.

        **Note:** For plots with `row and column channels
        <https://vega.github.io/vega-lite/docs/encoding.html#facet>`__, this represents the
        width of a single view and the ``"container"`` option cannot be used.

        **See also:** `width <https://vega.github.io/vega-lite/docs/size.html>`__
        documentation.
    """
    _schema = {'$ref': '#/definitions/LayerSpec'}

    def __init__(self, layer=Undefined, data=Undefined, description=Undefined, encoding=Undefined,
                 height=Undefined, name=Undefined, projection=Undefined, resolve=Undefined,
                 title=Undefined, transform=Undefined, view=Undefined, width=Undefined, **kwds):
        # Fold the named schema properties into ``kwds`` and delegate
        # construction to the schema base class.
        kwds.update(layer=layer, data=data, description=description, encoding=encoding,
                    height=height, name=name, projection=projection, resolve=resolve,
                    title=title, transform=transform, view=view, width=width)
        super(LayerSpec, self).__init__(**kwds)
class RepeatSpec(NormalizedSpec, Spec):
    """RepeatSpec schema wrapper

    anyOf(:class:`NonLayerRepeatSpec`, :class:`LayerRepeatSpec`)
    """
    _schema = {'$ref': '#/definitions/RepeatSpec'}

    def __init__(self, *args, **kwds):
        # Union wrapper: all arguments are passed through unchanged to the
        # schema base class.
        super(RepeatSpec, self).__init__(*args, **kwds)
class LayerRepeatSpec(RepeatSpec):
    """LayerRepeatSpec schema wrapper

    Mapping(required=[repeat, spec])

    Attributes
    ----------
    repeat : :class:`LayerRepeatMapping`
        Definition for fields to be repeated. One of:

        1) An array of fields to be repeated. If ``"repeat"`` is an array, the field can be
           referred to as ``{"repeat": "repeat"}``. The repeated views are laid out in a
           wrapped row. You can set the number of columns to control the wrapping.
        2) An object that maps ``"row"`` and/or ``"column"`` to the listed fields to be
           repeated along the particular orientations. The objects ``{"repeat": "row"}`` and
           ``{"repeat": "column"}`` can be used to refer to the repeated field respectively.
    spec : anyOf(:class:`LayerSpec`, :class:`UnitSpec`)
        A specification of the view that gets repeated.
    align : anyOf(:class:`LayoutAlign`, :class:`RowColLayoutAlign`)
        The alignment to apply to grid rows and columns. The supported string values are
        ``"all"``, ``"each"``, and ``"none"``.

        * For ``"none"``, a flow layout will be used, in which adjacent subviews are simply
          placed one after the other.
        * For ``"each"``, subviews will be aligned into a clean grid structure, but each row
          or column may be of variable size.
        * For ``"all"``, subviews will be aligned and each row or column will be sized
          identically based on the maximum observed size.

        String values for this property will be applied to both grid rows and columns.
        Alternatively, an object value of the form ``{"row": string, "column": string}`` can
        be used to supply different alignments for rows and columns.

        **Default value:** ``"all"``.
    bounds : enum('full', 'flush')
        The bounds calculation method to use for determining the extent of a sub-plot. One
        of ``full`` (the default) or ``flush``.

        * If set to ``full``, the entire calculated bounds (including axes, title, and
          legend) will be used.
        * If set to ``flush``, only the specified width and height values for the sub-view
          will be used. The ``flush`` setting can be useful when attempting to place
          sub-plots without axes or legends into a uniform grid structure.

        **Default value:** ``"full"``
    center : anyOf(boolean, :class:`RowColboolean`)
        Boolean flag indicating if subviews should be centered relative to their respective
        rows or columns.

        An object value of the form ``{"row": boolean, "column": boolean}`` can be used to
        supply different centering values for rows and columns.

        **Default value:** ``false``
    columns : float
        The number of columns to include in the view composition layout.

        **Default value**: ``undefined`` -- An infinite number of columns (a single row)
        will be assumed. This is equivalent to ``hconcat`` (for ``concat``) and to using
        the ``column`` channel (for ``facet`` and ``repeat``).

        **Note**:

        1) This property is only for: the general (wrappable) ``concat`` operator (not
           ``hconcat`` / ``vconcat``), and the ``facet`` and ``repeat`` operator with one
           field/repetition definition (without row/column nesting).

        2) Setting the ``columns`` to ``1`` is equivalent to ``vconcat`` (for ``concat``)
           and to using the ``row`` channel (for ``facet`` and ``repeat``).
    data : anyOf(:class:`Data`, None)
        An object describing the data source. Set to ``null`` to ignore the parent's data
        source. If no data is set, it is derived from the parent.
    description : string
        Description of this mark for commenting purposes.
    name : string
        Name of the visualization for later reference.
    resolve : :class:`Resolve`
        Scale, axis, and legend resolutions for view composition specifications.
    spacing : anyOf(float, :class:`RowColnumber`)
        The spacing in pixels between sub-views of the composition operator. An object of
        the form ``{"row": number, "column": number}`` can be used to set different spacing
        values for rows and columns.

        **Default value**: Depends on ``"spacing"`` property of `the view composition
        configuration <https://vega.github.io/vega-lite/docs/config.html#view-config>`__
        (``20`` by default)
    title : anyOf(:class:`Text`, :class:`TitleParams`)
        Title for the plot.
    transform : List(:class:`Transform`)
        An array of data transformations such as filter and new field calculation.
    """
    _schema = {'$ref': '#/definitions/LayerRepeatSpec'}

    def __init__(self, repeat=Undefined, spec=Undefined, align=Undefined, bounds=Undefined,
                 center=Undefined, columns=Undefined, data=Undefined, description=Undefined,
                 name=Undefined, resolve=Undefined, spacing=Undefined, title=Undefined,
                 transform=Undefined, **kwds):
        # Fold the named schema properties into ``kwds`` and delegate
        # construction to the schema base class.
        kwds.update(repeat=repeat, spec=spec, align=align, bounds=bounds, center=center,
                    columns=columns, data=data, description=description, name=name,
                    resolve=resolve, spacing=spacing, title=title, transform=transform)
        super(LayerRepeatSpec, self).__init__(**kwds)
class NonLayerRepeatSpec(RepeatSpec):
    """NonLayerRepeatSpec schema wrapper

    Mapping(required=[repeat, spec])

    Base interface for a repeat specification.

    Attributes
    ----------
    repeat : anyOf(List(string), :class:`RepeatMapping`)
        Definition for fields to be repeated. One of:

        1) An array of fields to be repeated. If ``"repeat"`` is an array, the field can be
           referred to as ``{"repeat": "repeat"}``. The repeated views are laid out in a
           wrapped row. You can set the number of columns to control the wrapping.
        2) An object that maps ``"row"`` and/or ``"column"`` to the listed fields to be
           repeated along the particular orientations. The objects ``{"repeat": "row"}`` and
           ``{"repeat": "column"}`` can be used to refer to the repeated field respectively.
    spec : :class:`Spec`
        A specification of the view that gets repeated.
    align : anyOf(:class:`LayoutAlign`, :class:`RowColLayoutAlign`)
        The alignment to apply to grid rows and columns. The supported string values are
        ``"all"``, ``"each"``, and ``"none"``.

        * For ``"none"``, a flow layout will be used, in which adjacent subviews are simply
          placed one after the other.
        * For ``"each"``, subviews will be aligned into a clean grid structure, but each row
          or column may be of variable size.
        * For ``"all"``, subviews will be aligned and each row or column will be sized
          identically based on the maximum observed size.

        String values for this property will be applied to both grid rows and columns.
        Alternatively, an object value of the form ``{"row": string, "column": string}`` can
        be used to supply different alignments for rows and columns.

        **Default value:** ``"all"``.
    bounds : enum('full', 'flush')
        The bounds calculation method to use for determining the extent of a sub-plot. One
        of ``full`` (the default) or ``flush``.

        * If set to ``full``, the entire calculated bounds (including axes, title, and
          legend) will be used.
        * If set to ``flush``, only the specified width and height values for the sub-view
          will be used. The ``flush`` setting can be useful when attempting to place
          sub-plots without axes or legends into a uniform grid structure.

        **Default value:** ``"full"``
    center : anyOf(boolean, :class:`RowColboolean`)
        Boolean flag indicating if subviews should be centered relative to their respective
        rows or columns.

        An object value of the form ``{"row": boolean, "column": boolean}`` can be used to
        supply different centering values for rows and columns.

        **Default value:** ``false``
    columns : float
        The number of columns to include in the view composition layout.

        **Default value**: ``undefined`` -- An infinite number of columns (a single row)
        will be assumed. This is equivalent to ``hconcat`` (for ``concat``) and to using
        the ``column`` channel (for ``facet`` and ``repeat``).

        **Note**:

        1) This property is only for: the general (wrappable) ``concat`` operator (not
           ``hconcat`` / ``vconcat``), and the ``facet`` and ``repeat`` operator with one
           field/repetition definition (without row/column nesting).

        2) Setting the ``columns`` to ``1`` is equivalent to ``vconcat`` (for ``concat``)
           and to using the ``row`` channel (for ``facet`` and ``repeat``).
    data : anyOf(:class:`Data`, None)
        An object describing the data source. Set to ``null`` to ignore the parent's data
        source. If no data is set, it is derived from the parent.
    description : string
        Description of this mark for commenting purposes.
    name : string
        Name of the visualization for later reference.
    resolve : :class:`Resolve`
        Scale, axis, and legend resolutions for view composition specifications.
    spacing : anyOf(float, :class:`RowColnumber`)
        The spacing in pixels between sub-views of the composition operator. An object of
        the form ``{"row": number, "column": number}`` can be used to set different spacing
        values for rows and columns.

        **Default value**: Depends on ``"spacing"`` property of `the view composition
        configuration <https://vega.github.io/vega-lite/docs/config.html#view-config>`__
        (``20`` by default)
    title : anyOf(:class:`Text`, :class:`TitleParams`)
        Title for the plot.
    transform : List(:class:`Transform`)
        An array of data transformations such as filter and new field calculation.
    """
    _schema = {'$ref': '#/definitions/NonLayerRepeatSpec'}

    def __init__(self, repeat=Undefined, spec=Undefined, align=Undefined, bounds=Undefined,
                 center=Undefined, columns=Undefined, data=Undefined, description=Undefined,
                 name=Undefined, resolve=Undefined, spacing=Undefined, title=Undefined,
                 transform=Undefined, **kwds):
        # Fold the named schema properties into ``kwds`` and delegate
        # construction to the schema base class.
        kwds.update(repeat=repeat, spec=spec, align=align, bounds=bounds, center=center,
                    columns=columns, data=data, description=description, name=name,
                    resolve=resolve, spacing=spacing, title=title, transform=transform)
        super(NonLayerRepeatSpec, self).__init__(**kwds)
class SphereGenerator(Generator):
    """SphereGenerator schema wrapper

    Mapping(required=[sphere])

    Attributes
    ----------
    sphere : anyOf(boolean, Mapping(required=[]))
        Generate sphere GeoJSON data for the full globe.
    name : string
        Provide a placeholder name and bind data at runtime.
    """
    _schema = {'$ref': '#/definitions/SphereGenerator'}

    def __init__(self, sphere=Undefined, name=Undefined, **kwds):
        # Fold the named schema properties into ``kwds`` and delegate
        # construction to the schema base class.
        kwds.update(sphere=sphere, name=name)
        super(SphereGenerator, self).__init__(**kwds)
class StackOffset(VegaLiteSchema):
    """StackOffset schema wrapper

    enum('zero', 'center', 'normalize')
    """
    _schema = {'$ref': '#/definitions/StackOffset'}

    def __init__(self, *args):
        # Positional passthrough: the enum value is handed unchanged to the
        # schema base class.
        super(StackOffset, self).__init__(*args)
class StandardType(VegaLiteSchema):
    """StandardType schema wrapper

    enum('quantitative', 'ordinal', 'temporal', 'nominal')
    """
    _schema = {'$ref': '#/definitions/StandardType'}

    def __init__(self, *args):
        # Positional passthrough: the enum value is handed unchanged to the
        # schema base class.
        super(StandardType, self).__init__(*args)
class Step(VegaLiteSchema):
    """Step schema wrapper

    Mapping(required=[step])

    Attributes
    ----------
    step : float
        The size (width/height) per discrete step.
    """
    _schema = {'$ref': '#/definitions/Step'}

    def __init__(self, step=Undefined, **kwds):
        # Fold the named schema property into ``kwds`` and delegate
        # construction to the schema base class.
        kwds.update(step=step)
        super(Step, self).__init__(**kwds)
class Stream(VegaLiteSchema):
    """Stream schema wrapper

    anyOf(:class:`EventStream`, :class:`DerivedStream`, :class:`MergedStream`)
    """
    _schema = {'$ref': '#/definitions/Stream'}

    def __init__(self, *args, **kwds):
        # Union wrapper: all arguments are passed through unchanged to the
        # schema base class.
        super(Stream, self).__init__(*args, **kwds)
class DerivedStream(Stream):
    """DerivedStream schema wrapper.

    Mapping(required=[stream])

    Attributes
    ----------
    stream : :class:`Stream`
    between : List(:class:`Stream`)
    consume : boolean
    debounce : float
    filter : anyOf(:class:`Expr`, List(:class:`Expr`))
    markname : string
    marktype : :class:`MarkType`
    throttle : float
    """
    _schema = {'$ref': '#/definitions/DerivedStream'}

    def __init__(self, stream=Undefined, between=Undefined, consume=Undefined,
                 debounce=Undefined, filter=Undefined, markname=Undefined,
                 marktype=Undefined, throttle=Undefined, **kwds):
        # Forward all stream modifiers to the generated-schema base class.
        super(DerivedStream, self).__init__(
            stream=stream, between=between, consume=consume, debounce=debounce,
            filter=filter, markname=markname, marktype=marktype, throttle=throttle,
            **kwds)
class EventStream(Stream):
    """EventStream schema wrapper.

    anyOf(Mapping(required=[type]), Mapping(required=[source, type]))
    """
    _schema = {'$ref': '#/definitions/EventStream'}

    def __init__(self, *args, **kwds):
        # Union wrapper: accepts whatever form matches one of the variants.
        super(EventStream, self).__init__(*args, **kwds)
class MergedStream(Stream):
    """MergedStream schema wrapper.

    Mapping(required=[merge])

    Attributes
    ----------
    merge : List(:class:`Stream`)
    between : List(:class:`Stream`)
    consume : boolean
    debounce : float
    filter : anyOf(:class:`Expr`, List(:class:`Expr`))
    markname : string
    marktype : :class:`MarkType`
    throttle : float
    """
    _schema = {'$ref': '#/definitions/MergedStream'}

    def __init__(self, merge=Undefined, between=Undefined, consume=Undefined,
                 debounce=Undefined, filter=Undefined, markname=Undefined,
                 marktype=Undefined, throttle=Undefined, **kwds):
        # Forward all stream modifiers to the generated-schema base class.
        super(MergedStream, self).__init__(
            merge=merge, between=between, consume=consume, debounce=debounce,
            filter=filter, markname=markname, marktype=marktype, throttle=throttle,
            **kwds)
class StringFieldDef(VegaLiteSchema):
    """StringFieldDef schema wrapper.

    Mapping(required=[])

    Attributes
    ----------
    aggregate : :class:`Aggregate`
        Aggregation function for the field (e.g. ``"mean"``, ``"sum"``,
        ``"median"``, ``"min"``, ``"max"``, ``"count"``).
    band : float
        For rect-based marks (``rect``, ``bar``, ``image``), mark size relative
        to the band of band scales, bins, or time units.  For other marks,
        relative position on a band of a stacked/binned/time-unit/band scale.
    bin : anyOf(boolean, :class:`BinParams`, string, None)
        A flag for binning a ``quantitative`` field, an object defining binning
        parameters, or ``"binned"`` to indicate the data are pre-binned.
    field : :class:`Field`
        **Required.** The name of the field from which to pull a data value,
        or an object defining iterated values from the ``repeat`` operator.
        Not required if ``aggregate`` is ``count``.
    format : anyOf(string, :class:`Dictunknown`)
        Text formatting pattern for labels of guides (axes, legends, headers)
        and text marks; a D3 number/time format pattern, or a value passed to a
        registered custom ``formatType``.
    formatType : string
        One of ``"number"``, ``"time"``, or a registered custom format type.
    labelExpr : string
        Vega expression for customizing label text; the label text and value
        are available via the ``label`` and ``value`` datum properties.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit (e.g. ``year``, ``yearmonth``, ``month``, ``hours``) for a
        temporal field.
    title : anyOf(:class:`Text`, None)
        A title for the field; if ``None``, the title is removed.
    type : :class:`StandardType`
        The type of measurement (``"quantitative"``, ``"temporal"``,
        ``"ordinal"``, or ``"nominal"``) for the encoded field.

    See https://vega.github.io/vega-lite/docs/ for the full property reference.
    """
    _schema = {'$ref': '#/definitions/StringFieldDef'}

    def __init__(self, aggregate=Undefined, band=Undefined, bin=Undefined, field=Undefined,
                 format=Undefined, formatType=Undefined, labelExpr=Undefined,
                 timeUnit=Undefined, title=Undefined, type=Undefined, **kwds):
        # All properties are forwarded unchanged to the generated-schema base.
        super(StringFieldDef, self).__init__(
            aggregate=aggregate, band=band, bin=bin, field=field, format=format,
            formatType=formatType, labelExpr=labelExpr, timeUnit=timeUnit,
            title=title, type=type, **kwds)
class StringFieldDefWithCondition(VegaLiteSchema):
    """StringFieldDefWithCondition schema wrapper.

    Mapping(required=[])

    A FieldDef with Condition :raw-html:`<ValueDef>`
    ``{condition: {value: ...}, field: ..., ...}``

    Attributes
    ----------
    aggregate : :class:`Aggregate`
        Aggregation function for the field (e.g. ``"mean"``, ``"sum"``,
        ``"median"``, ``"min"``, ``"max"``, ``"count"``).
    band : float
        For rect-based marks (``rect``, ``bar``, ``image``), mark size relative
        to the band of band scales, bins, or time units.  For other marks,
        relative position on a band of a stacked/binned/time-unit/band scale.
    bin : anyOf(boolean, :class:`BinParams`, string, None)
        A flag for binning a ``quantitative`` field, an object defining binning
        parameters, or ``"binned"`` to indicate the data are pre-binned.
    condition : anyOf(:class:`ConditionalValueDefstringExprRef`,
                      List(:class:`ConditionalValueDefstringExprRef`))
        One or more value definition(s) with a selection or a test predicate.
        A field definition's ``condition`` may only contain conditional value
        definitions, since at most one encoded field per channel is allowed.
    field : :class:`Field`
        **Required.** The name of the field from which to pull a data value,
        or an object defining iterated values from the ``repeat`` operator.
        Not required if ``aggregate`` is ``count``.
    format : anyOf(string, :class:`Dictunknown`)
        Text formatting pattern for labels of guides (axes, legends, headers)
        and text marks; a D3 number/time format pattern, or a value passed to a
        registered custom ``formatType``.
    formatType : string
        One of ``"number"``, ``"time"``, or a registered custom format type.
    labelExpr : string
        Vega expression for customizing label text.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit (e.g. ``year``, ``yearmonth``, ``month``, ``hours``) for a
        temporal field.
    title : anyOf(:class:`Text`, None)
        A title for the field; if ``None``, the title is removed.
    type : :class:`StandardType`
        The type of measurement (``"quantitative"``, ``"temporal"``,
        ``"ordinal"``, or ``"nominal"``) for the encoded field.

    See https://vega.github.io/vega-lite/docs/ for the full property reference.
    """
    _schema = {'$ref': '#/definitions/StringFieldDefWithCondition'}

    def __init__(self, aggregate=Undefined, band=Undefined, bin=Undefined,
                 condition=Undefined, field=Undefined, format=Undefined,
                 formatType=Undefined, labelExpr=Undefined, timeUnit=Undefined,
                 title=Undefined, type=Undefined, **kwds):
        # All properties are forwarded unchanged to the generated-schema base.
        super(StringFieldDefWithCondition, self).__init__(
            aggregate=aggregate, band=band, bin=bin, condition=condition,
            field=field, format=format, formatType=formatType,
            labelExpr=labelExpr, timeUnit=timeUnit, title=title, type=type,
            **kwds)
class StringValueDefWithCondition(VegaLiteSchema):
    """StringValueDefWithCondition schema wrapper.

    Mapping(required=[])

    Attributes
    ----------
    condition : anyOf(:class:`ConditionalMarkPropFieldOrDatumDef`,
                      :class:`ConditionalValueDefstringnullExprRef`,
                      List(:class:`ConditionalValueDefstringnullExprRef`))
        A field definition or one or more value definition(s) with a selection
        predicate.
    value : anyOf(string, None, :class:`ExprRef`)
        A constant value in visual domain (e.g. ``"red"`` / ``"#0099ff"`` /
        gradient definition for color, values between ``0`` and ``1`` for
        opacity).
    """
    _schema = {'$ref': '#/definitions/StringValueDefWithCondition'}

    def __init__(self, condition=Undefined, value=Undefined, **kwds):
        # Delegate property handling to the generated-schema base class.
        super(StringValueDefWithCondition, self).__init__(
            condition=condition, value=value, **kwds)
class StrokeCap(VegaLiteSchema):
    """StrokeCap schema wrapper.

    One of ``'butt'``, ``'round'``, ``'square'``.
    """
    _schema = {'$ref': '#/definitions/StrokeCap'}

    def __init__(self, *args):
        # Enum wrapper: the single positional argument is the enum string.
        super(StrokeCap, self).__init__(*args)
class StrokeJoin(VegaLiteSchema):
    """StrokeJoin schema wrapper.

    One of ``'miter'``, ``'round'``, ``'bevel'``.
    """
    _schema = {'$ref': '#/definitions/StrokeJoin'}

    def __init__(self, *args):
        # Enum wrapper: the single positional argument is the enum string.
        super(StrokeJoin, self).__init__(*args)
class StyleConfigIndex(VegaLiteSchema):
    """StyleConfigIndex schema wrapper.

    Mapping(required=[])

    Attributes
    ----------
    arc : :class:`RectConfig`
        Arc-specific config.
    area : :class:`AreaConfig`
        Area-specific config.
    bar : :class:`BarConfig`
        Bar-specific config.
    circle : :class:`MarkConfig`
        Circle-specific config.
    geoshape : :class:`MarkConfig`
        Geoshape-specific config.
    image : :class:`RectConfig`
        Image-specific config.
    line : :class:`LineConfig`
        Line-specific config.
    mark : :class:`MarkConfig`
        Mark config.
    point : :class:`MarkConfig`
        Point-specific config.
    rect : :class:`RectConfig`
        Rect-specific config.
    rule : :class:`MarkConfig`
        Rule-specific config.
    square : :class:`MarkConfig`
        Square-specific config.
    text : :class:`MarkConfig`
        Text-specific config.
    tick : :class:`TickConfig`
        Tick-specific config.
    trail : :class:`LineConfig`
        Trail-specific config.
    group-subtitle : :class:`MarkConfig`
        Default style for chart subtitles (hyphenated: only via ``**kwds``).
    group-title : :class:`MarkConfig`
        Default style for chart titles (hyphenated: only via ``**kwds``).
    guide-label : :class:`MarkConfig`
        Default style for axis, legend, and header labels.
    guide-title : :class:`MarkConfig`
        Default style for axis, legend, and header titles.
    """
    _schema = {'$ref': '#/definitions/StyleConfigIndex'}

    def __init__(self, arc=Undefined, area=Undefined, bar=Undefined, circle=Undefined,
                 geoshape=Undefined, image=Undefined, line=Undefined, mark=Undefined,
                 point=Undefined, rect=Undefined, rule=Undefined, square=Undefined,
                 text=Undefined, tick=Undefined, trail=Undefined, **kwds):
        # Hyphenated style names (e.g. "guide-label") cannot be keyword
        # parameters and must be supplied through **kwds.
        super(StyleConfigIndex, self).__init__(
            arc=arc, area=area, bar=bar, circle=circle, geoshape=geoshape,
            image=image, line=line, mark=mark, point=point, rect=rect,
            rule=rule, square=square, text=text, tick=tick, trail=trail,
            **kwds)
class SymbolShape(VegaLiteSchema):
    """SymbolShape schema wrapper.

    A plain string (symbol shape name or SVG path).
    """
    _schema = {'$ref': '#/definitions/SymbolShape'}

    def __init__(self, *args):
        # String wrapper: the single positional argument is the shape string.
        super(SymbolShape, self).__init__(*args)
class Text(VegaLiteSchema):
    """Text schema wrapper.

    anyOf(string, List(string))
    """
    _schema = {'$ref': '#/definitions/Text'}

    def __init__(self, *args, **kwds):
        # Union wrapper: accepts a string or a list of strings.
        super(Text, self).__init__(*args, **kwds)
class TextBaseline(VegaLiteSchema):
    """TextBaseline schema wrapper.

    anyOf(string, :class:`Baseline`, string, string)
    """
    _schema = {'$ref': '#/definitions/TextBaseline'}

    def __init__(self, *args, **kwds):
        # Union wrapper: accepts whatever form matches one of the variants.
        super(TextBaseline, self).__init__(*args, **kwds)
class Baseline(TextBaseline):
    """Baseline schema wrapper.

    One of ``'top'``, ``'middle'``, ``'bottom'``.
    """
    _schema = {'$ref': '#/definitions/Baseline'}

    def __init__(self, *args):
        # Enum wrapper: the single positional argument is the enum string.
        super(Baseline, self).__init__(*args)
class TextDef(VegaLiteSchema):
    """TextDef schema wrapper.

    anyOf(:class:`FieldOrDatumDefWithConditionStringFieldDefText`,
          :class:`FieldOrDatumDefWithConditionStringDatumDefText`,
          :class:`ValueDefWithConditionStringFieldDefText`)
    """
    _schema = {'$ref': '#/definitions/TextDef'}

    def __init__(self, *args, **kwds):
        # Union wrapper: accepts whatever form matches one of the variants.
        super(TextDef, self).__init__(*args, **kwds)
class FieldOrDatumDefWithConditionStringDatumDefText(TextDef):
    """FieldOrDatumDefWithConditionStringDatumDefText schema wrapper.

    Mapping(required=[])

    A FieldDef with Condition :raw-html:`<ValueDef>`
    ``{condition: {value: ...}, field: ..., ...}``

    Attributes
    ----------
    band : float
        For rect-based marks (``rect``, ``bar``, ``image``), mark size relative
        to the band of band scales, bins, or time units.  For other marks,
        relative position on a band of a stacked/binned/time-unit/band scale.
    condition : anyOf(:class:`ConditionalValueDefTextExprRef`,
                      List(:class:`ConditionalValueDefTextExprRef`))
        One or more value definition(s) with a selection or a test predicate.
        A field definition's ``condition`` may only contain conditional value
        definitions, since at most one encoded field per channel is allowed.
    datum : anyOf(:class:`PrimitiveValue`, :class:`DateTime`, :class:`ExprRef`,
                  :class:`RepeatRef`)
        A constant value in data domain.
    format : anyOf(string, :class:`Dictunknown`)
        Text formatting pattern for labels of guides (axes, legends, headers)
        and text marks; a D3 number/time format pattern, or a value passed to a
        registered custom ``formatType``.
    formatType : string
        One of ``"number"``, ``"time"``, or a registered custom format type.
    labelExpr : string
        Vega expression for customizing label text.
    type : :class:`Type`
        The type of measurement (``"quantitative"``, ``"temporal"``,
        ``"ordinal"``, or ``"nominal"``) for the encoded field or constant
        value (``datum``).

    See https://vega.github.io/vega-lite/docs/ for the full property reference.
    """
    _schema = {'$ref': '#/definitions/FieldOrDatumDefWithCondition<StringDatumDef,Text>'}

    def __init__(self, band=Undefined, condition=Undefined, datum=Undefined,
                 format=Undefined, formatType=Undefined, labelExpr=Undefined,
                 type=Undefined, **kwds):
        # All properties are forwarded unchanged to the generated-schema base.
        super(FieldOrDatumDefWithConditionStringDatumDefText, self).__init__(
            band=band, condition=condition, datum=datum, format=format,
            formatType=formatType, labelExpr=labelExpr, type=type, **kwds)
class FieldOrDatumDefWithConditionStringFieldDefText(TextDef):
    """FieldOrDatumDefWithConditionStringFieldDefText schema wrapper

    Mapping(required=[])

    A FieldDef with Condition :raw-html:`<ValueDef>` { condition: {value: ...}, field:
    ..., ... }

    Attributes
    ----------
    aggregate : :class:`Aggregate`
        Aggregation function for the field (e.g., ``"mean"``, ``"sum"``, ``"median"``,
        ``"min"``, ``"max"``, ``"count"`` ).
    band : float
        For rect-based marks, mark size relative to the bandwidth of band scales, bins,
        or time units; for other marks, relative position on a band.
    bin : anyOf(boolean, :class:`BinParams`, string, None)
        A flag for binning a ``quantitative`` field, an object defining binning
        parameters, or ``"binned"`` to indicate data binned before import.
    condition : anyOf(:class:`ConditionalValueDefTextExprRef`,
    List(:class:`ConditionalValueDefTextExprRef`))
        One or more value definition(s) with a selection or a test predicate.
    field : :class:`Field`
        **Required.** A string defining the name of the field from which to pull a data
        value, or an object defining iterated values from the ``repeat`` operator.
        ``field`` is not required if ``aggregate`` is ``count``.
    format : anyOf(string, :class:`Dictunknown`)
        The text formatting pattern for labels — a D3 number or time format pattern for
        the default format types, or a value passed to a registered custom format
        function.
    formatType : string
        The format type for labels: ``"number"``, ``"time"``, or a registered custom
        format type.
    labelExpr : string
        Vega expression for customizing labels text.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit (e.g., ``year``, ``yearmonth``, ``month``, ``hours`` ) for a temporal
        field, or a temporal field that gets casted as ordinal.
    title : anyOf(:class:`Text`, None)
        A title for the field. If ``null``, the title will be removed.
    type : :class:`StandardType`
        The type of measurement ( ``"quantitative"``, ``"temporal"``, ``"ordinal"``, or
        ``"nominal"`` ) for the encoded field or constant value.

    **See also:** the `Vega-Lite encoding documentation
    <https://vega.github.io/vega-lite/docs/encoding.html>`__ for the full description
    of each attribute.
    """
    _schema = {'$ref': '#/definitions/FieldOrDatumDefWithCondition<StringFieldDef,Text>'}

    def __init__(self, aggregate=Undefined, band=Undefined, bin=Undefined, condition=Undefined,
                 field=Undefined, format=Undefined, formatType=Undefined, labelExpr=Undefined,
                 timeUnit=Undefined, title=Undefined, type=Undefined, **kwds):
        # Collect the named schema properties once; expanding ``params`` and
        # ``kwds`` separately still raises TypeError on a duplicated keyword,
        # exactly like the direct keyword call would.
        params = dict(aggregate=aggregate, band=band, bin=bin, condition=condition,
                      field=field, format=format, formatType=formatType,
                      labelExpr=labelExpr, timeUnit=timeUnit, title=title, type=type)
        super(FieldOrDatumDefWithConditionStringFieldDefText, self).__init__(**params, **kwds)
class TextDirection(VegaLiteSchema):
    """TextDirection schema wrapper.

    enum('ltr', 'rtl')
    """
    _schema = {'$ref': '#/definitions/TextDirection'}

    def __init__(self, *values):
        super().__init__(*values)
class TickConfig(AnyMarkConfig):
    """TickConfig schema wrapper

    Mapping(required=[])

    Configuration for ``tick`` marks.  Tick-specific properties:

    bandSize : float
        The width of the ticks.  **Default value:** 3/4 of step (width step for
        horizontal ticks and height step for vertical ticks).
    thickness : float
        Thickness of the tick mark.  **Default value:** ``1``

    The remaining properties ( ``align``, ``angle``, ``aria``, ``baseline``,
    ``blend``, ``color``, ``cornerRadius*``, ``cursor``, ``dx``/``dy``, ``fill``,
    ``fillOpacity``, ``font*``, ``height``/``width``, ``href``, ``opacity``,
    ``orient``, ``radius*``, ``size``, ``stroke*``, ``text``, ``theta*``,
    ``timeUnitBand*``, ``tooltip``, ``x``/``x2``/``y``/``y2``, ... ) are the
    standard mark configuration options shared by the other mark-config wrappers
    in this module; most accept either a plain value or an :class:`ExprRef`.

    **See also:** the `Vega-Lite mark configuration documentation
    <https://vega.github.io/vega-lite/docs/mark.html#config>`__ for the full
    description of every property.
    """
    _schema = {'$ref': '#/definitions/TickConfig'}

    def __init__(self, align=Undefined, angle=Undefined, aria=Undefined, ariaRole=Undefined,
                 ariaRoleDescription=Undefined, aspect=Undefined, bandSize=Undefined,
                 baseline=Undefined, blend=Undefined, color=Undefined, cornerRadius=Undefined,
                 cornerRadiusBottomLeft=Undefined, cornerRadiusBottomRight=Undefined,
                 cornerRadiusTopLeft=Undefined, cornerRadiusTopRight=Undefined, cursor=Undefined,
                 description=Undefined, dir=Undefined, dx=Undefined, dy=Undefined, ellipsis=Undefined,
                 endAngle=Undefined, fill=Undefined, fillOpacity=Undefined, filled=Undefined,
                 font=Undefined, fontSize=Undefined, fontStyle=Undefined, fontWeight=Undefined,
                 height=Undefined, href=Undefined, innerRadius=Undefined, interpolate=Undefined,
                 invalid=Undefined, limit=Undefined, lineBreak=Undefined, lineHeight=Undefined,
                 opacity=Undefined, order=Undefined, orient=Undefined, outerRadius=Undefined,
                 padAngle=Undefined, radius=Undefined, radius2=Undefined, shape=Undefined,
                 size=Undefined, smooth=Undefined, startAngle=Undefined, stroke=Undefined,
                 strokeCap=Undefined, strokeDash=Undefined, strokeDashOffset=Undefined,
                 strokeJoin=Undefined, strokeMiterLimit=Undefined, strokeOffset=Undefined,
                 strokeOpacity=Undefined, strokeWidth=Undefined, tension=Undefined, text=Undefined,
                 theta=Undefined, theta2=Undefined, thickness=Undefined, timeUnitBand=Undefined,
                 timeUnitBandPosition=Undefined, tooltip=Undefined, url=Undefined, width=Undefined,
                 x=Undefined, x2=Undefined, y=Undefined, y2=Undefined, **kwds):
        # Gather every named property into one mapping so the superclass call
        # stays readable.  Expanding ``params`` and ``kwds`` as two separate
        # ``**`` arguments preserves the duplicate-keyword TypeError that a
        # direct keyword call would raise.
        params = dict(
            align=align, angle=angle, aria=aria, ariaRole=ariaRole,
            ariaRoleDescription=ariaRoleDescription, aspect=aspect,
            bandSize=bandSize, baseline=baseline, blend=blend, color=color,
            cornerRadius=cornerRadius,
            cornerRadiusBottomLeft=cornerRadiusBottomLeft,
            cornerRadiusBottomRight=cornerRadiusBottomRight,
            cornerRadiusTopLeft=cornerRadiusTopLeft,
            cornerRadiusTopRight=cornerRadiusTopRight, cursor=cursor,
            description=description, dir=dir, dx=dx, dy=dy,
            ellipsis=ellipsis, endAngle=endAngle, fill=fill,
            fillOpacity=fillOpacity, filled=filled, font=font,
            fontSize=fontSize, fontStyle=fontStyle, fontWeight=fontWeight,
            height=height, href=href, innerRadius=innerRadius,
            interpolate=interpolate, invalid=invalid, limit=limit,
            lineBreak=lineBreak, lineHeight=lineHeight, opacity=opacity,
            order=order, orient=orient, outerRadius=outerRadius,
            padAngle=padAngle, radius=radius, radius2=radius2, shape=shape,
            size=size, smooth=smooth, startAngle=startAngle, stroke=stroke,
            strokeCap=strokeCap, strokeDash=strokeDash,
            strokeDashOffset=strokeDashOffset, strokeJoin=strokeJoin,
            strokeMiterLimit=strokeMiterLimit, strokeOffset=strokeOffset,
            strokeOpacity=strokeOpacity, strokeWidth=strokeWidth,
            tension=tension, text=text, theta=theta, theta2=theta2,
            thickness=thickness, timeUnitBand=timeUnitBand,
            timeUnitBandPosition=timeUnitBandPosition, tooltip=tooltip,
            url=url, width=width, x=x, x2=x2, y=y, y2=y2,
        )
        super(TickConfig, self).__init__(**params, **kwds)
class TickCount(VegaLiteSchema):
    """TickCount schema wrapper.

    anyOf(float, :class:`TimeInterval`, :class:`TimeIntervalStep`)
    """
    _schema = {'$ref': '#/definitions/TickCount'}

    def __init__(self, *values, **kwds):
        super().__init__(*values, **kwds)
class TimeInterval(TickCount):
    """TimeInterval schema wrapper.

    enum('millisecond', 'second', 'minute', 'hour', 'day', 'week', 'month', 'year')
    """
    _schema = {'$ref': '#/definitions/TimeInterval'}

    def __init__(self, *values):
        super().__init__(*values)
class TimeIntervalStep(TickCount):
    """TimeIntervalStep schema wrapper.

    Mapping(required=[interval, step])

    Attributes
    ----------
    interval : :class:`TimeInterval`
        The base time interval for the tick steps.
    step : float
        The number of intervals per step.
    """
    _schema = {'$ref': '#/definitions/TimeIntervalStep'}

    def __init__(self, interval=Undefined, step=Undefined, **kwds):
        super().__init__(step=step, interval=interval, **kwds)
class TimeUnit(VegaLiteSchema):
    """TimeUnit schema wrapper.

    anyOf(:class:`SingleTimeUnit`, :class:`MultiTimeUnit`)
    """
    _schema = {'$ref': '#/definitions/TimeUnit'}

    def __init__(self, *values, **kwds):
        super().__init__(*values, **kwds)
class MultiTimeUnit(TimeUnit):
    """MultiTimeUnit schema wrapper.

    anyOf(:class:`LocalMultiTimeUnit`, :class:`UtcMultiTimeUnit`)
    """
    _schema = {'$ref': '#/definitions/MultiTimeUnit'}

    def __init__(self, *values, **kwds):
        super().__init__(*values, **kwds)
class LocalMultiTimeUnit(MultiTimeUnit):
    """LocalMultiTimeUnit schema wrapper.

    enum('yearquarter', 'yearquartermonth', 'yearmonth', 'yearmonthdate', 'yearmonthdatehours',
    'yearmonthdatehoursminutes', 'yearmonthdatehoursminutesseconds', 'yearweek', 'yearweekday',
    'yearweekdayhours', 'yearweekdayhoursminutes', 'yearweekdayhoursminutesseconds',
    'yeardayofyear', 'quartermonth', 'monthdate', 'monthdatehours', 'monthdatehoursminutes',
    'monthdatehoursminutesseconds', 'weekday', 'weeksdayhours', 'weekdayhoursminutes',
    'weekdayhoursminutesseconds', 'dayhours', 'dayhoursminutes', 'dayhoursminutesseconds',
    'hoursminutes', 'hoursminutesseconds', 'minutesseconds', 'secondsmilliseconds')
    """
    _schema = {'$ref': '#/definitions/LocalMultiTimeUnit'}

    def __init__(self, *values):
        super().__init__(*values)
class SingleTimeUnit(TimeUnit):
    """SingleTimeUnit schema wrapper.

    anyOf(:class:`LocalSingleTimeUnit`, :class:`UtcSingleTimeUnit`)
    """
    _schema = {'$ref': '#/definitions/SingleTimeUnit'}

    def __init__(self, *values, **kwds):
        super().__init__(*values, **kwds)
class LocalSingleTimeUnit(SingleTimeUnit):
    """LocalSingleTimeUnit schema wrapper.

    enum('year', 'quarter', 'month', 'week', 'day', 'dayofyear', 'date', 'hours', 'minutes',
    'seconds', 'milliseconds')
    """
    _schema = {'$ref': '#/definitions/LocalSingleTimeUnit'}

    def __init__(self, *values):
        super().__init__(*values)
class TimeUnitParams(VegaLiteSchema):
    """TimeUnitParams schema wrapper.

    Mapping(required=[])

    Attributes
    ----------
    maxbins : float
        If no ``unit`` is specified, maxbins is used to infer time units.
    step : float
        The number of steps between bins, in terms of the least significant unit provided.
    unit : :class:`TimeUnit`
        Defines how date-time values should be binned.
    utc : boolean
        True to use UTC timezone. Equivalent to using a ``utc`` prefixed ``TimeUnit``.
    """
    _schema = {'$ref': '#/definitions/TimeUnitParams'}

    def __init__(self, maxbins=Undefined, step=Undefined, unit=Undefined, utc=Undefined, **kwds):
        super().__init__(unit=unit, utc=utc, maxbins=maxbins, step=step, **kwds)
class TitleAnchor(VegaLiteSchema):
    """TitleAnchor schema wrapper.

    enum(None, 'start', 'middle', 'end')
    """
    _schema = {'$ref': '#/definitions/TitleAnchor'}

    def __init__(self, *values):
        super().__init__(*values)
class TitleConfig(VegaLiteSchema):
    """TitleConfig schema wrapper

    Mapping(required=[])

    Default properties for chart titles and subtitles.  Key properties:

    align : :class:`Align`
        Horizontal text alignment for title text. One of ``"left"``,
        ``"center"``, or ``"right"``.
    baseline : :class:`TextBaseline`
        Vertical text baseline for title and subtitle text. One of
        ``"alphabetic"`` (default), ``"top"``, ``"middle"``, ``"bottom"``,
        ``"line-top"``, or ``"line-bottom"``.

    The remaining properties ( ``anchor``, ``angle``, ``aria``, ``color``,
    ``dx``/``dy``, ``font``/``fontSize``/``fontStyle``/``fontWeight``,
    ``frame``, ``limit``, ``lineHeight``, ``offset``, ``orient``, the
    ``subtitle*`` counterparts, and ``zindex`` ) accept either a plain value or
    an :class:`ExprRef`; see the `Vega-Lite title documentation
    <https://vega.github.io/vega-lite/docs/title.html>`__ for details.
    """
    _schema = {'$ref': '#/definitions/TitleConfig'}

    def __init__(self, align=Undefined, anchor=Undefined, angle=Undefined, aria=Undefined,
                 baseline=Undefined, color=Undefined, dx=Undefined, dy=Undefined, font=Undefined,
                 fontSize=Undefined, fontStyle=Undefined, fontWeight=Undefined, frame=Undefined,
                 limit=Undefined, lineHeight=Undefined, offset=Undefined, orient=Undefined,
                 subtitleColor=Undefined, subtitleFont=Undefined, subtitleFontSize=Undefined,
                 subtitleFontStyle=Undefined, subtitleFontWeight=Undefined,
                 subtitleLineHeight=Undefined, subtitlePadding=Undefined, zindex=Undefined, **kwds):
        # One mapping keeps the forwarding call short; ``**params, **kwds``
        # still raises TypeError on a duplicated keyword, like a direct call.
        params = dict(
            align=align, anchor=anchor, angle=angle, aria=aria,
            baseline=baseline, color=color, dx=dx, dy=dy, font=font,
            fontSize=fontSize, fontStyle=fontStyle, fontWeight=fontWeight,
            frame=frame, limit=limit, lineHeight=lineHeight, offset=offset,
            orient=orient, subtitleColor=subtitleColor,
            subtitleFont=subtitleFont, subtitleFontSize=subtitleFontSize,
            subtitleFontStyle=subtitleFontStyle,
            subtitleFontWeight=subtitleFontWeight,
            subtitleLineHeight=subtitleLineHeight,
            subtitlePadding=subtitlePadding, zindex=zindex,
        )
        super(TitleConfig, self).__init__(**params, **kwds)
class TitleFrame(VegaLiteSchema):
    """TitleFrame schema wrapper.

    enum('bounds', 'group')
    """
    _schema = {'$ref': '#/definitions/TitleFrame'}

    def __init__(self, *values):
        super().__init__(*values)
class TitleOrient(VegaLiteSchema):
    """TitleOrient schema wrapper.

    enum('none', 'left', 'right', 'top', 'bottom')
    """
    _schema = {'$ref': '#/definitions/TitleOrient'}

    def __init__(self, *values):
        super().__init__(*values)
class TitleParams(VegaLiteSchema):
    """TitleParams schema wrapper.

    Mapping(required=[text])

    Attributes
    ----------
    text : anyOf(:class:`Text`, :class:`ExprRef`)
        The title text (the only required property).
    align, anchor, angle, aria, baseline, color, dx, dy, font, fontSize,
    fontStyle, fontWeight, frame, limit, lineHeight, offset, orient
        Optional styling and layout properties for the title text mark; see
        the Vega-Lite title documentation
        (https://vega.github.io/vega-lite/docs/title.html) for details.
        Note that ``anchor`` is currently customizable only for single and
        layered views; for other composite views it is always ``"start"``.
    style : anyOf(string, List(string))
        A mark style property to apply to the title text mark.
        **Default value:** ``"group-title"``.
    subtitle : :class:`Text`
        The subtitle text.
    subtitleColor, subtitleFont, subtitleFontSize, subtitleFontStyle,
    subtitleFontWeight, subtitleLineHeight, subtitlePadding
        Optional styling properties applied to the subtitle text.
    zindex : float
        The integer z-index indicating the layering of the title group
        relative to other axis, mark and legend groups.
        **Default value:** ``0``.
    """
    _schema = {'$ref': '#/definitions/TitleParams'}

    def __init__(self, text=Undefined, align=Undefined, anchor=Undefined, angle=Undefined,
                 aria=Undefined, baseline=Undefined, color=Undefined, dx=Undefined, dy=Undefined,
                 font=Undefined, fontSize=Undefined, fontStyle=Undefined, fontWeight=Undefined,
                 frame=Undefined, limit=Undefined, lineHeight=Undefined, offset=Undefined,
                 orient=Undefined, style=Undefined, subtitle=Undefined, subtitleColor=Undefined,
                 subtitleFont=Undefined, subtitleFontSize=Undefined, subtitleFontStyle=Undefined,
                 subtitleFontWeight=Undefined, subtitleLineHeight=Undefined, subtitlePadding=Undefined,
                 zindex=Undefined, **kwds):
        super(TitleParams, self).__init__(
            text=text,
            align=align,
            anchor=anchor,
            angle=angle,
            aria=aria,
            baseline=baseline,
            color=color,
            dx=dx,
            dy=dy,
            font=font,
            fontSize=fontSize,
            fontStyle=fontStyle,
            fontWeight=fontWeight,
            frame=frame,
            limit=limit,
            lineHeight=lineHeight,
            offset=offset,
            orient=orient,
            style=style,
            subtitle=subtitle,
            subtitleColor=subtitleColor,
            subtitleFont=subtitleFont,
            subtitleFontSize=subtitleFontSize,
            subtitleFontStyle=subtitleFontStyle,
            subtitleFontWeight=subtitleFontWeight,
            subtitleLineHeight=subtitleLineHeight,
            subtitlePadding=subtitlePadding,
            zindex=zindex,
            **kwds
        )
class TooltipContent(VegaLiteSchema):
    """TooltipContent schema wrapper.

    Mapping(required=[content])

    Attributes
    ----------
    content : enum('encoding', 'data')
        One of ``'encoding'`` or ``'data'``.
    """
    _schema = {'$ref': '#/definitions/TooltipContent'}

    def __init__(self, content=Undefined, **kwds):
        super(TooltipContent, self).__init__(content=content, **kwds)
class TopLevelSpec(VegaLiteSchema):
    """TopLevelSpec schema wrapper.

    anyOf(:class:`TopLevelUnitSpec`, :class:`TopLevelFacetSpec`,
    :class:`TopLevelLayerSpec`, :class:`TopLevelRepeatSpec`,
    :class:`TopLevelNormalizedConcatSpecGenericSpec`,
    :class:`TopLevelNormalizedVConcatSpecGenericSpec`,
    :class:`TopLevelNormalizedHConcatSpecGenericSpec`)

    A Vega-Lite top-level specification: the root class for all Vega-Lite
    specifications. (The JSON schema is generated from this type.)
    """
    _schema = {'$ref': '#/definitions/TopLevelSpec'}

    def __init__(self, *args, **kwds):
        super(TopLevelSpec, self).__init__(*args, **kwds)
class TopLevelFacetSpec(TopLevelSpec):
    """TopLevelFacetSpec schema wrapper.

    Mapping(required=[data, facet, spec])

    Attributes
    ----------
    data : anyOf(:class:`Data`, None)
        The data source; set to ``null`` to ignore the parent's data source
        (if no data is set, it is derived from the parent).
    facet : anyOf(:class:`FacetFieldDef`, :class:`FacetMapping`)
        How to facet the data: either a single field definition, or an object
        mapping row and column channels to field definitions.
    spec : anyOf(:class:`LayerSpec`, :class:`UnitSpecWithFrame`)
        The view specification that gets faceted.
    align, bounds, center, columns, spacing
        Grid-layout options shared by the view-composition operators; see the
        Vega-Lite facet documentation
        (https://vega.github.io/vega-lite/docs/facet.html) for details.
    autosize : anyOf(:class:`AutosizeType`, :class:`AutoSizeParams`)
        How the visualization size should be determined (default ``pad``).
    background : anyOf(:class:`Color`, :class:`ExprRef`)
        CSS background color of the entire view (default ``"white"``).
    config : :class:`Config`
        Vega-Lite configuration object (top level only).
    datasets : :class:`Datasets`
        A global data store for named datasets.
    description : string
        Description of this mark for commenting purpose.
    name : string
        Name of the visualization for later reference.
    padding : anyOf(:class:`Padding`, :class:`ExprRef`)
        Visualization padding in pixels (default ``5``).
    params : List(:class:`Parameter`)
        Dynamic variables that parameterize the visualization.
    resolve : :class:`Resolve`
        Scale, axis and legend resolutions for view composition.
    title : anyOf(:class:`Text`, :class:`TitleParams`)
        Title for the plot.
    transform : List(:class:`Transform`)
        Data transformations such as filter and new-field calculation.
    usermeta : :class:`Dictunknown`
        Custom metadata passed through to Vega (ignored by Vega-Lite).
    $schema : string
        URL of the JSON schema for a Vega-Lite specification.
    """
    _schema = {'$ref': '#/definitions/TopLevelFacetSpec'}

    def __init__(self, data=Undefined, facet=Undefined, spec=Undefined, align=Undefined,
                 autosize=Undefined, background=Undefined, bounds=Undefined, center=Undefined,
                 columns=Undefined, config=Undefined, datasets=Undefined, description=Undefined,
                 name=Undefined, padding=Undefined, params=Undefined, resolve=Undefined,
                 spacing=Undefined, title=Undefined, transform=Undefined, usermeta=Undefined, **kwds):
        super(TopLevelFacetSpec, self).__init__(
            data=data,
            facet=facet,
            spec=spec,
            align=align,
            autosize=autosize,
            background=background,
            bounds=bounds,
            center=center,
            columns=columns,
            config=config,
            datasets=datasets,
            description=description,
            name=name,
            padding=padding,
            params=params,
            resolve=resolve,
            spacing=spacing,
            title=title,
            transform=transform,
            usermeta=usermeta,
            **kwds
        )
class TopLevelLayerSpec(TopLevelSpec):
    """TopLevelLayerSpec schema wrapper.

    Mapping(required=[layer])

    Attributes
    ----------
    layer : List(anyOf(:class:`LayerSpec`, :class:`UnitSpec`))
        Layer or single view specifications to be layered. Specifications
        inside ``layer`` cannot use ``row`` and ``column`` channels; use the
        facet operator and place a layer inside a facet instead.
    encoding : :class:`SharedEncoding`
        A shared key-value mapping between encoding channels and field
        definitions in the underlying layers.
    projection : :class:`Projection`
        Properties of the geographic projection shared by underlying layers.
    height, width : anyOf(float, string, :class:`Step`)
        Size of the visualization: a number for continuous fields, a
        ``{step: number}`` object for discrete fields, or ``"container"`` for
        responsive sizing; see
        https://vega.github.io/vega-lite/docs/size.html.
    view : :class:`ViewBackground`
        The view background's fill and stroke (default: transparent).
    autosize : anyOf(:class:`AutosizeType`, :class:`AutoSizeParams`)
        How the visualization size should be determined (default ``pad``).
    background : anyOf(:class:`Color`, :class:`ExprRef`)
        CSS background color of the entire view (default ``"white"``).
    config : :class:`Config`
        Vega-Lite configuration object (top level only).
    data : anyOf(:class:`Data`, None)
        The data source; ``null`` ignores the parent's data source.
    datasets : :class:`Datasets`
        A global data store for named datasets.
    description : string
        Description of this mark for commenting purpose.
    name : string
        Name of the visualization for later reference.
    padding : anyOf(:class:`Padding`, :class:`ExprRef`)
        Visualization padding in pixels (default ``5``).
    params : List(:class:`Parameter`)
        Dynamic variables that parameterize the visualization.
    resolve : :class:`Resolve`
        Scale, axis and legend resolutions for view composition.
    title : anyOf(:class:`Text`, :class:`TitleParams`)
        Title for the plot.
    transform : List(:class:`Transform`)
        Data transformations such as filter and new-field calculation.
    usermeta : :class:`Dictunknown`
        Custom metadata passed through to Vega (ignored by Vega-Lite).
    $schema : string
        URL of the JSON schema for a Vega-Lite specification.
    """
    _schema = {'$ref': '#/definitions/TopLevelLayerSpec'}

    def __init__(self, layer=Undefined, autosize=Undefined, background=Undefined, config=Undefined,
                 data=Undefined, datasets=Undefined, description=Undefined, encoding=Undefined,
                 height=Undefined, name=Undefined, padding=Undefined, params=Undefined,
                 projection=Undefined, resolve=Undefined, title=Undefined, transform=Undefined,
                 usermeta=Undefined, view=Undefined, width=Undefined, **kwds):
        super(TopLevelLayerSpec, self).__init__(
            layer=layer,
            autosize=autosize,
            background=background,
            config=config,
            data=data,
            datasets=datasets,
            description=description,
            encoding=encoding,
            height=height,
            name=name,
            padding=padding,
            params=params,
            projection=projection,
            resolve=resolve,
            title=title,
            transform=transform,
            usermeta=usermeta,
            view=view,
            width=width,
            **kwds
        )
class TopLevelNormalizedConcatSpecGenericSpec(TopLevelSpec):
    """TopLevelNormalizedConcatSpecGenericSpec schema wrapper.

    Mapping(required=[concat])

    Attributes
    ----------
    concat : List(:class:`NormalizedSpec`)
        A list of views to be concatenated.
    align, bounds, center, columns, spacing
        Grid-layout options shared by the view-composition operators; see
        https://vega.github.io/vega-lite/docs/concat.html for details.
    autosize : anyOf(:class:`AutosizeType`, :class:`AutoSizeParams`)
        How the visualization size should be determined (default ``pad``).
    background : anyOf(:class:`Color`, :class:`ExprRef`)
        CSS background color of the entire view (default ``"white"``).
    config : :class:`Config`
        Vega-Lite configuration object (top level only).
    data : anyOf(:class:`Data`, None)
        The data source; ``null`` ignores the parent's data source.
    datasets : :class:`Datasets`
        A global data store for named datasets.
    description : string
        Description of this mark for commenting purpose.
    name : string
        Name of the visualization for later reference.
    padding : anyOf(:class:`Padding`, :class:`ExprRef`)
        Visualization padding in pixels (default ``5``).
    params : List(:class:`Parameter`)
        Dynamic variables that parameterize the visualization.
    resolve : :class:`Resolve`
        Scale, axis and legend resolutions for view composition.
    title : anyOf(:class:`Text`, :class:`TitleParams`)
        Title for the plot.
    transform : List(:class:`Transform`)
        Data transformations such as filter and new-field calculation.
    usermeta : :class:`Dictunknown`
        Custom metadata passed through to Vega (ignored by Vega-Lite).
    $schema : string
        URL of the JSON schema for a Vega-Lite specification.
    """
    _schema = {'$ref': '#/definitions/TopLevelNormalizedConcatSpec<GenericSpec>'}

    def __init__(self, concat=Undefined, align=Undefined, autosize=Undefined, background=Undefined,
                 bounds=Undefined, center=Undefined, columns=Undefined, config=Undefined,
                 data=Undefined, datasets=Undefined, description=Undefined, name=Undefined,
                 padding=Undefined, params=Undefined, resolve=Undefined, spacing=Undefined,
                 title=Undefined, transform=Undefined, usermeta=Undefined, **kwds):
        super(TopLevelNormalizedConcatSpecGenericSpec, self).__init__(
            concat=concat,
            align=align,
            autosize=autosize,
            background=background,
            bounds=bounds,
            center=center,
            columns=columns,
            config=config,
            data=data,
            datasets=datasets,
            description=description,
            name=name,
            padding=padding,
            params=params,
            resolve=resolve,
            spacing=spacing,
            title=title,
            transform=transform,
            usermeta=usermeta,
            **kwds
        )
class TopLevelNormalizedHConcatSpecGenericSpec(TopLevelSpec):
    """TopLevelNormalizedHConcatSpecGenericSpec schema wrapper.

    Mapping(required=[hconcat])

    Attributes
    ----------
    hconcat : List(:class:`NormalizedSpec`)
        A list of views to be concatenated and put into a row.
    bounds : enum('full', 'flush')
        The bounds calculation method for sub-plot extents
        (default ``"full"``).
    center : boolean
        Whether subviews should be centered relative to their respective rows
        or columns (default ``false``).
    spacing : float
        The spacing in pixels between sub-views of the concat operator
        (default ``10``).
    autosize : anyOf(:class:`AutosizeType`, :class:`AutoSizeParams`)
        How the visualization size should be determined (default ``pad``).
    background : anyOf(:class:`Color`, :class:`ExprRef`)
        CSS background color of the entire view (default ``"white"``).
    config : :class:`Config`
        Vega-Lite configuration object (top level only).
    data : anyOf(:class:`Data`, None)
        The data source; ``null`` ignores the parent's data source.
    datasets : :class:`Datasets`
        A global data store for named datasets.
    description : string
        Description of this mark for commenting purpose.
    name : string
        Name of the visualization for later reference.
    padding : anyOf(:class:`Padding`, :class:`ExprRef`)
        Visualization padding in pixels (default ``5``).
    params : List(:class:`Parameter`)
        Dynamic variables that parameterize the visualization.
    resolve : :class:`Resolve`
        Scale, axis and legend resolutions for view composition.
    title : anyOf(:class:`Text`, :class:`TitleParams`)
        Title for the plot.
    transform : List(:class:`Transform`)
        Data transformations such as filter and new-field calculation.
    usermeta : :class:`Dictunknown`
        Custom metadata passed through to Vega (ignored by Vega-Lite).
    $schema : string
        URL of the JSON schema for a Vega-Lite specification.
    """
    _schema = {'$ref': '#/definitions/TopLevelNormalizedHConcatSpec<GenericSpec>'}

    def __init__(self, hconcat=Undefined, autosize=Undefined, background=Undefined, bounds=Undefined,
                 center=Undefined, config=Undefined, data=Undefined, datasets=Undefined,
                 description=Undefined, name=Undefined, padding=Undefined, params=Undefined,
                 resolve=Undefined, spacing=Undefined, title=Undefined, transform=Undefined,
                 usermeta=Undefined, **kwds):
        super(TopLevelNormalizedHConcatSpecGenericSpec, self).__init__(
            hconcat=hconcat,
            autosize=autosize,
            background=background,
            bounds=bounds,
            center=center,
            config=config,
            data=data,
            datasets=datasets,
            description=description,
            name=name,
            padding=padding,
            params=params,
            resolve=resolve,
            spacing=spacing,
            title=title,
            transform=transform,
            usermeta=usermeta,
            **kwds
        )
class TopLevelNormalizedVConcatSpecGenericSpec(TopLevelSpec):
    """TopLevelNormalizedVConcatSpecGenericSpec schema wrapper.

    Mapping(required=[vconcat])

    Attributes
    ----------
    vconcat : List(:class:`NormalizedSpec`)
        A list of views to be concatenated and put into a column.
    bounds : enum('full', 'flush')
        The bounds calculation method for sub-plot extents
        (default ``"full"``).
    center : boolean
        Whether subviews should be centered relative to their respective rows
        or columns (default ``false``).
    spacing : float
        The spacing in pixels between sub-views of the concat operator
        (default ``10``).
    autosize : anyOf(:class:`AutosizeType`, :class:`AutoSizeParams`)
        How the visualization size should be determined (default ``pad``).
    background : anyOf(:class:`Color`, :class:`ExprRef`)
        CSS background color of the entire view (default ``"white"``).
    config : :class:`Config`
        Vega-Lite configuration object (top level only).
    data : anyOf(:class:`Data`, None)
        The data source; ``null`` ignores the parent's data source.
    datasets : :class:`Datasets`
        A global data store for named datasets.
    description : string
        Description of this mark for commenting purpose.
    name : string
        Name of the visualization for later reference.
    padding : anyOf(:class:`Padding`, :class:`ExprRef`)
        Visualization padding in pixels (default ``5``).
    params : List(:class:`Parameter`)
        Dynamic variables that parameterize the visualization.
    resolve : :class:`Resolve`
        Scale, axis and legend resolutions for view composition.
    title : anyOf(:class:`Text`, :class:`TitleParams`)
        Title for the plot.
    transform : List(:class:`Transform`)
        Data transformations such as filter and new-field calculation.
    usermeta : :class:`Dictunknown`
        Custom metadata passed through to Vega (ignored by Vega-Lite).
    $schema : string
        URL of the JSON schema for a Vega-Lite specification.
    """
    _schema = {'$ref': '#/definitions/TopLevelNormalizedVConcatSpec<GenericSpec>'}

    def __init__(self, vconcat=Undefined, autosize=Undefined, background=Undefined, bounds=Undefined,
                 center=Undefined, config=Undefined, data=Undefined, datasets=Undefined,
                 description=Undefined, name=Undefined, padding=Undefined, params=Undefined,
                 resolve=Undefined, spacing=Undefined, title=Undefined, transform=Undefined,
                 usermeta=Undefined, **kwds):
        super(TopLevelNormalizedVConcatSpecGenericSpec, self).__init__(
            vconcat=vconcat,
            autosize=autosize,
            background=background,
            bounds=bounds,
            center=center,
            config=config,
            data=data,
            datasets=datasets,
            description=description,
            name=name,
            padding=padding,
            params=params,
            resolve=resolve,
            spacing=spacing,
            title=title,
            transform=transform,
            usermeta=usermeta,
            **kwds
        )
class TopLevelRepeatSpec(TopLevelSpec):
    """TopLevelRepeatSpec schema wrapper

    anyOf(Mapping(required=[repeat, spec]), Mapping(required=[repeat, spec]))
    """
    _schema = {'$ref': '#/definitions/TopLevelRepeatSpec'}

    def __init__(self, *args, **kwds):
        # Pure pass-through: validation/serialization happens in the shared
        # schema-wrapper base class.
        super().__init__(*args, **kwds)
class TopLevelUnitSpec(TopLevelSpec):
    """TopLevelUnitSpec schema wrapper

    Mapping(required=[data, mark])

    Attributes
    ----------
    data : anyOf(:class:`Data`, None)
        An object describing the data source. Set to ``null`` to ignore the parent's data
        source. If no data is set, it is derived from the parent.
    mark : :class:`AnyMark`
        A string describing the mark type (one of ``"bar"``, ``"circle"``, ``"square"``,
        ``"tick"``, ``"line"``, ``"area"``, ``"point"``, ``"rule"``, ``"geoshape"``, and
        ``"text"`` ) or a `mark definition object
        <https://vega.github.io/vega-lite/docs/mark.html#mark-def>`__.
    align : anyOf(:class:`LayoutAlign`, :class:`RowColLayoutAlign`)
        The alignment to apply to grid rows and columns. The supported string values are
        ``"all"``, ``"each"``, and ``"none"``.

        * For ``"none"``, a flow layout will be used, in which adjacent subviews are simply
          placed one after the other. - For ``"each"``, subviews will be aligned into a
          clean grid structure, but each row or column may be of variable size. - For
          ``"all"``, subviews will be aligned and each row or column will be sized
          identically based on the maximum observed size. String values for this property
          will be applied to both grid rows and columns.

        Alternatively, an object value of the form ``{"row": string, "column": string}`` can
        be used to supply different alignments for rows and columns.

        **Default value:** ``"all"``.
    autosize : anyOf(:class:`AutosizeType`, :class:`AutoSizeParams`)
        How the visualization size should be determined. If a string, should be one of
        ``"pad"``, ``"fit"`` or ``"none"``. Object values can additionally specify
        parameters for content sizing and automatic resizing.

        **Default value** : ``pad``
    background : anyOf(:class:`Color`, :class:`ExprRef`)
        CSS color property to use as the background of the entire view.

        **Default value:** ``"white"``
    bounds : enum('full', 'flush')
        The bounds calculation method to use for determining the extent of a sub-plot. One
        of ``full`` (the default) or ``flush``.

        * If set to ``full``, the entire calculated bounds (including axes, title, and
          legend) will be used. - If set to ``flush``, only the specified width and height
          values for the sub-view will be used. The ``flush`` setting can be useful when
          attempting to place sub-plots without axes or legends into a uniform grid
          structure.

        **Default value:** ``"full"``
    center : anyOf(boolean, :class:`RowColboolean`)
        Boolean flag indicating if subviews should be centered relative to their respective
        rows or columns.

        An object value of the form ``{"row": boolean, "column": boolean}`` can be used to
        supply different centering values for rows and columns.

        **Default value:** ``false``
    config : :class:`Config`
        Vega-Lite configuration object. This property can only be defined at the top-level
        of a specification.
    datasets : :class:`Datasets`
        A global data store for named datasets. This is a mapping from names to inline
        datasets. This can be an array of objects or primitive values or a string. Arrays of
        primitive values are ingested as objects with a ``data`` property.
    description : string
        Description of this mark for commenting purpose.
    encoding : :class:`FacetedEncoding`
        A key-value mapping between encoding channels and definition of fields.
    height : anyOf(float, string, :class:`Step`)
        The height of a visualization.

        * For a plot with a continuous y-field, height should be a number. - For a plot with
          either a discrete y-field or no y-field, height can be either a number indicating
          a fixed height or an object in the form of ``{step: number}`` defining the height
          per discrete step. (No y-field is equivalent to having one discrete step.) - To
          enable responsive sizing on height, it should be set to ``"container"``.

        **Default value:** Based on ``config.view.continuousHeight`` for a plot with a
        continuous y-field and ``config.view.discreteHeight`` otherwise.

        **Note:** For plots with `row and column channels
        <https://vega.github.io/vega-lite/docs/encoding.html#facet>`__, this represents the
        height of a single view and the ``"container"`` option cannot be used.

        **See also:** `height <https://vega.github.io/vega-lite/docs/size.html>`__
        documentation.
    name : string
        Name of the visualization for later reference.
    padding : anyOf(:class:`Padding`, :class:`ExprRef`)
        The default visualization padding, in pixels, from the edge of the visualization
        canvas to the data rectangle. If a number, specifies padding for all sides. If an
        object, the value should have the format ``{"left": 5, "top": 5, "right": 5,
        "bottom": 5}`` to specify padding for each side of the visualization.

        **Default value** : ``5``
    params : List(:class:`Parameter`)
        Dynamic variables that parameterize a visualization.
    projection : :class:`Projection`
        An object defining properties of geographic projection, which will be applied to
        ``shape`` path for ``"geoshape"`` marks and to ``latitude`` and ``"longitude"``
        channels for other marks.
    resolve : :class:`Resolve`
        Scale, axis, and legend resolutions for view composition specifications.
    selection : Mapping(required=[])
        A key-value mapping between selection names and definitions.
    spacing : anyOf(float, :class:`RowColnumber`)
        The spacing in pixels between sub-views of the composition operator. An object of
        the form ``{"row": number, "column": number}`` can be used to set different spacing
        values for rows and columns.

        **Default value** : Depends on ``"spacing"`` property of `the view composition
        configuration <https://vega.github.io/vega-lite/docs/config.html#view-config>`__ (
        ``20`` by default)
    title : anyOf(:class:`Text`, :class:`TitleParams`)
        Title for the plot.
    transform : List(:class:`Transform`)
        An array of data transformations such as filter and new field calculation.
    usermeta : :class:`Dictunknown`
        Optional metadata that will be passed to Vega. This object is completely ignored by
        Vega and Vega-Lite and can be used for custom metadata.
    view : :class:`ViewBackground`
        An object defining the view background's fill and stroke.

        **Default value:** none (transparent)
    width : anyOf(float, string, :class:`Step`)
        The width of a visualization.

        * For a plot with a continuous x-field, width should be a number. - For a plot with
          either a discrete x-field or no x-field, width can be either a number indicating a
          fixed width or an object in the form of ``{step: number}`` defining the width per
          discrete step. (No x-field is equivalent to having one discrete step.) - To enable
          responsive sizing on width, it should be set to ``"container"``.

        **Default value:** Based on ``config.view.continuousWidth`` for a plot with a
        continuous x-field and ``config.view.discreteWidth`` otherwise.

        **Note:** For plots with `row and column channels
        <https://vega.github.io/vega-lite/docs/encoding.html#facet>`__, this represents the
        width of a single view and the ``"container"`` option cannot be used.

        **See also:** `width <https://vega.github.io/vega-lite/docs/size.html>`__
        documentation.
    $schema : string
        URL to `JSON schema <http://json-schema.org/>`__ for a Vega-Lite specification.
        Unless you have a reason to change this, use
        ``https://vega.github.io/schema/vega-lite/v4.json``. Setting the ``$schema``
        property allows automatic validation and autocomplete in editors that support JSON
        schema.
    """
    _schema = {'$ref': '#/definitions/TopLevelUnitSpec'}

    def __init__(self, data=Undefined, mark=Undefined, align=Undefined, autosize=Undefined,
                 background=Undefined, bounds=Undefined, center=Undefined, config=Undefined,
                 datasets=Undefined, description=Undefined, encoding=Undefined, height=Undefined,
                 name=Undefined, padding=Undefined, params=Undefined, projection=Undefined,
                 resolve=Undefined, selection=Undefined, spacing=Undefined, title=Undefined,
                 transform=Undefined, usermeta=Undefined, view=Undefined, width=Undefined, **kwds):
        # Fold the explicitly named schema properties into the keyword mapping
        # (the signature guarantees no key collisions) and let the shared
        # schema-wrapper base class perform validation/serialization.
        kwds.update(data=data, mark=mark, align=align, autosize=autosize,
                    background=background, bounds=bounds, center=center, config=config,
                    datasets=datasets, description=description, encoding=encoding,
                    height=height, name=name, padding=padding, params=params,
                    projection=projection, resolve=resolve, selection=selection,
                    spacing=spacing, title=title, transform=transform,
                    usermeta=usermeta, view=view, width=width)
        super().__init__(**kwds)
class TopoDataFormat(DataFormat):
    """TopoDataFormat schema wrapper

    Mapping(required=[])

    Attributes
    ----------
    feature : string
        The name of the TopoJSON object set to convert to a GeoJSON feature collection. For
        example, in a map of the world, there may be an object set named ``"countries"``.
        Using the feature property, we can extract this set and generate a GeoJSON feature
        object for each country.
    mesh : string
        The name of the TopoJSON object set to convert to mesh. Similar to the ``feature``
        option, ``mesh`` extracts a named TopoJSON object set. Unlike the ``feature``
        option, the corresponding geo data is returned as a single, unified mesh instance,
        not as individual GeoJSON features. Extracting a mesh is useful for more efficiently
        drawing borders or other geographic elements that you do not need to associate with
        specific regions such as individual countries, states or counties.
    parse : anyOf(:class:`Parse`, None)
        If set to ``null``, disable type inference based on the spec and only use type
        inference based on the data. Alternatively, a parsing directive object can be
        provided for explicit data types. Each property of the object corresponds to a field
        name, and the value to the desired data type (one of ``"number"``, ``"boolean"``,
        ``"date"``, or null (do not parse the field)). For example, ``"parse":
        {"modified_on": "date"}`` parses the ``modified_on`` field in each input record a
        Date value.

        For ``"date"``, we parse data based using Javascript's `Date.parse()
        <https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/parse>`__.
        For Specific date formats can be provided (e.g., ``{foo: "date:'%m%d%Y'"}`` ), using
        the `d3-time-format syntax <https://github.com/d3/d3-time-format#locale_format>`__.
        UTC date format parsing is supported similarly (e.g., ``{foo: "utc:'%m%d%Y'"}`` ).
        See more about `UTC time
        <https://vega.github.io/vega-lite/docs/timeunit.html#utc>`__
    type : string
        Type of input data: ``"json"``, ``"csv"``, ``"tsv"``, ``"dsv"``.

        **Default value:** The default format type is determined by the extension of the
        file URL. If no extension is detected, ``"json"`` will be used by default.
    """
    _schema = {'$ref': '#/definitions/TopoDataFormat'}

    def __init__(self, feature=Undefined, mesh=Undefined, parse=Undefined, type=Undefined, **kwds):
        # Merge the named properties into the keyword mapping; the signature
        # guarantees they cannot already be present in ``kwds``.
        kwds.update(feature=feature, mesh=mesh, parse=parse, type=type)
        super().__init__(**kwds)
class Transform(VegaLiteSchema):
    """Transform schema wrapper

    anyOf(:class:`AggregateTransform`, :class:`BinTransform`, :class:`CalculateTransform`,
    :class:`DensityTransform`, :class:`FilterTransform`, :class:`FlattenTransform`,
    :class:`FoldTransform`, :class:`ImputeTransform`, :class:`JoinAggregateTransform`,
    :class:`LoessTransform`, :class:`LookupTransform`, :class:`QuantileTransform`,
    :class:`RegressionTransform`, :class:`TimeUnitTransform`, :class:`SampleTransform`,
    :class:`StackTransform`, :class:`WindowTransform`, :class:`PivotTransform`)
    """
    _schema = {'$ref': '#/definitions/Transform'}

    def __init__(self, *args, **kwds):
        # Base class for all transform wrappers; simply forwards to the
        # shared schema-wrapper machinery.
        super().__init__(*args, **kwds)
class AggregateTransform(Transform):
    """AggregateTransform schema wrapper

    Mapping(required=[aggregate])

    Attributes
    ----------
    aggregate : List(:class:`AggregatedFieldDef`)
        Array of objects that define fields to aggregate.
    groupby : List(:class:`FieldName`)
        The data fields to group by. If not specified, a single group containing all data
        objects will be used.
    """
    _schema = {'$ref': '#/definitions/AggregateTransform'}

    def __init__(self, aggregate=Undefined, groupby=Undefined, **kwds):
        # The signature guarantees ``aggregate``/``groupby`` are not in ``kwds``.
        kwds.update(aggregate=aggregate, groupby=groupby)
        super().__init__(**kwds)
class BinTransform(Transform):
    """BinTransform schema wrapper

    Mapping(required=[bin, field, as])

    Attributes
    ----------
    bin : anyOf(boolean, :class:`BinParams`)
        An object indicating bin properties, or simply ``true`` for using default bin
        parameters.
    field : :class:`FieldName`
        The data field to bin.
    as : anyOf(:class:`FieldName`, List(:class:`FieldName`))
        The output fields at which to write the start and end bin values. This can be either
        a string or an array of strings with two elements denoting the name for the fields
        for bin start and bin end respectively. If a single string (e.g., ``"val"`` ) is
        provided, the end field will be ``"val_end"``.
    """
    _schema = {'$ref': '#/definitions/BinTransform'}

    def __init__(self, bin=Undefined, field=Undefined, **kwds):
        # ``as`` is a Python keyword, so it travels through ``kwds`` only.
        kwds.update(bin=bin, field=field)
        super().__init__(**kwds)
class CalculateTransform(Transform):
    """CalculateTransform schema wrapper

    Mapping(required=[calculate, as])

    Attributes
    ----------
    calculate : string
        A `expression <https://vega.github.io/vega-lite/docs/types.html#expression>`__
        string. Use the variable ``datum`` to refer to the current data object.
    as : :class:`FieldName`
        The field for storing the computed formula value.
    """
    _schema = {'$ref': '#/definitions/CalculateTransform'}

    def __init__(self, calculate=Undefined, **kwds):
        # ``as`` is a Python keyword, so it travels through ``kwds`` only.
        kwds.update(calculate=calculate)
        super().__init__(**kwds)
class DensityTransform(Transform):
    """DensityTransform schema wrapper

    Mapping(required=[density])

    Attributes
    ----------
    density : :class:`FieldName`
        The data field for which to perform density estimation.
    bandwidth : float
        The bandwidth (standard deviation) of the Gaussian kernel. If unspecified or set to
        zero, the bandwidth value is automatically estimated from the input data using
        Scott’s rule.
    counts : boolean
        A boolean flag indicating if the output values should be probability estimates
        (false) or smoothed counts (true).

        **Default value:** ``false``
    cumulative : boolean
        A boolean flag indicating whether to produce density estimates (false) or cumulative
        density estimates (true).

        **Default value:** ``false``
    extent : List([float, float])
        A [min, max] domain from which to sample the distribution. If unspecified, the
        extent will be determined by the observed minimum and maximum values of the density
        value field.
    groupby : List(:class:`FieldName`)
        The data fields to group by. If not specified, a single group containing all data
        objects will be used.
    maxsteps : float
        The maximum number of samples to take along the extent domain for plotting the
        density.

        **Default value:** ``200``
    minsteps : float
        The minimum number of samples to take along the extent domain for plotting the
        density.

        **Default value:** ``25``
    steps : float
        The exact number of samples to take along the extent domain for plotting the
        density. If specified, overrides both minsteps and maxsteps to set an exact number
        of uniform samples. Potentially useful in conjunction with a fixed extent to ensure
        consistent sample points for stacked densities.
    as : List([:class:`FieldName`, :class:`FieldName`])
        The output fields for the sample value and corresponding density estimate.

        **Default value:** ``["value", "density"]``
    """
    _schema = {'$ref': '#/definitions/DensityTransform'}

    def __init__(self, density=Undefined, bandwidth=Undefined, counts=Undefined, cumulative=Undefined,
                 extent=Undefined, groupby=Undefined, maxsteps=Undefined, minsteps=Undefined,
                 steps=Undefined, **kwds):
        # ``as`` is a Python keyword, so it travels through ``kwds`` only.
        kwds.update(density=density, bandwidth=bandwidth, counts=counts,
                    cumulative=cumulative, extent=extent, groupby=groupby,
                    maxsteps=maxsteps, minsteps=minsteps, steps=steps)
        super().__init__(**kwds)
class FilterTransform(Transform):
    """FilterTransform schema wrapper

    Mapping(required=[filter])

    Attributes
    ----------
    filter : :class:`PredicateComposition`
        The ``filter`` property must be a predication definition, which can take one of the
        following forms:

        1) an `expression <https://vega.github.io/vega-lite/docs/types.html#expression>`__
        string, where ``datum`` can be used to refer to the current data object. For
        example, ``{filter: "datum.b2 > 60"}`` would make the output data includes only
        items that have values in the field ``b2`` over 60.

        2) one of the `field predicates
        <https://vega.github.io/vega-lite/docs/predicate.html#field-predicate>`__ : `equal
        <https://vega.github.io/vega-lite/docs/predicate.html#field-equal-predicate>`__, `lt
        <https://vega.github.io/vega-lite/docs/predicate.html#lt-predicate>`__, `lte
        <https://vega.github.io/vega-lite/docs/predicate.html#lte-predicate>`__, `gt
        <https://vega.github.io/vega-lite/docs/predicate.html#gt-predicate>`__, `gte
        <https://vega.github.io/vega-lite/docs/predicate.html#gte-predicate>`__, `range
        <https://vega.github.io/vega-lite/docs/predicate.html#range-predicate>`__, `oneOf
        <https://vega.github.io/vega-lite/docs/predicate.html#one-of-predicate>`__, or
        `valid <https://vega.github.io/vega-lite/docs/predicate.html#valid-predicate>`__,

        3) a `selection predicate
        <https://vega.github.io/vega-lite/docs/predicate.html#selection-predicate>`__, which
        define the names of a selection that the data point should belong to (or a logical
        composition of selections).

        4) a `logical composition
        <https://vega.github.io/vega-lite/docs/predicate.html#composition>`__ of (1), (2),
        or (3).
    """
    _schema = {'$ref': '#/definitions/FilterTransform'}

    def __init__(self, filter=Undefined, **kwds):
        # ``filter`` cannot already be in ``kwds`` (captured by the signature).
        kwds.update(filter=filter)
        super().__init__(**kwds)
class FlattenTransform(Transform):
    """FlattenTransform schema wrapper

    Mapping(required=[flatten])

    Attributes
    ----------
    flatten : List(:class:`FieldName`)
        An array of one or more data fields containing arrays to flatten. If multiple fields
        are specified, their array values should have a parallel structure, ideally with the
        same length. If the lengths of parallel arrays do not match, the longest array will
        be used with ``null`` values added for missing entries.
    as : List(:class:`FieldName`)
        The output field names for extracted array values.

        **Default value:** The field name of the corresponding array field
    """
    _schema = {'$ref': '#/definitions/FlattenTransform'}

    def __init__(self, flatten=Undefined, **kwds):
        # ``as`` is a Python keyword, so it travels through ``kwds`` only.
        kwds.update(flatten=flatten)
        super().__init__(**kwds)
class FoldTransform(Transform):
    """FoldTransform schema wrapper

    Mapping(required=[fold])

    Attributes
    ----------
    fold : List(:class:`FieldName`)
        An array of data fields indicating the properties to fold.
    as : List([:class:`FieldName`, :class:`FieldName`])
        The output field names for the key and value properties produced by the fold
        transform. **Default value:** ``["key", "value"]``
    """
    _schema = {'$ref': '#/definitions/FoldTransform'}

    def __init__(self, fold=Undefined, **kwds):
        # ``as`` is a Python keyword, so it travels through ``kwds`` only.
        kwds.update(fold=fold)
        super().__init__(**kwds)
class ImputeTransform(Transform):
    """ImputeTransform schema wrapper

    Mapping(required=[impute, key])

    Attributes
    ----------
    impute : :class:`FieldName`
        The data field for which the missing values should be imputed.
    key : :class:`FieldName`
        A key field that uniquely identifies data objects within a group. Missing key values
        (those occurring in the data but not in the current group) will be imputed.
    frame : List([anyOf(None, float), anyOf(None, float)])
        A frame specification as a two-element array used to control the window over which
        the specified method is applied. The array entries should either be a number
        indicating the offset from the current data object, or null to indicate unbounded
        rows preceding or following the current data object. For example, the value ``[-5,
        5]`` indicates that the window should include five objects preceding and five
        objects following the current object.

        **Default value:** : ``[null, null]`` indicating that the window includes all
        objects.
    groupby : List(:class:`FieldName`)
        An optional array of fields by which to group the values. Imputation will then be
        performed on a per-group basis.
    keyvals : anyOf(List(Any), :class:`ImputeSequence`)
        Defines the key values that should be considered for imputation. An array of key
        values or an object defining a `number sequence
        <https://vega.github.io/vega-lite/docs/impute.html#sequence-def>`__.

        If provided, this will be used in addition to the key values observed within the
        input data. If not provided, the values will be derived from all unique values of
        the ``key`` field. For ``impute`` in ``encoding``, the key field is the x-field if
        the y-field is imputed, or vice versa.

        If there is no impute grouping, this property *must* be specified.
    method : :class:`ImputeMethod`
        The imputation method to use for the field value of imputed data objects. One of
        ``"value"``, ``"mean"``, ``"median"``, ``"max"`` or ``"min"``.

        **Default value:** ``"value"``
    value : Any
        The field value to use when the imputation ``method`` is ``"value"``.
    """
    _schema = {'$ref': '#/definitions/ImputeTransform'}

    def __init__(self, impute=Undefined, key=Undefined, frame=Undefined, groupby=Undefined,
                 keyvals=Undefined, method=Undefined, value=Undefined, **kwds):
        # The named properties cannot collide with ``kwds`` (captured above).
        kwds.update(impute=impute, key=key, frame=frame, groupby=groupby,
                    keyvals=keyvals, method=method, value=value)
        super().__init__(**kwds)
class JoinAggregateTransform(Transform):
    """JoinAggregateTransform schema wrapper

    Mapping(required=[joinaggregate])

    Attributes
    ----------
    joinaggregate : List(:class:`JoinAggregateFieldDef`)
        The definition of the fields in the join aggregate, and what calculations to use.
    groupby : List(:class:`FieldName`)
        The data fields for partitioning the data objects into separate groups. If
        unspecified, all data points will be in a single group.
    """
    _schema = {'$ref': '#/definitions/JoinAggregateTransform'}

    def __init__(self, joinaggregate=Undefined, groupby=Undefined, **kwds):
        # The named properties cannot collide with ``kwds`` (captured above).
        kwds.update(joinaggregate=joinaggregate, groupby=groupby)
        super().__init__(**kwds)
class LoessTransform(Transform):
    """LoessTransform schema wrapper

    Mapping(required=[loess, on])

    Attributes
    ----------
    loess : :class:`FieldName`
        The data field of the dependent variable to smooth.
    on : :class:`FieldName`
        The data field of the independent variable to use a predictor.
    bandwidth : float
        A bandwidth parameter in the range ``[0, 1]`` that determines the amount of
        smoothing.

        **Default value:** ``0.3``
    groupby : List(:class:`FieldName`)
        The data fields to group by. If not specified, a single group containing all data
        objects will be used.
    as : List([:class:`FieldName`, :class:`FieldName`])
        The output field names for the smoothed points generated by the loess transform.

        **Default value:** The field names of the input x and y values.
    """
    _schema = {'$ref': '#/definitions/LoessTransform'}

    def __init__(self, loess=Undefined, on=Undefined, bandwidth=Undefined, groupby=Undefined, **kwds):
        # ``as`` is a Python keyword, so it travels through ``kwds`` only.
        kwds.update(loess=loess, on=on, bandwidth=bandwidth, groupby=groupby)
        super().__init__(**kwds)
class LookupTransform(Transform):
    """LookupTransform schema wrapper

    Mapping(required=[lookup, from])

    Attributes
    ----------
    lookup : string
        Key in primary data source.
    default : string
        The default value to use if lookup fails.

        **Default value:** ``null``
    as : anyOf(:class:`FieldName`, List(:class:`FieldName`))
        The output fields on which to store the looked up data values.

        For data lookups, this property may be left blank if ``from.fields`` has been
        specified (those field names will be used); if ``from.fields`` has not been
        specified, ``as`` must be a string.

        For selection lookups, this property is optional: if unspecified, looked up values
        will be stored under a property named for the selection; and if specified, it must
        correspond to ``from.fields``.
    from : anyOf(:class:`LookupData`, :class:`LookupSelection`)
        Data source or selection for secondary data reference.
    """
    _schema = {'$ref': '#/definitions/LookupTransform'}

    def __init__(self, lookup=Undefined, default=Undefined, **kwds):
        # ``as`` and ``from`` are Python keywords, so they travel through
        # ``kwds`` only.
        kwds.update(lookup=lookup, default=default)
        super().__init__(**kwds)
class PivotTransform(Transform):
    """PivotTransform schema wrapper

    Mapping(required=[pivot, value])

    Attributes
    ----------
    pivot : :class:`FieldName`
        The data field to pivot on. The unique values of this field become new field names
        in the output stream.
    value : :class:`FieldName`
        The data field to populate pivoted fields. The aggregate values of this field become
        the values of the new pivoted fields.
    groupby : List(:class:`FieldName`)
        The optional data fields to group by. If not specified, a single group containing
        all data objects will be used.
    limit : float
        An optional parameter indicating the maximum number of pivoted fields to generate.
        The default ( ``0`` ) applies no limit. The pivoted ``pivot`` names are sorted in
        ascending order prior to enforcing the limit. **Default value:** ``0``
    op : string
        The aggregation operation to apply to grouped ``value`` field values. **Default
        value:** ``sum``
    """
    _schema = {'$ref': '#/definitions/PivotTransform'}

    def __init__(self, pivot=Undefined, value=Undefined, groupby=Undefined, limit=Undefined,
                 op=Undefined, **kwds):
        # The named properties cannot collide with ``kwds`` (captured above).
        kwds.update(pivot=pivot, value=value, groupby=groupby, limit=limit, op=op)
        super().__init__(**kwds)
class QuantileTransform(Transform):
    """QuantileTransform schema wrapper

    Mapping(required=[quantile])

    Attributes
    ----------
    quantile : :class:`FieldName`
        The data field for which to perform quantile estimation.
    groupby : List(:class:`FieldName`)
        The data fields to group by. If not specified, a single group containing all data
        objects will be used.
    probs : List(float)
        An array of probabilities in the range (0, 1) for which to compute quantile values.
        If not specified, the *step* parameter will be used.
    step : float
        A probability step size (default 0.01) for sampling quantile values. All values from
        one-half the step size up to 1 (exclusive) will be sampled. This parameter is only
        used if the *probs* parameter is not provided.
    as : List([:class:`FieldName`, :class:`FieldName`])
        The output field names for the probability and quantile values.

        **Default value:** ``["prob", "value"]``
    """
    _schema = {'$ref': '#/definitions/QuantileTransform'}

    def __init__(self, quantile=Undefined, groupby=Undefined, probs=Undefined, step=Undefined, **kwds):
        # ``as`` is a Python keyword, so it travels through ``kwds`` only.
        kwds.update(quantile=quantile, groupby=groupby, probs=probs, step=step)
        super().__init__(**kwds)
class RegressionTransform(Transform):
    """RegressionTransform schema wrapper

    Mapping(required=[regression, on])

    Attributes
    ----------
    on : :class:`FieldName`
        The data field of the independent variable to use a predictor.
    regression : :class:`FieldName`
        The data field of the dependent variable to predict.
    extent : List([float, float])
        A [min, max] domain over the independent (x) field for the starting and ending
        points of the generated trend line.
    groupby : List(:class:`FieldName`)
        The data fields to group by. If not specified, a single group containing all data
        objects will be used.
    method : enum('linear', 'log', 'exp', 'pow', 'quad', 'poly')
        The functional form of the regression model. One of ``"linear"``, ``"log"``,
        ``"exp"``, ``"pow"``, ``"quad"``, or ``"poly"``.

        **Default value:** ``"linear"``
    order : float
        The polynomial order (number of coefficients) for the 'poly' method.

        **Default value:** ``3``
    params : boolean
        A boolean flag indicating if the transform should return the regression model
        parameters (one object per group), rather than trend line points. The resulting
        objects include a ``coef`` array of fitted coefficient values (starting with the
        intercept term and then including terms of increasing order) and an ``rSquared``
        value (indicating the total variance explained by the model).

        **Default value:** ``false``
    as : List([:class:`FieldName`, :class:`FieldName`])
        The output field names for the smoothed points generated by the regression
        transform.

        **Default value:** The field names of the input x and y values.
    """
    _schema = {'$ref': '#/definitions/RegressionTransform'}

    def __init__(self, on=Undefined, regression=Undefined, extent=Undefined, groupby=Undefined,
                 method=Undefined, order=Undefined, params=Undefined, **kwds):
        # ``as`` is a Python keyword, so it travels through ``kwds`` only.
        kwds.update(on=on, regression=regression, extent=extent, groupby=groupby,
                    method=method, order=order, params=params)
        super().__init__(**kwds)
class SampleTransform(Transform):
    """SampleTransform schema wrapper

    Mapping(required=[sample])

    Attributes
    ----------
    sample : float
        The maximum number of data objects to include in the sample.

        **Default value:** ``1000``
    """
    _schema = {'$ref': '#/definitions/SampleTransform'}

    def __init__(self, sample=Undefined, **kwds):
        # ``sample`` cannot already be in ``kwds`` (captured by the signature).
        kwds.update(sample=sample)
        super().__init__(**kwds)
class StackTransform(Transform):
    """StackTransform schema wrapper

    Mapping(required=[stack, groupby, as])

    Attributes
    ----------
    groupby : List(:class:`FieldName`)
        The data fields to group by.
    stack : :class:`FieldName`
        The field which is stacked.
    offset : enum('zero', 'center', 'normalize')
        Mode for stacking marks. One of ``"zero"`` (default), ``"center"``, or
        ``"normalize"``. The ``"zero"`` offset will stack starting at ``0``. The
        ``"center"`` offset will center the stacks. The ``"normalize"`` offset will compute
        percentage values for each stack point, with output values in the range ``[0,1]``.

        **Default value:** ``"zero"``
    sort : List(:class:`SortField`)
        Field that determines the order of leaves in the stacked charts.
    as : anyOf(:class:`FieldName`, List([:class:`FieldName`, :class:`FieldName`]))
        Output field names. This can be either a string or an array of strings with two
        elements denoting the name for the fields for stack start and stack end
        respectively. If a single string(e.g., ``"val"`` ) is provided, the end field will
        be ``"val_end"``.
    """
    _schema = {'$ref': '#/definitions/StackTransform'}

    def __init__(self, groupby=Undefined, stack=Undefined, offset=Undefined, sort=Undefined, **kwds):
        # ``as`` is a Python keyword, so it travels through ``kwds`` only.
        kwds.update(groupby=groupby, stack=stack, offset=offset, sort=sort)
        super().__init__(**kwds)
class TimeUnitTransform(Transform):
    """TimeUnitTransform schema wrapper

    Mapping(required=[timeUnit, field, as])

    Attributes
    ----------
    field : :class:`FieldName`
        The data field to apply time unit.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        The timeUnit.
    as : :class:`FieldName`
        The output field to write the timeUnit value.
    """
    _schema = {'$ref': '#/definitions/TimeUnitTransform'}

    def __init__(self, field=Undefined, timeUnit=Undefined, **kwds):
        # ``as`` is a Python keyword, so it travels through ``kwds`` only.
        kwds.update(field=field, timeUnit=timeUnit)
        super().__init__(**kwds)
class Type(VegaLiteSchema):
    """Type schema wrapper

    enum('quantitative', 'ordinal', 'temporal', 'nominal', 'geojson')
    Data type based on level of measurement
    """
    _schema = {'$ref': '#/definitions/Type'}

    def __init__(self, *args):
        # Enum wrapper: the single positional value is handled by the base class.
        super().__init__(*args)
class TypeForShape(VegaLiteSchema):
    """TypeForShape schema wrapper

    enum('nominal', 'ordinal', 'geojson')
    """
    _schema = {'$ref': '#/definitions/TypeForShape'}

    def __init__(self, *args):
        # Enum wrapper: the single positional value is handled by the base class.
        super().__init__(*args)
class TypedFieldDef(VegaLiteSchema):
    """Wrapper for the ``TypedFieldDef`` schema definition.

    Mapping(required=[])

    Definition object for a data field, its type, and the transformations
    applied on an encoding channel.

    Parameters
    ----------
    aggregate : :class:`Aggregate`
        Aggregation function for the field (e.g. ``"mean"``, ``"sum"``,
        ``"median"``, ``"min"``, ``"max"``, ``"count"``). Default:
        ``undefined`` (no aggregation).
    band : float
        For rect-based marks (``rect``, ``bar``, ``image``), the mark size
        relative to the bandwidth of band scales, bins, or time units; for
        other marks, the relative position on a band (``0`` = start,
        ``0.5`` = middle).
    bin : anyOf(boolean, :class:`BinParams`, string, None)
        Binning behavior for a quantitative field: ``True`` for default
        binning parameters, a :class:`BinParams` object for custom ones, or
        ``"binned"`` to indicate the data is already binned before being
        imported into Vega-Lite. Default: ``False``.
    field : :class:`Field`
        Name of the field to pull a data value from, or an object defining
        iterated values from the ``repeat`` operator. Dots and brackets
        access nested objects; escape them with ``\\`` when they are literal.
        Not required when ``aggregate`` is ``count``.
    timeUnit : anyOf(:class:`TimeUnit`, :class:`TimeUnitParams`)
        Time unit (e.g. ``year``, ``yearmonth``, ``month``, ``hours``) for a
        temporal field, or for a temporal field cast as ordinal. Default:
        ``undefined``.
    title : anyOf(:class:`Text`, None)
        Title for the field; ``None`` removes the title. By default the
        title is derived from the field name plus any aggregate/bin/timeUnit
        applied. An axis/header/legend ``title`` overrides this one.
    type : :class:`StandardType`
        The type of measurement: ``"quantitative"``, ``"temporal"``,
        ``"ordinal"`` or ``"nominal"`` (or ``"geojson"`` for geoshape
        encodings). Vega-Lite infers the type in many cases; see the
        upstream `type documentation
        <https://vega.github.io/vega-lite/docs/type.html>`__ for the full
        inference and default rules.
    """
    _schema = {'$ref': '#/definitions/TypedFieldDef'}

    def __init__(self, aggregate=Undefined, band=Undefined, bin=Undefined, field=Undefined,
                 timeUnit=Undefined, title=Undefined, type=Undefined, **kwds):
        # Hand every property straight through to the schema-object base.
        super(TypedFieldDef, self).__init__(type=type, title=title, timeUnit=timeUnit,
                                            field=field, bin=bin, band=band,
                                            aggregate=aggregate, **kwds)
class URI(VegaLiteSchema):
    """Wrapper for the ``URI`` schema definition (a plain string)."""
    _schema = {'$ref': '#/definitions/URI'}

    def __init__(self, *values):
        super(URI, self).__init__(*values)
class UnitSpec(VegaLiteSchema):
    """Wrapper for the ``UnitSpec`` schema definition.

    Mapping(required=[mark])

    A unit specification, which can contain either primitive or composite
    marks.

    Parameters
    ----------
    mark : :class:`AnyMark`
        Mark type name (``"bar"``, ``"circle"``, ``"square"``, ``"tick"``,
        ``"line"``, ``"area"``, ``"point"``, ``"rule"``, ``"geoshape"``,
        ``"text"``) or a mark definition object.
    data : anyOf(:class:`Data`, None)
        Data source; ``None`` ignores the parent's data source, and omitting
        it inherits from the parent.
    description : string
        Commenting description of this mark.
    encoding : :class:`Encoding`
        Mapping from encoding channels to field definitions.
    height : anyOf(float, string, :class:`Step`)
        **Deprecated:** avoid sizing a unit spec that is part of a layer spec.
    name : string
        Name of the visualization for later reference.
    projection : :class:`Projection`
        Geographic projection applied to ``"geoshape"`` shape paths and to
        latitude/longitude channels of other marks.
    selection : Mapping(required=[])
        Mapping from selection names to selection definitions.
    title : anyOf(:class:`Text`, :class:`TitleParams`)
        Title for the plot.
    transform : List(:class:`Transform`)
        Data transformations such as filters and calculated fields.
    view : :class:`ViewBackground`
        **Deprecated:** avoid styling a unit spec that is part of a layer spec.
    width : anyOf(float, string, :class:`Step`)
        **Deprecated:** avoid sizing a unit spec that is part of a layer spec.
    """
    _schema = {'$ref': '#/definitions/UnitSpec'}

    def __init__(self, mark=Undefined, data=Undefined, description=Undefined, encoding=Undefined,
                 height=Undefined, name=Undefined, projection=Undefined, selection=Undefined,
                 title=Undefined, transform=Undefined, view=Undefined, width=Undefined, **kwds):
        # Forward all properties to the schema-object base, unchanged.
        super(UnitSpec, self).__init__(width=width, view=view, transform=transform, title=title,
                                       selection=selection, projection=projection, name=name,
                                       height=height, encoding=encoding,
                                       description=description, data=data, mark=mark, **kwds)
class UnitSpecWithFrame(VegaLiteSchema):
    """Wrapper for the ``UnitSpecWithFrame`` schema definition.

    Mapping(required=[mark])

    A unit specification that also carries its own frame (width/height/view
    background).

    Parameters
    ----------
    mark : :class:`AnyMark`
        Mark type name (``"bar"``, ``"circle"``, ``"square"``, ``"tick"``,
        ``"line"``, ``"area"``, ``"point"``, ``"rule"``, ``"geoshape"``,
        ``"text"``) or a mark definition object.
    data : anyOf(:class:`Data`, None)
        Data source; ``None`` ignores the parent's data source, and omitting
        it inherits from the parent.
    description : string
        Commenting description of this mark.
    encoding : :class:`Encoding`
        Mapping from encoding channels to field definitions.
    height : anyOf(float, string, :class:`Step`)
        Height of the visualization: a number for continuous y-fields, a
        number or ``{step: number}`` for discrete/absent y-fields, or
        ``"container"`` for responsive sizing. Defaults are taken from
        ``config.view.continuousHeight`` / ``config.view.discreteHeight``.
        Not usable as ``"container"`` with row/column channels.
    name : string
        Name of the visualization for later reference.
    projection : :class:`Projection`
        Geographic projection applied to ``"geoshape"`` shape paths and to
        latitude/longitude channels of other marks.
    selection : Mapping(required=[])
        Mapping from selection names to selection definitions.
    title : anyOf(:class:`Text`, :class:`TitleParams`)
        Title for the plot.
    transform : List(:class:`Transform`)
        Data transformations such as filters and calculated fields.
    view : :class:`ViewBackground`
        Fill and stroke of the view background. Default: none (transparent).
    width : anyOf(float, string, :class:`Step`)
        Width of the visualization; same rules as ``height`` but along x,
        with defaults from ``config.view.continuousWidth`` /
        ``config.view.discreteWidth``.
    """
    _schema = {'$ref': '#/definitions/UnitSpecWithFrame'}

    def __init__(self, mark=Undefined, data=Undefined, description=Undefined, encoding=Undefined,
                 height=Undefined, name=Undefined, projection=Undefined, selection=Undefined,
                 title=Undefined, transform=Undefined, view=Undefined, width=Undefined, **kwds):
        # Forward all properties to the schema-object base, unchanged.
        super(UnitSpecWithFrame, self).__init__(width=width, view=view, transform=transform,
                                                title=title, selection=selection,
                                                projection=projection, name=name, height=height,
                                                encoding=encoding, description=description,
                                                data=data, mark=mark, **kwds)
class UrlData(DataSource):
    """Wrapper for the ``UrlData`` schema definition.

    Mapping(required=[url])

    Parameters
    ----------
    url : string
        URL from which to load the data set. Use ``format.type`` to make
        sure the loaded data is parsed correctly.
    format : :class:`DataFormat`
        Format options used when parsing the data.
    name : string
        Placeholder name used to bind data at runtime.
    """
    _schema = {'$ref': '#/definitions/UrlData'}

    def __init__(self, url=Undefined, format=Undefined, name=Undefined, **kwds):
        super(UrlData, self).__init__(name=name, format=format, url=url, **kwds)
class UtcMultiTimeUnit(MultiTimeUnit):
    """Wrapper for the ``UtcMultiTimeUnit`` schema definition.

    An enumeration of the composite UTC time units, i.e. the ``utc``-prefixed
    combinations of year/quarter/month/week/day/hours/minutes/seconds/
    milliseconds such as ``'utcyearmonth'``, ``'utcyearmonthdate'``,
    ``'utcmonthdatehoursminutes'``, ``'utchoursminutesseconds'`` or
    ``'utcsecondsmilliseconds'``. The exact list of accepted values is given
    by the referenced schema definition.
    """
    _schema = {'$ref': '#/definitions/UtcMultiTimeUnit'}

    def __init__(self, *values):
        super(UtcMultiTimeUnit, self).__init__(*values)
class UtcSingleTimeUnit(SingleTimeUnit):
    """Wrapper for the ``UtcSingleTimeUnit`` schema definition.

    An enumeration of the atomic UTC time units: ``'utcyear'``,
    ``'utcquarter'``, ``'utcmonth'``, ``'utcweek'``, ``'utcday'``,
    ``'utcdayofyear'``, ``'utcdate'``, ``'utchours'``, ``'utcminutes'``,
    ``'utcseconds'`` and ``'utcmilliseconds'``.
    """
    _schema = {'$ref': '#/definitions/UtcSingleTimeUnit'}

    def __init__(self, *values):
        super(UtcSingleTimeUnit, self).__init__(*values)
class VConcatSpecGenericSpec(Spec):
    """Wrapper for the ``VConcatSpec<GenericSpec>`` schema definition.

    Mapping(required=[vconcat])

    Base interface for a vertical concatenation specification.

    Parameters
    ----------
    vconcat : List(:class:`Spec`)
        Views to be concatenated and placed in a column.
    bounds : enum('full', 'flush')
        How sub-plot extents are calculated: ``'full'`` (the default)
        includes axes, title and legend; ``'flush'`` uses only the specified
        width/height, which helps place axis-less sub-plots in a uniform
        grid.
    center : boolean
        Whether subviews are centered relative to their rows/columns.
        Default: ``False``.
    data : anyOf(:class:`Data`, None)
        Data source; ``None`` ignores the parent's data source, and omitting
        it inherits from the parent.
    description : string
        Commenting description of this mark.
    name : string
        Name of the visualization for later reference.
    resolve : :class:`Resolve`
        Scale, axis and legend resolutions for view composition.
    spacing : float
        Spacing in pixels between the concatenated sub-views. Default: ``10``.
    title : anyOf(:class:`Text`, :class:`TitleParams`)
        Title for the plot.
    transform : List(:class:`Transform`)
        Data transformations such as filters and calculated fields.
    """
    _schema = {'$ref': '#/definitions/VConcatSpec<GenericSpec>'}

    def __init__(self, vconcat=Undefined, bounds=Undefined, center=Undefined, data=Undefined,
                 description=Undefined, name=Undefined, resolve=Undefined, spacing=Undefined,
                 title=Undefined, transform=Undefined, **kwds):
        # Forward all properties to the schema-object base, unchanged.
        super(VConcatSpecGenericSpec, self).__init__(transform=transform, title=title,
                                                     spacing=spacing, resolve=resolve, name=name,
                                                     description=description, data=data,
                                                     center=center, bounds=bounds,
                                                     vconcat=vconcat, **kwds)
class ValueDefWithConditionMarkPropFieldOrDatumDefGradientstringnull(ColorDef, MarkPropDefGradientstringnull):
    """Wrapper for the ``ValueDefWithCondition<MarkPropFieldOrDatumDef,(Gradient|string|null)>`` schema definition.

    Mapping(required=[])

    Parameters
    ----------
    condition : anyOf(:class:`ConditionalMarkPropFieldOrDatumDef`,
    :class:`ConditionalValueDefGradientstringnullExprRef`,
    List(:class:`ConditionalValueDefGradientstringnullExprRef`))
        A field definition, or one or more value definitions gated by a
        selection predicate.
    value : anyOf(:class:`Gradient`, string, None, :class:`ExprRef`)
        A constant value in the visual domain (e.g. a color name/hex string,
        a gradient definition, or an opacity between ``0`` and ``1``).
    """
    _schema = {'$ref': '#/definitions/ValueDefWithCondition<MarkPropFieldOrDatumDef,(Gradient|string|null)>'}

    def __init__(self, condition=Undefined, value=Undefined, **kwds):
        super(ValueDefWithConditionMarkPropFieldOrDatumDefGradientstringnull, self).__init__(
            value=value, condition=condition, **kwds)
class ValueDefWithConditionMarkPropFieldOrDatumDefTypeForShapestringnull(MarkPropDefstringnullTypeForShape, ShapeDef):
    """Wrapper for the ``ValueDefWithCondition<MarkPropFieldOrDatumDef<TypeForShape>,(string|null)>`` schema definition.

    Mapping(required=[])

    Parameters
    ----------
    condition : anyOf(:class:`ConditionalMarkPropFieldOrDatumDefTypeForShape`,
    :class:`ConditionalValueDefstringnullExprRef`,
    List(:class:`ConditionalValueDefstringnullExprRef`))
        A field definition, or one or more value definitions gated by a
        selection predicate.
    value : anyOf(string, None, :class:`ExprRef`)
        A constant value in the visual domain (e.g. a color name/hex string,
        a gradient definition, or an opacity between ``0`` and ``1``).
    """
    _schema = {'$ref': '#/definitions/ValueDefWithCondition<MarkPropFieldOrDatumDef<TypeForShape>,(string|null)>'}

    def __init__(self, condition=Undefined, value=Undefined, **kwds):
        super(ValueDefWithConditionMarkPropFieldOrDatumDefTypeForShapestringnull, self).__init__(
            value=value, condition=condition, **kwds)
class ValueDefWithConditionMarkPropFieldOrDatumDefnumber(MarkPropDefnumber, NumericMarkPropDef):
    """Wrapper for the ``ValueDefWithCondition<MarkPropFieldOrDatumDef,number>`` schema definition.

    Mapping(required=[])

    Parameters
    ----------
    condition : anyOf(:class:`ConditionalMarkPropFieldOrDatumDef`,
    :class:`ConditionalValueDefnumberExprRef`, List(:class:`ConditionalValueDefnumberExprRef`))
        A field definition, or one or more value definitions gated by a
        selection predicate.
    value : anyOf(float, :class:`ExprRef`)
        A constant value in the visual domain (e.g. a color name/hex string,
        a gradient definition, or an opacity between ``0`` and ``1``).
    """
    _schema = {'$ref': '#/definitions/ValueDefWithCondition<MarkPropFieldOrDatumDef,number>'}

    def __init__(self, condition=Undefined, value=Undefined, **kwds):
        super(ValueDefWithConditionMarkPropFieldOrDatumDefnumber, self).__init__(
            value=value, condition=condition, **kwds)
class ValueDefWithConditionMarkPropFieldOrDatumDefnumberArray(MarkPropDefnumberArray, NumericArrayMarkPropDef):
    """Wrapper for the ``ValueDefWithCondition<MarkPropFieldOrDatumDef,number[]>`` schema definition.

    Mapping(required=[])

    Parameters
    ----------
    condition : anyOf(:class:`ConditionalMarkPropFieldOrDatumDef`,
    :class:`ConditionalValueDefnumberArrayExprRef`,
    List(:class:`ConditionalValueDefnumberArrayExprRef`))
        A field definition, or one or more value definitions gated by a
        selection predicate.
    value : anyOf(List(float), :class:`ExprRef`)
        A constant value in the visual domain (e.g. a color name/hex string,
        a gradient definition, or an opacity between ``0`` and ``1``).
    """
    _schema = {'$ref': '#/definitions/ValueDefWithCondition<MarkPropFieldOrDatumDef,number[]>'}

    def __init__(self, condition=Undefined, value=Undefined, **kwds):
        super(ValueDefWithConditionMarkPropFieldOrDatumDefnumberArray, self).__init__(
            value=value, condition=condition, **kwds)
class ValueDefWithConditionMarkPropFieldOrDatumDefstringnull(VegaLiteSchema):
    """Wrapper for the ``ValueDefWithCondition<MarkPropFieldOrDatumDef,(string|null)>`` schema definition.

    Mapping(required=[])

    Parameters
    ----------
    condition : anyOf(:class:`ConditionalMarkPropFieldOrDatumDef`,
    :class:`ConditionalValueDefstringnullExprRef`,
    List(:class:`ConditionalValueDefstringnullExprRef`))
        A field definition, or one or more value definitions gated by a
        selection predicate.
    value : anyOf(string, None, :class:`ExprRef`)
        A constant value in the visual domain (e.g. a color name/hex string,
        a gradient definition, or an opacity between ``0`` and ``1``).
    """
    _schema = {'$ref': '#/definitions/ValueDefWithCondition<MarkPropFieldOrDatumDef,(string|null)>'}

    def __init__(self, condition=Undefined, value=Undefined, **kwds):
        super(ValueDefWithConditionMarkPropFieldOrDatumDefstringnull, self).__init__(
            value=value, condition=condition, **kwds)
class ValueDefWithConditionStringFieldDefText(TextDef):
    """Wrapper for the ``ValueDefWithCondition<StringFieldDef,Text>`` schema definition.

    Mapping(required=[])

    Parameters
    ----------
    condition : anyOf(:class:`ConditionalStringFieldDef`,
    :class:`ConditionalValueDefTextExprRef`, List(:class:`ConditionalValueDefTextExprRef`))
        A field definition, or one or more value definitions gated by a
        selection predicate.
    value : anyOf(:class:`Text`, :class:`ExprRef`)
        A constant value in the visual domain (e.g. a color name/hex string,
        a gradient definition, or an opacity between ``0`` and ``1``).
    """
    _schema = {'$ref': '#/definitions/ValueDefWithCondition<StringFieldDef,Text>'}

    def __init__(self, condition=Undefined, value=Undefined, **kwds):
        super(ValueDefWithConditionStringFieldDefText, self).__init__(
            value=value, condition=condition, **kwds)
class ValueDefnumber(VegaLiteSchema):
    """Wrapper for the ``ValueDef<number>`` schema definition.

    Mapping(required=[value])

    Definition object for a constant value of an encoding channel.

    Parameters
    ----------
    value : float
        A constant value in the visual domain (e.g. a color name/hex string,
        a gradient definition, or an opacity between ``0`` and ``1``).
    """
    _schema = {'$ref': '#/definitions/ValueDef<number>'}

    def __init__(self, value=Undefined, **kwds):
        super(ValueDefnumber, self).__init__(value=value, **kwds)
class ValueDefnumberExprRef(VegaLiteSchema):
    """Wrapper for the ``ValueDef<(number|ExprRef)>`` schema definition.

    Mapping(required=[value])

    Definition object for a constant value of an encoding channel.

    Parameters
    ----------
    value : anyOf(float, :class:`ExprRef`)
        A constant value in the visual domain (e.g. a color name/hex string,
        a gradient definition, or an opacity between ``0`` and ``1``).
    """
    _schema = {'$ref': '#/definitions/ValueDef<(number|ExprRef)>'}

    def __init__(self, value=Undefined, **kwds):
        super(ValueDefnumberExprRef, self).__init__(value=value, **kwds)
class ValueDefnumberwidthheightExprRef(VegaLiteSchema):
    """Wrapper for the ``ValueDef<(number|"width"|"height"|ExprRef)>`` schema definition.

    Mapping(required=[value])

    Definition object for a constant value of an encoding channel.

    Parameters
    ----------
    value : anyOf(float, string, string, :class:`ExprRef`)
        A constant value in the visual domain: a number, the literal strings
        ``"width"``/``"height"``, or an expression reference.
    """
    _schema = {'$ref': '#/definitions/ValueDef<(number|"width"|"height"|ExprRef)>'}

    def __init__(self, value=Undefined, **kwds):
        super(ValueDefnumberwidthheightExprRef, self).__init__(value=value, **kwds)
class Vector2DateTime(SelectionInitInterval):
    """Wrapper for the ``Vector2<DateTime>`` schema definition: a
    two-element list of :class:`DateTime` values."""
    _schema = {'$ref': '#/definitions/Vector2<DateTime>'}

    def __init__(self, *values):
        super(Vector2DateTime, self).__init__(*values)
class Vector2Vector2number(VegaLiteSchema):
    """Wrapper for the ``Vector2<Vector2<number>>`` schema definition: a
    pair of :class:`Vector2number` values."""
    _schema = {'$ref': '#/definitions/Vector2<Vector2<number>>'}

    def __init__(self, *values):
        super(Vector2Vector2number, self).__init__(*values)
class Vector2boolean(SelectionInitInterval):
    """Wrapper for the ``Vector2<boolean>`` schema definition: a two-element
    list of booleans."""
    _schema = {'$ref': '#/definitions/Vector2<boolean>'}

    def __init__(self, *values):
        super(Vector2boolean, self).__init__(*values)
class Vector2number(SelectionInitInterval):
    """Wrapper for the ``Vector2<number>`` schema definition: a two-element
    list of floats."""
    _schema = {'$ref': '#/definitions/Vector2<number>'}

    def __init__(self, *values):
        super(Vector2number, self).__init__(*values)
class Vector2string(SelectionInitInterval):
    """Wrapper for the ``Vector2<string>`` schema definition: a two-element
    list of strings."""
    _schema = {'$ref': '#/definitions/Vector2<string>'}

    def __init__(self, *values):
        super(Vector2string, self).__init__(*values)
class Vector3number(VegaLiteSchema):
    """Wrapper for the ``Vector3<number>`` schema definition: a three-element
    list of floats."""
    _schema = {'$ref': '#/definitions/Vector3<number>'}

    def __init__(self, *values):
        super(Vector3number, self).__init__(*values)
class ViewBackground(VegaLiteSchema):
    """Wrapper for the ``ViewBackground`` schema definition.

    Mapping(required=[])

    Styling of a single view's background rectangle.

    Parameters
    ----------
    cornerRadius : anyOf(float, :class:`ExprRef`)
        Corner radius of the background rectangle.
    cursor : :class:`Cursor`
        Mouse cursor shown over the view; any valid CSS cursor type.
    fill : anyOf(:class:`Color`, None, :class:`ExprRef`)
        Fill color. Default: ``undefined``.
    fillOpacity : anyOf(float, :class:`ExprRef`)
        Fill opacity.
    opacity : anyOf(float, :class:`ExprRef`)
        Overall opacity in ``[0, 1]``. Default: ``0.7`` for non-aggregate
        plots with ``point``/``tick``/``circle``/``square`` marks or layered
        ``bar`` charts, ``1`` otherwise.
    stroke : anyOf(:class:`Color`, None, :class:`ExprRef`)
        Stroke color. Default: ``"#ddd"``.
    strokeCap : anyOf(:class:`StrokeCap`, :class:`ExprRef`)
        Stroke cap style.
    strokeDash : anyOf(List(float), :class:`ExprRef`)
        Stroke dash pattern.
    strokeDashOffset : anyOf(float, :class:`ExprRef`)
        Offset into the dash pattern.
    strokeJoin : anyOf(:class:`StrokeJoin`, :class:`ExprRef`)
        Stroke join style.
    strokeMiterLimit : anyOf(float, :class:`ExprRef`)
        Miter limit for the stroke.
    strokeOpacity : anyOf(float, :class:`ExprRef`)
        Stroke opacity.
    strokeWidth : anyOf(float, :class:`ExprRef`)
        Stroke width.
    style : anyOf(string, List(string))
        Name(s) of custom styles to apply; later styles in a list override
        earlier ones. Default: ``"cell"``; explicit properties here augment
        the default style.
    """
    _schema = {'$ref': '#/definitions/ViewBackground'}

    def __init__(self, cornerRadius=Undefined, cursor=Undefined, fill=Undefined, fillOpacity=Undefined,
                 opacity=Undefined, stroke=Undefined, strokeCap=Undefined, strokeDash=Undefined,
                 strokeDashOffset=Undefined, strokeJoin=Undefined, strokeMiterLimit=Undefined,
                 strokeOpacity=Undefined, strokeWidth=Undefined, style=Undefined, **kwds):
        # Forward all properties to the schema-object base, unchanged.
        super(ViewBackground, self).__init__(style=style, strokeWidth=strokeWidth,
                                             strokeOpacity=strokeOpacity,
                                             strokeMiterLimit=strokeMiterLimit,
                                             strokeJoin=strokeJoin,
                                             strokeDashOffset=strokeDashOffset,
                                             strokeDash=strokeDash, strokeCap=strokeCap,
                                             stroke=stroke, opacity=opacity,
                                             fillOpacity=fillOpacity, fill=fill, cursor=cursor,
                                             cornerRadius=cornerRadius, **kwds)
class ViewConfig(VegaLiteSchema):
    """Wrapper for the ``ViewConfig`` schema definition.

    Mapping(required=[])

    Default sizing and background styling for single views.

    Parameters
    ----------
    clip : boolean
        Whether the view should be clipped.
    continuousHeight : float
        Default height for plots with a continuous y-field or arc marks.
        Default: ``200``.
    continuousWidth : float
        Default width for plots with a continuous x-field or arc marks.
        Default: ``200``.
    cornerRadius : anyOf(float, :class:`ExprRef`)
        Corner radius of the view background.
    cursor : :class:`Cursor`
        Mouse cursor shown over the view; any valid CSS cursor type.
    discreteHeight : anyOf(float, Mapping(required=[step]))
        Default height for non-arc plots with a discrete or absent y-field:
        a fixed number or ``{step: number}`` per discrete step. Default: a
        step size based on ``config.view.step``.
    discreteWidth : anyOf(float, Mapping(required=[step]))
        Default width for non-arc plots with a discrete or absent x-field:
        a fixed number or ``{step: number}`` per discrete step. Default: a
        step size based on ``config.view.step``.
    fill : anyOf(:class:`Color`, None, :class:`ExprRef`)
        Fill color. Default: ``undefined``.
    fillOpacity : anyOf(float, :class:`ExprRef`)
        Fill opacity.
    height : float
        Default height. **Deprecated** since Vega-Lite 4.0; use
        ``continuousHeight``/``discreteHeight`` instead.
    opacity : anyOf(float, :class:`ExprRef`)
        Overall opacity in ``[0, 1]``. Default: ``0.7`` for non-aggregate
        plots with ``point``/``tick``/``circle``/``square`` marks or layered
        ``bar`` charts, ``1`` otherwise.
    step : float
        Default step size for discrete x-/y- fields.
    stroke : anyOf(:class:`Color`, None, :class:`ExprRef`)
        Stroke color. Default: ``"#ddd"``.
    strokeCap : anyOf(:class:`StrokeCap`, :class:`ExprRef`)
        Stroke cap style.
    strokeDash : anyOf(List(float), :class:`ExprRef`)
        Stroke dash pattern.
    strokeDashOffset : anyOf(float, :class:`ExprRef`)
        Offset into the dash pattern.
    strokeJoin : anyOf(:class:`StrokeJoin`, :class:`ExprRef`)
        Stroke join style.
    strokeMiterLimit : anyOf(float, :class:`ExprRef`)
        Miter limit for the stroke.
    strokeOpacity : anyOf(float, :class:`ExprRef`)
        Stroke opacity.
    strokeWidth : anyOf(float, :class:`ExprRef`)
        Stroke width.
    width : float
        Default width. **Deprecated** since Vega-Lite 4.0; use
        ``continuousWidth``/``discreteWidth`` instead.
    """
    _schema = {'$ref': '#/definitions/ViewConfig'}

    def __init__(self, clip=Undefined, continuousHeight=Undefined, continuousWidth=Undefined,
                 cornerRadius=Undefined, cursor=Undefined, discreteHeight=Undefined,
                 discreteWidth=Undefined, fill=Undefined, fillOpacity=Undefined, height=Undefined,
                 opacity=Undefined, step=Undefined, stroke=Undefined, strokeCap=Undefined,
                 strokeDash=Undefined, strokeDashOffset=Undefined, strokeJoin=Undefined,
                 strokeMiterLimit=Undefined, strokeOpacity=Undefined, strokeWidth=Undefined,
                 width=Undefined, **kwds):
        # Forward all properties to the schema-object base, unchanged.
        super(ViewConfig, self).__init__(width=width, strokeWidth=strokeWidth,
                                         strokeOpacity=strokeOpacity,
                                         strokeMiterLimit=strokeMiterLimit, strokeJoin=strokeJoin,
                                         strokeDashOffset=strokeDashOffset, strokeDash=strokeDash,
                                         strokeCap=strokeCap, stroke=stroke, step=step,
                                         opacity=opacity, height=height, fillOpacity=fillOpacity,
                                         fill=fill, discreteWidth=discreteWidth,
                                         discreteHeight=discreteHeight, cursor=cursor,
                                         cornerRadius=cornerRadius,
                                         continuousWidth=continuousWidth,
                                         continuousHeight=continuousHeight, clip=clip, **kwds)
class WindowEventType(VegaLiteSchema):
"""WindowEventType schema wrapper
anyOf(:class:`EventType`, string)
"""
_schema = {'$ref': '#/definitions/WindowEventType'}
def __init__(self, *args, **kwds):
super(WindowEventType, self).__init__(*args, **kwds)
class EventType(WindowEventType):
"""EventType schema wrapper
enum('click', 'dblclick', 'dragenter', 'dragleave', 'dragover', 'keydown', 'keypress',
'keyup', 'mousedown', 'mousemove', 'mouseout', 'mouseover', 'mouseup', 'mousewheel',
'timer', 'touchend', 'touchmove', 'touchstart', 'wheel')
"""
_schema = {'$ref': '#/definitions/EventType'}
def __init__(self, *args):
super(EventType, self).__init__(*args)
class WindowFieldDef(VegaLiteSchema):
"""WindowFieldDef schema wrapper
Mapping(required=[op, as])
Attributes
----------
op : anyOf(:class:`AggregateOp`, :class:`WindowOnlyOp`)
The window or aggregation operation to apply within a window (e.g., ``"rank"``,
``"lead"``, ``"sum"``, ``"average"`` or ``"count"`` ). See the list of all supported
operations `here <https://vega.github.io/vega-lite/docs/window.html#ops>`__.
field : :class:`FieldName`
The data field for which to compute the aggregate or window function. This can be
omitted for window functions that do not operate over a field such as ``"count"``,
``"rank"``, ``"dense_rank"``.
param : float
Parameter values for the window functions. Parameter values can be omitted for
operations that do not accept a parameter.
See the list of all supported operations and their parameters `here
<https://vega.github.io/vega-lite/docs/transforms/window.html>`__.
as : :class:`FieldName`
The output name for the window operation.
"""
_schema = {'$ref': '#/definitions/WindowFieldDef'}
def __init__(self, op=Undefined, field=Undefined, param=Undefined, **kwds):
super(WindowFieldDef, self).__init__(op=op, field=field, param=param, **kwds)
class WindowOnlyOp(VegaLiteSchema):
"""WindowOnlyOp schema wrapper
enum('row_number', 'rank', 'dense_rank', 'percent_rank', 'cume_dist', 'ntile', 'lag',
'lead', 'first_value', 'last_value', 'nth_value')
"""
_schema = {'$ref': '#/definitions/WindowOnlyOp'}
def __init__(self, *args):
super(WindowOnlyOp, self).__init__(*args)
class WindowTransform(Transform):
"""WindowTransform schema wrapper
Mapping(required=[window])
Attributes
----------
window : List(:class:`WindowFieldDef`)
The definition of the fields in the window, and what calculations to use.
frame : List(anyOf(None, float))
A frame specification as a two-element array indicating how the sliding window
should proceed. The array entries should either be a number indicating the offset
from the current data object, or null to indicate unbounded rows preceding or
following the current data object. The default value is ``[null, 0]``, indicating
that the sliding window includes the current object and all preceding objects. The
value ``[-5, 5]`` indicates that the window should include five objects preceding
and five objects following the current object. Finally, ``[null, null]`` indicates
that the window frame should always include all data objects. If you this frame and
want to assign the same value to add objects, you can use the simpler `join
aggregate transform <https://vega.github.io/vega-lite/docs/joinaggregate.html>`__.
The only operators affected are the aggregation operations and the ``first_value``,
``last_value``, and ``nth_value`` window operations. The other window operations are
not affected by this.
**Default value:** : ``[null, 0]`` (includes the current object and all preceding
objects)
groupby : List(:class:`FieldName`)
The data fields for partitioning the data objects into separate windows. If
unspecified, all data points will be in a single window.
ignorePeers : boolean
Indicates if the sliding window frame should ignore peer values (data that are
considered identical by the sort criteria). The default is false, causing the window
frame to expand to include all peer values. If set to true, the window frame will be
defined by offset values only. This setting only affects those operations that
depend on the window frame, namely aggregation operations and the first_value,
last_value, and nth_value window operations.
**Default value:** ``false``
sort : List(:class:`SortField`)
A sort field definition for sorting data objects within a window. If two data
objects are considered equal by the comparator, they are considered "peer" values of
equal rank. If sort is not specified, the order is undefined: data objects are
processed in the order they are observed and none are considered peers (the
ignorePeers parameter is ignored and treated as if set to ``true`` ).
"""
_schema = {'$ref': '#/definitions/WindowTransform'}
def __init__(self, window=Undefined, frame=Undefined, groupby=Undefined, ignorePeers=Undefined,
sort=Undefined, **kwds):
super(WindowTransform, self).__init__(window=window, frame=frame, groupby=groupby,
ignorePeers=ignorePeers, sort=sort, **kwds)
| bsd-3-clause | ca54c4686c1be0eeb6a7f55202a9e142 | 47.884853 | 118 | 0.623998 | 4.136145 | false | false | false | false |
mozilla/badges.mozilla.org | scripts/peep.py | 4 | 31176 | #!/usr/bin/env python
"""peep ("prudently examine every package") verifies that packages conform to a
trusted, locally stored hash and only then installs them::
peep install -r requirements.txt
This makes your deployments verifiably repeatable without having to maintain a
local PyPI mirror or use a vendor lib. Just update the version numbers and
hashes in requirements.txt, and you're all set.
"""
from __future__ import print_function
try:
xrange = xrange
except NameError:
xrange = range
from base64 import urlsafe_b64encode
import cgi
from collections import defaultdict
from functools import wraps
from hashlib import sha256
from itertools import chain
from linecache import getline
import mimetypes
from optparse import OptionParser
from os import listdir
from os.path import join, basename, splitext, isdir
from pickle import dumps, loads
import re
import sys
from shutil import rmtree, copy
from sys import argv, exit
from tempfile import mkdtemp
import traceback
try:
from urllib2 import build_opener, HTTPHandler, HTTPSHandler, HTTPError
except ImportError:
from urllib.request import build_opener, HTTPHandler, HTTPSHandler
from urllib.error import HTTPError
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse # 3.4
# TODO: Probably use six to make urllib stuff work across 2/3.
from pkg_resources import require, VersionConflict, DistributionNotFound
# We don't admit our dependency on pip in setup.py, lest a naive user simply
# say `pip install peep.tar.gz` and thus pull down an untrusted copy of pip
# from PyPI. Instead, we make sure it's installed and new enough here and spit
# out an error message if not:
def activate(specifier):
"""Make a compatible version of pip importable. Raise a RuntimeError if we
couldn't."""
try:
for distro in require(specifier):
distro.activate()
except (VersionConflict, DistributionNotFound):
raise RuntimeError('The installed version of pip is too old; peep '
'requires ' + specifier)
activate('pip>=0.6.2') # Before 0.6.2, the log module wasn't there, so some
# of our monkeypatching fails. It probably wouldn't be
# much work to support even earlier, though.
import pip
from pip.commands.install import InstallCommand
try:
from pip.download import url_to_path # 1.5.6
except ImportError:
try:
from pip.util import url_to_path # 0.7.0
except ImportError:
from pip.util import url_to_filename as url_to_path # 0.6.2
from pip.index import PackageFinder, Link
try:
from pip.log import logger
except ImportError:
from pip import logger # 6.0
from pip.req import parse_requirements
__version__ = 2, 1, 1
ITS_FINE_ITS_FINE = 0
SOMETHING_WENT_WRONG = 1
# "Traditional" for command-line errors according to optparse docs:
COMMAND_LINE_ERROR = 2
ARCHIVE_EXTENSIONS = ('.tar.bz2', '.tar.gz', '.tgz', '.tar', '.zip')
MARKER = object()
class PipException(Exception):
"""When I delegated to pip, it exited with an error."""
def __init__(self, error_code):
self.error_code = error_code
class UnsupportedRequirementError(Exception):
"""An unsupported line was encountered in a requirements file."""
class DownloadError(Exception):
def __init__(self, link, exc):
self.link = link
self.reason = str(exc)
def __str__(self):
return 'Downloading %s failed: %s' % (self.link, self.reason)
def encoded_hash(sha):
"""Return a short, 7-bit-safe representation of a hash.
If you pass a sha256, this results in the hash algorithm that the Wheel
format (PEP 427) uses, except here it's intended to be run across the
downloaded archive before unpacking.
"""
return urlsafe_b64encode(sha.digest()).decode('ascii').rstrip('=')
def run_pip(initial_args):
"""Delegate to pip the given args (starting with the subcommand), and raise
``PipException`` if something goes wrong."""
status_code = pip.main(initial_args)
# Clear out the registrations in the pip "logger" singleton. Otherwise,
# loggers keep getting appended to it with every run. Pip assumes only one
# command invocation will happen per interpreter lifetime.
logger.consumers = []
if status_code:
raise PipException(status_code)
def hash_of_file(path):
"""Return the hash of a downloaded file."""
with open(path, 'rb') as archive:
sha = sha256()
while True:
data = archive.read(2 ** 20)
if not data:
break
sha.update(data)
return encoded_hash(sha)
def is_git_sha(text):
"""Return whether this is probably a git sha"""
# Handle both the full sha as well as the 7-character abbreviation
if len(text) in (40, 7):
try:
int(text, 16)
return True
except ValueError:
pass
return False
def filename_from_url(url):
parsed = urlparse(url)
path = parsed.path
return path.split('/')[-1]
def requirement_args(argv, want_paths=False, want_other=False):
"""Return an iterable of filtered arguments.
:arg argv: Arguments, starting after the subcommand
:arg want_paths: If True, the returned iterable includes the paths to any
requirements files following a ``-r`` or ``--requirement`` option.
:arg want_other: If True, the returned iterable includes the args that are
not a requirement-file path or a ``-r`` or ``--requirement`` flag.
"""
was_r = False
for arg in argv:
# Allow for requirements files named "-r", don't freak out if there's a
# trailing "-r", etc.
if was_r:
if want_paths:
yield arg
was_r = False
elif arg in ['-r', '--requirement']:
was_r = True
else:
if want_other:
yield arg
HASH_COMMENT_RE = re.compile(
r"""
\s*\#\s+ # Lines that start with a '#'
(?P<hash_type>sha256):\s+ # Hash type is hardcoded to be sha256 for now.
(?P<hash>[^\s]+) # Hashes can be anything except '#' or spaces.
\s* # Suck up whitespace before the comment or
# just trailing whitespace if there is no
# comment. Also strip trailing newlines.
(?:\#(?P<comment>.*))? # Comments can be anything after a whitespace+#
$""", re.X) # and are optional.
def peep_hash(argv):
"""Return the peep hash of one or more files, returning a shell status code
or raising a PipException.
:arg argv: The commandline args, starting after the subcommand
"""
parser = OptionParser(
usage='usage: %prog hash file [file ...]',
description='Print a peep hash line for one or more files: for '
'example, "# sha256: '
'oz42dZy6Gowxw8AelDtO4gRgTW_xPdooH484k7I5EOY".')
_, paths = parser.parse_args(args=argv)
if paths:
for path in paths:
print('# sha256:', hash_of_file(path))
return ITS_FINE_ITS_FINE
else:
parser.print_usage()
return COMMAND_LINE_ERROR
class EmptyOptions(object):
"""Fake optparse options for compatibility with pip<1.2
pip<1.2 had a bug in parse_requirements() in which the ``options`` kwarg
was required. We work around that by passing it a mock object.
"""
default_vcs = None
skip_requirements_regex = None
isolated_mode = False
def memoize(func):
"""Memoize a method that should return the same result every time on a
given instance.
"""
@wraps(func)
def memoizer(self):
if not hasattr(self, '_cache'):
self._cache = {}
if func.__name__ not in self._cache:
self._cache[func.__name__] = func(self)
return self._cache[func.__name__]
return memoizer
def package_finder(argv):
"""Return a PackageFinder respecting command-line options.
:arg argv: Everything after the subcommand
"""
# We instantiate an InstallCommand and then use some of its private
# machinery--its arg parser--for our own purposes, like a virus. This
# approach is portable across many pip versions, where more fine-grained
# ones are not. Ignoring options that don't exist on the parser (for
# instance, --use-wheel) gives us a straightforward method of backward
# compatibility.
try:
command = InstallCommand()
except TypeError:
# This is likely pip 1.3.0's "__init__() takes exactly 2 arguments (1
# given)" error. In that version, InstallCommand takes a top=level
# parser passed in from outside.
from pip.baseparser import create_main_parser
command = InstallCommand(create_main_parser())
# The downside is that it essentially ruins the InstallCommand class for
# further use. Calling out to pip.main() within the same interpreter, for
# example, would result in arguments parsed this time turning up there.
# Thus, we deepcopy the arg parser so we don't trash its singletons. Of
# course, deepcopy doesn't work on these objects, because they contain
# uncopyable regex patterns, so we pickle and unpickle instead. Fun!
options, _ = loads(dumps(command.parser)).parse_args(argv)
# Carry over PackageFinder kwargs that have [about] the same names as
# options attr names:
possible_options = [
'find_links', 'use_wheel', 'allow_external', 'allow_unverified',
'allow_all_external', ('allow_all_prereleases', 'pre'),
'process_dependency_links']
kwargs = {}
for option in possible_options:
kw, attr = option if isinstance(option, tuple) else (option, option)
value = getattr(options, attr, MARKER)
if value is not MARKER:
kwargs[kw] = value
# Figure out index_urls:
index_urls = [options.index_url] + options.extra_index_urls
if options.no_index:
index_urls = []
index_urls += getattr(options, 'mirrors', [])
# If pip is new enough to have a PipSession, initialize one, since
# PackageFinder requires it:
if hasattr(command, '_build_session'):
kwargs['session'] = command._build_session(options)
return PackageFinder(index_urls=index_urls, **kwargs)
class DownloadedReq(object):
"""A wrapper around InstallRequirement which offers additional information
based on downloading and examining a corresponding package archive
These are conceptually immutable, so we can get away with memoizing
expensive things.
"""
def __init__(self, req, argv):
"""Download a requirement, compare its hashes, and return a subclass
of DownloadedReq depending on its state.
:arg req: The InstallRequirement I am based on
:arg argv: The args, starting after the subcommand
"""
self._req = req
self._argv = argv
# We use a separate temp dir for each requirement so requirements
# (from different indices) that happen to have the same archive names
# don't overwrite each other, leading to a security hole in which the
# latter is a hash mismatch, the former has already passed the
# comparison, and the latter gets installed.
self._temp_path = mkdtemp(prefix='peep-')
# Think of DownloadedReq as a one-shot state machine. It's an abstract
# class that ratchets forward to being one of its own subclasses,
# depending on its package status. Then it doesn't move again.
self.__class__ = self._class()
def dispose(self):
"""Delete temp files and dirs I've made. Render myself useless.
Do not call further methods on me after calling dispose().
"""
rmtree(self._temp_path)
def _version(self):
"""Deduce the version number of the downloaded package from its filename."""
# TODO: Can we delete this method and just print the line from the
# reqs file verbatim instead?
def version_of_archive(filename, package_name):
# Since we know the project_name, we can strip that off the left, strip
# any archive extensions off the right, and take the rest as the
# version.
for ext in ARCHIVE_EXTENSIONS:
if filename.endswith(ext):
filename = filename[:-len(ext)]
break
# Handle github sha tarball downloads.
if is_git_sha(filename):
filename = package_name + '-' + filename
if not filename.lower().replace('_', '-').startswith(package_name.lower()):
# TODO: Should we replace runs of [^a-zA-Z0-9.], not just _, with -?
give_up(filename, package_name)
return filename[len(package_name) + 1:] # Strip off '-' before version.
def version_of_wheel(filename, package_name):
# For Wheel files (http://legacy.python.org/dev/peps/pep-0427/#file-
# name-convention) we know the format bits are '-' separated.
whl_package_name, version, _rest = filename.split('-', 2)
# Do the alteration to package_name from PEP 427:
our_package_name = re.sub(r'[^\w\d.]+', '_', package_name, re.UNICODE)
if whl_package_name != our_package_name:
give_up(filename, whl_package_name)
return version
def give_up(filename, package_name):
raise RuntimeError("The archive '%s' didn't start with the package name '%s', so I couldn't figure out the version number. My bad; improve me." %
(filename, package_name))
get_version = (version_of_wheel
if self._downloaded_filename().endswith('.whl')
else version_of_archive)
return get_version(self._downloaded_filename(), self._project_name())
def _is_always_unsatisfied(self):
"""Returns whether this requirement is always unsatisfied
This would happen in cases where we can't determine the version
from the filename.
"""
# If this is a github sha tarball, then it is always unsatisfied
# because the url has a commit sha in it and not the version
# number.
url = self._req.url
if url:
filename = filename_from_url(url)
if filename.endswith(ARCHIVE_EXTENSIONS):
filename, ext = splitext(filename)
if is_git_sha(filename):
return True
return False
def _path_and_line(self):
"""Return the path and line number of the file from which our
InstallRequirement came.
"""
path, line = (re.match(r'-r (.*) \(line (\d+)\)$',
self._req.comes_from).groups())
return path, int(line)
@memoize # Avoid hitting the file[cache] over and over.
def _expected_hashes(self):
"""Return a list of known-good hashes for this package."""
def hashes_above(path, line_number):
"""Yield hashes from contiguous comment lines before line
``line_number``.
"""
for line_number in xrange(line_number - 1, 0, -1):
line = getline(path, line_number)
match = HASH_COMMENT_RE.match(line)
if match:
yield match.groupdict()['hash']
elif not line.lstrip().startswith('#'):
# If we hit a non-comment line, abort
break
hashes = list(hashes_above(*self._path_and_line()))
hashes.reverse() # because we read them backwards
return hashes
def _download(self, link):
"""Download a file, and return its name within my temp dir.
This does no verification of HTTPS certs, but our checking hashes
makes that largely unimportant. It would be nice to be able to use the
requests lib, which can verify certs, but it is guaranteed to be
available only in pip >= 1.5.
This also drops support for proxies and basic auth, though those could
be added back in.
"""
# Based on pip 1.4.1's URLOpener but with cert verification removed
def opener(is_https):
if is_https:
opener = build_opener(HTTPSHandler())
# Strip out HTTPHandler to prevent MITM spoof:
for handler in opener.handlers:
if isinstance(handler, HTTPHandler):
opener.handlers.remove(handler)
else:
opener = build_opener()
return opener
# Descended from unpack_http_url() in pip 1.4.1
def best_filename(link, response):
"""Return the most informative possible filename for a download,
ideally with a proper extension.
"""
content_type = response.info().get('content-type', '')
filename = link.filename # fallback
# Have a look at the Content-Disposition header for a better guess:
content_disposition = response.info().get('content-disposition')
if content_disposition:
type, params = cgi.parse_header(content_disposition)
# We use ``or`` here because we don't want to use an "empty" value
# from the filename param:
filename = params.get('filename') or filename
ext = splitext(filename)[1]
if not ext:
ext = mimetypes.guess_extension(content_type)
if ext:
filename += ext
if not ext and link.url != response.geturl():
ext = splitext(response.geturl())[1]
if ext:
filename += ext
return filename
# Descended from _download_url() in pip 1.4.1
def pipe_to_file(response, path):
"""Pull the data off an HTTP response, and shove it in a new file."""
# TODO: Indicate progress.
with open(path, 'wb') as file:
while True:
chunk = response.read(4096)
if not chunk:
break
file.write(chunk)
url = link.url.split('#', 1)[0]
try:
response = opener(urlparse(url).scheme != 'http').open(url)
except (HTTPError, IOError) as exc:
raise DownloadError(link, exc)
filename = best_filename(link, response)
pipe_to_file(response, join(self._temp_path, filename))
return filename
# Based on req_set.prepare_files() in pip bb2a8428d4aebc8d313d05d590f386fa3f0bbd0f
@memoize # Avoid re-downloading.
def _downloaded_filename(self):
"""Download the package's archive if necessary, and return its
filename.
--no-deps is implied, as we have reimplemented the bits that would
ordinarily do dependency resolution.
"""
# Peep doesn't support requirements that don't come down as a single
# file, because it can't hash them. Thus, it doesn't support editable
# requirements, because pip itself doesn't support editable
# requirements except for "local projects or a VCS url". Nor does it
# support VCS requirements yet, because we haven't yet come up with a
# portable, deterministic way to hash them. In summary, all we support
# is == requirements and tarballs/zips/etc.
# TODO: Stop on reqs that are editable or aren't ==.
finder = package_finder(self._argv)
# If the requirement isn't already specified as a URL, get a URL
# from an index:
link = (finder.find_requirement(self._req, upgrade=False)
if self._req.url is None
else Link(self._req.url))
if link:
lower_scheme = link.scheme.lower() # pip lower()s it for some reason.
if lower_scheme == 'http' or lower_scheme == 'https':
file_path = self._download(link)
return basename(file_path)
elif lower_scheme == 'file':
# The following is inspired by pip's unpack_file_url():
link_path = url_to_path(link.url_without_fragment)
if isdir(link_path):
raise UnsupportedRequirementError(
"%s: %s is a directory. So that it can compute "
"a hash, peep supports only filesystem paths which "
"point to files" %
(self._req, link.url_without_fragment))
else:
copy(link_path, self._temp_path)
return basename(link_path)
else:
raise UnsupportedRequirementError(
"%s: The download link, %s, would not result in a file "
"that can be hashed. Peep supports only == requirements, "
"file:// URLs pointing to files (not folders), and "
"http:// and https:// URLs pointing to tarballs, zips, "
"etc." % (self._req, link.url))
else:
raise UnsupportedRequirementError(
"%s: couldn't determine where to download this requirement from."
% (self._req,))
def install(self):
"""Install the package I represent, without dependencies.
Obey typical pip-install options passed in on the command line.
"""
other_args = list(requirement_args(self._argv, want_other=True))
archive_path = join(self._temp_path, self._downloaded_filename())
# -U so it installs whether pip deems the requirement "satisfied" or
# not. This is necessary for GitHub-sourced zips, which change without
# their version numbers changing.
run_pip(['install'] + other_args + ['--no-deps', '-U', archive_path])
@memoize
def _actual_hash(self):
"""Download the package's archive if necessary, and return its hash."""
return hash_of_file(join(self._temp_path, self._downloaded_filename()))
def _project_name(self):
"""Return the inner Requirement's "unsafe name".
Raise ValueError if there is no name.
"""
name = getattr(self._req.req, 'project_name', '')
if name:
return name
raise ValueError('Requirement has no project_name.')
def _name(self):
return self._req.name
def _url(self):
return self._req.url
@memoize # Avoid re-running expensive check_if_exists().
def _is_satisfied(self):
self._req.check_if_exists()
return (self._req.satisfied_by and
not self._is_always_unsatisfied())
def _class(self):
"""Return the class I should be, spanning a continuum of goodness."""
try:
self._project_name()
except ValueError:
return MalformedReq
if self._is_satisfied():
return SatisfiedReq
if not self._expected_hashes():
return MissingReq
if self._actual_hash() not in self._expected_hashes():
return MismatchedReq
return InstallableReq
@classmethod
def foot(cls):
"""Return the text to be printed once, after all of the errors from
classes of my type are printed.
"""
return ''
class MalformedReq(DownloadedReq):
"""A requirement whose package name could not be determined"""
@classmethod
def head(cls):
return 'The following requirements could not be processed:\n'
def error(self):
return '* Unable to determine package name from URL %s; add #egg=' % self._url()
class MissingReq(DownloadedReq):
"""A requirement for which no hashes were specified in the requirements file"""
@classmethod
def head(cls):
return ('The following packages had no hashes specified in the requirements file, which\n'
'leaves them open to tampering. Vet these packages to your satisfaction, then\n'
'add these "sha256" lines like so:\n\n')
def error(self):
if self._url():
line = self._url()
if self._name() not in filename_from_url(self._url()):
line = '%s#egg=%s' % (line, self._name())
else:
line = '%s==%s' % (self._name(), self._version())
return '# sha256: %s\n%s\n' % (self._actual_hash(), line)
class MismatchedReq(DownloadedReq):
"""A requirement for which the downloaded file didn't match any of my hashes."""
@classmethod
def head(cls):
return ("THE FOLLOWING PACKAGES DIDN'T MATCH THE HASHES SPECIFIED IN THE REQUIREMENTS\n"
"FILE. If you have updated the package versions, update the hashes. If not,\n"
"freak out, because someone has tampered with the packages.\n\n")
def error(self):
preamble = ' %s: expected%s' % (
self._project_name(),
' one of' if len(self._expected_hashes()) > 1 else '')
return '%s %s\n%s got %s' % (
preamble,
('\n' + ' ' * (len(preamble) + 1)).join(self._expected_hashes()),
' ' * (len(preamble) - 4),
self._actual_hash())
@classmethod
def foot(cls):
return '\n'
class SatisfiedReq(DownloadedReq):
"""A requirement which turned out to be already installed"""
@classmethod
def head(cls):
return ("These packages were already installed, so we didn't need to download or build\n"
"them again. If you installed them with peep in the first place, you should be\n"
"safe. If not, uninstall them, then re-attempt your install with peep.\n")
def error(self):
return ' %s' % (self._req,)
class InstallableReq(DownloadedReq):
"""A requirement whose hash matched and can be safely installed"""
# DownloadedReq subclasses that indicate an error that should keep us from
# going forward with installation, in the order in which their errors should
# be reported:
ERROR_CLASSES = [MismatchedReq, MissingReq, MalformedReq]
def bucket(things, key):
"""Return a map of key -> list of things."""
ret = defaultdict(list)
for thing in things:
ret[key(thing)].append(thing)
return ret
def first_every_last(iterable, first, every, last):
"""Execute something before the first item of iter, something else for each
item, and a third thing after the last.
If there are no items in the iterable, don't execute anything.
"""
did_first = False
for item in iterable:
if not did_first:
first(item)
every(item)
if did_first:
last(item)
def downloaded_reqs_from_path(path, argv):
"""Return a list of DownloadedReqs representing the requirements parsed
out of a given requirements file.
:arg path: The path to the requirements file
:arg argv: The commandline args, starting after the subcommand
"""
def downloaded_reqs(parsed_reqs):
"""Just avoid repeating this list comp."""
return [DownloadedReq(req, argv) for req in parsed_reqs]
try:
return downloaded_reqs(parse_requirements(path, options=EmptyOptions()))
except TypeError:
# session is a required kwarg as of pip 6.0 and will raise
# a TypeError if missing. It needs to be a PipSession instance,
# but in older versions we can't import it from pip.download
# (nor do we need it at all) so we only import it in this except block
from pip.download import PipSession
return downloaded_reqs(parse_requirements(
path, options=EmptyOptions(), session=PipSession()))
def peep_install(argv):
"""Perform the ``peep install`` subcommand, returning a shell status code
or raising a PipException.
:arg argv: The commandline args, starting after the subcommand
"""
output = []
#out = output.append
out = print
reqs = []
try:
req_paths = list(requirement_args(argv, want_paths=True))
if not req_paths:
out("You have to specify one or more requirements files with the -r option, because\n"
"otherwise there's nowhere for peep to look up the hashes.\n")
return COMMAND_LINE_ERROR
# We're a "peep install" command, and we have some requirement paths.
reqs = list(chain.from_iterable(
downloaded_reqs_from_path(path, argv)
for path in req_paths))
buckets = bucket(reqs, lambda r: r.__class__)
# Skip a line after pip's "Cleaning up..." so the important stuff
# stands out:
if any(buckets[b] for b in ERROR_CLASSES):
out('\n')
printers = (lambda r: out(r.head()),
lambda r: out(r.error() + '\n'),
lambda r: out(r.foot()))
for c in ERROR_CLASSES:
first_every_last(buckets[c], *printers)
if any(buckets[b] for b in ERROR_CLASSES):
out('-------------------------------\n'
'Not proceeding to installation.\n')
return SOMETHING_WENT_WRONG
else:
for req in buckets[InstallableReq]:
req.install()
first_every_last(buckets[SatisfiedReq], *printers)
return ITS_FINE_ITS_FINE
except (UnsupportedRequirementError, DownloadError) as exc:
out(str(exc))
return SOMETHING_WENT_WRONG
finally:
for req in reqs:
req.dispose()
print(''.join(output))
def main():
"""Be the top-level entrypoint. Return a shell status code."""
commands = {'hash': peep_hash,
'install': peep_install}
try:
if len(argv) >= 2 and argv[1] in commands:
return commands[argv[1]](argv[2:])
else:
# Fall through to top-level pip main() for everything else:
return pip.main()
except PipException as exc:
return exc.error_code
def exception_handler(exc_type, exc_value, exc_tb):
print('Oh no! Peep had a problem while trying to do stuff. Please write up a bug report')
print('with the specifics so we can fix it:')
print()
print('https://github.com/erikrose/peep/issues/new')
print()
print('Here are some particulars you can copy and paste into the bug report:')
print()
print('---')
print('peep:', repr(__version__))
print('python:', repr(sys.version))
print('pip:', repr(pip.__version__))
print('Command line: ', repr(sys.argv))
print(
''.join(traceback.format_exception(exc_type, exc_value, exc_tb)))
print('---')
if __name__ == '__main__':
try:
exit(main())
except Exception:
exception_handler(*sys.exc_info())
exit(SOMETHING_WENT_WRONG)
| bsd-3-clause | f978abde1c75b2b45cb59884e7a21005 | 35.807556 | 157 | 0.609892 | 4.240479 | false | false | false | false |
alphacsc/alphacsc | benchmarks/function_benchmarks/convolve_ztz.py | 1 | 4302 | import time
import numba
import numpy as np
import pandas as pd
from joblib import Memory
import matplotlib.pyplot as plt
from scipy.stats.mstats import gmean
memory = Memory(location='', verbose=0)
@numba.jit((numba.float64[:, :, :], numba.float64[:, :]), cache=True)
# NOTE(review): no nopython=True here — on recent numba (>= 0.59) a plain
# @jit defaults to nopython mode; confirm the intended compilation mode is
# still what this benchmark measures.
def numpy_convolve_uv(ztz, uv):
    """Compute the multivariate (valid) convolution of ztz and D
    Parameters
    ----------
    ztz: array, shape = (n_atoms, n_atoms, 2 * n_times_atom - 1)
        Activations
    uv: array, shape = (n_atoms, n_channels + n_times_atom)
        Dictionnary
    Returns
    -------
    G : array, shape = (n_atoms, n_channels, n_times_atom)
        Gradient
    """
    assert uv.ndim == 2
    n_times_atom = (ztz.shape[2] + 1) // 2
    n_atoms = ztz.shape[0]
    n_channels = uv.shape[1] - n_times_atom
    # Split the rank-1 dictionary into spatial (u) and temporal (v) parts.
    u = uv[:, :n_channels]
    v = uv[:, n_channels:]
    G = np.zeros((n_atoms, n_channels, n_times_atom))
    for k0 in range(n_atoms):
        for k1 in range(n_atoms):
            # Temporal valid convolution with v, broadcast over channels
            # through the outer product with u.
            G[k0, :, :] += (
                np.convolve(ztz[k0, k1], v[k1], mode='valid')[None, :]
                * u[k1, :][:, None])
    return G
@numba.jit((numba.float64[:, :, :], numba.float64[:, :]), cache=True,
           nopython=True)
def numpy_convolve_uv_nopython(ztz, uv):
    """Compute the multivariate (valid) convolution of ztz and D
    Parameters
    ----------
    ztz: array, shape = (n_atoms, n_atoms, 2 * n_times_atom - 1)
        Activations
    uv: array, shape = (n_atoms, n_channels + n_times_atom)
        Dictionnary
    Returns
    -------
    G : array, shape = (n_atoms, n_channels, n_times_atom)
        Gradient
    """
    assert uv.ndim == 2
    n_times_atom = (ztz.shape[2] + 1) // 2
    n_atoms = ztz.shape[0]
    n_channels = uv.shape[1] - n_times_atom
    u = uv[:, :n_channels]
    # v is time-reversed so the explicit sum below implements the same
    # valid convolution as np.convolve(..., mode='valid').
    v = uv[:, n_channels:][:, ::-1]
    G = np.zeros((n_atoms, n_channels, n_times_atom))
    for k0 in range(n_atoms):
        for k1 in range(n_atoms):
            for t in range(n_times_atom):
                G[k0, :, t] += (
                    np.sum(ztz[k0, k1, t:t + n_times_atom] * v[k1]) * u[k1, :])
    return G
# Implementations compared below; the first entry serves as the reference
# in `test_equality`.
all_func = [
    # naive_sum,
    numpy_convolve_uv,
    numpy_convolve_uv_nopython,
]
def test_equality():
    """All benchmarked implementations must agree with the first one."""
    n_atoms, n_channels, n_times_atom = 5, 10, 50
    shape_ztz = (n_atoms, n_atoms, 2 * n_times_atom - 1)
    ztz = np.random.randn(*shape_ztz)
    uv = np.random.randn(n_atoms, n_channels + n_times_atom)
    expected = all_func[0](ztz, uv)
    for candidate in all_func:
        assert np.allclose(candidate(ztz, uv), expected)
@memory.cache
def run_one(n_atoms, n_channels, n_times_atom, func):
    """Time a single call of ``func`` on random inputs of the given size."""
    ztz = np.random.randn(n_atoms, n_atoms, 2 * n_times_atom - 1)
    uv = np.random.randn(n_atoms, n_channels + n_times_atom)
    tic = time.time()
    func(ztz, uv)
    duration = time.time() - tic
    # Strip a single leading underscore from private helpers for legends.
    label = func.__name__
    if label.startswith('_'):
        label = label[1:]
    return (n_atoms, n_channels, n_times_atom, label, duration)
def benchmark():
    """Time every implementation over a grid of problem sizes and plot the
    geometric-mean durations on log-log axes."""
    n_atoms_range = [1, 3, 9]
    n_channels_range = [1, 25, 50, 100, 200]
    n_times_atom_range = [8, 32, 128]
    # Total number of (size, function) combinations, for progress display.
    n_runs = (len(n_atoms_range) * len(n_channels_range) * len(
        n_times_atom_range) * len(all_func))
    k = 0
    results = []
    for n_atoms in n_atoms_range:
        for n_channels in n_channels_range:
            for n_times_atom in n_times_atom_range:
                for func in all_func:
                    print('%d/%d, %s' % (k, n_runs, func.__name__))
                    k += 1
                    # run_one is joblib-cached, so re-runs are cheap.
                    results.append(
                        run_one(n_atoms, n_channels, n_times_atom, func))
    df = pd.DataFrame(results, columns=[
        'n_atoms', 'n_channels', 'n_times_atom', 'func', 'duration'
    ])
    fig, axes = plt.subplots(2, 2, figsize=(10, 8))
    axes = axes.ravel()
    def plot(index, ax):
        # Aggregate with the geometric mean over the other size parameters.
        pivot = df.pivot_table(columns='func', index=index, values='duration',
                               aggfunc=gmean)
        pivot.plot(ax=ax)
        ax.set_xscale('log')
        ax.set_yscale('log')
        ax.set_ylabel('duration')
    plot('n_atoms', axes[0])
    plot('n_times_atom', axes[1])
    plot('n_channels', axes[2])
    # plot('n_times_valid', axes[3])
    plt.tight_layout()
    plt.show()
if __name__ == '__main__':
    test_equality()  # sanity check the implementations before timing
    benchmark()
| bsd-3-clause | 9f88a5aa7164c21c8ba0265323afcd0c | 26.056604 | 79 | 0.54556 | 3.00419 | false | false | false | false |
alphacsc/alphacsc | alphacsc/init_dict.py | 1 | 7413 | # Authors: Mainak Jas <mainak.jas@telecom-paristech.fr>
# Tom Dupre La Tour <tom.duprelatour@telecom-paristech.fr>
# Umut Simsekli <umut.simsekli@telecom-paristech.fr>
# Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Thomas Moreau <thomas.moreau@inria.fr>
import numpy as np
from .utils import check_random_state
from .update_d_multi import prox_uv, prox_d
from .utils.dictionary import tukey_window
from .utils.dictionary import get_uv
def get_init_strategy(n_times_atom, shape, random_state, D_init):
    """Return the dictionary-initialization strategy matching ``D_init``.

    Parameters
    ----------
    n_times_atom : int
        The support of the atom.
    shape : tuple
        Expected shape of the dictionary, (n_atoms, n_channels +
        n_times_atom) for rank-1 or (n_atoms, n_channels, n_times_atom)
        otherwise.
    random_state : int or np.random.RandomState
        A seed to generate a RandomState instance or the instance itself.
    D_init : str or array
        Either the initial atoms themselves (an array of shape ``shape``)
        or one of the scheme names {'chunk' | 'random' | 'greedy'}.
        None defaults to 'random'.

    Raises
    ------
    NotImplementedError
        If ``D_init`` is not an array nor a known scheme name.
    """
    if isinstance(D_init, np.ndarray):
        return IdentityStrategy(shape, D_init)
    if D_init is None or D_init == 'random':
        return RandomStrategy(shape, random_state)
    if D_init == 'chunk':
        return ChunkStrategy(n_times_atom, shape, random_state)
    if D_init == 'greedy':
        return GreedyStrategy(shape, random_state)
    raise NotImplementedError('It is not possible to initialize uv'
                              ' with parameter {}.'.format(D_init))
class IdentityStrategy():
    """Initialization strategy that returns a user-provided dictionary.

    Parameters
    ----------
    shape : tuple
        Expected dictionary shape, (n_atoms, n_channels + n_times_atom)
        for rank-1 or (n_atoms, n_channels, n_times_atom) otherwise.
    D_init : array
        The atoms returned by :meth:`initialize`; its shape must match
        ``shape``.
    """
    def __init__(self, shape, D_init):
        assert D_init.shape == shape
        self.D_init = D_init
    def initialize(self, X):
        # X is ignored: the atoms are fully specified up front. A copy is
        # returned so callers may mutate the result freely.
        return self.D_init.copy()
class RandomStrategy():
    """A class that creates a random dictionary for a specified shape.

    Atoms are drawn i.i.d. from a standard normal distribution.

    Parameters
    ----------
    shape: tuple
        Expected shape of the dictionary. (n_atoms, n_channels + n_times_atoms)
        or (n_atoms, n_channels, n_times_atom)
    random_state: int or np.random.RandomState
        A seed to generate a RandomState instance or the instance itself.
    """
    def __init__(self, shape, random_state):
        self.shape = shape
        self.random_state = random_state
    def initialize(self, X):
        # X is ignored: the initialization does not look at the data.
        rng = check_random_state(self.random_state)
        return rng.randn(*self.shape)
class ChunkStrategy():
    """Initialize atoms by extracting random chunks of the signals X.

    Parameters
    ----------
    n_times_atom : int
        Temporal support of each atom.
    shape : tuple
        Expected dictionary shape, (n_atoms, n_channels + n_times_atom)
        for rank-1 or (n_atoms, n_channels, n_times_atom) otherwise.
    random_state : int or np.random.RandomState
        Seed or RandomState used to draw the chunks.
    """
    def __init__(self, n_times_atom, shape, random_state):
        self.n_atoms = shape[0]
        self.n_times_atom = n_times_atom
        # A 2D expected shape means a rank-1 (uv) dictionary is wanted.
        self.rank1 = (len(shape) == 2)
        self.random_state = random_state
    def initialize(self, X):
        rng = check_random_state(self.random_state)
        n_trials, n_channels, n_times = X.shape
        D_hat = np.zeros(
            (self.n_atoms, n_channels, self.n_times_atom))
        for k in range(self.n_atoms):
            # Pick a random trial, then a random window inside it.
            trial = rng.randint(n_trials)
            start = rng.randint(n_times - self.n_times_atom)
            D_hat[k] = X[trial, :, start:start + self.n_times_atom]
        # Project on rank-1 atoms when a uv dictionary is expected.
        return get_uv(D_hat) if self.rank1 else D_hat
class GreedyStrategy(RandomStrategy):
    """A class that creates a random dictionary for a specified shape and
    removes all elements.

    Parameters
    ----------
    shape: tuple
        Expected shape of the dictionary. (n_atoms, n_channels + n_times_atoms)
        or (n_atoms, n_channels, n_times_atom)
    random_state: int or np.random.RandomState
        A seed to generate a RandomState instance or the instance itself.
    """
    def initialize(self, X):
        # Start from an empty dictionary (0 atoms): the greedy scheme is
        # expected to add atoms one at a time afterwards.
        D_hat = super().initialize(X)
        return D_hat[:0]
def init_dictionary(X, n_atoms, n_times_atom, uv_constraint='separate',
                    rank1=True, window=False, D_init=None, random_state=None):
    """Return an initial dictionary for the signals X

    Parameters
    ----------
    X: array, shape(n_trials, n_channels, n_times)
        The data on which to perform CSC.
    n_atoms: int
        The number of atoms to learn.
    n_times_atom: int
        The support of the atom.
    uv_constraint: str in {'joint' | 'separate'}
        The kind of norm constraint on the atoms:
        If 'joint', the constraint is norm_2([u, v]) <= 1
        If 'separate', the constraint is norm_2(u) <= 1 and norm_2(v) <= 1
    rank1: boolean
        If set to True, use a rank 1 dictionary.
    window: boolean
        If True, multiply the atoms with a temporal Tukey window.
    D_init: array or {'chunk' | 'random'}
        The initialization scheme for the dictionary or the initial
        atoms. The shape should match the required dictionary shape, ie if
        rank1 is True, (n_atoms, n_channels + n_times_atom) and else
        (n_atoms, n_channels, n_times_atom)
    random_state: int | None
        The random state.

    Returns
    -------
    D: array shape(n_atoms, n_channels + n_times_atom) or
       shape(n_atoms, n_channels, n_times_atom)
        The initial atoms to learn from the data.
    """
    n_trials, n_channels, n_times = X.shape
    rng = check_random_state(random_state)
    # Target shape depends on whether a rank-1 (uv) dictionary is wanted.
    D_shape = (n_atoms, n_channels, n_times_atom)
    if rank1:
        D_shape = (n_atoms, n_channels + n_times_atom)
    if isinstance(D_init, np.ndarray):
        # User-provided atoms: copy them so the caller's array is untouched.
        D_hat = D_init.copy()
        assert D_hat.shape == D_shape
    elif D_init is None or D_init == "random":
        D_hat = rng.randn(*D_shape)
    elif D_init == 'chunk':
        # Extract random windows of the signals as initial atoms.
        D_hat = np.zeros((n_atoms, n_channels, n_times_atom))
        for i_atom in range(n_atoms):
            i_trial = rng.randint(n_trials)
            t0 = rng.randint(n_times - n_times_atom)
            D_hat[i_atom] = X[i_trial, :, t0:t0 + n_times_atom]
        if rank1:
            D_hat = get_uv(D_hat)
    elif D_init == 'greedy':
        raise NotImplementedError()
    else:
        raise NotImplementedError('It is not possible to initialize uv with'
                                  ' parameter {}.'.format(D_init))
    # Windowing is skipped for user-provided atoms, which are kept as-is.
    if window and not isinstance(D_init, np.ndarray):
        if rank1:
            D_hat[:, n_channels:] *= tukey_window(n_times_atom)[None, :]
        else:
            D_hat = D_hat * tukey_window(n_times_atom)[None, None, :]
    # Project the atoms on the unit-norm constraint set.
    if rank1:
        D_hat = prox_uv(D_hat, uv_constraint=uv_constraint,
                        n_channels=n_channels)
    else:
        D_hat = prox_d(D_hat)
    return D_hat
| bsd-3-clause | da3e851ef1211daeb365c9d432af5d14 | 32.542986 | 79 | 0.605423 | 3.565657 | false | false | false | false |
alphacsc/alphacsc | examples/other/plot_simulate_swm.py | 1 | 3342 | """
=====================
SWM on simulated data
=====================
This example shows how the sliding window method (SWM) [1]
works on simulated data. The code is adapted from the
`neurodsp package <https://github.com/voytekresearch/neurodsp/>`_
from Voytek lab. Note that, at present, it does not
implement parallel tempering.
[1] Gips, Bart, et al.
Discovering recurring patterns in electrophysiological recordings.
Journal of neuroscience methods 275 (2017): 66-79.
"""
# Authors: Scott Cole
# Mainak Jas <mainak.jas@telecom-paristech.fr>
#
# License: BSD (3-clause)
###############################################################################
# Let us define the model parameters
n_times_atom = 64 # L
n_times = 5000 # T
n_trials = 10 # N
###############################################################################
# The algorithm does not naturally lend itself to multiple atoms. Therefore,
# we simulate only one atom.
n_atoms = 1 # K
###############################################################################
# A minimum spacing between the windows averaged must be found.
min_spacing = 200 # G
###############################################################################
# Now, we can simulate
from alphacsc import check_random_state
from alphacsc.simulate import simulate_data
random_state_simulate = 1
X, ds_true, z_true = simulate_data(n_trials, n_times, n_times_atom,
                                   n_atoms, random_state_simulate,
                                   constant_amplitude=True)
rng = check_random_state(random_state_simulate)
# Add a small amount of white noise on top of the simulated signals.
X += 0.01 * rng.randn(*X.shape)
###############################################################################
# We expect 10 occurences of the atom in total.
# So, let us define 10 random locations for the algorithm to start with.
# If this number is not known, we will end up estimating more/less windows.
import numpy as np
window_starts = rng.choice(np.arange(n_trials * n_times), size=n_trials)
###############################################################################
# Now, we apply the SWM algorithm.
from alphacsc.other.swm import sliding_window_matching
random_state = 42
X = X.reshape(X.shape[0] * X.shape[1]) # expects 1D time series
d_hat, window_starts, J = sliding_window_matching(
    X, L=n_times_atom, G=min_spacing, window_starts_custom=window_starts,
    max_iterations=10000, T=0.01, random_state=random_state)
###############################################################################
# Let us look at the data at the time windows when the atoms are found.
import matplotlib.pyplot as plt
fig, axes = plt.subplots(2, n_trials // 2, sharex=True, sharey=True,
                         figsize=(15, 3))
axes = axes.ravel()
for ax, w_start in zip(axes, window_starts):
    ax.plot(X[w_start:w_start + n_times_atom])
###############################################################################
# It is not perfect, but it does find time windows where the atom
# is present. Now let us plot the atoms (estimated vs. ground truth, dashed).
plt.figure()
plt.plot(d_hat / np.linalg.norm(d_hat))
plt.plot(ds_true.T, '--')
###############################################################################
# and the cost function over iterations
plt.figure()
plt.plot(J)
plt.ylabel('Cost function J')
plt.xlabel('Iteration #')
plt.show()
| bsd-3-clause | b92993429a6d8a53eccdb9803c8fa6bf | 35.326087 | 79 | 0.5383 | 3.908772 | false | false | false | false |
alphacsc/alphacsc | alphacsc/utils/convolution.py | 1 | 7825 | """Convolutional utilities for dictionary learning"""
# Authors: Thomas Moreau <thomas.moreau@inria.fr>
# Mainak Jas <mainak.jas@telecom-paristech.fr>
# Tom Dupre La Tour <tom.duprelatour@telecom-paristech.fr>
# Umut Simsekli <umut.simsekli@telecom-paristech.fr>
# Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
import numba
import numpy as np
from .dictionary import get_D_shape
def construct_X(z, ds):
    """Reconstruct the signals from activations and univariate atoms.

    Parameters
    ----------
    z : array, shape (n_atoms, n_trials, n_times_valid)
        The activations.
    ds : array, shape (n_atoms, n_times_atom)
        The atoms.

    Returns
    -------
    X : array, shape (n_trials, n_times)
        The reconstructed signals, with
        n_times = n_times_valid + n_times_atom - 1.
    """
    assert z.shape[0] == ds.shape[0]
    n_atoms, n_trials, n_times_valid = z.shape
    n_times_atom = ds.shape[1]
    n_times = n_times_valid + n_times_atom - 1
    X = np.zeros((n_trials, n_times))
    for trial in range(n_trials):
        X[trial] = _choose_convolve(z[:, trial], ds)
    return X
def construct_X_multi(z, D=None, n_channels=None):
    """Reconstruct the multivariate signals from activations and atoms.

    Parameters
    ----------
    z : array, shape (n_trials, n_atoms, n_times_valid)
        Can also be a list of n_trials LIL-sparse matrix of shape
        (n_atoms, n_times - n_times_atom + 1)
        The activations
    D : array
        The atoms. Can either be full rank with shape shape
        (n_atoms, n_channels, n_times_atom) or rank 1 with
        shape shape (n_atoms, n_channels + n_times_atom)
    n_channels : int
        Number of channels, needed to split a rank-1 dictionary.

    Returns
    -------
    X : array, shape (n_trials, n_channels, n_times)
    """
    n_trials, n_atoms, n_times_valid = z.shape
    assert n_atoms == D.shape[0]
    # get_D_shape handles both the rank-1 and the full-rank layouts.
    _, n_channels, n_times_atom = get_D_shape(D, n_channels)
    n_times = n_times_valid + n_times_atom - 1
    X = np.zeros((n_trials, n_channels, n_times))
    for i in range(n_trials):
        X[i] = _choose_convolve_multi(z[i], D=D, n_channels=n_channels)
    return X
def _sparse_convolve(z_i, ds):
"""Same as _dense_convolve, but use the sparsity of zi."""
n_atoms, n_times_atom = ds.shape
n_atoms, n_times_valid = z_i.shape
n_times = n_times_valid + n_times_atom - 1
Xi = np.zeros(n_times)
for zik, dk in zip(z_i, ds):
for nnz in np.where(zik != 0)[0]:
Xi[nnz:nnz + n_times_atom] += zik[nnz] * dk
return Xi
def _sparse_convolve_multi(z_i, ds):
"""Same as _dense_convolve, but use the sparsity of zi."""
n_atoms, n_channels, n_times_atom = ds.shape
n_atoms, n_times_valid = z_i.shape
n_times = n_times_valid + n_times_atom - 1
Xi = np.zeros(shape=(n_channels, n_times))
for zik, dk in zip(z_i, ds):
for nnz in np.where(zik != 0)[0]:
Xi[:, nnz:nnz + n_times_atom] += zik[nnz] * dk
return Xi
def _sparse_convolve_multi_uv(z_i, uv, n_channels):
"""Same as _dense_convolve, but use the sparsity of zi."""
u = uv[:, :n_channels]
v = uv[:, n_channels:]
n_atoms, n_times_valid = z_i.shape
n_atoms, n_times_atom = v.shape
n_times = n_times_valid + n_times_atom - 1
Xi = np.zeros(shape=(n_channels, n_times))
for zik, uk, vk in zip(z_i, u, v):
zik_vk = np.zeros(n_times)
for nnz in np.where(zik != 0)[0]:
zik_vk[nnz:nnz + n_times_atom] += zik[nnz] * vk
Xi += zik_vk[None, :] * uk[:, None]
return Xi
def _dense_convolve(z_i, ds):
"""Convolve z_i[k] and ds[k] for each atom k, and return the sum."""
return sum([np.convolve(zik, dk) for zik, dk in zip(z_i, ds)], 0)
def _dense_convolve_multi(z_i, ds):
"""Convolve z_i[k] and ds[k] for each atom k, and return the sum."""
return np.sum([[np.convolve(zik, dkp) for dkp in dk]
for zik, dk in zip(z_i, ds)], 0)
def _dense_convolve_multi_uv(z_i, uv, n_channels):
"""Convolve z_i[k] and uv[k] for each atom k, and return the sum."""
u = uv[:, :n_channels]
v = uv[:, n_channels:]
n_atoms, n_times_valid = z_i.shape
n_atoms, n_times_atom = v.shape
n_times = n_times_valid + n_times_atom - 1
Xi = np.zeros((n_channels, n_times))
for zik, uk, vk in zip(z_i, u, v):
zik_vk = np.convolve(zik, vk)
Xi += zik_vk[None, :] * uk[:, None]
return Xi
def _choose_convolve(z_i, ds):
    """Dispatch to the sparse or dense convolution based on z_i sparsity.

    z_i : array, shape (n_atoms, n_times_valid)
        Activations.
    ds : array, shape (n_atoms, n_times_atom)
        Dictionary.
    """
    assert z_i.shape[0] == ds.shape[0]
    # Below 1% of non-zero entries, the sparse path is faster.
    sparse_enough = np.sum(z_i != 0) < 0.01 * z_i.size
    if sparse_enough:
        return _sparse_convolve(z_i, ds)
    return _dense_convolve(z_i, ds)
def _choose_convolve_multi(z_i, D=None, n_channels=None):
    """Dispatch to the sparse or dense multichannel convolution.

    z_i : array, shape (n_atoms, n_times_valid)
        Activations.
    D : array
        The atoms, either rank-1 with shape (n_atoms, n_channels +
        n_times_atom) or full-rank with shape (n_atoms, n_channels,
        n_times_atom).
    n_channels : int
        Number of channels, needed to split a rank-1 dictionary.
    """
    assert z_i.shape[0] == D.shape[0]
    # A 2D dictionary is the rank-1 (uv) layout.
    rank1 = (D.ndim == 2)
    # Below 1% of non-zero entries, the sparse path is faster.
    if np.sum(z_i != 0) < 0.01 * z_i.size:
        if rank1:
            return _sparse_convolve_multi_uv(z_i, D, n_channels)
        return _sparse_convolve_multi(z_i, D)
    if rank1:
        return _dense_convolve_multi_uv(z_i, D, n_channels)
    return _dense_convolve_multi(z_i, D)
@numba.jit((numba.float64[:, :, :], numba.float64[:, :]), cache=True,
           nopython=True)
def numpy_convolve_uv(ztz, uv):
    """Compute the multivariate (valid) convolution of ztz and D
    Parameters
    ----------
    ztz: array, shape = (n_atoms, n_atoms, 2 * n_times_atom - 1)
        Activations
    uv: array, shape = (n_atoms, n_channels + n_times_atom)
        Dictionnary
    Returns
    -------
    G : array, shape = (n_atoms, n_channels, n_times_atom)
        Gradient
    """
    assert uv.ndim == 2
    n_times_atom = (ztz.shape[2] + 1) // 2
    n_atoms = ztz.shape[0]
    n_channels = uv.shape[1] - n_times_atom
    u = uv[:, :n_channels]
    # v is time-reversed so the explicit sum below implements the same
    # valid convolution as np.convolve(..., mode='valid').
    v = uv[:, n_channels:][:, ::-1]
    G = np.zeros((n_atoms, n_channels, n_times_atom))
    for k0 in range(n_atoms):
        for k1 in range(n_atoms):
            for t in range(n_times_atom):
                # Temporal term broadcast over channels through u[k1].
                G[k0, :, t] += (
                    np.sum(ztz[k0, k1, t:t + n_times_atom] * v[k1]) * u[k1, :])
    return G
def tensordot_convolve(ztz, D):
    """Compute the multivariate (valid) convolution of ztz and D.

    Parameters
    ----------
    ztz : array, shape (n_atoms, n_atoms, 2 * n_times_atom - 1)
        Activations
    D : array, shape (n_atoms, n_channels, n_times_atom)
        Dictionnary

    Returns
    -------
    G : array, shape (n_atoms, n_channels, n_times_atom)
        Gradient
    """
    n_times_atom = D.shape[2]
    flipped_D = D[:, :, ::-1]
    G = np.zeros(D.shape)
    # One tensordot per output time sample, contracting over the second
    # atom axis and the (reversed) atom time axis.
    for t in range(n_times_atom):
        window = ztz[:, :, t:t + n_times_atom]
        G[:, :, t] = np.tensordot(window, flipped_D, axes=([1, 2], [0, 2]))
    return G
def sort_atoms_by_explained_variances(D_hat, z_hat, n_channels):
    """Reorder atoms (and their activations) by decreasing explained
    variance of their individual reconstruction.

    D_hat : array, atoms; z_hat : array, shape (n_trials, n_atoms, ...),
    activations; n_channels : int, number of channels.
    Returns the reordered (D_hat, z_hat) pair.
    """
    n_atoms = D_hat.shape[0]
    assert z_hat.shape[1] == n_atoms
    variances = np.zeros(n_atoms)
    for kk in range(n_atoms):
        # Variance of the signal reconstructed from atom kk alone.
        variances[kk] = construct_X_multi(z_hat[:, kk:kk + 1],
                                          D_hat[kk:kk + 1],
                                          n_channels=n_channels).var()
    order = np.argsort(variances)[::-1]
    z_hat = z_hat[:, order, :]
    D_hat = D_hat[order, ...]
    return D_hat, z_hat
| bsd-3-clause | 89be8df55c889c3cc191ecf42aa3a535 | 29.686275 | 79 | 0.562684 | 2.876838 | false | false | false | false |
alphacsc/alphacsc | alphacsc/viz/epoch.py | 1 | 4256 | from copy import deepcopy
import mne
import numpy as np
from joblib import Parallel, delayed
def make_epochs(z_hat, info, t_lim, n_times_atom=1):
    """Make Epochs on the activations of atoms.
    n_splits, n_atoms, n_times_valid = z_hat.shape
    n_trials, n_atoms, n_times_epoch = z_hat_epoch.shape
    """
    n_splits, n_atoms, n_times_valid = z_hat.shape
    n_times = n_times_valid + n_times_atom - 1
    # pad with zeros so each split has the full signal length n_times
    padding = np.zeros((n_splits, n_atoms, n_times_atom - 1))
    z_hat = np.concatenate([z_hat, padding], axis=2)
    # reshape into an unique time-serie per atom
    z_hat = np.reshape(z_hat.swapaxes(0, 1), (n_atoms, n_splits * n_times))
    # create trials around the events, using mne
    new_info = mne.create_info(ch_names=n_atoms, sfreq=info['sfreq'])
    rawarray = mne.io.RawArray(data=z_hat, info=new_info, verbose=False)
    t_min, t_max = t_lim
    # NOTE(review): assumes the caller stashed 'events' and 'event_id'
    # under info['temp'] — confirm against the calling code.
    info_temp = info['temp']
    epochs = mne.Epochs(rawarray, info_temp['events'], info_temp['event_id'],
                        t_min, t_max, verbose=False)
    z_hat_epoched = epochs.get_data()
    return z_hat_epoched
def make_evoke(array, info, t_lim):
    """Epoch the activations around the events and average over trials."""
    # Promote 1D / 2D inputs to shape (n_splits, n_atoms, n_times_valid).
    if array.ndim == 1:
        array = array[None, None]
    elif array.ndim == 2:
        array = array[None]
    epochs = make_epochs(array, info, t_lim=t_lim)
    return epochs.mean(axis=0)
def make_evoke_one_surrogate(array, info, t_lim):
    """Compute one surrogate evoked array using randomized event times."""
    # generate random events (uses the global np.random state, unseeded)
    info = deepcopy(info)
    info_temp = info['temp']
    n_events = info_temp['events'].shape[0]
    events = np.random.randint(array.shape[-1], size=n_events)
    # Sorting + deduplication may reduce the number of surrogate events.
    events = np.sort(np.unique(events))
    n_events = events.shape[0]
    event_id = np.atleast_1d(info_temp['event_id']).astype('int')
    # Tile the event ids so every surrogate event gets one.
    n_tile = int(np.ceil(n_events / float(event_id.shape[0])))
    event_id_tiled = np.tile(event_id, n_tile)[:n_events]
    events = np.c_[events, np.zeros_like(events), event_id_tiled]
    info_temp['events'] = events
    # make evoked with random events
    evoked_array = make_evoke(array, info, t_lim)
    return evoked_array
def make_evoke_all_surrogates(array, info, t_lim, n_jobs, n_surrogates=100):
    """Compute ``n_surrogates`` surrogate evoked arrays in parallel."""
    parallel = Parallel(n_jobs=n_jobs)
    jobs = (delayed(make_evoke_one_surrogate)(array, info, t_lim)
            for _ in range(n_surrogates))
    return np.array(parallel(jobs))
def plot_evoked_surrogates(array, info, t_lim, ax, n_jobs, label='',
                           threshold=0.005):
    """Compute and plot evoked array distribution over random events.

    The evoked activation of one atom is drawn on ``ax`` together with a
    gray band covering the (1 - threshold) range of surrogate evoked
    values obtained with randomized events.
    """
    assert array.ndim == 1
    assert ax is not None
    # compute mean over epochs
    evoked = make_evoke(array, info, t_lim)[0]
    # compute surrogate evoked with random events
    evoked_surrogate = make_evoke_all_surrogates(array, info, t_lim,
                                                 n_jobs)[:, 0]
    # find thresholds (two-sided percentiles of the surrogate extrema)
    low, high = 100 * threshold / 2., 100 * (1 - threshold / 2.)
    threshold_low = np.percentile(evoked_surrogate.min(axis=1), low)
    threshold_high = np.percentile(evoked_surrogate.max(axis=1), high)
    # plot the evoked and a gray area for the 95% percentile
    t = np.arange(len(evoked)) / info['sfreq'] + t_lim[0]
    outside_thresholds = ((evoked > threshold_high) + (evoked < threshold_low))
    # Orange when the evoked leaves the surrogate band, green otherwise.
    color = 'C1' if np.any(outside_thresholds) else 'C2'
    ax.plot(t, evoked, label=label, color=color)
    label_th = str(100 * (1 - threshold)) + ' %'
    ax.fill_between(t, threshold_low, threshold_high, color='k', alpha=0.2,
                    label=label_th)
    # Highlight the samples that fall outside the surrogate band.
    ax.fill_between(t, threshold_low, threshold_high, where=outside_thresholds,
                    color='y', alpha=0.2)
    ax.axvline(0, color='k', linestyle='--')
    ax.set_ylim([0, None])
    ax.legend()
    # # plot the histogram of evoked_surrogate, and of evoked
    # ax = axes[1]
    # ax.hist(evoked_surrogate, bins=100, density=True, label='surrogate')
    # ax.hist(evoked.ravel(), bins=100, density=True, alpha=0.8,
    #         label='evoked')
    # ax.axvline(threshold_low, color='k', linestyle='--')
    # ax.axvline(threshold_high, color='k', linestyle='--')
    # ax.legend()
| bsd-3-clause | 022425eddd4fa5c3a8f32c4921aa8073 | 38.045872 | 79 | 0.624295 | 3.120235 | false | false | false | false |
alphacsc/alphacsc | benchmarks/scaling_channels_run.py | 1 | 7608 | """Benchmark the scaling of alphacsc algorithm with multiple channels.
This script needs the following packages:
conda install pandas
conda install -c conda-forge pyfftw
This script performs the computations and save the results in a pickled file
`figures/methods_scaling_reg*.pkl` which can be plotted using
`scaling_channels_plot.py`.
"""
from pathlib import Path
import time
import itertools
import numpy as np
import pandas as pd
import scipy.sparse as sp
from joblib import Parallel, delayed, Memory
from alphacsc.utils.profile_this import profile_this # noqa
from alphacsc.utils import check_random_state, get_D
from alphacsc.learn_d_z_multi import learn_d_z_multi
from alphacsc.utils.dictionary import get_lambda_max
START = time.time()
VERBOSE = 1
#####################################
# Dictionary initialization function
#####################################
def generate_D_init(n_atoms, n_channels, n_times_atom, random_state):
    """Draw a random rank-1 dictionary uv of shape
    (n_atoms, n_channels + n_times_atom)."""
    rng = check_random_state(random_state)
    return rng.randn(n_atoms, n_channels + n_times_atom)
######################################
# Functions compared in the benchmark
######################################
def run_multichannel(X, D_init, reg, n_iter, random_state,
                     label, n_channels):
    """Run rank-1 CSC (alternate-adaptive d-solver, LGCD z-solver)."""
    n_atoms, n_channels_n_times_atom = D_init.shape
    # D_init is rank-1: columns are [n_channels | n_times_atom].
    n_times_atom = n_channels_n_times_atom - n_channels
    solver_z_kwargs = dict(max_iter=500, tol=1e-1)
    return learn_d_z_multi(
        X, n_atoms, n_times_atom, reg=reg, n_iter=n_iter,
        uv_constraint='separate', rank1=True, D_init=D_init,
        solver_d='alternate_adaptive', solver_d_kwargs=dict(max_iter=50),
        solver_z="lgcd", solver_z_kwargs=solver_z_kwargs,
        name="rank1-{}-{}".format(n_channels, random_state),
        random_state=random_state, n_jobs=1, verbose=VERBOSE)
def run_multivariate(X, D_init, reg, n_iter, random_state,
                     label, n_channels):
    """Run full-rank multivariate CSC (FISTA d-solver, LGCD z-solver)."""
    n_atoms, n_channels_n_times_atom = D_init.shape
    n_times_atom = n_channels_n_times_atom - n_channels
    # Expand the rank-1 initialization to a full-rank dictionary.
    D_init = get_D(D_init, n_channels)
    solver_z_kwargs = dict(max_iter=500, tol=1e-1)
    return learn_d_z_multi(
        X, n_atoms, n_times_atom, reg=reg, n_iter=n_iter,
        uv_constraint='auto', rank1=False, D_init=D_init,
        solver_d='fista', solver_d_kwargs=dict(max_iter=50),
        solver_z="lgcd", solver_z_kwargs=solver_z_kwargs,
        name="dense-{}-{}".format(n_channels, random_state),
        random_state=random_state, n_jobs=1, verbose=VERBOSE,
        raise_on_increase=False)
def run_multivariate_dicodile(X, D_init, reg, n_iter, random_state,
                              label, n_channels):
    """Run full-rank multivariate CSC with the distributed DiCoDiLe
    z-solver."""
    n_atoms, n_channels_n_times_atom = D_init.shape
    n_times_atom = n_channels_n_times_atom - n_channels
    # Expand the rank-1 initialization to a full-rank dictionary.
    D_init = get_D(D_init, n_channels)
    solver_z_kwargs = dict(max_iter=500, tol=1e-1)
    # NOTE(review): n_jobs=10 is hard-coded here while the sibling runners
    # use n_jobs=1 — presumably the number of DiCoDiLe workers; confirm.
    return learn_d_z_multi(
        X, n_atoms, n_times_atom, reg=reg, n_iter=n_iter,
        uv_constraint='auto', rank1=False, D_init=D_init,
        solver_d='auto', solver_d_kwargs=dict(max_iter=50),
        solver_z="dicodile", solver_z_kwargs=solver_z_kwargs,
        name="dicodile-{}-{}".format(n_channels, random_state),
        random_state=random_state, n_jobs=10, verbose=VERBOSE,
        raise_on_increase=False)
####################################
# Calling function of the benchmark
####################################
def one_run(X, n_channels, method, n_atoms, n_times_atom, random_state, reg):
    """Run one (method, n_channels, random_state) benchmark point and
    return a result tuple for the final DataFrame."""
    func, label, n_iter = method
    current_time = time.time() - START
    print('{}-{}-{}: started at {:.0f} sec'.format(
        label, n_channels, random_state, current_time))
    # use the same init for all methods
    D_init = generate_D_init(n_atoms, n_channels, n_times_atom, random_state)
    X = X[:, :n_channels]
    # Scale the regularization relative to lambda_max for this sub-problem.
    lmbd_max = get_lambda_max(X, D_init).mean()
    reg_ = reg * lmbd_max
    # run the selected algorithm with one iter to remove compilation overhead
    _, _, _, _, _ = func(X, D_init, reg_, 1, random_state, label, n_channels)
    # run the selected algorithm
    # NOTE(review): the returned reg rebinds (shadows) the `reg` parameter;
    # the value stored in the result tuple is the solver's, not the input.
    pobj, times, d_hat, z_hat, reg = func(
        X, D_init, reg_, n_iter, random_state, label, n_channels
    )
    # store z_hat in a sparse matrix to reduce size
    for z in z_hat:
        z[z < 1e-3] = 0
    z_hat = [sp.csr_matrix(z) for z in z_hat]
    current_time = time.time() - START
    print('{}-{}-{}: done at {:.0f} sec'.format(
        label, n_channels, random_state, current_time))
    assert len(times) > 5
    return (n_channels, random_state, label, np.asarray(pobj),
            np.asarray(times), np.asarray(d_hat), np.asarray(z_hat), n_atoms,
            n_times_atom, reg)
###############################
# Main script of the benchmark
###############################
if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser('Programme to launch experiemnt')
    parser.add_argument('--njobs', type=int, default=1,
                        help='number of cores used to run the experiment')
    parser.add_argument('--dense', action="store_true",
                        help='run the experiment for multivariate')
    parser.add_argument('--dicodile', action="store_true",
                        help='run the experiment for multivariate dicodile')
    args = parser.parse_args()
    figures_dir = Path('figures')
    figures_dir.mkdir(exist_ok=True)
    # Use the caching utilities from joblib to same intermediate results and
    # avoid loosing computations when the interpreter crashes.
    mem = Memory(location='.', verbose=0)
    cached_one_run = mem.cache(func=one_run)
    delayed_one_run = delayed(cached_one_run)
    # load somato data
    from alphacsc.datasets.mne_data import load_data
    X, info = load_data(dataset='somato', epoch=False, n_jobs=args.njobs,
                        n_splits=1)
    # Set dictionary learning parameters
    n_atoms = 2 # K
    n_times_atom = 128 # L
    # Set the benchmarking parameters.
    reg = .005
    n_iter = 50
    n_states = 5
    # Select the method to run and the range of n_channels
    # (rank-1 by default; --dense / --dicodile restrict to the 5 smallest
    # channel counts since full-rank runs are much slower).
    n_channels = X.shape[1]
    methods = [(run_multichannel, 'rank1', n_iter)]
    span_channels = np.unique(np.floor(
        np.logspace(0, np.log10(n_channels), 10)).astype(int))
    if args.dense:
        methods = [[run_multivariate, 'dense', n_iter]]
        span_channels = np.unique(np.floor(
            np.logspace(0, np.log10(n_channels), 10)).astype(int))[:5]
    if args.dicodile:
        methods = [[run_multivariate_dicodile, 'dicodile', n_iter]]
        span_channels = np.unique(np.floor(
            np.logspace(0, np.log10(n_channels), 10)).astype(int))[:5]
    # Create the grid of parameters over which to run the benchmark.
    iterator = itertools.product(range(n_states), methods, span_channels)
    # Run the experiment in parallel with joblib
    all_results = Parallel(n_jobs=args.njobs)(
        delayed_one_run(X, n_channels, method, n_atoms,
                        n_times_atom, rst, reg)
        for rst, method, n_channels in iterator)
    # save all results for plotting with scaling_channels_plot.py script.
    suffix = ""
    if args.dense:
        suffix = "_dense"
    if args.dicodile:
        suffix = "_dicodile"
    file_name = f'methods_scaling_reg{reg}{suffix}.pkl'
    save_path = figures_dir / file_name
    all_results_df = pd.DataFrame(
        all_results, columns='n_channels random_state label pobj times '
        'd_hat z_hat n_atoms n_times_atom reg'.split(' '))
    all_results_df.to_pickle(save_path)
    print('-- End of the script --')
| bsd-3-clause | de6dd03c54a25a3427ffa7d799fda612 | 35.228571 | 77 | 0.616062 | 3.336842 | false | false | false | false |
alphacsc/alphacsc | benchmarks/function_benchmarks/multivariate_convolve.py | 1 | 6979 | import time
import numba
import numpy as np
import pandas as pd
from joblib import Memory
import matplotlib.pyplot as plt
from scipy.signal import fftconvolve
from scipy.stats.mstats import gmean
memory = Memory(location='', verbose=0)
def scipy_fftconvolve(ztz, D):
    """FFT-based valid convolution of ztz with D.

    ztz.shape = n_atoms, n_atoms, 2 * n_times_atom - 1
    D.shape = n_atoms, n_channels, n_times_atom
    """
    n_atoms, n_channels, _ = D.shape
    # TODO: try with zero padding to next_fast_len
    G = np.zeros(D.shape)
    for k0 in range(n_atoms):
        for p in range(n_channels):
            for k1 in range(n_atoms):
                G[k0, p] += fftconvolve(ztz[k0, k1], D[k1, p], mode='valid')
    return G
def numpy_convolve(ztz, D):
    """Direct valid convolution of ztz with D using np.convolve.

    ztz.shape = n_atoms, n_atoms, 2 * n_times_atom - 1
    D.shape = n_atoms, n_channels, n_times_atom
    """
    n_channels = D.shape[1]
    G = np.zeros(D.shape)
    for k0, ztz_k0 in enumerate(ztz):
        for k1, ztz_k0k1 in enumerate(ztz_k0):
            for p in range(n_channels):
                G[k0, p] += np.convolve(ztz_k0k1, D[k1, p], mode='valid')
    return G
@numba.jit(nogil=True)
def dot_and_numba(ztz, D):
    """Numba-jitted candidate: each output sample is one dot product.

    ztz.shape = n_atoms, n_atoms, 2 * n_times_atom - 1
    D.shape = n_atoms, n_channels, n_times_atom
    """
    n_atoms, n_channels, n_times_atom = D.shape
    G = np.zeros(D.shape)
    for k0 in range(n_atoms):
        for k1 in range(n_atoms):
            for p in range(n_channels):
                for t in range(n_times_atom):
                    # Reversing the atom turns the dot product into a
                    # convolution (rather than a correlation).
                    G[k0, p, t] += np.dot(ztz[k0, k1, t:t + n_times_atom],
                                          D[k1, p, ::-1])
    return G
@numba.jit(nogil=True)
def sum_and_numba(ztz, D):
    """Numba-jitted candidate summing over all atoms at once per sample.

    ztz.shape = n_atoms, n_atoms, 2 * n_times_atom - 1
    D.shape = n_atoms, n_channels, n_times_atom
    """
    n_atoms, n_channels, n_times_atom = D.shape
    G = np.zeros(D.shape)
    for k0 in range(n_atoms):
        for p in range(n_channels):
            for t in range(n_times_atom):
                # Elementwise product + sum folds the k1 and time axes in
                # a single reduction; atom reversed for true convolution.
                G[k0, p, t] += np.sum(
                    ztz[k0, :, t:t + n_times_atom] * D[:, p, ::-1])
    return G
def tensordot(ztz, D):
    """np.tensordot candidate: one contraction per output time sample.

    Parameters
    ----------
    ztz : ndarray, shape (n_atoms, n_atoms, 2 * n_times_atom - 1)
        Cross-correlation of the activations.
    D : ndarray, shape (n_atoms, n_channels, n_times_atom)
        Multivariate dictionary.

    Returns
    -------
    G : ndarray, shape (n_atoms, n_channels, n_times_atom)
    """
    n_times_atom = D.shape[2]
    # Time-reversed atoms: tensordot computes a correlation, so flipping
    # the last axis yields a convolution.
    flipped = D[:, :, ::-1]
    G = np.zeros(D.shape)
    for offset in range(n_times_atom):
        window = ztz[:, :, offset:offset + n_times_atom]
        G[:, :, offset] = np.tensordot(window, flipped,
                                       axes=([1, 2], [0, 2]))
    return G
def numpy_convolve_uv(ztz, uv):
    """Rank-1 candidate working on the (u | v) parametrization of D.

    Parameters
    ----------
    ztz : ndarray, shape (n_atoms, n_atoms, 2 * n_times_atom - 1)
        Cross-correlation of the activations.
    uv : ndarray, shape (n_atoms, n_channels + n_times_atom)
        Horizontal stack of the spatial patterns u and temporal atoms v.

    Returns
    -------
    G : ndarray, shape (n_atoms, n_channels, n_times_atom)
    """
    assert uv.ndim == 2
    n_atoms, _, width = ztz.shape
    n_times_atom = (width + 1) // 2
    n_channels = uv.shape[1] - n_times_atom
    u, v = uv[:, :n_channels], uv[:, n_channels:]
    G = np.zeros((n_atoms, n_channels, n_times_atom))
    for a in range(n_atoms):
        for b in range(n_atoms):
            # Convolve once per atom pair, then scale by the spatial map:
            # outer(u[b], conv) == conv[None, :] * u[b][:, None].
            conv = np.convolve(ztz[a, b], v[b], mode='valid')
            G[a] += np.outer(u[b], conv)
    return G
# Candidate implementations compared by test_equality() and benchmark().
# scipy_fftconvolve is deliberately left out of the run -- presumably not
# competitive at these sizes; confirm before re-enabling.
all_func = [
    numpy_convolve,
    # scipy_fftconvolve,
    dot_and_numba,
    sum_and_numba,
    tensordot,
    numpy_convolve_uv,
]
try:
    import tensorflow as tf
    # NOTE(review): this unconditional raise deliberately disables the
    # tensorflow candidate even when tensorflow is installed -- control
    # jumps straight to the except clause below. Presumably a quick way
    # to drop it from the benchmark; confirm before removing.
    raise ImportError()
    def tensorflow_conv(ztz, D):
        """TensorFlow candidate (currently disabled, see raise above).

        ztz.shape = n_atoms, n_atoms, 2 * n_times_atom - 1
        D.shape = n_atoms, n_channels, n_times_atom
        """
        n_atoms, n_channels, n_times_atom = D.shape
        with tf.Session() as session:
            tf_D = tf.placeholder(tf.float32,
                                  shape=(n_times_atom, n_atoms, n_channels))
            tf_ztz = tf.placeholder(tf.float32, shape=(ztz.shape))
            res = tf.nn.convolution(tf_ztz, tf_D, padding="VALID",
                                    data_format="NCW")
            # Kernel reversed along time (``[::-1]``) so the correlation
            # computed by tf.nn.convolution becomes a true convolution.
            return session.run(res, feed_dict={
                tf_D: np.moveaxis(D, -1, 0)[::-1], tf_ztz: ztz})
    all_func.append(tensorflow_conv)
except ImportError:
    pass
try:
    import torch
    def torch_conv(ztz, D):
        """PyTorch candidate using F.conv1d over the atom axis.

        ztz.shape = n_atoms, n_atoms, 2 * n_times_atom - 1
        D.shape = n_atoms, n_channels, n_times_atom
        """
        # conv1d computes a cross-correlation, so the kernel is reversed
        # in time to obtain a convolution; .copy() presumably also makes
        # the reversed view contiguous for from_numpy -- confirm.
        D = D.swapaxes(0, 1)[:, :, ::-1].copy()
        filters = torch.autograd.Variable(torch.from_numpy(D))
        inputs = torch.autograd.Variable(torch.from_numpy(ztz))
        return torch.nn.functional.conv1d(inputs, filters).data.numpy()
    # set convolution filter to D
    all_func.append(torch_conv)
except ImportError:
    pass
# all_func = all_func[-2:]
def test_equality():
    """Check every candidate in ``all_func`` against the first one.

    Builds a random rank-1 dictionary D = u x v so that both the full-D
    and the uv-parametrized candidates can be exercised on the same data.
    """
    n_atoms, n_channels, n_times_atom = 5, 10, 15
    ztz = np.random.randn(n_atoms, n_atoms, 2 * n_times_atom - 1)
    u = np.random.randn(n_atoms, n_channels)
    v = np.random.randn(n_atoms, n_times_atom)
    D = u[:, :, None] * v[:, None, :]
    reference = all_func[0](ztz, D)
    for func in all_func:
        # uv-variants are detected by name and take (u | v) instead of D
        if 'uv' in func.__name__:
            kwargs = {'uv': np.hstack([u, v])}
        else:
            kwargs = {'D': D}
        result = func(ztz, **kwargs)
        assert np.allclose(result, reference)
@memory.cache
def run_one(n_atoms, n_channels, n_times_atom, func):
    """Time a single call of ``func`` on random inputs of the given size.

    Parameters
    ----------
    n_atoms, n_channels, n_times_atom : int
        Problem dimensions.
    func : callable
        One of the candidates from ``all_func``; uv-variants (detected by
        name) receive the rank-1 parametrization instead of a full D.

    Returns
    -------
    tuple
        ``(n_atoms, n_channels, n_times_atom, func_name, duration)``,
        ready to be collected into a DataFrame by ``benchmark``.
    """
    ztz = np.random.randn(n_atoms, n_atoms, 2 * n_times_atom - 1)
    if 'uv' in func.__name__:
        # uv-variants take the horizontally stacked (u | v) array
        D = np.random.randn(n_atoms, n_channels + n_times_atom)
    else:
        D = np.random.randn(n_atoms, n_channels, n_times_atom)

    # Fixed: time.perf_counter() is monotonic and high-resolution, unlike
    # time.time() which follows the (adjustable) wall clock -- it is the
    # correct clock for benchmarking.
    start = time.perf_counter()
    func(ztz, D)
    duration = time.perf_counter() - start
    return (n_atoms, n_channels, n_times_atom, func.__name__, duration)
def benchmark():
    """Time every candidate over a grid of problem sizes and plot results.

    Durations are aggregated with the geometric mean and shown on log-log
    axes, one panel per swept dimension.
    """
    n_atoms_range = [1, 2, 4, 8, 16]
    n_channels_range = [10, 20, 40, 80, 160]
    n_times_atom_range = [10, 20, 40, 80, 160]
    n_runs = (len(n_atoms_range) * len(n_channels_range) *
              len(n_times_atom_range) * len(all_func))

    results = []
    done = 0
    for n_atoms in n_atoms_range:
        for n_channels in n_channels_range:
            for n_times_atom in n_times_atom_range:
                for func in all_func:
                    print(f'{done}/{n_runs}, {func.__name__}')
                    done += 1
                    results.append(
                        run_one(n_atoms, n_channels, n_times_atom, func))

    df = pd.DataFrame(results, columns=[
        'n_atoms', 'n_channels', 'n_times_atom', 'func', 'duration'
    ])

    fig, axes = plt.subplots(2, 2, figsize=(10, 8))
    axes = axes.ravel()

    def plot(index, ax):
        # geometric mean is the natural average for durations spread over
        # multiplicative scales
        pivot = df.pivot_table(columns='func', index=index, values='duration',
                               aggfunc=gmean)
        pivot.plot(ax=ax)
        ax.set_xscale('log')
        ax.set_yscale('log')
        ax.set_ylabel('duration')

    # fourth axis stays empty, as before
    for ax, index in zip(axes, ('n_atoms', 'n_times_atom', 'n_channels')):
        plot(index, ax)
    plt.tight_layout()
    plt.show()
if __name__ == '__main__':
    # Sanity-check that all candidates agree before timing them.
    test_equality()
    benchmark()
| bsd-3-clause | cab04915509582fe3a7d844e10ce7976 | 27.369919 | 79 | 0.536037 | 2.949704 | false | false | false | false |
mozilla/badges.mozilla.org | badger/migrations/0001_initial.py | 10 | 11026 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial South schema migration for the badger app.

    Auto-generated by South's ``schemamigration``: creates the Badge,
    Award and Progress tables (plus the Badge prerequisites M2M table).
    The frozen-ORM state in ``models`` below must stay in sync with what
    ``forwards``/``backwards`` create, so edit with care.
    """
    def forwards(self, orm):
        # Adding model 'Badge'
        db.create_table('badger_badge', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('title', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)),
            ('slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=50, db_index=True)),
            ('description', self.gf('django.db.models.fields.TextField')(blank=True)),
            ('image', self.gf('django.db.models.fields.files.ImageField')(max_length=100, null=True, blank=True)),
            ('unique', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('creator', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, blank=True)),
            ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('modified', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
        ))
        db.send_create_signal('badger', ['Badge'])
        # Adding unique constraint on 'Badge', fields ['title', 'slug']
        db.create_unique('badger_badge', ['title', 'slug'])
        # Adding M2M table for field prerequisites on 'Badge'
        db.create_table('badger_badge_prerequisites', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('from_badge', models.ForeignKey(orm['badger.badge'], null=False)),
            ('to_badge', models.ForeignKey(orm['badger.badge'], null=False))
        ))
        db.create_unique('badger_badge_prerequisites', ['from_badge_id', 'to_badge_id'])
        # Adding model 'Award'
        db.create_table('badger_award', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('badge', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['badger.Badge'])),
            ('image', self.gf('django.db.models.fields.files.ImageField')(max_length=100, null=True, blank=True)),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='award_user', to=orm['auth.User'])),
            ('creator', self.gf('django.db.models.fields.related.ForeignKey')(blank=True, related_name='award_creator', null=True, to=orm['auth.User'])),
            ('hidden', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('modified', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
        ))
        db.send_create_signal('badger', ['Award'])
        # Adding model 'Progress'
        db.create_table('badger_progress', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('badge', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['badger.Badge'])),
            ('user', self.gf('django.db.models.fields.related.ForeignKey')(related_name='progress_user', to=orm['auth.User'])),
            ('percent', self.gf('django.db.models.fields.FloatField')(default=0)),
            ('counter', self.gf('django.db.models.fields.FloatField')(default=0, null=True, blank=True)),
            ('notes', self.gf('badger.models.JSONField')(null=True, blank=True)),
            ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('modified', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
        ))
        db.send_create_signal('badger', ['Progress'])
        # Adding unique constraint on 'Progress', fields ['badge', 'user']
        db.create_unique('badger_progress', ['badge_id', 'user_id'])
    def backwards(self, orm):
        # Removing unique constraint on 'Progress', fields ['badge', 'user']
        db.delete_unique('badger_progress', ['badge_id', 'user_id'])
        # Removing unique constraint on 'Badge', fields ['title', 'slug']
        db.delete_unique('badger_badge', ['title', 'slug'])
        # Deleting model 'Badge'
        db.delete_table('badger_badge')
        # Removing M2M table for field prerequisites on 'Badge'
        db.delete_table('badger_badge_prerequisites')
        # Deleting model 'Award'
        db.delete_table('badger_award')
        # Deleting model 'Progress'
        db.delete_table('badger_progress')
    # Frozen ORM snapshot used by South to reconstruct model state at this
    # point in history; generated, do not hand-edit field definitions.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'badger.award': {
            'Meta': {'object_name': 'Award'},
            'badge': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['badger.Badge']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'creator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'award_creator'", 'null': 'True', 'to': "orm['auth.User']"}),
            'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
        },
        'badger.badge': {
            'Meta': {'unique_together': "(('title', 'slug'),)", 'object_name': 'Badge'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'prerequisites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['badger.Badge']", 'null': 'True', 'blank': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
            'unique': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
        },
        'badger.progress': {
            'Meta': {'unique_together': "(('badge', 'user'),)", 'object_name': 'Progress'},
            'badge': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['badger.Badge']"}),
            'counter': ('django.db.models.fields.FloatField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'notes': ('badger.models.JSONField', [], {'null': 'True', 'blank': 'True'}),
            'percent': ('django.db.models.fields.FloatField', [], {'default': '0'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'progress_user'", 'to': "orm['auth.User']"})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }
    complete_apps = ['badger']
| bsd-3-clause | f784b41c96c5c0dcd117a9de819d2b6f | 67.484472 | 182 | 0.57446 | 3.641347 | false | false | false | false |
aparo/pyes | docs/conf.py | 9 | 2564 | # -*- coding: utf-8 -*-
import sys
import os
# If your extensions are in another directory, add it here. If the directory
# is relative to the documentation root, use os.path.abspath to make it
# absolute, like shown here.
currpath = os.path.dirname(os.path.abspath(__file__))
pyespath = os.path.join(currpath, "pyes")
sys.path.append(pyespath)
#import settings
#from django.core.management import setup_environ
# Commenting out the following line as it is not used.
#from django.conf import settings as dsettings
#setup_environ(settings)
#dsettings.configure()
# Imported for its version metadata, used for |version|/|release| below.
import pyes as info
sys.path.append(os.path.join(os.path.dirname(__file__), "_ext"))
# -- General configuration -----------------------------------------------------
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.coverage', 'djangodocs']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['.templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'PyES Documentation'
copyright = u'2010, Alberto Paro and Elastic Search. All Rights Reserved.'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = info.__version__
# The full version, including alpha/beta/rc tags.
release = info.version_with_meta()
exclude_trees = ['.build']
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'trac'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_use_smartypants = True
# If false, no module index is generated.
html_use_modindex = True
# If false, no index is generated.
html_use_index = True
# LaTeX output: (source start file, target name, title, author, documentclass).
# Fixed: the original used ``ur'...'`` literals, which are a SyntaxError on
# Python 3; plain ``u'...'`` is value-identical here (no escapes present)
# and valid on both Python 2 and 3.
latex_documents = [
    ('index', 'pyes.tex', u'PyES Documentation',
     u'Elastic Search', 'manual'),
]
| bsd-3-clause | be7ae7216fd8c89abc607cf8a09e34ec | 29.891566 | 80 | 0.721139 | 3.657632 | false | false | false | false |
aparo/pyes | pyes/tests.py | 1 | 5813 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
import logging
import unittest
from pprint import pprint
from pyes.es import ES
from pyes.helpers import SettingsBuilder
"""
Unit tests for pyes.
"""
def get_conn(*args, **kwargs):
    """Return an ES client bound to the local test server on port 9200."""
    server = ("http", "127.0.0.1", 9200)
    return ES(server, *args, **kwargs)
# Canonical Elasticsearch field mapping shared by the test indices; the
# keys mirror the fields indexed by the fixtures in setUp() below.
DEFAULT_TEST_MAPPING = {
    u'parsedtext': {
        'store': 'true',
        'type': u'text',
        "term_vector": "with_positions_offsets"},
    u'name': {
        'store': 'true',
        'type': u'text',
        "term_vector": "with_positions_offsets"},
    u'title': {
        'store': 'true',
        'type': u'text',
        "term_vector": "with_positions_offsets"},
    u'pos': {
        'store': 'true',
        'type': u'integer'},
    u'position': {
        'store': 'true',
        'type': u'integer'},
    u'doubles': {
        'store': 'true',
        'type': u'double'},
    u'uuid': {
        'store': 'true',
        'type': u'keyword'},
    u'tag': {'store': 'true',
             'type': u'keyword'},
    u'array': {'store': 'true',
               'type': u'integer'},
    u'inserted': {'store': 'true',
                  'type': u'date'},
    u'date': {'store': 'true',
              'type': u'date'},
    u'resellers': {
        'type': 'nested',
        'properties': {
            'name': {'type': 'text'},
            'price': {'type': 'double'}
        }},
}
class ESTestCase(unittest.TestCase):
    """Base class for pyes integration tests.

    Each test gets a fresh connection to the local ES server, a clean
    ``test-index``, and a shell log of the curl equivalent of every
    request (written to ``/tmp/<test_name>.sh``) to ease debugging.
    """

    def setUp(self):
        # Record every request as a curl command for post-mortem debugging.
        self.log = open("/tmp/%s.sh" % self._testMethodName, "wb")
        # Large timeout so a debugger can be attached without the client
        # giving up on the connection.
        self.conn = get_conn(timeout=300.0, log_curl=True, dump_curl=self.log)
        self.index_name = "test-index"
        self.document_type = "test-type"
        self.conn.indices.delete_index_if_exists(self.index_name)

    def tearDown(self):
        self.conn.indices.delete_index_if_exists(self.index_name)
        if self.log:
            self.log.close()

    def assertResultContains(self, result, expected):
        """Assert every ``expected`` item appears in ``result``.

        A value may match either at the top level of ``result`` or inside
        its ``meta`` sub-mapping.
        """
        for (key, value) in expected.items():
            found = False
            try:
                found = value == result[key]
            except KeyError:
                # Fixed: dict.has_key() was removed in Python 3; use ``in``.
                if 'meta' in result:
                    found = value == result['meta'][key]
            self.assertEqual(True, found)

    def checkRaises(self, excClass, callableObj, *args, **kwargs):
        """Assert that calling callableObj with *args and **kwargs raises an
        exception of type excClass, and return the exception object so that
        further tests on it can be performed.
        """
        try:
            callableObj(*args, **kwargs)
        except excClass as e:
            return e
        else:
            raise self.failureException("Expected exception %s not raised" % excClass)

    def get_datafile(self, filename):
        """Return the raw bytes of a bundled test data file."""
        # Fixed: context manager closes the handle (the original leaked it).
        with open(self.get_datafile_path(filename), "rb") as handle:
            return handle.read()

    def get_datafile_path(self, filename):
        """Return the absolute path of a bundled test data file."""
        return os.path.join(os.path.dirname(__file__), "data", filename)

    def dump(self, result):
        """Pretty-print ``result`` to stdout (debugging helper)."""
        pprint(result)

    def init_default_index(self):
        """Create ``self.index_name`` with the standard test mapping."""
        settings = SettingsBuilder({'index.number_of_replicas': 0,
                                    "index.number_of_shards": 1})
        from pyes.mappings import DocumentObjectField
        from pyes.mappings import IntegerField
        from pyes.mappings import NestedObject
        from pyes.mappings import TextField, KeywordField, DateField
        docmapping = DocumentObjectField(name=self.document_type)
        docmapping.add_property(
            TextField(name="parsedtext", store=True, term_vector="with_positions_offsets"))
        docmapping.add_property(
            TextField(name="name", store=True, term_vector="with_positions_offsets"))
        docmapping.add_property(
            TextField(name="title", store=True, term_vector="with_positions_offsets"))
        docmapping.add_property(IntegerField(name="position", store=True))
        docmapping.add_property(DateField(name="date", store=True))
        docmapping.add_property(KeywordField(name="uuid", store=True))
        nested_object = NestedObject(name="nested")
        nested_object.add_property(TextField(name="name", store=True))
        nested_object.add_property(TextField(name="value", store=True))
        nested_object.add_property(IntegerField(name="num", store=True))
        docmapping.add_property(nested_object)
        settings.add_mapping(docmapping)
        self.conn.ensure_index(self.index_name, settings)

    def get_default_mapping(self):
        """Return the canonical field mapping used by the test indices."""
        return DEFAULT_TEST_MAPPING
def setUp():
    """Package-level setup (nose-style fixture).

    Tests that only read data share one pre-populated index,
    ``test-pindex``, built here once instead of per test case.
    """
    conn = get_conn(log_curl=True)
    conn.indices.delete_index_if_exists("test-pindex")
    conn.indices.create_index("test-pindex")
    conn.indices.put_mapping("test-type", {'properties': DEFAULT_TEST_MAPPING},
                             ["test-pindex"])
    fixtures = [
        ({"name": "Joe Tester", "parsedtext": "Joe Testere nice guy",
          "uuid": "11111", "position": 1, "doubles": [1.0, 2.0, 3.0]}, 1),
        ({"name": "Bill Baloney", "parsedtext": "Joe Testere nice guy",
          "uuid": "22222", "position": 2, "doubles": [0.1, 0.2, 0.3]}, 2),
    ]
    for doc, doc_id in fixtures:
        conn.index(doc, "test-pindex", "test-type", doc_id)
    conn.indices.refresh(["test-pindex"])
def tearDown():
    """Package-level teardown: drop the shared read-only index."""
    get_conn().indices.delete_index_if_exists("test-pindex")
| bsd-3-clause | 98f25d644ea8226b2200b2424a16953b | 33.194118 | 115 | 0.584208 | 3.716752 | false | true | false | false |
aparo/pyes | pyes/scriptfields.py | 6 | 1947 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from .exceptions import ScriptFieldsError
class ScriptField(object):
    """Value object describing a single scripted field.

    Stores the script source, its language (``mvel`` by default), an
    optional parameter mapping, and the ignore-failure flag.
    """

    def __init__(self, script, lang="mvel", params=None, ignore_failure=False):
        self.ignore_failure = ignore_failure
        self.params = params
        self.lang = lang
        self.script = script
class ScriptFields(object):
    """Builder for an Elasticsearch ``script_fields`` search section.

    Each entry maps a field name to a dict holding the script source plus
    an optional language, parameters and ignore-failure flag.
    """
    _internal_name = "script_fields"

    def __init__(self, name=None, script=None, lang=None, params=None,
                 ignore_failure=False):
        self.fields = {}
        if name:
            self.add_field(name, script, lang, params or {}, ignore_failure)

    def add_field(self, name, script, lang=None, params=None,
                  ignore_failure=False):
        """Add a field to script_fields.

        Raises ``ScriptFieldsError`` if ``script`` is missing or ``params``
        is not a dictionary.
        """
        data = {}
        if lang:
            data["lang"] = lang
        if not script:
            raise ScriptFieldsError("Script is required for script_fields definition")
        data['script'] = script
        if params:
            if not isinstance(params, dict):
                raise ScriptFieldsError("Parameters should be a valid dictionary")
            data['params'] = params
        if ignore_failure:
            data['ignore_failure'] = ignore_failure
        self.fields[name] = data

    def add_parameter(self, field_name, param_name, param_value):
        """Add (or overwrite) one parameter of an existing scripted field.

        Fixed: the original raised even when the field simply had no
        ``params`` dict yet; now the dict is created on demand. Unknown
        field names still raise ``ScriptFieldsError``. Returns ``self``
        so calls can be chained.
        """
        try:
            field = self.fields[field_name]
        except KeyError:
            raise ScriptFieldsError(
                "Error adding parameter %s with value %s :unknown field %s"
                % (param_name, param_value, field_name))
        field.setdefault('params', {})[param_name] = param_value
        return self

    def serialize(self):
        """Return the plain-dict form suitable for a search request body."""
        return self.fields
| bsd-3-clause | ab1e678218c9135141e601e5c7f1c113 | 30.403226 | 114 | 0.588598 | 4.298013 | false | false | false | false |
mne-tools/mne-python | tutorials/epochs/30_epochs_metadata.py | 9 | 6938 | # -*- coding: utf-8 -*-
"""
.. _tut-epochs-metadata:
===========================
Working with Epoch metadata
===========================
This tutorial shows how to add metadata to `~mne.Epochs` objects, and
how to use :ref:`Pandas query strings <pandas:indexing.query>` to select and
plot epochs based on metadata properties.
For this tutorial we'll use a different dataset than usual: the
:ref:`kiloword-dataset`, which contains EEG data averaged across 75 subjects
who were performing a lexical decision (word/non-word) task. The data is in
`~mne.Epochs` format, with each epoch representing the response to a
different stimulus (word). As usual we'll start by importing the modules we
need and loading the data:
"""
# %%
import numpy as np
import pandas as pd
import mne
kiloword_data_folder = mne.datasets.kiloword.data_path()
kiloword_data_file = kiloword_data_folder / 'kword_metadata-epo.fif'
epochs = mne.read_epochs(kiloword_data_file)
# %%
# Viewing ``Epochs`` metadata
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# .. admonition:: Restrictions on metadata DataFrames
# :class: sidebar warning
#
# Metadata dataframes are less flexible than typical
# :class:`Pandas DataFrames <pandas.DataFrame>`. For example, the allowed
# data types are restricted to strings, floats, integers, or booleans;
# and the row labels are always integers corresponding to epoch numbers.
# Other capabilities of :class:`DataFrames <pandas.DataFrame>` such as
# :class:`hierarchical indexing <pandas.MultiIndex>` are possible while the
# `~mne.Epochs` object is in memory, but will not survive saving and
# reloading the `~mne.Epochs` object to/from disk.
#
# The metadata attached to `~mne.Epochs` objects is stored as a
# :class:`pandas.DataFrame` containing one row for each epoch. The columns of
# this :class:`~pandas.DataFrame` can contain just about any information you
# want to store about each epoch; in this case, the metadata encodes
# information about the stimulus seen on each trial, including properties of
# the visual word form itself (e.g., ``NumberOfLetters``, ``VisualComplexity``)
# as well as properties of what the word means (e.g., its ``Concreteness``) and
# its prominence in the English lexicon (e.g., ``WordFrequency``). Here are all
# the variables; note that in a Jupyter notebook, viewing a
# :class:`pandas.DataFrame` gets rendered as an HTML table instead of the
# normal Python output block:
epochs.metadata
# %%
# Viewing the metadata values for a given epoch and metadata variable is done
# using any of the :ref:`Pandas indexing <pandas:/reference/indexing.rst>`
# methods such as :obj:`~pandas.DataFrame.loc`,
# :obj:`~pandas.DataFrame.iloc`, :obj:`~pandas.DataFrame.at`,
# and :obj:`~pandas.DataFrame.iat`. Because the
# index of the dataframe is the integer epoch number, the name- and index-based
# selection methods will work similarly for selecting rows, except that
# name-based selection (with :obj:`~pandas.DataFrame.loc`) is inclusive of the
# endpoint:
print('Name-based selection with .loc')
print(epochs.metadata.loc[2:4])
print('\nIndex-based selection with .iloc')
print(epochs.metadata.iloc[2:4])
# %%
# Modifying the metadata
# ^^^^^^^^^^^^^^^^^^^^^^
#
# Like any :class:`pandas.DataFrame`, you can modify the data or add columns as
# needed. Here we convert the ``NumberOfLetters`` column from :class:`float` to
# :class:`integer <int>` data type, and add a :class:`boolean <bool>` column
# that arbitrarily divides the variable ``VisualComplexity`` into high and low
# groups.
epochs.metadata['NumberOfLetters'] = \
epochs.metadata['NumberOfLetters'].map(int)
epochs.metadata['HighComplexity'] = epochs.metadata['VisualComplexity'] > 65
epochs.metadata.head()
# %%
# Selecting epochs using metadata queries
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# All `~mne.Epochs` objects can be subselected by event name, index, or
# :term:`slice` (see :ref:`tut-section-subselect-epochs`). But
# `~mne.Epochs` objects with metadata can also be queried using
# :ref:`Pandas query strings <pandas:indexing.query>` by passing the query
# string just as you would normally pass an event name. For example:
print(epochs['WORD.str.startswith("dis")'])
# %%
# This capability uses the :meth:`pandas.DataFrame.query` method under the
# hood, so you can check out the documentation of that method to learn how to
# format query strings. Here's another example:
print(epochs['Concreteness > 6 and WordFrequency < 1'])
# %%
# Note also that traditional epochs subselection by condition name still works;
# MNE-Python will try the traditional method first before falling back on rich
# metadata querying.
epochs['solenoid'].plot_psd()
# %%
# One use of the Pandas query string approach is to select specific words for
# plotting:
words = ['typhoon', 'bungalow', 'colossus', 'drudgery', 'linguist', 'solenoid']
epochs['WORD in {}'.format(words)].plot(n_channels=29)
# %%
# Notice that in this dataset, each "condition" (A.K.A., each word) occurs only
# once, whereas with the :ref:`sample-dataset` dataset each condition (e.g.,
# "auditory/left", "visual/right", etc) occurred dozens of times. This makes
# the Pandas querying methods especially useful when you want to aggregate
# epochs that have different condition names but that share similar stimulus
# properties. For example, here we group epochs based on the number of letters
# in the stimulus word, and compare the average signal at electrode ``Pz`` for
# each group:
evokeds = dict()
query = 'NumberOfLetters == {}'
for n_letters in epochs.metadata['NumberOfLetters'].unique():
evokeds[str(n_letters)] = epochs[query.format(n_letters)].average()
# sphinx_gallery_thumbnail_number = 3
mne.viz.plot_compare_evokeds(evokeds, cmap=('word length', 'viridis'),
picks='Pz')
# %%
# Metadata can also be useful for sorting the epochs in an image plot. For
# example, here we order the epochs based on word frequency to see if there's a
# pattern to the latency or intensity of the response:
sort_order = np.argsort(epochs.metadata['WordFrequency'])
epochs.plot_image(order=sort_order, picks='Pz')
# %%
# Although there's no obvious relationship in this case, such analyses may be
# useful for metadata variables that more directly index the time course of
# stimulus processing (such as reaction time).
#
#
# Adding metadata to an ``Epochs`` object
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# You can add a metadata :class:`~pandas.DataFrame` to any
# `~mne.Epochs` object (or replace existing metadata) simply by
# assigning to the :attr:`~mne.Epochs.metadata` attribute:
new_metadata = pd.DataFrame(data=['foo'] * len(epochs), columns=['bar'],
index=range(len(epochs)))
epochs.metadata = new_metadata
epochs.metadata.head()
# %%
# You can remove metadata from an `~mne.Epochs` object by setting its
# metadata to ``None``:
epochs.metadata = None
| bsd-3-clause | df8c6e3d538744d0691dcc365d3fd952 | 38.420455 | 79 | 0.71375 | 3.645822 | false | false | false | false |
mne-tools/mne-python | mne/tests/test_cov.py | 3 | 36001 | # Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Denis Engemann <denis.engemann@gmail.com>
#
# License: BSD-3-Clause
import os.path as op
import itertools as itt
import sys
from inspect import signature
from numpy.testing import (assert_array_almost_equal, assert_array_equal,
assert_equal, assert_allclose)
import pytest
import numpy as np
from scipy import linalg
from mne.cov import (regularize, whiten_evoked,
_auto_low_rank_model,
prepare_noise_cov, compute_whitener,
_regularized_covariance)
from mne import (read_cov, write_cov, Epochs, merge_events,
find_events, compute_raw_covariance,
compute_covariance, read_evokeds, compute_proj_raw,
pick_channels_cov, pick_types, make_ad_hoc_cov,
make_fixed_length_events, create_info, compute_rank)
from mne.channels import equalize_channels
from mne.datasets import testing
from mne.io import read_raw_fif, RawArray, read_raw_ctf, read_info
from mne.io.pick import _DATA_CH_TYPES_SPLIT, pick_info
from mne.preprocessing import maxwell_filter
from mne.rank import _compute_rank_int
from mne.utils import (requires_sklearn, catch_logging, assert_snr,
_record_warnings)
base_dir = op.join(op.dirname(__file__), '..', 'io', 'tests', 'data')
cov_fname = op.join(base_dir, 'test-cov.fif')
cov_gz_fname = op.join(base_dir, 'test-cov.fif.gz')
cov_km_fname = op.join(base_dir, 'test-km-cov.fif')
raw_fname = op.join(base_dir, 'test_raw.fif')
ave_fname = op.join(base_dir, 'test-ave.fif')
erm_cov_fname = op.join(base_dir, 'test_erm-cov.fif')
hp_fif_fname = op.join(base_dir, 'test_chpi_raw_sss.fif')
ctf_fname = op.join(testing.data_path(download=False), 'CTF',
'testdata_ctf.ds')
@pytest.mark.parametrize('proj', (True, False))
@pytest.mark.parametrize('pca', (True, 'white', False))
def test_compute_whitener(proj, pca):
    """Test properties of compute_whitener.

    Checks whitener/colorer shapes and that ``W @ C`` round-trips to an
    identity of the expected dimension for each ``pca`` mode, both with and
    without projectors and with a bad channel present.
    """
    raw = read_raw_fif(raw_fname).crop(0, 3).load_data()
    raw.pick_types(meg=True, eeg=True, exclude=())
    if proj:
        raw.apply_proj()
    else:
        raw.del_proj()
    with pytest.warns(RuntimeWarning, match='Too few samples'):
        cov = compute_raw_covariance(raw)
    assert cov['names'] == raw.ch_names
    W, _, C = compute_whitener(cov, raw.info, pca=pca, return_colorer=True,
                               verbose='error')
    n_channels = len(raw.ch_names)
    rank = n_channels - len(raw.info['projs'])
    # pca=True reduces output to the rank; 'white' and False keep all channels
    # (the original also had a dead `n_reduced = len(raw.ch_names)` assignment
    # here that was immediately overwritten; removed)
    n_reduced = rank if pca is True else n_channels
    assert W.shape == C.shape[::-1] == (n_reduced, n_channels)
    # round-trip mults
    round_trip = np.dot(W, C)
    if pca is True:
        assert_allclose(round_trip, np.eye(n_reduced), atol=1e-7)
    elif pca == 'white':
        # Our first few rows/cols are zeroed out in the white space
        assert_allclose(round_trip[-rank:, -rank:],
                        np.eye(rank), atol=1e-7)
    else:
        assert pca is False
        assert_allclose(round_trip, np.eye(n_channels), atol=0.05)
    # With one bad channel, covariances computed with explicit picks and with
    # default picks must agree on the remaining good channels.
    raw.info['bads'] = [raw.ch_names[0]]
    picks = pick_types(raw.info, meg=True, eeg=True, exclude=[])
    with pytest.warns(RuntimeWarning, match='Too few samples'):
        cov2 = compute_raw_covariance(raw, picks=picks)
        cov3 = compute_raw_covariance(raw, picks=None)
    assert_allclose(cov2['data'][1:, 1:], cov3['data'])
    W2, _, C2 = compute_whitener(cov2, raw.info, pca=pca, return_colorer=True,
                                 picks=picks, verbose='error')
    W3, _, C3 = compute_whitener(cov3, raw.info, pca=pca, return_colorer=True,
                                 picks=None, verbose='error')
    # this tol is not great, but Windows needs it
    rtol = 1e-3 if sys.platform.startswith('win') else 1e-11
    assert_allclose(W, W2, rtol=rtol)
    assert_allclose(C, C2, rtol=rtol)
    n_channels = len(raw.ch_names) - len(raw.info['bads'])
    rank = n_channels - len(raw.info['projs'])
    n_reduced = rank if pca is True else n_channels
    assert W3.shape == C3.shape[::-1] == (n_reduced, n_channels)
def test_cov_mismatch():
    """Test estimation with MEG<->Head mismatch.

    Covariance estimation over multiple Epochs objects must detect
    inconsistent ``dev_head_t`` transforms and honor ``on_mismatch``.
    """
    raw = read_raw_fif(raw_fname).crop(0, 5).load_data()
    events = find_events(raw, stim_channel='STI 014')
    # keep only a few channels for speed
    raw.pick_channels(raw.ch_names[:5])
    raw.add_proj([], remove_existing=True)
    epochs = Epochs(raw, events, None, tmin=-0.2, tmax=0., preload=True)
    for kind in ('shift', 'None'):
        epochs_2 = epochs.copy()
        # This should be fine
        compute_covariance([epochs, epochs_2])
        if kind == 'shift':
            # perturb the device->head translation by 1 mm
            epochs_2.info['dev_head_t']['trans'][:3, 3] += 0.001
        else:  # None
            epochs_2.info['dev_head_t'] = None
        # mismatched transforms must raise by default ...
        pytest.raises(ValueError, compute_covariance, [epochs, epochs_2])
        # ... but can be ignored or downgraded to a warning
        compute_covariance([epochs, epochs_2], on_mismatch='ignore')
        with pytest.warns(RuntimeWarning, match='transform mismatch'):
            compute_covariance([epochs, epochs_2], on_mismatch='warn')
        # invalid on_mismatch values are rejected
        with pytest.raises(ValueError, match='Invalid value'):
            compute_covariance(epochs, on_mismatch='x')
    # This should work (both transforms are None, hence consistent)
    epochs.info['dev_head_t'] = None
    epochs_2.info['dev_head_t'] = None
    compute_covariance([epochs, epochs_2], method=None)
def test_cov_order():
    """Test covariance ordering.

    Functions taking a covariance plus an Info must give identical results
    regardless of the channel order inside the covariance.
    """
    raw = read_raw_fif(raw_fname)
    raw.set_eeg_reference(projection=True)
    info = raw.info
    # add MEG channel with low enough index number to affect EEG if
    # order is incorrect
    info['bads'] += ['MEG 0113']
    ch_names = [info['ch_names'][pick]
                for pick in pick_types(info, meg=False, eeg=True)]
    cov = read_cov(cov_fname)
    # no avg ref present warning
    prepare_noise_cov(cov, info, ch_names, verbose='error')
    # big reordering
    cov_reorder = cov.copy()
    order = np.random.RandomState(0).permutation(np.arange(len(cov.ch_names)))
    cov_reorder['names'] = [cov['names'][ii] for ii in order]
    cov_reorder['data'] = cov['data'][order][:, order]
    # Make sure we did this properly
    _assert_reorder(cov_reorder, cov, order)
    # Now check some functions that should get the same result for both
    # regularize
    with pytest.raises(ValueError, match='rank, if str'):
        regularize(cov, info, rank='foo')
    with pytest.raises(TypeError, match='rank must be'):
        regularize(cov, info, rank=False)
    with pytest.raises(TypeError, match='rank must be'):
        regularize(cov, info, rank=1.)
    cov_reg = regularize(cov, info, rank='full')
    cov_reg_reorder = regularize(cov_reorder, info, rank='full')
    _assert_reorder(cov_reg_reorder, cov_reg, order)
    # prepare_noise_cov
    # BUGFIX: the second call must use the *reordered* covariance; the
    # original passed `cov` both times, making the comparison below
    # trivially true.
    cov_prep = prepare_noise_cov(cov, info, ch_names)
    cov_prep_reorder = prepare_noise_cov(cov_reorder, info, ch_names)
    # prepare_noise_cov sorts to `ch_names` order, so both outputs should
    # already be identically ordered
    _assert_reorder(cov_prep, cov_prep_reorder,
                    order=np.arange(len(cov_prep['names'])))
    # compute_whitener
    whitener, w_ch_names, n_nzero = compute_whitener(
        cov, info, return_rank=True)
    assert whitener.shape[0] == whitener.shape[1]
    whitener_2, w_ch_names_2, n_nzero_2 = compute_whitener(
        cov_reorder, info, return_rank=True)
    assert_array_equal(w_ch_names_2, w_ch_names)
    assert_allclose(whitener_2, whitener, rtol=1e-6)
    assert n_nzero == n_nzero_2
    # with pca
    assert n_nzero < whitener.shape[0]
    whitener_pca, w_ch_names_pca, n_nzero_pca = compute_whitener(
        cov, info, pca=True, return_rank=True)
    assert_array_equal(w_ch_names_pca, w_ch_names)
    assert n_nzero_pca == n_nzero
    assert whitener_pca.shape == (n_nzero_pca, len(w_ch_names))
    # whiten_evoked
    evoked = read_evokeds(ave_fname)[0]
    evoked_white = whiten_evoked(evoked, cov)
    evoked_white_2 = whiten_evoked(evoked, cov_reorder)
    assert_allclose(evoked_white_2.data, evoked_white.data, atol=1e-7)
def _assert_reorder(cov_new, cov_orig, order):
"""Check that we get the same result under reordering."""
inv_order = np.argsort(order)
assert_array_equal([cov_new['names'][ii] for ii in inv_order],
cov_orig['names'])
assert_allclose(cov_new['data'][inv_order][:, inv_order],
cov_orig['data'], atol=1e-20)
def test_ad_hoc_cov(tmp_path):
    """Test ad hoc cov creation and I/O."""
    out_fname = tmp_path / 'test-cov.fif'
    evoked = read_evokeds(ave_fname)[0]
    # default standard deviations must round-trip through disk
    cov = make_ad_hoc_cov(evoked.info)
    cov.save(out_fname)
    assert 'Covariance' in repr(cov)
    cov2 = read_cov(out_fname)
    assert_array_almost_equal(cov['data'], cov2['data'])
    # user-specified standard deviations must round-trip as well
    std = dict(grad=2e-13, mag=10e-15, eeg=0.1e-6)
    cov = make_ad_hoc_cov(evoked.info, std)
    cov.save(out_fname, overwrite=True)
    assert 'Covariance' in repr(cov)
    cov2 = read_cov(out_fname)
    assert_array_almost_equal(cov['data'], cov2['data'])
    # The dimensionality of 'data' must agree with the 'diag' flag:
    # np.diag() toggles between the square and the diagonal representation,
    # and doing so without updating 'diag' makes the attributes inconsistent.
    cov['data'] = np.diag(cov['data'])
    with pytest.raises(RuntimeError, match='attributes inconsistent'):
        cov._get_square()
    # flipping 'diag' restores consistency with the square data
    cov['diag'] = False
    cov._get_square()
    # extracting the diagonal again while diag=False is inconsistent again
    cov['data'] = np.diag(cov['data'])
    with pytest.raises(RuntimeError, match='attributes inconsistent'):
        cov._get_square()
def test_io_cov(tmp_path):
    """Test IO for noise covariance matrices."""
    cov = read_cov(cov_fname)
    cov['method'] = 'empirical'
    cov['loglik'] = -np.inf
    # round-trip: data, method, and log-likelihood must survive save/load
    cov.save(tmp_path / 'test-cov.fif')
    cov2 = read_cov(tmp_path / 'test-cov.fif')
    assert_array_almost_equal(cov.data, cov2.data)
    assert_equal(cov['method'], cov2['method'])
    assert_equal(cov['loglik'], cov2['loglik'])
    assert 'Covariance' in repr(cov)
    # gzipped files are supported too
    cov2 = read_cov(cov_gz_fname)
    assert_array_almost_equal(cov.data, cov2.data)
    cov2.save(tmp_path / 'test-cov.fif.gz')
    cov2 = read_cov(tmp_path / 'test-cov.fif.gz')
    assert_array_almost_equal(cov.data, cov2.data)
    # channel subsetting via pick_channels_cov keeps dims consistent
    cov['bads'] = ['EEG 039']
    cov_sel = pick_channels_cov(cov, exclude=cov['bads'])
    assert cov_sel['dim'] == (len(cov['data']) - len(cov['bads']))
    assert cov_sel['data'].shape == (cov_sel['dim'], cov_sel['dim'])
    cov_sel.save(tmp_path / 'test-cov.fif', overwrite=True)
    cov2 = read_cov(cov_gz_fname)
    assert_array_almost_equal(cov.data, cov2.data)
    cov2.save(tmp_path / 'test-cov.fif.gz', overwrite=True)
    cov2 = read_cov(tmp_path / 'test-cov.fif.gz')
    assert_array_almost_equal(cov.data, cov2.data)
    # test warnings on bad filenames (should end in -cov.fif[.gz])
    cov_badname = tmp_path / 'test-bad-name.fif.gz'
    with pytest.warns(RuntimeWarning, match='-cov.fif'):
        write_cov(cov_badname, cov)
    with pytest.warns(RuntimeWarning, match='-cov.fif'):
        read_cov(cov_badname)
@pytest.mark.parametrize('method', [
    None,
    'empirical',
    pytest.param('shrunk', marks=pytest.mark.slowtest),
])
def test_cov_estimation_on_raw(method, tmp_path):
    """Test estimation from raw (typically empty room).

    Compares compute_raw_covariance against a stored reference covariance
    and against a plain np.cov computation, then checks I/O, channel
    subsets, and rejection behavior.
    """
    if method == 'shrunk':
        try:
            import sklearn  # noqa: F401
        except Exception as exp:
            pytest.skip('sklearn is required, got %s' % (exp,))
    raw = read_raw_fif(raw_fname, preload=True)
    cov_mne = read_cov(erm_cov_fname)
    method_params = dict(shrunk=dict(shrinkage=[0]))
    # The pure-string uses the more efficient numpy-based method, the
    # list gets triaged to compute_covariance (should be equivalent
    # but use more memory)
    with _record_warnings():  # can warn about EEG ref
        cov = compute_raw_covariance(
            raw, tstep=None, method=method, rank='full',
            method_params=method_params)
    assert_equal(cov.ch_names, cov_mne.ch_names)
    assert_equal(cov.nfree, cov_mne.nfree)
    assert_snr(cov.data, cov_mne.data, 1e6)
    # test equivalence with np.cov
    cov_np = np.cov(raw.copy().pick_channels(cov['names']).get_data(), ddof=1)
    if method != 'shrunk':  # can check all
        off_diag = np.triu_indices(cov_np.shape[0])
    else:
        # We explicitly zero out off-diag entries between channel types,
        # so let's just check MEG off-diag entries
        off_diag = np.triu_indices(len(pick_types(raw.info, meg=True,
                                                  exclude=())))
    for other in (cov_mne, cov):
        assert_allclose(np.diag(cov_np), np.diag(other.data), rtol=5e-6)
        assert_allclose(cov_np[off_diag], other.data[off_diag], rtol=4e-3)
        assert_snr(cov.data, other.data, 1e6)
    # tstep=0.2 (default)
    with _record_warnings():  # can warn about EEG ref
        cov = compute_raw_covariance(raw, method=method, rank='full',
                                     method_params=method_params)
    assert_equal(cov.nfree, cov_mne.nfree - 120)  # cutoff some samples
    assert_snr(cov.data, cov_mne.data, 170)
    # test IO when computation done in Python
    cov.save(tmp_path / 'test-cov.fif')  # test saving
    cov_read = read_cov(tmp_path / 'test-cov.fif')
    assert cov_read.ch_names == cov.ch_names
    assert cov_read.nfree == cov.nfree
    assert_array_almost_equal(cov.data, cov_read.data)
    # test with a subset of channels
    raw_pick = raw.copy().pick_channels(raw.ch_names[:5])
    raw_pick.info.normalize_proj()
    cov = compute_raw_covariance(raw_pick, tstep=None, method=method,
                                 rank='full', method_params=method_params)
    assert cov_mne.ch_names[:5] == cov.ch_names
    assert_snr(cov.data, cov_mne.data[:5, :5], 5e6)
    cov = compute_raw_covariance(raw_pick, method=method, rank='full',
                                 method_params=method_params)
    assert_snr(cov.data, cov_mne.data[:5, :5], 90)  # cutoff samps
    # make sure we get a warning with too short a segment
    raw_2 = read_raw_fif(raw_fname).crop(0, 1)
    with pytest.warns(RuntimeWarning, match='Too few samples'):
        cov = compute_raw_covariance(raw_2, method=method,
                                     method_params=method_params)
    # no epochs found due to rejection
    pytest.raises(ValueError, compute_raw_covariance, raw, tstep=None,
                  method='empirical', reject=dict(eog=200e-6))
    # but this should work (laxer EOG rejection threshold)
    with _record_warnings():  # sklearn
        cov = compute_raw_covariance(
            raw.copy().crop(0, 10.), tstep=None, method=method,
            reject=dict(eog=1000e-6), method_params=method_params,
            verbose='error')
@pytest.mark.slowtest
@requires_sklearn
def test_cov_estimation_on_raw_reg():
    """Test estimation from raw with regularization."""
    raw = read_raw_fif(raw_fname, preload=True)
    # decimate: keep every 10th sample and scale sfreq to match
    with raw.info._unlock():
        raw.info['sfreq'] /= 10.
    raw = RawArray(raw._data[:, ::10].copy(), raw.info)  # decimate for speed
    cov_mne = read_cov(erm_cov_fname)
    with pytest.warns(RuntimeWarning, match='Too few samples'):
        # "diagonal_fixed" is much faster. Use long epochs for speed.
        cov = compute_raw_covariance(raw, tstep=5., method='diagonal_fixed')
    assert_snr(cov.data, cov_mne.data, 5)
def _assert_cov(cov, cov_desired, tol=0.005, nfree=True):
assert_equal(cov.ch_names, cov_desired.ch_names)
err = (linalg.norm(cov.data - cov_desired.data, ord='fro') /
linalg.norm(cov.data, ord='fro'))
assert err < tol, '%s >= %s' % (err, tol)
if nfree:
assert_equal(cov.nfree, cov_desired.nfree)
@pytest.mark.slowtest
@pytest.mark.parametrize('rank', ('full', None))
def test_cov_estimation_with_triggers(rank, tmp_path):
    """Test estimation from raw with triggers.

    Exercises compute_covariance on epoched data: merged events, epoch
    lists, keep_sample_mean, I/O round-trip, mismatched projectors, and
    dict event_id support.
    """
    raw = read_raw_fif(raw_fname)
    raw.set_eeg_reference(projection=True).load_data()
    events = find_events(raw, stim_channel='STI 014')
    event_ids = [1, 2, 3, 4]
    reject = dict(grad=10000e-13, mag=4e-12, eeg=80e-6, eog=150e-6)
    # cov with merged events and keep_sample_mean=True
    events_merged = merge_events(events, event_ids, 1234)
    epochs = Epochs(raw, events_merged, 1234, tmin=-0.2, tmax=0,
                    baseline=(-0.2, -0.1), proj=True,
                    reject=reject, preload=True)
    cov = compute_covariance(epochs, keep_sample_mean=True)
    cov_km = read_cov(cov_km_fname)
    # adjust for nfree bug
    cov_km['nfree'] -= 1
    _assert_cov(cov, cov_km)
    # Test with tmin and tmax (different but not too much)
    cov_tmin_tmax = compute_covariance(epochs, tmin=-0.19, tmax=-0.01)
    assert np.all(cov.data != cov_tmin_tmax.data)
    err = (linalg.norm(cov.data - cov_tmin_tmax.data, ord='fro') /
           linalg.norm(cov_tmin_tmax.data, ord='fro'))
    assert err < 0.05
    # cov using a list of epochs and keep_sample_mean=True: must match the
    # merged-events result above
    epochs = [Epochs(raw, events, ev_id, tmin=-0.2, tmax=0,
                     baseline=(-0.2, -0.1), proj=True, reject=reject)
              for ev_id in event_ids]
    cov2 = compute_covariance(epochs, keep_sample_mean=True)
    assert_array_almost_equal(cov.data, cov2.data)
    assert cov.ch_names == cov2.ch_names
    # cov with keep_sample_mean=False using a list of epochs
    cov = compute_covariance(epochs, keep_sample_mean=False)
    assert cov_km.nfree == cov.nfree
    _assert_cov(cov, read_cov(cov_fname), nfree=False)
    # keep_sample_mean=False is incompatible with these options
    method_params = {'empirical': {'assume_centered': False}}
    pytest.raises(ValueError, compute_covariance, epochs,
                  keep_sample_mean=False, method_params=method_params)
    pytest.raises(ValueError, compute_covariance, epochs,
                  keep_sample_mean=False, method='shrunk', rank=rank)
    # test IO when computation done in Python
    cov.save(tmp_path / 'test-cov.fif')  # test saving
    cov_read = read_cov(tmp_path / 'test-cov.fif')
    _assert_cov(cov, cov_read, 1e-5)
    # cov with list of epochs with different projectors
    epochs = [Epochs(raw, events[:1], None, tmin=-0.2, tmax=0,
                     baseline=(-0.2, -0.1), proj=True),
              Epochs(raw, events[:1], None, tmin=-0.2, tmax=0,
                     baseline=(-0.2, -0.1), proj=False)]
    # these should fail
    pytest.raises(ValueError, compute_covariance, epochs)
    pytest.raises(ValueError, compute_covariance, epochs, projs=None)
    # these should work, but won't be equal to above
    with pytest.warns(RuntimeWarning, match='Too few samples'):
        cov = compute_covariance(epochs, projs=epochs[0].info['projs'])
    with pytest.warns(RuntimeWarning, match='Too few samples'):
        cov = compute_covariance(epochs, projs=[])
    # test new dict support
    epochs = Epochs(raw, events, dict(a=1, b=2, c=3, d=4), tmin=-0.01, tmax=0,
                    proj=True, reject=reject, preload=True)
    with pytest.warns(RuntimeWarning, match='Too few samples'):
        compute_covariance(epochs)
    with pytest.warns(RuntimeWarning, match='Too few samples'):
        compute_covariance(epochs, projs=[])
    # projs must be a list of Projection (or None)
    pytest.raises(TypeError, compute_covariance, epochs, projs='foo')
    pytest.raises(TypeError, compute_covariance, epochs, projs=['foo'])
def test_arithmetic_cov():
    """Test arithmetic with noise covariance matrices."""
    cov = read_cov(cov_fname)
    # precompute what doubling the covariance should yield
    expected_nfree = 2 * cov.nfree
    expected_data = 2 * cov.data
    expected_names = cov.ch_names
    # out-of-place addition
    cov_sum = cov + cov
    assert_array_almost_equal(expected_nfree, cov_sum.nfree)
    assert_array_almost_equal(expected_data, cov_sum.data)
    assert expected_names == cov_sum.ch_names
    # in-place addition must match the out-of-place result
    cov += cov
    assert_array_almost_equal(cov_sum.nfree, cov.nfree)
    assert_array_almost_equal(cov_sum.data, cov.data)
    assert cov_sum.ch_names == cov.ch_names
def test_regularize_cov():
    """Test cov regularization."""
    raw = read_raw_fif(raw_fname)
    # mark one channel bad to exercise the exclude='bads' path
    raw.info['bads'].append(raw.ch_names[0])
    noise_cov = read_cov(cov_fname)
    regularized = regularize(noise_cov, raw.info,
                             mag=0.1, grad=0.1, eeg=0.1, proj=True,
                             exclude='bads', rank='full')
    # dimensions are untouched by regularization ...
    assert regularized['dim'] == noise_cov['dim']
    assert regularized['data'].shape == noise_cov['data'].shape
    # ... and only a small fraction of entries may grow
    assert np.mean(noise_cov['data'] < regularized['data']) < 0.08
    # every data channel type must be a keyword of regularize()
    reg_params = set(signature(regularize).parameters)
    assert set(_DATA_CH_TYPES_SPLIT) - reg_params == set()
def test_whiten_evoked():
    """Test whitening of evoked data."""
    evoked = read_evokeds(ave_fname, condition=0, baseline=(None, 0),
                          proj=True)
    cov = read_cov(cov_fname)
    ###########################################################################
    # Show result
    picks = pick_types(evoked.info, meg=True, eeg=True, ref_meg=False,
                       exclude='bads')
    noise_cov = regularize(cov, evoked.info, grad=0.1, mag=0.1, eeg=0.1,
                           exclude='bads', rank='full')
    evoked_white = whiten_evoked(evoked, noise_cov, picks, diag=True)
    # whitened baseline data should be roughly unit-scaled: the mean
    # absolute value per channel must fall in (0.2, 1.0)
    whiten_baseline_data = evoked_white.data[picks][:, evoked.times < 0]
    mean_baseline = np.mean(np.abs(whiten_baseline_data), axis=1)
    assert np.all(mean_baseline < 1.)
    assert np.all(mean_baseline > 0.2)
    # degenerate: a covariance missing picked channels must raise
    cov_bad = pick_channels_cov(cov, include=evoked.ch_names[:10])
    pytest.raises(RuntimeError, whiten_evoked, evoked, cov_bad, picks)
def test_regularized_covariance():
    """Test unchanged data with regularized_covariance."""
    evoked = read_evokeds(ave_fname, condition=0, baseline=(None, 0),
                          proj=True)
    # snapshot the data, run the estimator, then verify the input array
    # was not modified in place (regression test for gh-5698)
    snapshot = evoked.data.copy()
    _regularized_covariance(snapshot)
    assert_allclose(snapshot, evoked.data, atol=1e-20)
@requires_sklearn
def test_auto_low_rank():
    """Test probabilistic low rank estimators."""
    n_samples, n_features, rank = 400, 10, 5
    sigma = 0.1

    def get_data(n_samples, n_features, rank, sigma):
        # Generate rank-deficient data (projected onto `rank` singular
        # vectors) plus heteroscedastic noise of scale ~sigma.
        rng = np.random.RandomState(42)
        W = rng.randn(n_features, n_features)
        X = rng.randn(n_samples, rank)
        U, _, _ = linalg.svd(W.copy())
        X = np.dot(X, U[:, :rank].T)
        sigmas = sigma * rng.rand(n_features) + sigma / 2.
        X += rng.randn(n_samples, n_features) * sigmas
        return X

    X = get_data(n_samples=n_samples, n_features=n_features, rank=rank,
                 sigma=sigma)
    method_params = {'iter_n_components': [4, 5, 6]}
    cv = 3
    n_jobs = 1
    mode = 'factor_analysis'
    rescale = 1e8
    X *= rescale
    # cross-validation should recover the true rank from the candidates
    est, info = _auto_low_rank_model(X, mode=mode, n_jobs=n_jobs,
                                     method_params=method_params,
                                     cv=cv)
    assert_equal(info['best'], rank)
    # requesting more components than features must warn
    X = get_data(n_samples=n_samples, n_features=n_features, rank=rank,
                 sigma=sigma)
    method_params = {'iter_n_components': [n_features + 5]}
    msg = ('You are trying to estimate %i components on matrix '
           'with %i features.') % (n_features + 5, n_features)
    with pytest.warns(RuntimeWarning, match=msg):
        _auto_low_rank_model(X, mode=mode, n_jobs=n_jobs,
                             method_params=method_params, cv=cv)
@pytest.mark.slowtest
@pytest.mark.parametrize('rank', ('full', None, 'info'))
@requires_sklearn
def test_compute_covariance_auto_reg(rank):
    """Test automated regularization.

    Runs method='auto' and an explicit method list, checking that the
    regularized estimates differ from the empirical one in a structured way
    and that log-likelihoods land in expected ranges.
    """
    raw = read_raw_fif(raw_fname, preload=True)
    raw.resample(100, npad='auto')  # much faster estimation
    events = find_events(raw, stim_channel='STI 014')
    event_ids = [1, 2, 3, 4]
    reject = dict(mag=4e-12)
    # cov with merged events and keep_sample_mean=True
    events_merged = merge_events(events, event_ids, 1234)
    # we need a few channels for numerical reasons in PCA/FA
    picks = pick_types(raw.info, meg='mag', eeg=False)[:10]
    raw.pick_channels([raw.ch_names[pick] for pick in picks])
    raw.info.normalize_proj()
    epochs = Epochs(
        raw, events_merged, 1234, tmin=-0.2, tmax=0,
        baseline=(-0.2, -0.1), proj=True, reject=reject, preload=True)
    epochs = epochs.crop(None, 0)[:5]
    method_params = dict(factor_analysis=dict(iter_n_components=[3]),
                         pca=dict(iter_n_components=[3]))
    covs = compute_covariance(epochs, method='auto',
                              method_params=method_params,
                              return_estimators=True, rank=rank)
    # make sure regularization produces structured differences
    diag_mask = np.eye(len(epochs.ch_names)).astype(bool)
    off_diag_mask = np.invert(diag_mask)
    for cov_a, cov_b in itt.combinations(covs, 2):
        if (cov_a['method'] == 'diagonal_fixed' and
                # here we have diagonal or no regularization.
                cov_b['method'] == 'empirical' and rank == 'full'):
            assert not np.any(cov_a['data'][diag_mask] ==
                              cov_b['data'][diag_mask])
            # but the rest is the same
            assert_allclose(cov_a['data'][off_diag_mask],
                            cov_b['data'][off_diag_mask], rtol=1e-12)
        else:
            # and here we have shrinkage everywhere.
            # (this assertion was duplicated verbatim in the original;
            # the redundant second copy was removed)
            assert not np.any(cov_a['data'][diag_mask] ==
                              cov_b['data'][diag_mask])
    logliks = [c['loglik'] for c in covs]
    assert np.diff(logliks).max() <= 0  # descending order
    methods = ['empirical', 'ledoit_wolf', 'oas', 'shrunk', 'shrinkage']
    if rank == 'full':
        methods.extend(['factor_analysis', 'pca'])
    with catch_logging() as log:
        cov3 = compute_covariance(epochs, method=methods,
                                  method_params=method_params, projs=None,
                                  return_estimators=True, rank=rank,
                                  verbose=True)
    log = log.getvalue().split('\n')
    if rank is None:
        assert '    Setting small MAG eigenvalues to zero (without PCA)' in log
        assert 'Reducing data rank from 10 -> 7' in log
    else:
        assert 'Reducing' not in log
    method_names = [cov['method'] for cov in cov3]
    # expected log-likelihood windows (empirically determined)
    best_bounds = [-45, -35]
    bounds = [-55, -45] if rank == 'full' else best_bounds
    for method in set(methods) - {'empirical', 'shrunk'}:
        this_lik = cov3[method_names.index(method)]['loglik']
        assert bounds[0] < this_lik < bounds[1]
    this_lik = cov3[method_names.index('shrunk')]['loglik']
    assert best_bounds[0] < this_lik < best_bounds[1]
    this_lik = cov3[method_names.index('empirical')]['loglik']
    bounds = [-110, -100] if rank == 'full' else best_bounds
    assert bounds[0] < this_lik < bounds[1]
    assert_equal({c['method'] for c in cov3}, set(methods))
    # return_estimators=False returns only the best estimate
    cov4 = compute_covariance(epochs, method=methods,
                              method_params=method_params, projs=None,
                              return_estimators=False, rank=rank)
    assert cov3[0]['method'] == cov4['method']  # ordering
    # invalid prespecified method
    pytest.raises(ValueError, compute_covariance, epochs, method='pizza')
    # invalid scalings
    pytest.raises(ValueError, compute_covariance, epochs, method='shrunk',
                  scalings=dict(misc=123))
def _cov_rank(cov, info, proj=True):
    """Compute the integer rank of ``cov``, silencing rank warnings."""
    # Rank mismatches can be intentional here (e.g., violating the
    # computed/info assumption with SSS data and rank='full'), so any
    # resulting warnings are suppressed.
    with _record_warnings():
        rank = _compute_rank_int(cov, info=info, proj=proj)
    return rank
@pytest.fixture(scope='module')
def raw_epochs_events():
    """Create raw, epochs, and events for tests."""
    raw = read_raw_fif(raw_fname)
    raw = raw.set_eeg_reference(projection=True).crop(0, 3)
    # Maxwell filtering heavily reduces the data rank
    raw = maxwell_filter(raw, regularize=None)
    assert raw.info['bads'] == []  # no bads
    events = make_fixed_length_events(raw)
    epochs = Epochs(raw, events, tmin=-0.2, tmax=0, preload=True)
    return raw, epochs, events
@requires_sklearn
@pytest.mark.parametrize('rank', (None, 'full', 'info'))
def test_low_rank_methods(rank, raw_epochs_events):
    """Test low-rank covariance matrix estimation."""
    epochs = raw_epochs_events[1]
    sss_proj_rank = 139  # 80 MEG + 60 EEG - 1 proj
    n_ch = 366
    methods = ('empirical', 'diagonal_fixed', 'oas')
    # expected log-likelihood windows per (rank, method), empirically set
    bounds = {
        'None': dict(empirical=(-15000, -5000),
                     diagonal_fixed=(-1500, -500),
                     oas=(-700, -600)),
        'full': dict(empirical=(-18000, -8000),
                     diagonal_fixed=(-2000, -1600),
                     oas=(-1600, -1000)),
        'info': dict(empirical=(-15000, -5000),
                     diagonal_fixed=(-700, -600),
                     oas=(-700, -600)),
    }
    with pytest.warns(RuntimeWarning, match='Too few samples'):
        covs = compute_covariance(
            epochs, method=methods, return_estimators=True, rank=rank,
            verbose=True)
    for cov in covs:
        method = cov['method']
        these_bounds = bounds[str(rank)][method]
        this_rank = _cov_rank(cov, epochs.info, proj=(rank != 'full'))
        # rank='full' regularized estimates recover full rank; everything
        # else stays at the SSS+proj-limited rank
        if rank == 'full' and method != 'empirical':
            assert this_rank == n_ch
        else:
            assert this_rank == sss_proj_rank
        assert these_bounds[0] < cov['loglik'] < these_bounds[1], \
            (rank, method)
@requires_sklearn
def test_low_rank_cov(raw_epochs_events):
    """Test additional properties of low rank computations."""
    raw, epochs, events = raw_epochs_events
    sss_proj_rank = 139  # 80 MEG + 60 EEG - 1 proj
    n_ch = 366
    proj_rank = 365  # one EEG proj
    with pytest.warns(RuntimeWarning, match='Too few samples'):
        emp_cov = compute_covariance(epochs)
    # Test equivalence with mne.cov.regularize subspace
    with pytest.raises(ValueError, match='are dependent.*must equal'):
        regularize(emp_cov, epochs.info, rank=None, mag=0.1, grad=0.2)
    assert _cov_rank(emp_cov, epochs.info) == sss_proj_rank
    reg_cov = regularize(emp_cov, epochs.info, proj=True, rank='full')
    assert _cov_rank(reg_cov, epochs.info) == proj_rank
    # full-rank regularization of SSS data exceeds the theoretical rank
    with pytest.warns(RuntimeWarning, match='exceeds the theoretical'):
        _compute_rank_int(reg_cov, info=epochs.info)
    del reg_cov
    with catch_logging() as log:
        reg_r_cov = regularize(emp_cov, epochs.info, proj=True, rank=None,
                               verbose=True)
    log = log.getvalue()
    assert 'jointly' in log
    assert _cov_rank(reg_r_cov, epochs.info) == sss_proj_rank
    # proj=True/False give the same data when rank=None
    reg_r_only_cov = regularize(emp_cov, epochs.info, proj=False, rank=None)
    assert _cov_rank(reg_r_only_cov, epochs.info) == sss_proj_rank
    assert_allclose(reg_r_only_cov['data'], reg_r_cov['data'])
    del reg_r_only_cov, reg_r_cov
    # test that rank=306 is same as rank='full'
    epochs_meg = epochs.copy().pick_types(meg=True)
    assert len(epochs_meg.ch_names) == 306
    with epochs_meg.info._unlock():
        epochs_meg.info.update(bads=[], projs=[])
    cov_full = compute_covariance(epochs_meg, method='oas',
                                  rank='full', verbose='error')
    assert _cov_rank(cov_full, epochs_meg.info) == 306
    with pytest.warns(RuntimeWarning, match='few samples'):
        cov_dict = compute_covariance(epochs_meg, method='oas',
                                      rank=dict(meg=306))
    assert _cov_rank(cov_dict, epochs_meg.info) == 306
    assert_allclose(cov_full['data'], cov_dict['data'])
    cov_dict = compute_covariance(epochs_meg, method='oas',
                                  rank=dict(meg=306), verbose='error')
    assert _cov_rank(cov_dict, epochs_meg.info) == 306
    assert_allclose(cov_full['data'], cov_dict['data'])
    # Work with just EEG data to simplify projection / rank reduction
    raw = raw.copy().pick_types(meg=False, eeg=True)
    n_proj = 2
    raw.add_proj(compute_proj_raw(raw, n_eeg=n_proj))
    n_ch = len(raw.ch_names)
    rank = n_ch - n_proj - 1  # plus avg proj
    assert len(raw.info['projs']) == 3
    epochs = Epochs(raw, events, tmin=-0.2, tmax=0, preload=True)
    assert len(raw.ch_names) == n_ch
    emp_cov = compute_covariance(epochs, rank='full', verbose='error')
    assert _cov_rank(emp_cov, epochs.info) == rank
    reg_cov = regularize(emp_cov, epochs.info, proj=True, rank='full')
    assert _cov_rank(reg_cov, epochs.info) == rank
    reg_r_cov = regularize(emp_cov, epochs.info, proj=False, rank=None)
    assert _cov_rank(reg_r_cov, epochs.info) == rank
    # diagonal_fixed with rank=None matches the regularized estimate
    dia_cov = compute_covariance(epochs, rank=None, method='diagonal_fixed',
                                 verbose='error')
    assert _cov_rank(dia_cov, epochs.info) == rank
    assert_allclose(dia_cov['data'], reg_cov['data'])
    epochs.pick_channels(epochs.ch_names[:103])
    # degenerate: pca/factor_analysis require rank='full'
    with pytest.raises(ValueError, match='can.*only be used with rank="full"'):
        compute_covariance(epochs, rank=None, method='pca')
    with pytest.raises(ValueError, match='can.*only be used with rank="full"'):
        compute_covariance(epochs, rank=None, method='factor_analysis')
@testing.requires_testing_data
@requires_sklearn
def test_cov_ctf():
    """Test basic cov computation on ctf data with/without compensation."""
    raw = read_raw_ctf(ctf_fname).crop(0., 2.).load_data()
    events = make_fixed_length_events(raw, 99999)
    assert len(events) == 2
    ch_names = [raw.info['ch_names'][pick]
                for pick in pick_types(raw.info, meg=True, eeg=False,
                                       ref_meg=False)]
    # estimation must work at either compensation grade
    for comp in [0, 1]:
        raw.apply_gradient_compensation(comp)
        epochs = Epochs(raw, events, None, -0.2, 0.2, preload=True)
        with pytest.warns(RuntimeWarning, match='Too few samples'):
            noise_cov = compute_covariance(epochs, tmax=0.,
                                           method=['empirical'])
        prepare_noise_cov(noise_cov, raw.info, ch_names)
    # now compute at grade 0 but prepare after switching raw to grade 1
    raw.apply_gradient_compensation(0)
    epochs = Epochs(raw, events, None, -0.2, 0.2, preload=True)
    with pytest.warns(RuntimeWarning, match='Too few samples'):
        noise_cov = compute_covariance(epochs, tmax=0., method=['empirical'])
    raw.apply_gradient_compensation(1)
    # TODO This next call in principle should fail.
    prepare_noise_cov(noise_cov, raw.info, ch_names)
    # make sure comps matrices were not removed from raw
    assert raw.info['comps'], 'Comps matrices removed'
def test_equalize_channels():
    """Test equalization of channels for instances of Covariance."""
    info_a = create_info(['CH1', 'CH2', 'CH3', 'CH4'], sfreq=1.0,
                         ch_types='eeg')
    info_b = create_info(['CH5', 'CH1', 'CH2'], sfreq=1.0, ch_types='eeg')
    cov1, cov2 = equalize_channels([make_ad_hoc_cov(info_a),
                                    make_ad_hoc_cov(info_b)])
    # only the common channels survive, in matching order
    assert cov1.ch_names == ['CH1', 'CH2']
    assert cov2.ch_names == ['CH1', 'CH2']
def test_compute_whitener_rank():
    """Test risky rank options."""
    info = read_info(ave_fname)
    info = pick_info(info, pick_types(info, meg=True))
    with info._unlock():
        info['projs'] = []
    # need a square version because the diag one takes shortcuts in
    # compute_whitener (users shouldn't even need this function so it's
    # private)
    cov = make_ad_hoc_cov(info)._as_square()
    assert len(cov['names']) == 306
    # full-rank covariance: estimated rank equals the channel count
    _, _, rank = compute_whitener(cov, info, rank=None, return_rank=True)
    assert rank == 306
    assert compute_rank(cov, info=info, verbose=True) == dict(meg=rank)
    # shrink one variance to make the matrix trivially rank-deficient
    cov['data'][-1] *= 1e-14
    _, _, rank = compute_whitener(cov, info, rank=None, return_rank=True)
    assert rank == 305
    assert compute_rank(cov, info=info, verbose=True) == dict(meg=rank)
    # forcing a rank above the estimate should emit a warning but be honored
    with pytest.warns(RuntimeWarning, match='exceeds the estimated'):
        _, _, rank = compute_whitener(cov, info, rank=dict(meg=306),
                                      return_rank=True)
    assert rank == 306
| bsd-3-clause | 669d705e1eb03d21fc18661ccb100bed | 41.960621 | 79 | 0.615872 | 3.288063 | false | true | false | false |
mne-tools/mne-python | examples/time_frequency/source_power_spectrum_opm.py | 5 | 7692 | # -*- coding: utf-8 -*-
"""
.. _ex-opm-resting-state:
======================================================================
Compute source power spectral density (PSD) of VectorView and OPM data
======================================================================
Here we compute the resting state from raw for data recorded using
a Neuromag VectorView system and a custom OPM system.
The pipeline is meant to mostly follow the Brainstorm :footcite:`TadelEtAl2011`
`OMEGA resting tutorial pipeline
<https://neuroimage.usc.edu/brainstorm/Tutorials/RestingOmega>`__.
The steps we use are:
1. Filtering: downsample heavily.
2. Artifact detection: use SSP for EOG and ECG.
3. Source localization: dSPM, depth weighting, cortically constrained.
4. Frequency: power spectral density (Welch), 4 sec window, 50% overlap.
5. Standardize: normalize by relative power for each source.
Preprocessing
-------------
"""
# Authors: Denis Engemann <denis.engemann@gmail.com>
# Luke Bloy <luke.bloy@gmail.com>
# Eric Larson <larson.eric.d@gmail.com>
#
# License: BSD-3-Clause
# %%
import mne
from mne.filter import next_fast_len
print(__doc__)
data_path = mne.datasets.opm.data_path()
subject = 'OPM_sample'
subjects_dir = data_path / 'subjects'
bem_dir = subjects_dir / subject / 'bem'
bem_fname = bem_dir / f'{subject}-5120-5120-5120-bem-sol.fif'
src_fname = bem_dir / f'{subject}-oct6-src.fif'
vv_fname = data_path / 'MEG' / 'SQUID' / 'SQUID_resting_state.fif'
vv_erm_fname = data_path / 'MEG' / 'SQUID' / 'SQUID_empty_room.fif'
vv_trans_fname = data_path / 'MEG' / 'SQUID' / 'SQUID-trans.fif'
opm_fname = data_path / 'MEG' / 'OPM' / 'OPM_resting_state_raw.fif'
opm_erm_fname = data_path / 'MEG' / 'OPM' / 'OPM_empty_room_raw.fif'
opm_trans = mne.transforms.Transform('head', 'mri') # use identity transform
opm_coil_def_fname = data_path / 'MEG' / 'OPM' / 'coil_def.dat'
##############################################################################
# Load data, resample. We will store the raw objects in dicts with entries
# "vv" and "opm" to simplify housekeeping and simplify looping later.
raws = dict()
raw_erms = dict()
new_sfreq = 60. # Nyquist frequency (30 Hz) < line noise freq (50 Hz)
raws['vv'] = mne.io.read_raw_fif(vv_fname, verbose='error') # ignore naming
raws['vv'].load_data().resample(new_sfreq)
raws['vv'].info['bads'] = ['MEG2233', 'MEG1842']
raw_erms['vv'] = mne.io.read_raw_fif(vv_erm_fname, verbose='error')
raw_erms['vv'].load_data().resample(new_sfreq)
raw_erms['vv'].info['bads'] = ['MEG2233', 'MEG1842']
raws['opm'] = mne.io.read_raw_fif(opm_fname)
raws['opm'].load_data().resample(new_sfreq)
raw_erms['opm'] = mne.io.read_raw_fif(opm_erm_fname)
raw_erms['opm'].load_data().resample(new_sfreq)
# Make sure our assumptions later hold
assert raws['opm'].info['sfreq'] == raws['vv'].info['sfreq']
##############################################################################
# Explore data
titles = dict(vv='VectorView', opm='OPM')
kinds = ('vv', 'opm')
n_fft = next_fast_len(int(round(4 * new_sfreq)))
print('Using n_fft=%d (%0.1f sec)' % (n_fft, n_fft / raws['vv'].info['sfreq']))
for kind in kinds:
fig = raws[kind].plot_psd(n_fft=n_fft, proj=True)
fig.suptitle(titles[kind])
fig.subplots_adjust(0.1, 0.1, 0.95, 0.85)
##############################################################################
# Alignment and forward
# ---------------------
# Here we use a reduced size source space (oct5) just for speed
src = mne.setup_source_space(
subject, 'oct5', add_dist=False, subjects_dir=subjects_dir)
# This line removes source-to-source distances that we will not need.
# We only do it here to save a bit of memory, in general this is not required.
del src[0]['dist'], src[1]['dist']
bem = mne.read_bem_solution(bem_fname)
# For speed, let's just use a 1-layer BEM
bem = mne.make_bem_solution(bem['surfs'][-1:])
fwd = dict()
# check alignment and generate forward for VectorView
kwargs = dict(azimuth=0, elevation=90, distance=0.6, focalpoint=(0., 0., 0.))
fig = mne.viz.plot_alignment(
raws['vv'].info, trans=vv_trans_fname, subject=subject,
subjects_dir=subjects_dir, dig=True, coord_frame='mri',
surfaces=('head', 'white'))
mne.viz.set_3d_view(figure=fig, **kwargs)
fwd['vv'] = mne.make_forward_solution(
raws['vv'].info, vv_trans_fname, src, bem, eeg=False, verbose=True)
##############################################################################
# And for OPM:
with mne.use_coil_def(opm_coil_def_fname):
fig = mne.viz.plot_alignment(
raws['opm'].info, trans=opm_trans, subject=subject,
subjects_dir=subjects_dir, dig=False, coord_frame='mri',
surfaces=('head', 'white'))
mne.viz.set_3d_view(figure=fig, **kwargs)
fwd['opm'] = mne.make_forward_solution(
raws['opm'].info, opm_trans, src, bem, eeg=False, verbose=True)
del src, bem
##############################################################################
# Compute and apply inverse to PSD estimated using multitaper + Welch.
# Group into frequency bands, then normalize each source point and sensor
# independently. This makes the value of each sensor point and source location
# in each frequency band the percentage of the PSD accounted for by that band.
freq_bands = dict(alpha=(8, 12), beta=(15, 29))
topos = dict(vv=dict(), opm=dict())
stcs = dict(vv=dict(), opm=dict())
snr = 3.
lambda2 = 1. / snr ** 2
for kind in kinds:
noise_cov = mne.compute_raw_covariance(raw_erms[kind])
inverse_operator = mne.minimum_norm.make_inverse_operator(
raws[kind].info, forward=fwd[kind], noise_cov=noise_cov, verbose=True)
stc_psd, sensor_psd = mne.minimum_norm.compute_source_psd(
raws[kind], inverse_operator, lambda2=lambda2,
n_fft=n_fft, dB=False, return_sensor=True, verbose=True)
topo_norm = sensor_psd.data.sum(axis=1, keepdims=True)
stc_norm = stc_psd.sum() # same operation on MNE object, sum across freqs
# Normalize each source point by the total power across freqs
for band, limits in freq_bands.items():
data = sensor_psd.copy().crop(*limits).data.sum(axis=1, keepdims=True)
topos[kind][band] = mne.EvokedArray(
100 * data / topo_norm, sensor_psd.info)
stcs[kind][band] = \
100 * stc_psd.copy().crop(*limits).sum() / stc_norm.data
del inverse_operator
del fwd, raws, raw_erms
# %%
# Now we can make some plots of each frequency band. Note that the OPM head
# coverage is only over right motor cortex, so only localization
# of beta is likely to be worthwhile.
#
# Alpha
# -----
def plot_band(kind, band):
    """Plot activity within a frequency band on the subject's brain.

    Parameters
    ----------
    kind : str
        System to plot ('vv' or 'opm'); used to index ``topos``/``stcs``.
    band : str
        Frequency band name, a key of ``freq_bands``.

    Returns
    -------
    fig : instance of matplotlib.figure.Figure
        The topomap figure for the band.
    brain : the source-space plot returned by ``stc.plot``.
    """
    title = "%s %s\n(%d-%d Hz)" % ((titles[kind], band,) + freq_bands[band])
    # Capture the figure returned by plot_topomap. Previously the
    # module-level ``fig`` (the last alignment plot) leaked into the return
    # value because the topomap figure was never assigned.
    fig = topos[kind][band].plot_topomap(
        times=0., scalings=1., cbar_fmt='%0.1f', vlim=(0, None),
        cmap='inferno', time_format=title)
    brain = stcs[kind][band].plot(
        subject=subject, subjects_dir=subjects_dir, views='cau', hemi='both',
        time_label=title, title=title, colormap='inferno',
        time_viewer=False, show_traces=False,
        clim=dict(kind='percent', lims=(70, 85, 99)), smoothing_steps=10)
    brain.show_view(azimuth=0, elevation=0, roll=0)
    return fig, brain
# Generate the per-band figures; alpha only for VectorView (see note above
# in this cell about limited OPM coverage).
fig_alpha, brain_alpha = plot_band('vv', 'alpha')
# %%
# Beta
# ----
# Here we also show OPM data, which shows a profile similar to the VectorView
# data beneath the sensors. VectorView first:
fig_beta, brain_beta = plot_band('vv', 'beta')
# %%
# Then OPM:
# sphinx_gallery_thumbnail_number = 10
fig_beta_opm, brain_beta_opm = plot_band('opm', 'beta')
# %%
# References
# ----------
# .. footbibliography::
| bsd-3-clause | af5b1df0b3ccd9623e0b06b4eca7d2c8 | 37.653266 | 79 | 0.625195 | 3.046337 | false | false | false | false |
mne-tools/mne-python | mne/io/eeglab/eeglab.py | 1 | 27204 | # Authors: Mainak Jas <mainak.jas@telecom-paristech.fr>
# Jona Sassenhagen <jona.sassenhagen@gmail.com>
# Stefan Appelhoff <stefan.appelhoff@mailbox.org>
#
# License: BSD-3-Clause
import os.path as op
import numpy as np
from ._eeglab import _readmat
from .._digitization import _ensure_fiducials_head
from ..constants import FIFF
from ..meas_info import create_info
from ..pick import _PICK_TYPES_KEYS
from ..utils import _read_segments_file, _find_channels
from ..base import BaseRaw
from ...defaults import DEFAULTS
from ...utils import (logger, verbose, warn, fill_doc, Bunch, _check_fname,
_check_head_radius)
from ...channels import make_dig_montage
from ...epochs import BaseEpochs
from ...event import read_events
from ...annotations import Annotations, read_annotations
# just fix the scaling for now, EEGLAB doesn't seem to provide this info
# (scale factor applied to every channel; presumably converts EEGLAB's
# microvolt data to MNE's SI volts -- TODO confirm against EEGLAB docs)
CAL = 1e-6
def _check_eeglab_fname(fname, dataname):
"""Check whether the filename is valid.
Check if the file extension is ``.fdt`` (older ``.dat`` being invalid) or
whether the ``EEG.data`` filename exists. If ``EEG.data`` file is absent
the set file name with .set changed to .fdt is checked.
"""
fmt = str(op.splitext(dataname)[-1])
if fmt == '.dat':
raise NotImplementedError(
'Old data format .dat detected. Please update your EEGLAB '
'version and resave the data in .fdt format')
elif fmt != '.fdt':
raise IOError('Expected .fdt file format. Found %s format' % fmt)
basedir = op.dirname(fname)
data_fname = op.join(basedir, dataname)
if not op.exists(data_fname):
fdt_from_set_fname = op.splitext(fname)[0] + '.fdt'
if op.exists(fdt_from_set_fname):
data_fname = fdt_from_set_fname
msg = ('Data file name in EEG.data ({}) is incorrect, the file '
'name must have changed on disk, using the correct file '
'name ({}).')
warn(msg.format(dataname, op.basename(fdt_from_set_fname)))
elif not data_fname == fdt_from_set_fname:
msg = 'Could not find the .fdt data file, tried {} and {}.'
raise FileNotFoundError(msg.format(data_fname, fdt_from_set_fname))
return data_fname
def _check_load_mat(fname, uint16_codec):
    """Load a .set file and normalize it to a single 'EEG' Bunch."""
    mat = _readmat(fname, uint16_codec=uint16_codec)
    if 'ALLEEG' in mat:
        raise NotImplementedError(
            'Loading an ALLEEG array is not supported. Please contact'
            'mne-python developers for more information.')
    # Unwrap the (possibly nested) 'EEG' container.
    if 'EEG' in mat:
        mat = mat['EEG']
    mat = mat.get('EEG', mat)
    bunch = Bunch(**mat)
    # These counts arrive as MATLAB doubles; force them to Python ints.
    for field in ('trials', 'nbchan', 'pnts'):
        setattr(bunch, field, int(getattr(bunch, field)))
    return bunch
def _to_loc(ll, scale_units=1.):
"""Check if location exists."""
if isinstance(ll, (int, float)) or len(ll) > 0:
return ll * scale_units
else:
return np.nan
def _eeg_has_montage_information(eeg):
try:
from scipy.io.matlab import mat_struct
except ImportError: # SciPy < 1.8
from scipy.io.matlab.mio5_params import mat_struct
if not len(eeg.chanlocs):
has_pos = False
else:
pos_fields = ['X', 'Y', 'Z']
if isinstance(eeg.chanlocs[0], mat_struct):
has_pos = all(hasattr(eeg.chanlocs[0], fld)
for fld in pos_fields)
elif isinstance(eeg.chanlocs[0], np.ndarray):
# Old files
has_pos = all(fld in eeg.chanlocs[0].dtype.names
for fld in pos_fields)
elif isinstance(eeg.chanlocs[0], dict):
# new files
has_pos = all(fld in eeg.chanlocs[0] for fld in pos_fields)
else:
has_pos = False # unknown (sometimes we get [0, 0])
return has_pos
def _get_montage_information(eeg, get_pos, scale_units=1.):
    """Get channel name, type and montage information from ['chanlocs'].

    Returns a tuple ``(ch_names, ch_types, montage)`` where ``montage`` is a
    DigMontage built from the channel positions (or None when ``get_pos`` is
    False / no positions were collected).
    """
    ch_names, ch_types, pos_ch_names, pos = list(), list(), list(), list()
    unknown_types = dict()
    for chanloc in eeg.chanlocs:
        # channel name
        ch_names.append(chanloc['labels'])
        # channel type (default 'eeg'; keep recognized MNE types, collect the
        # rest so a single warning can be emitted below)
        ch_type = 'eeg'
        try_type = chanloc.get('type', None)
        if isinstance(try_type, str):
            try_type = try_type.strip().lower()
            if try_type in _PICK_TYPES_KEYS:
                ch_type = try_type
            else:
                if try_type in unknown_types:
                    unknown_types[try_type].append(chanloc['labels'])
                else:
                    unknown_types[try_type] = [chanloc['labels']]
        ch_types.append(ch_type)
        # channel loc
        if get_pos:
            loc_x = _to_loc(chanloc['X'], scale_units=scale_units)
            loc_y = _to_loc(chanloc['Y'], scale_units=scale_units)
            loc_z = _to_loc(chanloc['Z'], scale_units=scale_units)
            # NOTE(review): axis swap (-Y, X, Z) maps EEGLAB's coordinate
            # convention onto MNE's head frame -- confirm against EEGLAB docs.
            locs = np.r_[-loc_y, loc_x, loc_z]
            pos_ch_names.append(chanloc['labels'])
            pos.append(locs)
    # warn if unknown types were provided
    if len(unknown_types):
        warn('Unknown types found, setting as type EEG:\n' +
             '\n'.join([f'{key}: {sorted(unknown_types[key])}'
                        for key in sorted(unknown_types)]))
    # Fiducials, when present, live in chaninfo['nodatchans'] with type 'FID'.
    lpa, rpa, nasion = None, None, None
    if hasattr(eeg, "chaninfo") and len(eeg.chaninfo.get('nodatchans', [])):
        for item in list(zip(*eeg.chaninfo['nodatchans'].values())):
            d = dict(zip(eeg.chaninfo['nodatchans'].keys(), item))
            if d.get("type", None) != 'FID':
                continue
            elif d.get('description', None) == 'Nasion':
                nasion = np.array([d["X"], d["Y"], d["Z"]])
            elif d.get('description', None) == 'Right periauricular point':
                rpa = np.array([d["X"], d["Y"], d["Z"]])
            elif d.get('description', None) == 'Left periauricular point':
                lpa = np.array([d["X"], d["Y"], d["Z"]])
    if pos_ch_names:
        pos_array = np.array(pos)
        # roughly estimate head radius and check if its reasonable
        is_nan_pos = np.isnan(pos).all(axis=1)
        if not is_nan_pos.all():
            mean_radius = np.mean(np.linalg.norm(
                pos_array[~is_nan_pos], axis=1))
            additional_info = (
                ' Check if the montage_units argument is correct (the default '
                'is "mm", but your channel positions may be in different units'
                ').')
            _check_head_radius(mean_radius, add_info=additional_info)
        montage = make_dig_montage(
            ch_pos=dict(zip(ch_names, pos_array)),
            coord_frame='head', lpa=lpa, rpa=rpa, nasion=nasion)
        _ensure_fiducials_head(montage.dig)
    else:
        montage = None
    return ch_names, ch_types, montage
def _get_info(eeg, eog=(), scale_units=1.):
    """Get measurement info.

    Builds an Info from ``eeg.chanlocs`` (falling back to generated channel
    names when chanlocs is empty) and returns
    ``(info, montage_or_None, update_ch_names)``.
    """
    # add the ch_names and info['chs'][idx]['loc']
    # With a single channel, chanlocs may not be a sequence; normalize it.
    if not isinstance(eeg.chanlocs, np.ndarray) and eeg.nbchan == 1:
        eeg.chanlocs = [eeg.chanlocs]
    if isinstance(eeg.chanlocs, dict):
        eeg.chanlocs = _dol_to_lod(eeg.chanlocs)
    eeg_has_ch_names_info = len(eeg.chanlocs) > 0
    if eeg_has_ch_names_info:
        has_pos = _eeg_has_montage_information(eeg)
        ch_names, ch_types, eeg_montage = \
            _get_montage_information(eeg, has_pos, scale_units=scale_units)
        update_ch_names = False
    else:  # if eeg.chanlocs is empty, we still need default chan names
        ch_names = ["EEG %03d" % ii for ii in range(eeg.nbchan)]
        ch_types = 'eeg'
        eeg_montage = None
        update_ch_names = True
    info = create_info(ch_names, sfreq=eeg.srate, ch_types=ch_types)
    # Resolve EOG channels either automatically (by name) or as given.
    eog = _find_channels(ch_names, ch_type='EOG') if eog == 'auto' else eog
    for idx, ch in enumerate(info['chs']):
        ch['cal'] = CAL
        if ch['ch_name'] in eog or idx in eog:
            ch['coil_type'] = FIFF.FIFFV_COIL_NONE
            ch['kind'] = FIFF.FIFFV_EOG_CH
    return info, eeg_montage, update_ch_names
def _set_dig_montage_in_init(self, montage):
"""Set EEG sensor configuration and head digitization from when init.
This is done from the information within fname when
read_raw_eeglab(fname) or read_epochs_eeglab(fname).
"""
if montage is None:
self.set_montage(None)
else:
missing_channels = set(self.ch_names) - set(montage.ch_names)
ch_pos = dict(zip(
list(missing_channels),
np.full((len(missing_channels), 3), np.nan)
))
self.set_montage(
montage + make_dig_montage(ch_pos=ch_pos, coord_frame='head')
)
def _handle_montage_units(montage_units):
n_char_unit = len(montage_units)
if montage_units[-1:] != 'm' or n_char_unit > 2:
raise ValueError('``montage_units`` has to be in prefix + "m" format'
f', got "{montage_units}"')
prefix = montage_units[:-1]
scale_units = 1 / DEFAULTS['prefixes'][prefix]
return scale_units
@fill_doc
def read_raw_eeglab(input_fname, eog=(), preload=False,
                    uint16_codec=None, montage_units='mm', verbose=None):
    r"""Read an EEGLAB .set file.
    Parameters
    ----------
    input_fname : str
        Path to the .set file. If the data is stored in a separate .fdt file,
        it is expected to be in the same folder as the .set file.
    eog : list | tuple | 'auto'
        Names or indices of channels that should be designated EOG channels.
        If 'auto', the channel names containing ``EOG`` or ``EYE`` are used.
        Defaults to empty tuple.
    %(preload)s
        Note that preload=False will be effective only if the data is stored
        in a separate binary file.
    %(uint16_codec)s
    %(montage_units)s
    %(verbose)s
    Returns
    -------
    raw : instance of RawEEGLAB
        A Raw object containing EEGLAB .set data.
    See Also
    --------
    mne.io.Raw : Documentation of attribute and methods.
    Notes
    -----
    .. versionadded:: 0.11.0
    """
    # Thin convenience wrapper: all reading logic lives in RawEEGLAB.
    return RawEEGLAB(input_fname=input_fname, preload=preload,
                     eog=eog, uint16_codec=uint16_codec,
                     montage_units=montage_units, verbose=verbose)
@fill_doc
def read_epochs_eeglab(input_fname, events=None, event_id=None,
                       eog=(), *, uint16_codec=None, montage_units='mm',
                       verbose=None):
    r"""Reader function for EEGLAB epochs files.
    Parameters
    ----------
    input_fname : str
        Path to the .set file. If the data is stored in a separate .fdt file,
        it is expected to be in the same folder as the .set file.
    events : str | array, shape (n_events, 3) | None
        Path to events file. If array, it is the events typically returned
        by the read_events function. If some events don't match the events
        of interest as specified by event_id, they will be marked as 'IGNORED'
        in the drop log. If None, it is constructed from the EEGLAB (.set) file
        with each unique event encoded with a different integer.
    event_id : int | list of int | dict | None
        The id of the event to consider. If dict, the keys can later be used
        to access associated events.
        Example::
            {"auditory":1, "visual":3}
        If int, a dict will be created with
        the id as string. If a list, all events with the IDs specified
        in the list are used. If None, the event_id is constructed from the
        EEGLAB (.set) file with each descriptions copied from ``eventtype``.
    eog : list | tuple | 'auto'
        Names or indices of channels that should be designated EOG channels.
        If 'auto', the channel names containing ``EOG`` or ``EYE`` are used.
        Defaults to empty tuple.
    %(uint16_codec)s
    %(montage_units)s
    %(verbose)s
    Returns
    -------
    epochs : instance of Epochs
        The epochs.
    See Also
    --------
    mne.Epochs : Documentation of attribute and methods.
    Notes
    -----
    .. versionadded:: 0.11.0
    """
    # Thin convenience wrapper: all reading logic lives in EpochsEEGLAB.
    epochs = EpochsEEGLAB(input_fname=input_fname, events=events, eog=eog,
                          event_id=event_id, uint16_codec=uint16_codec,
                          montage_units=montage_units, verbose=verbose)
    return epochs
@fill_doc
class RawEEGLAB(BaseRaw):
    r"""Raw object from EEGLAB .set file.
    Parameters
    ----------
    input_fname : str
        Path to the .set file. If the data is stored in a separate .fdt file,
        it is expected to be in the same folder as the .set file.
    eog : list | tuple | 'auto'
        Names or indices of channels that should be designated EOG channels.
        If 'auto', the channel names containing ``EOG`` or ``EYE`` are used.
        Defaults to empty tuple.
    %(preload)s
        Note that preload=False will be effective only if the data is stored
        in a separate binary file.
    %(uint16_codec)s
    %(montage_units)s
    %(verbose)s
    See Also
    --------
    mne.io.Raw : Documentation of attribute and methods.
    Notes
    -----
    .. versionadded:: 0.11.0
    """
    @verbose
    def __init__(self, input_fname, eog=(),
                 preload=False, *, uint16_codec=None, montage_units='mm',
                 verbose=None):  # noqa: D102
        input_fname = _check_fname(input_fname, 'read', True, 'input_fname')
        eeg = _check_load_mat(input_fname, uint16_codec)
        # Continuous data must hold exactly one trial.
        if eeg.trials != 1:
            raise TypeError('The number of trials is %d. It must be 1 for raw'
                            ' files. Please use `mne.io.read_epochs_eeglab` if'
                            ' the .set file contains epochs.' % eeg.trials)
        last_samps = [eeg.pnts - 1]
        scale_units = _handle_montage_units(montage_units)
        info, eeg_montage, _ = _get_info(eeg, eog=eog, scale_units=scale_units)
        # read the data
        if isinstance(eeg.data, str):
            # Data live in a separate binary .fdt file; lazy loading works.
            data_fname = _check_eeglab_fname(input_fname, eeg.data)
            logger.info('Reading %s' % data_fname)
            super(RawEEGLAB, self).__init__(
                info, preload, filenames=[data_fname], last_samps=last_samps,
                orig_format='double', verbose=verbose)
        else:
            # Data are embedded in the .set file itself.
            if preload is False or isinstance(preload, str):
                warn('Data will be preloaded. preload=False or a string '
                     'preload is not supported when the data is stored in '
                     'the .set file')
            # can't be done in standard way with preload=True because of
            # different reading path (.set file)
            if eeg.nbchan == 1 and len(eeg.data.shape) == 1:
                n_chan, n_times = [1, eeg.data.shape[0]]
            else:
                n_chan, n_times = eeg.data.shape
            data = np.empty((n_chan, n_times), dtype=float)
            data[:n_chan] = eeg.data
            data *= CAL
            super(RawEEGLAB, self).__init__(
                info, data, filenames=[input_fname], last_samps=last_samps,
                orig_format='double', verbose=verbose)
        # create event_ch from annotations
        annot = read_annotations(input_fname)
        self.set_annotations(annot)
        # Warn about 'boundary' events (data discontinuities).
        _check_boundary(annot, None)
        _set_dig_montage_in_init(self, eeg_montage)
        # Validate event sample indices derived from annotation onsets.
        latencies = np.round(annot.onset * self.info['sfreq'])
        _check_latencies(latencies)
    def _read_segment_file(self, data, idx, fi, start, stop, cals, mult):
        """Read a chunk of raw data."""
        _read_segments_file(
            self, data, idx, fi, start, stop, cals, mult, dtype='<f4')
class EpochsEEGLAB(BaseEpochs):
    r"""Epochs from EEGLAB .set file.
    Parameters
    ----------
    input_fname : str
        Path to the .set file. If the data is stored in a separate .fdt file,
        it is expected to be in the same folder as the .set file.
    events : str | array, shape (n_events, 3) | None
        Path to events file. If array, it is the events typically returned
        by the read_events function. If some events don't match the events
        of interest as specified by event_id, they will be marked as 'IGNORED'
        in the drop log. If None, it is constructed from the EEGLAB (.set) file
        with each unique event encoded with a different integer.
    event_id : int | list of int | dict | None
        The id of the event to consider. If dict,
        the keys can later be used to access associated events. Example:
        dict(auditory=1, visual=3). If int, a dict will be created with
        the id as string. If a list, all events with the IDs specified
        in the list are used. If None, the event_id is constructed from the
        EEGLAB (.set) file with each descriptions copied from ``eventtype``.
    tmin : float
        Start time before event.
    baseline : None or tuple of length 2 (default (None, 0))
        The time interval to apply baseline correction.
        If None do not apply it. If baseline is (a, b)
        the interval is between "a (s)" and "b (s)".
        If a is None the beginning of the data is used
        and if b is None then b is set to the end of the interval.
        If baseline is equal to (None, None) all the time
        interval is used.
        The baseline (a, b) includes both endpoints, i.e. all
        timepoints t such that a <= t <= b.
    reject : dict | None
        Rejection parameters based on peak-to-peak amplitude.
        Valid keys are 'grad' | 'mag' | 'eeg' | 'eog' | 'ecg'.
        If reject is None then no rejection is done. Example::
            reject = dict(grad=4000e-13, # T / m (gradiometers)
                          mag=4e-12, # T (magnetometers)
                          eeg=40e-6, # V (EEG channels)
                          eog=250e-6 # V (EOG channels)
                          )
    flat : dict | None
        Rejection parameters based on flatness of signal.
        Valid keys are 'grad' | 'mag' | 'eeg' | 'eog' | 'ecg', and values
        are floats that set the minimum acceptable peak-to-peak amplitude.
        If flat is None then no rejection is done.
    reject_tmin : scalar | None
        Start of the time window used to reject epochs (with the default None,
        the window will start with tmin).
    reject_tmax : scalar | None
        End of the time window used to reject epochs (with the default None,
        the window will end with tmax).
    eog : list | tuple | 'auto'
        Names or indices of channels that should be designated EOG channels.
        If 'auto', the channel names containing ``EOG`` or ``EYE`` are used.
        Defaults to empty tuple.
    %(uint16_codec)s
    %(montage_units)s
    %(verbose)s
    See Also
    --------
    mne.Epochs : Documentation of attribute and methods.
    Notes
    -----
    .. versionadded:: 0.11.0
    """
    @verbose
    def __init__(self, input_fname, events=None, event_id=None, tmin=0,
                 baseline=None, reject=None, flat=None, reject_tmin=None,
                 reject_tmax=None, eog=(), uint16_codec=None,
                 montage_units='mm', verbose=None):  # noqa: D102
        input_fname = _check_fname(fname=input_fname, must_exist=True,
                                   overwrite='read')
        eeg = _check_load_mat(input_fname, uint16_codec)
        # `events` and `event_id` must be given together or not at all.
        if not ((events is None and event_id is None) or
                (events is not None and event_id is not None)):
            raise ValueError('Both `events` and `event_id` must be '
                             'None or not None')
        if eeg.trials <= 1:
            raise ValueError("The file does not seem to contain epochs "
                             "(trials less than 2). "
                             "You should try using read_raw_eeglab function.")
        if events is None and eeg.trials > 1:
            # first extract the events and construct an event_id dict
            event_name, event_latencies, unique_ev = list(), list(), list()
            ev_idx = 0
            warn_multiple_events = False
            epochs = _bunchify(eeg.epoch)
            events = _bunchify(eeg.event)
            for ep in epochs:
                if isinstance(ep.eventtype, (int, float)):
                    ep.eventtype = str(ep.eventtype)
                if not isinstance(ep.eventtype, str):
                    # Several events in one epoch: join their types with '/'.
                    event_type = '/'.join([str(et) for et in ep.eventtype])
                    event_name.append(event_type)
                    # store latency of only first event
                    event_latencies.append(events[ev_idx].latency)
                    ev_idx += len(ep.eventtype)
                    warn_multiple_events = True
                else:
                    event_type = ep.eventtype
                    event_name.append(ep.eventtype)
                    event_latencies.append(events[ev_idx].latency)
                    ev_idx += 1
                if event_type not in unique_ev:
                    unique_ev.append(event_type)
                # invent event dict but use id > 0 so you know its a trigger
                event_id = {ev: idx + 1 for idx, ev in enumerate(unique_ev)}
            # warn about multiple events in epoch if necessary
            if warn_multiple_events:
                warn('At least one epoch has multiple events. Only the latency'
                     ' of the first event will be retained.')
            # now fill up the event array
            events = np.zeros((eeg.trials, 3), dtype=int)
            for idx in range(0, eeg.trials):
                if idx == 0:
                    prev_stim = 0
                elif (idx > 0 and
                        event_latencies[idx] - event_latencies[idx - 1] == 1):
                    prev_stim = event_id[event_name[idx - 1]]
                events[idx, 0] = event_latencies[idx]
                events[idx, 1] = prev_stim
                events[idx, 2] = event_id[event_name[idx]]
        elif isinstance(events, str):
            events = read_events(events)
        logger.info('Extracting parameters from %s...' % input_fname)
        scale_units = _handle_montage_units(montage_units)
        info, eeg_montage, _ = _get_info(eeg, eog=eog, scale_units=scale_units)
        # Every requested event id must actually occur in the events array.
        for key, val in event_id.items():
            if val not in events[:, 2]:
                raise ValueError('No matching events found for %s '
                                 '(event id %i)' % (key, val))
        if isinstance(eeg.data, str):
            # Data live in a separate .fdt file stored as little-endian f4,
            # Fortran (column-major) order.
            data_fname = _check_eeglab_fname(input_fname, eeg.data)
            with open(data_fname, 'rb') as data_fid:
                data = np.fromfile(data_fid, dtype=np.float32)
                data = data.reshape((eeg.nbchan, eeg.pnts, eeg.trials),
                                    order="F")
        else:
            data = eeg.data
        if eeg.nbchan == 1 and len(data.shape) == 2:
            data = data[np.newaxis, :]
        # Reorder to (n_epochs, n_channels, n_times) and scale to volts.
        data = data.transpose((2, 0, 1)).astype('double')
        data *= CAL
        assert data.shape == (eeg.trials, eeg.nbchan, eeg.pnts)
        tmin, tmax = eeg.xmin, eeg.xmax
        super(EpochsEEGLAB, self).__init__(
            info, data, events, event_id, tmin, tmax, baseline,
            reject=reject, flat=flat, reject_tmin=reject_tmin,
            reject_tmax=reject_tmax, filename=input_fname, verbose=verbose)
        # data are preloaded but _bad_dropped is not set so we do it here:
        self._bad_dropped = True
        _set_dig_montage_in_init(self, eeg_montage)
        logger.info('Ready.')
def _check_boundary(annot, event_id):
if event_id is None:
event_id = dict()
if "boundary" in annot.description and "boundary" not in event_id:
warn("The data contains 'boundary' events, indicating data "
"discontinuities. Be cautious of filtering and epoching around "
"these events.")
def _check_latencies(latencies):
if (latencies < -1).any():
raise ValueError('At least one event sample index is negative. Please'
' check if EEG.event.sample values are correct.')
if (latencies == -1).any():
warn("At least one event has a sample index of -1. This usually is "
"a consequence of how eeglab handles event latency after "
"resampling - especially when you had a boundary event at the "
"beginning of the file. Please make sure that the events at "
"the very beginning of your EEGLAB file can be safely dropped "
"(e.g., because they are boundary events).")
def _bunchify(items):
if isinstance(items, dict):
items = _dol_to_lod(items)
if len(items) > 0 and isinstance(items[0], dict):
items = [Bunch(**item) for item in items]
return items
def _read_annotations_eeglab(eeg, uint16_codec=None):
    r"""Create Annotations from EEGLAB file.
    This function reads the event attribute from the EEGLAB
    structure and makes an :class:`mne.Annotations` object.
    Parameters
    ----------
    eeg : object | str
        'EEG' struct or the path to the (EEGLAB) .set file.
    uint16_codec : str | None
        If your \*.set file contains non-ascii characters, sometimes reading
        it may fail and give rise to error message stating that "buffer is
        too small". ``uint16_codec`` allows to specify what codec (for example:
        'latin1' or 'utf-8') should be used when reading character arrays and
        can therefore help you solve this problem.
    Returns
    -------
    annotations : instance of Annotations
        The annotations present in the file.
    """
    if isinstance(eeg, str):
        eeg = _check_load_mat(eeg, uint16_codec=uint16_codec)
    # Normalize eeg.event into a list of per-event records; it may be
    # missing, a dict of parallel lists, a single struct, or a sequence.
    if not hasattr(eeg, 'event'):
        events = []
    elif isinstance(eeg.event, dict) and \
            np.array(eeg.event['latency']).ndim > 0:
        events = _dol_to_lod(eeg.event)
    elif not isinstance(eeg.event, (np.ndarray, list)):
        events = [eeg.event]
    else:
        events = eeg.event
    events = _bunchify(events)
    description = [str(event.type) for event in events]
    # latency - 1: EEGLAB latencies appear to be 1-based sample indices.
    onset = [event.latency - 1 for event in events]
    duration = np.zeros(len(onset))
    if len(events) > 0 and hasattr(events[0], 'duration'):
        for idx, event in enumerate(events):
            # empty duration fields are read as empty arrays
            is_empty_array = (isinstance(event.duration, np.ndarray)
                              and len(event.duration) == 0)
            duration[idx] = np.nan if is_empty_array else event.duration
    # Convert samples to seconds using the recording's sampling rate.
    return Annotations(onset=np.array(onset) / eeg.srate,
                       duration=duration / eeg.srate,
                       description=description,
                       orig_time=None)
def _dol_to_lod(dol):
"""Convert a dict of lists to a list of dicts."""
return [{key: dol[key][ii] for key in dol.keys()}
for ii in range(len(dol[list(dol.keys())[0]]))]
| bsd-3-clause | f96b659cb40f81183a82e7dcf6daf1fa | 38.483309 | 79 | 0.581275 | 3.64323 | false | false | false | false |
mne-tools/mne-python | examples/preprocessing/find_ref_artifacts.py | 13 | 4727 | # -*- coding: utf-8 -*-
"""
.. _ex-megnoise_processing:
====================================
Find MEG reference channel artifacts
====================================
Use ICA decompositions of MEG reference channels to remove intermittent noise.
Many MEG systems have an array of reference channels which are used to detect
external magnetic noise. However, standard techniques that use reference
channels to remove noise from standard channels often fail when noise is
intermittent. The technique described here (using ICA on the reference
channels) often succeeds where the standard techniques do not.
There are two algorithms to choose from: separate and together (default). In
the "separate" algorithm, two ICA decompositions are made: one on the reference
channels, and one on reference + standard channels. The reference + standard
channel components which correlate with the reference channel components are
removed.
In the "together" algorithm, a single ICA decomposition is made on reference +
standard channels, and those components whose weights are particularly heavy
on the reference channels are removed.
This technique is fully described and validated in :footcite:`HannaEtAl2020`
"""
# Authors: Jeff Hanna <jeff.hanna@gmail.com>
#
# License: BSD-3-Clause
# %%
import mne
from mne import io
from mne.datasets import refmeg_noise
from mne.preprocessing import ICA
import numpy as np
print(__doc__)
data_path = refmeg_noise.data_path()
# %%
# Read raw data, cropping to 5 minutes to save memory
raw_fname = data_path / 'sample_reference_MEG_noise-raw.fif'
raw = io.read_raw_fif(raw_fname).crop(300, 600).load_data()
# %%
# Note that even though standard noise removal has already
# been applied to these data, much of the noise in the reference channels
# (bottom of the plot) can still be seen in the standard channels.
select_picks = np.concatenate(
(mne.pick_types(raw.info, meg=True)[-32:],
mne.pick_types(raw.info, meg=False, ref_meg=True)))
plot_kwargs = dict(
duration=100, order=select_picks, n_channels=len(select_picks),
scalings={"mag": 8e-13, "ref_meg": 2e-11})
raw.plot(**plot_kwargs)
# %%
# The PSD of these data show the noise as clear peaks.
raw.plot_psd(fmax=30)
# %%
# Run the "together" algorithm.
raw_tog = raw.copy()
ica_kwargs = dict(
method='picard',
fit_params=dict(tol=1e-4), # use a high tol here for speed
)
all_picks = mne.pick_types(raw_tog.info, meg=True, ref_meg=True)
ica_tog = ICA(n_components=60, max_iter='auto', allow_ref_meg=True,
**ica_kwargs)
ica_tog.fit(raw_tog, picks=all_picks)
# low threshold (2.0) here because of cropped data, entire recording can use
# a higher threshold (2.5)
bad_comps, scores = ica_tog.find_bads_ref(raw_tog, threshold=2.0)
# Plot scores with bad components marked.
ica_tog.plot_scores(scores, bad_comps)
# Examine the properties of removed components. It's clear from the time
# courses and topographies that these components represent external,
# intermittent noise.
ica_tog.plot_properties(raw_tog, picks=bad_comps)
# Remove the components.
raw_tog = ica_tog.apply(raw_tog, exclude=bad_comps)
# %%
# Cleaned data:
raw_tog.plot_psd(fmax=30)
# %%
# Now try the "separate" algorithm.
raw_sep = raw.copy()
# Do ICA only on the reference channels.
ref_picks = mne.pick_types(raw_sep.info, meg=False, ref_meg=True)
ica_ref = ICA(n_components=2, max_iter='auto', allow_ref_meg=True,
**ica_kwargs)
ica_ref.fit(raw_sep, picks=ref_picks)
# Do ICA on both reference and standard channels. Here, we can just reuse
# ica_tog from the section above.
ica_sep = ica_tog.copy()
# Extract the time courses of these components and add them as channels
# to the raw data. Think of them the same way as EOG/EKG channels, but instead
# of giving info about eye movements/cardiac activity, they give info about
# external magnetic noise.
ref_comps = ica_ref.get_sources(raw_sep)
for c in ref_comps.ch_names: # they need to have REF_ prefix to be recognised
ref_comps.rename_channels({c: "REF_" + c})
raw_sep.add_channels([ref_comps])
# Now that we have our noise channels, we run the separate algorithm.
bad_comps, scores = ica_sep.find_bads_ref(raw_sep, method="separate")
# Plot scores with bad components marked.
ica_sep.plot_scores(scores, bad_comps)
# Examine the properties of removed components.
ica_sep.plot_properties(raw_sep, picks=bad_comps)
# Remove the components.
raw_sep = ica_sep.apply(raw_sep, exclude=bad_comps)
# %%
# Cleaned raw data traces:
raw_sep.plot(**plot_kwargs)
# %%
# Cleaned raw data PSD:
raw_sep.plot_psd(fmax=30)
##############################################################################
# References
# ----------
#
# .. footbibliography::
| bsd-3-clause | 4bd34718b1799f3ce99bbc75ce160d44 | 31.156463 | 79 | 0.710176 | 3.23989 | false | false | false | false |
mne-tools/mne-python | mne/preprocessing/nirs/_tddr.py | 8 | 5024 | # Authors: Robert Luke <mail@robertluke.net>
# Frank Fishburn
#
# License: BSD-3-Clause
import numpy as np
from ...io import BaseRaw
from ...utils import _validate_type, verbose
from ..nirs import _validate_nirs_info
@verbose
def temporal_derivative_distribution_repair(raw, *, verbose=None):
    """Apply temporal derivative distribution repair to data.

    Applies temporal derivative distribution repair (TDDR) to data
    :footcite:`FishburnEtAl2019`. This approach removes baseline shift
    and spike artifacts without the need for any user-supplied parameters.

    Parameters
    ----------
    raw : instance of Raw
        The raw data.
    %(verbose)s

    Returns
    -------
    raw : instance of Raw
        Data with TDDR applied.

    Notes
    -----
    TDDR was initially designed to be used on optical density fNIRS data but
    has been enabled to be applied on hemoglobin concentration fNIRS data as
    well in MNE. We recommend applying the algorithm to optical density fNIRS
    data as intended by the original author wherever possible.

    There is a shorter alias ``mne.preprocessing.nirs.tddr`` that can be used
    instead of this function (e.g. if line length is an issue).

    References
    ----------
    .. footbibliography::
    """
    # Validate *before* touching the input: previously ``raw.copy()`` ran
    # first, so a wrong-type argument failed with a confusing AttributeError
    # (or not at all) before the type check ever executed.
    _validate_type(raw, BaseRaw, 'raw')
    # Work on an in-memory copy so the caller's object is left untouched.
    raw = raw.copy().load_data()
    picks = _validate_nirs_info(raw.info)
    if not len(picks):
        raise RuntimeError('TDDR should be run on optical density or '
                           'hemoglobin data.')
    # TDDR operates independently on each channel's time series.
    for pick in picks:
        raw._data[pick] = _TDDR(raw._data[pick], raw.info['sfreq'])
    return raw
# Provide a short alias so users can write ``mne.preprocessing.nirs.tddr``
# where the full function name would exceed line-length limits.
tddr = temporal_derivative_distribution_repair
# Taken from https://github.com/frankfishburn/TDDR/ (MIT license).
# With permission https://github.com/frankfishburn/TDDR/issues/1.
# The only modification is the name, scipy signal import and flake fixes.
def _TDDR(signal, sample_rate):
    """Apply TDDR motion correction to a 1D signal or [sample x channel] array.

    Kept byte-for-byte faithful to the upstream reference implementation
    (see the module-level note above); only comments/docs are added here.
    """
    # This function is the reference implementation for the TDDR algorithm for
    # motion correction of fNIRS data, as described in:
    #
    #   Fishburn F.A., Ludlum R.S., Vaidya C.J., & Medvedev A.V. (2019).
    #   Temporal Derivative Distribution Repair (TDDR): A motion correction
    #   method for fNIRS. NeuroImage, 184, 171-179.
    #   https://doi.org/10.1016/j.neuroimage.2018.09.025
    #
    # Usage:
    #   signals_corrected = TDDR( signals , sample_rate );
    #
    # Inputs:
    #   signals: A [sample x channel] matrix of uncorrected optical density or
    #            hemoglobin data
    #   sample_rate: A scalar reflecting the rate of acquisition in Hz
    #
    # Outputs:
    #   signals_corrected: A [sample x channel] matrix of corrected optical
    #   density data
    from scipy.signal import butter, filtfilt
    # np.array makes a copy, so the caller's array is never mutated in place.
    signal = np.array(signal)
    if len(signal.shape) != 1:
        # Multichannel input: recurse over columns (channels) one at a time.
        for ch in range(signal.shape[1]):
            signal[:, ch] = _TDDR(signal[:, ch], sample_rate)
        return signal

    # Preprocess: Separate high and low frequencies
    filter_cutoff = .5
    filter_order = 3
    Fc = filter_cutoff * 2 / sample_rate  # normalized cutoff for butter()
    signal_mean = np.mean(signal)
    signal -= signal_mean
    if Fc < 1:
        fb, fa = butter(filter_order, Fc)
        signal_low = filtfilt(fb, fa, signal, padlen=0)
    else:
        # Cutoff at/above Nyquist: nothing to separate out.
        signal_low = signal
    signal_high = signal - signal_low

    # Initialize
    # NOTE(review): ``iter`` shadows the builtin; kept to stay faithful to
    # the upstream reference implementation.
    tune = 4.685  # tuning constant for Tukey's biweight (used in step 3e)
    D = np.sqrt(np.finfo(signal.dtype).eps)  # machine-precision tolerance
    mu = np.inf
    iter = 0

    # Step 1. Compute temporal derivative of the signal
    deriv = np.diff(signal_low)

    # Step 2. Initialize observation weights
    w = np.ones(deriv.shape)

    # Step 3. Iterative estimation of robust weights
    while iter < 50:
        iter = iter + 1
        mu0 = mu

        # Step 3a. Estimate weighted mean
        mu = np.sum(w * deriv) / np.sum(w)

        # Step 3b. Calculate absolute residuals of estimate
        dev = np.abs(deriv - mu)

        # Step 3c. Robust estimate of standard deviation of the residuals
        sigma = 1.4826 * np.median(dev)

        # Step 3d. Scale deviations by standard deviation and tuning parameter
        if sigma == 0:
            break
        r = dev / (sigma * tune)

        # Step 3e. Calculate new weights according to Tukey's biweight function
        w = ((1 - r**2) * (r < 1)) ** 2

        # Step 3f. Terminate if new estimate is within
        # machine-precision of old estimate
        if abs(mu - mu0) < D * max(abs(mu), abs(mu0)):
            break

    # Step 4. Apply robust weights to centered derivative
    new_deriv = w * (deriv - mu)

    # Step 5. Integrate corrected derivative
    signal_low_corrected = np.cumsum(np.insert(new_deriv, 0, 0.0))

    # Postprocess: Center the corrected signal
    signal_low_corrected = signal_low_corrected - np.mean(signal_low_corrected)

    # Postprocess: Merge back with uncorrected high frequency component
    signal_corrected = signal_low_corrected + signal_high + signal_mean

    return signal_corrected
| bsd-3-clause | 3c4f72f73d4da69ee472ab11960c81c4 | 30.4 | 79 | 0.64371 | 3.696836 | false | false | false | false |
mne-tools/mne-python | mne/export/_eeglab.py | 6 | 2455 | # -*- coding: utf-8 -*-
# Authors: MNE Developers
#
# License: BSD-3-Clause
import numpy as np
from ..utils import _check_eeglabio_installed
_check_eeglabio_installed()
import eeglabio.raw # noqa: E402
import eeglabio.epochs # noqa: E402
def _export_raw(fname, raw):
    """Export a Raw instance to an EEGLAB ``.set`` file via eeglabio."""
    # The export needs the data in memory.
    raw.load_data()
    # Synthetic channels that must not be written out.
    excluded = ['epoc']
    first_fname = raw.filenames[0]  # RawArray fills this attribute with None
    if first_fname and not first_fname.endswith('.fif'):
        excluded.append('STI 014')
    keep = [name for name in raw.ch_names if name not in excluded]
    coords = _get_als_coords_from_chs(raw.info['chs'], excluded)
    annot = [raw.annotations.description,
             raw.annotations.onset,
             raw.annotations.duration]
    eeglabio.raw.export_set(
        fname, data=raw.get_data(picks=keep), sfreq=raw.info['sfreq'],
        ch_names=keep, ch_locs=coords, annotations=annot)
def _export_epochs(fname, epochs):
    """Export an Epochs instance to an EEGLAB ``.set`` file via eeglabio."""
    _check_eeglabio_installed()
    # The export needs the data in memory.
    epochs.load_data()
    # Synthetic channels that must not be written out.
    excluded = ['epoc', 'STI 014']
    keep = [name for name in epochs.ch_names if name not in excluded]
    coords = _get_als_coords_from_chs(epochs.info['chs'], excluded)
    # Only pass annotations when there are any; eeglabio expects None
    # otherwise.
    annot = None
    if len(epochs.annotations) > 0:
        annot = [epochs.annotations.description, epochs.annotations.onset,
                 epochs.annotations.duration]
    eeglabio.epochs.export_set(
        fname, data=epochs.get_data(picks=keep),
        sfreq=epochs.info['sfreq'], events=epochs.events,
        tmin=epochs.tmin, tmax=epochs.tmax, ch_names=keep,
        event_id=epochs.event_id, ch_locs=coords, annotations=annot)
def _get_als_coords_from_chs(chs, drop_chs=None):
"""Extract channel locations in ALS format (x, y, z) from a chs instance.
Returns
-------
None if no valid coordinates are found (all zeros)
"""
if drop_chs is None:
drop_chs = []
cart_coords = np.array([d['loc'][:3] for d in chs
if d['ch_name'] not in drop_chs])
if cart_coords.any(): # has coordinates
# (-y x z) to (x y z)
cart_coords[:, 0] = -cart_coords[:, 0] # -y to y
# swap x (1) and y (0)
cart_coords[:, [0, 1]] = cart_coords[:, [1, 0]]
else:
cart_coords = None
return cart_coords
| bsd-3-clause | 0675f0e44f2ede5f5730d69da56c3dd3 | 31.302632 | 77 | 0.616293 | 3.204961 | false | false | false | false |
mne-tools/mne-python | examples/stats/sensor_regression.py | 13 | 3814 | # -*- coding: utf-8 -*-
"""
.. _ex-sensor-regression:
=========================================================================
Analysing continuous features with binning and regression in sensor space
=========================================================================
Predict single trial activity from a continuous variable.
A single-trial regression is performed in each sensor and timepoint
individually, resulting in an :class:`mne.Evoked` object which contains the
regression coefficient (beta value) for each combination of sensor and
timepoint. This example shows the regression coefficient; the t and p values
are also calculated automatically.
Here, we repeat a few of the analyses from :footcite:`DufauEtAl2015`. This
can be easily performed by accessing the metadata object, which contains
word-level information about various psycholinguistically relevant features
of the words for which we have EEG activity.
For the general methodology, see e.g. :footcite:`HaukEtAl2006`.
References
----------
.. footbibliography::
"""
# Authors: Tal Linzen <linzen@nyu.edu>
# Denis A. Engemann <denis.engemann@gmail.com>
# Jona Sassenhagen <jona.sassenhagen@gmail.com>
#
# License: BSD-3-Clause
# %%
import pandas as pd
import mne
from mne.stats import linear_regression, fdr_correction
from mne.viz import plot_compare_evokeds
from mne.datasets import kiloword
# Load the data
path = kiloword.data_path() / 'kword_metadata-epo.fif'
epochs = mne.read_epochs(path)
print(epochs.metadata.head())

##############################################################################
# Psycholinguistically relevant word characteristics are continuous. I.e.,
# concreteness or imaginability is a graded property. In the metadata,
# we have concreteness ratings on a 5-point scale. We can show the dependence
# of the EEG on concreteness by dividing the data into bins and plotting the
# mean activity per bin, color coded.
name = "Concreteness"
df = epochs.metadata
# Bin the continuous ratings into 11 equal-width bins, scaled into [0, 1].
df[name] = pd.cut(df[name], 11, labels=False) / 10
colors = {str(val): val for val in df[name].unique()}
epochs.metadata = df.assign(Intercept=1)  # Add an intercept for later
# One evoked (average) per concreteness bin, selected via metadata queries.
evokeds = {val: epochs[name + " == " + val].average() for val in colors}
plot_compare_evokeds(evokeds, colors=colors, split_legend=True,
                     cmap=(name + " Percentile", "viridis"))

##############################################################################
# We observe that there appears to be a monotonic dependence of EEG on
# concreteness. We can also conduct a continuous analysis: single-trial level
# regression with concreteness as a continuous (although here, binned)
# feature. We can plot the resulting regression coefficient just like an
# Event-related Potential.
names = ["Intercept", name]
res = linear_regression(epochs, epochs.metadata[names], names=names)
for cond in names:
    res[cond].beta.plot_joint(title=cond, ts_args=dict(time_unit='s'),
                              topomap_args=dict(time_unit='s'))

##############################################################################
# Because the :func:`~mne.stats.linear_regression` function also estimates
# p values, we can --
# after applying FDR correction for multiple comparisons -- also visualise the
# statistical significance of the regression of word concreteness.
#
# The :func:`mne.viz.plot_evoked_image` function takes a ``mask`` parameter.
# If we supply it with a boolean mask of the positions where we can reject
# the null hypothesis, points that are not significant will be shown
# transparently, and if desired, in a different colour palette and surrounded
# by dark contour lines.
reject_H0, fdr_pvals = fdr_correction(res["Concreteness"].p_val.data)
evoked = res["Concreteness"].beta
evoked.plot_image(mask=reject_H0, time_unit='s')
| bsd-3-clause | 1517262a2b0765fd6af608a46e8c701d | 43.870588 | 78 | 0.673047 | 3.88391 | false | false | false | false |
mne-tools/mne-python | mne/export/_edf.py | 11 | 11831 | # -*- coding: utf-8 -*-
# Authors: MNE Developers
#
# License: BSD-3-Clause
from contextlib import contextmanager
import numpy as np
from ..utils import _check_edflib_installed, warn
_check_edflib_installed()
from EDFlib.edfwriter import EDFwriter # noqa: E402
def _try_to_set_value(header, key, value, channel_index=None):
"""Set key/value pairs in EDF header."""
# all EDFLib set functions are set<X>
# for example "setPatientName()"
func_name = f'set{key}'
func = getattr(header, func_name)
# some setter functions are indexed by channels
if channel_index is None:
return_val = func(value)
else:
return_val = func(channel_index, value)
# a nonzero return value indicates an error
if return_val != 0:
raise RuntimeError(f"Setting {key} with {value} "
f"returned an error value "
f"{return_val}.")
@contextmanager
def _auto_close(fid):
    """Context manager that always tries to close ``fid`` on exit.

    Unlike :class:`contextlib.closing`, errors raised by ``close()`` itself
    are swallowed here, so a failing close cannot mask an exception raised
    inside the managed block.
    """
    # try to close the handle no matter what
    try:
        yield fid
    finally:
        try:
            fid.close()
        except Exception:
            pass  # we did our best
def _export_raw(fname, raw, physical_range, add_ch_type):
    """Export Raw objects to EDF files.

    Parameters
    ----------
    fname : str
        Destination EDF(+) file name.
    raw : instance of Raw
        The raw data to export; data are loaded and written in microvolts.
    physical_range : 'auto' | tuple
        Either ``'auto'`` (use the per-channel-type data min/max) or an
        explicit ``(pmin, pmax)`` pair (in uV) applied to every channel.
    add_ch_type : bool
        If True, prepend the channel type (e.g. ``EEG``) to each signal
        label.

    TODO: if in future the Info object supports transducer or
    technician information, allow writing those here.
    """
    # scale to save data in EDF
    phys_dims = 'uV'
    # get EEG-related data in uV
    units = dict(eeg='uV', ecog='uV', seeg='uV', eog='uV', ecg='uV', emg='uV',
                 bio='uV', dbs='uV')
    # 16-bit digital range used by EDF
    digital_min = -32767
    digital_max = 32767
    file_type = EDFwriter.EDFLIB_FILETYPE_EDFPLUS

    # load data first
    raw.load_data()

    # remove extra STI channels
    orig_ch_types = raw.get_channel_types()
    drop_chs = []
    if 'stim' in orig_ch_types:
        stim_index = np.argwhere(np.array(orig_ch_types) == 'stim')
        stim_index = np.atleast_1d(stim_index.squeeze()).tolist()
        drop_chs.extend([raw.ch_names[idx] for idx in stim_index])

    # Add warning if any channel types are not voltage based.
    # Users are expected to only export data that is voltage based,
    # such as EEG, ECoG, sEEG, etc.
    # Non-voltage channels are dropped by the export function.
    # Note: we can write these other channels, such as 'misc'
    # but these are simply a "catch all" for unknown or undesired
    # channels.
    voltage_types = list(units) + ['stim', 'misc']
    # BUG FIX: previously a list of booleans was interpolated into the
    # warning, telling the user nothing; report the offending names instead.
    non_voltage_ch = [name for name, type_ in zip(raw.ch_names, orig_ch_types)
                      if type_ not in voltage_types]
    if non_voltage_ch:
        warn(f"Non-voltage channels detected: {non_voltage_ch}. MNE-Python's "
             'EDF exporter only supports voltage-based channels, because the '
             'EDF format cannot accommodate much of the accompanying data '
             'necessary for channel types like MEG and fNIRS (channel '
             'orientations, coordinate frame transforms, etc). You can '
             'override this restriction by setting those channel types to '
             '"misc" but no guarantees are made of the fidelity of that '
             'approach.')

    ch_names = [ch for ch in raw.ch_names if ch not in drop_chs]
    ch_types = np.array(raw.get_channel_types(picks=ch_names))
    n_channels = len(ch_names)
    n_times = raw.n_times

    # Sampling frequency in EDF only supports integers, so to allow for
    # float sampling rates from Raw, we adjust the output sampling rate
    # for all channels and the data record duration.
    sfreq = raw.info['sfreq']
    if float(sfreq).is_integer():
        out_sfreq = int(sfreq)
        data_record_duration = None
    else:
        out_sfreq = np.floor(sfreq).astype(int)
        data_record_duration = int(np.around(
            out_sfreq / sfreq, decimals=6) * 1e6)
        warn(f'Data has a non-integer sampling rate of {sfreq}; writing to '
             'EDF format may cause a small change to sample times.')

    # get any filter information applied to the data
    lowpass = raw.info['lowpass']
    highpass = raw.info['highpass']
    linefreq = raw.info['line_freq']
    filter_str_info = f"HP:{highpass}Hz LP:{lowpass}Hz N:{linefreq}Hz"

    # get the entire dataset in uV
    data = raw.get_data(units=units, picks=ch_names)

    if physical_range == 'auto':
        # get max and min for each channel type data
        ch_types_phys_max = dict()
        ch_types_phys_min = dict()
        for _type in np.unique(ch_types):
            _picks = np.nonzero(ch_types == _type)[0]
            # BUG FIX: index into the already-picked ``data`` array; passing
            # these positional indices to ``raw.get_data`` would address the
            # wrong channels whenever stim channels were dropped above.
            _data = data[_picks]
            ch_types_phys_max[_type] = _data.max()
            ch_types_phys_min[_type] = _data.min()
    else:
        # get the physical min and max of the data in uV
        # Physical ranges of the data in uV is usually set by the manufacturer
        # and properties of the electrode. In general, physical max and min
        # should be the clipping levels of the ADC input and they should be
        # the same for all channels. For example, Nihon Kohden uses +3200 uV
        # and -3200 uV for all EEG channels (which are the actual clipping
        # levels of their input amplifiers & ADC).
        # For full discussion, see: https://github.com/sccn/eeglab/issues/246
        pmin, pmax = physical_range[0], physical_range[1]

        # check that physical min and max is not exceeded
        if data.max() > pmax:
            raise RuntimeError(f'The maximum μV of the data {data.max()} is '
                               f'more than the physical max passed in {pmax}.')
        if data.min() < pmin:
            raise RuntimeError(f'The minimum μV of the data {data.min()} is '
                               f'less than the physical min passed in {pmin}.')

    # create instance of EDF Writer
    with _auto_close(EDFwriter(fname, file_type, n_channels)) as hdl:
        # set channel data
        for idx, ch in enumerate(ch_names):
            ch_type = ch_types[idx]
            signal_label = f'{ch_type.upper()} {ch}' if add_ch_type else ch
            if len(signal_label) > 16:
                raise RuntimeError(f'Signal label for {ch} ({ch_type}) is '
                                   f'longer than 16 characters, which is not '
                                   f'supported in EDF. Please shorten the '
                                   f'channel name before exporting to EDF.')

            if physical_range == 'auto':
                # take the channel type minimum and maximum
                pmin = ch_types_phys_min[ch_type]
                pmax = ch_types_phys_max[ch_type]
            for key, val in [('PhysicalMaximum', pmax),
                             ('PhysicalMinimum', pmin),
                             ('DigitalMaximum', digital_max),
                             ('DigitalMinimum', digital_min),
                             ('PhysicalDimension', phys_dims),
                             ('SampleFrequency', out_sfreq),
                             ('SignalLabel', signal_label),
                             ('PreFilter', filter_str_info)]:
                _try_to_set_value(hdl, key, val, channel_index=idx)

        # set patient info
        subj_info = raw.info.get('subject_info')
        if subj_info is not None:
            birthday = subj_info.get('birthday')
            # get the full name of subject if available
            first_name = subj_info.get('first_name')
            last_name = subj_info.get('last_name')
            first_name = first_name or ''
            last_name = last_name or ''
            joiner = ''
            if len(first_name) and len(last_name):
                joiner = ' '
            name = joiner.join([first_name, last_name])
            hand = subj_info.get('hand')
            sex = subj_info.get('sex')

            if birthday is not None:
                if hdl.setPatientBirthDate(birthday[0], birthday[1],
                                           birthday[2]) != 0:
                    raise RuntimeError(
                        f"Setting patient birth date to {birthday} "
                        f"returned an error")
            for key, val in [('PatientName', name),
                             ('PatientGender', sex),
                             ('AdditionalPatientInfo', f'hand={hand}')]:
                _try_to_set_value(hdl, key, val)

        # set measurement date
        meas_date = raw.info['meas_date']
        if meas_date:
            # EDF stores subseconds in units of 100 microseconds
            subsecond = int(meas_date.microsecond / 100)
            if hdl.setStartDateTime(year=meas_date.year, month=meas_date.month,
                                    day=meas_date.day, hour=meas_date.hour,
                                    minute=meas_date.minute,
                                    second=meas_date.second,
                                    subsecond=subsecond) != 0:
                raise RuntimeError(f"Setting start date time {meas_date} "
                                   f"returned an error")

        device_info = raw.info.get('device_info')
        if device_info is not None:
            device_type = device_info.get('type')
            _try_to_set_value(hdl, 'Equipment', device_type)

        # set data record duration
        if data_record_duration is not None:
            _try_to_set_value(hdl, 'DataRecordDuration', data_record_duration)

        # compute number of data records to loop over
        n_blocks = np.ceil(n_times / out_sfreq).astype(int)

        # increase the number of annotation signals if necessary
        annots = raw.annotations
        if annots is not None:
            n_annotations = len(raw.annotations)
            n_annot_chans = int(n_annotations / n_blocks)
            if np.mod(n_annotations, n_blocks):
                n_annot_chans += 1
            if n_annot_chans > 1:
                hdl.setNumberOfAnnotationSignals(n_annot_chans)

        # Write each data record sequentially
        for idx in range(n_blocks):
            end_samp = (idx + 1) * out_sfreq
            if end_samp > n_times:
                end_samp = n_times
            start_samp = idx * out_sfreq
            # then for each datarecord write each channel
            for jdx in range(n_channels):
                # create a buffer with sampling rate
                buf = np.zeros(out_sfreq, np.float64, "C")
                # get channel data for this block
                ch_data = data[jdx, start_samp:end_samp]
                # assign channel data to the buffer and write to EDF
                buf[:len(ch_data)] = ch_data
                err = hdl.writeSamples(buf)
                if err != 0:
                    # BUG FIX: added the missing space after "channel"
                    raise RuntimeError(
                        f"writeSamples() for channel {ch_names[jdx]} "
                        f"returned error: {err}")

            # there was an incomplete datarecord
            if len(ch_data) != len(buf):
                warn(f'EDF format requires equal-length data blocks, '
                     f'so {(len(buf) - len(ch_data)) / sfreq} seconds of '
                     'zeros were appended to all channels when writing the '
                     'final block.')

        # write annotations
        if annots is not None:
            for desc, onset, duration in zip(raw.annotations.description,
                                             raw.annotations.onset,
                                             raw.annotations.duration):
                # annotations are written in terms of 100 microseconds
                onset = onset * 10000
                duration = duration * 10000
                if hdl.writeAnnotation(onset, duration, desc) != 0:
                    raise RuntimeError(f'writeAnnotation() returned an error '
                                       f'trying to write {desc} at {onset} '
                                       f'for {duration} seconds.')
| bsd-3-clause | 6cc3b3672a7932017430b262efe2d419 | 41.096085 | 79 | 0.555077 | 3.998986 | false | false | false | false |
mne-tools/mne-python | examples/time_frequency/time_frequency_simulated.py | 7 | 9643 | # -*- coding: utf-8 -*-
"""
.. _ex-tfr-comparison:
==================================================================================
Time-frequency on simulated data (Multitaper vs. Morlet vs. Stockwell vs. Hilbert)
==================================================================================
This example demonstrates the different time-frequency estimation methods
on simulated data. It shows the time-frequency resolution trade-off
and the problem of estimation variance. In addition it highlights
alternative functions for generating TFRs without averaging across
trials, or by operating on numpy arrays.
""" # noqa E501
# Authors: Hari Bharadwaj <hari@nmr.mgh.harvard.edu>
# Denis Engemann <denis.engemann@gmail.com>
# Chris Holdgraf <choldgraf@berkeley.edu>
# Alex Rockhill <aprockhill@mailbox.org>
#
# License: BSD-3-Clause
# %%
import numpy as np
from matplotlib import pyplot as plt
from mne import create_info, Epochs
from mne.baseline import rescale
from mne.io import RawArray
from mne.time_frequency import (tfr_multitaper, tfr_stockwell, tfr_morlet,
tfr_array_morlet, AverageTFR)
from mne.viz import centers_to_edges
print(__doc__)
# %%
# Simulate data
# -------------
#
# We'll simulate data with a known spectro-temporal structure.

sfreq = 1000.0
ch_names = ['SIM0001', 'SIM0002']
ch_types = ['grad', 'grad']
info = create_info(ch_names=ch_names, sfreq=sfreq, ch_types=ch_types)

n_times = 1024  # Just over 1 second epochs
n_epochs = 40
seed = 42
rng = np.random.RandomState(seed)
data = rng.randn(len(ch_names), n_times * n_epochs + 200)  # buffer

# Add a 50 Hz sinusoidal burst to the noise and ramp it.
t = np.arange(n_times, dtype=np.float64) / sfreq
signal = np.sin(np.pi * 2. * 50. * t)  # 50 Hz sinusoid signal
signal[np.logical_or(t < 0.45, t > 0.55)] = 0.  # Hard windowing
on_time = np.logical_and(t >= 0.45, t <= 0.55)
signal[on_time] *= np.hanning(on_time.sum())  # Ramping
data[:, 100:-100] += np.tile(signal, n_epochs)  # add signal

raw = RawArray(data, info)
# One event per epoch, evenly spaced every n_times samples.
events = np.zeros((n_epochs, 3), dtype=int)
events[:, 0] = np.arange(n_epochs) * n_times
epochs = Epochs(raw, events, dict(sin50hz=0), tmin=0, tmax=n_times / sfreq,
                reject=dict(grad=4000), baseline=None)

epochs.average().plot()
# %%
# Calculate a time-frequency representation (TFR)
# -----------------------------------------------
#
# Below we'll demonstrate the output of several TFR functions in MNE:
#
# * :func:`mne.time_frequency.tfr_multitaper`
# * :func:`mne.time_frequency.tfr_stockwell`
# * :func:`mne.time_frequency.tfr_morlet`
# * :meth:`mne.Epochs.filter` and :meth:`mne.Epochs.apply_hilbert`
#
# Multitaper transform
# ====================
# First we'll use the multitaper method for calculating the TFR.
# This creates several orthogonal tapering windows in the TFR estimation,
# which reduces variance. We'll also show some of the parameters that can be
# tweaked (e.g., ``time_bandwidth``) that will result in different multitaper
# properties, and thus a different TFR. You can trade time resolution or
# frequency resolution or both in order to get a reduction in variance.

freqs = np.arange(5., 100., 3.)
vmin, vmax = -3., 3.  # Define our color limits.

# Three panels of the same analysis, varying only the smoothing trade-off.
fig, axs = plt.subplots(1, 3, figsize=(15, 5), sharey=True)
for n_cycles, time_bandwidth, ax, title in zip(
        [freqs / 2, freqs, freqs / 2],  # number of cycles
        [2.0, 4.0, 8.0],  # time bandwidth
        axs,
        ['Sim: Least smoothing, most variance',
         'Sim: Less frequency smoothing,\nmore time smoothing',
         'Sim: Less time smoothing,\nmore frequency smoothing']):
    power = tfr_multitaper(epochs, freqs=freqs, n_cycles=n_cycles,
                           time_bandwidth=time_bandwidth, return_itc=False)
    ax.set_title(title)
    # Plot results. Baseline correct based on first 100 ms.
    power.plot([0], baseline=(0., 0.1), mode='mean', vmin=vmin, vmax=vmax,
               axes=ax, show=False, colorbar=False)
plt.tight_layout()
##############################################################################
# Stockwell (S) transform
# =======================
#
# Stockwell uses a Gaussian window to balance temporal and spectral resolution.
# Importantly, frequency bands are phase-normalized, hence strictly comparable
# with regard to timing, and, the input signal can be recovered from the
# transform in a lossless way if we disregard numerical errors. In this case,
# we control the spectral / temporal resolution by specifying different widths
# of the gaussian window using the ``width`` parameter.

fig, axs = plt.subplots(1, 3, figsize=(15, 5), sharey=True)
fmin, fmax = freqs[[0, -1]]
for width, ax in zip((0.2, 0.7, 3.0), axs):
    power = tfr_stockwell(epochs, fmin=fmin, fmax=fmax, width=width)
    power.plot([0], baseline=(0., 0.1), mode='mean', axes=ax, show=False,
               colorbar=False)
    ax.set_title('Sim: Using S transform, width = {:0.1f}'.format(width))
plt.tight_layout()
# %%
# Morlet Wavelets
# ===============
#
# Next, we'll show the TFR using morlet wavelets, which are a sinusoidal wave
# with a gaussian envelope. We can control the balance between spectral and
# temporal resolution with the ``n_cycles`` parameter, which defines the
# number of cycles to include in the window.

fig, axs = plt.subplots(1, 3, figsize=(15, 5), sharey=True)
all_n_cycles = [1, 3, freqs / 2.]
for n_cycles, ax in zip(all_n_cycles, axs):
    power = tfr_morlet(epochs, freqs=freqs,
                       n_cycles=n_cycles, return_itc=False)
    power.plot([0], baseline=(0., 0.1), mode='mean', vmin=vmin, vmax=vmax,
               axes=ax, show=False, colorbar=False)
    # For the title, describe the array-valued n_cycles case textually.
    n_cycles = 'scaled by freqs' if not isinstance(n_cycles, int) else n_cycles
    ax.set_title(f'Sim: Using Morlet wavelet, n_cycles = {n_cycles}')
plt.tight_layout()
# %%
# Narrow-bandpass Filter and Hilbert Transform
# ============================================
#
# Finally, we'll show a time-frequency representation using a narrow bandpass
# filter and the Hilbert transform. Choosing the right filter parameters is
# important so that you isolate only one oscillation of interest, generally
# the width of this filter is recommended to be about 2 Hz.

fig, axs = plt.subplots(1, 3, figsize=(15, 5), sharey=True)
bandwidths = [1., 2., 4.]
for bandwidth, ax in zip(bandwidths, axs):
    data = np.zeros((len(ch_names), freqs.size, epochs.times.size),
                    dtype=complex)
    for idx, freq in enumerate(freqs):
        # Filter raw data and re-epoch to avoid the filter being longer than
        # the epoch data for low frequencies and short epochs, such as here.
        raw_filter = raw.copy()
        # NOTE: The bandwidths of the filters are changed from their defaults
        # to exaggerate differences. With the default transition bandwidths,
        # these are all very similar because the filters are almost the same.
        # In practice, using the default is usually a wise choice.
        raw_filter.filter(
            l_freq=freq - bandwidth / 2, h_freq=freq + bandwidth / 2,
            # no negative values for large bandwidth and low freq
            l_trans_bandwidth=min([4 * bandwidth, freq - bandwidth]),
            h_trans_bandwidth=4 * bandwidth)
        raw_filter.apply_hilbert()
        epochs_hilb = Epochs(raw_filter, events, tmin=0, tmax=n_times / sfreq,
                             baseline=(0, 0.1))
        tfr_data = epochs_hilb.get_data()
        tfr_data = tfr_data * tfr_data.conj()  # compute power
        tfr_data = np.mean(tfr_data, axis=0)  # average over epochs
        data[:, idx] = tfr_data
    power = AverageTFR(info, data, epochs.times, freqs, nave=n_epochs)
    power.plot([0], baseline=(0., 0.1), mode='mean', vmin=-0.1, vmax=0.1,
               axes=ax, show=False, colorbar=False)
    # (A leftover ``n_cycles`` reassignment copy-pasted from the Morlet
    # section was removed here; the title only depends on ``bandwidth``.)
    ax.set_title('Sim: Using narrow bandpass filter Hilbert,\n'
                 f'bandwidth = {bandwidth}, '
                 f'transition bandwidth = {4 * bandwidth}')
plt.tight_layout()
# %%
# Calculating a TFR without averaging over epochs
# -----------------------------------------------
#
# It is also possible to calculate a TFR without averaging across trials.
# We can do this by using ``average=False``. In this case, an instance of
# :class:`mne.time_frequency.EpochsTFR` is returned.

n_cycles = freqs / 2.
power = tfr_morlet(epochs, freqs=freqs,
                   n_cycles=n_cycles, return_itc=False, average=False)
print(type(power))
# average() collapses the EpochsTFR back to an AverageTFR for plotting.
avgpower = power.average()
avgpower.plot([0], baseline=(0., 0.1), mode='mean', vmin=vmin, vmax=vmax,
              title='Using Morlet wavelets and EpochsTFR', show=False)
# %%
# Operating on arrays
# -------------------
#
# MNE also has versions of the functions above which operate on numpy arrays
# instead of MNE objects. They expect inputs of the shape
# ``(n_epochs, n_channels, n_times)``. They will also return a numpy array
# of shape ``(n_epochs, n_channels, n_freqs, n_times)``.

power = tfr_array_morlet(epochs.get_data(), sfreq=epochs.info['sfreq'],
                         freqs=freqs, n_cycles=n_cycles,
                         output='avg_power')
# Baseline the output (``copy=False`` rescales the array in place).
rescale(power, epochs.times, (0., 0.1), mode='mean', copy=False)
fig, ax = plt.subplots()
x, y = centers_to_edges(epochs.times * 1000, freqs)
mesh = ax.pcolormesh(x, y, power[0], cmap='RdBu_r', vmin=vmin, vmax=vmax)
ax.set_title('TFR calculated on a numpy array')
ax.set(ylim=freqs[[0, -1]], xlabel='Time (ms)')
fig.colorbar(mesh)
plt.tight_layout()

plt.show()
| bsd-3-clause | 9473a00c1ae504884b73ab3212a1bcff | 41.10917 | 82 | 0.638391 | 3.312607 | false | false | false | false |
mne-tools/mne-python | mne/datasets/_fsaverage/base.py | 9 | 4530 | # -*- coding: utf-8 -*-
# Authors: Eric Larson <larson.eric.d@gmail.com>
# License: BSD Style.
import os
import os.path as op
from ..utils import _manifest_check_download, _get_path
from ...utils import verbose, get_subjects_dir, set_config
FSAVERAGE_MANIFEST_PATH = op.dirname(__file__)
@verbose
def fetch_fsaverage(subjects_dir=None, *, verbose=None):
    """Fetch and update fsaverage.

    Parameters
    ----------
    subjects_dir : str | None
        The path to use as the subjects directory in the MNE-Python
        config file. None will use the existing config variable (i.e.,
        will not change anything), and if it does not exist, will use
        ``~/mne_data/MNE-fsaverage-data``.
    %(verbose)s

    Returns
    -------
    fs_dir : str
        The fsaverage directory.
        (essentially ``subjects_dir + '/fsaverage'``).

    Notes
    -----
    This function is designed to provide

    1. All modern (Freesurfer 6) fsaverage subject files
    2. All MNE fsaverage parcellations
    3. fsaverage head surface, fiducials, head<->MRI trans, 1- and 3-layer
       BEMs (and surfaces)

    This function will compare the contents of ``subjects_dir/fsaverage``
    to the ones provided in the remote zip file. If any are missing,
    the zip file is downloaded and files are updated. No files will
    be overwritten.

    .. versionadded:: 0.18
    """
    # Code used to create the BEM (other files taken from MNE-sample-data):
    #
    # $ mne watershed_bem -s fsaverage -d $PWD --verbose info --copy
    # $ python
    # >>> bem = mne.make_bem_model('fsaverage', subjects_dir='.', verbose=True)
    # >>> mne.write_bem_surfaces(
    # ...     'fsaverage/bem/fsaverage-5120-5120-5120-bem.fif', bem)
    # >>> sol = mne.make_bem_solution(bem, verbose=True)
    # >>> mne.write_bem_solution(
    # ...     'fsaverage/bem/fsaverage-5120-5120-5120-bem-sol.fif', sol)
    # >>> import os
    # >>> import os.path as op
    # >>> names = sorted(op.join(r, f)
    # ...                for r, d, files in os.walk('fsaverage')
    # ...                for f in files)
    # with open('fsaverage.txt', 'w') as fid:
    #     fid.write('\n'.join(names))
    #
    # Resolve (and, if unset, persist) the subjects directory, then make sure
    # the fsaverage subdirectory exists before checking against manifests.
    subjects_dir = _set_montage_coreg_path(subjects_dir)
    subjects_dir = op.abspath(op.expanduser(subjects_dir))
    fs_dir = op.join(subjects_dir, 'fsaverage')
    os.makedirs(fs_dir, exist_ok=True)
    # Each manifest lists the expected files; only missing ones trigger a
    # download of the corresponding archive (existing files are kept).
    _manifest_check_download(
        manifest_path=op.join(FSAVERAGE_MANIFEST_PATH, 'root.txt'),
        destination=op.join(subjects_dir),
        url='https://osf.io/3bxqt/download?version=2',
        hash_='5133fe92b7b8f03ae19219d5f46e4177',
    )
    _manifest_check_download(
        manifest_path=op.join(FSAVERAGE_MANIFEST_PATH, 'bem.txt'),
        destination=op.join(subjects_dir, 'fsaverage'),
        url='https://osf.io/7ve8g/download?version=4',
        hash_='b31509cdcf7908af6a83dc5ee8f49fb1',
    )
    return fs_dir
def _get_create_subjects_dir(subjects_dir):
    """Resolve the subjects directory, creating the default one if needed."""
    resolved = get_subjects_dir(subjects_dir, raise_error=False)
    if resolved is None:
        # Fall back to the standard MNE data path (e.g. ~/mne_data).
        data_path = _get_path(None, 'MNE_DATA', 'montage coregistration')
        resolved = op.join(data_path, 'MNE-fsaverage-data')
    os.makedirs(resolved, exist_ok=True)
    return resolved
def _set_montage_coreg_path(subjects_dir=None):
    """Set a subject directory suitable for montage(-only) coregistration.

    Parameters
    ----------
    subjects_dir : str | None
        The path to use as the subjects directory in the MNE-Python
        config file. None will use the existing config variable (i.e.,
        will not change anything), and if it does not exist, will use
        ``~/mne_data/MNE-fsaverage-data``.

    Returns
    -------
    subjects_dir : str
        The subjects directory that was used.

    See Also
    --------
    mne.datasets.fetch_fsaverage
    mne.get_config
    mne.set_config

    Notes
    -----
    If you plan to only do EEG-montage based coregistrations with fsaverage
    without any MRI warping, this function can facilitate the process.
    Essentially it sets the default value for ``subjects_dir`` in MNE
    functions to be ``~/mne_data/MNE-fsaverage-data`` (assuming it has
    not already been set to some other value).

    .. versionadded:: 0.18
    """
    subjects_dir = _get_create_subjects_dir(subjects_dir)
    # Persist the directory in the MNE config only when SUBJECTS_DIR was
    # previously unset, so an existing user configuration is never clobbered.
    if get_subjects_dir(None, raise_error=False) is None:
        set_config('SUBJECTS_DIR', subjects_dir)
    return subjects_dir
| bsd-3-clause | b45867cc9b85149457d552d64260782f | 33.580153 | 79 | 0.64128 | 3.323551 | false | true | false | false |
mne-tools/mne-python | mne/time_frequency/tests/test_stft.py | 11 | 2237 | # Authors : Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Eric Larson <larson.eric.d@gmail.com>
#
# License : BSD-3-Clause
import pytest
import numpy as np
from scipy import linalg
from numpy.testing import assert_almost_equal, assert_array_almost_equal
from mne.time_frequency import stft, istft, stftfreq
from mne.time_frequency._stft import stft_norm2
@pytest.mark.parametrize('T', (127, 128, 255, 256, 1337))
@pytest.mark.parametrize('wsize', (128, 256))
@pytest.mark.parametrize('tstep', (4, 64))
@pytest.mark.parametrize('f', (7., 23.))  # should be close to fftfreqs
def test_stft(T, wsize, tstep, f):
    """Test stft and istft tight frame property."""
    sfreq = 1000.  # Hz
    # Low-frequency sinusoid: the spectral peak should land on (or next to)
    # the analysis frequency, and the transform should invert back to the
    # input exactly (tight-frame / norm-conservation properties).
    # NOTE: the previous ``if True:`` wrapper (kept "to minimize diff")
    # served no purpose and has been removed.
    t = np.arange(T).astype(np.float64)
    x = np.sin(2 * np.pi * f * t / sfreq)
    x = np.array([x, x + 1.])
    X = stft(x, wsize, tstep)
    xp = istft(X, tstep, Tx=T)
    freqs = stftfreq(wsize, sfreq=sfreq)
    max_freq = freqs[np.argmax(np.sum(np.abs(X[0]) ** 2, axis=1))]
    assert X.shape[1] == len(freqs)
    assert np.all(freqs >= 0.)
    assert np.abs(max_freq - f) < 1.
    assert_array_almost_equal(x, xp, decimal=6)
    # norm conservation thanks to tight frame property
    assert_almost_equal(np.sqrt(stft_norm2(X)),
                        [linalg.norm(xx) for xx in x], decimal=6)
    # Random signal with a smaller window/step: reconstruction and norm
    # conservation must still hold.
    x = np.random.randn(2, T)
    wsize = 16
    tstep = 8
    X = stft(x, wsize, tstep)
    xp = istft(X, tstep, Tx=T)
    freqs = stftfreq(wsize, sfreq=1000)
    max_freq = freqs[np.argmax(np.sum(np.abs(X[0]) ** 2, axis=1))]
    assert X.shape[1] == len(freqs)
    assert np.all(freqs >= 0.)
    assert_array_almost_equal(x, xp, decimal=6)
    # norm conservation thanks to tight frame property
    assert_almost_equal(np.sqrt(stft_norm2(X)),
                        [linalg.norm(xx) for xx in x],
                        decimal=6)
    # An empty (0-channel) array must round-trip to the same shape.
    x = np.zeros((0, T))
    X = stft(x, wsize, tstep)
    xp = istft(X, tstep, T)
    assert xp.shape == x.shape
| bsd-3-clause | 66c0033c5d1c23b895c4a593e43dcaba | 32.38806 | 72 | 0.577559 | 3.039402 | false | true | false | false |
mne-tools/mne-python | mne/datasets/kiloword/kiloword.py | 9 | 1969 | # License: BSD Style.
from ...utils import verbose
from ..utils import (_get_version, _version_doc, _download_mne_dataset)
@verbose
def data_path(path=None, force_update=False, update_path=True,
              download=True, *, verbose=None):
    """Get path to local copy of the kiloword dataset.

    This is the dataset from :footcite:`DufauEtAl2015`.

    Parameters
    ----------
    path : None | str
        Location of where to look for the kiloword data storing
        location. If None, the environment variable or config parameter
        MNE_DATASETS_KILOWORD_PATH is used. If it doesn't exist,
        the "mne-python/examples" directory is used. If the
        kiloword dataset is not found under the given path (e.g.,
        as "mne-python/examples/MNE-kiloword-data"), the data
        will be automatically downloaded to the specified folder.
    force_update : bool
        Force update of the dataset even if a local copy exists.
    update_path : bool | None
        If True, set the MNE_DATASETS_KILOWORD_PATH in mne-python
        config to the given path. If None, the user is prompted.
    download : bool
        If False and the kiloword dataset has not been downloaded yet,
        it will not be downloaded and the path will be returned as
        '' (empty string). This is mostly used for debugging purposes
        and can be safely ignored by most users.
    %(verbose)s

    Returns
    -------
    path : list of Path
        Local path to the given data file. This path is contained inside a
        list of length one, for compatibility.

    References
    ----------
    .. footbibliography::
    """
    # All the heavy lifting (config lookup, download, unpacking) is done
    # by the generic dataset fetcher.
    fetch_kwargs = dict(
        name='kiloword', processor='untar', path=path,
        force_update=force_update, update_path=update_path,
        download=download)
    return _download_mne_dataset(**fetch_kwargs)
def get_version():
    """Get dataset version."""
    dataset = 'kiloword'
    return _get_version(dataset)


# Replace the docstring with the shared, templated version-doc text.
get_version.__doc__ = _version_doc.format(name='kiloword')
| bsd-3-clause | bf7dd90077b786ff0d6395459a32de99 | 33.54386 | 79 | 0.656171 | 4.034836 | false | false | false | false |
mne-tools/mne-python | mne/viz/tests/test_montage.py | 8 | 2822 | # Authors: Denis Engemann <denis.engemann@gmail.com>
# Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Teon Brooks <teon.brooks@gmail.com>
#
# License: Simplified BSD
# Set our plotters to test mode
import os.path as op
import numpy as np
import pytest
import matplotlib.pyplot as plt
from mne.channels import (read_dig_fif, make_dig_montage,
make_standard_montage)
# Paths to the KIT test data shipped with mne (digitized points and markers).
p_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'kit', 'tests', 'data')
elp = op.join(p_dir, 'test_elp.txt')  # digitized electrode positions
hsp = op.join(p_dir, 'test_hsp.txt')  # head shape points
hpi = op.join(p_dir, 'test_mrk.sqd')  # marker (HPI coil) file
# Fiducial labels followed by the numbered coil labels.
point_names = ['nasion', 'lpa', 'rpa', '1', '2', '3', '4', '5']
# Generic FIF test recording used for reading digitization below.
io_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
fif_fname = op.join(io_dir, 'test_raw.fif')
def test_plot_montage():
    """Test plotting montages."""
    m = make_standard_montage('easycap-M1')
    # Exercise every public plotting mode, closing figures as we go so the
    # test does not accumulate open matplotlib windows.
    for plot_kwargs in (dict(),
                        dict(kind='3d'),
                        dict(kind='3d', show_names=True),
                        dict(kind='topomap'),
                        dict(kind='topomap', show_names=True),
                        dict(sphere='eeglab')):
        m.plot(**plot_kwargs)
        plt.close('all')
    n_hsp, n_hpi = 2, 1
    montage = make_dig_montage(nasion=[1, 1, 1], lpa=[2, 2, 2], rpa=[3, 3, 3],
                               hsp=np.full((n_hsp, 3), 4),
                               hpi=np.full((n_hpi, 3), 4),
                               coord_frame='head')
    # A montage without channel positions cannot be plotted.
    assert '0 channels' in repr(montage)
    with pytest.raises(RuntimeError, match='No valid channel positions'):
        montage.plot()
    d = read_dig_fif(fname=fif_fname)
    assert '61 channels' in repr(d)
    # XXX this is broken; dm.point_names is used. Sometimes we say this should
    # Just contain the HPI coils, other times that it's all channels (e.g.,
    # EEG channels). But there is redundancy with this and dm.dig_ch_pos.
    # This should be addressed in the pending big refactoring.
    # d.plot()
    # plt.close('all')
@pytest.mark.parametrize('name, n', [
    ('standard_1005', 342), ('standard_postfixed', 85),
    ('standard_primed', 85), ('standard_1020', 93)
])
def test_plot_defect_montage(name, n):
    """Test plotting defect montages (i.e. with duplicate labels)."""
    # ``name`` is the montage name and ``n`` its historical unique-label
    # count; the current montages no longer include the three fiducials.
    montage = make_standard_montage(name)
    expected = n - 3
    fig = montage.plot()
    scatter = fig.axes[0].collections[0]
    # Every per-point array of the scatter must have one entry per label.
    for attr in ('_edgecolors', '_facecolors', '_offsets'):
        assert getattr(scatter, attr).shape[0] == expected
def test_plot_digmontage():
    """Test plot DigMontage."""
    # Three channels 'a', 'b', 'c' placed on the unit axes.
    ch_pos = {name: pos for name, pos in zip('abc', np.eye(3))}
    dig_montage = make_dig_montage(ch_pos=ch_pos, coord_frame='head')
    dig_montage.plot()
    plt.close('all')
| bsd-3-clause | 609f14b60e2d958285c8fbab970deb21 | 33 | 79 | 0.599929 | 2.954974 | false | true | false | false |
mne-tools/mne-python | tutorials/forward/25_automated_coreg.py | 11 | 4242 | # -*- coding: utf-8 -*-
"""
.. _tut-auto-coreg:
=============================================
Using an automated approach to coregistration
=============================================
This example shows how to use the coregistration functions to perform an
automated MEG-MRI coregistration via scripting. Generally the results of
this approach are consistent with those obtained from manual
coregistration :footcite:`HouckClaus2020`.
.. warning:: The quality of the coregistration depends heavily upon the
quality of the head shape points (HSP) collected during subject
prepration and the quality of your T1-weighted MRI. Use with
caution and check the coregistration error.
"""
# Author: Jon Houck <jon.houck@gmail.com>
# Guillaume Favelier <guillaume.favelier@gmail.com>
#
# License: BSD-3-Clause
import numpy as np

import mne
from mne.coreg import Coregistration
from mne.io import read_info
data_path = mne.datasets.sample.data_path()
# data_path and all paths built from it are pathlib.Path objects
subjects_dir = data_path / 'subjects'
subject = 'sample'
fname_raw = data_path / 'MEG' / subject / f'{subject}_audvis_raw.fif'
info = read_info(fname_raw)
# Keyword arguments shared by every alignment figure below.
plot_kwargs = dict(subject=subject, subjects_dir=subjects_dir,
                   surfaces="head-dense", dig=True, eeg=[],
                   meg='sensors', show_axes=True,
                   coord_frame='meg')
view_kwargs = dict(azimuth=45, elevation=90, distance=0.6,
                   focalpoint=(0., 0., 0.))
# %%
# Set up the coregistration model
# -------------------------------
fiducials = "estimated"  # get fiducials from fsaverage
coreg = Coregistration(info, subject, subjects_dir, fiducials=fiducials)
fig = mne.viz.plot_alignment(info, trans=coreg.trans, **plot_kwargs)
# %%
# Initial fit with fiducials
# --------------------------
# Do first a coregistration fit using only 3 fiducial points. This allows
# to find a good initial solution before further optimization using
# head shape points. This can also be useful to detect outlier head shape
# points which are too far from the skin surface. One can see for example
# that on this dataset there is one such point and we will omit it from
# the subsequent fit.
coreg.fit_fiducials(verbose=True)
fig = mne.viz.plot_alignment(info, trans=coreg.trans, **plot_kwargs)
# %%
# Refining with ICP
# -----------------
# Next we refine the transformation using a few iteration of the
# Iterative Closest Point (ICP) algorithm. As the initial fiducials
# are obtained from fsaverage and not from precise manual picking in the
# GUI we do a fit with reduced weight for the nasion.
coreg.fit_icp(n_iterations=6, nasion_weight=2., verbose=True)
fig = mne.viz.plot_alignment(info, trans=coreg.trans, **plot_kwargs)
# %%
# Omitting bad points
# -------------------
# It is now very clear that we have one point that is an outlier
# and that should be removed.
coreg.omit_head_shape_points(distance=5. / 1000)  # distance is in meters
# %%
# Final coregistration fit
# ------------------------
# sphinx_gallery_thumbnail_number = 4
coreg.fit_icp(n_iterations=20, nasion_weight=10., verbose=True)
fig = mne.viz.plot_alignment(info, trans=coreg.trans, **plot_kwargs)
mne.viz.set_3d_view(fig, **view_kwargs)
# Report the residual digitization <-> MRI surface distances as a quality
# check for the final fit.
dists = coreg.compute_dig_mri_distances() * 1e3  # in mm
print(
    f"Distance between HSP and MRI (mean/min/max):\n{np.mean(dists):.2f} mm "
    f"/ {np.min(dists):.2f} mm / {np.max(dists):.2f} mm"
)
# %%
# .. warning::
#     Don't forget to save the resulting ``trans`` matrix!
#
#     .. code-block:: python
#
#         mne.write_trans('/path/to/filename-trans.fif', coreg.trans)
#
# .. note:: The :class:`mne.coreg.Coregistration` class has the ability to
#           compute MRI scale factors using
#           :meth:`~mne.coreg.Coregistration.set_scale_mode` that is useful
#           for creating surrogate MRI subjects, i.e., using a template MRI
#           (such as one from :func:`mne.datasets.fetch_infant_template`)
#           matched to a subject's head digitization. When scaling is desired,
#           a scaled surrogate MRI should be created using
#           :func:`mne.scale_mri`.
# %%
# References
# ----------
# .. footbibliography::
| bsd-3-clause | aebc963a449132d6d7e229c74f1fce23 | 35.568966 | 78 | 0.662423 | 3.377389 | false | false | false | false |
mne-tools/mne-python | mne/io/boxy/boxy.py | 11 | 11042 | # Authors: Kyle Mathewson, Jonathan Kuziek <kuziek@ualberta.ca>
#
# License: BSD-3-Clause
import re as re
import numpy as np
from ..base import BaseRaw
from ..meas_info import create_info
from ..utils import _mult_cal_one
from ...utils import logger, verbose, fill_doc, _check_fname
from ...annotations import Annotations
@fill_doc
def read_raw_boxy(fname, preload=False, verbose=None):
    """Reader for an optical imaging recording.

    This function has been tested using the ISS Imagent I and II systems
    and versions 0.40/0.84 of the BOXY recording software.

    Parameters
    ----------
    fname : str
        Path to the BOXY data file.
    %(preload)s
    %(verbose)s

    Returns
    -------
    raw : instance of RawBOXY
        A Raw object containing BOXY data.

    See Also
    --------
    mne.io.Raw : Documentation of attribute and methods.
    """
    # Thin convenience wrapper: all parsing happens in the RawBOXY class.
    return RawBOXY(fname, preload=preload, verbose=verbose)
@fill_doc
class RawBOXY(BaseRaw):
    """Raw object from a BOXY optical imaging file.
    Parameters
    ----------
    fname : str
        Path to the BOXY data file.
    %(preload)s
    %(verbose)s
    See Also
    --------
    mne.io.Raw : Documentation of attribute and methods.
    """
    @verbose
    def __init__(self, fname, preload=False, verbose=None):
        logger.info('Loading %s' % fname)
        # Read header file and grab some info.
        # A single pass over the file collects the metadata and records the
        # byte offset of every data line so segments can be seeked to later.
        start_line = np.inf
        col_names = mrk_col = filetype = mrk_data = end_line = None
        raw_extras = dict()
        raw_extras['offsets'] = list()  # keep track of our offsets
        sfreq = None  # parsed from the 'Update Rate (Hz)' header line
        fname = _check_fname(fname, 'read', True, 'fname')
        with open(fname, 'r') as fid:
            line_num = 0
            i_line = fid.readline()
            while i_line:
                # most of our lines will be data lines, so check that first
                if line_num >= start_line:
                    assert col_names is not None
                    assert filetype is not None
                    if '#DATA ENDS' in i_line:
                        # Data ends just before this.
                        end_line = line_num
                        break
                    if mrk_col is not None:
                        # Collect the 'digaux' column as trigger/marker data.
                        if filetype == 'non-parsed':
                            # Non-parsed files have different lines lengths.
                            crnt_line = i_line.rsplit(' ')[0]
                            temp_data = re.findall(
                                r'[-+]?\d*\.?\d+', crnt_line)
                            if len(temp_data) == len(col_names):
                                mrk_data.append(float(
                                    re.findall(r'[-+]?\d*\.?\d+', crnt_line)
                                    [mrk_col]))
                        else:
                            crnt_line = i_line.rsplit(' ')[0]
                            mrk_data.append(float(re.findall(
                                r'[-+]?\d*\.?\d+', crnt_line)[mrk_col]))
                    raw_extras['offsets'].append(fid.tell())
                # now proceed with more standard header parsing
                elif 'BOXY.EXE:' in i_line:
                    boxy_ver = re.findall(r'\d*\.\d+',
                                          i_line.rsplit(' ')[-1])[0]
                    # Check that the BOXY version is supported
                    if boxy_ver not in ['0.40', '0.84']:
                        raise RuntimeError('MNE has not been tested with BOXY '
                                           'version (%s)' % boxy_ver)
                elif 'Detector Channels' in i_line:
                    raw_extras['detect_num'] = int(i_line.rsplit(' ')[0])
                elif 'External MUX Channels' in i_line:
                    raw_extras['source_num'] = int(i_line.rsplit(' ')[0])
                elif 'Update Rate (Hz)' in i_line or \
                        'Updata Rate (Hz)' in i_line:
                    # Version 0.40 of the BOXY recording software
                    # (and possibly other versions lower than 0.84) contains a
                    # typo in the raw data file where 'Update Rate' is spelled
                    # "Updata Rate. This will account for this typo.
                    sfreq = float(i_line.rsplit(' ')[0])
                elif '#DATA BEGINS' in i_line:
                    # Data should start a couple lines later.
                    start_line = line_num + 3
                elif line_num == start_line - 2:
                    # Grab names for each column of data.
                    raw_extras['col_names'] = col_names = re.findall(
                        r'\w+\-\w+|\w+\-\d+|\w+', i_line.rsplit(' ')[0])
                    if 'exmux' in col_names:
                        # Change filetype based on data organisation.
                        # 'exmux' column means sources are multiplexed across
                        # consecutive lines ('non-parsed' layout).
                        filetype = 'non-parsed'
                    else:
                        filetype = 'parsed'
                    if 'digaux' in col_names:
                        mrk_col = col_names.index('digaux')
                        mrk_data = list()
                    # raw_extras['offsets'].append(fid.tell())
                elif line_num == start_line - 1:
                    raw_extras['offsets'].append(fid.tell())
                line_num += 1
                i_line = fid.readline()
        assert sfreq is not None
        raw_extras.update(
            filetype=filetype, start_line=start_line, end_line=end_line)
        # Label each channel in our data, for each data type (DC, AC, Ph).
        # Data is organised by channels x timepoint, where the first
        # 'source_num' rows correspond to the first detector, the next
        # 'source_num' rows correspond to the second detector, and so on.
        ch_names = list()
        ch_types = list()
        cals = list()
        for det_num in range(raw_extras['detect_num']):
            for src_num in range(raw_extras['source_num']):
                for i_type, ch_type in [
                        ('DC', 'fnirs_cw_amplitude'),
                        ('AC', 'fnirs_fd_ac_amplitude'),
                        ('Ph', 'fnirs_fd_phase')]:
                    ch_names.append(
                        f'S{src_num + 1}_D{det_num + 1} {i_type}')
                    ch_types.append(ch_type)
                    # Phase is recorded in degrees; calibrate to radians.
                    cals.append(np.pi / 180. if i_type == 'Ph' else 1.)
        # Create info structure.
        info = create_info(ch_names, sfreq, ch_types)
        for ch, cal in zip(info['chs'], cals):
            ch['cal'] = cal
        # Determine how long our data is.
        delta = end_line - start_line
        assert len(raw_extras['offsets']) == delta + 1
        if filetype == 'non-parsed':
            # Each sample spans 'source_num' consecutive file lines.
            delta //= (raw_extras['source_num'])
        super(RawBOXY, self).__init__(
            info, preload, filenames=[fname], first_samps=[0],
            last_samps=[delta - 1], raw_extras=[raw_extras], verbose=verbose)
        # Now let's grab our markers, if they are present.
        if mrk_data is not None:
            mrk_data = np.array(mrk_data, float)
            # We only want the first instance of each trigger.
            # Onsets are where the value changes to a new nonzero trigger;
            # duration counts how long each trigger value is held.
            prev_mrk = 0
            mrk_idx = list()
            duration = list()
            tmp_dur = 0
            for i_num, i_mrk in enumerate(mrk_data):
                if i_mrk != 0 and i_mrk != prev_mrk:
                    mrk_idx.append(i_num)
                if i_mrk != 0 and i_mrk == prev_mrk:
                    tmp_dur += 1
                if i_mrk == 0 and i_mrk != prev_mrk:
                    duration.append((tmp_dur + 1) / sfreq)
                    tmp_dur = 0
                prev_mrk = i_mrk
            onset = np.array(mrk_idx) / sfreq
            description = mrk_data[mrk_idx]
            annot = Annotations(onset, duration, description)
            self.set_annotations(annot)
    def _read_segment_file(self, data, idx, fi, start, stop, cals, mult):
        """Read a segment of data from a file.
        Boxy file organises data in two ways, parsed or un-parsed.
        Regardless of type, output has (n_montages x n_sources x n_detectors
        + n_marker_channels) rows, and (n_timepoints x n_blocks) columns.
        """
        source_num = self._raw_extras[fi]['source_num']
        detect_num = self._raw_extras[fi]['detect_num']
        start_line = self._raw_extras[fi]['start_line']
        end_line = self._raw_extras[fi]['end_line']
        filetype = self._raw_extras[fi]['filetype']
        col_names = self._raw_extras[fi]['col_names']
        offsets = self._raw_extras[fi]['offsets']
        boxy_file = self._filenames[fi]
        # Non-parsed multiplexes sources, so we need source_num times as many
        # lines in that case
        if filetype == 'parsed':
            start_read = start_line + start
            stop_read = start_read + (stop - start)
        else:
            assert filetype == 'non-parsed'
            start_read = start_line + start * source_num
            stop_read = start_read + (stop - start) * source_num
        assert start_read >= start_line
        assert stop_read <= end_line
        # Possible detector names.
        detectors = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'[:detect_num]
        # Loop through our data.
        one = np.zeros((len(col_names), stop_read - start_read))
        with open(boxy_file, 'r') as fid:
            # Just a more efficient version of this:
            # ii = 0
            # for line_num, i_line in enumerate(fid):
            #     if line_num >= start_read:
            #         if line_num >= stop_read:
            #             break
            #         # Grab actual data.
            #         i_data = i_line.strip().split()
            #         one[:len(i_data), ii] = i_data
            #         ii += 1
            # The offsets recorded in __init__ let us seek straight to the
            # first requested line instead of scanning from the file start.
            fid.seek(offsets[start_read - start_line], 0)
            for oo in one.T:
                i_data = fid.readline().strip().split()
                oo[:len(i_data)] = i_data
        # in theory we could index in the loop above, but it's painfully slow,
        # so let's just take a hopefully minor memory hit
        if filetype == 'non-parsed':
            ch_idxs = [col_names.index(f'{det}-{i_type}')
                       for det in detectors
                       for i_type in ['DC', 'AC', 'Ph']]
            one = one[ch_idxs].reshape(  # each "time point" multiplexes srcs
                len(detectors), 3, -1, source_num
            ).transpose(  # reorganize into (det, source, DC/AC/Ph, t) order
                0, 3, 1, 2
            ).reshape(  # reshape the way we store it (det x source x DAP, t)
                len(detectors) * source_num * 3, -1)
        else:
            assert filetype == 'parsed'
            ch_idxs = [col_names.index(f'{det}-{i_type}{si + 1}')
                       for det in detectors
                       for si in range(source_num)
                       for i_type in ['DC', 'AC', 'Ph']]
            one = one[ch_idxs]
        # Place our data into the data object in place.
        _mult_cal_one(data, one, idx, cals, mult)
| bsd-3-clause | a3aface4eb25fb090ead2df52b73c970 | 40.984791 | 79 | 0.49194 | 3.983405 | false | false | false | false |
mne-tools/mne-python | mne/preprocessing/maxfilter.py | 11 | 6419 | # Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Matti Hämäläinen <msh@nmr.mgh.harvard.edu>
# Martin Luessi <mluessi@nmr.mgh.harvard.edu>
#
# License: BSD-3-Clause
import os
from ..bem import fit_sphere_to_headshape
from ..io import read_raw_fif
from ..utils import logger, verbose, warn
def _mxwarn(msg):
    """Warn about a possible MaxFilter bug, pointing at the known-bugs page."""
    template = ('Possible MaxFilter bug: %s, more info: '
                'http://imaging.mrc-cbu.cam.ac.uk/meg/maxbugs')
    warn(template % msg)
@verbose
def apply_maxfilter(in_fname, out_fname, origin=None, frame='device',
                    bad=None, autobad='off', skip=None, force=False,
                    st=False, st_buflen=16.0, st_corr=0.96, mv_trans=None,
                    mv_comp=False, mv_headpos=False, mv_hp=None,
                    mv_hpistep=None, mv_hpisubt=None, mv_hpicons=True,
                    linefreq=None, cal=None, ctc=None, mx_args='',
                    overwrite=True, verbose=None):
    """Apply NeuroMag MaxFilter to raw data.

    Needs Maxfilter license, maxfilter has to be in PATH.

    Parameters
    ----------
    in_fname : str
        Input file name.
    out_fname : str
        Output file name.
    origin : array-like or str
        Head origin in mm. If None it will be estimated from headshape points.
    frame : str ('device' or 'head')
        Coordinate frame for head center.
    bad : str, list (or None)
        List of static bad channels. Can be a list with channel names, or a
        string with channels (names or logical channel numbers).
    autobad : str ('on', 'off', 'n')
        Sets automated bad channel detection on or off.
    skip : str or a list of float-tuples (or None)
        Skips raw data sequences, time intervals pairs in sec,
        e.g.: 0 30 120 150.
    force : bool
        Ignore program warnings.
    st : bool
        Apply the time-domain MaxST extension.
    st_buflen : float
        MaxSt buffer length in sec (disabled if st is False).
    st_corr : float
        MaxSt subspace correlation limit (disabled if st is False).
    mv_trans : str (filename or 'default') (or None)
        Transforms the data into the coil definitions of in_fname, or into the
        default frame (None: don't use option).
    mv_comp : bool (or 'inter')
        Estimates and compensates head movements in continuous raw data.
    mv_headpos : bool
        Estimates and stores head position parameters, but does not compensate
        movements (disabled if mv_comp is False).
    mv_hp : str (or None)
        Stores head position data in an ascii file
        (disabled if mv_comp is False).
    mv_hpistep : float (or None)
        Sets head position update interval in ms (disabled if mv_comp is
        False).
    mv_hpisubt : str ('amp', 'base', 'off') (or None)
        Subtracts hpi signals: sine amplitudes, amp + baseline, or switch off
        (disabled if mv_comp is False).
    mv_hpicons : bool
        Check initial consistency isotrak vs hpifit
        (disabled if mv_comp is False).
    linefreq : int (50, 60) (or None)
        Sets the basic line interference frequency (50 or 60 Hz)
        (None: do not use line filter).
    cal : str
        Path to calibration file.
    ctc : str
        Path to Cross-talk compensation file.
    mx_args : str
        Additional command line arguments to pass to MaxFilter.
    %(overwrite)s
    %(verbose)s

    Returns
    -------
    origin: str
        Head origin in selected coordinate frame.
    """
    # check for possible maxfilter bugs
    if mv_trans is not None and mv_comp:
        _mxwarn("Don't use '-trans' with head-movement compensation "
                "'-movecomp'")
    if autobad != 'off' and (mv_headpos or mv_comp):
        _mxwarn("Don't use '-autobad' with head-position estimation "
                "'-headpos' or movement compensation '-movecomp'")
    if st and autobad != 'off':
        _mxwarn("Don't use '-autobad' with '-st' option")
    # determine the head origin if necessary
    if origin is None:
        logger.info('Estimating head origin from headshape points..')
        raw = read_raw_fif(in_fname)
        r, o_head, o_dev = fit_sphere_to_headshape(raw.info, units='mm')
        raw.close()
        logger.info('[done]')
        if frame == 'head':
            origin = o_head
        elif frame == 'device':
            origin = o_dev
        else:
            raise RuntimeError('invalid frame for origin')
    if not isinstance(origin, str):
        origin = '%0.1f %0.1f %0.1f' % (origin[0], origin[1], origin[2])
    # format command
    cmd = ('maxfilter -f %s -o %s -frame %s -origin %s '
           % (in_fname, out_fname, frame, origin))
    if bad is not None:
        # format the channels
        if not isinstance(bad, list):
            bad = bad.split()
        # MEG channel names are passed as logical numbers (strip the 'MEG'
        # prefix); anything else is passed through unchanged.
        bad = map(str, bad)
        bad_logic = [ch[3:] if ch.startswith('MEG') else ch for ch in bad]
        bad_str = ' '.join(bad_logic)
        cmd += '-bad %s ' % bad_str
    cmd += '-autobad %s ' % autobad
    if skip is not None:
        if isinstance(skip, list):
            skip = ' '.join(['%0.3f %0.3f' % (s[0], s[1]) for s in skip])
        cmd += '-skip %s ' % skip
    if force:
        cmd += '-force '
    if st:
        cmd += '-st '
        cmd += ' %d ' % st_buflen
        cmd += '-corr %0.4f ' % st_corr
    if mv_trans is not None:
        cmd += '-trans %s ' % mv_trans
    if mv_comp:
        cmd += '-movecomp '
        if mv_comp == 'inter':
            cmd += ' inter '
        if mv_headpos:
            cmd += '-headpos '
        if mv_hp is not None:
            cmd += '-hp %s ' % mv_hp
        if mv_hpisubt is not None:
            # Bug fix: the flag previously lacked its leading dash
            # ('hpisubt'), which MaxFilter does not recognize as an option.
            cmd += '-hpisubt %s ' % mv_hpisubt
        if mv_hpicons:
            cmd += '-hpicons '
    if linefreq is not None:
        cmd += '-linefreq %d ' % linefreq
    if cal is not None:
        cmd += '-cal %s ' % cal
    if ctc is not None:
        cmd += '-ctc %s ' % ctc
    cmd += mx_args
    if overwrite and os.path.exists(out_fname):
        os.remove(out_fname)
    logger.info('Running MaxFilter: %s ' % cmd)
    # Use a dedicated name for the exit status instead of shadowing the
    # boolean ``st`` parameter (which made the code confusing to read).
    if os.getenv('_MNE_MAXFILTER_TEST', '') != 'true':  # fake maxfilter
        exit_code = os.system(cmd)
    else:
        print(cmd)  # we can check the output
        exit_code = 0
    if exit_code != 0:
        raise RuntimeError('MaxFilter returned non-zero exit status %d'
                           % exit_code)
    logger.info('[done]')
    return origin
| bsd-3-clause | 22e383c2d5ea73c07af1113ef3b5c400 | 31.568528 | 78 | 0.573722 | 3.562465 | false | false | false | false |
mne-tools/mne-python | tutorials/epochs/10_epochs_overview.py | 11 | 17877 | # -*- coding: utf-8 -*-
"""
.. _tut-epochs-class:
=============================================
The Epochs data structure: discontinuous data
=============================================
This tutorial covers the basics of creating and working with :term:`epoched
<epochs>` data. It introduces the :class:`~mne.Epochs` data structure in
detail, including how to load, query, subselect, export, and plot data from an
:class:`~mne.Epochs` object. For more information about visualizing
:class:`~mne.Epochs` objects, see :ref:`tut-visualize-epochs`. For info on
creating an :class:`~mne.Epochs` object from (possibly simulated) data in a
:class:`NumPy array <numpy.ndarray>`, see :ref:`tut-creating-data-structures`.
As usual we'll start by importing the modules we need:
"""
# %%
import mne
# %%
# :class:`~mne.Epochs` objects are a data structure for representing and
# analyzing equal-duration chunks of the EEG/MEG signal. :class:`~mne.Epochs`
# are most often used to represent data that is time-locked to repeated
# experimental events (such as stimulus onsets or subject button presses), but
# can also be used for storing sequential or overlapping frames of a continuous
# signal (e.g., for analysis of resting-state activity; see
# :ref:`fixed-length-events`). Inside an :class:`~mne.Epochs` object, the data
# are stored in an :class:`array <numpy.ndarray>` of shape ``(n_epochs,
# n_channels, n_times)``.
#
# :class:`~mne.Epochs` objects have many similarities with :class:`~mne.io.Raw`
# objects, including:
#
# - They can be loaded from and saved to disk in ``.fif`` format, and their
# data can be exported to a :class:`NumPy array <numpy.ndarray>` through the
# :meth:`~mne.Epochs.get_data` method or to a :class:`Pandas DataFrame
# <pandas.DataFrame>` through the :meth:`~mne.Epochs.to_data_frame` method.
#
# - Both :class:`~mne.Epochs` and :class:`~mne.io.Raw` objects support channel
# selection by index or name, including :meth:`~mne.Epochs.pick`,
# :meth:`~mne.Epochs.pick_channels` and :meth:`~mne.Epochs.pick_types`
# methods.
#
# - :term:`SSP projector <projector>` manipulation is possible through
# :meth:`~mne.Epochs.add_proj`, :meth:`~mne.Epochs.del_proj`, and
# :meth:`~mne.Epochs.plot_projs_topomap` methods.
#
# - Both :class:`~mne.Epochs` and :class:`~mne.io.Raw` objects have
# :meth:`~mne.Epochs.copy`, :meth:`~mne.Epochs.crop`,
# :meth:`~mne.Epochs.time_as_index`, :meth:`~mne.Epochs.filter`, and
# :meth:`~mne.Epochs.resample` methods.
#
# - Both :class:`~mne.Epochs` and :class:`~mne.io.Raw` objects have
# :attr:`~mne.Epochs.times`, :attr:`~mne.Epochs.ch_names`,
# :attr:`~mne.Epochs.proj`, and :class:`info <mne.Info>` attributes.
#
# - Both :class:`~mne.Epochs` and :class:`~mne.io.Raw` objects have built-in
# plotting methods :meth:`~mne.Epochs.plot`, :meth:`~mne.Epochs.plot_psd`,
# and :meth:`~mne.Epochs.plot_psd_topomap`.
#
#
# Creating Epoched data from a ``Raw`` object
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# The example dataset we've been using thus far doesn't include pre-epoched
# data, so in this section we'll load the continuous data and create epochs
# based on the events recorded in the :class:`~mne.io.Raw` object's STIM
# channels. As we often do in these tutorials, we'll :meth:`~mne.io.Raw.crop`
# the :class:`~mne.io.Raw` data to save memory:
sample_data_folder = mne.datasets.sample.data_path()
sample_data_raw_file = (sample_data_folder / 'MEG' / 'sample' /
'sample_audvis_raw.fif')
raw = mne.io.read_raw_fif(sample_data_raw_file, verbose=False).crop(tmax=60)
# %%
# As we saw in the :ref:`tut-events-vs-annotations` tutorial, we can extract an
# events array from :class:`~mne.io.Raw` objects using :func:`mne.find_events`:
events = mne.find_events(raw, stim_channel='STI 014')
# %%
# .. note::
#
# We could also have loaded the events from file, using
# :func:`mne.read_events`::
#
# sample_data_events_file = os.path.join(sample_data_folder,
# 'MEG', 'sample',
# 'sample_audvis_raw-eve.fif')
# events_from_file = mne.read_events(sample_data_events_file)
#
# See :ref:`tut-section-events-io` for more details.
#
#
# The :class:`~mne.io.Raw` object and the events array are the bare minimum
# needed to create an :class:`~mne.Epochs` object, which we create with the
# :class:`mne.Epochs` class constructor. However, you will almost surely want
# to change some of the other default parameters. Here we'll change ``tmin``
# and ``tmax`` (the time relative to each event at which to start and end each
# epoch). Note also that the :class:`~mne.Epochs` constructor accepts
# parameters ``reject`` and ``flat`` for rejecting individual epochs based on
# signal amplitude. See the :ref:`tut-reject-epochs-section` section for
# examples.
epochs = mne.Epochs(raw, events, tmin=-0.3, tmax=0.7)
# %%
# You'll see from the output that:
#
# - all 320 events were used to create epochs
#
# - baseline correction was automatically applied (by default, baseline is
# defined as the time span from ``tmin`` to ``0``, but can be customized with
# the ``baseline`` parameter)
#
# - no additional metadata was provided (see :ref:`tut-epochs-metadata` for
# details)
#
# - the projection operators present in the :class:`~mne.io.Raw` file were
# copied over to the :class:`~mne.Epochs` object
#
# If we print the :class:`~mne.Epochs` object, we'll also see a note that the
# epochs are not copied into memory by default, and a count of the number of
# epochs created for each integer Event ID.
print(epochs)
# %%
# Notice that the Event IDs are in quotes; since we didn't provide an event
# dictionary, the :class:`mne.Epochs` constructor created one automatically and
# used the string representation of the integer Event IDs as the dictionary
# keys. This is more clear when viewing the ``event_id`` attribute:
print(epochs.event_id)
# %%
# This time let's pass ``preload=True`` and provide an event dictionary; our
# provided dictionary will get stored as the ``event_id`` attribute and will
# make referencing events and pooling across event types easier:
event_dict = {'auditory/left': 1, 'auditory/right': 2, 'visual/left': 3,
'visual/right': 4, 'face': 5, 'buttonpress': 32}
epochs = mne.Epochs(raw, events, tmin=-0.3, tmax=0.7, event_id=event_dict,
preload=True)
print(epochs.event_id)
del raw # we're done with raw, free up some memory
# %%
# Notice that the output now mentions "1 bad epoch dropped". In the tutorial
# section :ref:`tut-reject-epochs-section` we saw how you can specify channel
# amplitude criteria for rejecting epochs, but here we haven't specified any
# such criteria. In this case, it turns out that the last event was too close
# the end of the (cropped) raw file to accommodate our requested ``tmax`` of
# 0.7 seconds, so the final epoch was dropped because it was too short. Here
# are the ``drop_log`` entries for the last 4 epochs (empty lists indicate
# epochs that were *not* dropped):
print(epochs.drop_log[-4:])
# %%
# .. note::
#
# If you forget to provide the event dictionary to the :class:`~mne.Epochs`
# constructor, you can add it later by assigning to the ``event_id``
# attribute::
#
# epochs.event_id = event_dict
#
#
# Basic visualization of ``Epochs`` objects
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# The :class:`~mne.Epochs` object can be visualized (and browsed interactively)
# using its :meth:`~mne.Epochs.plot` method:
epochs.plot(n_epochs=10)
# %%
# Notice that the individual epochs are sequentially numbered along the bottom
# axis and are separated by vertical dashed lines.
# Epoch plots are interactive (similar to :meth:`raw.plot()
# <mne.io.Raw.plot>`) and have many of the same interactive controls as
# :class:`~mne.io.Raw` plots. Horizontal and vertical scrollbars allow browsing
# through epochs or channels (respectively), and pressing :kbd:`?` when the
# plot is focused will show a help screen with all the available controls. See
# :ref:`tut-visualize-epochs` for more details (as well as other ways of
# visualizing epoched data).
#
#
# .. _tut-section-subselect-epochs:
#
# Subselecting epochs
# ^^^^^^^^^^^^^^^^^^^
#
# Now that we have our :class:`~mne.Epochs` object with our descriptive event
# labels added, we can subselect epochs easily using square brackets. For
# example, we can load all the "catch trials" where the stimulus was a face:
print(epochs['face'])
# %%
# We can also pool across conditions easily, thanks to how MNE-Python handles
# the ``/`` character in epoch labels (using what is sometimes called
# "tag-based indexing"):
# pool across left + right
print(epochs['auditory'])
assert len(epochs['auditory']) == (len(epochs['auditory/left']) +
len(epochs['auditory/right']))
# pool across auditory + visual
print(epochs['left'])
assert len(epochs['left']) == (len(epochs['auditory/left']) +
len(epochs['visual/left']))
# %%
# You can also pool conditions by passing multiple tags as a list. Note that
# MNE-Python will not complain if you ask for tags not present in the object,
# as long as it can find *some* match: the below example is parsed as
# (inclusive) ``'right'`` **or** ``'bottom'``, and you can see from the output
# that it selects only ``auditory/right`` and ``visual/right``.
print(epochs[['right', 'bottom']])
# %%
# However, if no match is found, a ``KeyError`` is raised:
try:
print(epochs[['top', 'bottom']])
except KeyError:
print('Tag-based selection with no matches raises a KeyError!')
# %%
# Selecting epochs by index
# ~~~~~~~~~~~~~~~~~~~~~~~~~
#
# :class:`~mne.Epochs` objects can also be indexed with integers, :term:`slices
# <slice>`, or lists of integers. This method of selection ignores event
# labels, so if you want the first 10 epochs of a particular type, you can
# select the type first, then use integers or slices:
print(epochs[:10]) # epochs 0-9
print(epochs[1:8:2]) # epochs 1, 3, 5, 7
print(epochs['buttonpress'][:4]) # first 4 "buttonpress" epochs
print(epochs['buttonpress'][[0, 1, 2, 3]]) # same as previous line
# %%
# Selecting, dropping, and reordering channels
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# You can use the :meth:`~mne.Epochs.pick`, :meth:`~mne.Epochs.pick_channels`,
# :meth:`~mne.Epochs.pick_types`, and :meth:`~mne.Epochs.drop_channels` methods
# to modify which channels are included in an :class:`~mne.Epochs` object. You
# can also use :meth:`~mne.Epochs.reorder_channels` for this purpose; any
# channel names not provided to :meth:`~mne.Epochs.reorder_channels` will be
# dropped. Note that these *channel* selection methods modify the object
# in-place (unlike the square-bracket indexing to select *epochs* seen above)
# so in interactive/exploratory sessions you may want to create a
# :meth:`~mne.Epochs.copy` first.
epochs_eeg = epochs.copy().pick_types(meg=False, eeg=True)
print(epochs_eeg.ch_names)
new_order = ['EEG 002', 'STI 014', 'EOG 061', 'MEG 2521']
epochs_subset = epochs.copy().reorder_channels(new_order)
print(epochs_subset.ch_names)
# %%
del epochs_eeg, epochs_subset
# %%
# Changing channel name and type
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# You can change the name or type of a channel using
# :meth:`~mne.Epochs.rename_channels` or :meth:`~mne.Epochs.set_channel_types`.
# Both methods take :class:`dictionaries <dict>` where the keys are existing
# channel names, and the values are the new name (or type) for that channel.
# Existing channels that are not in the dictionary will be unchanged.
epochs.rename_channels({'EOG 061': 'BlinkChannel'})
epochs.set_channel_types({'EEG 060': 'ecg'})
print(list(zip(epochs.ch_names, epochs.get_channel_types()))[-4:])
# %%
# let's set them back to the correct values before moving on
epochs.rename_channels({'BlinkChannel': 'EOG 061'})
epochs.set_channel_types({'EEG 060': 'eeg'})
# %%
# Selection in the time domain
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# To change the temporal extent of the :class:`~mne.Epochs`, you can use the
# :meth:`~mne.Epochs.crop` method:
shorter_epochs = epochs.copy().crop(tmin=-0.1, tmax=0.1, include_tmax=True)
for name, obj in dict(Original=epochs, Cropped=shorter_epochs).items():
print('{} epochs has {} time samples'
.format(name, obj.get_data().shape[-1]))
# %%
# Cropping removed part of the baseline. When printing the
# cropped :class:`~mne.Epochs`, MNE-Python will inform you about the time
# period that was originally used to perform baseline correction by displaying
# the string "baseline period cropped after baseline correction":
print(shorter_epochs)
# %%
# However, if you wanted to *expand* the time domain of an :class:`~mne.Epochs`
# object, you would need to go back to the :class:`~mne.io.Raw` data and
# recreate the :class:`~mne.Epochs` with different values for ``tmin`` and/or
# ``tmax``.
#
# It is also possible to change the "zero point" that defines the time values
# in an :class:`~mne.Epochs` object, with the :meth:`~mne.Epochs.shift_time`
# method. :meth:`~mne.Epochs.shift_time` allows shifting times relative to the
# current values, or specifying a fixed time to set as the new time value of
# the first sample (deriving the new time values of subsequent samples based on
# the :class:`~mne.Epochs` object's sampling frequency).
# shift times so that first sample of each epoch is at time zero
later_epochs = epochs.copy().shift_time(tshift=0., relative=False)
print(later_epochs.times[:3])
# shift times by a relative amount
later_epochs.shift_time(tshift=-7, relative=True)
print(later_epochs.times[:3])
# %%
del shorter_epochs, later_epochs
# %%
# Note that although time shifting respects the sampling frequency (the spacing
# between samples), it does not enforce the assumption that there is a sample
# occurring at exactly time=0.
#
#
# Extracting data in other forms
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# The :meth:`~mne.Epochs.get_data` method returns the epoched data as a
# :class:`NumPy array <numpy.ndarray>`, of shape ``(n_epochs, n_channels,
# n_times)``; an optional ``picks`` parameter selects a subset of channels by
# index, name, or type:
eog_data = epochs.get_data(picks='EOG 061')
meg_data = epochs.get_data(picks=['mag', 'grad'])
channel_4_6_8 = epochs.get_data(picks=slice(4, 9, 2))
for name, arr in dict(EOG=eog_data, MEG=meg_data, Slice=channel_4_6_8).items():
print('{} contains {} channels'.format(name, arr.shape[1]))
# %%
# Note that if your analysis requires repeatedly extracting single epochs from
# an :class:`~mne.Epochs` object, ``epochs.get_data(item=2)`` will be much
# faster than ``epochs[2].get_data()``, because it avoids the step of
# subsetting the :class:`~mne.Epochs` object first.
#
# You can also export :class:`~mne.Epochs` data to :class:`Pandas DataFrames
# <pandas.DataFrame>`. Here, the :class:`~pandas.DataFrame` index will be
# constructed by converting the time of each sample into milliseconds and
# rounding it to the nearest integer, and combining it with the event types and
# epoch numbers to form a hierarchical :class:`~pandas.MultiIndex`. Each
# channel will appear in a separate column. Then you can use any of Pandas'
# tools for grouping and aggregating data; for example, here we select any
# epochs numbered 10 or less from the ``auditory/left`` condition, and extract
# times between 100 and 107 ms on channels ``EEG 056`` through ``EEG 058``
# (note that slice indexing within Pandas' :obj:`~pandas.DataFrame.loc` is
# inclusive of the endpoint):
df = epochs.to_data_frame(index=['condition', 'epoch', 'time'])
df.sort_index(inplace=True)
print(df.loc[('auditory/left', slice(0, 10), slice(100, 107)),
'EEG 056':'EEG 058'])
del df
# %%
# See the :ref:`tut-epochs-dataframe` tutorial for many more examples of the
# :meth:`~mne.Epochs.to_data_frame` method.
#
#
# Loading and saving ``Epochs`` objects to disk
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# :class:`~mne.Epochs` objects can be loaded and saved in the ``.fif`` format
# just like :class:`~mne.io.Raw` objects, using the :func:`mne.read_epochs`
# function and the :meth:`~mne.Epochs.save` method. Functions are also
# available for loading data that was epoched outside of MNE-Python, such as
# :func:`mne.read_epochs_eeglab` and :func:`mne.read_epochs_kit`.
epochs.save('saved-audiovisual-epo.fif', overwrite=True)
epochs_from_file = mne.read_epochs('saved-audiovisual-epo.fif', preload=False)
# %%
# The MNE-Python naming convention for epochs files is that the file basename
# (the part before the ``.fif`` or ``.fif.gz`` extension) should end with
# ``-epo`` or ``_epo``, and a warning will be issued if the filename you
# provide does not adhere to that convention.
#
# As a final note, be aware that the class of the epochs object is different
# when epochs are loaded from disk rather than generated from a
# :class:`~mne.io.Raw` object:
print(type(epochs))
print(type(epochs_from_file))
# %%
# In almost all cases this will not require changing anything about your code.
# However, if you need to do type checking on epochs objects, you can test
# against the base class that these classes are derived from:
print(all([isinstance(epochs, mne.BaseEpochs),
isinstance(epochs_from_file, mne.BaseEpochs)]))
# %%
# Iterating over ``Epochs``
# ^^^^^^^^^^^^^^^^^^^^^^^^^
#
# Iterating over an :class:`~mne.Epochs` object will yield :class:`arrays
# <numpy.ndarray>` rather than single-trial :class:`~mne.Epochs` objects:
for epoch in epochs[:3]:
print(type(epoch))
# %%
# If you want to iterate over :class:`~mne.Epochs` objects, you can use an
# integer index as the iterator:
for index in range(3):
print(type(epochs[index]))
| bsd-3-clause | 5b5fa76973c48a0172650a4800a07637 | 39.354402 | 79 | 0.686804 | 3.373019 | false | false | false | false |
mne-tools/mne-python | mne/io/ctf/eeg.py | 8 | 3732 | """Read .eeg files."""
# Author: Eric Larson <larson.eric.d<gmail.com>
#
# License: BSD-3-Clause
import numpy as np
from os.path import join
from os import listdir
from ...utils import logger, warn
from ..constants import FIFF
from .res4 import _make_ctf_name
from ...transforms import apply_trans
# Map the fiducial labels found in CTF ``.eeg`` files (including the
# 'left'/'right' aliases) to the corresponding FIFF cardinal point IDs.
_cardinal_dict = dict(nasion=FIFF.FIFFV_POINT_NASION,
                      lpa=FIFF.FIFFV_POINT_LPA, left=FIFF.FIFFV_POINT_LPA,
                      rpa=FIFF.FIFFV_POINT_RPA, right=FIFF.FIFFV_POINT_RPA)
def _read_eeg(directory):
    """Read EEG electrode labels and positions from the CTF ``.eeg`` file.

    The file is optional; ``None`` is returned (after an informational log
    message) when it is not present in *directory*.
    """
    # A missing .eeg file is not an error for CTF data sets.
    fname, found = _make_ctf_name(directory, 'eeg', raise_error=False)
    if not found:
        logger.info('    Separate EEG position data file not present.')
        return
    eeg = dict(labels=list(), kinds=list(), ids=list(), rr=list(), np=0,
               assign_to_chs=True, coord_frame=FIFF.FIFFV_MNE_COORD_CTF_HEAD)
    with open(fname, 'rb') as fid:
        for raw_line in fid:
            raw_line = raw_line.strip()
            if not raw_line:
                continue
            parts = raw_line.decode('utf-8').split()
            # Each entry must be: id, label, x, y, z.
            if len(parts) != 5:
                raise RuntimeError('Illegal data in EEG position file: %s'
                                   % raw_line)
            pos = np.array([float(p) for p in parts[2:]]) / 100.
            # Points at (or extremely close to) the origin are placeholders,
            # not real digitized locations, so they are skipped.
            if (pos * pos).sum() <= 1e-4:
                continue
            name = parts[1]
            eeg['labels'].append(name)
            eeg['rr'].append(pos)
            eeg['ids'].append(_cardinal_dict.get(name.lower(), int(parts[0])))
            if name.lower() in _cardinal_dict:
                eeg['kinds'].append(FIFF.FIFFV_POINT_CARDINAL)
            else:
                eeg['kinds'].append(FIFF.FIFFV_POINT_EXTRA)
            eeg['np'] += 1
    logger.info('    Separate EEG position data file read.')
    return eeg
def _read_pos(directory, transformations):
"""Read the .pos file and return eeg positions as dig extra points."""
fname = [join(directory, f) for f in listdir(directory) if
f.endswith('.pos')]
if len(fname) < 1:
return list()
elif len(fname) > 1:
warn(' Found multiple pos files. Extra digitizer points not added.')
return list()
logger.info(' Reading digitizer points from %s...' % fname)
if transformations['t_ctf_head_head'] is None:
warn(' No transformation found. Extra digitizer points not added.')
return list()
fname = fname[0]
digs = list()
i = 2000
with open(fname, 'r') as fid:
for line in fid:
line = line.strip()
if len(line) > 0:
parts = line.split()
# The lines can have 4 or 5 parts. First part is for the id,
# which can be an int or a string. The last three are for xyz
# coordinates. The extra part is for additional info
# (e.g. 'Pz', 'Cz') which is ignored.
if len(parts) not in [4, 5]:
continue
try:
ident = int(parts[0]) + 1000
except ValueError: # if id is not an int
ident = i
i += 1
dig = dict(kind=FIFF.FIFFV_POINT_EXTRA, ident=ident, r=list(),
coord_frame=FIFF.FIFFV_COORD_HEAD)
r = np.array([float(p) for p in parts[-3:]]) / 100. # cm to m
if (r * r).sum() > 1e-4:
r = apply_trans(transformations['t_ctf_head_head'], r)
dig['r'] = r
digs.append(dig)
return digs
| bsd-3-clause | af758ff4a4ef79dfa2f9de6e576daa00 | 38.284211 | 79 | 0.511254 | 3.758308 | false | false | false | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.