| text (string, lengths 4–1.02M) | meta (dict) |
|---|---|
"""Support for Goal Zero Yeti Sensors."""
from __future__ import annotations
from homeassistant.components.sensor import (
STATE_CLASS_MEASUREMENT,
STATE_CLASS_TOTAL_INCREASING,
SensorEntity,
SensorEntityDescription,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_NAME,
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_CURRENT,
DEVICE_CLASS_ENERGY,
DEVICE_CLASS_POWER,
DEVICE_CLASS_SIGNAL_STRENGTH,
DEVICE_CLASS_TEMPERATURE,
DEVICE_CLASS_VOLTAGE,
ELECTRIC_CURRENT_AMPERE,
ELECTRIC_POTENTIAL_VOLT,
ENERGY_WATT_HOUR,
PERCENTAGE,
POWER_WATT,
SIGNAL_STRENGTH_DECIBELS,
TEMP_CELSIUS,
TIME_MINUTES,
TIME_SECONDS,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from . import Yeti, YetiEntity
from .const import DATA_KEY_API, DATA_KEY_COORDINATOR, DOMAIN
SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="wattsIn",
name="Watts In",
device_class=DEVICE_CLASS_POWER,
native_unit_of_measurement=POWER_WATT,
state_class=STATE_CLASS_MEASUREMENT,
),
SensorEntityDescription(
key="ampsIn",
name="Amps In",
device_class=DEVICE_CLASS_CURRENT,
native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
state_class=STATE_CLASS_MEASUREMENT,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="wattsOut",
name="Watts Out",
device_class=DEVICE_CLASS_POWER,
native_unit_of_measurement=POWER_WATT,
state_class=STATE_CLASS_MEASUREMENT,
),
SensorEntityDescription(
key="ampsOut",
name="Amps Out",
device_class=DEVICE_CLASS_CURRENT,
native_unit_of_measurement=ELECTRIC_CURRENT_AMPERE,
state_class=STATE_CLASS_MEASUREMENT,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="whOut",
name="WH Out",
device_class=DEVICE_CLASS_ENERGY,
native_unit_of_measurement=ENERGY_WATT_HOUR,
state_class=STATE_CLASS_TOTAL_INCREASING,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="whStored",
name="WH Stored",
device_class=DEVICE_CLASS_ENERGY,
native_unit_of_measurement=ENERGY_WATT_HOUR,
state_class=STATE_CLASS_MEASUREMENT,
),
SensorEntityDescription(
key="volts",
name="Volts",
device_class=DEVICE_CLASS_VOLTAGE,
native_unit_of_measurement=ELECTRIC_POTENTIAL_VOLT,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="socPercent",
name="State of Charge Percent",
device_class=DEVICE_CLASS_BATTERY,
native_unit_of_measurement=PERCENTAGE,
),
SensorEntityDescription(
key="timeToEmptyFull",
name="Time to Empty/Full",
native_unit_of_measurement=TIME_MINUTES,
),
SensorEntityDescription(
key="temperature",
name="Temperature",
device_class=DEVICE_CLASS_TEMPERATURE,
native_unit_of_measurement=TEMP_CELSIUS,
),
SensorEntityDescription(
key="wifiStrength",
name="Wifi Strength",
device_class=DEVICE_CLASS_SIGNAL_STRENGTH,
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="timestamp",
name="Total Run Time",
native_unit_of_measurement=TIME_SECONDS,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="ssid",
name="Wi-Fi SSID",
entity_registry_enabled_default=False,
),
SensorEntityDescription(
key="ipAddr",
name="IP Address",
entity_registry_enabled_default=False,
),
)
async def async_setup_entry(
hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
"""Set up the Goal Zero Yeti sensor."""
name = entry.data[CONF_NAME]
goalzero_data = hass.data[DOMAIN][entry.entry_id]
sensors = [
YetiSensor(
goalzero_data[DATA_KEY_API],
goalzero_data[DATA_KEY_COORDINATOR],
name,
description,
entry.entry_id,
)
for description in SENSOR_TYPES
]
async_add_entities(sensors, True)
class YetiSensor(YetiEntity, SensorEntity):
"""Representation of a Goal Zero Yeti sensor."""
def __init__(
self,
api: Yeti,
coordinator: DataUpdateCoordinator,
name: str,
description: SensorEntityDescription,
server_unique_id: str,
) -> None:
"""Initialize a Goal Zero Yeti sensor."""
super().__init__(api, coordinator, name, server_unique_id)
self._attr_name = f"{name} {description.name}"
self.entity_description = description
self._attr_unique_id = f"{server_unique_id}/{description.key}"
@property
def native_value(self) -> str:
"""Return the state."""
return self.api.data.get(self.entity_description.key)
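# --- Illustrative note (editor's sketch, not part of the integration) ---
# native_value above is a plain lookup keyed by each description's `key`, so a
# made-up, hypothetical payload such as
#     api.data = {"wattsIn": 12.4, "socPercent": 87, "ipAddr": "192.168.1.50"}
# makes the sensor built from the "socPercent" description report 87, while any
# key the device does not report simply resolves to None via dict.get().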
|
{
"content_hash": "883a3daa09929b02776494b353443131",
"timestamp": "",
"source": "github",
"line_count": 173,
"max_line_length": 84,
"avg_line_length": 30.884393063583815,
"alnum_prop": 0.6500093580385551,
"repo_name": "sander76/home-assistant",
"id": "957891e67ed9699c88fb88ac59543ed7feb73df0",
"size": "5343",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/goalzero/sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1795"
},
{
"name": "Python",
"bytes": "36548768"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
}
|
"""
SQLAlchemy models for nova data.
"""
from oslo.config import cfg
from oslo.db.sqlalchemy import models
from sqlalchemy import Column, Index, Integer, BigInteger, Enum, String, schema
from sqlalchemy.dialects.mysql import MEDIUMTEXT
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import orm
from sqlalchemy import ForeignKey, DateTime, Boolean, Text, Float
from nova.db.sqlalchemy import types
from nova.openstack.common import timeutils
CONF = cfg.CONF
BASE = declarative_base()
def MediumText():
return Text().with_variant(MEDIUMTEXT(), 'mysql')
class NovaBase(models.SoftDeleteMixin,
models.TimestampMixin,
models.ModelBase):
metadata = None
# TODO(ekudryashova): remove this after both nova and oslo.db
# will use oslo.utils library
# NOTE: Both projects(nova and oslo.db) use `timeutils.utcnow`, which
# returns specified time(if override_time is set). Time overriding is
# only used by unit tests, but in a lot of places, temporarily overriding
# this columns helps to avoid lots of calls of timeutils.set_override
# from different places in unit tests.
created_at = Column(DateTime, default=lambda: timeutils.utcnow())
updated_at = Column(DateTime, onupdate=lambda: timeutils.utcnow())
def save(self, session=None):
from nova.db.sqlalchemy import api
if session is None:
session = api.get_session()
super(NovaBase, self).save(session=session)
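# --- Illustrative note (editor's sketch, not part of the original module) ---
# Wrapping timeutils.utcnow in a lambda (instead of letting oslo.db's
# TimestampMixin bind its own default) keeps the column default pointing at
# nova's own timeutils, so a test-time override is honoured when the row is
# inserted. Hypothetical test snippet:
#     timeutils.set_time_override(some_fixed_datetime)
#     Service().save()   # created_at defaults to some_fixed_datetime
#     timeutils.clear_time_override()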
class Service(BASE, NovaBase):
"""Represents a running service on a host."""
__tablename__ = 'services'
__table_args__ = (
schema.UniqueConstraint("host", "topic", "deleted",
name="uniq_services0host0topic0deleted"),
schema.UniqueConstraint("host", "binary", "deleted",
name="uniq_services0host0binary0deleted")
)
id = Column(Integer, primary_key=True)
host = Column(String(255)) # , ForeignKey('hosts.id'))
binary = Column(String(255))
topic = Column(String(255))
report_count = Column(Integer, nullable=False, default=0)
disabled = Column(Boolean, default=False)
disabled_reason = Column(String(255))
class ComputeNode(BASE, NovaBase):
"""Represents a running compute service on a host."""
__tablename__ = 'compute_nodes'
__table_args__ = ()
id = Column(Integer, primary_key=True)
service_id = Column(Integer, ForeignKey('services.id'), nullable=False)
service = orm.relationship(Service,
backref=orm.backref('compute_node'),
foreign_keys=service_id,
primaryjoin='and_('
'ComputeNode.service_id == Service.id,'
'ComputeNode.deleted == 0)')
vcpus = Column(Integer, nullable=False)
memory_mb = Column(Integer, nullable=False)
local_gb = Column(Integer, nullable=False)
vcpus_used = Column(Integer, nullable=False)
memory_mb_used = Column(Integer, nullable=False)
local_gb_used = Column(Integer, nullable=False)
hypervisor_type = Column(MediumText(), nullable=False)
hypervisor_version = Column(Integer, nullable=False)
hypervisor_hostname = Column(String(255))
# Free Ram, amount of activity (resize, migration, boot, etc) and
# the number of running VM's are a good starting point for what's
# important when making scheduling decisions.
free_ram_mb = Column(Integer)
free_disk_gb = Column(Integer)
current_workload = Column(Integer)
running_vms = Column(Integer)
# Note(masumotok): Expected Strings example:
#
# '{"arch":"x86_64",
# "model":"Nehalem",
# "topology":{"sockets":1, "threads":2, "cores":3},
# "features":["tdtscp", "xtpr"]}'
#
# Points are "json translatable" and it must have all dictionary keys
# above, since it is copied from <cpu> tag of getCapabilities()
# (See libvirt.virtConnection).
cpu_info = Column(MediumText(), nullable=False)
disk_available_least = Column(Integer)
host_ip = Column(types.IPAddress())
supported_instances = Column(Text)
metrics = Column(Text)
# Note(yongli): json string PCI Stats
# '{"vendor_id":"8086", "product_id":"1234", "count":3 }'
pci_stats = Column(Text)
# extra_resources is a json string containing arbitrary
# data about additional resources.
extra_resources = Column(Text)
# json-encode string containing compute node statistics
stats = Column(Text, default='{}')
# json-encoded dict that contains NUMA topology as generated by
# nova.virt.hardware.VirtNUMAHostTopology.to_json()
numa_topology = Column(Text)
class Certificate(BASE, NovaBase):
"""Represents a x509 certificate."""
__tablename__ = 'certificates'
__table_args__ = (
Index('certificates_project_id_deleted_idx', 'project_id', 'deleted'),
Index('certificates_user_id_deleted_idx', 'user_id', 'deleted')
)
id = Column(Integer, primary_key=True)
user_id = Column(String(255))
project_id = Column(String(255))
file_name = Column(String(255))
class Instance(BASE, NovaBase):
"""Represents a guest VM."""
__tablename__ = 'instances'
__table_args__ = (
Index('uuid', 'uuid', unique=True),
Index('project_id', 'project_id'),
Index('instances_host_deleted_idx',
'host', 'deleted'),
Index('instances_reservation_id_idx',
'reservation_id'),
Index('instances_terminated_at_launched_at_idx',
'terminated_at', 'launched_at'),
Index('instances_uuid_deleted_idx',
'uuid', 'deleted'),
Index('instances_task_state_updated_at_idx',
'task_state', 'updated_at'),
Index('instances_host_node_deleted_idx',
'host', 'node', 'deleted'),
Index('instances_host_deleted_cleaned_idx',
'host', 'deleted', 'cleaned'),
)
injected_files = []
id = Column(Integer, primary_key=True, autoincrement=True)
@property
def name(self):
try:
base_name = CONF.instance_name_template % self.id
except TypeError:
# Support templates like "uuid-%(uuid)s", etc.
info = {}
# NOTE(russellb): Don't use self.iteritems() here, as it will
# result in infinite recursion on the name property.
for column in iter(orm.object_mapper(self).columns):
key = column.name
# prevent recursion if someone specifies %(name)s
# %(name)s will not be valid.
if key == 'name':
continue
info[key] = self[key]
try:
base_name = CONF.instance_name_template % info
except KeyError:
base_name = self.uuid
return base_name
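    # --- Illustrative note (editor's sketch) ---
    # With the default template 'instance-%08x' the first branch applies:
    #     'instance-%08x' % 42  ->  'instance-0000002a'
    # A mapping-style template such as 'uuid-%(uuid)s' raises TypeError on the
    # integer substitution and is retried against the per-column dict built in
    # the except block, yielding 'uuid-<the instance uuid>' instead.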
@property
def _extra_keys(self):
return ['name']
user_id = Column(String(255))
project_id = Column(String(255))
image_ref = Column(String(255))
kernel_id = Column(String(255))
ramdisk_id = Column(String(255))
hostname = Column(String(255))
launch_index = Column(Integer)
key_name = Column(String(255))
key_data = Column(MediumText())
power_state = Column(Integer)
vm_state = Column(String(255))
task_state = Column(String(255))
memory_mb = Column(Integer)
vcpus = Column(Integer)
root_gb = Column(Integer)
ephemeral_gb = Column(Integer)
ephemeral_key_uuid = Column(String(36))
# This is not related to hostname, above. It refers
# to the nova node.
host = Column(String(255)) # , ForeignKey('hosts.id'))
# To identify the "ComputeNode" which the instance resides in.
# This equals to ComputeNode.hypervisor_hostname.
node = Column(String(255))
# *not* flavorid, this is the internal primary_key
instance_type_id = Column(Integer)
user_data = Column(MediumText())
reservation_id = Column(String(255))
scheduled_at = Column(DateTime)
launched_at = Column(DateTime)
terminated_at = Column(DateTime)
availability_zone = Column(String(255))
# User editable field for display in user-facing UIs
display_name = Column(String(255))
display_description = Column(String(255))
# To remember on which host an instance booted.
# An instance may have moved to another host by live migration.
launched_on = Column(MediumText())
# NOTE(jdillaman): locked deprecated in favor of locked_by,
# to be removed in Icehouse
locked = Column(Boolean)
locked_by = Column(Enum('owner', 'admin'))
os_type = Column(String(255))
architecture = Column(String(255))
vm_mode = Column(String(255))
uuid = Column(String(36))
root_device_name = Column(String(255))
default_ephemeral_device = Column(String(255))
default_swap_device = Column(String(255))
config_drive = Column(String(255))
# User editable field meant to represent what ip should be used
# to connect to the instance
access_ip_v4 = Column(types.IPAddress())
access_ip_v6 = Column(types.IPAddress())
auto_disk_config = Column(Boolean())
progress = Column(Integer)
# EC2 instance_initiated_shutdown_terminate
# True: -> 'terminate'
# False: -> 'stop'
# Note(maoy): currently Nova will always stop instead of terminate
# no matter what the flag says. So we set the default to False.
shutdown_terminate = Column(Boolean(), default=False)
# EC2 disable_api_termination
disable_terminate = Column(Boolean(), default=False)
# OpenStack compute cell name. This will only be set at the top of
# the cells tree and it'll be a full cell name such as 'api!hop1!hop2'
cell_name = Column(String(255))
internal_id = Column(Integer)
# Records whether an instance has been deleted from disk
cleaned = Column(Integer, default=0)
class InstanceInfoCache(BASE, NovaBase):
"""Represents a cache of information about an instance
"""
__tablename__ = 'instance_info_caches'
__table_args__ = (
schema.UniqueConstraint(
"instance_uuid",
name="uniq_instance_info_caches0instance_uuid"),)
id = Column(Integer, primary_key=True, autoincrement=True)
# text column used for storing a json object of network data for api
network_info = Column(MediumText())
instance_uuid = Column(String(36), ForeignKey('instances.uuid'),
nullable=False)
instance = orm.relationship(Instance,
backref=orm.backref('info_cache', uselist=False),
foreign_keys=instance_uuid,
primaryjoin=instance_uuid == Instance.uuid)
class InstanceExtra(BASE, NovaBase):
__tablename__ = 'instance_extra'
__table_args__ = (
Index('instance_extra_idx', 'instance_uuid'),)
id = Column(Integer, primary_key=True, autoincrement=True)
instance_uuid = Column(String(36), ForeignKey('instances.uuid'),
nullable=False)
numa_topology = Column(Text)
pci_requests = Column(Text)
instance = orm.relationship(Instance,
backref=orm.backref('numa_topology',
uselist=False),
foreign_keys=instance_uuid,
primaryjoin=instance_uuid == Instance.uuid)
class InstanceTypes(BASE, NovaBase):
"""Represents possible flavors for instances.
Note: instance_type and flavor are synonyms and the term instance_type is
deprecated and in the process of being removed.
"""
__tablename__ = "instance_types"
__table_args__ = (
schema.UniqueConstraint("flavorid", "deleted",
name="uniq_instance_types0flavorid0deleted"),
schema.UniqueConstraint("name", "deleted",
name="uniq_instance_types0name0deleted")
)
# Internal only primary key/id
id = Column(Integer, primary_key=True)
name = Column(String(255))
memory_mb = Column(Integer, nullable=False)
vcpus = Column(Integer, nullable=False)
root_gb = Column(Integer)
ephemeral_gb = Column(Integer)
# Public facing id will be renamed public_id
flavorid = Column(String(255))
swap = Column(Integer, nullable=False, default=0)
rxtx_factor = Column(Float, default=1)
vcpu_weight = Column(Integer)
disabled = Column(Boolean, default=False)
is_public = Column(Boolean, default=True)
class Volume(BASE, NovaBase):
"""Represents a block storage device that can be attached to a VM."""
__tablename__ = 'volumes'
__table_args__ = (
Index('volumes_instance_uuid_idx', 'instance_uuid'),
)
id = Column(String(36), primary_key=True, nullable=False)
deleted = Column(String(36), default="")
@property
def name(self):
return CONF.volume_name_template % self.id
ec2_id = Column(String(255))
user_id = Column(String(255))
project_id = Column(String(255))
snapshot_id = Column(String(36))
host = Column(String(255))
size = Column(Integer)
availability_zone = Column(String(255))
instance_uuid = Column(String(36))
mountpoint = Column(String(255))
attach_time = Column(DateTime)
status = Column(String(255)) # TODO(vish): enum?
attach_status = Column(String(255)) # TODO(vish): enum
scheduled_at = Column(DateTime)
launched_at = Column(DateTime)
terminated_at = Column(DateTime)
display_name = Column(String(255))
display_description = Column(String(255))
provider_location = Column(String(256))
provider_auth = Column(String(256))
volume_type_id = Column(Integer)
class Quota(BASE, NovaBase):
"""Represents a single quota override for a project.
If there is no row for a given project id and resource, then the
default for the quota class is used. If there is no row for a
given quota class and resource, then the default for the
deployment is used. If the row is present but the hard limit is
Null, then the resource is unlimited.
"""
__tablename__ = 'quotas'
__table_args__ = (
schema.UniqueConstraint("project_id", "resource", "deleted",
name="uniq_quotas0project_id0resource0deleted"
),
)
id = Column(Integer, primary_key=True)
project_id = Column(String(255))
resource = Column(String(255), nullable=False)
hard_limit = Column(Integer)
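# --- Illustrative sketch (editor's addition, not part of nova) ---
# The fallback order described in Quota's docstring, written out as a plain
# helper; the three arguments are hypothetical stand-ins for the rows/defaults
# the real quota engine would look up.
def _example_effective_limit(project_row, class_row, deployment_default):
    """Resolve a limit: project override -> quota class -> deployment default.

    A row that exists but whose hard_limit is None means the resource is
    unlimited.
    """
    row = project_row if project_row is not None else class_row
    if row is None:
        return deployment_default
    return row.hard_limit  # None means unlimited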
class ProjectUserQuota(BASE, NovaBase):
"""Represents a single quota override for a user with in a project."""
__tablename__ = 'project_user_quotas'
uniq_name = "uniq_project_user_quotas0user_id0project_id0resource0deleted"
__table_args__ = (
schema.UniqueConstraint("user_id", "project_id", "resource", "deleted",
name=uniq_name),
Index('project_user_quotas_project_id_deleted_idx',
'project_id', 'deleted'),
Index('project_user_quotas_user_id_deleted_idx',
'user_id', 'deleted')
)
id = Column(Integer, primary_key=True, nullable=False)
project_id = Column(String(255), nullable=False)
user_id = Column(String(255), nullable=False)
resource = Column(String(255), nullable=False)
hard_limit = Column(Integer)
class QuotaClass(BASE, NovaBase):
"""Represents a single quota override for a quota class.
If there is no row for a given quota class and resource, then the
default for the deployment is used. If the row is present but the
hard limit is Null, then the resource is unlimited.
"""
__tablename__ = 'quota_classes'
__table_args__ = (
Index('ix_quota_classes_class_name', 'class_name'),
)
id = Column(Integer, primary_key=True)
class_name = Column(String(255))
resource = Column(String(255))
hard_limit = Column(Integer)
class QuotaUsage(BASE, NovaBase):
"""Represents the current usage for a given resource."""
__tablename__ = 'quota_usages'
__table_args__ = (
Index('ix_quota_usages_project_id', 'project_id'),
)
id = Column(Integer, primary_key=True)
project_id = Column(String(255))
user_id = Column(String(255))
resource = Column(String(255), nullable=False)
in_use = Column(Integer, nullable=False)
reserved = Column(Integer, nullable=False)
    allocated = Column(Integer, nullable=False)
@property
def total(self):
        return self.in_use + self.reserved + self.allocated
until_refresh = Column(Integer)
class Reservation(BASE, NovaBase):
"""Represents a resource reservation for quotas."""
__tablename__ = 'reservations'
__table_args__ = (
Index('ix_reservations_project_id', 'project_id'),
Index('reservations_uuid_idx', 'uuid'),
Index('reservations_deleted_expire_idx', 'deleted', 'expire'),
)
id = Column(Integer, primary_key=True, nullable=False)
uuid = Column(String(36), nullable=False)
usage_id = Column(Integer, ForeignKey('quota_usages.id'), nullable=False)
project_id = Column(String(255))
user_id = Column(String(255))
resource = Column(String(255))
delta = Column(Integer, nullable=False)
expire = Column(DateTime)
usage = orm.relationship(
"QuotaUsage",
foreign_keys=usage_id,
primaryjoin='and_(Reservation.usage_id == QuotaUsage.id,'
'QuotaUsage.deleted == 0)')
class Snapshot(BASE, NovaBase):
"""Represents a block storage device that can be attached to a VM."""
__tablename__ = 'snapshots'
__table_args__ = ()
id = Column(String(36), primary_key=True, nullable=False)
deleted = Column(String(36), default="")
@property
def name(self):
return CONF.snapshot_name_template % self.id
@property
def volume_name(self):
return CONF.volume_name_template % self.volume_id
user_id = Column(String(255))
project_id = Column(String(255))
volume_id = Column(String(36), nullable=False)
status = Column(String(255))
progress = Column(String(255))
volume_size = Column(Integer)
scheduled_at = Column(DateTime)
display_name = Column(String(255))
display_description = Column(String(255))
class BlockDeviceMapping(BASE, NovaBase):
"""Represents block device mapping that is defined by EC2."""
__tablename__ = "block_device_mapping"
__table_args__ = (
Index('snapshot_id', 'snapshot_id'),
Index('volume_id', 'volume_id'),
Index('block_device_mapping_instance_uuid_device_name_idx',
'instance_uuid', 'device_name'),
Index('block_device_mapping_instance_uuid_volume_id_idx',
'instance_uuid', 'volume_id'),
Index('block_device_mapping_instance_uuid_idx', 'instance_uuid'),
# TODO(sshturm) Should be dropped. `virtual_name` was dropped
# in 186 migration,
# Duplicates `block_device_mapping_instance_uuid_device_name_idx`
# index.
Index("block_device_mapping_instance_uuid_virtual_name"
"_device_name_idx", 'instance_uuid', 'device_name'),
)
id = Column(Integer, primary_key=True, autoincrement=True)
instance_uuid = Column(String(36), ForeignKey('instances.uuid'))
instance = orm.relationship(Instance,
backref=orm.backref('block_device_mapping'),
foreign_keys=instance_uuid,
primaryjoin='and_(BlockDeviceMapping.'
'instance_uuid=='
'Instance.uuid,'
'BlockDeviceMapping.deleted=='
'0)')
source_type = Column(String(255))
destination_type = Column(String(255))
guest_format = Column(String(255))
device_type = Column(String(255))
disk_bus = Column(String(255))
boot_index = Column(Integer)
device_name = Column(String(255))
# default=False for compatibility of the existing code.
# With EC2 API,
# default True for ami specified device.
# default False for created with other timing.
# TODO(sshturm) add default in db
delete_on_termination = Column(Boolean, default=False)
snapshot_id = Column(String(36))
volume_id = Column(String(36))
volume_size = Column(Integer)
image_id = Column(String(36))
# for no device to suppress devices.
no_device = Column(Boolean)
connection_info = Column(MediumText())
class IscsiTarget(BASE, NovaBase):
"""Represents an iscsi target for a given host."""
__tablename__ = 'iscsi_targets'
__table_args__ = (
Index('iscsi_targets_volume_id_fkey', 'volume_id'),
Index('iscsi_targets_host_idx', 'host'),
Index('iscsi_targets_host_volume_id_deleted_idx', 'host', 'volume_id',
'deleted')
)
id = Column(Integer, primary_key=True, nullable=False)
target_num = Column(Integer)
host = Column(String(255))
volume_id = Column(String(36), ForeignKey('volumes.id'))
volume = orm.relationship(Volume,
backref=orm.backref('iscsi_target', uselist=False),
foreign_keys=volume_id,
primaryjoin='and_(IscsiTarget.volume_id==Volume.id,'
'IscsiTarget.deleted==0)')
class SecurityGroupInstanceAssociation(BASE, NovaBase):
__tablename__ = 'security_group_instance_association'
__table_args__ = (
Index('security_group_instance_association_instance_uuid_idx',
'instance_uuid'),
)
id = Column(Integer, primary_key=True, nullable=False)
security_group_id = Column(Integer, ForeignKey('security_groups.id'))
instance_uuid = Column(String(36), ForeignKey('instances.uuid'))
class SecurityGroup(BASE, NovaBase):
"""Represents a security group."""
__tablename__ = 'security_groups'
__table_args__ = (
Index('uniq_security_groups0project_id0name0deleted', 'project_id',
'name', 'deleted'),
)
id = Column(Integer, primary_key=True)
name = Column(String(255))
description = Column(String(255))
user_id = Column(String(255))
project_id = Column(String(255))
instances = orm.relationship(Instance,
secondary="security_group_instance_association",
primaryjoin='and_('
'SecurityGroup.id == '
'SecurityGroupInstanceAssociation.security_group_id,'
'SecurityGroupInstanceAssociation.deleted == 0,'
'SecurityGroup.deleted == 0)',
secondaryjoin='and_('
'SecurityGroupInstanceAssociation.instance_uuid == Instance.uuid,'
# (anthony) the condition below shouldn't be necessary now that the
# association is being marked as deleted. However, removing this
# may cause existing deployments to choke, so I'm leaving it
'Instance.deleted == 0)',
backref='security_groups')
class SecurityGroupIngressRule(BASE, NovaBase):
"""Represents a rule in a security group."""
__tablename__ = 'security_group_rules'
__table_args__ = ()
id = Column(Integer, primary_key=True)
parent_group_id = Column(Integer, ForeignKey('security_groups.id'))
parent_group = orm.relationship("SecurityGroup", backref="rules",
foreign_keys=parent_group_id,
primaryjoin='and_('
'SecurityGroupIngressRule.parent_group_id == SecurityGroup.id,'
'SecurityGroupIngressRule.deleted == 0)')
protocol = Column(String(255))
from_port = Column(Integer)
to_port = Column(Integer)
cidr = Column(types.CIDR())
    # Note: This is not the parent SecurityGroup. It's the SecurityGroup we're
    # granting access to.
group_id = Column(Integer, ForeignKey('security_groups.id'))
grantee_group = orm.relationship("SecurityGroup",
foreign_keys=group_id,
primaryjoin='and_('
'SecurityGroupIngressRule.group_id == SecurityGroup.id,'
'SecurityGroupIngressRule.deleted == 0)')
class SecurityGroupIngressDefaultRule(BASE, NovaBase):
__tablename__ = 'security_group_default_rules'
__table_args__ = ()
id = Column(Integer, primary_key=True, nullable=False)
protocol = Column(String(5)) # "tcp", "udp" or "icmp"
from_port = Column(Integer)
to_port = Column(Integer)
cidr = Column(types.CIDR())
class ProviderFirewallRule(BASE, NovaBase):
"""Represents a rule in a security group."""
__tablename__ = 'provider_fw_rules'
__table_args__ = ()
id = Column(Integer, primary_key=True, nullable=False)
protocol = Column(String(5)) # "tcp", "udp", or "icmp"
from_port = Column(Integer)
to_port = Column(Integer)
cidr = Column(types.CIDR())
class KeyPair(BASE, NovaBase):
"""Represents a public key pair for ssh."""
__tablename__ = 'key_pairs'
__table_args__ = (
schema.UniqueConstraint("user_id", "name", "deleted",
name="uniq_key_pairs0user_id0name0deleted"),
)
id = Column(Integer, primary_key=True, nullable=False)
name = Column(String(255))
user_id = Column(String(255))
fingerprint = Column(String(255))
public_key = Column(MediumText())
class Migration(BASE, NovaBase):
"""Represents a running host-to-host migration."""
__tablename__ = 'migrations'
__table_args__ = (
Index('migrations_instance_uuid_and_status_idx', 'instance_uuid',
'status'),
Index('migrations_by_host_nodes_and_status_idx', 'deleted',
'source_compute', 'dest_compute', 'source_node', 'dest_node',
'status'),
)
id = Column(Integer, primary_key=True, nullable=False)
# NOTE(tr3buchet): the ____compute variables are instance['host']
source_compute = Column(String(255))
dest_compute = Column(String(255))
# nodes are equivalent to a compute node's 'hypervisor_hostname'
source_node = Column(String(255))
dest_node = Column(String(255))
# NOTE(tr3buchet): dest_host, btw, is an ip address
dest_host = Column(String(255))
old_instance_type_id = Column(Integer())
new_instance_type_id = Column(Integer())
instance_uuid = Column(String(36), ForeignKey('instances.uuid'))
# TODO(_cerberus_): enum
status = Column(String(255))
instance = orm.relationship("Instance", foreign_keys=instance_uuid,
primaryjoin='and_(Migration.instance_uuid == '
'Instance.uuid, Instance.deleted == '
'0)')
class Network(BASE, NovaBase):
"""Represents a network."""
__tablename__ = 'networks'
__table_args__ = (
schema.UniqueConstraint("vlan", "deleted",
name="uniq_networks0vlan0deleted"),
Index('networks_bridge_deleted_idx', 'bridge', 'deleted'),
Index('networks_host_idx', 'host'),
Index('networks_project_id_deleted_idx', 'project_id', 'deleted'),
Index('networks_uuid_project_id_deleted_idx', 'uuid',
'project_id', 'deleted'),
Index('networks_vlan_deleted_idx', 'vlan', 'deleted'),
Index('networks_cidr_v6_idx', 'cidr_v6')
)
id = Column(Integer, primary_key=True, nullable=False)
label = Column(String(255))
injected = Column(Boolean, default=False)
cidr = Column(types.CIDR())
cidr_v6 = Column(types.CIDR())
multi_host = Column(Boolean, default=False)
gateway_v6 = Column(types.IPAddress())
netmask_v6 = Column(types.IPAddress())
netmask = Column(types.IPAddress())
bridge = Column(String(255))
bridge_interface = Column(String(255))
gateway = Column(types.IPAddress())
broadcast = Column(types.IPAddress())
dns1 = Column(types.IPAddress())
dns2 = Column(types.IPAddress())
vlan = Column(Integer)
vpn_public_address = Column(types.IPAddress())
vpn_public_port = Column(Integer)
vpn_private_address = Column(types.IPAddress())
dhcp_start = Column(types.IPAddress())
rxtx_base = Column(Integer)
project_id = Column(String(255))
priority = Column(Integer)
host = Column(String(255)) # , ForeignKey('hosts.id'))
uuid = Column(String(36))
mtu = Column(Integer)
dhcp_server = Column(types.IPAddress())
enable_dhcp = Column(Boolean, default=True)
share_address = Column(Boolean, default=False)
class VirtualInterface(BASE, NovaBase):
"""Represents a virtual interface on an instance."""
__tablename__ = 'virtual_interfaces'
__table_args__ = (
schema.UniqueConstraint("address", "deleted",
name="uniq_virtual_interfaces0address0deleted"),
Index('network_id', 'network_id'),
Index('virtual_interfaces_instance_uuid_fkey', 'instance_uuid'),
)
id = Column(Integer, primary_key=True, nullable=False)
address = Column(String(255))
network_id = Column(Integer)
instance_uuid = Column(String(36), ForeignKey('instances.uuid'))
uuid = Column(String(36))
# TODO(vish): can these both come from the same baseclass?
class FixedIp(BASE, NovaBase):
"""Represents a fixed ip for an instance."""
__tablename__ = 'fixed_ips'
__table_args__ = (
schema.UniqueConstraint(
"address", "deleted", name="uniq_fixed_ips0address0deleted"),
Index('fixed_ips_virtual_interface_id_fkey', 'virtual_interface_id'),
Index('network_id', 'network_id'),
Index('address', 'address'),
Index('fixed_ips_instance_uuid_fkey', 'instance_uuid'),
Index('fixed_ips_host_idx', 'host'),
Index('fixed_ips_network_id_host_deleted_idx', 'network_id', 'host',
'deleted'),
Index('fixed_ips_address_reserved_network_id_deleted_idx',
'address', 'reserved', 'network_id', 'deleted'),
Index('fixed_ips_deleted_allocated_idx', 'address', 'deleted',
'allocated')
)
id = Column(Integer, primary_key=True)
address = Column(types.IPAddress())
network_id = Column(Integer)
virtual_interface_id = Column(Integer)
instance_uuid = Column(String(36), ForeignKey('instances.uuid'))
# associated means that a fixed_ip has its instance_id column set
# allocated means that a fixed_ip has its virtual_interface_id column set
# TODO(sshturm) add default in db
allocated = Column(Boolean, default=False)
# leased means dhcp bridge has leased the ip
# TODO(sshturm) add default in db
leased = Column(Boolean, default=False)
# TODO(sshturm) add default in db
reserved = Column(Boolean, default=False)
host = Column(String(255))
network = orm.relationship(Network,
backref=orm.backref('fixed_ips'),
foreign_keys=network_id,
primaryjoin='and_('
'FixedIp.network_id == Network.id,'
'FixedIp.deleted == 0,'
'Network.deleted == 0)')
instance = orm.relationship(Instance,
foreign_keys=instance_uuid,
primaryjoin='and_('
'FixedIp.instance_uuid == Instance.uuid,'
'FixedIp.deleted == 0,'
'Instance.deleted == 0)')
class FloatingIp(BASE, NovaBase):
"""Represents a floating ip that dynamically forwards to a fixed ip."""
__tablename__ = 'floating_ips'
__table_args__ = (
schema.UniqueConstraint("address", "deleted",
name="uniq_floating_ips0address0deleted"),
Index('fixed_ip_id', 'fixed_ip_id'),
Index('floating_ips_host_idx', 'host'),
Index('floating_ips_project_id_idx', 'project_id'),
Index('floating_ips_pool_deleted_fixed_ip_id_project_id_idx',
'pool', 'deleted', 'fixed_ip_id', 'project_id')
)
id = Column(Integer, primary_key=True)
address = Column(types.IPAddress())
fixed_ip_id = Column(Integer)
project_id = Column(String(255))
host = Column(String(255)) # , ForeignKey('hosts.id'))
auto_assigned = Column(Boolean, default=False)
# TODO(sshturm) add default in db
pool = Column(String(255))
interface = Column(String(255))
fixed_ip = orm.relationship(FixedIp,
backref=orm.backref('floating_ips'),
foreign_keys=fixed_ip_id,
primaryjoin='and_('
'FloatingIp.fixed_ip_id == FixedIp.id,'
'FloatingIp.deleted == 0,'
'FixedIp.deleted == 0)')
class DNSDomain(BASE, NovaBase):
"""Represents a DNS domain with availability zone or project info."""
__tablename__ = 'dns_domains'
__table_args__ = (
Index('project_id', 'project_id'),
Index('dns_domains_domain_deleted_idx', 'domain', 'deleted'),
)
deleted = Column(Boolean, default=False)
domain = Column(String(255), primary_key=True)
scope = Column(String(255))
availability_zone = Column(String(255))
project_id = Column(String(255))
class ConsolePool(BASE, NovaBase):
"""Represents pool of consoles on the same physical node."""
__tablename__ = 'console_pools'
__table_args__ = (
schema.UniqueConstraint(
"host", "console_type", "compute_host", "deleted",
name="uniq_console_pools0host0console_type0compute_host0deleted"),
)
id = Column(Integer, primary_key=True)
address = Column(types.IPAddress())
username = Column(String(255))
password = Column(String(255))
console_type = Column(String(255))
public_hostname = Column(String(255))
host = Column(String(255))
compute_host = Column(String(255))
class Console(BASE, NovaBase):
"""Represents a console session for an instance."""
__tablename__ = 'consoles'
__table_args__ = (
Index('consoles_instance_uuid_idx', 'instance_uuid'),
)
id = Column(Integer, primary_key=True)
instance_name = Column(String(255))
instance_uuid = Column(String(36), ForeignKey('instances.uuid'))
password = Column(String(255))
port = Column(Integer)
pool_id = Column(Integer, ForeignKey('console_pools.id'))
pool = orm.relationship(ConsolePool, backref=orm.backref('consoles'))
class InstanceMetadata(BASE, NovaBase):
"""Represents a user-provided metadata key/value pair for an instance."""
__tablename__ = 'instance_metadata'
__table_args__ = (
Index('instance_metadata_instance_uuid_idx', 'instance_uuid'),
)
id = Column(Integer, primary_key=True)
key = Column(String(255))
value = Column(String(255))
instance_uuid = Column(String(36), ForeignKey('instances.uuid'))
instance = orm.relationship(Instance, backref="metadata",
foreign_keys=instance_uuid,
primaryjoin='and_('
'InstanceMetadata.instance_uuid == '
'Instance.uuid,'
'InstanceMetadata.deleted == 0)')
class InstanceSystemMetadata(BASE, NovaBase):
"""Represents a system-owned metadata key/value pair for an instance."""
__tablename__ = 'instance_system_metadata'
__table_args__ = ()
id = Column(Integer, primary_key=True)
key = Column(String(255), nullable=False)
value = Column(String(255))
instance_uuid = Column(String(36),
ForeignKey('instances.uuid'),
nullable=False)
primary_join = ('and_(InstanceSystemMetadata.instance_uuid == '
'Instance.uuid, InstanceSystemMetadata.deleted == 0)')
instance = orm.relationship(Instance, backref="system_metadata",
foreign_keys=instance_uuid,
primaryjoin=primary_join)
class InstanceTypeProjects(BASE, NovaBase):
"""Represent projects associated instance_types."""
__tablename__ = "instance_type_projects"
__table_args__ = (schema.UniqueConstraint(
"instance_type_id", "project_id", "deleted",
name="uniq_instance_type_projects0instance_type_id0project_id0deleted"
),
)
id = Column(Integer, primary_key=True)
instance_type_id = Column(Integer, ForeignKey('instance_types.id'),
nullable=False)
project_id = Column(String(255))
instance_type = orm.relationship(InstanceTypes, backref="projects",
foreign_keys=instance_type_id,
primaryjoin='and_('
'InstanceTypeProjects.instance_type_id == InstanceTypes.id,'
'InstanceTypeProjects.deleted == 0)')
class InstanceTypeExtraSpecs(BASE, NovaBase):
"""Represents additional specs as key/value pairs for an instance_type."""
__tablename__ = 'instance_type_extra_specs'
__table_args__ = (
Index('instance_type_extra_specs_instance_type_id_key_idx',
'instance_type_id', 'key'),
schema.UniqueConstraint(
"instance_type_id", "key", "deleted",
name=("uniq_instance_type_extra_specs0"
"instance_type_id0key0deleted")
),
)
id = Column(Integer, primary_key=True)
key = Column(String(255))
value = Column(String(255))
instance_type_id = Column(Integer, ForeignKey('instance_types.id'),
nullable=False)
instance_type = orm.relationship(InstanceTypes, backref="extra_specs",
foreign_keys=instance_type_id,
primaryjoin='and_('
'InstanceTypeExtraSpecs.instance_type_id == InstanceTypes.id,'
'InstanceTypeExtraSpecs.deleted == 0)')
class Cell(BASE, NovaBase):
"""Represents parent and child cells of this cell. Cells can
have multiple parents and children, so there could be any number
of entries with is_parent=True or False
"""
__tablename__ = 'cells'
__table_args__ = (schema.UniqueConstraint(
"name", "deleted", name="uniq_cells0name0deleted"
),
)
id = Column(Integer, primary_key=True)
# Name here is the 'short name' of a cell. For instance: 'child1'
name = Column(String(255))
api_url = Column(String(255))
transport_url = Column(String(255), nullable=False)
weight_offset = Column(Float(), default=0.0)
weight_scale = Column(Float(), default=1.0)
is_parent = Column(Boolean())
class AggregateHost(BASE, NovaBase):
"""Represents a host that is member of an aggregate."""
__tablename__ = 'aggregate_hosts'
__table_args__ = (schema.UniqueConstraint(
"host", "aggregate_id", "deleted",
name="uniq_aggregate_hosts0host0aggregate_id0deleted"
),
)
id = Column(Integer, primary_key=True, autoincrement=True)
host = Column(String(255))
aggregate_id = Column(Integer, ForeignKey('aggregates.id'), nullable=False)
class AggregateMetadata(BASE, NovaBase):
"""Represents a metadata key/value pair for an aggregate."""
__tablename__ = 'aggregate_metadata'
__table_args__ = (
schema.UniqueConstraint("aggregate_id", "key", "deleted",
name="uniq_aggregate_metadata0aggregate_id0key0deleted"
),
Index('aggregate_metadata_key_idx', 'key'),
)
id = Column(Integer, primary_key=True)
key = Column(String(255), nullable=False)
value = Column(String(255), nullable=False)
aggregate_id = Column(Integer, ForeignKey('aggregates.id'), nullable=False)
class Aggregate(BASE, NovaBase):
"""Represents a cluster of hosts that exists in this zone."""
__tablename__ = 'aggregates'
__table_args__ = ()
id = Column(Integer, primary_key=True, autoincrement=True)
name = Column(String(255))
_hosts = orm.relationship(AggregateHost,
primaryjoin='and_('
'Aggregate.id == AggregateHost.aggregate_id,'
'AggregateHost.deleted == 0,'
'Aggregate.deleted == 0)')
_metadata = orm.relationship(AggregateMetadata,
primaryjoin='and_('
'Aggregate.id == AggregateMetadata.aggregate_id,'
'AggregateMetadata.deleted == 0,'
'Aggregate.deleted == 0)')
@property
def _extra_keys(self):
return ['hosts', 'metadetails', 'availability_zone']
@property
def hosts(self):
return [h.host for h in self._hosts]
@property
def metadetails(self):
return dict([(m.key, m.value) for m in self._metadata])
@property
def availability_zone(self):
if 'availability_zone' not in self.metadetails:
return None
return self.metadetails['availability_zone']
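    # --- Illustrative note (editor's sketch) ---
    # An aggregate whose AggregateMetadata rows are
    # {'availability_zone': 'az1', 'ssd': 'true'} and whose AggregateHost rows
    # name 'node1' and 'node2' exposes:
    #     aggregate.hosts             -> ['node1', 'node2']
    #     aggregate.metadetails       -> {'availability_zone': 'az1', 'ssd': 'true'}
    #     aggregate.availability_zone -> 'az1'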
class AgentBuild(BASE, NovaBase):
"""Represents an agent build."""
__tablename__ = 'agent_builds'
__table_args__ = (
Index('agent_builds_hypervisor_os_arch_idx', 'hypervisor', 'os',
'architecture'),
schema.UniqueConstraint("hypervisor", "os", "architecture", "deleted",
name="uniq_agent_builds0hypervisor0os0architecture0deleted"),
)
id = Column(Integer, primary_key=True)
hypervisor = Column(String(255))
os = Column(String(255))
architecture = Column(String(255))
version = Column(String(255))
url = Column(String(255))
md5hash = Column(String(255))
class BandwidthUsage(BASE, NovaBase):
"""Cache for instance bandwidth usage data pulled from the hypervisor."""
__tablename__ = 'bw_usage_cache'
__table_args__ = (
Index('bw_usage_cache_uuid_start_period_idx', 'uuid',
'start_period'),
)
id = Column(Integer, primary_key=True, nullable=False)
uuid = Column(String(36))
mac = Column(String(255))
start_period = Column(DateTime, nullable=False)
last_refreshed = Column(DateTime)
bw_in = Column(BigInteger)
bw_out = Column(BigInteger)
last_ctr_in = Column(BigInteger)
last_ctr_out = Column(BigInteger)
class VolumeUsage(BASE, NovaBase):
"""Cache for volume usage data pulled from the hypervisor."""
__tablename__ = 'volume_usage_cache'
__table_args__ = ()
id = Column(Integer, primary_key=True, nullable=False)
volume_id = Column(String(36), nullable=False)
instance_uuid = Column(String(36))
project_id = Column(String(36))
user_id = Column(String(64))
availability_zone = Column(String(255))
tot_last_refreshed = Column(DateTime)
tot_reads = Column(BigInteger, default=0)
tot_read_bytes = Column(BigInteger, default=0)
tot_writes = Column(BigInteger, default=0)
tot_write_bytes = Column(BigInteger, default=0)
curr_last_refreshed = Column(DateTime)
curr_reads = Column(BigInteger, default=0)
curr_read_bytes = Column(BigInteger, default=0)
curr_writes = Column(BigInteger, default=0)
curr_write_bytes = Column(BigInteger, default=0)
class S3Image(BASE, NovaBase):
"""Compatibility layer for the S3 image service talking to Glance."""
__tablename__ = 's3_images'
__table_args__ = ()
id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
uuid = Column(String(36), nullable=False)
class VolumeIdMapping(BASE, NovaBase):
"""Compatibility layer for the EC2 volume service."""
__tablename__ = 'volume_id_mappings'
__table_args__ = ()
id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
uuid = Column(String(36), nullable=False)
class SnapshotIdMapping(BASE, NovaBase):
"""Compatibility layer for the EC2 snapshot service."""
__tablename__ = 'snapshot_id_mappings'
__table_args__ = ()
id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
uuid = Column(String(36), nullable=False)
class InstanceFault(BASE, NovaBase):
__tablename__ = 'instance_faults'
__table_args__ = (
Index('instance_faults_host_idx', 'host'),
Index('instance_faults_instance_uuid_deleted_created_at_idx',
'instance_uuid', 'deleted', 'created_at')
)
id = Column(Integer, primary_key=True, nullable=False)
instance_uuid = Column(String(36),
ForeignKey('instances.uuid'))
code = Column(Integer(), nullable=False)
message = Column(String(255))
details = Column(MediumText())
host = Column(String(255))
class InstanceAction(BASE, NovaBase):
"""Track client actions on an instance.
The intention is that there will only be one of these per user request. A
lookup by (instance_uuid, request_id) should always return a single result.
"""
__tablename__ = 'instance_actions'
__table_args__ = (
Index('instance_uuid_idx', 'instance_uuid'),
Index('request_id_idx', 'request_id')
)
id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
action = Column(String(255))
instance_uuid = Column(String(36),
ForeignKey('instances.uuid'))
request_id = Column(String(255))
user_id = Column(String(255))
project_id = Column(String(255))
start_time = Column(DateTime, default=timeutils.utcnow)
finish_time = Column(DateTime)
message = Column(String(255))
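# --- Illustrative sketch (editor's addition, not part of nova) ---
# The docstring above implies lookups of this shape (session handling elided;
# hypothetical usage, not a real nova.db API):
#     action = (session.query(InstanceAction)
#               .filter_by(instance_uuid=uuid, request_id=req_id)
#               .first())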
class InstanceActionEvent(BASE, NovaBase):
"""Track events that occur during an InstanceAction."""
__tablename__ = 'instance_actions_events'
__table_args__ = ()
id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
event = Column(String(255))
action_id = Column(Integer, ForeignKey('instance_actions.id'))
start_time = Column(DateTime, default=timeutils.utcnow)
finish_time = Column(DateTime)
result = Column(String(255))
traceback = Column(Text)
host = Column(String(255))
details = Column(Text)
class InstanceIdMapping(BASE, NovaBase):
"""Compatibility layer for the EC2 instance service."""
__tablename__ = 'instance_id_mappings'
__table_args__ = (
Index('ix_instance_id_mappings_uuid', 'uuid'),
)
id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
uuid = Column(String(36), nullable=False)
class TaskLog(BASE, NovaBase):
"""Audit log for background periodic tasks."""
__tablename__ = 'task_log'
__table_args__ = (
schema.UniqueConstraint(
'task_name', 'host', 'period_beginning', 'period_ending',
name="uniq_task_log0task_name0host0period_beginning0period_ending"
),
Index('ix_task_log_period_beginning', 'period_beginning'),
Index('ix_task_log_host', 'host'),
Index('ix_task_log_period_ending', 'period_ending'),
)
id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
task_name = Column(String(255), nullable=False)
state = Column(String(255), nullable=False)
host = Column(String(255), nullable=False)
period_beginning = Column(DateTime, default=timeutils.utcnow,
nullable=False)
period_ending = Column(DateTime, default=timeutils.utcnow,
nullable=False)
message = Column(String(255), nullable=False)
task_items = Column(Integer(), default=0)
errors = Column(Integer(), default=0)
class InstanceGroupMember(BASE, NovaBase):
"""Represents the members for an instance group."""
__tablename__ = 'instance_group_member'
__table_args__ = (
Index('instance_group_member_instance_idx', 'instance_id'),
)
id = Column(Integer, primary_key=True, nullable=False)
instance_id = Column(String(255))
group_id = Column(Integer, ForeignKey('instance_groups.id'),
nullable=False)
class InstanceGroupPolicy(BASE, NovaBase):
"""Represents the policy type for an instance group."""
__tablename__ = 'instance_group_policy'
__table_args__ = (
Index('instance_group_policy_policy_idx', 'policy'),
)
id = Column(Integer, primary_key=True, nullable=False)
policy = Column(String(255))
group_id = Column(Integer, ForeignKey('instance_groups.id'),
nullable=False)
class InstanceGroup(BASE, NovaBase):
"""Represents an instance group.
A group will maintain a collection of instances and the relationship
between them.
"""
__tablename__ = 'instance_groups'
__table_args__ = (
schema.UniqueConstraint("uuid", "deleted",
name="uniq_instance_groups0uuid0deleted"),
)
id = Column(Integer, primary_key=True, autoincrement=True)
user_id = Column(String(255))
project_id = Column(String(255))
uuid = Column(String(36), nullable=False)
name = Column(String(255))
_policies = orm.relationship(InstanceGroupPolicy, primaryjoin='and_('
'InstanceGroup.id == InstanceGroupPolicy.group_id,'
'InstanceGroupPolicy.deleted == 0,'
'InstanceGroup.deleted == 0)')
_members = orm.relationship(InstanceGroupMember, primaryjoin='and_('
'InstanceGroup.id == InstanceGroupMember.group_id,'
'InstanceGroupMember.deleted == 0,'
'InstanceGroup.deleted == 0)')
@property
def policies(self):
return [p.policy for p in self._policies]
@property
def members(self):
return [m.instance_id for m in self._members]
class PciDevice(BASE, NovaBase):
"""Represents a PCI host device that can be passed through to instances.
"""
__tablename__ = 'pci_devices'
__table_args__ = (
Index('ix_pci_devices_compute_node_id_deleted',
'compute_node_id', 'deleted'),
Index('ix_pci_devices_instance_uuid_deleted',
'instance_uuid', 'deleted'),
schema.UniqueConstraint(
"compute_node_id", "address", "deleted",
name="uniq_pci_devices0compute_node_id0address0deleted")
)
id = Column(Integer, primary_key=True)
compute_node_id = Column(Integer, ForeignKey('compute_nodes.id'),
nullable=False)
# physical address of device domain:bus:slot.func (0000:09:01.1)
address = Column(String(12), nullable=False)
vendor_id = Column(String(4), nullable=False)
product_id = Column(String(4), nullable=False)
dev_type = Column(String(8), nullable=False)
dev_id = Column(String(255))
    # label is an abstract device name used to unify devices that have the
    # same functionality but different addresses or hosts.
label = Column(String(255), nullable=False)
status = Column(String(36), nullable=False)
# the request_id is used to identify a device that is allocated for a
# particular request
request_id = Column(String(36), nullable=True)
extra_info = Column(Text)
instance_uuid = Column(String(36))
instance = orm.relationship(Instance, backref="pci_devices",
foreign_keys=instance_uuid,
primaryjoin='and_('
'PciDevice.instance_uuid == Instance.uuid,'
'PciDevice.deleted == 0)')
|
{
"content_hash": "7c8f2a16075e39cfcbf04a1df683475c",
"timestamp": "",
"source": "github",
"line_count": 1400,
"max_line_length": 79,
"avg_line_length": 36.97071428571429,
"alnum_prop": 0.6212059738402983,
"repo_name": "sajeeshcs/nested_quota",
"id": "7d14a443a69dd7821f29c1e45fac563668edb702",
"size": "52598",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/db/sqlalchemy/models.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "15252346"
},
{
"name": "Shell",
"bytes": "18705"
}
],
"symlink_target": ""
}
|
import os
import urwid
from twython.twython import Twython, TwythonError
import command
from messages import messages as _
command_handler = None
class EditHandler(urwid.Edit):
def keypress(self, size, key):
if key == 'enter':
data = self._edit_text.split(None, 1)
if len(data) > 0:
cmd = data[0]
args = data[1].split() if len(data) > 1 else []
command_handler.parse_command(cmd, args)
return 'esc'
if key == 'tab':
if len(self._edit_text.split()) == 1:
command_handler.tab_completion(self, self._edit_text)
return
return super(EditHandler, self).keypress(size, key)
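# --- Illustrative note (editor's sketch, not part of twiclr) ---
# With a hypothetical ':follow someuser<enter>', edit_text holds
# 'follow someuser' (the ':' is only the caption), so split(None, 1) hands
# cmd='follow' and args=['someuser'] to the CommandHandler; returning 'esc'
# then lets the escape bubble up to MainHandler.unhandled_input, which clears
# the prompt and moves focus back to the body.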
class MainHandler:
palette = [
('status', 'white,bold', 'dark gray'),
('error', 'dark red,bold', 'black'),
('info', 'white,bold', 'default'),
]
def __init__(self):
global command_handler
command_handler = command.CommandHandler(self)
self.t = None
        # Fall back to $HOME when XDG_CONFIG_HOME is unset or empty.
        if os.environ.get('XDG_CONFIG_HOME'):
            path = os.environ['XDG_CONFIG_HOME']
        else:
            path = os.environ['HOME']
self.basepath = os.path.join(path, 'twiclr')
if not os.path.isdir(self.basepath):
os.mkdir(self.basepath)
self.test_login()
def run(self):
self.screen = urwid.raw_display.Screen()
self.screen.tty_signal_keys(*['undefined'] * 5)
self.footer = {
'edit': EditHandler(),
'error': urwid.Text(('error', '')),
'list': urwid.ListBox(urwid.SimpleListWalker([])),
}
if self.t:
msg = _['welcome_login'].format(**self.user)
else:
msg = _['welcome_guest']
self.body = urwid.SimpleListWalker([
urwid.Text(u'twiclr\nTerminal twitter client.', align='center'),
urwid.Text(msg, align='center')
])
self.statusbar = urwid.Text(('status', ''))
self.inner = urwid.Frame(urwid.Filler(urwid.Pile(self.body)),
footer=urwid.AttrMap(self.statusbar, 'status')
)
self.outer = urwid.Frame(self.inner,
footer=self.footer['edit'])
self.loop = urwid.MainLoop(self.outer, self.palette, self.screen,
unhandled_input=self.unhandled_input, handle_mouse=False)
self.loop.run()
def test_login(self):
if not os.path.isfile(os.path.join(self.basepath, 'keys.txt')):
return
with open(os.path.join(self.basepath, 'keys.txt')) as f:
data = f.readlines()
self.oauth_data = {}
for d in data:
key, value = d.split('=', 1)
self.oauth_data[key] = value.rstrip('\n')
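        # --- Illustrative note (editor's sketch) ---
        # keys.txt is expected to hold one 'key=value' pair per line, e.g.
        # (placeholder values):
        #     oauth_token=12345-abcde
        #     oauth_token_secret=fghij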
try:
self.t = Twython(command.konsumer_ceys[0],
command.konsumer_ceys[1],
self.oauth_data['oauth_token'],
self.oauth_data['oauth_token_secret'])
self.user = self.t.verifyCredentials()
except TwythonError:
self.t = None
def unhandled_input(self, key):
# Set focus to status-edit
if key == ':':
self.outer.set_footer(self.footer['edit'])
self.footer['edit'].set_caption(':')
self.outer.set_focus('footer')
elif key == 'esc':
self.footer['edit'].set_caption('')
self.footer['edit'].set_edit_text('')
self.outer.set_focus('body')
def error(self, msg):
self.footer['error'].set_text(('error', msg))
self.outer.set_footer(self.footer['error'])
def show_info(self, msg):
self.footer['error'].set_text(('info', msg))
self.outer.set_footer(self.footer['error'])
|
{
"content_hash": "d502673d48f737908b40be39b3f3ec8a",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 76,
"avg_line_length": 31.932773109243698,
"alnum_prop": 0.5371052631578948,
"repo_name": "xintron/twiclr",
"id": "b9bc2d7c7a9eef395771f3e8820170ac74dd820d",
"size": "5380",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "twiclr/twiclr.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "15744"
}
],
"symlink_target": ""
}
|
import base64
import cgi
import hashlib
import hmac
import md5
import os
import re
import sys
import time
import traceback
import types
from ZSI.dispatch import _CGISendXML, _CGISendFault
from ZSI import Fault
from tashi.aws.wsdl.AmazonEC2_services_server import *
from tashi.aws.impl import address, bundle, images, instances, keys, location, monitor, other, reservation, security, volume
from tashi.aws.util import *
import tashi.aws.util
import tashi
import trans
for mod in [address, bundle, images, instances, keys, location, monitor, other, reservation, security, volume]:
for fname in mod.__dict__.get('functions', []):
globals()[fname] = QUERY(mod.__dict__.get(fname))
userDict = {}
def loadUserDict():
f = open("/var/lib/tashi-ec2/access.txt")
data = f.read()
f.close()
for l in data.split("\n"):
ws = l.strip().split()
if (len(ws) == 3):
(accessKey, secretAccessKey, authenticatedUser) = ws
userDict[accessKey] = (secretAccessKey, authenticatedUser)
def AsQuery():
'''Handle the Amazon QUERY interface'''
try:
form = cgi.FieldStorage()
args = {}
signStr = ""
for var in form:
args[var] = form[var].value
if (var != "Signature"):
signStr += var + args[var]
log("[QUERY] %s=%s\n" % (var, args[var]))
secretKey = userDict[args['AWSAccessKeyId']][0]
calculatedSig = base64.b64encode(hmac.new(secretKey, signStr, hashlib.sha1).digest())
if (args['Signature'] != calculatedSig):
_CGISendFault(Fault(Fault.Client, 'Could not authenticate'))
return
tashi.aws.util.authorizedUser = userDict[args['AWSAccessKeyId']][1]
log("[AUTH] authorizedUser = %s\n" % (tashi.aws.util.authorizedUser))
functionName = args['Action']
res = eval("%s(args)" % (functionName))
_CGISendXML(res)
except Exception, e:
_CGISendFault(Fault(Fault.Client, str(e)))
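# --- Illustrative sketch (editor's addition, not part of tashi) ---
# How a client would produce the signature that AsQuery() verifies: concatenate
# every non-Signature field name and value, in the same iteration order the
# server sees them, then HMAC-SHA1 the result with the shared secret key.
def _exampleSign(params, secretKey):
    signStr = ""
    for var in params:
        if var != "Signature":
            signStr += var + params[var]
    return base64.b64encode(hmac.new(secretKey, signStr, hashlib.sha1).digest())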
if __name__ == "__main__" :
log("%s\n" % (str(time.time())))
for var in os.environ:
log("[CGI] %s=%s\n" % (var, os.environ[var]))
try:
loadUserDict()
AsQuery()
except:
log("%s\n" % (traceback.format_exc(sys.exc_info())))
|
{
"content_hash": "a4d6c63c54a032b274c93a7ae4a7a0ab",
"timestamp": "",
"source": "github",
"line_count": 69,
"max_line_length": 124,
"avg_line_length": 29.6231884057971,
"alnum_prop": 0.684931506849315,
"repo_name": "stroucki/tashi",
"id": "1cea92dbcd2e68e97b5a5288f78d6a7d94d271de",
"size": "2859",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "src/tashi/aws/query.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "9326"
},
{
"name": "PHP",
"bytes": "28750"
},
{
"name": "Python",
"bytes": "606584"
},
{
"name": "Shell",
"bytes": "28185"
}
],
"symlink_target": ""
}
|
from collections import OrderedDict
from django.contrib import admin
from django.conf.urls import patterns
from django.shortcuts import render_to_response
class ParticipantAdmin(admin.ModelAdmin):
def link(self, instance):
url = instance.start_url()
return '<a href="{}" target="_blank">{}</a>'.format(url, 'Link')
link.short_description = "Participant link"
link.allow_tags = True
list_filter = ['match', 'treatment', 'experiment']
class MatchAdmin(admin.ModelAdmin):
list_filter = ['treatment', 'experiment']
class TreatmentAdmin(admin.ModelAdmin):
def link(self, instance):
url = instance.start_url()
return '<a href="{}" target="_blank">{}</a>'.format(url, 'Link')
link.short_description = "Demo link"
link.allow_tags = True
list_filter = ['experiment']
class ExperimentAdmin(admin.ModelAdmin):
def mturk_link(self, instance):
url = instance.mturk_start_url()
return '<a href="{}" target="_blank">{}</a>'.format(url, 'Link')
mturk_link.short_description = "MTurk link (requires workerId to be appended to URL with JavaScript)"
mturk_link.allow_tags = True
def payments_link(self, instance):
return '<a href="{}" target="_blank">{}</a>'.format('{}/payments/'.format(instance.pk), 'Link')
payments_link.short_description = "Payments page"
payments_link.allow_tags = True
def experimenter_input_link(self, instance):
url = instance.experimenter_input_url()
return '<a href="{}" target="_blank">{}</a>'.format(url, 'Link')
experimenter_input_link.short_description = 'Link for experimenter input during gameplay'
experimenter_input_link.allow_tags = True
def get_urls(self):
urls = super(ExperimentAdmin, self).get_urls()
my_urls = patterns('',
(r'^(?P<pk>\d+)/payments/$', self.admin_site.admin_view(self.payments))
)
return my_urls + urls
def payments(self, request, pk):
experiment = self.model.objects.get(pk=pk)
participants = experiment.participants()
return render_to_response('admin/Payments.html',
{'participants': participants,
'total_payments': sum(p.total_pay() for p in participants if p.total_pay())})
def remove_duplicates(lst):
return list(OrderedDict.fromkeys(lst))
def get_list_display(ModelName, readonly_fields, first_fields):
all_field_names = [field.name for field in ModelName._meta.fields]
# make sure they're actually in the model.
first_fields = [f for f in first_fields if f in all_field_names]
list_display = first_fields + readonly_fields + all_field_names
return remove_duplicates(list_display)
def get_readonly_fields(fields_common_to_all_models, fields_specific_to_this_subclass):
return remove_duplicates(fields_common_to_all_models + fields_specific_to_this_subclass)
def get_participant_readonly_fields(fields_specific_to_this_subclass):
return get_readonly_fields(['link', 'bonus_display'], fields_specific_to_this_subclass)
def get_participant_list_display(Participant, readonly_fields, first_fields=None):
first_fields = ['__unicode__', 'id', 'experiment', 'treatment', 'match', 'has_visited'] + (first_fields or [])
return get_list_display(Participant, readonly_fields, first_fields)
def get_match_readonly_fields(fields_specific_to_this_subclass):
return get_readonly_fields([], fields_specific_to_this_subclass)
def get_match_list_display(Match, readonly_fields, first_fields=None):
first_fields = ['__unicode__', 'id', 'experiment', 'treatment', 'time_started'] + (first_fields or [])
return get_list_display(Match, readonly_fields, first_fields)
def get_treatment_readonly_fields(fields_specific_to_this_subclass):
return get_readonly_fields(['link'], fields_specific_to_this_subclass)
def get_treatment_list_display(Treatment, readonly_fields, first_fields=None):
first_fields = ['__unicode__', 'id', 'description', 'experiment'] + (first_fields or [])
return get_list_display(Treatment, readonly_fields, first_fields)
def get_experiment_readonly_fields(fields_specific_to_this_subclass):
return get_readonly_fields(['mturk_link', 'experimenter_input_link', 'payments_link'], fields_specific_to_this_subclass)
def get_experiment_list_display(Experiment, readonly_fields, first_fields=None):
first_fields = ['__unicode__', 'id', 'description'] + (first_fields or [])
return get_list_display(Experiment, readonly_fields, first_fields)
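# Illustrative sketch, not part of ptree/common.py: a game-specific admin.py
# could combine the base admin classes and helpers above roughly like this.
# The "mygame" app, its Participant model and the 'payoff_display' field are
# hypothetical names used only for the example.
#
#     from ptree import common
#     from mygame.models import Participant
#
#     class ParticipantAdmin(common.ParticipantAdmin):
#         readonly_fields = common.get_participant_readonly_fields(['payoff_display'])
#         list_display = common.get_participant_list_display(Participant,
#                                                             readonly_fields)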
|
{
"content_hash": "fee4ef81055c84340f731b9af7692623",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 124,
"avg_line_length": 42.728971962616825,
"alnum_prop": 0.6843832020997376,
"repo_name": "pombredanne/django-ptree",
"id": "0a1a6c0ea93e6ade39b320a4ec8fc65f44f2c739",
"size": "4572",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ptree/common.py",
"mode": "33188",
"license": "mit",
"language": [],
"symlink_target": ""
}
|
import os
import re
import bz2
import glob
import json
import jieba
import logging
import argparse
import coloredlogs
import cPickle as pickle
from collections import defaultdict
SYM = re.compile(ur'[^\u4e00-\u9fa5]+')
coloredlogs.install()
class Indexer(object):
"""Indexer index questions and persist them for further usage"""
def __init__(self, path):
self._path = path
self._words = defaultdict(set)
self._questions = dict()
self._topics = dict()
@property
def filenames(self):
return glob.glob(os.path.join(self._path, '*.json'))
def loadtopic(self, topic):
topicid = topic['id']
topicname = topic['name']
logging.info('Loading topic [%s (%d)]' % (topicname, topicid))
self._topics[topicid] = topicname
for question in topic['questions']:
qid = question['id']
qtitle = question['title']
subtopic = question['subtopic']
self._topics[subtopic['id']] = subtopic['title']
self._questions[qid] = dict(
title=qtitle,
topic=set([topicid, subtopic['id']])
)
def loaddata(self):
logging.info('Loading data')
for filename in self.filenames:
with open(filename) as f:
topic = json.load(f)
self.loadtopic(topic)
def indexquestion(self, qid, question):
title = SYM.sub(' ', question['title'])
fragments = filter(None, jieba.cut_for_search(title))
for fragment in fragments:
self._words[fragment].add(qid)
def index(self):
self.loaddata()
logging.info('Indexing questions')
for qid, question in self._questions.iteritems():
self.indexquestion(qid, question)
def persist(self, outfilepath):
data = dict(
words=self._words,
topics=self._topics,
questions=self._questions
)
logging.info('Persisting to file: %s' % outfilepath)
with bz2.BZ2File(outfilepath, 'w') as f:
pickle.dump(data, f)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("indir", help="The path of directory to read in raw data files")
parser.add_argument("outfile", help="The path of file to save indexed data")
args = parser.parse_args()
if args.indir and args.outfile:
indexer = Indexer(args.indir)
indexer.index()
indexer.persist(args.outfile)
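# Illustrative addition, not part of the original indexer.py: reading the
# persisted index back and looking up questions by a segmented token. These
# helpers reuse the bz2/cPickle imports at the top of this file; file names
# and the query token are placeholders.
def load_index(filepath):
    """Load the bz2-compressed pickle written by Indexer.persist()."""
    with bz2.BZ2File(filepath, 'r') as f:
        return pickle.load(f)

def lookup(index, token):
    """Return titles of all questions whose indexed text contains `token`."""
    qids = index['words'].get(token, set())
    return [index['questions'][qid]['title'] for qid in qids]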
|
{
"content_hash": "85cb4c6aca22f58e41e686372a9573ba",
"timestamp": "",
"source": "github",
"line_count": 87,
"max_line_length": 88,
"avg_line_length": 29.057471264367816,
"alnum_prop": 0.59375,
"repo_name": "shanzi/thesiscode",
"id": "8894873dfdb110db3186aab365a44dfabb13864b",
"size": "2575",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "searcher/indexer.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "15661"
}
],
"symlink_target": ""
}
|
"""
Django Reformation
(c) 2013 Curtis Maloney, Danielle Madeley
See LICENSE.
"""
from django.forms import BaseForm
class ReformerMixin(BaseForm):
"""
"""
|
{
"content_hash": "d26c501567d3bcbc3dbfd97005ef062d",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 42,
"avg_line_length": 12.923076923076923,
"alnum_prop": 0.6845238095238095,
"repo_name": "funkybob/django-reformation",
"id": "ae956dda9dd29d2a60c7f1f64e2519118779a2c0",
"size": "168",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "reformation/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "4390"
}
],
"symlink_target": ""
}
|
from django import template
import datetime
register = template.Library()
def dayssince(value):
"Returns number of days between today and value."
today = datetime.date.today()
diff = today - value
if diff.days > 1:
return '%s days ago' % diff.days
elif diff.days == 1:
return 'yesterday'
elif diff.days == 0:
return 'today'
else:
# Date is in the future; return formatted date.
return value.strftime("%B %d, %Y")
register.filter('dayssince', dayssince)
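# Illustrative behaviour, not part of the original template tag module. In a
# template the filter is applied as {{ some_date|dayssince }}; the dates below
# are only examples.
if __name__ == "__main__":
    print(dayssince(datetime.date.today()))                               # "today"
    print(dayssince(datetime.date.today() - datetime.timedelta(days=3)))  # "3 days ago"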
|
{
"content_hash": "453b725ad286c5f7c3691b64f541c780",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 55,
"avg_line_length": 26.2,
"alnum_prop": 0.6335877862595419,
"repo_name": "issackelly/cohpy2011",
"id": "b976744800b12912f31a2ba0c52a57a51e8d6285",
"size": "524",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cohpy_project/apps/library/templatetags/library_tags.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "16582"
},
{
"name": "Python",
"bytes": "18849"
}
],
"symlink_target": ""
}
|
from datetime import datetime
from django.db import models
from django.core.urlresolvers import reverse
from sorl.thumbnail import ImageField, get_thumbnail
from sorl.thumbnail.helpers import ThumbnailError
class Author(models.Model):
name = models.CharField(max_length=200)
short_bio = models.TextField(max_length=500, null=True, blank=True)
slug = models.SlugField(max_length=100, blank=True)
uuid = models.CharField(max_length=100, blank=True, null=True)
avatar = ImageField(upload_to='authors',
blank=True,
null=True)
class Meta:
ordering = ['name']
def __unicode__(self):
return self.name
def get_absolute_url(self):
return reverse('author', kwargs={'slug': self.slug})
def get_avatar(self, size='60x60'):
try:
thumbnail = get_thumbnail(self.avatar,
size,
crop='center',
quality=70)
if thumbnail:
return thumbnail.url
except ThumbnailError:
return None
class Tag(models.Model):
name = models.CharField(max_length=100)
slug = models.SlugField(max_length=100, blank=True)
uuid = models.CharField(max_length=100, blank=True, null=True)
class Meta:
ordering = ['name']
def __unicode__(self):
return self.name
def get_absolute_url(self):
return reverse('tag', kwargs={'slug': self.slug})
class Quote(models.Model):
body = models.TextField()
authors = models.ManyToManyField(Author)
tags = models.ManyToManyField(Tag, blank=True, null=True)
slug = models.SlugField(max_length=100, blank=True)
publish_date = models.DateTimeField(default=datetime.now)
source = models.TextField(null=True, blank=True)
featured = models.BooleanField(default=False)
uuid = models.CharField(max_length=100, blank=True, null=True)
class Meta:
ordering = ['-publish_date']
def __unicode__(self):
return self.body
def get_absolute_url(self):
return reverse('quote', kwargs={'uuid': self.uuid})
def serialize(self):
return {
'body': self.body,
'url': self.get_absolute_url(),
'authors': [
{
'name': author.name,
'url': author.get_absolute_url(),
'avatar': author.get_avatar(),
}
for author in self.authors.all()
],
'tags': [
{
'name': tag.name,
'url': tag.get_absolute_url(),
}
for tag in self.tags.all()
]
}
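# Illustrative addition, not part of the original quotes/models.py: a minimal
# view exposing Quote.serialize() as JSON. The URL configuration is assumed to
# pass the quote's uuid, matching get_absolute_url() above.
import json
from django.http import HttpResponse

def quote_json(request, uuid):
    """Hypothetical helper view returning a single serialized quote as JSON."""
    quote = Quote.objects.get(uuid=uuid)
    return HttpResponse(json.dumps(quote.serialize()),
                        content_type='application/json')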
|
{
"content_hash": "e6d7592ed57e537338cd0039d61770da",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 71,
"avg_line_length": 31.365591397849464,
"alnum_prop": 0.5320534796023312,
"repo_name": "daviferreira/defprogramming",
"id": "31ad0add354e89d5a1c96fd161834bc9dde1dca9",
"size": "2933",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "quotes/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "47012"
},
{
"name": "HTML",
"bytes": "23332"
},
{
"name": "JavaScript",
"bytes": "20969"
},
{
"name": "Makefile",
"bytes": "153"
},
{
"name": "Python",
"bytes": "50861"
}
],
"symlink_target": ""
}
|
import math
import m5
from m5.objects import *
from m5.defines import buildEnv
from m5.util import addToPath
#
# Note: the L1 Cache latency is only used by the sequencer on fast path hits
#
class L1Cache(RubyCache):
latency = 3
#
# Note: the L2 Cache latency is not currently used
#
class L2Cache(RubyCache):
latency = 15
def create_system(options, phys_mem, piobus, dma_devices):
if buildEnv['PROTOCOL'] != 'MESI_CMP_directory':
panic("This script requires the MESI_CMP_directory protocol to be built.")
cpu_sequencers = []
#
# The ruby network creation expects the list of nodes in the system to be
# consistent with the NetDest list. Therefore the l1 controller nodes must be
# listed before the directory nodes and directory nodes before dma nodes, etc.
#
l1_cntrl_nodes = []
l2_cntrl_nodes = []
dir_cntrl_nodes = []
dma_cntrl_nodes = []
#
# Must create the individual controllers before the network to ensure the
# controller constructors are called before the network constructor
#
for i in xrange(options.num_cpus):
#
# First create the Ruby objects associated with this cpu
#
l1i_cache = L1Cache(size = options.l1i_size,
assoc = options.l1i_assoc)
l1d_cache = L1Cache(size = options.l1d_size,
assoc = options.l1d_assoc)
cpu_seq = RubySequencer(version = i,
icache = l1i_cache,
dcache = l1d_cache,
physMemPort = phys_mem.port,
physmem = phys_mem)
if piobus != None:
cpu_seq.pio_port = piobus.port
l1_cntrl = L1Cache_Controller(version = i,
sequencer = cpu_seq,
L1IcacheMemory = l1i_cache,
L1DcacheMemory = l1d_cache,
l2_select_num_bits = \
math.log(options.num_l2caches, 2))
#
# Add controllers and sequencers to the appropriate lists
#
cpu_sequencers.append(cpu_seq)
l1_cntrl_nodes.append(l1_cntrl)
for i in xrange(options.num_l2caches):
#
# First create the Ruby objects associated with this cpu
#
l2_cache = L2Cache(size = options.l2_size,
assoc = options.l2_assoc)
l2_cntrl = L2Cache_Controller(version = i,
L2cacheMemory = l2_cache)
l2_cntrl_nodes.append(l2_cntrl)
phys_mem_size = long(phys_mem.range.second) - long(phys_mem.range.first) + 1
mem_module_size = phys_mem_size / options.num_dirs
for i in xrange(options.num_dirs):
#
# Create the Ruby objects associated with the directory controller
#
mem_cntrl = RubyMemoryControl(version = i)
dir_size = MemorySize('0B')
dir_size.value = mem_module_size
dir_cntrl = Directory_Controller(version = i,
directory = \
RubyDirectoryMemory(version = i,
size = dir_size),
memBuffer = mem_cntrl)
dir_cntrl_nodes.append(dir_cntrl)
for i, dma_device in enumerate(dma_devices):
#
# Create the Ruby objects associated with the dma controller
#
dma_seq = DMASequencer(version = i,
physMemPort = phys_mem.port,
physmem = phys_mem)
dma_cntrl = DMA_Controller(version = i,
dma_sequencer = dma_seq)
dma_cntrl.dma_sequencer.port = dma_device.dma
dma_cntrl_nodes.append(dma_cntrl)
all_cntrls = l1_cntrl_nodes + \
l2_cntrl_nodes + \
dir_cntrl_nodes + \
dma_cntrl_nodes
return (cpu_sequencers, dir_cntrl_nodes, all_cntrls)
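# Illustrative call site, an assumption rather than part of this file: a
# top-level Ruby configuration script would typically import this protocol
# module and call create_system() with objects it has already built, e.g.
#
#     (cpu_sequencers, dir_cntrl_nodes, all_cntrls) = \
#         MESI_CMP_directory.create_system(options, system.physmem,
#                                          piobus, dma_devices)
#
# where options, system.physmem, piobus and dma_devices come from the
# enclosing simulation setup.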
|
{
"content_hash": "39ea440d1e8e3f8e22c4b63057e0e4c1",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 82,
"avg_line_length": 34.0650406504065,
"alnum_prop": 0.5300715990453461,
"repo_name": "liangwang/m5",
"id": "ca5a7aa4626d51fc1e9ae645f72e81d5cf3fbf30",
"size": "5820",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "configs/ruby/protocols/MESI_CMP_directory.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "490228"
},
{
"name": "C++",
"bytes": "8617145"
},
{
"name": "Emacs Lisp",
"bytes": "1969"
},
{
"name": "Python",
"bytes": "2567844"
},
{
"name": "Shell",
"bytes": "6722"
},
{
"name": "Visual Basic",
"bytes": "2884"
}
],
"symlink_target": ""
}
|
"""
Copyright 2014-2016 University of Illinois
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
file: stats/urls.py
Author: Jon Gunderson
"""
# stats/urls.py
from __future__ import absolute_import
from django.conf.urls import url
from .views import ShowUsageStatistics
urlpatterns = [
url(r'^$', ShowUsageStatistics.as_view(), name='usage_statistics'),
]
|
{
"content_hash": "3405bb2ba54313093751831c294df26b",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 72,
"avg_line_length": 27.161290322580644,
"alnum_prop": 0.7684085510688836,
"repo_name": "opena11y/fae2",
"id": "acbfdf384e629114092a0647be29f2c13c5d87b7",
"size": "842",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "fae2/stats/urls.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "13797"
},
{
"name": "HTML",
"bytes": "370034"
},
{
"name": "Java",
"bytes": "128994"
},
{
"name": "JavaScript",
"bytes": "2157386"
},
{
"name": "Python",
"bytes": "500936"
},
{
"name": "Shell",
"bytes": "2666"
}
],
"symlink_target": ""
}
|
from django.db import models
import datetime
from django.utils import timezone
# Create your models here.
class Poll(models.Model):
question = models.CharField(max_length=200)
pub_date = models.DateTimeField('date published')
def __unicode__(self): # Python 3: def __str__(self):
return self.question
def was_published_recently(self):
#return self.pub_date >= timezone.now() - datetime.timedelta(days=1)
now = timezone.now()
return now - datetime.timedelta(days=1) <= self.pub_date <= now
was_published_recently.admin_order_field = 'pub_date'
was_published_recently.boolean = True
was_published_recently.short_description = 'Published recently?'
class Choice(models.Model):
poll = models.ForeignKey(Poll)
choice_text = models.CharField(max_length=200)
votes = models.IntegerField(default=0)
def __unicode__(self): # Python 3: def __str__(self):
return self.choice_text
|
{
"content_hash": "ee26af4455d8a4f77a58a844907cef96",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 76,
"avg_line_length": 41.56521739130435,
"alnum_prop": 0.6872384937238494,
"repo_name": "davidaparicio/django_officialtutorial",
"id": "399d19564110c3adf92a5c2a0f531ed9d6942da5",
"size": "956",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "polls/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "224"
},
{
"name": "Python",
"bytes": "22099"
}
],
"symlink_target": ""
}
|
"""Support for deCONZ binary sensors."""
from pydeconz.sensor import Presence, Vibration
from homeassistant.components.binary_sensor import BinarySensorDevice
from homeassistant.const import ATTR_BATTERY_LEVEL, ATTR_TEMPERATURE
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import ATTR_DARK, ATTR_ON, NEW_SENSOR
from .deconz_device import DeconzDevice
from .gateway import get_gateway_from_config_entry
ATTR_ORIENTATION = 'orientation'
ATTR_TILTANGLE = 'tiltangle'
ATTR_VIBRATIONSTRENGTH = 'vibrationstrength'
async def async_setup_platform(
hass, config, async_add_entities, discovery_info=None):
"""Old way of setting up deCONZ platforms."""
pass
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the deCONZ binary sensor."""
gateway = get_gateway_from_config_entry(hass, config_entry)
@callback
def async_add_sensor(sensors):
"""Add binary sensor from deCONZ."""
entities = []
for sensor in sensors:
if sensor.BINARY and \
not (not gateway.allow_clip_sensor and
sensor.type.startswith('CLIP')):
entities.append(DeconzBinarySensor(sensor, gateway))
async_add_entities(entities, True)
gateway.listeners.append(async_dispatcher_connect(
hass, gateway.async_event_new_device(NEW_SENSOR), async_add_sensor))
async_add_sensor(gateway.api.sensors.values())
class DeconzBinarySensor(DeconzDevice, BinarySensorDevice):
"""Representation of a deCONZ binary sensor."""
@callback
def async_update_callback(self, force_update=False):
"""Update the sensor's state."""
changed = set(self._device.changed_keys)
keys = {'battery', 'on', 'reachable', 'state'}
if force_update or any(key in changed for key in keys):
self.async_schedule_update_ha_state()
@property
def is_on(self):
"""Return true if sensor is on."""
return self._device.is_tripped
@property
def device_class(self):
"""Return the class of the sensor."""
return self._device.SENSOR_CLASS
@property
def icon(self):
"""Return the icon to use in the frontend."""
return self._device.SENSOR_ICON
@property
def device_state_attributes(self):
"""Return the state attributes of the sensor."""
attr = {}
if self._device.battery:
attr[ATTR_BATTERY_LEVEL] = self._device.battery
if self._device.on is not None:
attr[ATTR_ON] = self._device.on
if self._device.secondary_temperature is not None:
attr[ATTR_TEMPERATURE] = self._device.secondary_temperature
if self._device.type in Presence.ZHATYPE and \
self._device.dark is not None:
attr[ATTR_DARK] = self._device.dark
elif self._device.type in Vibration.ZHATYPE:
attr[ATTR_ORIENTATION] = self._device.orientation
attr[ATTR_TILTANGLE] = self._device.tiltangle
attr[ATTR_VIBRATIONSTRENGTH] = self._device.vibrationstrength
return attr
|
{
"content_hash": "21aa77379c608b807cda4f42ea18e430",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 76,
"avg_line_length": 32.94845360824742,
"alnum_prop": 0.6623904881101377,
"repo_name": "jabesq/home-assistant",
"id": "6fe8b4324b3c1a40e0ae372f04d3b832df809bf8",
"size": "3196",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "homeassistant/components/deconz/binary_sensor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1175"
},
{
"name": "Dockerfile",
"bytes": "1829"
},
{
"name": "Python",
"bytes": "16238292"
},
{
"name": "Ruby",
"bytes": "745"
},
{
"name": "Shell",
"bytes": "17615"
}
],
"symlink_target": ""
}
|
from Hindlebook.models.validators import UuidValidator
from Hindlebook.models.server_models import Settings, Node
from Hindlebook.models.user_models import Author
from Hindlebook.models.post_models import Post, Comment, Image, Category
|
{
"content_hash": "357f8d0b60ed59460677ca104d4bc0c4",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 72,
"avg_line_length": 58.75,
"alnum_prop": 0.8553191489361702,
"repo_name": "Tamarabyte/cmput410-project",
"id": "ea7278721a7ff7fbe7e2c578a026d92a2e342e88",
"size": "235",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "DistributedSocialNetworking/Hindlebook/models/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "36523"
},
{
"name": "HTML",
"bytes": "29835"
},
{
"name": "JavaScript",
"bytes": "89893"
},
{
"name": "Python",
"bytes": "166628"
}
],
"symlink_target": ""
}
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'LineItem'
db.create_table(u'billing_lineitem', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('statement', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['billing.Statement'])),
('tag_pool_name', self.gf('django.db.models.fields.CharField')(default='', max_length=100, blank=True)),
('tag_name', self.gf('django.db.models.fields.CharField')(default='', max_length=100, blank=True)),
('message_direction', self.gf('django.db.models.fields.CharField')(default='', max_length=20, blank=True)),
('total_cost', self.gf('django.db.models.fields.IntegerField')(default=0)),
))
db.send_create_signal(u'billing', ['LineItem'])
# Adding model 'Statement'
db.create_table(u'billing_statement', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('account', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['billing.Account'])),
('title', self.gf('django.db.models.fields.CharField')(max_length=100)),
('from_date', self.gf('django.db.models.fields.DateField')()),
('to_date', self.gf('django.db.models.fields.DateField')()),
('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
))
db.send_create_signal(u'billing', ['Statement'])
def backwards(self, orm):
# Deleting model 'LineItem'
db.delete_table(u'billing_lineitem')
# Deleting model 'Statement'
db.delete_table(u'billing_statement')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'base.gouser': {
'Meta': {'object_name': 'GoUser'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '254'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '254'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'billing.account': {
'Meta': {'object_name': 'Account'},
'account_number': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'alert_credit_balance': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'alert_threshold': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '10', 'decimal_places': '2'}),
'credit_balance': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.GoUser']"})
},
u'billing.lineitem': {
'Meta': {'object_name': 'LineItem'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message_direction': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '20', 'blank': 'True'}),
'statement': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billing.Statement']"}),
'tag_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'tag_pool_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'total_cost': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'billing.messagecost': {
'Meta': {'object_name': 'MessageCost'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billing.Account']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'markup_percent': ('django.db.models.fields.DecimalField', [], {'default': "'0.0'", 'max_digits': '10', 'decimal_places': '2'}),
'message_cost': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'message_direction': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'tag_pool': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billing.TagPool']"})
},
u'billing.statement': {
'Meta': {'object_name': 'Statement'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['billing.Account']"}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'from_date': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'to_date': ('django.db.models.fields.DateField', [], {})
},
u'billing.tagpool': {
'Meta': {'object_name': 'TagPool'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
u'billing.transaction': {
'Meta': {'object_name': 'Transaction'},
'account_number': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'credit_amount': ('django.db.models.fields.IntegerField', [], {}),
'credit_factor': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'markup_percent': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'message_cost': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'message_direction': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'Pending'", 'max_length': '20'}),
'tag_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'tag_pool_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['billing']
|
{
"content_hash": "26c56b40c964fd5e1f08a5f4dd1040ba",
"timestamp": "",
"source": "github",
"line_count": 137,
"max_line_length": 187,
"avg_line_length": 70.4963503649635,
"alnum_prop": 0.5580865603644647,
"repo_name": "praekelt/vumi-go",
"id": "5289f9d3a80a04fecb807c2c0feed1ac9b4042c1",
"size": "9682",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "go/billing/migrations/0004_auto__add_lineitem__add_statement.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "154585"
},
{
"name": "HTML",
"bytes": "158025"
},
{
"name": "JavaScript",
"bytes": "446112"
},
{
"name": "Python",
"bytes": "2738963"
},
{
"name": "Shell",
"bytes": "6799"
}
],
"symlink_target": ""
}
|
'''
Created on Jun 20, 2015
@author: Daniil Sorokin<sorokin@ukp.informatik.tu-darmstadt.de>
'''
from classfiers import *
from vector_representation import *
from scale import *
import argparse  # imported explicitly so the script does not depend on the star imports above providing it
if __name__ == '__main__':
parser = argparse.ArgumentParser()
# parser.add_argument('-l', action='store_true', help = "Normalize frequency values by document length.")
parser.add_argument('-n', nargs=3, type=int, help = "Number of the base terms to keep. Three parameters to generate a range: n[0]**x for x in range(n[1],n[2])")
parser.add_argument('-k', nargs=3, type=int, help = "K for KNN. Three parameters to generate a range: range(n[0],n[1],n[2])", default=[0,1,1])
parser.add_argument('-c', choices=["KNN", "NB"], help = "Classifier to use.", default = "NB")
parser.add_argument('-s', choices=["Z", "N"], help = "Scaling.", default = "Z")
parser.add_argument('-f', choices=["idf", "mi"], help = "Base term filtering criterion.", default = "mi")
parser.add_argument('-w', action='store_true', help = "Lemmatize tokens.")
parser.add_argument('-r', action='store_false', help = "Keep stopwords.")
parser.add_argument('train_folder', type=str, help = "Training corpus input.")
parser.add_argument('test_folder', type=str, help = "Test corpus input.")
params = parser.parse_args()
scale_preprocess_alias = get_mu_sigma if params.s == "Z" else get_min_max
scale_alias = z_norm_vectors if params.s == "Z" else scale_vectors
print("Loading data.")
train_corpus = StemmedCorpus(language="english")
train_corpus.load_from_folder(params.train_folder)
train_corpus.preprocess_documents(lemmatize=params.w, remove_stopwords=params.r)
test_corpus = StemmedCorpus(language="english")
test_corpus.load_from_folder(params.test_folder)
test_corpus.preprocess_documents(lemmatize=params.w, remove_stopwords=params.r)
print("Compute base terms.")
base_terms = compute_base_terms(train_corpus._stemmed_documents)
print("Base terms extracted: " + str(len(base_terms)))
for n in [params.n[0]**x for x in range(params.n[1], params.n[2])] :
# print("Compute vectors for N=" + str(n))
base_terms_n = filter_base_terms(base_terms, n) if params.f == "idf" else filter_base_terms_with_mi(base_terms, n)
train_vectors = compute_vectors(train_corpus._stemmed_documents, base_terms_n)
test_vectors = compute_vectors(test_corpus._stemmed_documents, base_terms_n)
# print("Scaling vectors ({}).".format(params.s))
mu_min_v, sigma_max_v = scale_preprocess_alias(train_vectors)
train_vectors = scale_alias(train_vectors, mu_min_v, sigma_max_v)
test_vectors = scale_alias(test_vectors, mu_min_v, sigma_max_v)
for k in list(range(params.k[0], params.k[1], params.k[2])):
print("Training. Cl= {}, S= {}, F= {}, L= {}, R={}, N={}, K={}".format(params.c, params.s, params.f, params.w, params.r, n, k))
classifier = NBClassifier() if params.c == "NB" else NNClassifier(k)
classifier.train(train_vectors)
accuracy, m_p, m_r, m_f1 = classifier.evaluate(test_vectors)
print("accuracy, P, R, F1: {},{},{},{}".format(accuracy, m_p, m_r, m_f1))
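# Example invocation (illustrative; directory names are placeholders):
#   python parameter-selection.py -c KNN -f mi -k 1 10 2 -n 2 4 10 train_corpus/ test_corpus/
# This sweeps K over 1, 3, ..., 9 and the number of base terms over 2**4 .. 2**9.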
|
{
"content_hash": "646a90d87c1b29bae6acafb2ba3af8ef",
"timestamp": "",
"source": "github",
"line_count": 53,
"max_line_length": 164,
"avg_line_length": 60.924528301886795,
"alnum_prop": 0.6491173737999381,
"repo_name": "daniilsorokin/Web-Mining-Exercises",
"id": "82b9341052c2670e8b121dd7d2ef0f2e04b41f10",
"size": "3229",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/parameter-selection.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "73422"
},
{
"name": "Shell",
"bytes": "2308"
}
],
"symlink_target": ""
}
|
from django import forms
from django.core.exceptions import ValidationError
from prosodyauth import fields, authenticate
from prosodyauth.prosody.parsejid import nodeprep
from prosodyauth.models import User
from simplecaptcha import captcha
from helpers.forms import PlaceholderForm
class LoginForm(PlaceholderForm):
username = forms.CharField(min_length=3, max_length=30)
password = fields.PassField(min_length=8)
next_url = forms.CharField(widget=forms.HiddenInput, required=False)
def clean_username(self):
"""Normalize the username field by stripping '@domain' part
Users may end up with their JID ('username@domain.tld') in their
browser's stored passwords, which then can obstruct their ability to
log into the website. This is easily remedied by simply stripping out
any '@domain.tld' attached to the username when logging in.
"""
username = self.cleaned_data.get('username')
# Might be best to only strip out our XMPP domain, but this works too
return username.split('@')[0]
def render_password(self, render_value=True):
for field_name in self.fields:
field = self.fields.get(field_name)
if field:
if type(field.widget) == forms.PasswordInput:
field.widget.render_value = render_value
@captcha
class RegistrationForm(LoginForm):
email = forms.EmailField()
password_confirm = fields.PassField(label='confirm password')
def clean_username(self):
username = self.cleaned_data.get('username')
try:
node = nodeprep(username)
except UnicodeError as err:
raise ValidationError(str(err), code='invalid')
if node.lower() != username.lower():
raise ValidationError('That username contains invalid characters', code='invalid')
if User.objects.filter(username__iexact=username).count() > 0:
raise ValidationError('That username is already taken', code='invalid')
return username
def clean_email(self):
email = self.cleaned_data.get('email').lower()
if User.objects.filter(email=email).count() > 0:
raise ValidationError('That email is already in use', code='invalid')
return email
def clean(self):
cleaned_data = super().clean()
password = cleaned_data.get('password')
confirm = cleaned_data.get('password_confirm')
if password and password != confirm:
self.add_error('password_confirm', ValidationError('Confirmation must match password', code='invalid'))
elif password and not authenticate.password_is_compliant(password, cleaned_data.get('username')):
self.add_error('password', ValidationError('Password does not meet requirements', code='invalid'))
class ResendActivationForm(PlaceholderForm):
user = forms.CharField(label='username or email')
|
{
"content_hash": "ed3f5d2dfb000a1f1ae0898608366855",
"timestamp": "",
"source": "github",
"line_count": 79,
"max_line_length": 115,
"avg_line_length": 37.30379746835443,
"alnum_prop": 0.6783169324737021,
"repo_name": "Kromey/akwriters",
"id": "1db4afbe88a243a8442f368f30310d8c7990ea4a",
"size": "2947",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "prosodyauth/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "14197"
},
{
"name": "HTML",
"bytes": "69988"
},
{
"name": "JavaScript",
"bytes": "364937"
},
{
"name": "Python",
"bytes": "160020"
},
{
"name": "SCSS",
"bytes": "9316"
},
{
"name": "Shell",
"bytes": "2239"
}
],
"symlink_target": ""
}
|
"""The ``foo`` subcommand."""
import click
@click.group()
def foo():
"""Manage the FOO component."""
pass
@foo.command()
def version():
"""The current FOO version."""
click.echo("Version TBD...")
|
{
"content_hash": "a1924380e6d76df8708530bc4eb1a074",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 35,
"avg_line_length": 14.466666666666667,
"alnum_prop": 0.5852534562211982,
"repo_name": "jtpaasch/tabu",
"id": "bc6535d4c597f017dc38e910a29ddde274aacbdb",
"size": "242",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tabu/projects/foo/cli/foo.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "6877"
}
],
"symlink_target": ""
}
|
import basepage
class NavigationBars(basepage.BasePage):
def expand_project_panel(self):
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-project"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def expand_admin_panel(self):
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-admin"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def expand_identity_panel(self):
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-identity"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def expand_developer_panel(self):
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-developer"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
"""
Project > Compute > Resource
"""
def expand_project_compute(self):
NavigationBars.expand_project_panel(self)
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-project-compute"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def click_project_compute_overview(self):
NavigationBars.expand_project_compute(self)
self.driver.find_element_by_css_selector(
'a[href="/project/"]').click()
def click_project_compute_instance(self):
NavigationBars.expand_project_compute(self)
self.driver.find_element_by_css_selector(
'a[href="/project/instances/"]').click()
def click_project_compute_volumes(self):
NavigationBars.expand_project_compute(self)
self.driver.find_element_by_css_selector(
'a[href="/project/volumes/"]').click()
def click_project_compute_images(self):
NavigationBars.expand_project_compute(self)
self.driver.find_element_by_css_selector(
'a[href="/project/images/"]').click()
def click_project_compute_access_and_security(self):
NavigationBars.expand_project_compute(self)
self.driver.find_element_by_css_selector(
'a[href="/project/access_and_security/"]').click()
"""
Project > Network > Resource
"""
def expand_project_network(self):
NavigationBars.expand_project_panel(self)
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-project-network"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def click_project_network_network_topology(self):
NavigationBars.expand_project_network(self)
self.driver.find_element_by_css_selector(
'a[href="/project/network_topology/"]').click()
def click_project_network_networks(self):
NavigationBars.expand_project_network(self)
self.driver.find_element_by_css_selector(
'a[href="/project/networks/"]').click()
def click_project_network_routers(self):
NavigationBars.expand_project_network(self)
self.driver.find_element_by_css_selector(
'a[href="/project/routers/"]').click()
def click_project_network_loadbalancers(self):
NavigationBars.expand_project_network(self)
self.driver.find_element_by_css_selector(
'a[href="/project/ngloadbalancersv2/"]').click()
"""
Project > Orchestration > Resource
"""
def expand_project_orchestration(self):
NavigationBars.expand_project_panel(self)
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-project-orchestration"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def click_project_orchestration_stacks(self):
NavigationBars.expand_project_orchestration(self)
self.driver.find_element_by_css_selector(
'a[href="/project/stacks/"]').click()
def click_project_orchestration_resource_types(self):
NavigationBars.expand_project_orchestration(self)
self.driver.find_element_by_css_selector(
'a[href="/project/stacks/resource_types/"]').click()
def click_project_orchestration_template_versions(self):
NavigationBars.expand_project_orchestration(self)
self.driver.find_element_by_css_selector(
'a[href="/project/stacks/template_versions/"]').click()
"""
Project > Object Store > Resource
"""
def expand_project_object_store(self):
NavigationBars.expand_project_panel(self)
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-project-object_store"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def click_project_object_store_containers(self):
NavigationBars.expand_project_object_store(self)
self.driver.find_element_by_css_selector(
'a[href="/project/containers/"]').click()
"""
Admin > System > Resource
"""
def expand_admin_system(self):
NavigationBars.expand_admin_panel(self)
elm = self.driver.find_element_by_css_selector(
'a[data-target="#sidebar-accordion-admin-admin"]')
state = elm.get_attribute('class')
if 'collapsed' in state:
elm.click()
else:
pass
def click_admin_system_overview(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/"]').click()
def click_admin_system_hypervisors(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/hypervisors/"]').click()
def click_admin_system_host_aggregates(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/aggregates/"]').click()
def click_admin_system_instances(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/instances/"]').click()
def click_admin_system_volumes(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/volumes/"]').click()
def click_admin_system_flavors(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/flavors/"]').click()
def click_admin_system_images(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/images/"]').click()
def click_admin_system_networks(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/networks/"]').click()
def click_admin_system_routers(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/routers/"]').click()
def click_admin_system_floating_ips(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/floating_ips/"]').click()
def click_admin_system_defaults(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/defaults/"]').click()
def click_admin_system_metadata_definitions(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/metadata_defs/"]').click()
def click_admin_system_info(self):
NavigationBars.expand_admin_system(self)
self.driver.find_element_by_css_selector(
'a[href="/admin/info/"]').click()
"""
Identity > Resource
"""
def click_identity_projects(self):
NavigationBars.expand_identity_panel(self)
self.driver.find_element_by_css_selector(
'a[href="/identity/"]').click()
def click_identity_users(self):
NavigationBars.expand_identity_panel(self)
self.driver.find_element_by_css_selector(
'a[href="/identity/users/"]').click()
def click_identity_groups(self):
NavigationBars.expand_identity_panel(self)
self.driver.find_element_by_css_selector(
'a[href="/identity/groups/"]').click()
def click_identity_roles(self):
NavigationBars.expand_identity_panel(self)
self.driver.find_element_by_css_selector(
'a[href="/identity/roles/"]').click()
|
{
"content_hash": "5d9f2c0bb5fa38749138afe1065d0371",
"timestamp": "",
"source": "github",
"line_count": 264,
"max_line_length": 72,
"avg_line_length": 34.97727272727273,
"alnum_prop": 0.6191249729261425,
"repo_name": "rcbops-qe/horizon-selenium",
"id": "19c6d146b640ef731a004eb8e881332225dcda9d",
"size": "9234",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pages/navigation_bars.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "30139"
}
],
"symlink_target": ""
}
|
"""
Test displayed value of a vector variable while doing watchpoint operations
"""
from __future__ import print_function
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestValueOfVectorVariableTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def test_value_of_vector_variable_using_watchpoint_set(self):
"""Test verify displayed value of vector variable."""
exe = self.getBuildArtifact("a.out")
d = {'C_SOURCES': self.source, 'EXE': exe}
self.build(dictionary=d)
self.setTearDownCleanup(dictionary=d)
self.value_of_vector_variable_with_watchpoint_set()
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Our simple source filename.
self.source = 'main.c'
def value_of_vector_variable_with_watchpoint_set(self):
"""Test verify displayed value of vector variable"""
exe = self.getBuildArtifact("a.out")
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
# Set break to get a frame
self.runCmd("b main")
# Run the program.
self.runCmd("run", RUN_SUCCEEDED)
# Value of a vector variable should be displayed correctly
self.expect(
"watchpoint set variable global_vector",
WATCHPOINT_CREATED,
substrs=['new value: (1, 2, 3, 4)'])
|
{
"content_hash": "2ed024648b05fd6279f66e01ec8d8021",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 75,
"avg_line_length": 30.872340425531913,
"alnum_prop": 0.6485182632667126,
"repo_name": "llvm-mirror/lldb",
"id": "c1b3fb9485b5692a4bab169bcced73e171001e72",
"size": "1451",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "packages/Python/lldbsuite/test/commands/watchpoints/watchpoint_on_vectors/TestValueOfVectorVariable.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "131618"
},
{
"name": "C",
"bytes": "195293"
},
{
"name": "C++",
"bytes": "23346708"
},
{
"name": "CMake",
"bytes": "167302"
},
{
"name": "DTrace",
"bytes": "334"
},
{
"name": "LLVM",
"bytes": "6106"
},
{
"name": "Makefile",
"bytes": "50396"
},
{
"name": "Objective-C",
"bytes": "106956"
},
{
"name": "Objective-C++",
"bytes": "24806"
},
{
"name": "Perl",
"bytes": "72175"
},
{
"name": "Python",
"bytes": "3669886"
},
{
"name": "Shell",
"bytes": "6573"
},
{
"name": "Vim script",
"bytes": "8434"
}
],
"symlink_target": ""
}
|
"""
The base Command class.
All commands in Evennia inherit from the 'Command' class in this module.
"""
from builtins import range
import re
from django.conf import settings
from evennia.locks.lockhandler import LockHandler
from evennia.utils.utils import is_iter, fill, lazy_property, make_iter
from future.utils import with_metaclass
def _init_command(cls, **kwargs):
"""
Helper command.
Makes sure all data are stored as lowercase and
    does checking on all properties that should be in list form.
Sets up locks to be more forgiving. This is used both by the metaclass
and (optionally) at instantiation time.
If kwargs are given, these are set as instance-specific properties
on the command.
"""
for i in range(len(kwargs)):
# used for dynamic creation of commands
key, value = kwargs.popitem()
setattr(cls, key, value)
cls.key = cls.key.lower()
if cls.aliases and not is_iter(cls.aliases):
try:
cls.aliases = [str(alias).strip().lower()
for alias in cls.aliases.split(',')]
except Exception:
cls.aliases = []
cls.aliases = list(set(alias for alias in cls.aliases
if alias and alias != cls.key))
# optimization - a set is much faster to match against than a list
cls._matchset = set([cls.key] + cls.aliases)
# optimization for looping over keys+aliases
cls._keyaliases = tuple(cls._matchset)
# by default we don't save the command between runs
if not hasattr(cls, "save_for_next"):
cls.save_for_next = False
# pre-process locks as defined in class definition
temp = []
if hasattr(cls, 'permissions'):
cls.locks = cls.permissions
if not hasattr(cls, 'locks'):
# default if one forgets to define completely
cls.locks = "cmd:all()"
if "cmd:" not in cls.locks:
cls.locks = "cmd:all();" + cls.locks
for lockstring in cls.locks.split(';'):
if lockstring and not ':' in lockstring:
lockstring = "cmd:%s" % lockstring
temp.append(lockstring)
cls.lock_storage = ";".join(temp)
if hasattr(cls, 'arg_regex') and isinstance(cls.arg_regex, basestring):
cls.arg_regex = re.compile(r"%s" % cls.arg_regex, re.I + re.UNICODE)
if not hasattr(cls, "auto_help"):
cls.auto_help = True
if not hasattr(cls, 'is_exit'):
cls.is_exit = False
if not hasattr(cls, "help_category"):
cls.help_category = "general"
cls.help_category = cls.help_category.lower()
class CommandMeta(type):
"""
The metaclass cleans up all properties on the class
"""
def __init__(cls, *args, **kwargs):
_init_command(cls, **kwargs)
super(CommandMeta, cls).__init__(*args, **kwargs)
# The Command class is the basic unit of an Evennia command; when
# defining new commands, the admin subclass this class and
# define their own parser method to handle the input. The
# advantage of this is inheritage; commands that have similar
# structure can parse the input string the same way, minimizing
# parsing errors.
class Command(with_metaclass(CommandMeta, object)):
"""
Base command
Usage:
command [args]
This is the base command class. Inherit from this
to create new commands.
The cmdhandler makes the following variables available to the
command methods (so you can always assume them to be there):
self.caller - the game object calling the command
self.cmdstring - the command name used to trigger this command (allows
you to know which alias was used, for example)
cmd.args - everything supplied to the command following the cmdstring
(this is usually what is parsed in self.parse())
cmd.cmdset - the cmdset from which this command was matched (useful only
seldomly, notably for help-type commands, to create dynamic
help entries and lists)
cmd.obj - the object on which this command is defined. If a default command,
this is usually the same as caller.
cmd.rawstring - the full raw string input, including any args and no parsing.
The following class properties can/should be defined on your child class:
key - identifier for command (e.g. "look")
aliases - (optional) list of aliases (e.g. ["l", "loo"])
locks - lock string (default is "cmd:all()")
help_category - how to organize this help entry in help system
(default is "General")
auto_help - defaults to True. Allows for turning off auto-help generation
arg_regex - (optional) raw string regex defining how the argument part of
the command should look in order to match for this command
(e.g. must it be a space between cmdname and arg?)
(Note that if auto_help is on, this initial string is also used by the
system to create the help entry for the command, so it's a good idea to
format it similar to this one). This behavior can be changed by
overriding the method 'get_help' of a command: by default, this
method returns cmd.__doc__ (that is, this very docstring, or
the docstring of your command). You can, however, extend or
replace this without disabling auto_help.
"""
# the main way to call this command (e.g. 'look')
key = "command"
# alternative ways to call the command (e.g. 'l', 'glance', 'examine')
aliases = []
# a list of lock definitions on the form
# cmd:[NOT] func(args) [ AND|OR][ NOT] func2(args)
locks = settings.COMMAND_DEFAULT_LOCKS
# used by the help system to group commands in lists.
help_category = settings.COMMAND_DEFAULT_HELP_CATEGORY
# This allows to turn off auto-help entry creation for individual commands.
auto_help = True
# optimization for quickly separating exit-commands from normal commands
is_exit = False
# define the command not only by key but by the regex form of its arguments
arg_regex = settings.COMMAND_DEFAULT_ARG_REGEX
# whether self.msg sends to all sessions of a related player/object (default
# is to only send to the session sending the command).
msg_all_sessions = settings.COMMAND_DEFAULT_MSG_ALL_SESSIONS
# auto-set (by Evennia on command instantiation) are:
# obj - which object this command is defined on
# session - which session is responsible for triggering this command. Only set
# if triggered by a player.
def __init__(self, **kwargs):
"""
The lockhandler works the same as for objects.
optional kwargs will be set as properties on the Command at runtime,
        overloading eventual same-named class properties.
"""
if kwargs:
_init_command(self, **kwargs)
@lazy_property
def lockhandler(self):
return LockHandler(self)
def __str__(self):
"""
Print the command key
"""
return self.key
def __eq__(self, cmd):
"""
Compare two command instances to each other by matching their
key and aliases.
Args:
cmd (Command or str): Allows for equating both Command
objects and their keys.
Returns:
equal (bool): If the commands are equal or not.
"""
try:
# first assume input is a command (the most common case)
return self._matchset.intersection(cmd._matchset)
except AttributeError:
# probably got a string
return cmd in self._matchset
def __ne__(self, cmd):
"""
The logical negation of __eq__. Since this is one of the most
called methods in Evennia (along with __eq__) we do some
code-duplication here rather than issuing a method-lookup to
__eq__.
"""
try:
return self._matchset.isdisjoint(cmd._matchset)
except AttributeError:
return cmd not in self._matchset
def __contains__(self, query):
"""
This implements searches like 'if query in cmd'. It's a fuzzy
matching used by the help system, returning True if query can
be found as a substring of the commands key or its aliases.
Args:
query (str): query to match against. Should be lower case.
Returns:
result (bool): Fuzzy matching result.
"""
return any(query in keyalias for keyalias in self._keyaliases)
def _optimize(self):
"""
Optimize the key and aliases for lookups.
"""
# optimization - a set is much faster to match against than a list
self._matchset = set([self.key] + self.aliases)
# optimization for looping over keys+aliases
self._keyaliases = tuple(self._matchset)
def set_key(self, new_key):
"""
Update key.
Args:
new_key (str): The new key.
Notes:
This is necessary to use to make sure the optimization
caches are properly updated as well.
"""
self.key = new_key.lower()
self._optimize()
def set_aliases(self, new_aliases):
"""
Replace aliases with new ones.
Args:
new_aliases (str or list): Either a ;-separated string
or a list of aliases. These aliases will replace the
existing ones, if any.
Notes:
This is necessary to use to make sure the optimization
caches are properly updated as well.
"""
if isinstance(new_aliases, basestring):
new_aliases = new_aliases.split(';')
aliases = (str(alias).strip().lower() for alias in make_iter(new_aliases))
self.aliases = list(set(alias for alias in aliases if alias != self.key))
self._optimize()
def match(self, cmdname):
"""
This is called by the system when searching the available commands,
in order to determine if this is the one we wanted. cmdname was
previously extracted from the raw string by the system.
Args:
cmdname (str): Always lowercase when reaching this point.
Returns:
result (bool): Match result.
"""
return cmdname in self._matchset
def access(self, srcobj, access_type="cmd", default=False):
"""
This hook is called by the cmdhandler to determine if srcobj
is allowed to execute this command. It should return a boolean
value and is not normally something that need to be changed since
it's using the Evennia permission system directly.
Args:
srcobj (Object): Object trying to gain permission
access_type (str, optional): The lock type to check.
            default (bool, optional): The fallback result if no lock
of matching `access_type` is found on this Command.
"""
return self.lockhandler.check(srcobj, access_type, default=default)
def msg(self, text=None, to_obj=None, from_obj=None,
session=None, **kwargs):
"""
This is a shortcut instead of calling msg() directly on an
object - it will detect if caller is an Object or a Player and
also appends self.session automatically if self.msg_all_sessions is False.
Args:
text (str, optional): Text string of message to send.
to_obj (Object, optional): Target object of message. Defaults to self.caller.
from_obj (Object, optional): Source of message. Defaults to to_obj.
session (Session, optional): Supply data only to a unique
session (ignores the value of `self.msg_all_sessions`).
Kwargs:
options (dict): Options to the protocol.
            any (any): All other keywords are interpreted as the
name of send-instructions.
"""
from_obj = from_obj or self.caller
to_obj = to_obj or from_obj
if not session and not self.msg_all_sessions:
if to_obj == self.caller:
session = self.session
else:
session = to_obj.sessions.get()
to_obj.msg(text=text, from_obj=from_obj, session=session, **kwargs)
def execute_cmd(self, raw_string, session=None, obj=None, **kwargs):
"""
A shortcut of execute_cmd on the caller. It appends the
session automatically.
Args:
raw_string (str): Execute this string as a command input.
session (Session, optional): If not given, the current command's Session will be used.
obj (Object or Player, optional): Object or Player on which to call the execute_cmd.
If not given, self.caller will be used.
Kwargs:
Other keyword arguments will be added to the found command
            object instance as variables before it executes. This is
            unused by default Evennia but may be used to set flags and
            change operating parameters for commands at run-time.
"""
obj = self.caller if obj is None else obj
session = self.session if session is None else session
obj.execute_cmd(raw_string, session=session, **kwargs)
# Common Command hooks
def at_pre_cmd(self):
"""
This hook is called before self.parse() on all commands. If
this hook returns anything but False/None, the command
sequence is aborted.
"""
pass
def at_post_cmd(self):
"""
This hook is called after the command has finished executing
(after self.func()).
"""
pass
def parse(self):
"""
Once the cmdhandler has identified this as the command we
want, this function is run. If many of your commands have a
similar syntax (for example 'cmd arg1 = arg2') you should
simply define this once and just let other commands of the
same form inherit from this. See the docstring of this module
for which object properties are available to use (notably
self.args).
"""
pass
def func(self):
"""
This is the actual executing part of the command. It is
called directly after self.parse(). See the docstring of this
module for which object properties are available (beyond those
set in self.parse())
"""
# a simple test command to show the available properties
string = "-" * 50
string += "\n|w%s|n - Command variables from evennia:\n" % self.key
string += "-" * 50
string += "\nname of cmd (self.key): |w%s|n\n" % self.key
string += "cmd aliases (self.aliases): |w%s|n\n" % self.aliases
string += "cmd locks (self.locks): |w%s|n\n" % self.locks
string += "help category (self.help_category): |w%s|n\n" % self.help_category.capitalize()
string += "object calling (self.caller): |w%s|n\n" % self.caller
string += "object storing cmdset (self.obj): |w%s|n\n" % self.obj
string += "command string given (self.cmdstring): |w%s|n\n" % self.cmdstring
# show cmdset.key instead of cmdset to shorten output
string += fill("current cmdset (self.cmdset): |w%s|n\n" %
(self.cmdset.key if self.cmdset.key else self.cmdset.__class__))
self.caller.msg(string)
def get_extra_info(self, caller, **kwargs):
"""
Display some extra information that may help distinguish this
        command from others, for instance, in a disambiguation prompt.
If this command is a potential match in an ambiguous
situation, one distinguishing feature may be its attachment to
a nearby object, so we include this if available.
Args:
caller (TypedObject): The caller who typed an ambiguous
term handed to the search function.
Returns:
A string with identifying information to disambiguate the
object, conventionally with a preceding space.
"""
if hasattr(self, 'obj') and self.obj and self.obj != caller:
return " (%s)" % self.obj.get_display_name(caller).strip()
return ""
def get_help(self, caller, cmdset):
"""
Return the help message for this command and this caller.
By default, return self.__doc__ (the docstring just under
the class definition). You can override this behavior,
though, and even customize it depending on the caller, or other
commands the caller can use.
Args:
caller (Object or Player): the caller asking for help on the command.
cmdset (CmdSet): the command set (if you need additional commands).
Returns:
docstring (str): the help text to provide the caller for this command.
"""
return self.__doc__
class InterruptCommand(Exception):
"""Cleanly interrupt a command."""
pass
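# A minimal usage sketch of the class documented above (assumed here to be the
# module's Command class): a concrete command typically just sets key, aliases,
# locks and help_category and overrides parse() and func(). "CmdEcho" is a
# hypothetical example, not one of Evennia's default commands; self.args and
# self.caller are assumed to be populated by the cmdhandler at runtime as
# described in the docstrings above.
class CmdEcho(Command):
    """
    Echo the given text back to the caller.
    Usage:
      echo <text>
    """
    key = "echo"
    aliases = ["repeat"]
    locks = "cmd:all()"
    help_category = "General"
    def parse(self):
        # everything after the command name, stripped of surrounding whitespace
        self.args = self.args.strip()
    def func(self):
        if not self.args:
            self.caller.msg("Usage: echo <text>")
            return
        self.caller.msg(self.args)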
|
{
"content_hash": "5822ee8d950173c1e40fb0a7ede1fa1c",
"timestamp": "",
"source": "github",
"line_count": 460,
"max_line_length": 98,
"avg_line_length": 37.25869565217391,
"alnum_prop": 0.6229068207013244,
"repo_name": "whitehorse-io/encarnia",
"id": "c5d1515d83d746d4a0f81a40de6509d807b13088",
"size": "17139",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "evennia/evennia/commands/command_backup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "106"
},
{
"name": "C",
"bytes": "63966"
},
{
"name": "CSS",
"bytes": "87525"
},
{
"name": "Emacs Lisp",
"bytes": "2734"
},
{
"name": "GAP",
"bytes": "18122"
},
{
"name": "HTML",
"bytes": "91741"
},
{
"name": "JavaScript",
"bytes": "151335"
},
{
"name": "Objective-C",
"bytes": "1292"
},
{
"name": "Python",
"bytes": "24616242"
},
{
"name": "Shell",
"bytes": "8808"
}
],
"symlink_target": ""
}
|
import datetime
import os
import sys
from django.db import models
from django.conf import settings
from django.contrib.auth.models import User
from django.core.files.base import ContentFile
from django.utils.translation import ugettext as _
from django.utils.hashcompat import md5_constructor
from django.utils.encoding import smart_str
from django.db.models import signals
from avatar.tasks import create_default_thumbnails
try:
from cStringIO import StringIO
dir(StringIO) # Placate PyFlakes
except ImportError:
from StringIO import StringIO
try:
from PIL import Image
dir(Image) # Placate PyFlakes
except ImportError:
import Image
from avatar.util import invalidate_cache
from avatar.settings import (AVATAR_STORAGE_DIR, AVATAR_RESIZE_METHOD,
AVATAR_MAX_AVATARS_PER_USER, AVATAR_THUMB_FORMAT,
AVATAR_HASH_USERDIRNAMES, AVATAR_HASH_FILENAMES,
AVATAR_THUMB_QUALITY, AUTO_GENERATE_AVATAR_SIZES,
AVATAR_USERDIRNAMES_AS_ID, AVATAR_STORAGE)
def avatar_file_path(instance=None, filename=None, size=None, ext=None):
tmppath = [AVATAR_STORAGE_DIR]
if AVATAR_HASH_USERDIRNAMES:
tmp = md5_constructor(instance.user.username).hexdigest()
tmppath.extend([tmp[0], tmp[1], instance.user.username])
elif AVATAR_USERDIRNAMES_AS_ID:
tmppath.append(str(instance.user.id))
else:
tmppath.append(instance.user.username)
if not filename:
# Filename already stored in database
filename = instance.avatar.name
if ext and AVATAR_HASH_FILENAMES:
# An extension was provided, probably because the thumbnail
# is in a different format than the file. Use it. Because it's
# only enabled if AVATAR_HASH_FILENAMES is true, we can trust
# it won't conflict with another filename
(root, oldext) = os.path.splitext(filename)
filename = root + "." + ext
else:
# File doesn't exist yet
if AVATAR_HASH_FILENAMES:
(root, ext) = os.path.splitext(filename)
filename = md5_constructor(smart_str(filename)).hexdigest()
filename = filename + ext
if size:
tmppath.extend(['resized', str(size)])
tmppath.append(os.path.basename(filename))
return os.path.join(*tmppath)
def find_extension(format):
format = format.lower()
if format == 'jpeg':
format = 'jpg'
return format
class Avatar(models.Model):
user = models.ForeignKey(User)
primary = models.BooleanField(default=False)
avatar = models.ImageField(max_length=1024, upload_to=avatar_file_path, blank=True)
date_uploaded = models.DateTimeField(default=datetime.datetime.now)
existing_thumbnail_sizes = models.CommaSeparatedIntegerField(max_length=1024, blank=True)
def __unicode__(self):
return _(u'Avatar for %s') % self.user
def get_storage(self):
(module, cls) = AVATAR_STORAGE.rsplit('.', 1)
__import__(module)
StorageClass = getattr(sys.modules[module], cls)
return StorageClass()
def save(self, *args, **kwargs):
square = kwargs.pop('square', False)
avatars = Avatar.objects.filter(user=self.user)
if self.pk:
avatars = avatars.exclude(pk=self.pk)
if AVATAR_MAX_AVATARS_PER_USER > 1:
if self.primary:
avatars = avatars.filter(primary=True)
avatars.update(primary=False)
else:
avatars.delete()
invalidate_cache(self.user)
is_new = False
if not self.id:
is_new = True
super(Avatar, self).save(*args, **kwargs)
if is_new:
create_default_thumbnails.delay(self, created=True, square=square)
def delete(self, *args, **kwargs):
invalidate_cache(self.user)
super(Avatar, self).delete(*args, **kwargs)
def thumbnail_exists(self, size):
if self.existing_thumbnail_sizes:
if str(size) in self.existing_thumbnail_sizes.split(','):
return True
return False
def create_thumbnail(self, size, quality=None, square=False):
""" Creates a thumbnail for this avatar.
        If square is False, the image will retain its original proportions.
        Also, when square is False, size dictates the height of the new image.
"""
if self.primary:
square = True
# invalidate the cache of the thumbnail with the given size first
invalidate_cache(self.user, size)
try:
orig = self.get_storage().open(self.avatar.name, 'rb').read()
image = Image.open(StringIO(orig))
except IOError:
return # What should we do here? Render a "sorry, didn't work" img?
quality = quality or AVATAR_THUMB_QUALITY
(w, h) = image.size
if w != size or h != size:
if square:
if w > h:
diff = (w - h) / 2
image = image.crop((diff, 0, w - diff, h))
else:
diff = (h - w) / 2
image = image.crop((0, diff, w, h - diff))
w = size
h = size
else:
scaling_ratio = 1.0*size/h
h = int(size)
w = int(scaling_ratio * w)
if image.mode != "RGB":
image = image.convert("RGB")
image = image.resize((w, h), AVATAR_RESIZE_METHOD)
thumb = StringIO()
image.save(thumb, AVATAR_THUMB_FORMAT, quality=quality)
thumb_file = ContentFile(thumb.getvalue())
else:
thumb_file = ContentFile(orig)
thumb = self.get_storage().save(self.avatar_name(size), thumb_file)
# Save this image size in the database so we know what thumbnails have already been created
size_string = str(size)
if self.existing_thumbnail_sizes:
size_string = ',' + size_string
self.existing_thumbnail_sizes += size_string
self.save()
def avatar_url(self, size=None):
if size:
return self.get_storage().url(self.avatar_name(size)).lstrip('/')
else:
return self.avatar.url
def avatar_name(self, size):
ext = find_extension(AVATAR_THUMB_FORMAT)
return avatar_file_path(
instance=self,
size=size,
ext=ext
)
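# Usage sketch for the thumbnail helpers above (assumes a saved Avatar instance
# and a reachable storage backend; the 80px size is arbitrary):
def get_or_create_thumbnail_url(avatar, size=80):
    # Only render the thumbnail if it has not been generated before.
    if not avatar.thumbnail_exists(size):
        avatar.create_thumbnail(size, square=True)
    return avatar.avatar_url(size)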
|
{
"content_hash": "826f7f9426383f7f0a97493a18c6e67b",
"timestamp": "",
"source": "github",
"line_count": 181,
"max_line_length": 99,
"avg_line_length": 36.35911602209945,
"alnum_prop": 0.5982373499468165,
"repo_name": "mliu7/django-avatar",
"id": "236f19637928ea6f56b674693fc35e04ebfbb2c7",
"size": "6581",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "avatar/models.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "46915"
}
],
"symlink_target": ""
}
|
from setuptools import setup
import os
import ticketpy
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='ticketpy',
version=ticketpy.__version__,
author=ticketpy.__author__,
author_email='git@edward.sh',
description="Python wrapper/SDK for the Ticketmaster Discovery API",
long_description=read('README.rst'),
license='MIT',
keywords='Ticketmaster',
url='https://github.com/arcward/ticketpy',
packages=['ticketpy'],
install_requires=['requests']
)
|
{
"content_hash": "3c210b4b532e5fcf6c0d38ae42de2065",
"timestamp": "",
"source": "github",
"line_count": 22,
"max_line_length": 72,
"avg_line_length": 24.818181818181817,
"alnum_prop": 0.6813186813186813,
"repo_name": "arcward/ticketpy",
"id": "7da461df5c0aeabfacfb1c9697ef3a7d48bc9f3c",
"size": "546",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "52308"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, unicode_literals
import json
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404, render
from wagtail.utils.pagination import paginate
from wagtail.wagtailadmin.forms import SearchForm
from wagtail.wagtailadmin.modal_workflow import render_modal_workflow
from wagtail.wagtailadmin.utils import PermissionPolicyChecker
from wagtail.wagtailcore.models import Collection
from wagtail.wagtailimages.formats import get_image_format
from wagtail.wagtailimages.forms import ImageInsertionForm, get_image_form
from wagtail.wagtailimages.models import get_image_model
from wagtail.wagtailimages.permissions import permission_policy
from wagtail.wagtailsearch.backends import get_search_backends
permission_checker = PermissionPolicyChecker(permission_policy)
def get_image_json(image):
"""
helper function: given an image, return the json to pass back to the
image chooser panel
"""
preview_image = image.get_rendition('max-165x165')
return json.dumps({
'id': image.id,
'edit_link': reverse('wagtailimages:edit', args=(image.id,)),
'title': image.title,
'preview': {
'url': preview_image.url,
'width': preview_image.width,
'height': preview_image.height,
}
})
def chooser(request):
Image = get_image_model()
if permission_policy.user_has_permission(request.user, 'add'):
ImageForm = get_image_form(Image)
uploadform = ImageForm()
else:
uploadform = None
images = Image.objects.order_by('-created_at')
q = None
if (
'q' in request.GET or 'p' in request.GET or 'tag' in request.GET or
'collection_id' in request.GET
):
# this request is triggered from search, pagination or 'popular tags';
# we will just render the results.html fragment
collection_id = request.GET.get('collection_id')
if collection_id:
images = images.filter(collection=collection_id)
searchform = SearchForm(request.GET)
if searchform.is_valid():
q = searchform.cleaned_data['q']
images = images.search(q)
is_searching = True
else:
is_searching = False
tag_name = request.GET.get('tag')
if tag_name:
images = images.filter(tags__name=tag_name)
# Pagination
paginator, images = paginate(request, images, per_page=12)
return render(request, "wagtailimages/chooser/results.html", {
'images': images,
'is_searching': is_searching,
'query_string': q,
'will_select_format': request.GET.get('select_format')
})
else:
searchform = SearchForm()
collections = Collection.objects.all()
if len(collections) < 2:
collections = None
paginator, images = paginate(request, images, per_page=12)
return render_modal_workflow(request, 'wagtailimages/chooser/chooser.html', 'wagtailimages/chooser/chooser.js', {
'images': images,
'uploadform': uploadform,
'searchform': searchform,
'is_searching': False,
'query_string': q,
'will_select_format': request.GET.get('select_format'),
'popular_tags': Image.popular_tags(),
'collections': collections,
})
def image_chosen(request, image_id):
image = get_object_or_404(get_image_model(), id=image_id)
return render_modal_workflow(
request, None, 'wagtailimages/chooser/image_chosen.js',
{'image_json': get_image_json(image)}
)
@permission_checker.require('add')
def chooser_upload(request):
Image = get_image_model()
ImageForm = get_image_form(Image)
searchform = SearchForm()
if request.method == 'POST':
image = Image(uploaded_by_user=request.user)
form = ImageForm(request.POST, request.FILES, instance=image)
if form.is_valid():
form.save()
# Reindex the image to make sure all tags are indexed
for backend in get_search_backends():
backend.add(image)
if request.GET.get('select_format'):
form = ImageInsertionForm(initial={'alt_text': image.default_alt_text})
return render_modal_workflow(
request, 'wagtailimages/chooser/select_format.html', 'wagtailimages/chooser/select_format.js',
{'image': image, 'form': form}
)
else:
# not specifying a format; return the image details now
return render_modal_workflow(
request, None, 'wagtailimages/chooser/image_chosen.js',
{'image_json': get_image_json(image)}
)
else:
form = ImageForm()
images = Image.objects.order_by('title')
return render_modal_workflow(
request, 'wagtailimages/chooser/chooser.html', 'wagtailimages/chooser/chooser.js',
{'images': images, 'uploadform': form, 'searchform': searchform}
)
def chooser_select_format(request, image_id):
image = get_object_or_404(get_image_model(), id=image_id)
if request.method == 'POST':
form = ImageInsertionForm(request.POST, initial={'alt_text': image.default_alt_text})
if form.is_valid():
format = get_image_format(form.cleaned_data['format'])
preview_image = image.get_rendition(format.filter_spec)
image_json = json.dumps({
'id': image.id,
'title': image.title,
'format': format.name,
'alt': form.cleaned_data['alt_text'],
'class': format.classnames,
'edit_link': reverse('wagtailimages:edit', args=(image.id,)),
'preview': {
'url': preview_image.url,
'width': preview_image.width,
'height': preview_image.height,
},
'html': format.image_to_editor_html(image, form.cleaned_data['alt_text']),
})
return render_modal_workflow(
request, None, 'wagtailimages/chooser/image_chosen.js',
{'image_json': image_json}
)
else:
form = ImageInsertionForm(initial={'alt_text': image.default_alt_text})
return render_modal_workflow(
request, 'wagtailimages/chooser/select_format.html', 'wagtailimages/chooser/select_format.js',
{'image': image, 'form': form}
)
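# A sketch of how these chooser views could be wired into a urlconf; the URL
# patterns and names below are illustrative only, not Wagtail's canonical
# admin routing:
#
#   from django.conf.urls import url
#   urlpatterns = [
#       url(r'^chooser/$', chooser, name='chooser'),
#       url(r'^chooser/(\d+)/$', image_chosen, name='image_chosen'),
#       url(r'^chooser/upload/$', chooser_upload, name='chooser_upload'),
#       url(r'^chooser/(\d+)/select_format/$', chooser_select_format,
#           name='chooser_select_format'),
#   ]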
|
{
"content_hash": "3cfbde4714ac6eae3125ca77d387a588",
"timestamp": "",
"source": "github",
"line_count": 191,
"max_line_length": 117,
"avg_line_length": 34.617801047120416,
"alnum_prop": 0.6107078039927405,
"repo_name": "hamsterbacke23/wagtail",
"id": "83764dff6cf9456bae6e96784d02efe20202b2d5",
"size": "6612",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wagtail/wagtailimages/views/chooser.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "172736"
},
{
"name": "HTML",
"bytes": "291553"
},
{
"name": "JavaScript",
"bytes": "116387"
},
{
"name": "Makefile",
"bytes": "548"
},
{
"name": "Python",
"bytes": "2243460"
},
{
"name": "Shell",
"bytes": "7387"
}
],
"symlink_target": ""
}
|
import os
from ctypes import *
# Taken from c/cukf.h
UKF_PRECISION_FLOAT = 0
UKF_PRECISION_DOUBLE = 1
state = None
state_error = None
innovation = None
covariance = None
parameters = None
parameters_error = None
# Internal globals, set during init
_cukf = None
_REAL_T = None
# Internal classes, wrapping cukf structs directly
class _SensorParams(Structure):
pass
class _State(Structure):
def __repr__(self):
fields = {
"attitude": tuple(self.attitude),
"angular_velocity": tuple(self.angular_velocity),
"acceleration": tuple(self.angular_velocity)
}
return str(fields)
class _StateError(Structure):
def __repr__(self):
fields = {
"attitude": tuple(self.attitude),
"angular_velocity": tuple(self.angular_velocity)
}
return str(fields)
class _Innovation(Structure):
def __repr__(self):
fields = {
"accel": tuple(self.accel),
"gyro": tuple(self.gyro),
"mag": tuple(self.mag)
}
return str(fields)
class _Parameters(Structure):
def __repr__(self):
        fields = {
"accel_bias": tuple(self.accel_bias),
"gyro_bias": tuple(self.gyro_bias),
"mag_bias": tuple(self.mag_bias),
"mag_scale": tuple(self.mag_scale),
"mag_field_norm": tuple(self.mag_field_norm),
"mag_field_inclination": tuple(self.mag_field_inclination)
}
        return str(fields)
# Public interface
def iterate(dt):
global _cukf, state, state_error, innovation, parameters, parameters_error
if not _cukf:
raise RuntimeError("Please call ukf.init()")
_cukf.ukf_set_state(state)
_cukf.ukf_iterate(dt)
_cukf.ukf_sensor_clear()
_cukf.ukf_get_state(state)
_cukf.ukf_get_state_error(state_error)
_cukf.ukf_get_innovation(innovation)
_cukf.ukf_get_parameters(parameters)
_cukf.ukf_get_parameters_error(parameters_error)
def set_sensors(accelerometer=None, gyroscope=None, magnetometer=None):
if accelerometer is not None:
_cukf.ukf_sensor_set_accelerometer(*accelerometer)
if gyroscope is not None:
_cukf.ukf_sensor_set_gyroscope(*gyroscope)
if magnetometer is not None:
_cukf.ukf_sensor_set_magnetometer(*magnetometer)
def configure_sensors(accelerometer_covariance=None,
gyroscope_covariance=None, magnetometer_covariance=None):
params = _SensorParams()
if getattr(accelerometer_covariance, '__iter__', False):
params.accel_covariance = accelerometer_covariance
elif accelerometer_covariance is not None:
params.accel_covariance = (accelerometer_covariance, ) * 3
else:
params.accel_covariance = (1.0, 1.0, 1.0)
if getattr(gyroscope_covariance, '__iter__', False):
params.gyro_covariance = gyroscope_covariance
elif gyroscope_covariance is not None:
params.gyro_covariance = (gyroscope_covariance, ) * 3
else:
params.gyro_covariance = (1.0, 1.0, 1.0)
if getattr(magnetometer_covariance, '__iter__', False):
params.mag_covariance = magnetometer_covariance
elif magnetometer_covariance is not None:
params.mag_covariance = (magnetometer_covariance, ) * 3
else:
params.mag_covariance = (1.0, 1.0, 1.0)
_cukf.ukf_set_params(params)
def configure_process_noise(process_noise_covariance):
_cukf.ukf_set_process_noise((_REAL_T * 6)(*process_noise_covariance))
def init():
global _cukf, _REAL_T, state, state_error, innovation, parameters, parameters_error
lib = os.path.join(os.path.dirname(__file__), "libahrs.dylib")
_cukf = cdll.LoadLibrary(lib)
_cukf.ukf_init.argtypes = []
_cukf.ukf_init.restype = None
_cukf.ukf_config_get_precision.argtypes = []
_cukf.ukf_config_get_precision.restype = c_long
_cukf.ukf_config_get_state_dim.argtypes = []
_cukf.ukf_config_get_state_dim.restype = c_long
_cukf.ukf_config_get_measurement_dim.argtypes = []
_cukf.ukf_config_get_measurement_dim.restype = c_long
_PRECISION = _cukf.ukf_config_get_precision()
_REAL_T = c_double if _PRECISION == UKF_PRECISION_DOUBLE else c_float
_STATE_DIM = _cukf.ukf_config_get_state_dim()
_MEASUREMENT_DIM = _cukf.ukf_config_get_measurement_dim()
_SensorParams._fields_ = [
("accel_covariance", _REAL_T * 3),
("gyro_covariance", _REAL_T * 3),
("mag_covariance", _REAL_T * 3)
]
_State._fields_ = [
("attitude", _REAL_T * 4),
("angular_velocity", _REAL_T * 3),
("acceleration", _REAL_T * 3)
]
_StateError._fields_ = [
("attitude", _REAL_T * 3),
("angular_velocity", _REAL_T * 3)
]
_Innovation._fields_ = [
("accel", _REAL_T * 3),
("gyro", _REAL_T * 3),
("mag", _REAL_T * 3)
]
_Parameters._fields_ = [
("accel_bias", _REAL_T * 3),
("gyro_bias", _REAL_T * 3),
("mag_bias", _REAL_T * 3),
("mag_scale", _REAL_T * 3),
("mag_field_norm", _REAL_T),
("mag_field_inclination", _REAL_T),
]
# Set up the function prototypes
_cukf.ukf_set_attitude.argtypes = [_REAL_T, _REAL_T, _REAL_T, _REAL_T]
_cukf.ukf_set_attitude.restype = None
_cukf.ukf_set_angular_velocity.argtypes = [_REAL_T, _REAL_T, _REAL_T]
_cukf.ukf_set_angular_velocity.restype = None
_cukf.ukf_get_state.argtypes = [POINTER(_State)]
_cukf.ukf_get_state.restype = None
_cukf.ukf_set_state.argtypes = [POINTER(_State)]
_cukf.ukf_set_state.restype = None
_cukf.ukf_get_state_error.argtypes = [POINTER(_StateError)]
_cukf.ukf_get_state_error.restype = None
_cukf.ukf_get_innovation.argtypes = [POINTER(_Innovation)]
_cukf.ukf_get_innovation.restype = None
_cukf.ukf_get_state_covariance.argtypes = [
POINTER(_REAL_T * (_STATE_DIM**2))]
_cukf.ukf_get_state_covariance.restype = None
_cukf.ukf_sensor_clear.argtypes = []
_cukf.ukf_sensor_clear.restype = None
_cukf.ukf_sensor_set_accelerometer.argtypes = [_REAL_T, _REAL_T, _REAL_T]
_cukf.ukf_sensor_set_accelerometer.restype = None
_cukf.ukf_sensor_set_gyroscope.argtypes = [_REAL_T, _REAL_T, _REAL_T]
_cukf.ukf_sensor_set_gyroscope.restype = None
_cukf.ukf_sensor_set_magnetometer.argtypes = [_REAL_T, _REAL_T, _REAL_T]
_cukf.ukf_sensor_set_magnetometer.restype = None
_cukf.ukf_set_params.argtypes = [POINTER(_SensorParams)]
_cukf.ukf_set_params.restype = None
_cukf.ukf_iterate.argtypes = [c_float]
_cukf.ukf_iterate.restype = None
_cukf.ukf_set_process_noise.argtypes = [POINTER(_REAL_T * _STATE_DIM)]
_cukf.ukf_set_process_noise.restype = None
_cukf.ukf_get_parameters.argtypes = [POINTER(_Parameters)]
_cukf.ukf_get_parameters.restype = None
_cukf.ukf_get_parameters_error.argtypes = [POINTER(_Parameters)]
_cukf.ukf_get_parameters_error.restype = None
# Initialize the library
_cukf.ukf_init()
# Set up the state
state = _State()
_cukf.ukf_get_state(state)
# Set up the state errors
state_error = _StateError()
_cukf.ukf_get_state_error(state_error)
# Set up the innovation
innovation = _Innovation()
# Set up the parameters
parameters = _Parameters()
_cukf.ukf_get_parameters(parameters)
# Set up the parameter errors
parameters_error = _Parameters()
_cukf.ukf_get_parameters_error(parameters_error)
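# Usage sketch for the public interface above (assumes libahrs.dylib can be
# loaded next to this file and that read_imu() is some external source of
# accelerometer/gyroscope/magnetometer 3-tuples):
#
#   import ukf
#   ukf.init()
#   ukf.configure_sensors(accelerometer_covariance=1e-3,
#                         gyroscope_covariance=1e-5,
#                         magnetometer_covariance=1e-2)
#   while True:
#       accel, gyro, mag = read_imu()
#       ukf.set_sensors(accelerometer=accel, gyroscope=gyro, magnetometer=mag)
#       ukf.iterate(0.01)  # dt in seconds
#       print(ukf.state)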
|
{
"content_hash": "01589a3ff62f6080bee2d26d7e893af3",
"timestamp": "",
"source": "github",
"line_count": 249,
"max_line_length": 87,
"avg_line_length": 30.14457831325301,
"alnum_prop": 0.6317612576605383,
"repo_name": "sfwa/ukf",
"id": "0412701d3031478948082b3ac86b6e803acd3203",
"size": "8581",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/ahrs/python/ukf/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "287"
},
{
"name": "C++",
"bytes": "295178"
},
{
"name": "CMake",
"bytes": "4309"
}
],
"symlink_target": ""
}
|
class price_validation:
    # STATE will be True if the rule validates for the argument values, otherwise False
STATE = None
# It takes 3 arguments netAmount, maxPrice, and minPrice to validate procurement price
def __init__(self, netAmount, maxPrice, minPrice):
self.netAmount=netAmount
self.maxPrice=maxPrice
self.minPrice=minPrice
# validation for maximum procurement contract price
def Procurement_Price_Max_Procurement_Price(self):
'Rule: validating if maximum price (Schema ID: maxPrice) is greater than or equal to procurement price (Schema ID: netAmount)'
if self.maxPrice is not None and self.netAmount is not None and self.maxPrice >= self.netAmount:
price_validation.STATE=True
return price_validation.STATE
else:
price_validation.STATE=False
return price_validation.STATE
# validation for minimum and maximum procurement contract price
def Max_Procurement_Price_Min_Procurement_Price(self):
'Rule: validating if maxPrice is greater than or equal to minimum price (Schema ID: minPrice)'
if self.maxPrice is not None and self.minPrice is not None and self.maxPrice >= self.minPrice:
price_validation.STATE=True
return price_validation.STATE
else:
price_validation.STATE=False
return price_validation.STATE
# validation for minimum procurement contract price
def Procurement_Price_Min_Procurement_Price(self):
        'Rule: validating if minPrice is less than or equal to netAmount'
if self.minPrice is not None and self.netAmount is not None and self.minPrice <= self.netAmount:
price_validation.STATE=True
return price_validation.STATE
else:
price_validation.STATE=False
return price_validation.STATE
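# Usage sketch: validate a procurement price of 950 against a 500-1000 band;
# with these values every rule method returns True.
if __name__ == '__main__':
    validator = price_validation(netAmount=950, maxPrice=1000, minPrice=500)
    print(validator.Procurement_Price_Max_Procurement_Price())     # True
    print(validator.Max_Procurement_Price_Min_Procurement_Price()) # True
    print(validator.Procurement_Price_Min_Procurement_Price())     # True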
|
{
"content_hash": "f295f61e15d3d9fcb5f8c34274316dba",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 134,
"avg_line_length": 44.90909090909091,
"alnum_prop": 0.6568825910931174,
"repo_name": "santosh26a/Semantic-Data-Validation-using-Python",
"id": "d076c362ba148d21277df3c7e0234506838b4e8d",
"size": "1976",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "price_validation.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "18067"
}
],
"symlink_target": ""
}
|
import pandas as pd
import numpy as np
import pyaf.ForecastEngine as autof
import pyaf.Bench.TS_datasets as tsds
import logging
import logging.config
#logging.config.fileConfig('logging.conf')
# logging.basicConfig(level=logging.DEBUG)
b1 = tsds.load_ozone_exogenous()
df = b1.mPastData
H = b1.mHorizon;
N = df.shape[0];
for n in [N]:
df1 = df.head(n);
lEngine = autof.cForecastEngine()
lEngine.mOptions.set_active_autoregressions(['LGB']);
lExogenousData = (b1.mExogenousDataFrame , b1.mExogenousVariables)
lEngine.train(df1 , b1.mTimeVar , b1.mSignalVar, H, lExogenousData);
lEngine.getModelInfo();
lEngine.mSignalDecomposition.mBestModel.mTimeInfo.mResolution
lEngine.standardPlots(name = "outputs/my_lgb_ozone_" + str(n))
dfapp_in = df1.copy();
dfapp_in.tail()
dfapp_out = lEngine.forecast(dfapp_in, H);
#dfapp_out.to_csv("outputs/arx_ozone_apply_out.csv")
dfapp_out.tail(2 * H)
print("Forecast Columns " , dfapp_out.columns);
Forecast_DF = dfapp_out[[b1.mTimeVar , b1.mSignalVar, b1.mSignalVar + '_Forecast']]
print(Forecast_DF.info())
print("Forecasts\n" , Forecast_DF.tail(H).values);
print("\n\n<ModelInfo>")
print(lEngine.to_json());
print("</ModelInfo>\n\n")
print("\n\n<Forecast>")
print(Forecast_DF.tail(2*H).to_json(date_format='iso'))
print("</Forecast>\n\n")
|
{
"content_hash": "41700fe0b84a9ef948105ce24d6e00e8",
"timestamp": "",
"source": "github",
"line_count": 49,
"max_line_length": 87,
"avg_line_length": 28.244897959183675,
"alnum_prop": 0.6828034682080925,
"repo_name": "antoinecarme/pyaf",
"id": "9c49d793d8e2eb6bacaa911b4af1fffee22610ba",
"size": "1384",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/lgbm/test_ozone_lgb_exogenous.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "6773299"
},
{
"name": "Procfile",
"bytes": "24"
},
{
"name": "Python",
"bytes": "54209093"
},
{
"name": "R",
"bytes": "807"
},
{
"name": "Shell",
"bytes": "3619"
}
],
"symlink_target": ""
}
|
from abc import ABCMeta, abstractmethod
from pymongo.errors import DuplicateKeyError
from .config import get_config
from .util import get_default_database, sanitize, skeleton
class Sink(object):
__metaclass__ = ABCMeta
def filter(self, data, address):
return False
def handle(self, data, address):
if not self.filter(data, address):
self.send(data, address)
@abstractmethod
def send(self, data, address):
pass
class ProfileSink(Sink):
def __new__(cls, *args, **kwargs):
if not hasattr(cls, '_MongoClient'):
from gevent import monkey; monkey.patch_socket()
from pymongo import MongoClient
cls._MongoClient = MongoClient
return super(ProfileSink, cls).__new__(cls, *args, **kwargs)
def __init__(self):
self._config = get_config()
self._db = None
self._session_col = None
def filter(self, data, address):
return data['collection'].startswith('$')
@property
def db(self):
if self._db is None:
mongo_uri = self._config.index_profile_sink.mongo_uri
client = self.__class__._MongoClient(mongo_uri)
self._db = get_default_database(client, mongo_uri)
return self._db
@property
def session_col(self):
if self._session_col is None:
from .collection import SessionCollection
col_name = SessionCollection.get_collection_name()
self._session_col = SessionCollection(self.db[col_name])
return self._session_col
class IndexProfileSink(ProfileSink):
def __init__(self):
super(IndexProfileSink, self).__init__()
self._index_profile_col = None
@property
def index_profile_col(self):
if self._index_profile_col is None:
from .collection import IndexProfileCollection
col_name = IndexProfileCollection.get_collection_name()
self._index_profile_col = IndexProfileCollection(self.db[col_name])
return self._index_profile_col
def send(self, data, address):
q = {'session': data['session'],
'collection': data['collection'],
'index': data['explain']['cursor']}
query_skeleton = skeleton(data['query'])
try:
doc = {'queries': []}
doc.update(q)
self.index_profile_col.insert(doc)
except DuplicateKeyError:
pass
q.update({'queries.query': {'$ne': query_skeleton}})
self.index_profile_col.update(
q,
{
'$push': {
'queries': {
'query': query_skeleton,
'count': 0,
'durations': []
}
}
})
self.index_profile_col.update(
{'session': data['session'],
'collection': data['collection'],
'index': data['explain']['cursor'],
'queries.query': query_skeleton},
{'$inc': {'queries.$.count': 1},
'$set': {
'queries.$.covered': data['explain']['indexOnly']
},
'$push': {
'queries.$.durations': data['explain']['millis']
}})
class QueryProfileSink(ProfileSink):
def __init__(self):
super(QueryProfileSink, self).__init__()
self._query_profile_col = None
def filter(self, data, address):
return data['collection'].startswith('$')
@property
def query_profile_col(self):
if self._query_profile_col is None:
from .collection import QueryProfileCollection
col_name = QueryProfileCollection.get_collection_name()
self._query_profile_col = QueryProfileCollection(self.db[col_name])
return self._query_profile_col
def send(self, data, address):
query_profile_doc = \
{'function': data['function'],
'database': data['database'],
'collection': data['collection'],
'session': data['session'],
'explain': sanitize(data['explain']),
'query': skeleton(data['query']),
'source': data['source']}
self.query_profile_col.save(query_profile_doc)
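# A minimal sketch of a custom sink built on the abstract Sink class above: it
# skips system collections and logs everything else. "LoggingSink" is a
# hypothetical example and not part of mongodrums itself.
import logging
_example_logger = logging.getLogger(__name__)
class LoggingSink(Sink):
    def filter(self, data, address):
        # Ignore profiling documents for system collections, as the other sinks do.
        return data['collection'].startswith('$')
    def send(self, data, address):
        _example_logger.info('profile event from %s for collection %s',
                             address, data['collection'])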
|
{
"content_hash": "e3058967af0662091223e64be76af36f",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 79,
"avg_line_length": 32.022222222222226,
"alnum_prop": 0.5537820957668286,
"repo_name": "gregbanks/mongodrums",
"id": "799cdec618dbd20c2ef2796e09431dc29415861a",
"size": "4323",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "mongodrums/sink.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "360"
},
{
"name": "Python",
"bytes": "76484"
}
],
"symlink_target": ""
}
|
"""Test for the debugging wordcount example."""
from __future__ import absolute_import
import logging
import re
import tempfile
import unittest
from apache_beam.examples import wordcount_debugging
from apache_beam.testing.util import open_shards
class WordCountDebuggingTest(unittest.TestCase):
SAMPLE_TEXT = 'xx yy Flourish\n zz Flourish Flourish stomach\n aa\n bb cc dd'
def create_temp_file(self, contents):
with tempfile.NamedTemporaryFile(delete=False) as f:
f.write(contents.encode('utf-8'))
return f.name
def get_results(self, temp_path):
results = []
with open_shards(temp_path + '.result-*-of-*') as result_file:
for line in result_file:
match = re.search(r'([A-Za-z]+): ([0-9]+)', line)
if match is not None:
results.append((match.group(1), int(match.group(2))))
return results
def test_basics(self):
temp_path = self.create_temp_file(self.SAMPLE_TEXT)
expected_words = [('Flourish', 3), ('stomach', 1)]
wordcount_debugging.run(
['--input=%s*' % temp_path,
'--output=%s.result' % temp_path],
save_main_session=False)
# Parse result file and compare.
results = self.get_results(temp_path)
self.assertEqual(sorted(results), sorted(expected_words))
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
unittest.main()
|
{
"content_hash": "c2994dd01101979092b5a3710eb56d16",
"timestamp": "",
"source": "github",
"line_count": 47,
"max_line_length": 79,
"avg_line_length": 29.25531914893617,
"alnum_prop": 0.6654545454545454,
"repo_name": "RyanSkraba/beam",
"id": "124b6802f2add255afb3c973e6de3e0c072ed623",
"size": "2160",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sdks/python/apache_beam/examples/wordcount_debugging_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1597"
},
{
"name": "CSS",
"bytes": "40963"
},
{
"name": "Dockerfile",
"bytes": "16638"
},
{
"name": "FreeMarker",
"bytes": "7428"
},
{
"name": "Go",
"bytes": "2683402"
},
{
"name": "Groovy",
"bytes": "517560"
},
{
"name": "HTML",
"bytes": "183330"
},
{
"name": "Java",
"bytes": "28609011"
},
{
"name": "JavaScript",
"bytes": "16595"
},
{
"name": "Jupyter Notebook",
"bytes": "56365"
},
{
"name": "Python",
"bytes": "6191025"
},
{
"name": "Ruby",
"bytes": "4159"
},
{
"name": "Shell",
"bytes": "235061"
},
{
"name": "TSQL",
"bytes": "841"
}
],
"symlink_target": ""
}
|
from msrest.service_client import ServiceClient
from msrest import Configuration, Serializer, Deserializer
from .version import VERSION
from .operations.dictionary_operations import DictionaryOperations
from . import models
class AutoRestSwaggerBATdictionaryServiceConfiguration(Configuration):
"""Configuration for AutoRestSwaggerBATdictionaryService
Note that all parameters used to create this instance are saved as instance
attributes.
:param str base_url: Service URL
:param str filepath: Existing config
"""
def __init__(
self, base_url=None, filepath=None):
if not base_url:
base_url = 'http://localhost'
super(AutoRestSwaggerBATdictionaryServiceConfiguration, self).__init__(base_url, filepath)
self.add_user_agent('autorestswaggerbatdictionaryservice/{}'.format(VERSION))
class AutoRestSwaggerBATdictionaryService(object):
"""Test Infrastructure for AutoRest Swagger BAT
:ivar config: Configuration for client.
:vartype config: AutoRestSwaggerBATdictionaryServiceConfiguration
:ivar dictionary: Dictionary operations
:vartype dictionary: .operations.DictionaryOperations
:param str base_url: Service URL
:param str filepath: Existing config
"""
def __init__(
self, base_url=None, filepath=None):
self.config = AutoRestSwaggerBATdictionaryServiceConfiguration(base_url, filepath)
self._client = ServiceClient(None, self.config)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self.dictionary = DictionaryOperations(
self._client, self.config, self._serialize, self._deserialize)
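# Usage sketch (the endpoint below is a placeholder and the concrete dictionary
# operations depend on the generated DictionaryOperations class):
#
#   client = AutoRestSwaggerBATdictionaryService(base_url='http://localhost:3000')
#   result = client.dictionary.get_null()  # hypothetical generated operation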
|
{
"content_hash": "d68da260928c08c1edf8a4474b135734",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 98,
"avg_line_length": 34.86538461538461,
"alnum_prop": 0.7231108659680088,
"repo_name": "fhoring/autorest",
"id": "26d72d68088730ee0f414e7c3bd671c573f91f4f",
"size": "2287",
"binary": false,
"copies": "8",
"ref": "refs/heads/master",
"path": "src/generator/AutoRest.Python.Tests/Expected/AcceptanceTests/BodyDictionary/autorestswaggerbatdictionaryservice/auto_rest_swagger_ba_tdictionary_service.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C#",
"bytes": "16227657"
},
{
"name": "CSS",
"bytes": "110"
},
{
"name": "Go",
"bytes": "146679"
},
{
"name": "HTML",
"bytes": "274"
},
{
"name": "Java",
"bytes": "6906819"
},
{
"name": "JavaScript",
"bytes": "4715100"
},
{
"name": "PowerShell",
"bytes": "67568"
},
{
"name": "Python",
"bytes": "2090856"
},
{
"name": "Ruby",
"bytes": "308478"
},
{
"name": "Shell",
"bytes": "142"
},
{
"name": "Smalltalk",
"bytes": "3"
},
{
"name": "TypeScript",
"bytes": "179578"
}
],
"symlink_target": ""
}
|
"""Routine for decoding the CIFAR-10 binary file format."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
# Process images of this size. Note that this differs from the original CIFAR
# image size of 32 x 32. If one alters this number, then the entire model
# architecture will change and any model would need to be retrained.
IMAGE_SIZE = 24
# Global constants describing the CIFAR-10 data set.
NUM_CLASSES = 10
NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN = 50000
NUM_EXAMPLES_PER_EPOCH_FOR_EVAL = 10000
def read_cifar10(filename_queue):
"""Reads and parses examples from CIFAR10 data files.
Recommendation: if you want N-way read parallelism, call this function
N times. This will give you N independent Readers reading different
files & positions within those files, which will give better mixing of
examples.
Args:
filename_queue: A queue of strings with the filenames to read from.
Returns:
An object representing a single example, with the following fields:
height: number of rows in the result (32)
width: number of columns in the result (32)
depth: number of color channels in the result (3)
key: a scalar string Tensor describing the filename & record number
for this example.
label: an int32 Tensor with the label in the range 0..9.
uint8image: a [height, width, depth] uint8 Tensor with the image data
"""
class CIFAR10Record(object):
pass
result = CIFAR10Record()
# Dimensions of the images in the CIFAR-10 dataset.
# See http://www.cs.toronto.edu/~kriz/cifar.html for a description of the
# input format.
label_bytes = 1 # 2 for CIFAR-100
result.height = 32
result.width = 32
result.depth = 3
image_bytes = result.height * result.width * result.depth
# Every record consists of a label followed by the image, with a
# fixed number of bytes for each.
record_bytes = label_bytes + image_bytes
# Read a record, getting filenames from the filename_queue. No
# header or footer in the CIFAR-10 format, so we leave header_bytes
# and footer_bytes at their default of 0.
reader = tf.FixedLengthRecordReader(record_bytes=record_bytes)
result.key, value = reader.read(filename_queue)
# Convert from a string to a vector of uint8 that is record_bytes long.
record_bytes = tf.decode_raw(value, tf.uint8)
# The first bytes represent the label, which we convert from uint8->int32.
result.label = tf.cast(
tf.strided_slice(record_bytes, [0], [label_bytes]), tf.int32)
# The remaining bytes after the label represent the image, which we reshape
# from [depth * height * width] to [depth, height, width].
depth_major = tf.reshape(
tf.strided_slice(record_bytes, [label_bytes],
[label_bytes + image_bytes]),
[result.depth, result.height, result.width])
# Convert from [depth, height, width] to [height, width, depth].
result.uint8image = tf.transpose(depth_major, [1, 2, 0])
return result
def _generate_image_and_label_batch(image, label, min_queue_examples,
batch_size, shuffle):
"""Construct a queued batch of images and labels.
Args:
image: 3-D Tensor of [height, width, 3] of type.float32.
label: 1-D Tensor of type.int32
min_queue_examples: int32, minimum number of samples to retain
in the queue that provides of batches of examples.
batch_size: Number of images per batch.
shuffle: boolean indicating whether to use a shuffling queue.
Returns:
images: Images. 4D tensor of [batch_size, height, width, 3] size.
labels: Labels. 1D tensor of [batch_size] size.
"""
# Create a queue that shuffles the examples, and then
# read 'batch_size' images + labels from the example queue.
num_preprocess_threads = 16
if shuffle:
images, label_batch = tf.train.shuffle_batch(
[image, label],
batch_size=batch_size,
num_threads=num_preprocess_threads,
capacity=min_queue_examples + 3 * batch_size,
min_after_dequeue=min_queue_examples)
else:
images, label_batch = tf.train.batch(
[image, label],
batch_size=batch_size,
num_threads=num_preprocess_threads,
capacity=min_queue_examples + 3 * batch_size)
# Display the training images in the visualizer.
tf.summary.image('images', images)
return images, tf.reshape(label_batch, [batch_size])
def distorted_inputs(data_dir, batch_size):
"""Construct distorted input for CIFAR training using the Reader ops.
Args:
data_dir: Path to the CIFAR-10 data directory.
batch_size: Number of images per batch.
Returns:
images: Images. 4D tensor of [batch_size, IMAGE_SIZE, IMAGE_SIZE, 3] size.
labels: Labels. 1D tensor of [batch_size] size.
"""
filenames = [os.path.join(data_dir, 'data_batch_%d.bin' % i)
for i in xrange(1, 6)]
for f in filenames:
if not tf.gfile.Exists(f):
raise ValueError('Failed to find file: ' + f)
# Create a queue that produces the filenames to read.
filename_queue = tf.train.string_input_producer(filenames)
# Read examples from files in the filename queue.
read_input = read_cifar10(filename_queue)
reshaped_image = tf.cast(read_input.uint8image, tf.float32)
height = IMAGE_SIZE
width = IMAGE_SIZE
# Image processing for training the network. Note the many random
# distortions applied to the image.
# Randomly crop a [height, width] section of the image.
distorted_image = tf.random_crop(reshaped_image, [height, width, 3])
# Randomly flip the image horizontally.
distorted_image = tf.image.random_flip_left_right(distorted_image)
  # Because these operations are not commutative, consider randomizing
  # the order of their operation.
# NOTE: since per_image_standardization zeros the mean and makes
# the stddev unit, this likely has no effect see tensorflow#1458.
distorted_image = tf.image.random_brightness(distorted_image,
max_delta=63)
distorted_image = tf.image.random_contrast(distorted_image,
lower=0.2, upper=1.8)
# Subtract off the mean and divide by the variance of the pixels.
float_image = tf.image.per_image_standardization(distorted_image)
# Set the shapes of tensors.
float_image.set_shape([height, width, 3])
read_input.label.set_shape([1])
# Ensure that the random shuffling has good mixing properties.
min_fraction_of_examples_in_queue = 0.4
min_queue_examples = int(NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN *
min_fraction_of_examples_in_queue)
print ('Filling queue with %d CIFAR images before starting to train. '
'This will take a few minutes.' % min_queue_examples)
# Generate a batch of images and labels by building up a queue of examples.
return _generate_image_and_label_batch(float_image, read_input.label,
min_queue_examples, batch_size,
shuffle=True)
def inputs(eval_data, data_dir, batch_size):
"""Construct input for CIFAR evaluation using the Reader ops.
Args:
eval_data: bool, indicating if one should use the train or eval data set.
data_dir: Path to the CIFAR-10 data directory.
batch_size: Number of images per batch.
Returns:
images: Images. 4D tensor of [batch_size, IMAGE_SIZE, IMAGE_SIZE, 3] size.
labels: Labels. 1D tensor of [batch_size] size.
"""
if not eval_data:
filenames = [os.path.join(data_dir, 'data_batch_%d.bin' % i)
for i in xrange(1, 6)]
num_examples_per_epoch = NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN
else:
filenames = [os.path.join(data_dir, 'test_batch.bin')]
num_examples_per_epoch = NUM_EXAMPLES_PER_EPOCH_FOR_EVAL
for f in filenames:
if not tf.gfile.Exists(f):
raise ValueError('Failed to find file: ' + f)
# Create a queue that produces the filenames to read.
filename_queue = tf.train.string_input_producer(filenames)
# Read examples from files in the filename queue.
read_input = read_cifar10(filename_queue)
reshaped_image = tf.cast(read_input.uint8image, tf.float32)
height = IMAGE_SIZE
width = IMAGE_SIZE
# Image processing for evaluation.
# Crop the central [height, width] of the image.
resized_image = tf.image.resize_image_with_crop_or_pad(reshaped_image,
height, width)
# Subtract off the mean and divide by the variance of the pixels.
float_image = tf.image.per_image_standardization(resized_image)
# Set the shapes of tensors.
float_image.set_shape([height, width, 3])
read_input.label.set_shape([1])
# Ensure that the random shuffling has good mixing properties.
min_fraction_of_examples_in_queue = 0.4
min_queue_examples = int(num_examples_per_epoch *
min_fraction_of_examples_in_queue)
# Generate a batch of images and labels by building up a queue of examples.
return _generate_image_and_label_batch(float_image, read_input.label,
min_queue_examples, batch_size,
shuffle=False)
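# Usage sketch (assumes the CIFAR-10 binaries have already been downloaded and
# extracted into data_dir; the queue-runner pattern below is the standard way
# these input ops are driven in TF 1.x):
#
#   images, labels = distorted_inputs(
#       data_dir='/tmp/cifar10_data/cifar-10-batches-bin', batch_size=128)
#   with tf.Session() as sess:
#     coord = tf.train.Coordinator()
#     threads = tf.train.start_queue_runners(sess=sess, coord=coord)
#     image_batch, label_batch = sess.run([images, labels])
#     coord.request_stop()
#     coord.join(threads)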
|
{
"content_hash": "a7627e8ed2a045ba18eb95e1e711b0e4",
"timestamp": "",
"source": "github",
"line_count": 244,
"max_line_length": 78,
"avg_line_length": 38.299180327868854,
"alnum_prop": 0.6806848582129481,
"repo_name": "rossumai/keras-multi-gpu",
"id": "97690af7fe81d6106dcfdd94ea454b83536050ea",
"size": "10061",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "keras_tf_multigpu/examples/avolkov1/cifar/tf_examples/cifar10_input.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "2807909"
},
{
"name": "Makefile",
"bytes": "686"
},
{
"name": "Python",
"bytes": "246579"
},
{
"name": "Shell",
"bytes": "13861"
}
],
"symlink_target": ""
}
|
import os
import json
import logging
import argparse
import requests
import xmltodict
from requests.auth import HTTPBasicAuth
from requests.exceptions import ConnectionError, Timeout
LOG = logging.getLogger(__name__)
#-------------------------------------------------------------------------------
# Class 'Controller'
#-------------------------------------------------------------------------------
class Controller():
""" Class that represents a Controller device. """
def __init__(self, **kwargs):
"""Initializes this object properties."""
if not kwargs.get('config'):
raise Exception('controller needs config properties')
# Check the config (and supply defaults)
self.config = self.check_config(kwargs['config'])
self.default_headers = {
'content-type': 'application/json', 'accept': 'application/json'}
def check_config(self, cfg):
"""Check properties and supply defaults."""
req_props = ['ip', 'port', 'username', 'password']
# defaults
props = { 'protocol': 'http', 'timeout': 30 }
for prop in req_props:
if prop not in cfg:
raise Exception('can\'t find property {0}'.format(prop))
# Update defaults with given props
props.update(cfg)
return props
def __str__(self):
""" Returns string representation of this object. """
return str(vars(self))
def to_string(self):
""" Returns string representation of this object. """
return self.__str__()
def to_json(self):
""" Returns JSON representation of this object. """
return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True,
indent=4)
def brief_json(self):
""" Returns JSON representation of this object (brief info). """
d = {'protocol': self.protocol,
'ipAddr': self.config['ip'],
'portNum': self.config['port'],
'adminName': self.config['username'],
'adminPassword': self.config['password']}
return json.dumps(d, default=lambda o: o.__dict__, sort_keys=True,
indent=4)
def http_get_request(self, url, headers=None, timeout=None):
""" Sends HTTP GET request to a remote server
and returns the response.
:param string url: The complete url including protocol:
http://www.example.com/path/to/resource
        :param dict headers: The headers to include in the request.
        :param string timeout: Pass a timeout for long-lived queries
:return: The response from the http request.
:rtype: None or `requests.response`
<http://docs.python-requests.org/en/latest/api/#requests.Response>
"""
if headers is None:
headers = self.default_headers
resp = None
if timeout is None:
timeout = self.config['timeout']
resp = requests.get(url,
auth=HTTPBasicAuth(self.config['username'],
self.config['password']),
data=None, headers=headers,
timeout=timeout)
if resp is not None:
if resp.status_code == 200:
LOG.debug("found {}".format(url))
elif resp.status_code == 404:
LOG.debug("not found {}".format(url))
else:
LOG.error("error getting {} message {}".format(url,resp.content))
return (resp)
def http_post_request(self, url, data, headers=None):
""" Sends HTTP POST request to a remote server
and returns the response.
:param string url: The complete url including protocol:
http://www.example.com/path/to/resource
:param string data: The data to include in the body of the request.
Typically set to None.
:param dict headers: The headers to include in the request.
:return: The response from the http request.
:rtype: None or `requests.response`
<http://docs.python-requests.org/en/latest/api/#requests.Response>
"""
if headers is None:
headers = self.default_headers
resp = None
resp = requests.post(url,
auth=HTTPBasicAuth(self.config['username'],
self.config['password']),
data=data, headers=headers,
timeout=self.config['timeout'])
if resp is not None:
if resp.status_code == 204:
LOG.debug("added {} {}".format(url,data))
else:
LOG.error("error posting {} data {} message {}".format(url,data,resp.content))
return (resp)
def http_put_request(self, url, data, headers=None):
""" Sends HTTP PUT request to a remote server
and returns the response.
:param string url: The complete url including protocol:
http://www.example.com/path/to/resource
:param string data: The data to include in the body of the request.
Typically set to None.
:param dict headers: The headers to include in the request.
:return: The response from the http request.
:rtype: None or `requests.response`
<http://docs.python-requests.org/en/latest/api/#requests.Response>
"""
if headers is None:
headers = self.default_headers
resp = None
resp = requests.put(url,
auth=HTTPBasicAuth(self.config['username'],
self.config['password']),
data=data, headers=headers,
timeout=self.config['timeout'])
if resp is not None:
if resp.status_code == 200:
LOG.debug("added {} {}".format(url,data))
else:
LOG.error("error posting {} data {} message {}".format(url,data,resp.content))
return (resp)
def http_delete_request(self, url, data=None, headers=None):
""" Sends HTTP DELETE request to a remote server
and returns the response.
:param string url: The complete url including protocol:
http://www.example.com/path/to/resource
:param string data: The data to include in the body of the request.
Typically set to None.
:param dict headers: The headers to include in the request.
:return: The response from the http request.
:rtype: None or `requests.response`
<http://docs.python-requests.org/en/latest/api/#requests.Response>
"""
if headers is None:
headers = self.default_headers
resp = None
resp = requests.delete(url,
auth=HTTPBasicAuth(self.config['username'],
self.config['password']),
data=data, headers=headers,
timeout=self.config['timeout'])
if resp is not None:
if resp.status_code == 200:
LOG.debug("delete {}".format(url))
elif resp.status_code == 404:
LOG.debug("delete {}".format(url))
else:
LOG.error("error posting {} message {}".format(url,resp.content))
return (resp)
def get_base_url(self):
return ("{}://{}:{}/restconf").format(
self.config['protocol'],self.config['ip'], self.config['port'])
def get_operational_url(self):
return self.get_base_url() + "/operational"
def get_config_url(self):
return self.get_base_url() + "/config"
def create_topology_stream(self):
# This calls RESTConf api to create a stream for notifications about changes
# in the topology in the operational data store
        # RETURNS: the stream name or None on failure
url = self.get_base_url() + '/operations/sal-remote:create-data-change-event-subscription'
headers = {'content-type': 'application/xml',
'accept': 'application/json'}
payload = '<input xmlns="urn:opendaylight:params:xml:ns:yang:controller:md:sal:remote"> \
<path xmlns:a="urn:TBD:params:xml:ns:yang:network-topology">/a:network-topology</path> \
<datastore xmlns="urn:sal:restconf:event:subscription">OPERATIONAL</datastore> \
<scope xmlns="urn:sal:restconf:event:subscription">SUBTREE</scope> \
</input>'
r = requests.post(url, data=payload, headers=headers, auth=HTTPBasicAuth(self.config['username'], self.config['password']))
streamName = r.text
#print streamName
if ('error' in streamName):
print "Error: " + streamName
return None
else:
streamName = r.json()
streamName = streamName['output']
streamName = streamName['stream-name']
return streamName
def subcribe_stream(self,streamName):
        # This calls the RESTConf api to subscribe to the stream at streamName
        # INPUT:
        #    streamName - name of the stream to subscribe to (as returned when creating it)
        # RETURNS: stream's url at which to listen with web socket
url = self.get_base_url() + '/streams/stream/' + streamName
#print url
headers = {'content-type': 'application/json',
'accept': 'application/json'}
r = requests.get(url, headers=headers, auth=HTTPBasicAuth(self.config['username'], self.config['password']))
streamListenUrl = r.headers['location']
return streamListenUrl
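# Usage sketch (address and credentials are placeholders): build a Controller,
# register a topology change stream and fetch the websocket URL to listen on.
if __name__ == '__main__':
    cfg = {'ip': '192.168.56.101', 'port': '8181',
           'username': 'admin', 'password': 'admin'}
    ctrl = Controller(config=cfg)
    stream_name = ctrl.create_topology_stream()
    if stream_name:
        listen_url = ctrl.subcribe_stream(stream_name)
        print(listen_url)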
|
{
"content_hash": "8a12e4b5e47aba2b08b7c7551e109131",
"timestamp": "",
"source": "github",
"line_count": 269,
"max_line_length": 131,
"avg_line_length": 39.059479553903344,
"alnum_prop": 0.5495384029694489,
"repo_name": "brocade-apj/anzsdnhackathon2016",
"id": "5a96b66a9bce610df84075aaf28372d99ed57478",
"size": "10507",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "srmanager/controller.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "198"
},
{
"name": "HTML",
"bytes": "7149"
},
{
"name": "JavaScript",
"bytes": "1863"
},
{
"name": "Makefile",
"bytes": "559"
},
{
"name": "Python",
"bytes": "77625"
}
],
"symlink_target": ""
}
|
"""The CleverHans adversarial example library"""
from cleverhans.devtools.version import append_dev_version
# If possible attach a hex digest to the version string to keep track of
# changes in the development branch
__version__ = append_dev_version("4.0.0")
|
{
"content_hash": "5bcd5d36328bb96a052dfb10e5c946a0",
"timestamp": "",
"source": "github",
"line_count": 6,
"max_line_length": 72,
"avg_line_length": 43.333333333333336,
"alnum_prop": 0.7730769230769231,
"repo_name": "cleverhans-lab/cleverhans",
"id": "519ff0442481857c28a87f77d0bf6582738ac7bc",
"size": "260",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cleverhans/__init__.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "242"
},
{
"name": "HTML",
"bytes": "64"
},
{
"name": "Makefile",
"bytes": "836"
},
{
"name": "Python",
"bytes": "1016809"
},
{
"name": "Shell",
"bytes": "2831"
}
],
"symlink_target": ""
}
|
from django.db.models.signals import post_save
from django.dispatch import receiver
from videos.models import Video, Rating, Analytic
@receiver(post_save, sender=Video)
def create_analytics(sender, instance, created, **kwargs):
if created:
Analytic.objects.create(video=instance)
@receiver(post_save, sender=Video)
def create_rating(sender, instance, created, **kwargs):
if created:
Rating.objects.create(video=instance)
|
{
"content_hash": "e130b04d98cd9d179374689a9ed96b74",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 58,
"avg_line_length": 28.0625,
"alnum_prop": 0.7505567928730512,
"repo_name": "lotube/lotube",
"id": "a5128374762548c3e780e59de57a1442a22729b4",
"size": "449",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lotube/videos/signals.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "8456"
},
{
"name": "Python",
"bytes": "65219"
}
],
"symlink_target": ""
}
|
from user_metrics.utils import nested_import
from user_metrics.config import settings
query_mod = nested_import(settings.__query_module__)
|
{
"content_hash": "ba60dd33bcb3b60a91618e3e70c7b4b5",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 52,
"avg_line_length": 35,
"alnum_prop": 0.8,
"repo_name": "wikimedia/user_metrics",
"id": "e604a4713acb64106dbd3cb8ae2b9744d447cfa1",
"size": "213",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "user_metrics/metrics/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "JavaScript",
"bytes": "58528"
},
{
"name": "Python",
"bytes": "314524"
},
{
"name": "Shell",
"bytes": "462"
}
],
"symlink_target": ""
}
|
import uuid
# Abstract class
class Primitive(object):
def __init__(self):
        self.is_success = False # flags whether the primitive's action resulted in a success or not
self.exception = None # holds the exception object if one was raised
self._id = uuid.uuid4()
def cancelIn(self, cancel):
pass
def __str__(self):
return '%s %s' % (str(self.__class__.__name__), self._id)
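# Illustrative sketch, not part of the original module: a minimal concrete
# primitive showing how subclasses are expected to use is_success and
# exception. The class name and the execute() hook are made up for the
# example; the real subclasses in this package define their own entry points.
class _ExamplePrimitive(Primitive):
    def execute(self, *args):
        try:
            # ... perform the transformation step here ...
            self.is_success = True
        except Exception as e:
            self.exception = e
            self.is_success = False
        return self.is_success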
|
{
"content_hash": "1ba86ea23bcf06683cf2c03133c9afe8",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 104,
"avg_line_length": 28.8125,
"alnum_prop": 0.5639913232104121,
"repo_name": "levilucio/SyVOLT",
"id": "4f5ce94eed3beef5aaa85de726b103a190d25001",
"size": "461",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "t_core/primitive.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "166159"
},
{
"name": "Python",
"bytes": "34207588"
},
{
"name": "Shell",
"bytes": "1118"
}
],
"symlink_target": ""
}
|
import enum
from django.conf import settings
from django.db import models
from django.utils.encoding import force_str
from django.utils.translation import gettext_lazy as _
postgresql_engine_names = [
'django.db.backends.postgresql',
'django.db.backends.postgresql_psycopg2',
]
if settings.DATABASES['default']['ENGINE'] in postgresql_engine_names:
from django.contrib.postgres.fields import JSONField as _JSONField
else:
from jsonfield.fields import JSONField as _JSONField
class JSONField(_JSONField):
def __init__(self, *args, **kwargs):
kwargs.update({'default': dict})
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
del kwargs['default']
return name, path, args, kwargs
class ChoiceEnumMeta(enum.EnumMeta):
def __call__(cls, value, *args, **kwargs):
if isinstance(value, str):
try:
value = cls.__members__[value]
except KeyError:
pass # let the super method complain
return super().__call__(value, *args, **kwargs)
def __new__(metacls, classname, bases, classdict):
labels = {}
for key in classdict._member_names:
source_value = classdict[key]
if isinstance(source_value, (list, tuple)):
try:
val, labels[key] = source_value
except ValueError:
raise ValueError("Invalid ChoiceEnum member '{}'".format(key))
else:
val = source_value
labels[key] = key.replace("_", " ").title()
# Use dict.__setitem__() to suppress defenses against
# double assignment in enum's classdict
dict.__setitem__(classdict, key, val)
cls = super().__new__(metacls, classname, bases, classdict)
for key, label in labels.items():
getattr(cls, key).label = label
return cls
@property
def choices(cls):
return [(k.value, k.label) for k in cls]
@property
def default(cls):
try:
return next(iter(cls))
except StopIteration:
return None
class ChoiceEnum(enum.Enum, metaclass=ChoiceEnumMeta):
"""
Utility class to handle choices in Django model and/or form fields.
Usage:
class Color(ChoiceEnum):
WHITE = 0, "White"
RED = 1, "Red"
GREEN = 2, "Green"
BLUE = 3, "Blue"
green = Color.GREEN
color = forms.ChoiceField(
choices=Color.choices,
default=Color.default,
)
"""
def __str__(self):
return force_str(self.label)
class ChoiceEnumField(models.PositiveSmallIntegerField):
description = _("Customer recognition state")
def __init__(self, *args, **kwargs):
        self.enum_type = kwargs.pop('enum_type', ChoiceEnum)  # fallback is required for migrations
if not issubclass(self.enum_type, ChoiceEnum):
raise ValueError("enum_type must be a subclass of `ChoiceEnum`.")
kwargs.update(choices=self.enum_type.choices)
kwargs.setdefault('default', self.enum_type.default)
super().__init__(*args, **kwargs)
def deconstruct(self):
name, path, args, kwargs = super().deconstruct()
if 'choices' in kwargs:
del kwargs['choices']
if kwargs['default'] is self.enum_type.default:
del kwargs['default']
elif isinstance(kwargs['default'], self.enum_type):
kwargs['default'] = kwargs['default'].value
return name, path, args, kwargs
def from_db_value(self, value, expression, connection):
try:
return self.enum_type(value)
except ValueError:
return value
def get_prep_value(self, state):
if isinstance(state, self.enum_type):
return state.value
return state
def to_python(self, state):
return self.enum_type(state)
def value_to_string(self, obj):
value = getattr(obj, self.name, obj)
if not isinstance(value, self.enum_type):
raise ValueError("Value must be of type {}".format(self.enum_type))
return value.name
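# Illustrative sketch, not part of the original module: declaring a concrete
# ChoiceEnum for use with ChoiceEnumField. The enum name and members are made
# up for the example; a model would then declare something like
#     state = ChoiceEnumField(enum_type=CustomerState)
class CustomerState(ChoiceEnum):
    UNRECOGNIZED = 0, "Unrecognized"
    GUEST = 1, "Guest"
    REGISTERED = 2, "Registered"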
|
{
"content_hash": "fca31d3f9a6dffe23e7e3145eb32eb42",
"timestamp": "",
"source": "github",
"line_count": 132,
"max_line_length": 100,
"avg_line_length": 32.11363636363637,
"alnum_prop": 0.5980184005661713,
"repo_name": "awesto/django-shop",
"id": "6248faf7aa67b7e9241540384f435ba28595fa06",
"size": "4239",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "shop/models/fields.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "8455"
},
{
"name": "HTML",
"bytes": "107122"
},
{
"name": "JavaScript",
"bytes": "51946"
},
{
"name": "Python",
"bytes": "588560"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('demo_models', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='bar',
name='foos',
field=models.ManyToManyField(blank=True, to='demo_models.Foo'),
preserve_default=True,
),
]
|
{
"content_hash": "a892efad9d2105098b140d34d6a3fd93",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 75,
"avg_line_length": 22.31578947368421,
"alnum_prop": 0.5849056603773585,
"repo_name": "AbhiAgarwal/django-report-builder",
"id": "17f4ca890358f99ca95f98706e342972dc1ec9f8",
"size": "448",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "report_builder_demo/demo_models/migrations/0002_bar_foos.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1799"
},
{
"name": "HTML",
"bytes": "27527"
},
{
"name": "JavaScript",
"bytes": "14634"
},
{
"name": "Python",
"bytes": "110452"
}
],
"symlink_target": ""
}
|
'''
fabfile_sample.py
edit this to your satisfaction, then move it into your project root as fabfile.py
usage:
$ fab dev pack deploy
$ fab dev uptime
'''
import os
from fabric.api import *
def dev():
env.user = 'nathaniel'
env.hosts = ['tycho']
env.virtualenv_dir = '/home/nathaniel/conf/virtualenvs/pipeline'
env.supervisord_config = '/home/nathaniel/conf/tycho/supervisord.conf'
# some supervisor installs need sudo
env.supervisorctl_with_sudo = False
def pack():
# create a new source distribution as a tarball
local('python setup.py sdist --formats=gztar', capture=False)
def deploy():
# determine release name and version
dist = local('python setup.py --fullname', capture=True).strip()
# upload the source tarball and unzip
put('dist/%s.tar.gz' % dist, '/tmp/pipeline.tar.gz')
run('mkdir /tmp/pipeline')
with cd('/tmp/pipeline'):
run('tar xzf /tmp/pipeline.tar.gz')
# setup the package with the virtualenv
with cd('/tmp/pipeline/%s' % dist):
python = os.path.join(env.virtualenv_dir, 'bin/python')
run('%s setup.py install' % python)
# re-install requirements.txt
#run('pip install -r requirements.txt -E %s' % env.virtualenv_dir)
# delete the temporary folder
run('rm -rf /tmp/pipeline /tmp/pipeline.tar.gz')
if env.supervisorctl_with_sudo:
# restart the server..
sudo('supervisorctl restart lejos')
# restart the job queue..
sudo('supervisorctl restart rqscheduler')
sudo('supervisorctl restart rqworker:')
else:
# restart the server..
run('supervisorctl restart lejos')
# restart the job queue..
run('supervisorctl restart rqscheduler')
run('supervisorctl restart rqworker:')
def logs():
''' view logs
supervisord redirects stderr and stdout to this path
'''
run('tail /tmp/pipeline.log')
def nginx(command):
''' nginx controls
'''
if command == 'start':
sudo('/etc/init.d/nginx start')
elif command == 'stop':
sudo('/etc/init.d/nginx stop')
elif command == 'restart':
nginx('stop')
nginx('start')
else:
print 'hm, did not quite understand that nginx command'
''' misc
'''
def host_info():
print 'checking lsb_release of host: '
run('lsb_release -a')
def uptime():
run('uptime')
def grep_python():
run('ps aux | grep python')
|
{
"content_hash": "a470af7b8841dd9c9f484b926b5ac4dc",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 79,
"avg_line_length": 26.93548387096774,
"alnum_prop": 0.6191616766467066,
"repo_name": "aquaya/pipeline",
"id": "dacf0c9914256bbda059a1030eee9305d78e6277",
"size": "2505",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "conf/fabfile_sample.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "23321"
},
{
"name": "Python",
"bytes": "294961"
}
],
"symlink_target": ""
}
|
import unittest
import io
import os
import tempfile
from kivy import setupconfig
class ImageTestCase(unittest.TestCase):
def setUp(self):
from kivy.core.window import Window
from kivy.core.image import Image
import os
self.cls = Image
self.image = os.path.join(os.path.dirname(__file__), 'test_button.png')
print(self.image)
self.root = Image(self.image)
def test_keep_data(self):
root = self.root
texture = root.texture
self.assertEqual(root._image._data[0].data, None)
i1 = self.cls(self.image, keep_data=True)
if not i1._image._data[0].data:
self.fail('Image has no data even with keep_data = True')
@unittest.skip("Travis on Xenial don't have SDL_image >= 2.0.5")
def test_save_into_bytesio(self):
Image = self.cls
if setupconfig.PLATFORM == "darwin":
            # XXX on OSX CI Builder, img_sdl2 is not used,
            # therefore the test below won't work yet with imageio only.
return
# load kivy logo
img = Image.load("data/logo/kivy-icon-512.png")
self.assertIsNotNone(img)
# try to save without any format
with self.assertRaises(Exception) as context:
bio = io.BytesIO()
img.save(bio)
# save it in png
bio = io.BytesIO()
# if False, then there is no provider
self.assertTrue(img.save(bio, fmt="png"))
pngdata = bio.read()
self.assertTrue(len(pngdata) > 0)
# try to save in a filename
try:
_, filename = tempfile.mkstemp(suffix=".png")
self.assertTrue(img.save(filename, fmt="png"))
finally:
os.unlink(filename)
# XXX Test wrote but temporary commented
# XXX because of the issue #6123 on OSX
# XXX https://github.com/kivy/kivy/issues/6123
# with open(filename, "rb") as fd2:
# pngdatafile = fd2.read()
# # check the png file data is the same as bytesio
# self.assertTrue(pngdata == pngdatafile)
# save it in jpeg
bio = io.BytesIO()
# if False, then there is no provider
self.assertTrue(img.save(bio, fmt="jpg"))
self.assertTrue(len(bio.read()) > 0)
with tempfile.NamedTemporaryFile(suffix=".jpg") as fd:
self.assertTrue(img.save(fd.name))
|
{
"content_hash": "9d445c79fa91af52363b8e1a115c9b2a",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 79,
"avg_line_length": 32.527027027027025,
"alnum_prop": 0.5911923556294142,
"repo_name": "matham/kivy",
"id": "8883be4857fb2ea1020fea3862dc74de04d70819",
"size": "2407",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "kivy/tests/test_image.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "324418"
},
{
"name": "C++",
"bytes": "3888"
},
{
"name": "Emacs Lisp",
"bytes": "9838"
},
{
"name": "GLSL",
"bytes": "323"
},
{
"name": "Makefile",
"bytes": "4993"
},
{
"name": "Objective-C",
"bytes": "21550"
},
{
"name": "PowerShell",
"bytes": "5375"
},
{
"name": "Python",
"bytes": "4204346"
},
{
"name": "Shell",
"bytes": "25365"
},
{
"name": "Vim script",
"bytes": "2120"
}
],
"symlink_target": ""
}
|
from django.conf.urls import url, include
from taxonomy.models import Taxon, Rank
from references.models import Reference, Author, AuthorOrder
from rest_framework import routers, serializers, viewsets
from rest_framework.authtoken import views
# Serializers define the API representation.
class TaxonSerializer(serializers.HyperlinkedModelSerializer):
rank = serializers.PrimaryKeyRelatedField(queryset=Rank.objects.all())
ref = serializers.PrimaryKeyRelatedField(queryset=Reference.objects.all(), required=False)
parent = serializers.PrimaryKeyRelatedField(queryset=Taxon.objects.all(), required=False)
#label and value are not required, but are used by jquery autocomplete
label = serializers.CharField(source="__unicode__", required=False)
value = serializers.IntegerField(source="id", required=False)
class Meta:
model = Taxon
fields = ('url', 'name', 'rank', 'ref', 'parent', 'label', 'value')
# ViewSets define the view behavior.
class TaxonList(viewsets.ModelViewSet):
serializer_class = TaxonSerializer
def get_queryset(self):
queryset = Taxon.objects.all()
term = self.request.query_params.get('term', None)
if term is not None:
queryset = queryset.filter(name__icontains=term)
return queryset
class FullTaxonHierarchySerializer(serializers.HyperlinkedModelSerializer):
taxClass = serializers.CharField()
order = serializers.CharField()
family = serializers.CharField()
subfamily = serializers.CharField()
tribe = serializers.CharField()
genus = serializers.CharField()
species = serializers.CharField()
class Meta:
model = Taxon
fields = ('id','taxClass','order','family','subfamily','tribe','genus','species')
class FullTaxonHierarchyList(viewsets.ModelViewSet):
serializer_class = FullTaxonHierarchySerializer
def get_queryset(self):
queryset = Taxon.objects.all()
name = self.request.query_params.get('name', None)
if name is not None:
queryset = queryset.filter(name__icontains=name)
return queryset
class ReferenceSerializer(serializers.HyperlinkedModelSerializer):
label = serializers.CharField(source='__unicode__')
value = serializers.IntegerField(source='id')
class Meta:
model = Reference
fields = ('label', 'value')
class ReferenceList(viewsets.ModelViewSet):
serializer_class = ReferenceSerializer
# set up to work with jQuery autocomplete, which sends a query string called "term"
def get_queryset(self):
queryset = Reference.objects.all()
term = self.request.query_params.get('term', None)
if term is not None:
queryset = queryset.filter(authors__lastName__icontains=term)
return queryset
class AuthorSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Author
fields = ('id', '__unicode__')
class AuthorList(viewsets.ModelViewSet):
serializer_class = AuthorSerializer
queryset = Author.objects.all()
class AuthorOrderSerializer(serializers.HyperlinkedModelSerializer):
authorString = serializers.CharField(source='author')
referenceString = serializers.CharField(source='reference')
class Meta:
model = AuthorOrder
fields = ('id', 'authorString', 'referenceString', 'orderNumber')
class AuthorOrderList(viewsets.ModelViewSet):
serializer_class = AuthorOrderSerializer
queryset = AuthorOrder.objects.all()
class RankSerializer(serializers.HyperlinkedModelSerializer):
label = serializers.CharField(source="name")
value = serializers.CharField(source="id")
class Meta:
model = Rank
fields = ('label', 'value')
class RankList(viewsets.ModelViewSet):
serializer_class = RankSerializer
def get_queryset(self):
queryset = Rank.objects.all()
term = self.request.query_params.get('term', None)
if term is not None:
queryset = queryset.filter(name__icontains=term)
return queryset
# Routers provide an easy way of automatically determining the URL conf.
router = routers.DefaultRouter()
router.register(r'taxa', TaxonList, base_name="taxon")
router.register(r'ranks', RankList, base_name="rank")
router.register(r'references', ReferenceList, base_name="reference")
router.register(r'authors', AuthorList, base_name="author")
router.register(r'author_orders', AuthorOrderList, base_name="author_order")
router.register(r'full_taxon_hierarchy', FullTaxonHierarchyList, base_name="full_taxon_hierarchy")
urlpatterns = [
url(r'^', include(router.urls)),
    ## the next two are the login and logout views for the browsable API
url(r'^api-token-auth/', views.obtain_auth_token),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
]
|
{
"content_hash": "60464a903a0ad201e451ade756693baa",
"timestamp": "",
"source": "github",
"line_count": 123,
"max_line_length": 98,
"avg_line_length": 39.390243902439025,
"alnum_prop": 0.7145510835913312,
"repo_name": "wabarr/census-paleo",
"id": "364aec24d155e89287a54251847cc6673c0f5cef",
"size": "4845",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "RESTful/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "56630"
},
{
"name": "HTML",
"bytes": "77157"
},
{
"name": "JavaScript",
"bytes": "21426"
},
{
"name": "Python",
"bytes": "124811"
}
],
"symlink_target": ""
}
|
"""
.. py:currentmodule:: leepstools.file.__init__
.. moduleauthor:: Hendrix Demers <hendrix.demers@mail.mcgill.ca>
Package for reading and writing LEEPS input / output files.
"""
###############################################################################
# Copyright 2017 Hendrix Demers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# Standard library modules.
# Third party modules.
# Local modules.
# Project modules.
# Globals and constants variables.
|
{
"content_hash": "7024090f8fcde4622076f8a7225eec28",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 79,
"avg_line_length": 31.606060606060606,
"alnum_prop": 0.6337488015340365,
"repo_name": "drix00/leepstools",
"id": "104f274c30b6f88173de6c40f9118e3c51ae8041",
"size": "1090",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "leepstools/file/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "2307"
},
{
"name": "Python",
"bytes": "51384"
}
],
"symlink_target": ""
}
|
"""
Copyright (c) 2009 Barry Schwartz
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import fontforge
def cap_spacing(font, caps_names, expansion):
advance_widths = {}
for n in caps_names:
if n in font:
w = font[n].width
if w in advance_widths:
advance_widths[w].append(n)
else:
advance_widths[w] = [n]
rules = ''
for w in sorted(advance_widths):
advance_increase = int(round(expansion * w))
pos = int(advance_increase / 2)
glyphs = advance_widths[w]
rules += ' pos '
if len(glyphs) == 1:
rules += '\\' + glyphs[0]
else:
rules += '[ '
for g in glyphs:
rules += '\\' + g + ' '
rules += ']'
rules += ' <' + str(pos) + ' 0 ' + str(advance_increase) + ' 0>;\n'
return rules
|
{
"content_hash": "90ffdb88406d704f6925a0c9e4461fd8",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 77,
"avg_line_length": 36.72549019607843,
"alnum_prop": 0.6518953550453818,
"repo_name": "chemoelectric/sortsmill",
"id": "b8c95e80e274fab59c92bf1a8d5cce524d519fee",
"size": "1898",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tools/cap_spacing.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "OCaml",
"bytes": "170116"
},
{
"name": "Python",
"bytes": "215416"
},
{
"name": "Shell",
"bytes": "100328"
}
],
"symlink_target": ""
}
|
import os
import sys
import smtplib
import mimetypes
from email.Encoders import encode_base64
from email.MIMEBase import MIMEBase
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
# =====================================
# CONFIG
TEST = False
MAIL_FROM = 'your-nas@host.tld'
MAIL_TO = 'you@host.tld'
MAIL_HOST = 'smtp.host.tld'
MAIL_PORT = 25 # 465 SSL/TLS
MAIL_USER = 'user'
MAIL_PASS = 'pass'
MAIL_ALL_OK = True # Send an email even if everything is OK
CMD_SVCS = '/usr/bin/svcs'
CMD_ZPOOL = '/sbin/zpool'
CMD_SMARTCTL = '/opt/smartmon/sbin/smartctl'
DISKS = [
'/dev/rdsk/c8t1d0'
, '/dev/rdsk/c8t2d0'
, '/dev/rdsk/c8t3d0'
]
# =====================================
# LIB
# open a connection to the SMTP-Server
def initSMTP():
try:
# USE SSL/TLS INSTEAD
#s = smtplib.SMTP_SSL(MAIL_HOST, MAIL_PORT)
s = smtplib.SMTP(MAIL_HOST, MAIL_PORT)
# DETAILED TRACE
# s.set_debuglevel(1)
s.login(MAIL_USER, MAIL_PASS)
return s
except Exception, e:
print e
sys.exit(1)
# close the SMTP-connection
def closeSMTP(s):
s.quit()
s.close()
# send an E-Mail using specified SMTP-Connection
def sendMail(s, subj, body):
try:
msg = MIMEMultipart()
msg['From'] = MAIL_FROM
msg['To'] = MAIL_TO
msg['Subject'] = subj
msg.attach(MIMEText(body))
s.sendmail(MAIL_FROM, MAIL_TO.split(";"), msg.as_string())
except Exception, e:
print e
sys.exit(1)
# execute a command and return its output
def cmd(c):
try:
proc = os.popen(c)
out = proc.read().strip()
return out
except Exception, e:
print e
sys.exit(1)
# create a summary-text of failed-command's output and additional details
def summary(failed, details):
s = failed
s += "\n----------\n\n"
s += details
return s
# =====================================
# START
alert = False
# connect to SMTP
s = initSMTP()
# Services
svcsX = cmd(CMD_SVCS + ' -x')
if TEST or len(svcsX) > 0:
alert = True
svcsXV = cmd(CMD_SVCS + ' -x -v')
txt = summary(svcsX, svcsXV)
sendMail(s, "[NAS] Services failed", txt)
# ZFS Pool checking
zpoolStatusX = cmd(CMD_ZPOOL + ' status -x')
if TEST or zpoolStatusX.find("all pools are healthy") == -1:
alert = True
zpoolStatus = cmd(CMD_ZPOOL + ' status')
txt = summary(zpoolStatusX, zpoolStatus)
sendMail(s, "[NAS] ZFS Pool Status", txt)
# SMART checking
for disk in DISKS:
smartHealth = cmd(CMD_SMARTCTL + ' --health -d sat,12 ' + disk)
if TEST or smartHealth.find("PASSED") == -1:
alert = True
smart = cmd(CMD_SMARTCTL + ' --all -d sat,12 ' + disk)
txt = summary(smartHealth, smart)
sendMail(s, "[NAS] S.M.A.R.T. " + disk, txt)
# OK
if alert == False and MAIL_ALL_OK == True:
sendMail(s, "[NAS] O.K.", "Everything is fine")
closeSMTP(s)
sys.exit(0)
|
{
"content_hash": "8300538141a759a7c6c8eacc4a13b433",
"timestamp": "",
"source": "github",
"line_count": 129,
"max_line_length": 73,
"avg_line_length": 21.930232558139537,
"alnum_prop": 0.6164722516790385,
"repo_name": "hotzen/SolarStatus",
"id": "50e2b6e5eabc44fe5de7e2a375e978db15f16b3b",
"size": "2892",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "extra/alerter.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "15658"
},
{
"name": "CSS",
"bytes": "6858"
},
{
"name": "JavaScript",
"bytes": "64835"
},
{
"name": "PHP",
"bytes": "31471"
},
{
"name": "Perl",
"bytes": "46128"
},
{
"name": "Python",
"bytes": "2892"
}
],
"symlink_target": ""
}
|
import subprocess
import shlex
from shutil import copy
from sys import argv, stdout, stderr, exit
from platform import dist
from os import getuid, path, access, W_OK
class Color:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
class Formatter:
def __init__(self, fp=None, fmt=[]):
self.fp = fp
self.fmt = fmt
def write(self, text):
_text = ''
for i in self.fmt:
_text += i if len(i) > 0 else text
self.fp.write(_text)
_stderr = stderr
stderr = Formatter(fp=_stderr, fmt=[Color.FAIL, '', Color.ENDC])
def _check_uid(uid=0):
"""
    check that the current process is running as the given uid (default 0, i.e. root)
"""
return (uid == getuid())
def _check_dist(distribution='ubuntu'):
"""
check linux distribution
"""
current_dist = dist()
return (distribution == current_dist[0].lower()
if len(current_dist) > 0 else False)
def _check_apt(package, print_err=False):
"""
    check whether a package is installed, according to the dpkg list
"""
command = ' '.join(['dpkg', '-L', package])
command = shlex.split(command)
process = subprocess.Popen(
command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = process.communicate()
if err or 'not installed' in out.lower():
if print_err:
print >> stderr, ('Cannot find {0} package ' +
'in your aptitude package list. ') \
.format(package)
return False
return True
def install():
"""
install fancyindex conf
"""
if not path.isfile('/etc/nginx/fancyindex.conf'):
copy('fancy/fancyindex.conf', '/etc/nginx/fancyindex.conf')
print >> stdout, 'Install complete. Execute "install.py' + \
' template /path/somewhere" to copy template.'
exit(0)
def template(directory):
"""
    copy the fancyindex template files to the specified directory
"""
if not path.isdir(directory) or not access(directory, W_OK):
print >> stderr, 'Path not exists / cannot get write permission.'
exit(1)
copy('fancy/fancy-header.html', path.join(directory,
'fancy-header.html'))
copy('fancy/fancy-footer.html', path.join(directory,
'fancy-footer.html'))
copy('fancy/fancy.css', path.join(directory, 'fancy.css'))
print >> stdout, 'Copy complete. Add "include fancyindex.conf;" to' + \
' your nginx web server configuration file.'
def main():
if not _check_uid():
print >> stderr, 'only root can run this script. ' + \
'recommend: run this script via sudo.'
exit(1)
if not _check_dist():
print >> stderr, 'ubuntu only'
exit(1)
if not ((_check_apt('nginx') & _check_apt('nginx-extras')) |
_check_apt('nginx-full')) or not path.isdir('/etc/nginx'):
        print >> stderr, 'Cannot find nginx, nginx-extras or nginx-full ' + \
            'package in your ubuntu aptitude package list.'
exit(1)
command = 'install' if len(argv) < 2 else argv[1]
try:
func = globals()[command]
except KeyError:
print >> stderr, 'unknown command: ', command
else:
args = argv[2:]
func(*args)
if __name__ == '__main__':
main()
|
{
"content_hash": "cba0797a3577f8d2b2ae81065317fe1a",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 76,
"avg_line_length": 29.299145299145298,
"alnum_prop": 0.5606767794632439,
"repo_name": "ssut/ngx-fancyindex-stylisher",
"id": "be9c97d0e126fa21c393bad7dbd63fef68db2c09",
"size": "3428",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "install.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "1183"
},
{
"name": "Python",
"bytes": "3428"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, division, print_function, \
unicode_literals
import json
from datetime import date, datetime, time, tzinfo
from typing import Hashable, Iterable, Mapping, Optional, Sequence, Sized, Text, \
Union
from dateutil.parser import parse as dateutil_parse
from dateutil.tz import tzoffset
from pytz import utc
from six import binary_type, python_2_unicode_compatible, text_type
from filters.base import BaseFilter, Type
from filters.number import Int, Max, Min
__all__ = [
'Array',
'ByteArray',
'Choice',
'Date',
'Datetime',
'Empty',
'Length',
'MaxLength',
'MinLength',
'NoOp',
'NotEmpty',
'Optional',
'Required',
]
class Array(Type):
"""
Validates that the incoming value is a non-string sequence.
"""
def __init__(self, aliases=None):
# type: (Optional[Mapping[type, Text]]) -> None
super(Array, self).__init__(Sequence, True, aliases)
def _apply(self, value):
value = super(Array, self)._apply(value) # type: Sequence
if self._has_errors:
return None
if isinstance(value, (binary_type, text_type)):
return self._invalid_value(
value = value,
reason = self.CODE_WRONG_TYPE,
template_vars = {
'incoming': self.get_type_name(type(value)),
'allowed': self.get_allowed_type_names(),
},
)
return value
class ByteArray(BaseFilter):
"""
Converts an incoming value into a bytearray.
"""
CODE_BAD_ENCODING = 'bad_encoding'
templates = {
CODE_BAD_ENCODING:
'Unable to encode this value using {encoding}.',
}
def __init__(self, encoding='utf-8'):
# type: (Text) -> None
"""
:param encoding:
The encoding to use when decoding strings into bytes.
"""
super(ByteArray, self).__init__()
self.encoding = encoding
def _apply(self, value):
value = self._filter(value, Type(Iterable))
if self._has_errors:
return None
if isinstance(value, bytearray):
return value
if isinstance(value, binary_type):
return bytearray(value)
if isinstance(value, text_type):
try:
return bytearray(value, encoding=self.encoding)
except UnicodeEncodeError:
return self._invalid_value(
value = value,
reason = self.CODE_BAD_ENCODING,
template_vars = {
'encoding': self.encoding,
},
)
from filters.complex import FilterRepeater
filtered = self._filter(value, FilterRepeater(
# Only allow ints and booleans.
Type(int)
# Convert booleans to int (Min and Max require an
# exact type match).
| Int
# Min value for each byte is 2^0-1.
| Min(0)
# Max value for each byte is 2^8-1.
| Max(255)
))
if self._has_errors:
return None
return bytearray(filtered)
@python_2_unicode_compatible
class Choice(BaseFilter):
"""
Expects the value to match one of the items in a set.
Note: When matching string values, the comparison is case-
sensitive! Use the :py:class:`CaseFold` Filter if you want to
perform a case-insensitive comparison.
"""
CODE_INVALID = 'not_valid_choice'
templates = {
CODE_INVALID: 'Valid options are: {choices}',
}
def __init__(self, choices):
# type: (Iterable[Hashable]) -> None
super(Choice, self).__init__()
self.choices = set(choices)
def __str__(self):
return '{type}({choices!r})'.format(
type = type(self).__name__,
# Use JSON to mask Python syntax (e.g., remove "u" prefix
# on unicode strings in Python 2).
# :py:meth:`Type.__init__`
choices = json.dumps(sorted(self.choices)),
)
def _apply(self, value):
if value not in self.choices:
return self._invalid_value(
value = value,
reason = self.CODE_INVALID,
exc_info = True,
template_vars = {
'choices': sorted(self.choices),
},
)
return value
@python_2_unicode_compatible
class Datetime(BaseFilter):
"""
Interprets the value as a UTC datetime.
"""
CODE_INVALID = 'not_datetime'
templates = {
CODE_INVALID:
'This value does not appear to be a datetime.',
}
def __init__(self, timezone=None, naive=False):
# type: (Optional[Union[tzinfo, int, float]], bool) -> None
"""
:param timezone:
Specifies the timezone to use when the *incoming* value is
a naive timestamp. Has no effect on timezone-aware
timestamps.
IMPORTANT: The result is always converted to UTC,
regardless of the value of the ``timezone`` param!
You can provide an int/float here, which is the offset from
UTC in hours (e.g., 5 = UTC+5).
:param naive:
If True, the filter will *return* naive datetime objects
(sans tzinfo). This is useful e.g., for datetimes that
will be stored in a database that doesn't understand aware
timestamps.
IMPORTANT: Incoming values are still converted to UTC
before stripping tzinfo!
"""
super(Datetime, self).__init__()
if not isinstance(timezone, tzinfo):
if timezone in [0, None]:
timezone = utc
else:
# Assume that we got an int/float instead.
timezone = tzoffset(
name = 'UTC{offset:+}'.format(offset=timezone),
offset = float(timezone) * 3600.0,
)
self.timezone = timezone
self.naive = naive
def __str__(self):
return '{type}(timezone={timezone!r}, naive={naive!r})'.format(
type = type(self).__name__,
timezone = self.timezone,
naive = self.naive,
)
def _apply(self, value):
if isinstance(value, datetime):
parsed = value
elif isinstance(value, date):
# http://stackoverflow.com/a/1937636
parsed = datetime.combine(value, time.min)
else:
try:
#
# It's a shame we can't pass ``tzinfos`` to
# :py:meth:`dateutil_parse.parse`; ``tzinfos`` only has
# effect if we also specify ``ignoretz = True``, which
# we definitely don't want to do here!
#
# https://dateutil.readthedocs.org/en/latest/parser.html#dateutil.parser.parse
#
parsed = dateutil_parse(value)
except ValueError:
return self._invalid_value(
value = value,
reason = self.CODE_INVALID,
exc_info = True,
)
if not parsed.tzinfo:
parsed = parsed.replace(tzinfo=self.timezone)
        # Always convert to UTC.
aware_result = parsed.astimezone(utc)
return (
aware_result.replace(tzinfo=None)
if self.naive
else aware_result
)
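# Illustrative sketch, not part of the original module: the timezone argument
# only affects naive inputs, as described in the docstring above. Constructing
# a filter that treats naive timestamps as UTC+5 and returns naive UTC
# datetimes; the variable name is made up for the example.
_example_datetime_filter = Datetime(timezone=5, naive=True)
# When this filter processes a naive input such as '2015-06-27 10:06:32', the
# result is datetime(2015, 6, 27, 5, 6, 32): the value is interpreted as
# UTC+5, converted to UTC, then stripped of tzinfo because naive=True.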
class Date(Datetime):
"""
Interprets the value as a UTC date.
Note that the value is first converted to a datetime with UTC
timezone, which may cause the resulting date to appear to be
off by +/- 1 day (does not apply if the value is already a date
object).
"""
CODE_INVALID = 'not_date'
templates = {
CODE_INVALID: 'This value does not appear to be a date.',
}
def _apply(self, value):
if isinstance(value, date) and not isinstance(value, datetime):
return value
filtered = super(Date, self)._apply(value) # type: datetime
# Normally we return `None` if we get any errors, but in this
# case, we'll let the superclass method decide.
return filtered if self._has_errors else filtered.date()
class Empty(BaseFilter):
"""
Expects the value to be empty.
In this context, "empty" is defined as having zero length. Note
that this Filter considers values that do not have length to be
not empty (in particular, False and 0 are not considered empty
here).
"""
CODE_NOT_EMPTY = 'not_empty'
templates = {
CODE_NOT_EMPTY: 'Empty value expected.',
}
def _apply(self, value):
try:
length = len(value)
except TypeError:
length = 1
return (
self._invalid_value(value, self.CODE_NOT_EMPTY)
if length
else value
)
@python_2_unicode_compatible
class Length(BaseFilter):
"""
Ensures incoming values have exactly the right length.
"""
CODE_TOO_LONG = 'too_long'
CODE_TOO_SHORT = 'too_short'
templates = {
CODE_TOO_LONG:
'Value is too long (length must be exactly {expected}).',
CODE_TOO_SHORT:
'Value is too short (length must be exactly {expected}).',
}
def __init__(self, length):
# type: (int) -> None
super(Length, self).__init__()
self.length = length
def __str__(self):
return '{type}(length={length!r})'.format(
type = type(self).__name__,
length = self.length,
)
def _apply(self, value):
value = self._filter(value, Type(Sized))
if self._has_errors:
return None
if len(value) > self.length:
return self._invalid_value(
value = value,
reason = self.CODE_TOO_LONG,
template_vars = {
'expected': self.length,
},
)
elif len(value) < self.length:
return self._invalid_value(
value = value,
reason = self.CODE_TOO_SHORT,
template_vars = {
'expected': self.length,
},
)
return value
@python_2_unicode_compatible
class MaxLength(BaseFilter):
"""
Enforces a maximum length on the value.
"""
CODE_TOO_LONG = 'too_long'
templates = {
CODE_TOO_LONG: 'Value is too long (length must be < {max}).',
}
def __init__(self, max_length):
# type: (int) -> None
super(MaxLength, self).__init__()
self.max_length = max_length
def __str__(self):
return '{type}({max_length!r})'.format(
type = type(self).__name__,
max_length = self.max_length,
)
def _apply(self, value):
if len(value) > self.max_length:
# Note that we do not truncate the value:
# - It's not always clear which end we should truncate
# from.
# - We should keep this filter's behavior consistent with
# that of MinLength.
return self._invalid_value(
value = value,
reason = self.CODE_TOO_LONG,
template_vars = {
'length': len(value),
'max': self.max_length,
},
)
return value
class MinLength(BaseFilter):
"""
Enforces a minimum length on the value.
"""
CODE_TOO_SHORT = 'too_short'
templates = {
CODE_TOO_SHORT: 'Value is too short (length must be > {min}).',
}
def __init__(self, min_length):
# type: (int) -> None
super(MinLength, self).__init__()
self.min_length = min_length
def __str__(self):
return '{type}({min_length!r})'.format(
type = type(self).__name__,
min_length = self.min_length,
)
def _apply(self, value):
if len(value) < self.min_length:
#
# Note that we do not pad the value:
# - It is not clear to which end(s) we should add the
# padding.
# - It is not clear what the padding value(s) should be.
# - We should keep this filter's behavior consistent with
# that of MaxLength.
#
return self._invalid_value(
value = value,
reason = self.CODE_TOO_SHORT,
template_vars = {
'length': len(value),
'min': self.min_length,
},
)
return value
class NoOp(BaseFilter):
"""
Filter that does nothing, used when you need a placeholder Filter
in a FilterChain.
"""
def _apply(self, value):
return value
@python_2_unicode_compatible
class NotEmpty(BaseFilter):
"""
Expects the value not to be empty.
In this context, "empty" is defined as having zero length. Note
that this filter considers values that do not have length to be
not empty (in particular, False and 0 are not considered empty
here).
By default, this filter treats ``None`` as valid, just like every
other filter. However, you can configure the filter to reject
``None`` in its initializer method.
"""
CODE_EMPTY = 'empty'
templates = {
CODE_EMPTY: 'Non-empty value expected.',
}
def __init__(self, allow_none=True):
# type: (bool) -> None
"""
:param allow_none:
Whether to allow ``None``.
"""
super(NotEmpty, self).__init__()
self.allow_none = allow_none
def __str__(self):
return '{type}(allow_none={allow_none!r})'.format(
type = type(self).__name__,
allow_none = self.allow_none,
)
def _apply(self, value):
try:
length = len(value)
except TypeError:
length = 1
return value if length else self._invalid_value(value, self.CODE_EMPTY)
def _apply_none(self):
if not self.allow_none:
return self._invalid_value(None, self.CODE_EMPTY)
return None
class Required(NotEmpty):
"""
Same as NotEmpty, but with ``allow_none`` hard-wired to ``False``.
This filter is the only exception to the "``None`` passes by
default" rule.
"""
templates = {
NotEmpty.CODE_EMPTY: 'This value is required.',
}
def __init__(self):
super(Required, self).__init__(allow_none=False)
@python_2_unicode_compatible
class Optional(BaseFilter):
"""
Changes empty and null values into a default value.
In this context, "empty" is defined as having zero length. Note
that this Filter considers values that do not have length to be
not empty (in particular, False and 0 are not considered empty
here).
"""
def __init__(self, default=None):
"""
:param default:
The default value used to replace empty values.
"""
super(Optional, self).__init__()
self.default = default
def __str__(self):
return '{type}(default={default!r})'.format(
type = type(self).__name__,
default = self.default,
)
def _apply(self, value):
try:
length = len(value)
except TypeError:
length = 1
return value if length > 0 else self.default
def _apply_none(self):
return self.default
|
{
"content_hash": "500cdee665f6770c399936d96d605f71",
"timestamp": "",
"source": "github",
"line_count": 573,
"max_line_length": 94,
"avg_line_length": 27.904013961605585,
"alnum_prop": 0.5314278566514479,
"repo_name": "eflglobal/filters",
"id": "b80a811fd0c1a8c2e3d8fc2390312f8f7fc9f007",
"size": "16004",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "filters/simple.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "266107"
}
],
"symlink_target": ""
}
|
from bot import fetch_submission, get_comments, extract_urls
import shelve
submission = fetch_submission()
comments = get_comments(submission)
# shelve doesn't support unicode keys
sub_id = submission.id.encode('utf-8')
d = shelve.open('cache/urls.data', writeback=True)
if sub_id not in d:
d[sub_id] = {'urls': set(), 'comment_ids': set()}
for comment in comments:
if comment.id in d[sub_id]['comment_ids'] or getattr(comment, 'body', None) is None:
continue
urls = extract_urls(comment.body)
if not urls:
continue
d[sub_id]['urls'].update(urls)
d[sub_id]['comment_ids'].add(comment.id)
d.sync()
d.close()
|
{
"content_hash": "833f6f0db03e94ff88ec4b7a34506f03",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 88,
"avg_line_length": 23.392857142857142,
"alnum_prop": 0.6687022900763359,
"repo_name": "yaph/threaddit",
"id": "ee3f7b5cb714683e316f5c248bc6cd3b12344584",
"size": "746",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "threaddit/urls.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "3584"
}
],
"symlink_target": ""
}
|
"""
This sample shows how to create a list in json
of all items in a group
Python 2.x
ArcREST 3.0.1
"""
import arcrest
import os
import json
from arcresthelper import orgtools, common
import csv
def trace():
"""
trace finds the line, the filename
and error message and returns it
to the user
"""
    import traceback, inspect, sys
tb = sys.exc_info()[2]
tbinfo = traceback.format_tb(tb)[0]
filename = inspect.getfile(inspect.currentframe())
# script name + line number
line = tbinfo.split(", ")[1]
# Get Python syntax error
#
synerror = traceback.format_exc().splitlines()[-1]
return line, filename, synerror
def _unicode_convert(obj):
""" converts unicode to anscii """
if isinstance(obj, dict):
return {_unicode_convert(key): _unicode_convert(value) for key, value in obj.items()}
elif isinstance(obj, list):
return [_unicode_convert(element) for element in obj]
elif isinstance(obj, unicode):
return obj.encode('utf-8')
else:
return obj
if __name__ == "__main__":
proxy_port = None
proxy_url = None
securityinfo = {}
securityinfo['security_type'] = 'Portal'#LDAP, NTLM, OAuth, Portal, PKI
securityinfo['username'] = ""#<UserName>
securityinfo['password'] = ""#<Password>
securityinfo['org_url'] = "http://www.arcgis.com"
securityinfo['proxy_url'] = proxy_url
securityinfo['proxy_port'] = proxy_port
securityinfo['referer_url'] = None
securityinfo['token_url'] = None
securityinfo['certificatefile'] = None
securityinfo['keyfile'] = None
securityinfo['client_id'] = None
securityinfo['secret_id'] = None
groups = ["Demographic Content"] #Name of groups
outputlocation = r"C:\TEMP"
outputfilename = "group.json"
outputitemID = "id.csv"
try:
orgt = orgtools.orgtools(securityinfo)
groupRes = []
if orgt.valid:
fileName = os.path.join(outputlocation,outputfilename)
csvFile = os.path.join(outputlocation,outputitemID)
iconPath = os.path.join(outputlocation,"icons")
if not os.path.exists(iconPath):
os.makedirs(iconPath)
file = open(fileName, "w")
with open(csvFile, 'wb') as csvfile:
idwriter = csv.writer(csvfile, delimiter=',',
quotechar='|', quoting=csv.QUOTE_MINIMAL)
for groupName in groups:
results = orgt.getGroupContent(groupName=groupName,
onlyInOrg=True,
onlyInUser=True)
if not results is None:
for result in results:
idwriter.writerow([result['title'],result['id']])
thumbLocal = orgt.getThumbnailForItem(itemId=result['id'],
fileName=result['title'],
filePath=iconPath)
result['thumbnail']=thumbLocal
groupRes.append(result)
if len(groupRes) > 0:
print "%s items found" % str(len(groupRes))
groupRes = _unicode_convert(groupRes)
file.write(json.dumps(groupRes, ensure_ascii=False, sort_keys=True, indent=4, separators=(',', ': ')))
file.close()
except (common.ArcRestHelperError),e:
print "error in function: %s" % e[0]['function']
print "error on line: %s" % e[0]['line']
print "error in file name: %s" % e[0]['filename']
print "with error message: %s" % e[0]['synerror']
if 'arcpyError' in e[0]:
print "with arcpy message: %s" % e[0]['arcpyError']
except:
line, filename, synerror = trace()
print "error on line: %s" % line
print "error in file name: %s" % filename
print "with error message: %s" % synerror
|
{
"content_hash": "0e618419e938405264e6126f57a6312e",
"timestamp": "",
"source": "github",
"line_count": 107,
"max_line_length": 122,
"avg_line_length": 38.308411214953274,
"alnum_prop": 0.5530617223713101,
"repo_name": "DShokes/ArcREST",
"id": "8f742e3e3e8eb1fcdbaefff723a4698296a4b6ce",
"size": "4099",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "samples/report_content_in_groups.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "48383"
},
{
"name": "Python",
"bytes": "2174554"
}
],
"symlink_target": ""
}
|
import os
import time
from microbenchmarks import benchmark_plan_list
from utils import dump2json
class MicroBenchmark:
suite_name = 'microbenchmarks'
config = {
'cuda': {
'enable': True
},
'vulkan': {
'enable': False
},
'opengl': {
'enable': False
}
}
def __init__(self):
self._results = {}
self._info = {}
def get_benchmark_info(self):
info_dict = {}
arch_list = []
for arch, item in self.config.items():
if item['enable'] == True:
arch_list.append(arch)
info_dict['archs'] = arch_list
return info_dict
def run(self):
for arch, item in self.config.items():
if item['enable'] == True:
arch_results = {}
self._info[arch] = {}
for plan in benchmark_plan_list:
plan_impl = plan(arch)
results = plan_impl.run()
self._info[arch][plan_impl.name] = results['info']
arch_results[plan_impl.name] = results['results']
self._results[arch] = arch_results
def save_as_json(self, suite_dir='./'):
for arch in self._results:
arch_dir = os.path.join(suite_dir, arch)
os.makedirs(arch_dir, exist_ok=True)
self._save_info_as_json(arch, arch_dir)
self._save_cases_as_json(arch, arch_dir)
def _save_info_as_json(self, arch, arch_dir='./'):
info_path = os.path.join(arch_dir, '_info.json')
with open(info_path, 'w') as f:
print(dump2json(self._info[arch]), file=f)
def _save_cases_as_json(self, arch, arch_dir='./'):
for case in self._info[arch]:
case_path = os.path.join(arch_dir, (case + '.json'))
case_results = self._results[arch][case]
with open(case_path, 'w') as f:
case_str = dump2json(case_results)
print(case_str, file=f)
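# Illustrative sketch, not part of the original module: the intended driver
# flow for this suite. The output directory is made up for the example, and
# the wrapper function only exists so nothing runs at import time.
def _example_run(output_dir='./results/microbenchmarks'):
    suite = MicroBenchmark()
    print(suite.get_benchmark_info())  # e.g. {'archs': ['cuda']} with the default config
    suite.run()                        # executes every plan in benchmark_plan_list
    suite.save_as_json(suite_dir=output_dir)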
|
{
"content_hash": "4c5c5370dcb90f87c48caebfda9bc4a4",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 70,
"avg_line_length": 31.060606060606062,
"alnum_prop": 0.5073170731707317,
"repo_name": "yuanming-hu/taichi",
"id": "dc8e8305404c1f28dee47a087497eb85c45bb105",
"size": "2050",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "benchmarks/suite_microbenchmarks.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "66677"
},
{
"name": "C++",
"bytes": "3713898"
},
{
"name": "CMake",
"bytes": "69354"
},
{
"name": "Cuda",
"bytes": "20566"
},
{
"name": "GLSL",
"bytes": "10756"
},
{
"name": "Makefile",
"bytes": "994"
},
{
"name": "PowerShell",
"bytes": "9227"
},
{
"name": "Python",
"bytes": "2209929"
},
{
"name": "Shell",
"bytes": "12216"
}
],
"symlink_target": ""
}
|
import unittest
import mock
from test import safe_repr
from test.unit import MockTrue
from swift.common.swob import HTTPBadRequest, Request, HTTPException
from swift.common.http import HTTP_REQUEST_ENTITY_TOO_LARGE, \
HTTP_BAD_REQUEST, HTTP_LENGTH_REQUIRED
from swift.common import constraints
class TestConstraints(unittest.TestCase):
def assertIn(self, member, container, msg=None):
"""Copied from 2.7"""
if member not in container:
standardMsg = '%s not found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def test_check_metadata_empty(self):
headers = {}
self.assertEquals(constraints.check_metadata(Request.blank(
'/', headers=headers), 'object'), None)
def test_check_metadata_good(self):
headers = {'X-Object-Meta-Name': 'Value'}
self.assertEquals(constraints.check_metadata(Request.blank(
'/', headers=headers), 'object'), None)
def test_check_metadata_empty_name(self):
headers = {'X-Object-Meta-': 'Value'}
self.assert_(constraints.check_metadata(Request.blank(
'/', headers=headers), 'object'), HTTPBadRequest)
def test_check_metadata_name_length(self):
name = 'a' * constraints.MAX_META_NAME_LENGTH
headers = {'X-Object-Meta-%s' % name: 'v'}
self.assertEquals(constraints.check_metadata(Request.blank(
'/', headers=headers), 'object'), None)
name = 'a' * (constraints.MAX_META_NAME_LENGTH + 1)
headers = {'X-Object-Meta-%s' % name: 'v'}
self.assertEquals(constraints.check_metadata(Request.blank(
'/', headers=headers), 'object').status_int, HTTP_BAD_REQUEST)
self.assertIn(
('X-Object-Meta-%s' % name).lower(),
constraints.check_metadata(Request.blank(
'/', headers=headers), 'object').body.lower())
def test_check_metadata_value_length(self):
value = 'a' * constraints.MAX_META_VALUE_LENGTH
headers = {'X-Object-Meta-Name': value}
self.assertEquals(constraints.check_metadata(Request.blank(
'/', headers=headers), 'object'), None)
value = 'a' * (constraints.MAX_META_VALUE_LENGTH + 1)
headers = {'X-Object-Meta-Name': value}
self.assertEquals(constraints.check_metadata(Request.blank(
'/', headers=headers), 'object').status_int, HTTP_BAD_REQUEST)
self.assertIn(
'x-object-meta-name',
constraints.check_metadata(Request.blank(
'/', headers=headers),
'object').body.lower())
self.assertIn(
str(constraints.MAX_META_VALUE_LENGTH),
constraints.check_metadata(Request.blank(
'/', headers=headers),
'object').body)
def test_check_metadata_count(self):
headers = {}
for x in xrange(constraints.MAX_META_COUNT):
headers['X-Object-Meta-%d' % x] = 'v'
self.assertEquals(constraints.check_metadata(Request.blank(
'/', headers=headers), 'object'), None)
headers['X-Object-Meta-Too-Many'] = 'v'
self.assertEquals(constraints.check_metadata(Request.blank(
'/', headers=headers), 'object').status_int, HTTP_BAD_REQUEST)
def test_check_metadata_size(self):
headers = {}
size = 0
chunk = constraints.MAX_META_NAME_LENGTH + \
constraints.MAX_META_VALUE_LENGTH
x = 0
while size + chunk < constraints.MAX_META_OVERALL_SIZE:
headers['X-Object-Meta-%04d%s' %
(x, 'a' * (constraints.MAX_META_NAME_LENGTH - 4))] = \
'v' * constraints.MAX_META_VALUE_LENGTH
size += chunk
x += 1
self.assertEquals(constraints.check_metadata(Request.blank(
'/', headers=headers), 'object'), None)
# add two more headers in case adding just one falls exactly on the
        # limit (e.g. one header adds 1024 and the limit is 2048)
headers['X-Object-Meta-%04d%s' %
(x, 'a' * (constraints.MAX_META_NAME_LENGTH - 4))] = \
'v' * constraints.MAX_META_VALUE_LENGTH
headers['X-Object-Meta-%04d%s' %
(x + 1, 'a' * (constraints.MAX_META_NAME_LENGTH - 4))] = \
'v' * constraints.MAX_META_VALUE_LENGTH
self.assertEquals(constraints.check_metadata(Request.blank(
'/', headers=headers), 'object').status_int, HTTP_BAD_REQUEST)
def test_check_object_creation_content_length(self):
headers = {'Content-Length': str(constraints.MAX_FILE_SIZE),
'Content-Type': 'text/plain'}
self.assertEquals(constraints.check_object_creation(Request.blank(
'/', headers=headers), 'object_name'), None)
headers = {'Content-Length': str(constraints.MAX_FILE_SIZE + 1),
'Content-Type': 'text/plain'}
self.assertEquals(constraints.check_object_creation(
Request.blank('/', headers=headers), 'object_name').status_int,
HTTP_REQUEST_ENTITY_TOO_LARGE)
headers = {'Transfer-Encoding': 'chunked',
'Content-Type': 'text/plain'}
self.assertEquals(constraints.check_object_creation(Request.blank(
'/', headers=headers), 'object_name'), None)
headers = {'Content-Type': 'text/plain'}
self.assertEquals(constraints.check_object_creation(
Request.blank('/', headers=headers), 'object_name').status_int,
HTTP_LENGTH_REQUIRED)
def test_check_object_creation_name_length(self):
headers = {'Transfer-Encoding': 'chunked',
'Content-Type': 'text/plain'}
name = 'o' * constraints.MAX_OBJECT_NAME_LENGTH
self.assertEquals(constraints.check_object_creation(Request.blank(
'/', headers=headers), name), None)
name = 'o' * (constraints.MAX_OBJECT_NAME_LENGTH + 1)
self.assertEquals(constraints.check_object_creation(
Request.blank('/', headers=headers), name).status_int,
HTTP_BAD_REQUEST)
def test_check_object_creation_content_type(self):
headers = {'Transfer-Encoding': 'chunked',
'Content-Type': 'text/plain'}
self.assertEquals(constraints.check_object_creation(Request.blank(
'/', headers=headers), 'object_name'), None)
headers = {'Transfer-Encoding': 'chunked'}
self.assertEquals(constraints.check_object_creation(
Request.blank('/', headers=headers), 'object_name').status_int,
HTTP_BAD_REQUEST)
def test_check_object_creation_bad_content_type(self):
headers = {'Transfer-Encoding': 'chunked',
'Content-Type': '\xff\xff'}
resp = constraints.check_object_creation(
Request.blank('/', headers=headers), 'object_name')
self.assertEquals(resp.status_int, HTTP_BAD_REQUEST)
self.assert_('Content-Type' in resp.body)
def test_check_mount(self):
self.assertFalse(constraints.check_mount('', ''))
with mock.patch("swift.common.constraints.ismount", MockTrue()):
self.assertTrue(constraints.check_mount('/srv', '1'))
self.assertTrue(constraints.check_mount('/srv', 'foo-bar'))
self.assertTrue(constraints.check_mount(
'/srv', '003ed03c-242a-4b2f-bee9-395f801d1699'))
self.assertFalse(constraints.check_mount('/srv', 'foo bar'))
self.assertFalse(constraints.check_mount('/srv', 'foo/bar'))
self.assertFalse(constraints.check_mount('/srv', 'foo?bar'))
def test_check_float(self):
self.assertFalse(constraints.check_float(''))
self.assertTrue(constraints.check_float('0'))
def test_check_utf8(self):
unicode_sample = u'\uc77c\uc601'
valid_utf8_str = unicode_sample.encode('utf-8')
invalid_utf8_str = unicode_sample.encode('utf-8')[::-1]
unicode_with_null = u'abc\u0000def'
utf8_with_null = unicode_with_null.encode('utf-8')
for false_argument in [None,
'',
invalid_utf8_str,
unicode_with_null,
utf8_with_null]:
self.assertFalse(constraints.check_utf8(false_argument))
for true_argument in ['this is ascii and utf-8, too',
unicode_sample,
valid_utf8_str]:
self.assertTrue(constraints.check_utf8(true_argument))
def test_validate_bad_meta(self):
req = Request.blank(
'/v/a/c/o',
headers={'x-object-meta-hello':
'ab' * constraints.MAX_HEADER_SIZE})
self.assertEquals(constraints.check_metadata(req, 'object').status_int,
HTTP_BAD_REQUEST)
self.assertIn('x-object-meta-hello', constraints.check_metadata(req,
'object').body.lower())
def test_validate_constraints(self):
c = constraints
self.assertTrue(c.MAX_META_OVERALL_SIZE > c.MAX_META_NAME_LENGTH)
self.assertTrue(c.MAX_META_OVERALL_SIZE > c.MAX_META_VALUE_LENGTH)
self.assertTrue(c.MAX_HEADER_SIZE > c.MAX_META_NAME_LENGTH)
self.assertTrue(c.MAX_HEADER_SIZE > c.MAX_META_VALUE_LENGTH)
def test_validate_copy_from(self):
req = Request.blank(
'/v/a/c/o',
headers={'x-copy-from': 'c/o2'})
src_cont, src_obj = constraints.check_copy_from_header(req)
self.assertEqual(src_cont, 'c')
self.assertEqual(src_obj, 'o2')
req = Request.blank(
'/v/a/c/o',
headers={'x-copy-from': 'c/subdir/o2'})
src_cont, src_obj = constraints.check_copy_from_header(req)
self.assertEqual(src_cont, 'c')
self.assertEqual(src_obj, 'subdir/o2')
req = Request.blank(
'/v/a/c/o',
headers={'x-copy-from': '/c/o2'})
src_cont, src_obj = constraints.check_copy_from_header(req)
self.assertEqual(src_cont, 'c')
self.assertEqual(src_obj, 'o2')
def test_validate_bad_copy_from(self):
req = Request.blank(
'/v/a/c/o',
headers={'x-copy-from': 'bad_object'})
self.assertRaises(HTTPException,
constraints.check_copy_from_header, req)
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "feb55c1826ab9744c7784d2e307e116f",
"timestamp": "",
"source": "github",
"line_count": 234,
"max_line_length": 79,
"avg_line_length": 45.22649572649573,
"alnum_prop": 0.5873570821128224,
"repo_name": "gotostack/swift",
"id": "aaf5269ac79d50973814da47ea7f085e2056b6ff",
"size": "11178",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "test/unit/common/test_constraints.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
import unittest
import json
from six.moves.urllib import parse as urlparse
from six.moves import cStringIO as StringIO
import requests
from httmock import urlmatch, HTTMock
import ipfsApi.commands
import ipfsApi.exceptions
@urlmatch(netloc='localhost:5001', path=r'.*/simple')
def cmd_simple(url, request):
return {
'status_code': 200,
'content': json.dumps({
'Message': 'okay',
}).encode('utf-8'),
}
@urlmatch(netloc='localhost:5001', path=r'.*/arg')
def cmd_with_arg(url, request):
qs = urlparse.parse_qs(url.query)
return {
'status_code': 200,
'content': json.dumps({
'Message': 'okay',
'Arg': qs['arg'],
}).encode('utf-8'),
}
@urlmatch(netloc='localhost:5001', path=r'.*/file')
def cmd_with_file(url, request):
return {
'status_code': 200,
'content': json.dumps({
'Message': 'okay',
'Body': request.body.decode('utf-8'),
}).encode('utf-8'),
}
class TestCommands(unittest.TestCase):
def setUp(self):
self._client = ipfsApi.http.HTTPClient(
'localhost',
5001,
'api/v0',
'json')
@ipfsApi.commands.Command('/simple')
def simple(req, **kwargs):
return req(**kwargs)
def test_simple_command(self):
with HTTMock(cmd_simple):
res = self.simple()
self.assertEquals(res['Message'], 'okay')
@ipfsApi.commands.ArgCommand('/arg')
def with_arg(req, *args, **kwargs):
return req(*args, **kwargs)
def test_arg_command(self):
with HTTMock(cmd_with_arg):
res = self.with_arg('arg1')
self.assertEquals(res['Arg'][0], 'arg1')
@ipfsApi.commands.FileCommand('/file')
def with_file(req, files, **kwargs):
return req(files, **kwargs)
def test_file_command_fd(self):
data = 'content\ngoes\nhere'
fd = StringIO(data)
with HTTMock(cmd_with_file):
res = self.with_file(fd)
self.assertTrue(data in res['Body'])
|
{
"content_hash": "fdb965a567abbcdd00e19589c5dcd674",
"timestamp": "",
"source": "github",
"line_count": 80,
"max_line_length": 53,
"avg_line_length": 26.2625,
"alnum_prop": 0.569728700618753,
"repo_name": "moreati/python-ipfs-api",
"id": "5c84e399646f502742bee304579ebcae49457644",
"size": "2101",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/unit/test_command.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "35737"
}
],
"symlink_target": ""
}
|
import sys
import zipfile
import os.path
import xml.etree.ElementTree
from rdflib import Graph
from rdflib import Namespace
import urllib
from rdflib.term import URIRef
Scufl2NS = Namespace("http://ns.taverna.org.uk/2010/scufl2#")
NS_CONTAINER="{urn:oasis:names:tc:opendocument:xmlns:container}"
import rdflib
class Scufl2(object):
def __init__(self, filename=None):
self.filename = filename
if filename is not None:
self.zip = zipfile.ZipFile(filename)
self.check_mime_type()
self.parse()
def check_mime_type(self):
mimetype = self.zip.open("mimetype").read()
if mimetype != "application/vnd.taverna.scufl2.workflow-bundle":
raise Scufl2Error("Unknown mimetype %r" % mimetype)
def _is_valid_prefix(self, filename):
valid_prefixes = ["workflow/", "annotation/", "profile/"]
for prefix in valid_prefixes:
if filename.startswith(prefix):
return True
return False
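    # Note: parse_all_graphs below prefers the Turtle (.ttl) serialisation of each graph
    # and only falls back to the RDF/XML (.rdf) file when no .ttl with the same base
    # name exists in the bundle.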
def parse_all_graphs(self, sameAs):
filenames = {}
for filename in self.zip.namelist():
if not self._is_valid_prefix(filename):
continue
if filename.endswith(".ttl"):
base = filename[:-4]
filenames[base] = filename
elif filename.endswith(".rdf"):
base = filename[:-4]
if not base in filenames:
filenames[base] = filename
for name in filenames:
filename = filenames[name]
rdf_file = self.zip.open(filename)
format = "n3"
if filename.endswith(".rdf"):
format = "xml"
base = sameAs + filename
self.graph.parse(rdf_file, base, format=format)
def parse(self):
if "workflowBundle.ttl" in self.zip.namelist():
format = "n3"
rootfile = "workflowBundle.ttl"
elif "workflowBundle.rdf" in self.zip.namelist():
rootfile = "workflowBundle.rdf"
format = "xml"
else:
raise Scufl2Error("Can't find workflowBundle.ttl or "
"workflowBundle.rdf")
self.uri = "file://" + urllib.pathname2url(os.path.abspath(self.filename)) + "/"
early_graph = Graph()
rdf_file = self.zip.open(rootfile)
early_graph.parse(rdf_file, self.uri, format=format)
sameBaseAs = list(early_graph.objects(subject=URIRef(self.uri), predicate=Scufl2NS.sameBaseAs))
if not sameBaseAs:
# Fall back to the file:/// URIs
self.graph = early_graph
else:
# Use the sameBaseAs as the base
self.uri = sameBaseAs[0]
self.graph = Graph()
# Reparse it
rdf_file = self.zip.open(rootfile)
self.graph.parse(rdf_file, self.uri, format=format)
self.parse_all_graphs(self.uri)
class Scufl2Error(Exception):
pass
def main(prg="processorNames.py", filename=None):
if filename is None:
filename = os.path.join(os.path.dirname(prg), "..", "..", "..", "..",
"scufl2-rdfxml", "src", "test", "resources",
"uk", "org", "taverna","scufl2","rdfxml", "example.wfbundle")
scufl2 = Scufl2(filename)
for workflowUri in scufl2.graph.objects(predicate=Scufl2NS.workflow):
for name in scufl2.graph.objects(workflowUri, Scufl2NS.name):
print name
for processorUri in scufl2.graph.objects(workflowUri, Scufl2NS.processor):
for name in scufl2.graph.objects(processorUri, Scufl2NS.name):
print "---", name, processorUri
if __name__ == "__main__":
main(*sys.argv)
|
{
"content_hash": "45813da71af0a75519f375193a914426",
"timestamp": "",
"source": "github",
"line_count": 113,
"max_line_length": 103,
"avg_line_length": 33.46017699115044,
"alnum_prop": 0.5770960063475271,
"repo_name": "taverna-incubator/incubator-taverna-language",
"id": "d1d62fa7027c77c39c4b1943cabb0a9ff34bae48",
"size": "4597",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "taverna-scufl2-examples/src/main/python/processorNames.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "1841161"
},
{
"name": "Python",
"bytes": "4597"
},
{
"name": "Ruby",
"bytes": "2927"
},
{
"name": "Shell",
"bytes": "916"
},
{
"name": "Web Ontology Language",
"bytes": "77566"
}
],
"symlink_target": ""
}
|
"""
Module containing all exceptions thrown througout the git package,
"""
class InvalidGitRepositoryError(Exception):
"""
Thrown if the given repository appears to have an invalid format.
"""
class NoSuchPathError(Exception):
"""
Thrown if a path could not be access by the system.
"""
class GitCommandError(Exception):
"""
Thrown if execution of the git command fails with non-zero status code.
"""
def __init__(self, command, status, stderr=None):
self.stderr = stderr
self.status = status
self.command = command
def __str__(self):
return repr("%s returned exit status %d" %
(str(self.command), self.status))
|
{
"content_hash": "178a2a5e89961d0ade998e623fd1653c",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 75,
"avg_line_length": 26.444444444444443,
"alnum_prop": 0.6358543417366946,
"repo_name": "kergoth/git-origin",
"id": "2632d5f34c27210fc5090d78659e68e23cddc807",
"size": "932",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "git-python/lib/git/errors.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "128684"
}
],
"symlink_target": ""
}
|
"""
Developed by Robin Bhattacharyya (memecache for GAE)
Released under the web2py license (LGPL)
from gluon.contrib.gae_memcache import MemcacheClient
cache.ram=cache.disk=MemcacheClient(request)
"""
import time
from google.appengine.api.memcache import Client
class MemcacheClient(Client):
def __init__(self, request):
self.request = request
Client.__init__(self)
def __call__(
self,
key,
f,
time_expire=300,
):
key = '%s/%s' % (self.request.application, key)
dt = time_expire
value = None
obj = self.get(key)
        if obj and (dt is None or obj[0] > time.time() - dt):
value = obj[1]
elif f is None:
if obj:
self.delete(key)
else:
value = f()
self.set(key, (time.time(), value))
return value
def increment(self, key, value=1):
key = '%s/%s' % (self.request.application, key)
obj = self.get(key)
if obj:
value = obj[1] + value
            self.set(key, (time.time(), value))
return value
def clear(self, key):
key = '%s/%s' % (self.request.application, key)
self.delete(key)
|
{
"content_hash": "55e0864358aa720fa7b02306839e96ec",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 61,
"avg_line_length": 23.71153846153846,
"alnum_prop": 0.5425790754257908,
"repo_name": "SEA000/uw-empathica",
"id": "920690d6f0014f101c266a76fe6aad2a1264de47",
"size": "1280",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "empathica/gluon/contrib/gae_memcache.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "127034"
},
{
"name": "JavaScript",
"bytes": "981904"
},
{
"name": "PHP",
"bytes": "15326"
},
{
"name": "Python",
"bytes": "3911190"
},
{
"name": "Shell",
"bytes": "31485"
}
],
"symlink_target": ""
}
|
from prettyupload import PrettyUpload
from buildbot.process.properties import WithProperties
def conf(c):
pass
def factory(f):
f.addStep(PrettyUpload('/thisfile/does/not/exist',
masterdest='public_html/eexist.txt',
haltOnFailure=False))
f.addStep(PrettyUpload('/etc/debian_version',
masterdest='public_html/debian_version.txt',
flunkOnFailure=True))
f.addStep(PrettyUpload('/usr/share/pixmaps/debian-logo.png',
masterdest=WithProperties('public_html/logo-%(buildnumber)s.png')))
f.addStep(PrettyUpload('/etc/debian_version',
masterdest='debian_version.txt'))
|
{
"content_hash": "236d67f2c1cdf7b920287e0d5617fdad",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 94,
"avg_line_length": 37.3,
"alnum_prop": 0.5978552278820375,
"repo_name": "wanders/buildbot-extras",
"id": "ec02262b358ffa815bd45f41d6d9a97b03e49e37",
"size": "747",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "utils/tests/prettyupload/bbtest.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "21395"
},
{
"name": "Shell",
"bytes": "2497"
}
],
"symlink_target": ""
}
|
import glob
import os
import re
import time
import types
import plugin
import telepot
import telepot.namedtuple
when = '9am'
telegram, util = plugin.get("telegram", "util")
def msg_cb(self, msg):
content_type, chat_type, chat_id = telepot.glance(msg)
print("content_type, chat_type, chat_id", content_type, chat_type, chat_id)
k = telegram.Keyboard()
if content_type == 'location':
print("location", msg['location'])
k.hide_keyboard()
self.bot.sendMessage(chat_id, **k.get_message_params("Got it!"))
elif content_type == 'text' and "🏠 Go home" == msg['text']:
k.hide_keyboard()
self.bot.sendMessage(chat_id, **k.get_message_params('Welcome home'))
# Youtube-dl
#
elif content_type == 'text' and "/youtube-dl " in msg['text']:
        tmp = re.findall(r'(http.+?)(?:\s|$)', msg['text'], re.M)
if tmp:
telegram.send_video(chat_id, tmp[0])
k.hide_keyboard()
self.bot.sendMessage(chat_id, **k.get_message_params('youtube-dl'))
elif content_type == 'text' and 'http' in msg['text']:
        tmp = re.findall(r'(http.+?)(?:\s|$)', msg['text'], re.M)
if tmp:
url = tmp[0]
if any([d in url for d in telegram.youtube_dl_sites]):
k.add("🎞 /youtube-dl \n" + tmp[0])
k.add("🏠 Go home", callback_data="home")
self.bot.sendMessage(chat_id, **k.get_message_params('Sent'))
"""
else:
k.add('Plain text')
k.add('Phone', request_contact=True)
k.add('Location', request_location=True)
self.bot.sendMessage(chat_id, **k.get_message_params("This is custom keyboard"))
"""
def run():
telegram.msg_received_cb = types.MethodType(msg_cb, telegram)
while True:
time.sleep(60)
|
{
"content_hash": "f36975fce7155218ebc2315db2450e95",
"timestamp": "",
"source": "github",
"line_count": 59,
"max_line_length": 88,
"avg_line_length": 30.847457627118644,
"alnum_prop": 0.5785714285714286,
"repo_name": "alobbs/autome",
"id": "ff48adb5a2a38f30b63841785dcd22f78339e02e",
"size": "1829",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "threads/telebot.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "38536"
}
],
"symlink_target": ""
}
|
import hashlib, time, random
def generate_nonce(bits, randomness=None):
"This could be stronger"
if bits%8 != 0:
raise ValueError, "bits must be a multiple of 8"
nonce = hashlib.sha1(str(randomness) + str(time.time()) +
str(random.random()) ).hexdigest()
nonce = nonce[:bits/4]
return nonce
def parse_keqv_list(l):
"""Parse list of key=value strings where keys are not duplicated."""
parsed = {}
for elt in l:
k, v = elt.split('=', 1)
if v[0] == '"' and v[-1] == '"':
v = v[1:-1]
parsed[k] = v
return parsed
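# Quick illustration of parse_keqv_list (the input values are hypothetical):
#   parse_keqv_list(['username="anthony"', 'qop=auth', 'nc=00000001'])
#   -> {'username': 'anthony', 'qop': 'auth', 'nc': '00000001'}
# Quoted values have their surrounding double quotes stripped; bare values stay as-is.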
class DigestAuthServer:
def __init__(self, default_realm, algorithm="MD5"):
self.default_realm = default_realm
if algorithm != 'MD5':
raise ValueError, "Don't know about algorithm %s"%(algorithm)
self.algorithm = algorithm
self._user_hashes = {}
def get_algorithm_impls(self, algorithm=None):
# lambdas assume digest modules are imported at the top level
if algorithm is None:
algorithm = self.algorithm
if algorithm == 'MD5':
H = lambda x: hashlib.md5(x).hexdigest()
elif algorithm == 'SHA':
H = lambda x: hashlib.sha1(x).hexdigest()
# XXX MD5-sess
KD = lambda s, d, H=H: H("%s:%s" % (s, d))
return H, KD
def add_user(self, user, password, realm=None):
"add the given user and password"
H, KD = self.get_algorithm_impls()
if realm is None:
realm = self.default_realm
A1 = H('%s:%s:%s'%(user, realm, password))
self._user_hashes[(user, realm)] = A1
def add_user_hash(self, user, A1, realm=None):
"add the given user with the stated hash"
if realm is None:
realm = self.default_realm
self._user_hashes[(user, realm)] = A1
def parse_apache_digest_authfile(self, filename):
"Parse a password file, as generated by htdigest"
for line in open(filename, 'rU'):
line = line.strip()
user, realm, hash = line.split(':')
self.add_user_hash(user, hash, realm)
def generate_challenge(self, realm=None):
if realm is None:
realm = self.default_realm
# We should save off the nonce to make sure it's one we've
# offered already. And check for replay attacks :-(
chal = 'realm="%s", nonce="%s", ' \
'algorithm=%s, qop="auth"'%(realm,
generate_nonce(bits=208),
self.algorithm)
return chal
# Firebird
# username="anthony", realm="TestAuth",
# nonce="9da7db19648f95bd71f26a07b3423d91917b5205", uri="/test/foo",
# algorithm=MD5, response="f61ca0cb8a85e9bd985b7ab808978f1e",
# qop=auth, nc=00000001, cnonce="424a1ed1ddaa76ca"
# Konqi
# username="anthony", realm="TestAuth",
# nonce="7c8bdda0ed44db7de74bee97cec8dfd4fb59af0f", uri="/test/foo",
# algorithm="MD5", qop="auth", cnonce="ODQwMTk=", nc=00000001,
# response="1bebadb47d2aa5eab53cb419b94599f3"
def check_auth(self, header, method='GET'):
"Check a response to our auth challenge"
from urllib2 import parse_http_list
H, KD = self.get_algorithm_impls()
resp = parse_keqv_list(parse_http_list(header))
algo = resp.get('algorithm', 'MD5').upper()
if algo != self.algorithm:
return False, "unknown algo %s"%algo
user = resp['username']
realm = resp['realm']
nonce = resp['nonce']
# XXX Check the nonce is something we've issued
HA1 = self._user_hashes.get((user,realm))
if not HA1:
return False, "unknown user/realm %s/%s"%(user, realm)
qop = resp.get('qop')
if qop != 'auth':
return False, "unknown qop %r"%(qop)
cnonce, ncvalue = resp.get('cnonce'), resp.get('nc')
if not cnonce or not ncvalue:
return False, "failed to provide cnonce"
# Check the URI is correct!
A2 = '%s:%s'%(method, resp['uri'])
noncebit = "%s:%s:%s:%s:%s" % (nonce,ncvalue,cnonce,qop,H(A2))
respdig = KD(HA1, noncebit)
if respdig != resp['response']:
return False, "response incorrect"
print "all ok"
return True, "OK"
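# Sketch of the client-side computation that check_auth() verifies (illustrative only;
# the names below are hypothetical and mirror RFC 2617 with qop=auth and MD5):
#   HA1      = MD5("user:realm:password")          # same value add_user() stores
#   HA2      = MD5("GET:/test/foo")                # method ":" request-uri
#   response = MD5(HA1 + ":" + nonce + ":" + nc + ":" + cnonce + ":auth:" + HA2)
# A request whose Authorization header carries this response, together with the same
# nonce/nc/cnonce values, satisfies the respdig comparison above.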
|
{
"content_hash": "45f4e5d92b14bf8dc08d10a4ef14bb32",
"timestamp": "",
"source": "github",
"line_count": 117,
"max_line_length": 73,
"avg_line_length": 37.07692307692308,
"alnum_prop": 0.573997233748271,
"repo_name": "habnabit/divmod-sine",
"id": "bb73ad752c4040aafa4b763f180e362d03427245",
"size": "4338",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "xshtoom/digestauth.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "379734"
}
],
"symlink_target": ""
}
|
import asyncio
import base64
from datetime import datetime
import functools
import hashlib
from itertools import repeat
import json
from urllib.parse import urlsplit, parse_qsl
import webbrowser
import aiohttp
class CustomDecoder(json.JSONDecoder):
def __init__(self, *args, **kargs):
super(CustomDecoder, self).__init__(
object_hook=self.dict_to_object, *args, **kargs)
def dict_to_object(self, d):
if '__type__' not in d:
return d
type = d.pop('__type__')
try:
dateobj = datetime(**d)
return dateobj
except:
d['__type__'] = type
return d
class CustomEncoder(json.JSONEncoder):
""" Instead of letting the default encoder convert datetime to string,
convert datetime objects into a dict, which can be decoded by the
DateTimeDecoder
"""
def default(self, obj):
if isinstance(obj, datetime):
return {
'__type__': 'datetime',
'iso': obj.isoformat(),
'year': obj.year,
'month': obj.month,
'day': obj.day,
'hour': obj.hour,
'minute': obj.minute,
'second': obj.second,
'microsecond': obj.microsecond,
}
elif isinstance(obj, aiohttp.MultiDict):
return {
k: obj.getall(k) for k in obj
}
else:
return super(CustomEncoder, self).default(obj)
def json_dumps(obj, indent=2):
return json.dumps(obj, cls=CustomEncoder, indent=indent, sort_keys=True)
def json_loads(obj):
return json.loads(obj, cls=CustomDecoder)
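# Minimal round-trip sketch for the custom (de)serialization above; the values are made
# up for illustration:
#   payload = {'fetched_at': datetime(2015, 1, 2, 3, 4, 5)}
#   json_loads(json_dumps(payload))
# CustomEncoder turns the datetime into a dict tagged with '__type__', and CustomDecoder's
# dict_to_object hook attempts to rebuild a datetime from that dict on the way back,
# falling back to the raw tagged dict when the remaining keys do not match datetime's
# constructor.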
def args_kwargs_iterator(iterator):
"""
Converts items in an iterator to args tuples and kwargs dictionaries:
- if the item is a 2-tuple and the first item in the tuple is a tuple and
the second is a dict, then treat the first item as args, second as kwargs
- else if the item is a tuple, then treat the tuple as args, empty kwargs
- else treat the item as a single argument
"""
for args_kwargs in iterator:
if isinstance(args_kwargs, tuple):
if len(args_kwargs) == 2 and (
isinstance(args_kwargs[0], tuple) and
isinstance(args_kwargs[1], dict)):
args, kwargs = args_kwargs
else:
args = args_kwargs
kwargs = {}
else:
args = (args_kwargs,)
kwargs = {}
yield args, kwargs
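# Illustrative inputs/outputs for args_kwargs_iterator (values are hypothetical):
#   [1, (2, 3), ((4,), {'x': 5})]
#   -> (1,), {}        # plain item becomes a one-element args tuple
#   -> (2, 3), {}      # plain tuple is treated as args, empty kwargs
#   -> (4,), {'x': 5}  # (tuple, dict) pair is treated as (args, kwargs)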
def add_func_to_iterator(coro_arg, iterator):
try:
coro_iter = iter(coro_arg)
except TypeError:
coro_iter = repeat(coro_arg)
yield from zip(coro_iter, iterator)
def get_cache_id(url, *args, **kwargs):
cache_id = hashlib.md5()
cache_id.update(url.encode('utf-8'))
cache_id.update(json.dumps(kwargs.get('params', ''), sort_keys=True).encode('utf-8'))
return cache_id.hexdigest()
def doublewrap(f):
'''
a decorator decorator, allowing the decorator to be used as:
@decorator(with, arguments, and=kwargs)
or
@decorator
Credits: https://stackoverflow.com/questions/653368/
'''
@functools.wraps(f)
def new_dec(*args, **kwargs):
if len(args) == 1 and len(kwargs) == 0 and callable(args[0]):
# actual decorated function
return f(args[0])
else:
# decorator arguments
return lambda realf: f(realf, *args, **kwargs)
return new_dec
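# doublewrap lets a decorator accept optional arguments, so both spellings below are
# equivalent ways of using the decorators defined under it (sketch only):
#   @async
#   def fetch(): ...
#   @async(store=True)
#   def fetch_and_store(): ...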
def mark_store(f, exclude=None):
f.store = True
f.store_exclude = exclude
@doublewrap
def async(f, store=False):
if store:
mark_store(f)
return asyncio.coroutine(f)
@doublewrap
def store(f, exclude=()):
mark_store(f, exclude=exclude)
return asyncio.coroutine(f)
def get_url_parts(url):
url_parts = urlsplit(url)
url_dict = vars(url_parts)
url_dict['query'] = parse_qsl(url_dict['query'])
return url_dict
def show_in_browser(html):
source = base64.b64encode(html.encode('utf-8')).decode('utf-8')
webbrowser.open('data:text/html;base64,%s' % source)
|
{
"content_hash": "59ea7272ad5e0b44f323baa6070f53af",
"timestamp": "",
"source": "github",
"line_count": 152,
"max_line_length": 89,
"avg_line_length": 27.43421052631579,
"alnum_prop": 0.5860911270983213,
"repo_name": "stefanw/scrapa",
"id": "5d3b92e28b48b440cbe7a36e9463fada23467b2c",
"size": "4170",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scrapa/utils.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "4699"
},
{
"name": "Python",
"bytes": "69472"
}
],
"symlink_target": ""
}
|
"""connection full name
Revision ID: e1855559096
Revises: 401bc82cc255
Create Date: 2015-09-26 17:40:20.742180
"""
# revision identifiers, used by Alembic.
revision = 'e1855559096'
down_revision = '401bc82cc255'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('connection', sa.Column('full_name', sa.String(length=255), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('connection', 'full_name')
### end Alembic commands ###
|
{
"content_hash": "12e8152c2824ef7dc00609e4c4ecbd8f",
"timestamp": "",
"source": "github",
"line_count": 26,
"max_line_length": 93,
"avg_line_length": 24.346153846153847,
"alnum_prop": 0.6951026856240127,
"repo_name": "ArthurPBressan/sisgep1",
"id": "8421e6f29dc860c958defcb0f8837716e7ec31c1",
"size": "633",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "migrations/versions/e1855559096_connection_full_name.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "16786"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "18645"
}
],
"symlink_target": ""
}
|
import sys, urllib, time
from mod_pywebsocket import common, msgutil, util
def web_socket_do_extra_handshake(request):
request.connection.write(b'x')
time.sleep(2)
request.connection.write(b'x')
time.sleep(2)
request.connection.write(b'x')
time.sleep(2)
request.connection.write(b'x')
time.sleep(2)
request.connection.write(b'x')
time.sleep(2)
return
def web_socket_transfer_data(request):
while True:
line = msgutil.receive_message(request)
if line == 'Goodbye':
return
request.ws_stream.send_message(line, binary=False)
|
{
"content_hash": "d91f8e2e5ab7d3f422290c5a6b0781c6",
"timestamp": "",
"source": "github",
"line_count": 23,
"max_line_length": 58,
"avg_line_length": 26.47826086956522,
"alnum_prop": 0.6617405582922824,
"repo_name": "youtube/cobalt",
"id": "ddf9e98267035be59ef0957e474be9c944918a4a",
"size": "628",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "third_party/web_platform_tests/websockets/handlers/sleep_10_v13_wsh.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
import unittest
from datetime import date, timedelta
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.tokens import PasswordResetTokenGenerator
from django.test import TestCase
from django.utils.six import PY3
class TokenGeneratorTest(TestCase):
def test_make_token(self):
user = User.objects.create_user('tokentestuser', 'test2@example.com', 'testpw')
p0 = PasswordResetTokenGenerator()
tk1 = p0.make_token(user)
self.assertTrue(p0.check_token(user, tk1))
def test_10265(self):
"""
The token generated for a user created in the same request
will work correctly.
"""
# See ticket #10265
user = User.objects.create_user('comebackkid', 'test3@example.com', 'testpw')
p0 = PasswordResetTokenGenerator()
tk1 = p0.make_token(user)
reload = User.objects.get(username='comebackkid')
tk2 = p0.make_token(reload)
self.assertEqual(tk1, tk2)
def test_timeout(self):
"""
The token is valid after n days, but no greater.
"""
# Uses a mocked version of PasswordResetTokenGenerator so we can change
# the value of 'today'
class Mocked(PasswordResetTokenGenerator):
def __init__(self, today):
self._today_val = today
def _today(self):
return self._today_val
user = User.objects.create_user('tokentestuser', 'test2@example.com', 'testpw')
p0 = PasswordResetTokenGenerator()
tk1 = p0.make_token(user)
p1 = Mocked(date.today() + timedelta(settings.PASSWORD_RESET_TIMEOUT_DAYS))
self.assertTrue(p1.check_token(user, tk1))
p2 = Mocked(date.today() + timedelta(settings.PASSWORD_RESET_TIMEOUT_DAYS + 1))
self.assertFalse(p2.check_token(user, tk1))
@unittest.skipIf(PY3, "Unnecessary test with Python 3")
def test_date_length(self):
"""
Overly long dates, which are a potential DoS vector, aren't allowed.
"""
user = User.objects.create_user('ima1337h4x0r', 'test4@example.com', 'p4ssw0rd')
p0 = PasswordResetTokenGenerator()
# This will put a 14-digit base36 timestamp into the token, which is too large.
with self.assertRaises(ValueError):
p0._make_token_with_timestamp(user, 175455491841851871349)
def test_check_token_with_nonexistent_token_and_user(self):
user = User.objects.create_user('tokentestuser', 'test2@example.com', 'testpw')
p0 = PasswordResetTokenGenerator()
tk1 = p0.make_token(user)
self.assertIs(p0.check_token(None, tk1), False)
self.assertIs(p0.check_token(user, None), False)
|
{
"content_hash": "bd8da6bde94ec5c063cdcadb91c8c417",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 88,
"avg_line_length": 38.774647887323944,
"alnum_prop": 0.6498365419542318,
"repo_name": "cloudera/hue",
"id": "7ff3f15f3d80544c9420673723c2aa7da9a99375",
"size": "2753",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "desktop/core/ext-py/Django-1.11.29/tests/auth_tests/test_tokens.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ABAP",
"bytes": "962"
},
{
"name": "ActionScript",
"bytes": "1133"
},
{
"name": "Ada",
"bytes": "99"
},
{
"name": "Assembly",
"bytes": "2347"
},
{
"name": "AutoHotkey",
"bytes": "720"
},
{
"name": "BASIC",
"bytes": "2884"
},
{
"name": "Batchfile",
"bytes": "143575"
},
{
"name": "C",
"bytes": "5129166"
},
{
"name": "C#",
"bytes": "83"
},
{
"name": "C++",
"bytes": "718011"
},
{
"name": "COBOL",
"bytes": "4"
},
{
"name": "CSS",
"bytes": "680715"
},
{
"name": "Cirru",
"bytes": "520"
},
{
"name": "Clojure",
"bytes": "794"
},
{
"name": "Closure Templates",
"bytes": "1072"
},
{
"name": "CoffeeScript",
"bytes": "403"
},
{
"name": "ColdFusion",
"bytes": "86"
},
{
"name": "Common Lisp",
"bytes": "632"
},
{
"name": "Cython",
"bytes": "1016963"
},
{
"name": "D",
"bytes": "324"
},
{
"name": "Dart",
"bytes": "489"
},
{
"name": "Dockerfile",
"bytes": "13576"
},
{
"name": "EJS",
"bytes": "752"
},
{
"name": "Eiffel",
"bytes": "375"
},
{
"name": "Elixir",
"bytes": "692"
},
{
"name": "Elm",
"bytes": "487"
},
{
"name": "Emacs Lisp",
"bytes": "411907"
},
{
"name": "Erlang",
"bytes": "487"
},
{
"name": "Forth",
"bytes": "979"
},
{
"name": "FreeMarker",
"bytes": "1017"
},
{
"name": "G-code",
"bytes": "521"
},
{
"name": "GAP",
"bytes": "29873"
},
{
"name": "GLSL",
"bytes": "512"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Gherkin",
"bytes": "699"
},
{
"name": "Go",
"bytes": "641"
},
{
"name": "Groovy",
"bytes": "1080"
},
{
"name": "HTML",
"bytes": "28328425"
},
{
"name": "Haml",
"bytes": "920"
},
{
"name": "Handlebars",
"bytes": "173"
},
{
"name": "Haskell",
"bytes": "512"
},
{
"name": "Haxe",
"bytes": "447"
},
{
"name": "HiveQL",
"bytes": "43"
},
{
"name": "Io",
"bytes": "140"
},
{
"name": "Java",
"bytes": "457398"
},
{
"name": "JavaScript",
"bytes": "39181239"
},
{
"name": "Jinja",
"bytes": "356"
},
{
"name": "Julia",
"bytes": "210"
},
{
"name": "LSL",
"bytes": "2080"
},
{
"name": "Lean",
"bytes": "213"
},
{
"name": "Less",
"bytes": "396102"
},
{
"name": "Lex",
"bytes": "218764"
},
{
"name": "Liquid",
"bytes": "1883"
},
{
"name": "LiveScript",
"bytes": "5747"
},
{
"name": "Lua",
"bytes": "78382"
},
{
"name": "M4",
"bytes": "1751"
},
{
"name": "MATLAB",
"bytes": "203"
},
{
"name": "Makefile",
"bytes": "1025937"
},
{
"name": "Mako",
"bytes": "3644004"
},
{
"name": "Mask",
"bytes": "597"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "Nix",
"bytes": "2212"
},
{
"name": "OCaml",
"bytes": "539"
},
{
"name": "Objective-C",
"bytes": "2672"
},
{
"name": "OpenSCAD",
"bytes": "333"
},
{
"name": "PHP",
"bytes": "662"
},
{
"name": "PLSQL",
"bytes": "29403"
},
{
"name": "PLpgSQL",
"bytes": "6006"
},
{
"name": "Pascal",
"bytes": "84273"
},
{
"name": "Perl",
"bytes": "4327"
},
{
"name": "PigLatin",
"bytes": "371"
},
{
"name": "PowerShell",
"bytes": "6235"
},
{
"name": "Procfile",
"bytes": "47"
},
{
"name": "Pug",
"bytes": "584"
},
{
"name": "Python",
"bytes": "92881549"
},
{
"name": "R",
"bytes": "2445"
},
{
"name": "Roff",
"bytes": "484108"
},
{
"name": "Ruby",
"bytes": "1098"
},
{
"name": "Rust",
"bytes": "495"
},
{
"name": "SCSS",
"bytes": "78508"
},
{
"name": "Sass",
"bytes": "770"
},
{
"name": "Scala",
"bytes": "1541"
},
{
"name": "Scheme",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "249165"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "SourcePawn",
"bytes": "948"
},
{
"name": "Stylus",
"bytes": "682"
},
{
"name": "Tcl",
"bytes": "899"
},
{
"name": "TeX",
"bytes": "165743"
},
{
"name": "Thrift",
"bytes": "341963"
},
{
"name": "Twig",
"bytes": "761"
},
{
"name": "TypeScript",
"bytes": "1241396"
},
{
"name": "VBScript",
"bytes": "938"
},
{
"name": "VHDL",
"bytes": "830"
},
{
"name": "Vala",
"bytes": "485"
},
{
"name": "Verilog",
"bytes": "274"
},
{
"name": "Vim Snippet",
"bytes": "226931"
},
{
"name": "Vue",
"bytes": "350385"
},
{
"name": "XQuery",
"bytes": "114"
},
{
"name": "XSLT",
"bytes": "522199"
},
{
"name": "Yacc",
"bytes": "1070437"
},
{
"name": "jq",
"bytes": "4"
}
],
"symlink_target": ""
}
|
import numpy as np
from iminuit import Minuit
from matplotlib import pyplot as plt
from numpy.random import randn
from probfit import AddPdfNorm, BinnedLH, gaussian, rename
peak1 = randn(1000) * 0.5 + 1.0
peak2 = randn(500) * 0.5 + 0.0
# two peaks data with shared width
data = np.concatenate([peak1, peak2])
# Share the width
# If you use Normalized here. Do not reuse the object.
# It will be really slow due to cache miss. Read Normalized doc for more info.
pdf1 = rename(gaussian, ("x", "m_1", "sigma"))
pdf2 = rename(gaussian, ("x", "m_2", "sigma"))
compdf = AddPdfNorm(pdf1, pdf2) # merge by name (merge sigma)
ulh = BinnedLH(compdf, data, extended=False)
m = Minuit(ulh, m_1=1.1, m_2=-0.1, sigma=0.48, f_0=0.6, limit_f_0=(0, 1))
plt.figure(figsize=(8, 3))
plt.subplot(121)
ulh.draw(m, parts=True)
plt.title("Before")
m.migrad() # fit
plt.subplot(122)
ulh.draw(m, parts=True)
plt.title("After")
|
{
"content_hash": "b70f7bc77a16c6bf91c5557ca7d49f13",
"timestamp": "",
"source": "github",
"line_count": 33,
"max_line_length": 78,
"avg_line_length": 27.606060606060606,
"alnum_prop": 0.6937431394072447,
"repo_name": "iminuit/probfit",
"id": "f06e6f80e74b7e4552ebfd4f84a7146eaec4c816",
"size": "935",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "doc/pyplots/functor/addpdfnorm.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "234"
},
{
"name": "Jupyter Notebook",
"bytes": "1045673"
},
{
"name": "Makefile",
"bytes": "2332"
},
{
"name": "Python",
"bytes": "184629"
}
],
"symlink_target": ""
}
|
"""Tests for git store"""
from contextlib import contextmanager
import os
from shutil import rmtree
import subprocess
import tempfile
import unittest
from store.git.git import LocalGitStore
class GitStoreTest(unittest.TestCase):
TEST_STORE = "test_store"
TEST_KEY = "test_key"
GIT_ROOT = "git_root"
STATUS_FILE = ".status"
def setUp(self):
self.tmp_root = tempfile.mkdtemp(suffix="test_root_")
self.git_root = os.path.join(self.tmp_root, GitStoreTest.GIT_ROOT)
os.mkdir(self.git_root)
self.store_location = GitStoreTest.TEST_STORE
self.status_file = os.path.join(self.tmp_root, GitStoreTest.STATUS_FILE)
def tearDown(self):
rmtree(self.tmp_root)
@contextmanager
def helper_test(self, suppress_error=True):
with LocalGitStore(
git_root = self.git_root,
store_location = self.store_location,
key = GitStoreTest.TEST_KEY,
suppress_error = suppress_error,
status_file = self.status_file) as git_store:
yield git_store
def _create_store(self, with_key=False):
self.abs_store = os.path.join(self.git_root, self.store_location)
os.mkdir(self.abs_store)
if with_key:
open(os.path.join(self.abs_store, GitStoreTest.TEST_KEY), "w").close()
def _assert_exit_code(self, expected_exit_code):
# Check if the status file has right exit code
with open(self.status_file) as sf:
exit_code = sf.readlines()[0]
self.assertEquals(expected_exit_code, exit_code)
def testGetIfKeyExists(self):
self._create_store(with_key=True)
with self.helper_test() as git_store:
dest = os.path.join(self.tmp_root, "get_doc")
git_store.get(dest)
# Check if get document exists
self.assertTrue(os.path.exists(dest))
# Check if the status file has right exit code
self._assert_exit_code('0')
def testGetIfKeyNotExistsWithSuppress(self):
self._create_store()
dest = os.path.join(self.tmp_root, "get_doc")
with self.helper_test() as git_store:
git_store.get(dest)
# Make sure dest is not created
self.assertFalse(os.path.exists(dest))
# Check if the status file has error exit code
self._assert_exit_code('1')
def testGetIfKeyNotExists(self):
with self.assertRaises(LocalGitStore.LocalGitStoreError) as e:
self._create_store()
with self.helper_test(suppress_error=False) as git_store:
dest=os.path.join(self.tmp_root, "get_doc")
git_store.get(dest)
def testGetIfStoreNotExists(self):
with self.assertRaises(LocalGitStore.LocalGitStoreError) as e:
with self.helper_test(suppress_error=False) as git_store:
dest=os.path.join(self.tmp_root, "get_doc")
git_store.get(dest)
def _create_src(self):
(src_fp, src) = tempfile.mkstemp(dir=self.tmp_root)
return src
def testPutIfKeyNotExists(self):
self._create_store()
with self.helper_test() as git_store:
src = self._create_src()
git_store.put_if_not_exists(src)
# Make sure key exists in the store
self.assertTrue(os.path.exists(os.path.join(self.abs_store, GitStoreTest.TEST_KEY)))
self._assert_exit_code('0')
def testPutIfNotExistsWhenKeyExists(self):
        self._create_store(with_key=True)
key_file = os.path.join(self.abs_store, GitStoreTest.TEST_KEY)
m_time = os.path.getmtime(key_file)
with self.helper_test(suppress_error=False) as git_store:
src = self._create_src()
git_store.put_if_not_exists(src)
self.assertEquals(m_time, os.path.getmtime(key_file))
self._assert_exit_code('0')
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "3bfefada64abcb98132a1326c6bedeb9",
"timestamp": "",
"source": "github",
"line_count": 108,
"max_line_length": 88,
"avg_line_length": 33.407407407407405,
"alnum_prop": 0.6840354767184036,
"repo_name": "GoogleContainerTools/base-images-docker",
"id": "0d2ac58c6046d8852bf4558cb3b63192dda7368c",
"size": "4211",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "tests/store/git/git_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "115"
},
{
"name": "Makefile",
"bytes": "1241"
},
{
"name": "Python",
"bytes": "8847"
},
{
"name": "Roff",
"bytes": "36"
},
{
"name": "Shell",
"bytes": "21960"
},
{
"name": "Starlark",
"bytes": "100370"
}
],
"symlink_target": ""
}
|
import sys
import java.awt as awt
import javax.swing as swing
import java.lang as lang
import java.lang.System as System
import java.io.File as File
from java.lang import Object
import threading
import dircache
import os
import pickle
import math
class YayGuiCore:
def buildGui(self):
self.frame = swing.JFrame('Yay 0.7.1')
self.frame.windowClosing = self.handleShowHide
self.frame.contentPane.layout = awt.GridLayout(4,2)
find_icon = Object().getClass().getResource("/yay64.gif")
frame_icon = awt.Toolkit.getDefaultToolkit().getImage(find_icon)
#scaled_frame_icon = frame_icon.getScaledInstance(16,16,0)
self.frame.setIconImage(frame_icon)
panel = swing.JPanel()
###
# Menu Bar
##
menuBar = swing.JMenuBar()
fileMenu = swing.JMenu("File")
editMenu = swing.JMenu("Edit")
self.countMenu = swing.JMenu("")
menuItemPrune = swing.JMenuItem("Prune File",actionPerformed=self.callPrune)
menuItemReload = swing.JMenuItem("Reload Image Folder",actionPerformed=self.callReload)
menuItemChangeFolder = swing.JMenuItem("Image Folder",actionPerformed=self.callSetDir)
menuItemSetSpeed = swing.JMenuItem("Slideshow Speed",actionPerformed=self.showSpeedDialog)
menuHide = swing.JMenuItem("Hide",actionPerformed=self.handleShowHide)
menuItemQuit = swing.JMenuItem("Exit",actionPerformed=self.goodbye)
editMenu.add(menuItemChangeFolder)
editMenu.add(menuItemSetSpeed)
editMenu.addSeparator()
editMenu.add(menuItemPrune)
fileMenu.add(menuItemReload)
fileMenu.addSeparator()
fileMenu.add(menuHide)
fileMenu.add(menuItemQuit)
menuBar.add(fileMenu)
menuBar.add(editMenu)
menuBar.add(self.countMenu)
self.frame.setJMenuBar(menuBar)
###
# Top Panel
##
self.panelTop = swing.JPanel()
self.panelTop.layout = awt.GridLayout(1,2)
self.lblDirectory = swing.JLabel()
self.panelTop.add(self.lblDirectory)
self.lblStatus = swing.JTextField("?",3,keyPressed=self.callGoEnter)
self.panelTop.add(self.lblStatus)
panel.add(self.panelTop)
###
# Slideshow controls
##
panelControls = swing.JPanel()
panelControls.layout = awt.GridLayout(1,3)
self.btnPrev = swing.JButton('<<',actionPerformed=self.callLast)
panelControls.add(self.btnPrev)
self.btnStart = swing.JButton("Start", actionPerformed=self.callStart)
panelControls.add(self.btnStart)
self.btnNext = swing.JButton('>>',actionPerformed=self.callNext)
panelControls.add(self.btnNext)
panel.add(panelControls)
###
# Settings controls
##
self.panelSettings = swing.JPanel()
self.panelSettings.layout = awt.GridLayout(1,1)
self.lblCurrent = swing.JLabel()
self.panelSettings.add(self.lblCurrent)
panel.add(self.panelSettings)
self.frame.setContentPane(panel)
self.frame.size = self.normal_size
self.frame.resizable = False
self.frame.show()
|
{
"content_hash": "98ea5cc9c814080dd472bacb60d50c20",
"timestamp": "",
"source": "github",
"line_count": 92,
"max_line_length": 92,
"avg_line_length": 30.717391304347824,
"alnum_prop": 0.7484076433121019,
"repo_name": "jacktasia/yay",
"id": "dcacc3c79b93b3008938996cb2f5db42424f943c",
"size": "2952",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "yay_gui_core.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Java",
"bytes": "2121"
},
{
"name": "Python",
"bytes": "18473"
}
],
"symlink_target": ""
}
|
import http.server
class MyHTTPRequestHandler(http.server.SimpleHTTPRequestHandler):
def end_headers(self):
self.send_my_headers()
http.server.SimpleHTTPRequestHandler.end_headers(self)
def send_my_headers(self):
self.send_header("Cache-Control", "no-cache, no-store, must-revalidate")
self.send_header("Pragma", "no-cache")
self.send_header("Expires", "0")
# self.send_header("Access-Control-Allow-Origin", "*")
# self.send_header("Access-Control-Allow-Methods", "POST")
# self.send_header("Access-Control-Allow-Credentials", "false")
# self.send_header("Access-Control-Allow-Headers", "Content-Type")
if __name__ == '__main__':
http.server.test(HandlerClass=MyHTTPRequestHandler, port=4040)
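# Running this module serves the current directory on http://localhost:4040/ while the
# extra headers above keep browsers from caching the demo assets between rebuilds.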
|
{
"content_hash": "6588f1c884cfbf0fad44dc62223f6bd8",
"timestamp": "",
"source": "github",
"line_count": 19,
"max_line_length": 80,
"avg_line_length": 41.10526315789474,
"alnum_prop": 0.6696542893725992,
"repo_name": "japgolly/scalajs-benchmark",
"id": "84ee85c3c2fb9ff2018065994fa64b02aadb391c",
"size": "781",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "bin/serve-demo.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "781"
},
{
"name": "Scala",
"bytes": "320433"
},
{
"name": "Shell",
"bytes": "1133"
}
],
"symlink_target": ""
}
|
import os
import sys
import signal
import psutil
import logging
logging.basicConfig(filename='daemon.log', level=logging.INFO)
def stop(pgid):
logging.info(f"Stopping pgid {pgid} by ray_daemon.")
try:
# SIGTERM may not kill all the children processes in the group.
os.killpg(pgid, signal.SIGKILL)
except Exception:
logging.error("Cannot kill pgid: {}".format(pgid))
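# manager() polls the watched pid once per second and, as soon as it disappears, tears
# down the whole process group it was asked to guard.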
def manager():
pid_to_watch = int(sys.argv[1])
pgid_to_kill = int(sys.argv[2])
import time
while psutil.pid_exists(pid_to_watch):
time.sleep(1)
stop(pgid_to_kill)
if __name__ == "__main__":
manager()
|
{
"content_hash": "c649076d64ca8309e9cc711cc88849ff",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 71,
"avg_line_length": 22.857142857142858,
"alnum_prop": 0.6515625,
"repo_name": "intel-analytics/analytics-zoo",
"id": "fe5ff7533ea84bf23c27cee2a27be6c499787f63",
"size": "1231",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pyzoo/zoo/ray/ray_daemon.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "73165"
},
{
"name": "Groovy",
"bytes": "1613"
},
{
"name": "Java",
"bytes": "209136"
},
{
"name": "Jupyter Notebook",
"bytes": "24437284"
},
{
"name": "Makefile",
"bytes": "11724"
},
{
"name": "PureBasic",
"bytes": "593"
},
{
"name": "Python",
"bytes": "4085490"
},
{
"name": "RobotFramework",
"bytes": "17467"
},
{
"name": "Scala",
"bytes": "3562801"
},
{
"name": "Shell",
"bytes": "413512"
}
],
"symlink_target": ""
}
|
"""
history
A great place for reviews on filibuster
--------------------------------------------------------------------
"""
__version__ = '3.0.0'
__author__ = "someone"
content = {
'review': [
'<#review_intro#> <#review_conclusion#>',
],
'review_intro': [
'Looking for something new and exciting? Check out <#!bold,company#> website.',
'The <#!bold,company#> website is <#review_accolade#>.',
'In its new incarnation the <#!bold,company#> site has something for everyone - <#review_accolade#>.',
'<#!capitalize,company#> is building an online community by <#j_noun_gerund#> the content with <#j_thing#> to <#review_audience#>.',
u"<#!capitalize,company#>’s motto says it all: “<#filibuster_co_motto#>”",
'<#!capitalize,j_noun_gerund#>, <#j_noun_gerund#>, you heard it all before. But the new <#!bold,company#> website delivers.',
u"Targetting <#review_audience#>, <#!bold,company#> is expanding it’s online operations with this new enterprise.",
],
'review_source': [
'-- <#paper_US#>',
'[<#name#>, <#paper_US#>]'
],
'review_conclusion': [
'A must-see for anyone who is serious about <#review_interests#>.',
'Recommended for <#review_audience#> with an interest in <#review_interests#>!',
'Worth a visit, if only for the <#review_offers#> and <#review_offers#>. <#review_source#>',
"Editor's choice. <#review_source#>",
"This site is a winner! <#review_source#>",
'Voted <#review_award#> by the <#paper_financial#>.',
'Awarded "<#review_award#>" by the <#paper_US#>.',
'A breath of fresh air for the online community.',
'Required reading for <#review_audience#>',
],
'review_interests': [
        'investing','medicine','health','shopping','news','money','the internet','privacy','content',
'information','being informed',
],
'review_offers': [
'news','sport','headlines','investing tips','shopping','catalogs','book reviews','movie reviews','auctions',
'ecommerce','banking', 'personal finance','personalised advice','health databases', 'cars',
'free downloads', 'free fonts', 'great deals', 'travel planners','advertising'
],
'review_accolade': [
'a wonderful example of how modern technology can assist <#review_audience#>!',
'a marvelous idea, great graphics, sharp and witty journalism',
'a great system',
'one of the most powerful <#portal_co_description_pl#> in the world',
'one of the most powerful in its kind in the world',
'the equivalent of an atomic bomb',
'the <#review_majorcompany#> of websites',
'more content than meets the eye!',
'everything <#review_audience#> needs in a website!',
'plenty of content for everyone',
'the next <#review_majorcompany#>',
'this will give <#review_majorcompany#> a run for their money',
'investors beware: this is the next <#review_majorcompany#>',
],
'review_majorcompany': [
'Microsoft','Intel', 'Apple', 'Adobe', '<#IPO1#>', 'Amazon', 'Barnes&Noble','Mozilla',
'Google', 'Facebook', 'IBM', 'Nokia', 'Sony', 'Philips',
],
'review_audience': [
'the <#review_audience_adj#> professional',
'the guy on the street',
'the <#review_audience_adj#> investor',
'the <#review_audience_adj#> shopper',
'the <#review_audience_adj#> journalist',
'the <#review_audience_adj#> trendwatcher',
'kids and perhaps the entire family',
'parents and grandparents',
'youngsters around the nation',
],
'review_audience_adj': [
'online',
'urban',
'wired',
'frugal',
'wise',
'nervous',
'witty',
'focussed',
'rich',
'poor',
'stressed',
],
'review_award': [
'Best Online Experience',
'Five Bulls Award',
'Site-2-See',
'Sites4SoreEyes',
'Place2B-Prize',
'Control-D Award'
],
}
|
{
"content_hash": "8750cbea6e4bc7038ccac8a06f8ff88c",
"timestamp": "",
"source": "github",
"line_count": 102,
"max_line_length": 148,
"avg_line_length": 47.65686274509804,
"alnum_prop": 0.4834396214770623,
"repo_name": "petrvanblokland/Xierpa3",
"id": "c2ffc97a573723da5297446a28ba4e76140eb5cf",
"size": "5317",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "xierpa3/contributions/filibuster/content/reviews.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "41394"
},
{
"name": "JavaScript",
"bytes": "1507"
},
{
"name": "Python",
"bytes": "1349828"
}
],
"symlink_target": ""
}
|
"""A module that handles series: find a limit, order the series etc.
"""
from .order import Order
from .limits import limit, Limit
from .gruntz import gruntz
from .series import series
from .approximants import approximants
from .residues import residue
from .sequences import (EmptySequence, SeqPer, SeqFormula, sequence, SeqAdd,
SeqMul)
from .fourier import fourier_series
from .formal import fps
from .limitseq import difference_delta, limit_seq
from ..core.singleton import S
EmptySequence = S.EmptySequence
del S
O = Order
__all__ = ['Order', 'O', 'limit', 'Limit', 'gruntz', 'series', 'residue',
'EmptySequence', 'SeqPer', 'SeqFormula', 'sequence',
'SeqAdd', 'SeqMul', 'fourier_series', 'fps', 'difference_delta',
'limit_seq']
|
{
"content_hash": "1c1e87bf8a187e7364166e0d971c9dba",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 76,
"avg_line_length": 33.041666666666664,
"alnum_prop": 0.6872635561160151,
"repo_name": "kaushik94/sympy",
"id": "96eee4b8106c2be8318e67c25cee8d1a3edf7efd",
"size": "793",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sympy/series/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "5094"
},
{
"name": "Python",
"bytes": "13553568"
},
{
"name": "Ruby",
"bytes": "304"
},
{
"name": "Scheme",
"bytes": "125"
},
{
"name": "Shell",
"bytes": "4008"
},
{
"name": "TeX",
"bytes": "32356"
},
{
"name": "XSLT",
"bytes": "366202"
}
],
"symlink_target": ""
}
|
from os.path import basename, dirname, join
import sys
from PyQt4 import QtCore, QtGui, uic
# Translation function
def tr(context, text):
return QtGui.QApplication.translate(context, text, None, QtGui.QApplication.UnicodeUTF8)
class PatcherApp(QtGui.QApplication):
def __init__(self, patches=[], patch_fn=None, translation_file=None):
super(PatcherApp, self).__init__(sys.argv)
self._load_translations(translation_file)
self._register_event_types()
self._open_status_window()
self._start_patch_thread(patches, patch_fn)
def _load_translations(self, translation_file):
if translation_file:
# Load the app-specific translation resources.
translator = QtCore.QTranslator()
translator.load(basename(translation_file), dirname(translation_file))
self.installTranslator(translator)
def _open_status_window(self):
self.window = StatusWidget()
self.window.show()
self.window.raise_()
def _register_event_types(self):
# Register the event type(s) that will be used
_InsertEggEvent.EVENT_TYPE = QtCore.QEvent.registerEventType(_InsertEggEvent.EVENT_TYPE)
def _start_patch_thread(self, patches, patch_fn):
self.thread = _PatchThread(patches, patch_fn, self.window)
self.thread.finished.connect(self._thread_finished)
self.thread.start()
def _thread_finished(self):
self.exit()
class StatusWidget(QtGui.QWidget):
def __init__(self, parent=None):
super(StatusWidget, self).__init__(parent)
# Load the UI and make sure it's translated
form_class, wc = uic.loadUiType(join(dirname(__file__), "patch_ui.ui"))
self.form = form_class()
self.form.setupUi(self)
if self.form is not None:
self.form.retranslateUi(self)
def customEvent(self, event):
if event.type() == _InsertEggEvent.EVENT_TYPE:
# Note the egg that is being installed
status = unicode(tr("UpdateStatusWidget", "Installing: %1").arg(basename(event.path())))
self.form.status_message.setText(status)
self.form.progress_bar.setValue(event.percentage())
event.accept()
else:
event.ignore()
class _InsertEggEvent(QtCore.QEvent):
EVENT_TYPE = QtCore.QEvent.User
def __init__(self, path, percentage):
super(_InsertEggEvent, self).__init__(_InsertEggEvent.EVENT_TYPE)
self._path = path
self._percentage = percentage
def path(self):
return self._path
def percentage(self):
return self._percentage * 100.0
class _PatchThread(QtCore.QThread):
def __init__(self, patches, patch_fn, window, parent=None):
super(_PatchThread, self).__init__(parent)
self.patches = patches
self.apply_patch = patch_fn
self.window = window
def run(self):
count = len(self.patches)
for i, patch in enumerate(self.patches):
QtGui.QApplication.instance().postEvent(self.window,
_InsertEggEvent(patch, (i+1)/float(count)))
self.apply_patch(patch)
|
{
"content_hash": "87d2ad557f0b5fe5753b30e4aee00648",
"timestamp": "",
"source": "github",
"line_count": 96,
"max_line_length": 100,
"avg_line_length": 33.927083333333336,
"alnum_prop": 0.6229659195578754,
"repo_name": "jwiggins/keyenst",
"id": "9bbd3da11f26ab1ba68fc17949fce6d5f5182051",
"size": "3257",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "egginst/patch_ui.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "103970"
},
{
"name": "Shell",
"bytes": "1424"
}
],
"symlink_target": ""
}
|
'''
Created by prinkk
Developer Kristian Øllegaard
Mail kristian@prinkk.net
www http://www.prinkk.net
License Copyright 2011 prinkk
Filename livestats.py
'''
from django import template
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.filter
@stringfilter
def remove_whitespace(value):
return value.strip()
|
{
"content_hash": "d1683b4296cf2d3d4854f8b67959fdab",
"timestamp": "",
"source": "github",
"line_count": 20,
"max_line_length": 55,
"avg_line_length": 20.55,
"alnum_prop": 0.7031630170316302,
"repo_name": "frankk00/django-livestats",
"id": "58ed0d0d03cacc1c466f84711b1b33e37ff03749",
"size": "436",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "livestats/templatetags/livestats.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
import inspect, os, sys
# From http://stackoverflow.com/questions/279237/python-import-a-module-from-a-folder
cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile( inspect.currentframe() ))[0],"..")))
if cmd_subfolder not in sys.path:
sys.path.insert(0, cmd_subfolder)
import mosq_test
rc = 1
mid = 53
keepalive = 60
connect_packet = mosq_test.gen_connect("subpub-qos0-test", keepalive=keepalive)
connack_packet = mosq_test.gen_connack(rc=0)
subscribe_packet = mosq_test.gen_subscribe(mid, "subpub/qos0", 0)
suback_packet = mosq_test.gen_suback(mid, 0)
publish_packet = mosq_test.gen_publish("subpub/qos0", qos=0, payload="message")
cmd = ['../../src/mosquitto', '-p', '1888']
broker = mosq_test.start_broker(filename=os.path.basename(__file__), cmd=cmd)
try:
sock = mosq_test.do_client_connect(connect_packet, connack_packet, timeout=20)
sock.send(subscribe_packet)
if mosq_test.expect_packet(sock, "suback", suback_packet):
sock.send(publish_packet)
if mosq_test.expect_packet(sock, "publish", publish_packet):
rc = 0
sock.close()
finally:
broker.terminate()
broker.wait()
if rc:
(stdo, stde) = broker.communicate()
print(stde)
exit(rc)
|
{
"content_hash": "6ee8c59a91ae0999ba0b289867e06336",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 129,
"avg_line_length": 30.11904761904762,
"alnum_prop": 0.6861660079051384,
"repo_name": "jgarybanks/blenderbottle",
"id": "295b01731b40a51eea80139c0df623c3fb80c43e",
"size": "1380",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "broker/mosquitto/test/broker/02-subpub-qos0.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "3913"
},
{
"name": "Batchfile",
"bytes": "281"
},
{
"name": "C",
"bytes": "2561094"
},
{
"name": "C++",
"bytes": "131482"
},
{
"name": "CMake",
"bytes": "18218"
},
{
"name": "HTML",
"bytes": "11256"
},
{
"name": "M4",
"bytes": "36193"
},
{
"name": "Makefile",
"bytes": "102905"
},
{
"name": "NSIS",
"bytes": "10856"
},
{
"name": "Perl",
"bytes": "3271"
},
{
"name": "Python",
"bytes": "185697"
},
{
"name": "Roff",
"bytes": "5524"
},
{
"name": "Ruby",
"bytes": "1368"
},
{
"name": "Shell",
"bytes": "6304"
},
{
"name": "TeX",
"bytes": "8012"
},
{
"name": "XSLT",
"bytes": "1459"
}
],
"symlink_target": ""
}
|
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/ship/interior_components/shared_weapon_interior.iff"
result.attribute_template_id = 8
result.stfName("space/space_item","weapon_interior_n")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
{
"content_hash": "25d691adb40d1a0b23b567cdfb40aec6",
"timestamp": "",
"source": "github",
"line_count": 13,
"max_line_length": 88,
"avg_line_length": 25.53846153846154,
"alnum_prop": 0.713855421686747,
"repo_name": "anhstudios/swganh",
"id": "b0e5ced05a8557c7821e23d2182d55c2232cf035",
"size": "477",
"binary": false,
"copies": "2",
"ref": "refs/heads/develop",
"path": "data/scripts/templates/object/tangible/ship/interior_components/shared_weapon_interior.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "11887"
},
{
"name": "C",
"bytes": "7699"
},
{
"name": "C++",
"bytes": "2357839"
},
{
"name": "CMake",
"bytes": "41264"
},
{
"name": "PLSQL",
"bytes": "42065"
},
{
"name": "Python",
"bytes": "7503510"
},
{
"name": "SQLPL",
"bytes": "42770"
}
],
"symlink_target": ""
}
|
import maya.cmds as cmds
from mayaGeom import MayaGeom
class MayaSphere(MayaGeom):
def __init__(self, name='Sphere'):
MayaGeom.__init__(self)
parts = cmds.sphere(name=name, object=True, radius=1.0)
self.name = parts[0]
|
{
"content_hash": "2957497f531ebe1fa158f40ac94332e6",
"timestamp": "",
"source": "github",
"line_count": 9,
"max_line_length": 63,
"avg_line_length": 27.666666666666668,
"alnum_prop": 0.6465863453815262,
"repo_name": "madoodia/codeLab",
"id": "10fe419e5f9241a00b46b3da9ddc17ebcba0487a",
"size": "249",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "python/mayaSphere1.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "26584"
},
{
"name": "C#",
"bytes": "3735"
},
{
"name": "C++",
"bytes": "1041008"
},
{
"name": "CMake",
"bytes": "2191"
},
{
"name": "CSS",
"bytes": "14746"
},
{
"name": "HTML",
"bytes": "6401216"
},
{
"name": "Makefile",
"bytes": "17623"
},
{
"name": "Prolog",
"bytes": "295"
},
{
"name": "Python",
"bytes": "218348"
},
{
"name": "QML",
"bytes": "23919"
},
{
"name": "QMake",
"bytes": "1554"
},
{
"name": "Shell",
"bytes": "16371"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from hashlib import md5
def dedup_data_hash(todedup_data, todedup_labels, base_data):
"""Fast deduplicate datasets by hashing: MD5.
Args:
todedup_data: A NumPy array. Dataset to be deduplicated.
todedup_labels: A NumPy array. Labels to be deduplicated with
the corresponding dataset.
base_data: A NumPy array. Basis dataset for deduplication.
Returns:
deduped_d: A dict,
deduped_d['data']: deduplicated dataset.
deduped_d['labels']: deduplicated labels.
### Usage
```python
deduped_valid_d = dedup_data_hash(valid_data, valid_labels, train_data)
```
"""
base_data_hash = [md5(x).hexdigest() for x in base_data]
todedup_data_hash = [md5(x).hexdigest() for x in todedup_data]
dedup_flags = np.logical_not(
np.in1d(todedup_data_hash, base_data_hash))
deduped_data = todedup_data[dedup_flags]
deduped_labels = todedup_labels[dedup_flags]
deduped_d = {
'data': deduped_data,
'labels': deduped_labels
}
return deduped_d
|
{
"content_hash": "7068e804437f4c2da118592e253d3ea9",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 75,
"avg_line_length": 29.24390243902439,
"alnum_prop": 0.6522101751459549,
"repo_name": "bowen0701/python_handson",
"id": "1bd9dfb809c243a29129c2da08018e0b16a54a27",
"size": "1199",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "dedup_dataset_hash.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "62005"
}
],
"symlink_target": ""
}
|
"""
Author: Alok Jani
binomial_distribution_table.py
Builds a table for calculating the probability of
r successes in n independent trials,
each with probability of success p.
import math
def combinatorial(n, r):
return math.factorial(n) // math.factorial(r) // math.factorial(n-r)
def binomial_probability(n,p,x):
return combinatorial(n,x) * math.pow(p,x) * math.pow(1-p,n - x)
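# Example: combinatorial(4, 2) == 6 and binomial_probability(4, 0.5, 2) == 0.375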
range_n = 20 # Range for number of trials
# Range for number of successes is 0 to i for i'th trial
# Probability of success is printed at intervals of 0.1
print
print ' n r ',
for prob in range(1,10,1):
p = prob/10.0
print '%0.4f ' % p,
print
print "-----------------------------------------------------------------------------"
for n in range(2,range_n+1):
for x in range(0,n+1):
print '%2d %2d ' % (n,x),
for prob in range(1,10,1):
p = prob/10.0
px = binomial_probability(n,p,x)
print '%0.4f ' % (px),
print
print
|
{
"content_hash": "466fd9d5bf4ec45c8c1da41d42724f4e",
"timestamp": "",
"source": "github",
"line_count": 42,
"max_line_length": 85,
"avg_line_length": 25.047619047619047,
"alnum_prop": 0.5399239543726235,
"repo_name": "alokjani/python-mathmodeling",
"id": "ce7e2e454538a53756888a49fccd942dd27e0bec",
"size": "1074",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "probability/binomial_distribution_table.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1074"
}
],
"symlink_target": ""
}
|
import os
import sys
import unittest
from nose.config import Config
from nose.core import TestProgram
here = os.path.abspath(os.path.dirname(__file__))
support = os.path.join(here, 'support')
units = os.path.normpath(os.path.join(here, '..', 'unit_tests'))
if units not in sys.path:
sys.path.insert(0, units)
from mock import RecordingPluginManager
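# RecordingPluginManager (from the unit_tests mock module) records every plugin hook call so the call order can be asserted below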
class TestPluginCalls(unittest.TestCase):
"""
Tests how plugins are called throughout a standard test run
"""
def test_plugin_calls_package1(self):
wdir = os.path.join(support, 'package1')
man = RecordingPluginManager()
conf = Config(plugins=man, stream=sys.stdout)
t = TestProgram(defaultTest=wdir, config=conf,
argv=['test_plugin_calls_package1'], exit=False)
print man.calls()
assert man.called
self.assertEqual(
man.calls(),
['loadPlugins', 'addOptions', 'configure', 'begin',
'prepareTestLoader', 'loadTestsFromNames', 'loadTestsFromName',
'prepareTestRunner', 'prepareTest', 'setOutputStream',
'prepareTestResult', 'beforeDirectory', 'wantFile',
'wantDirectory', 'beforeContext', 'beforeImport',
'afterImport', 'wantModule', 'wantClass', 'wantFunction',
'makeTest', 'wantMethod', 'loadTestsFromTestClass',
'loadTestsFromTestCase', 'loadTestsFromModule', 'startContext',
'beforeTest', 'prepareTestCase', 'startTest', 'addSuccess',
'stopTest', 'afterTest', 'stopContext', 'afterContext',
'loadTestsFromDir', 'afterDirectory',
'report', 'finalize'])
def test_plugin_calls_package1_verbose(self):
wdir = os.path.join(support, 'package1')
man = RecordingPluginManager()
conf = Config(plugins=man, stream=sys.stdout)
t = TestProgram(defaultTest=wdir, config=conf,
argv=['test_plugin_calls_package1', '-v'], exit=False)
print man.calls()
assert man.called
self.assertEqual(
man.calls(),
['loadPlugins', 'addOptions', 'configure', 'begin',
'prepareTestLoader', 'loadTestsFromNames', 'loadTestsFromName',
'prepareTestRunner', 'prepareTest', 'setOutputStream',
'prepareTestResult', 'beforeDirectory', 'wantFile',
'wantDirectory', 'beforeContext', 'beforeImport',
'afterImport', 'wantModule', 'wantClass', 'wantFunction',
'makeTest', 'wantMethod', 'loadTestsFromTestClass',
'loadTestsFromTestCase', 'loadTestsFromModule', 'startContext',
'beforeTest', 'prepareTestCase', 'startTest', 'describeTest',
'testName', 'addSuccess', 'stopTest', 'afterTest', 'stopContext',
'afterContext', 'loadTestsFromDir', 'afterDirectory',
'report', 'finalize'])
if __name__ == '__main__':
unittest.main()
|
{
"content_hash": "06f7dd584003601dac098ee256ce8299",
"timestamp": "",
"source": "github",
"line_count": 71,
"max_line_length": 78,
"avg_line_length": 41.71830985915493,
"alnum_prop": 0.6164753544902093,
"repo_name": "cloudera/hue",
"id": "eff6c7a4f381b8881dfd47c11cbdccdf85f8dac4",
"size": "2962",
"binary": false,
"copies": "10",
"ref": "refs/heads/master",
"path": "desktop/core/ext-py/nose-1.3.7/functional_tests/test_plugins.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "ABAP",
"bytes": "962"
},
{
"name": "ActionScript",
"bytes": "1133"
},
{
"name": "Ada",
"bytes": "99"
},
{
"name": "Assembly",
"bytes": "2347"
},
{
"name": "AutoHotkey",
"bytes": "720"
},
{
"name": "BASIC",
"bytes": "2884"
},
{
"name": "Batchfile",
"bytes": "143575"
},
{
"name": "C",
"bytes": "5129166"
},
{
"name": "C#",
"bytes": "83"
},
{
"name": "C++",
"bytes": "718011"
},
{
"name": "COBOL",
"bytes": "4"
},
{
"name": "CSS",
"bytes": "680715"
},
{
"name": "Cirru",
"bytes": "520"
},
{
"name": "Clojure",
"bytes": "794"
},
{
"name": "Closure Templates",
"bytes": "1072"
},
{
"name": "CoffeeScript",
"bytes": "403"
},
{
"name": "ColdFusion",
"bytes": "86"
},
{
"name": "Common Lisp",
"bytes": "632"
},
{
"name": "Cython",
"bytes": "1016963"
},
{
"name": "D",
"bytes": "324"
},
{
"name": "Dart",
"bytes": "489"
},
{
"name": "Dockerfile",
"bytes": "13576"
},
{
"name": "EJS",
"bytes": "752"
},
{
"name": "Eiffel",
"bytes": "375"
},
{
"name": "Elixir",
"bytes": "692"
},
{
"name": "Elm",
"bytes": "487"
},
{
"name": "Emacs Lisp",
"bytes": "411907"
},
{
"name": "Erlang",
"bytes": "487"
},
{
"name": "Forth",
"bytes": "979"
},
{
"name": "FreeMarker",
"bytes": "1017"
},
{
"name": "G-code",
"bytes": "521"
},
{
"name": "GAP",
"bytes": "29873"
},
{
"name": "GLSL",
"bytes": "512"
},
{
"name": "Genshi",
"bytes": "946"
},
{
"name": "Gherkin",
"bytes": "699"
},
{
"name": "Go",
"bytes": "641"
},
{
"name": "Groovy",
"bytes": "1080"
},
{
"name": "HTML",
"bytes": "28328425"
},
{
"name": "Haml",
"bytes": "920"
},
{
"name": "Handlebars",
"bytes": "173"
},
{
"name": "Haskell",
"bytes": "512"
},
{
"name": "Haxe",
"bytes": "447"
},
{
"name": "HiveQL",
"bytes": "43"
},
{
"name": "Io",
"bytes": "140"
},
{
"name": "Java",
"bytes": "457398"
},
{
"name": "JavaScript",
"bytes": "39181239"
},
{
"name": "Jinja",
"bytes": "356"
},
{
"name": "Julia",
"bytes": "210"
},
{
"name": "LSL",
"bytes": "2080"
},
{
"name": "Lean",
"bytes": "213"
},
{
"name": "Less",
"bytes": "396102"
},
{
"name": "Lex",
"bytes": "218764"
},
{
"name": "Liquid",
"bytes": "1883"
},
{
"name": "LiveScript",
"bytes": "5747"
},
{
"name": "Lua",
"bytes": "78382"
},
{
"name": "M4",
"bytes": "1751"
},
{
"name": "MATLAB",
"bytes": "203"
},
{
"name": "Makefile",
"bytes": "1025937"
},
{
"name": "Mako",
"bytes": "3644004"
},
{
"name": "Mask",
"bytes": "597"
},
{
"name": "Myghty",
"bytes": "936"
},
{
"name": "Nix",
"bytes": "2212"
},
{
"name": "OCaml",
"bytes": "539"
},
{
"name": "Objective-C",
"bytes": "2672"
},
{
"name": "OpenSCAD",
"bytes": "333"
},
{
"name": "PHP",
"bytes": "662"
},
{
"name": "PLSQL",
"bytes": "29403"
},
{
"name": "PLpgSQL",
"bytes": "6006"
},
{
"name": "Pascal",
"bytes": "84273"
},
{
"name": "Perl",
"bytes": "4327"
},
{
"name": "PigLatin",
"bytes": "371"
},
{
"name": "PowerShell",
"bytes": "6235"
},
{
"name": "Procfile",
"bytes": "47"
},
{
"name": "Pug",
"bytes": "584"
},
{
"name": "Python",
"bytes": "92881549"
},
{
"name": "R",
"bytes": "2445"
},
{
"name": "Roff",
"bytes": "484108"
},
{
"name": "Ruby",
"bytes": "1098"
},
{
"name": "Rust",
"bytes": "495"
},
{
"name": "SCSS",
"bytes": "78508"
},
{
"name": "Sass",
"bytes": "770"
},
{
"name": "Scala",
"bytes": "1541"
},
{
"name": "Scheme",
"bytes": "559"
},
{
"name": "Shell",
"bytes": "249165"
},
{
"name": "Smarty",
"bytes": "130"
},
{
"name": "SourcePawn",
"bytes": "948"
},
{
"name": "Stylus",
"bytes": "682"
},
{
"name": "Tcl",
"bytes": "899"
},
{
"name": "TeX",
"bytes": "165743"
},
{
"name": "Thrift",
"bytes": "341963"
},
{
"name": "Twig",
"bytes": "761"
},
{
"name": "TypeScript",
"bytes": "1241396"
},
{
"name": "VBScript",
"bytes": "938"
},
{
"name": "VHDL",
"bytes": "830"
},
{
"name": "Vala",
"bytes": "485"
},
{
"name": "Verilog",
"bytes": "274"
},
{
"name": "Vim Snippet",
"bytes": "226931"
},
{
"name": "Vue",
"bytes": "350385"
},
{
"name": "XQuery",
"bytes": "114"
},
{
"name": "XSLT",
"bytes": "522199"
},
{
"name": "Yacc",
"bytes": "1070437"
},
{
"name": "jq",
"bytes": "4"
}
],
"symlink_target": ""
}
|
"""Reports memory usage stats.
The code is based on:
https://raw.githubusercontent.com/pixelb/ps_mem/master/ps_mem.py
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import errno
import fnmatch
import io
import logging
import os
import sys
import mmap
from treadmill import sysinfo
_LOGGER = logging.getLogger(__name__)
# Pagesize in K.
_PAGESIZE = mmap.PAGESIZE // 1024
_KERNEL_VER = sysinfo.kernel_ver()
def proc_path(*args):
"""Helper function to construct /proc path.
"""
return os.path.join('/proc', *(str(a) for a in args))
def proc_open(*args):
"""Helper function to open /proc path.
"""
try:
return io.open(proc_path(*args))
except (IOError, OSError):
val = sys.exc_info()[1]
# kernel thread or process gone
if val.errno == errno.ENOENT or val.errno == errno.EPERM:
raise LookupError
raise
def proc_readlines(*args):
"""Read lines from /proc file.
"""
with proc_open(*args) as f:
return f.readlines()
def proc_readline(*args):
"""Read line from /proc file.
"""
with proc_open(*args) as f:
return f.readline()
def proc_read(*args):
"""Read content of /proc file.
"""
with proc_open(*args) as f:
return f.read()
def get_thread_id(pid):
"""Read thread group id designated in /proc/<pid>/status.
"""
return proc_readlines(pid, 'status')[2][6:-1]
def get_threads(pid):
"""Read number of threads designated in /proc/<pid>/status.
"""
return int(proc_readlines(pid, 'status')[26][8:-1].strip())
def get_mem_stats(pid, use_pss=True):
"""Return private, shared memory given pid.
Note: shared is always a subset of rss (trs is not always).
"""
statm = proc_readline(pid, 'statm').split()
rss = int(statm[1]) * _PAGESIZE
private_lines = []
shared_lines = []
pss_lines = []
have_pss = False
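    # Prefer Pss from smaps when available: proportional set size splits each shared page evenly among the processes mapping it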
if use_pss and os.path.exists(proc_path(pid, 'smaps')):
for line in proc_readlines(pid, 'smaps'):
if line.startswith('Shared'):
shared_lines.append(line)
elif line.startswith('Private'):
private_lines.append(line)
elif line.startswith('Pss'):
have_pss = True
pss_lines.append(line)
shared = sum([int(line.split()[1]) for line in shared_lines])
private = sum([int(line.split()[1]) for line in private_lines])
# shared + private = rss above
# the Rss in smaps includes video card mem etc.
if have_pss:
            # add 0.5 KiB per line to compensate for the average truncation error
pss_adjust = 0.5
pss = sum([float(line.split()[1]) + pss_adjust
for line in pss_lines])
shared = pss - private
else:
shared = int(statm[2]) * _PAGESIZE
private = rss - shared
# values are in Kbytes.
return (int(private * 1024), int(shared * 1024), have_pss)
def get_cmd_name(pid, verbose):
"""Returns truncated command line name given pid."""
    cmdline = proc_read(pid, 'cmdline').split('\x00')  # entries are NUL-separated
if cmdline[-1] == '' and len(cmdline) > 1:
cmdline = cmdline[:-1]
path = proc_path(pid, 'exe')
try:
path = os.readlink(path)
# Some symlink targets were seen to contain NULs on RHEL 5 at least
# https://github.com/pixelb/scripts/pull/10, so take string up to NUL
        path = path.split('\x00')[0]
except OSError as err:
val = sys.exc_info()[1]
# either kernel thread or process gone
if val.errno == errno.ENOENT or val.errno == errno.EPERM:
raise LookupError
_LOGGER.error('OS Error: %s', err)
raise
if verbose:
return cmdline[0].replace('\x00', ' ')
if path.endswith(' (deleted)'):
path = path[:-10]
if os.path.exists(path):
path += ' [updated]'
else:
            # The path could have prelink stuff, so try cmdline,
# which might have the full path present. This helped for:
# /usr/libexec/notification-area-applet.#prelink#.fX7LCT (deleted)
if os.path.exists(cmdline[0]):
path = cmdline[0] + ' [updated]'
else:
path += ' [deleted]'
exe = os.path.basename(path)
cmd = proc_readline(pid, 'status')[6:-1]
if exe.startswith(cmd):
cmd = exe
return cmd
def get_memory_usage(pids, verbose=False, exclude=None, use_pss=True):
"""Returns memory stats for list of pids, aggregated by cmd line."""
# TODO: pylint complains about too many branches, need to refactor.
# pylint: disable=R0912
meminfos = []
for pid in pids:
thread_id = int(get_thread_id(pid))
if not pid or thread_id != pid:
continue
try:
cmd = get_cmd_name(pid, verbose)
except LookupError:
# kernel threads don't have exe links or
# process gone
continue
except OSError:
# operation not permitted
continue
if exclude:
match = False
for pattern in exclude:
if fnmatch.fnmatch(cmd, pattern):
match = True
break
if match:
continue
meminfo = {}
meminfo['name'] = cmd
meminfo['tgid'] = thread_id
try:
private, shared, have_pss = get_mem_stats(pid, use_pss=use_pss)
except RuntimeError:
continue # process gone
if 'shared' in meminfo:
if have_pss:
meminfo['shared'] += shared
else:
meminfo['shared'] = max(meminfo['shared'], shared)
else:
meminfo['shared'] = shared
meminfo['private'] = meminfo.setdefault('private', 0) + private
meminfo['threads'] = get_threads(pid)
meminfo['total'] = meminfo['private'] + meminfo['shared']
meminfos.append(meminfo)
return meminfos
|
{
"content_hash": "07fc4ef11984c8a6a5060ca01323c59c",
"timestamp": "",
"source": "github",
"line_count": 219,
"max_line_length": 78,
"avg_line_length": 28.1324200913242,
"alnum_prop": 0.5690634637234215,
"repo_name": "Morgan-Stanley/treadmill",
"id": "583339d088015acf962918aa31063287cd951b4d",
"size": "6161",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "lib/python/treadmill/psmem.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PowerShell",
"bytes": "3750"
},
{
"name": "Python",
"bytes": "3372983"
},
{
"name": "Ruby",
"bytes": "3712"
},
{
"name": "Shell",
"bytes": "51646"
}
],
"symlink_target": ""
}
|
"""
The "Api" class is used when an instance
of an API is required in the athena.apis.api_lib
Use "from athena.apis import api_lib" & "api_lib['(api_name_key)']"
to access instances of APIs.
"""
import traceback
class Api(object):
def __init__(self, key, save_data=None, enabled=True):
""" Make a unique api key name (e.g. 'spotify_api') """
self.key = key
if save_data is not None:
self.save_data = save_data
self.enabled = enabled
def verify_data(self, user):
""" Verify that the current user .yml file
has required save_data attributes
"""
try:
if hasattr(self, 'save_data'):
# print()
for field in self.save_data:
""" If data is required and not there, throw error """
if field.key in user[self.key]:
setattr(self, field.key, user[self.key][field.key])
# print('API: '+self.key+', Loading data: '+field.key)
elif field.require:
raise Exception
return True
        except Exception:
print(traceback.format_exc())
return False
|
{
"content_hash": "c2d34bc49b64151b861f45fff11aa9d4",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 78,
"avg_line_length": 33.972972972972975,
"alnum_prop": 0.5147175815433572,
"repo_name": "hey-athena/hey-athena-client",
"id": "2a171914887bd0c60c4f45c0ea1e6a5bc4c1690e",
"size": "1257",
"binary": false,
"copies": "2",
"ref": "refs/heads/demo-branch",
"path": "athena/classes/api.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "58387"
}
],
"symlink_target": ""
}
|
import sys
import os.path
import platform
major = sys.version_info[0]
minor = sys.version_info[1]
path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(os.path.join(path, "source/module"))
sys.path.append(
os.path.join(path,
"build/lib.{}-{}-{}.{}".format(
platform.system().lower(),
platform.machine(),
major, minor)))
try:
import uno
except Exception as e:
import traceback
traceback.print_exc()
sys.exit(1)
def connect(uno_url):
try:
local_ctx = uno.getComponentContext()
resolver = local_ctx.getServiceManager().createInstanceWithContext(
"com.sun.star.bridge.UnoUrlResolver", local_ctx)
return resolver.resolve(uno_url)
except Exception as e:
print("Error on connect to office: " + uno_url)
print(e)
raise
import unittest
class PyUNOTestFunctions(unittest.TestCase):
UNO_URL = "uno:socket,host=localhost,port=2002;urp;StarOffice.ComponentContext"
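    # Assumes an office instance is already listening, e.g. started with: soffice "--accept=socket,host=localhost,port=2002;urp;"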
ctx = None
doc = None
def setUp(self):
if not self.__class__.ctx:
self.__class__.ctx = connect(self.UNO_URL)
self.__class__.doc = self.create_doc()
#self.assertIsNotNone(self.ctx)
def get_ctx(self):
return self.__class__.ctx
def get_doc(self):
return self.__class__.doc
def get_desktop(self):
return self.create("com.sun.star.frame.Desktop")
def create(self, name, args=None):
""" Create instance of service specified by name. """
smgr = self.get_ctx().getServiceManager()
if args:
return smgr.createInstanceWithArgumentsAndContext(name, args, self.get_ctx())
else:
return smgr.createInstanceWithContext(name, self.get_ctx())
def create_doc(self):
return self.get_desktop().loadComponentFromURL(
"private:factory/swriter", "_blank", 0, ())
def test_getServiceManager(self):
smgr = self.get_ctx().getServiceManager()
self.assertIsNotNone(smgr)
# functions defined in uno module
def test_getComponentContext(self):
r = uno.getComponentContext()
self.assertIsNotNone(r)
def test_getConstantByName(self):
n = uno.getConstantByName("com.sun.star.awt.FontWeight.BOLD")
self.assertEqual(n, 150.0)
n = uno.getConstantByName("com.sun.star.awt.FontSlant.ITALIC")
self.assertEqual(n, 2)
#uno.getConstantByName("ふぉお") # error ok
def test_getTypeByName(self):
def test(type_name, type_class):
t = uno.getTypeByName(type_name)
self.assertTrue(isinstance(t, uno.Type))
self.assertEqual(t.typeName, type_name)
self.assertTrue(isinstance(t.typeClass, uno.Enum))
self.assertEqual(t.typeClass.value, type_class)
test("long", "LONG")
test("[]long", "SEQUENCE")
test("com.sun.star.awt.XMouseListener", "INTERFACE")
def test_createUnoStruct(self):
from com.sun.star.awt import Rectangle
rect1 = uno.createUnoStruct("com.sun.star.awt.Rectangle")
self.assertTrue(isinstance(rect1, Rectangle))
rect2 = uno.createUnoStruct("com.sun.star.awt.Rectangle", 100, 200, 50, 1)
self.assertEqual(rect2.X, 100)
rect3 = uno.createUnoStruct("com.sun.star.awt.Rectangle", rect2)
#self.assertEqual(rect2, rect3)
def test_getClass(self):
from com.sun.star.uno import Exception as UNOException
ex = uno.getClass("com.sun.star.uno.Exception")
self.assertEqual(ex, UNOException)
def test_isInterface(self):
from com.sun.star.awt import XMouseListener
from com.sun.star.awt.FontSlant import ITALIC
self.assertTrue(uno.isInterface(XMouseListener))
self.assertFalse(uno.isInterface(ITALIC))
def test_generateUuid(self):
v = uno.generateUuid()
self.assertTrue(isinstance(v, uno.ByteSequence))
self.assertEqual(len(v), 16)
self.assertTrue(isinstance(v.value, bytes))
def test_systemPathToFileUrl(self):
sys_path = "/home/foo/bar/hoge/123.ods"
url = "file:///home/foo/bar/hoge/123.ods"
result = uno.systemPathToFileUrl(sys_path)
self.assertEqual(result, url)
def test_fileUrlToSystemPath(self):
sys_path = "/home/foo/bar/hoge/123.ods"
url = "file:///home/foo/bar/hoge/123.ods"
result = uno.fileUrlToSystemPath(url)
self.assertEqual(result, sys_path)
def test_absolutize(self):
relative_path = "../.."
url = "file:///home/foo/bar/hoge/123.ods"
desired = "file:///home/foo/bar/"
result = uno.absolutize(url, relative_path)
self.assertEqual(result, desired)
def test_hasModule(self):
import pyuno
self.assertTrue(pyuno.hasModule("com"))
self.assertTrue(pyuno.hasModule("com.sun"))
self.assertTrue(pyuno.hasModule("com.sun.star.awt.FontWeight"))
self.assertTrue(pyuno.hasModule("com.sun.star.awt.FontSlant"))
self.assertFalse(pyuno.hasModule("foo"))
def test_getModuleElementNames(self):
self.assertTrue("sun" in uno.getModuleElementNames("com"))
self.assertTrue("beans" in uno.getModuleElementNames("com.sun.star"))
_all = set(uno.getModuleElementNames("com.sun.star.beans"))
self.assertTrue("XExactName" in _all)
self.assertTrue("NamedValue" in _all)
self.assertTrue("UnknownPropertyException" in _all)
self.assertTrue("PropertyState" in _all)
self.assertTrue("PropertyAttribute" in _all)
self.assertFalse("Introspection" in _all)
self.assertFalse("Optional" in _all)
self.assertFalse("PropertyValues" in _all)
_all = set(uno.getModuleElementNames("com.sun.star.awt.FontSlant"))
self.assertTrue("ITALIC" in _all)
_all = set(uno.getModuleElementNames("com.sun.star.awt.FontWeight"))
self.assertTrue("BOLD" in _all)
# classes defined in uno module
def test_Enum(self):
repr_base = "<uno.Enum {type_name} ('{value}')>"
type_name = "com.sun.star.awt.FontSlant"
value = "ITALIC"
repr_desired = repr_base.format(type_name=type_name, value=value)
e = uno.Enum(type_name, value)
self.assertEqual(repr(e), repr_desired)
e2 = uno.Enum(type_name, value)
self.assertTrue(e == e2)
type_name2 = "com.sun.star.beans.PropertyState"
value2 = "DIRECT_VALUE"
repr_desired2 = repr_base.format(type_name=type_name2, value=value2)
em = uno.Enum(type_name2, value2)
self.assertEqual(repr(em), repr_desired2)
self.assertFalse(e == em)
self.assertTrue(e != em)
# ToDo illegal type name and value
def test_Type(self):
repr_base = "<Type instance {type_name} ({type_class})>"
type_name = "boolean"
type_class = uno.Enum("com.sun.star.uno.TypeClass", "BOOLEAN")
repr_desired = repr_base.format(type_name=type_name, type_class=type_class)
t = uno.Type(type_name, type_class)
self.assertTrue(t == uno.getTypeByName(type_name))
self.assertFalse(t == uno.getTypeByName("void"))
self.assertEqual(repr(t), repr_desired)
self.assertEqual(hash(t), hash(type_name))
def test_Char(self):
repr_base = "<Char instance {}>"
v = "c"
repr_desired = repr_base.format(v)
c = uno.Char(v)
self.assertEqual(repr(c), repr_desired)
self.assertEqual(c, v)
self.assertEqual(c, c)
self.assertNotEqual(c, uno.Char("v"))
def test_ByteSequence(self):
a = b"abcdef"
b = b"xyz"
bsa = uno.ByteSequence(a)
bsb = uno.ByteSequence(b)
self.assertEqual(bsa.value, a)
self.assertEqual(bsb.value, b)
self.assertEqual(bsa, a)
self.assertEqual(bsa, bytearray(a))
self.assertEqual(len(bsa), len(a))
self.assertEqual(bsa[1], a[1])
c = a + b
bsc = uno.ByteSequence(c)
self.assertEqual(bsc, bsa + bsb)
self.assertEqual(bsc, c)
def test_Any(self):
vt = self.create_value_test()
uno.invoke(vt, "setLong", (uno.Any("long", 100),))
ret = uno.invoke(vt, "getLong", ())
self.assertEqual(ret, 100)
# import test
def test_import_interface(self):
from com.sun.star.container import XNameAccess, XIndexAccess
self.assertTrue(uno.isInterface(XNameAccess))
self.assertTrue(uno.isInterface(XIndexAccess))
def test_import_struct(self):
from com.sun.star.awt import Rectangle
r = Rectangle()
self.assertTrue(isinstance(r, Rectangle))
self.assertTrue(isinstance(r, uno.UNOStruct))
self.assertEqual(Rectangle.typeName, "com.sun.star.awt.Rectangle")
self.assertEqual(Rectangle.__pyunostruct__, "com.sun.star.awt.Rectangle")
def test_import_exception(self):
from com.sun.star.uno import RuntimeException
e = RuntimeException()
self.assertTrue(isinstance(e, RuntimeException))
self.assertTrue(isinstance(e, uno.UNOException))
self.assertTrue(isinstance(e, Exception))
self.assertEqual(RuntimeException.typeName, "com.sun.star.uno.RuntimeException")
self.assertEqual(RuntimeException.__pyunostruct__, "com.sun.star.uno.RuntimeException")
def test_import_enum(self):
from com.sun.star.awt.FontSlant import OBLIQUE
e = uno.Enum("com.sun.star.awt.FontSlant", "OBLIQUE")
self.assertEqual(OBLIQUE, e)
def test_import_constant(self):
from com.sun.star.awt.FontWeight import BLACK
self.assertEqual(BLACK, 200.0)
import com.sun.star.awt.PosSize as PosSize
self.assertEqual(PosSize.X, 1)
def test_import_typeOf(self):
from com.sun.star.container import typeOfXNameAccess
t = uno.getTypeByName("com.sun.star.container.XNameAccess")
self.assertEqual(typeOfXNameAccess, t)
def test_import_module(self):
import com.sun.star
self.assertTrue(isinstance(com, uno.UNOModule))
self.assertTrue(isinstance(com.sun, uno.UNOModule))
self.assertTrue(isinstance(com.sun.star, uno.UNOModule))
def test_import_unknown_module(self):
def _import():
import com.foo
self.assertRaises(ImportError, _import)
def test_import_unknown_atrribute(self):
def _import():
from com.sun.star.awt.FontSlant import FOO
self.assertRaises(ImportError, _import)
def test_import_imported_element(self):
from com.sun.star.awt import FontSlant
self.assertEqual(FontSlant.ITALIC,
uno.Enum("com.sun.star.awt.FontSlant", "ITALIC"))
self.assertEqual(FontSlant.OBLIQUE,
uno.Enum("com.sun.star.awt.FontSlant", "OBLIQUE"))
from com.sun.star.awt import XActionListener
import com.sun.star.awt
self.assertTrue(hasattr(com.sun.star.awt, "XActionListener"))
# this is valid because hasattr calls __getattr__
self.assertTrue(hasattr(com.sun.star.awt, "XButton"))
#def test_import_all(self):
# pass
# type class test
def create_value_test(self, **kwds):
from mytools import Values
vs = Values()
for k, v in kwds.items():
setattr(vs, k, v)
vt = self.create("mytools.ValueTest", (vs,))
return vt
def test_void(self):
vt = self.create_value_test()
self.assertIsNone(vt.getVoid())
def test_char(self):
vt = self.create_value_test(CharValue=uno.Char("c"))
self.assertEqual(vt.getChar(), uno.Char("c"))
vt.setChar(uno.Char("b"))
self.assertEqual(vt.getChar(), uno.Char("b"))
s = "あ"
vt.setChar(s)
self.assertEqual(vt.getChar(), uno.Char(s))
def test_boolean(self):
vt = self.create_value_test(BooleanValue=False)
self.assertFalse(vt.getBoolean())
vt.setBoolean(True)
self.assertTrue(vt.getBoolean())
vt.setBoolean(False)
self.assertFalse(vt.getBoolean())
def test_byte(self):
vt = self.create_value_test(ByteValue=100)
self.assertEqual(vt.getByte(), 100)
vt.setByte(10)
self.assertEqual(vt.getByte(), 10)
def test_short(self):
vt = self.create_value_test(ShortValue=30000)
self.assertEqual(vt.getShort(), 30000)
vt.setShort(-10000)
self.assertEqual(vt.getShort(), -10000)
def test_long(self):
vt = self.create_value_test(LongValue=1234567)
self.assertEqual(vt.getLong(), 1234567)
vt.setLong(-1234567)
self.assertEqual(vt.getLong(), -1234567)
def test_hyper(self):
v = 1111222333
vt = self.create_value_test(HyperValue=v)
self.assertEqual(vt.getHyper(), v)
vt.setHyper(-v)
self.assertEqual(vt.getHyper(), -v)
#def test_float(self):
#vt = self.create_value_test(FloatValue=100.111)
#self.assertEqual(vt.getFloat(), 100.111)
#vt.setFloat(-100.011)
#self.assertEqual(vt.getFloat(), -100.011)
# error on conversion between double and float?
def test_double(self):
vt = self.create_value_test(DoubleValue=100.111)
self.assertEqual(vt.getDouble(), 100.111)
vt.setDouble(-100.011)
self.assertEqual(vt.getDouble(), -100.011)
def test_string(self):
vt = self.create_value_test(StringValue="hoge")
self.assertEqual(vt.getString(), "hoge")
s = "pyてょn"
vt.setString(s)
self.assertEqual(vt.getString(), s)
s = "マルチバイトテキスト Multi-byte text"
doc = self.get_doc()
text = doc.getText()
text.setString(s)
self.assertEqual(text.getString(), s)
def test_type(self):
vt = self.create_value_test(TypeValue=uno.getTypeByName("[]long"))
self.assertEqual(vt.getType(), uno.getTypeByName("[]long"))
vt.setType(uno.getTypeByName("com.sun.star.awt.Rectangle"))
self.assertEqual(vt.getType(), uno.getTypeByName("com.sun.star.awt.Rectangle"))
#def test_any(self):
# pass
def test_enum(self):
italic = uno.Enum("com.sun.star.awt.FontSlant", "ITALIC")
doc = self.get_doc()
text = doc.getText()
text.setString("From PyUNO")
cursor = text.createTextCursor()
cursor.goRight(5, True)
cursor.CharPosture = italic
self.assertEqual(cursor.CharPosture, italic)
def test_struct(self):
from com.sun.star.table import BorderLine
border = BorderLine(0xff0000, 10, 0, 0)
doc = self.get_doc()
text = doc.getText()
cursor = text.createTextCursor()
cursor.BottomBorder = border
border2 = cursor.BottomBorder
self.assertEqual(border.Color, border2.Color)
def test_exception(self):
pass
def test_sequence(self):
doc = self.get_doc()
text = doc.getText()
table = doc.createInstance("com.sun.star.text.TextTable")
table.setName("NewTable")
table.initialize(2, 2)
text.insertTextContent(text.getEnd(), table, True)
a = (("foo", "bar"), (1, 2))
table.setDataArray(a)
data = table.getDataArray()
self.assertEqual(data, a)
from com.sun.star.beans import PropertyValue
arg1 = PropertyValue()
arg1.Name = "InputStream"
arg2 = PropertyValue()
arg2.Name = "FilterName"
arg2.Value = "writer_web_HTML"
bs = b"<html><body><p>Text from <b>HTML</b>.</p></body></html>"
sequence = self.create("com.sun.star.io.SequenceInputStream")
sequence.initialize((uno.ByteSequence(bs),))
arg1.Value = sequence
text = doc.getText()
text.setString("")
text.getEnd().insertDocumentFromURL("", (arg1, arg2))
sequence.closeInput()
def test_stream(self):
path = "/home/asuka/foo.txt"
b = b"test text"
pipe = self.create("com.sun.star.io.Pipe")
pipe.writeBytes(uno.ByteSequence(b))
pipe.flush()
pipe.closeOutput()
n, d = pipe.readBytes(None, 100)
v = d.value
pipe.closeInput()
self.assertEqual(v, b)
def test_interface(self):
n = 0
doc = self.get_doc()
self.assertTrue(len(dir(doc)))
desktop = self.get_desktop()
frames = desktop.getFrames()
for i in range(frames.getCount()):
frame = frames.getByIndex(i)
model = frame.getController().getModel()
if doc == model:
n += 1
self.assertEqual(n, 1)
self.assertFalse(doc == desktop)
def test_dialog(self):
return # needs dialog and user interaction
from com.sun.star.awt import XActionListener
import unohelper
class ActionListener(unohelper.Base, XActionListener):
def disposing(self, ev): pass
def actionPerformed(self, ev):
d.endExecute()
dp = self.create("com.sun.star.awt.DialogProvider")
d = dp.createDialog("vnd.sun.star.script:Standard.Dialog1?location=application")
l = ActionListener()
d.getControl("CommandButton1").addActionListener(l)
d.execute()
d.dispose()
if __name__ == "__main__":
if sys.version_info[0] == 3:
unittest.main()
else:
print("This unittest can execute only on Python3.")
sys.exit(1)
|
{
"content_hash": "3f59f5b6bc5ede72dd712eee0db93989",
"timestamp": "",
"source": "github",
"line_count": 521,
"max_line_length": 95,
"avg_line_length": 34.74664107485604,
"alnum_prop": 0.6013367950063525,
"repo_name": "hanya/pyuno3",
"id": "c0010da168654dce3b504da2012066574aaf5834",
"size": "18167",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C++",
"bytes": "182547"
},
{
"name": "Python",
"bytes": "46253"
}
],
"symlink_target": ""
}
|
"""Provide info to system health."""
from homeassistant.components import system_health
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import system_info
@callback
def async_register(
hass: HomeAssistant, register: system_health.SystemHealthRegistration
) -> None:
"""Register system health callbacks."""
register.async_register_info(system_health_info)
async def system_health_info(hass):
"""Get info for the info page."""
info = await system_info.async_get_system_info(hass)
return {
"version": f"core-{info.get('version')}",
"installation_type": info.get("installation_type"),
"dev": info.get("dev"),
"hassio": info.get("hassio"),
"docker": info.get("docker"),
"virtualenv": info.get("virtualenv"),
"python_version": info.get("python_version"),
"os_name": info.get("os_name"),
"os_version": info.get("os_version"),
"arch": info.get("arch"),
"timezone": info.get("timezone"),
}
|
{
"content_hash": "e97385714a357022970863939cedfea8",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 73,
"avg_line_length": 33.41935483870968,
"alnum_prop": 0.6476833976833977,
"repo_name": "partofthething/home-assistant",
"id": "ff3562a24f926655d083ae9f98cd4c14d4d24184",
"size": "1036",
"binary": false,
"copies": "5",
"ref": "refs/heads/dev",
"path": "homeassistant/components/homeassistant/system_health.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1720"
},
{
"name": "Python",
"bytes": "31051838"
},
{
"name": "Shell",
"bytes": "4832"
}
],
"symlink_target": ""
}
|
'''
A perfect number is a number for which
the sum of its proper divisors is exactly equal to the number.
For example, the sum of the proper divisors of 28 would be
1 + 2 + 4 + 7 + 14 = 28, which means that 28 is a perfect number.
A number n is called deficient if the sum of its proper divisors is
less than n and it is called abundant if this sum exceeds n.
As 12 is the smallest abundant number, 1 + 2 + 3 + 4 + 6 = 16,
the smallest number that can be written as the sum of two abundant numbers is 24.
By mathematical analysis, it can be shown that
all integers greater than 28123 can be written as the sum of two abundant numbers.
However, this upper limit cannot be reduced any further by analysis
even though it is known that the greatest number that cannot be expressed
as the sum of two abundant numbers is less than this limit.
Find the sum of all the positive integers which cannot be written
as the sum of two abundant numbers.
'''
from math import sqrt
import timeit
def get_divisor(n):
d = [1]
for i in range(2, int(sqrt(n)+1)):
if i*i == n:
d.append(i)
elif n % i == 0:
d.append(i)
d.append(n/i)
return d
def sum_of_two(n, a, c):
for i in a:
if i > n / 2:
break
if c[n - i]:
return True
return False
def calc(n):
c = [False] * (n + 1)
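    # c[i] is True when i is abundant; a (below) collects the abundant numbers for the pairwise-sum check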
for i in range(1, n+1):
if sum(get_divisor(i)) > i:
c[i] = True
a = [i for i in range(n+1) if c[i]]
return sum([i for i in range(1, n+1) if not sum_of_two(i, a, c)])
if __name__ == '__main__':
print calc(28123)
print timeit.Timer('problem_023.calc(28123)', 'import problem_023').timeit(1)
|
{
"content_hash": "18217644fe4a7eb3a9ff4c8036a2236f",
"timestamp": "",
"source": "github",
"line_count": 54,
"max_line_length": 82,
"avg_line_length": 31.537037037037038,
"alnum_prop": 0.6365237815619496,
"repo_name": "smrmkt/project_euler",
"id": "d442a9b1db80de65e3d83e62f5c4f3a448d29709",
"size": "1746",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "problem_023.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "104794"
}
],
"symlink_target": ""
}
|
from django.http.request import HttpRequest
from json import JSONDecoder
from json import JSONEncoder
def request_principal_get(self):
return getattr(self, '_principal', None)
def request_principal_set(self, value):
setattr(self, '_principal', value)
def request_principal_del(self):
delattr(self, '_principal')
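# Monkey-patch HttpRequest so the Talos principal is reachable as both request.user and request.principal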
request_principal_property = property(request_principal_get, request_principal_set, request_principal_del)
HttpRequest.user = request_principal_property
HttpRequest.principal = request_principal_property
class CustomJSONEncoder(JSONEncoder):
def default(self, obj):
from django.db.models import Model
from django.forms.models import model_to_dict
from django.core import serializers as model_serializer
from ..models import Principal
if isinstance(obj, Principal):
fields = {}
fields['principal'] = model_serializer.serialize('json', [obj])
fields['basic_identities'] = model_serializer.serialize('json', obj.identities.basic)
fields['basic_credentials'] = model_serializer.serialize('json', obj.credentials.basic)
fields['otp_credentials'] = model_serializer.serialize('json', obj.credentials.otp)
return {
'__type': 'django-talos-principal',
'__application': obj._meta.app_label,
'__model': obj._meta.object_name,
'fields': fields
}
elif isinstance(obj, Model):
return {
'__type': 'django-model',
'__application': obj._meta.app_label,
'__model': obj._meta.object_name,
'fields': model_to_dict(obj)}
return super(CustomJSONEncoder, self).default(obj)
class CustomJSONDecoder(JSONDecoder):
def __init__(self, *args, **kwargs):
JSONDecoder.__init__(self, object_hook=self.object_hook, *args, **kwargs)
def object_hook(self, obj):
from django.apps import apps
from django.core import serializers as model_serializer
if '__type' not in obj:
return obj
obj_type = obj['__type']
if obj_type == 'django-talos-principal':
model_class = apps.get_model(obj['__application'], obj['__model'])
principal = list(model_serializer.deserialize('json', obj['fields']['principal']))[0].object
basic_identities = [obj.object for obj in list(model_serializer.deserialize('json', obj['fields']['basic_identities']))]
basic_credentials = [obj.object for obj in list(model_serializer.deserialize('json', obj['fields']['basic_credentials']))]
otp_credentials = [obj.object for obj in list(model_serializer.deserialize('json', obj['fields']['otp_credentials']))]
principal.identities.basic = basic_identities
principal.credentials.basic = basic_credentials
principal.credentials.otp = otp_credentials
return principal
elif obj_type == 'django-model':
model_class = apps.get_model(obj['__application'], obj['__model'])
return model_class(**obj['fields'])
return obj
class Context(object):
TEST_COOKIE_NAME = 'cbcf165cfa4e4e30b2e5fe0e9d4fac6d'
TEST_COOKIE_VALUE = 'c7446479a81643f59314999b2c34ba7b'
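    # Mirrors Django's session test-cookie API (set_test_cookie / test_cookie_worked / delete_test_cookie)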
def _parse_user_agent(self, user_agent):
try:
from ua_parser import user_agent_parser
user_agent = user_agent_parser.Parse(user_agent)
remote_hw_family = '-'
remote_hw_model = '-'
if 'device' in user_agent:
user_agent_device = user_agent.get('device')
user_agent_device_brand = user_agent_device.get('brand', None)
user_agent_device_family = user_agent_device.get('family', None)
user_agent_device_model = user_agent_device.get('model', None)
if user_agent_device_brand:
remote_hw_family = user_agent_device_brand
if user_agent_device_family:
if user_agent_device_model and (user_agent_device_family != user_agent_device_model):
remote_hw_model = '{0}, {1}'.format(user_agent_device_family, user_agent_device_model)
else:
remote_hw_model = '{0}'.format(user_agent_device_family)
remote_os_family = '-'
remote_os_version = '-'
if 'os' in user_agent:
user_agent_os = user_agent.get('os')
user_agent_os_family = user_agent_os.get('family', None)
user_agent_os_major = user_agent_os.get('major', None)
user_agent_os_minor = user_agent_os.get('minor', None)
if user_agent_os_family:
remote_os_family = user_agent_os_family
if user_agent_os_major:
if user_agent_os_minor:
remote_os_version = '{0}.{1}'.format(user_agent_os_major, user_agent_os_minor)
else:
remote_os_version = user_agent_os_major
remote_ua_family = '-'
remote_ua_version = '-'
if 'user_agent' in user_agent:
user_agent_ua = user_agent.get('user_agent')
user_agent_ua_family = user_agent_ua.get('family', None)
user_agent_ua_major = user_agent_ua.get('major', None)
user_agent_ua_minor = user_agent_ua.get('minor', None)
if user_agent_ua_family:
remote_ua_family = user_agent_ua_family
if user_agent_ua_major:
if user_agent_ua_minor:
remote_ua_version = '{0}.{1}'.format(user_agent_ua_major, user_agent_ua_minor)
else:
remote_ua_version = user_agent_ua_major
return remote_hw_family, remote_hw_model, remote_os_family, remote_os_version, remote_ua_family, remote_ua_version
        except Exception:
return '', '', '', '', user_agent, ''
def _new_session(self):
from ..models import Session
address = self.request.META.get('REMOTE_ADDR', '')
geoname = self.request.META.get(self._geoname_header_name, '')
user_agent = self.request.META.get('HTTP_USER_AGENT', '')
remote_hw_family, remote_hw_model, remote_os_family, remote_os_version, remote_ua_family, remote_ua_version = self._parse_user_agent(user_agent)
self._session = Session(
previous_session=self._session,
remote_address=address,
remote_geoname=geoname,
remote_hw_family=remote_hw_family,
remote_hw_model=remote_hw_model,
remote_os_family=remote_os_family,
remote_os_version=remote_os_version,
remote_ua_family=remote_ua_family,
remote_ua_version=remote_ua_version)
def _get_session(self, now, uuid):
from ..models import Session
address = self.request.META.get('REMOTE_ADDR', '')
geoname = self.request.META.get(self._geoname_header_name, '')
user_agent = self.request.META.get('HTTP_USER_AGENT', '')
remote_hw_family, remote_hw_model, remote_os_family, remote_os_version, remote_ua_family, remote_ua_version = self._parse_user_agent(user_agent)
try:
self._session = Session.objects.get(
uuid=uuid,
valid_from__lte=now,
valid_till__gte=now,
remote_geoname=geoname,
remote_hw_family=remote_hw_family,
remote_os_family=remote_os_family,
remote_ua_family=remote_ua_family)
self._session.remote_hw_model = remote_hw_model
self._session.remote_os_version = remote_os_version
self._session.remote_ua_version = remote_ua_version
except Session.DoesNotExist:
self._session = Session(
previous_session=self._session,
remote_address=address,
remote_geoname=geoname,
remote_hw_family=remote_hw_family,
remote_hw_model=remote_hw_model,
remote_os_family=remote_os_family,
remote_os_version=remote_os_version,
remote_ua_family=remote_ua_family,
remote_ua_version=remote_ua_version)
def __init__(self, request):
from collections import OrderedDict
from django.conf import settings
from ..models import Principal
self._session = None
self._variables = OrderedDict()
self._geoname_header_name = getattr(settings, 'TALOS_GEONAME_HEADER', '')
self.request = request
self.request.session = self
self.principal = Principal.objects.get(id=0)
self.principal._load_authentication_context([])
def init(self):
self._new_session()
self.request.principal = self.principal
def load(self, uuid):
from ..models import _tznow
from ..models import Principal
from json import loads
now = _tznow()
self._get_session(now, uuid)
self._variables = loads(self._session.variables, cls=CustomJSONDecoder) if self._session.variables else {}
if self._session.principal:
self.principal = self._session.principal
self.principal._inject_authentication_context(
self._session.evidences,
self._session.roles,
self._session.privileges,
self._session.model_actions)
authentication_period = (now - self._session.valid_from).total_seconds()
valid_evidences = []
for evidence in self.principal._evidences_effective.values():
if authentication_period < evidence.expiration_period:
valid_evidences.append(evidence)
if len(valid_evidences) != len(self.principal._evidences_effective) or len(valid_evidences) == 0:
if len(valid_evidences) == 0:
self.principal = Principal.objects.get(id=0)
self.principal._load_authentication_context(valid_evidences)
self._session.valid_from = now
self.request.principal = self.principal
def save(self):
from ..models import Principal
from django.utils.functional import LazyObject
from json import dumps
prev_evidences = self._session.evidences
prev_roles = self._session.roles
prev_privileges = self._session.privileges
prev_model_actions = self._session.model_actions
prev_variables = self._session.variables
prev_principal = self._session.principal
self._session.variables = dumps(self._variables, cls=CustomJSONEncoder)
if self.request.principal and self.request.principal.is_authenticated:
if issubclass(type(self.request.principal), LazyObject):
self.request.principal._setup()
if type(self.request.principal._wrapped) == Principal:
self._session.principal = self.request.principal._wrapped
elif type(self.request.principal) == Principal:
self._session.principal = self.request.principal
if self._session.principal:
self._session.evidences, \
self._session.roles, \
self._session.privileges, \
self._session.model_actions = self._session.principal._extract_authentication_context()
else:
self._session.evidences = ''
self._session.roles = ''
self._session.privileges = ''
self._session.model_actions = ''
if (
(self._session.evidences != prev_evidences) or
(self._session.roles != prev_roles) or
(self._session.privileges != prev_privileges) or
(self._session.model_actions != prev_model_actions) or
(self._session.variables != prev_variables) or
(self._session.principal != prev_principal)):
self._session.save()
def get(self, key, default=None):
return self._variables.get(key, default)
def pop(self, key, default=None):
if key in self._variables:
return self._variables.pop(key, default)
return None
def setdefault(self, key, default=None):
if key in self._variables:
return self._variables[key]
else:
self._variables[key] = default
return default
def set_test_cookie(self):
self[self.TEST_COOKIE_NAME] = self.TEST_COOKIE_VALUE
def test_cookie_worked(self):
return self.get(self.TEST_COOKIE_NAME) == self.TEST_COOKIE_VALUE
def delete_test_cookie(self):
del self[self.TEST_COOKIE_NAME]
def update(self, other):
self._variables.update(other)
def has_key(self, key):
        return key in self._variables
def keys(self):
return self._variables.keys()
def values(self):
return self._variables.values()
def items(self):
return self._variables.items()
def iterkeys(self):
return self._variables.iterkeys()
def itervalues(self):
return self._variables.itervalues()
def iteritems(self):
return self._variables.iteritems()
def clear(self):
from collections import OrderedDict
self._variables = OrderedDict()
def is_empty(self):
try:
return len(self._variables) == 0
except AttributeError:
return True
def flush(self):
from ..models import _tznow
from ..models import Principal
if self._session:
self._session.valid_till = _tznow()
self._session.save()
self._new_session()
self.principal = Principal.objects.get(id=0)
self.principal._load_authentication_context([])
self.request.principal = self.principal
def cycle_key(self):
if self._session:
from uuid import uuid4
self._session.uuid = uuid4()
def __contains__(self, key):
return key in self._variables
def __getitem__(self, key):
return self._variables[key]
def __setitem__(self, key, value):
if key not in ('_auth_user_backend', '_auth_user_hash', '_auth_user_id'):
self._variables[key] = value
def __delitem__(self, key):
del self._variables[key]
@property
def uuid(self):
return self._session.uuid
|
{
"content_hash": "1af206e9ff9fe39f4548d2aa06592762",
"timestamp": "",
"source": "github",
"line_count": 397,
"max_line_length": 152,
"avg_line_length": 37.04785894206549,
"alnum_prop": 0.587299428882241,
"repo_name": "triflesoft/django-application-talos",
"id": "5f7ab1e2097a573e30a88d2e109ae4d3c61574ec",
"size": "14708",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "applications/talos/helpers/session.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "157"
},
{
"name": "HTML",
"bytes": "30076"
},
{
"name": "Python",
"bytes": "363785"
},
{
"name": "Shell",
"bytes": "118"
}
],
"symlink_target": ""
}
|
import cv2
import numpy as np
def get_data(input_path):
found_bg = False
all_imgs = {}
classes_count = {}
class_mapping = {}
visualise = True
with open(input_path,'r') as f:
print('Parsing annotation files')
for line in f:
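            # Each annotation line is expected as: filename,x1,y1,x2,y2,class_name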
line_split = line.strip().split(',')
(filename,x1,y1,x2,y2,class_name) = line_split
if class_name not in classes_count:
classes_count[class_name] = 1
else:
classes_count[class_name] += 1
if class_name not in class_mapping:
if class_name == 'bg' and found_bg == False:
print('Found class name with special name bg. Will be treated as a background region (this is usually for hard negative mining).')
found_bg = True
class_mapping[class_name] = len(class_mapping)
if filename not in all_imgs:
all_imgs[filename] = {}
img = cv2.imread(filename)
(rows,cols) = img.shape[:2]
all_imgs[filename]['filepath'] = filename
all_imgs[filename]['width'] = cols
all_imgs[filename]['height'] = rows
all_imgs[filename]['bboxes'] = []
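                # np.random.randint(0, 6) yields 0-5, so roughly 5/6 of images go to trainval and 1/6 to test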
if np.random.randint(0,6) > 0:
all_imgs[filename]['imageset'] = 'trainval'
else:
all_imgs[filename]['imageset'] = 'test'
all_imgs[filename]['bboxes'].append({'class': class_name, 'x1': int(x1), 'x2': int(x2), 'y1': int(y1), 'y2': int(y2)})
all_data = []
for key in all_imgs:
all_data.append(all_imgs[key])
# make sure the bg class is last in the list
if found_bg:
if class_mapping['bg'] != len(class_mapping) - 1:
key_to_switch = [key for key in class_mapping.keys() if class_mapping[key] == len(class_mapping)-1][0]
val_to_switch = class_mapping['bg']
class_mapping['bg'] = len(class_mapping) - 1
class_mapping[key_to_switch] = val_to_switch
return all_data, classes_count, class_mapping
|
{
"content_hash": "18a827be23fa279a663a18b460a73805",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 135,
"avg_line_length": 27.984375,
"alnum_prop": 0.6353992183137912,
"repo_name": "yhenon/keras-frcnn",
"id": "8bfb55aa53c7684cbbfc1941a34a32735870a5d6",
"size": "1791",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "keras_frcnn/simple_parser.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "81206"
}
],
"symlink_target": ""
}
|
"""
===========================
Upper Air Sounding Tutorial
===========================
Upper air analysis is a staple of many synoptic and mesoscale analysis
problems. In this tutorial we will gather weather balloon data, plot it,
perform a series of thermodynamic calculations, and summarize the results.
To learn more about the Skew-T diagram and its use in weather analysis and
forecasting, check out `this <http://www.pmarshwx.com/research/manuals/AF_skewt_manual.pdf>`_
air weather service guide.
"""
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
import pandas as pd
import metpy.calc as mpcalc
from metpy.cbook import get_test_data
from metpy.plots import Hodograph, SkewT
from metpy.units import units
#########################################################################
# Getting Data
# ------------
#
# Upper air data can be obtained using the siphon package, but for this tutorial we will use
# some of MetPy's sample data. This event is the Veterans Day tornado outbreak in 2002.
col_names = ['pressure', 'height', 'temperature', 'dewpoint', 'direction', 'speed']
df = pd.read_fwf(get_test_data('nov11_sounding.txt', as_file_obj=False),
skiprows=5, usecols=[0, 1, 2, 3, 6, 7], names=col_names)
# Drop any rows with all NaN values for T, Td, winds
df = df.dropna(subset=('temperature', 'dewpoint', 'direction', 'speed'),
               how='all').reset_index(drop=True)
##########################################################################
# We will pull the data out of the example dataset into individual variables and
# assign units.
p = df['pressure'].values * units.hPa
T = df['temperature'].values * units.degC
Td = df['dewpoint'].values * units.degC
wind_speed = df['speed'].values * units.knots
wind_dir = df['direction'].values * units.degrees
u, v = mpcalc.wind_components(wind_speed, wind_dir)
##########################################################################
# Thermodynamic Calculations
# --------------------------
#
# Often we will want to calculate some thermodynamic parameters of a
# sounding. The MetPy calc module has many such calculations already implemented!
#
# * **Lifting Condensation Level (LCL)** - The level at which an air parcel's
# relative humidity becomes 100% when lifted along a dry adiabatic path.
# * **Parcel Path** - Path followed by a hypothetical parcel of air, beginning
# at the surface temperature/pressure and rising dry adiabatically until
#   reaching the LCL, then rising moist adiabatically.
# Calculate the LCL
lcl_pressure, lcl_temperature = mpcalc.lcl(p[0], T[0], Td[0])
print(lcl_pressure, lcl_temperature)
# Calculate the parcel profile.
parcel_prof = mpcalc.parcel_profile(p, T[0], Td[0]).to('degC')
##########################################################################
# Basic Skew-T Plotting
# ---------------------
#
# The Skew-T (log-P) diagram is the standard way to view rawinsonde data. The
# y-axis is height in pressure coordinates and the x-axis is temperature. The
# y coordinates are plotted on a logarithmic scale and the x coordinate system
# is skewed. An explanation of skew-T interpretation is beyond the scope of this
# tutorial, but here we will plot one that can be used for analysis or
# publication.
#
# The most basic skew-T can be plotted with only five lines of Python.
# These lines perform the following tasks:
#
# 1. Create a ``Figure`` object and set the size of the figure.
#
# 2. Create a ``SkewT`` object
#
# 3. Plot the pressure and temperature (note that the pressure,
# the independent variable, is first even though it is plotted on the y-axis).
#
# 4. Plot the pressure and dewpoint temperature.
#
# 5. Plot the wind barbs at the appropriate pressure using the u and v wind
# components.
# Create a new figure. The dimensions here give a good aspect ratio
fig = plt.figure(figsize=(9, 9))
skew = SkewT(fig)
# Plot the data using normal plotting functions, in this case using
# log scaling in Y, as dictated by the typical meteorological plot
skew.plot(p, T, 'r', linewidth=2)
skew.plot(p, Td, 'g', linewidth=2)
skew.plot_barbs(p, u, v)
# Show the plot
plt.show()
##########################################################################
# Advanced Skew-T Plotting
# ------------------------
#
# Fiducial lines indicating dry adiabats, moist adiabats, and mixing ratio are
# useful when performing further analysis on the Skew-T diagram. Often the
# 0C isotherm is emphasized and areas of CAPE and CIN are shaded.
# Create a new figure. The dimensions here give a good aspect ratio
fig = plt.figure(figsize=(9, 9))
skew = SkewT(fig, rotation=30)
# Plot the data using normal plotting functions, in this case using
# log scaling in Y, as dictated by the typical meteorological plot
skew.plot(p, T, 'r')
skew.plot(p, Td, 'g')
skew.plot_barbs(p, u, v)
skew.ax.set_ylim(1000, 100)
skew.ax.set_xlim(-40, 60)
# Plot LCL temperature as black dot
skew.plot(lcl_pressure, lcl_temperature, 'ko', markerfacecolor='black')
# Plot the parcel profile as a black line
skew.plot(p, parcel_prof, 'k', linewidth=2)
# Shade areas of CAPE and CIN
skew.shade_cin(p, T, parcel_prof, Td)
skew.shade_cape(p, T, parcel_prof)
# Plot a zero degree isotherm
skew.ax.axvline(0, color='c', linestyle='--', linewidth=2)
# Add the relevant special lines
skew.plot_dry_adiabats()
skew.plot_moist_adiabats()
skew.plot_mixing_lines()
# Show the plot
plt.show()
##########################################################################
# Adding a Hodograph
# ------------------
#
# A hodograph is a polar representation of the wind profile measured by the rawinsonde.
# Winds at different levels are plotted as vectors with their tails at the origin, the angle
# from the vertical axis representing the direction, and the length representing the speed.
# The line plotted on the hodograph is a line connecting the tips of these vectors,
# which are not drawn.
# Create a new figure. The dimensions here give a good aspect ratio
fig = plt.figure(figsize=(9, 9))
skew = SkewT(fig, rotation=30)
# Plot the data using normal plotting functions, in this case using
# log scaling in Y, as dictated by the typical meteorological plot
skew.plot(p, T, 'r')
skew.plot(p, Td, 'g')
skew.plot_barbs(p, u, v)
skew.ax.set_ylim(1000, 100)
skew.ax.set_xlim(-40, 60)
# Plot LCL as black dot
skew.plot(lcl_pressure, lcl_temperature, 'ko', markerfacecolor='black')
# Plot the parcel profile as a black line
skew.plot(p, parcel_prof, 'k', linewidth=2)
# Shade areas of CAPE and CIN
skew.shade_cin(p, T, parcel_prof, Td)
skew.shade_cape(p, T, parcel_prof)
# Plot a zero degree isotherm
skew.ax.axvline(0, color='c', linestyle='--', linewidth=2)
# Add the relevant special lines
skew.plot_dry_adiabats()
skew.plot_moist_adiabats()
skew.plot_mixing_lines()
# Create a hodograph
# Create an inset axes object that is 40% width and height of the
# figure and put it in the upper right hand corner.
ax_hod = inset_axes(skew.ax, '40%', '40%', loc=1)
h = Hodograph(ax_hod, component_range=80.)
h.add_grid(increment=20)
h.plot_colormapped(u, v, wind_speed) # Plot a line colored by wind speed
# Show the plot
plt.show()
|
{
"content_hash": "019297da149a8558baaaf706c866988f",
"timestamp": "",
"source": "github",
"line_count": 202,
"max_line_length": 92,
"avg_line_length": 35.68811881188119,
"alnum_prop": 0.6712442779858511,
"repo_name": "metpy/MetPy",
"id": "107ecdda1d83c0f0df0b3d11c8d93d4436f3fd12",
"size": "7352",
"binary": false,
"copies": "6",
"ref": "refs/heads/gh-pages",
"path": "dev/_downloads/cf626a06a2e16be83be26aa926cd5642/upperair_soundings.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Jupyter Notebook",
"bytes": "989941"
},
{
"name": "Python",
"bytes": "551868"
}
],
"symlink_target": ""
}
|
from astropy.nddata import NDData, NDDataRef, NDIOMixin
# Alias NDDataAllMixins in case this will be renamed ... :-)
NDDataIO = NDDataRef
def test_simple_write_read():
ndd = NDDataIO([1, 2, 3])
assert hasattr(ndd, "read")
assert hasattr(ndd, "write")
|
{
"content_hash": "d870d4cd50b84c472d4b614a25ad3586",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 60,
"avg_line_length": 26.6,
"alnum_prop": 0.6917293233082706,
"repo_name": "mhvk/astropy",
"id": "1667f4d4c760e6becd6afc2d80b3de3017d543a6",
"size": "266",
"binary": false,
"copies": "3",
"ref": "refs/heads/placeholder",
"path": "astropy/nddata/mixins/tests/test_ndio.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "11040101"
},
{
"name": "C++",
"bytes": "47001"
},
{
"name": "Cython",
"bytes": "78776"
},
{
"name": "HTML",
"bytes": "1172"
},
{
"name": "Lex",
"bytes": "183333"
},
{
"name": "M4",
"bytes": "18757"
},
{
"name": "Makefile",
"bytes": "52508"
},
{
"name": "Python",
"bytes": "12404182"
},
{
"name": "Shell",
"bytes": "17024"
},
{
"name": "TeX",
"bytes": "853"
}
],
"symlink_target": ""
}
|
ACCOUNT_NAME = 'Fab Furnish'
|
{
"content_hash": "aaf06678e13655cc965276c3ac5c6c00",
"timestamp": "",
"source": "github",
"line_count": 1,
"max_line_length": 28,
"avg_line_length": 29,
"alnum_prop": 0.7241379310344828,
"repo_name": "ddy88958620/lib",
"id": "bee068e8fb64f56beea68a27c2518ac391035887",
"size": "29",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "Python/scrapy/fabfurnish/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
from django.db import models
from django.contrib.auth.models import User
from django.shortcuts import resolve_url as r
class Doctor(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
slug = models.SlugField('slug')
address = models.CharField('endereço', max_length=255, blank=True)
neighborhood = models.CharField('bairro', max_length=255, blank=True)
city = models.CharField('cidade', max_length=255, blank=True)
phone = models.CharField('telefone', max_length=255, blank=True)
specialization = models.CharField('especialização', max_length=255,
blank=True)
class Meta:
verbose_name = 'médico'
verbose_name_plural = 'médicos'
def __str__(self):
return ' '.join([self.user.first_name, self.user.last_name])
def get_absolute_url(self):
return r('doctor_details', slug=self.slug)
|
{
"content_hash": "c6e2d80162d8d03e8928bb69814587f0",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 73,
"avg_line_length": 38.208333333333336,
"alnum_prop": 0.6673936750272628,
"repo_name": "vribeiro1/plainsboro_221",
"id": "803f91b02312167a6e5db6ba47989f9533c5d1e3",
"size": "922",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plainsboro/doctor_subscriptions/models.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "14785"
},
{
"name": "Python",
"bytes": "43763"
}
],
"symlink_target": ""
}
|
import doctest
import glob
import os
import sys
sys.path.append(".\src")
opts = doctest.REPORT_ONLY_FIRST_FAILURE|doctest.ELLIPSIS|doctest.NORMALIZE_WHITESPACE
files = glob.glob('tests\*.rst')
failure = False
for f in files:
failed, tested = doctest.testfile(f, optionflags=opts)
if failed > 0:
failure = True
if not failure:
print "All tests passed!"
print "\nPress enter to re-run...",
raw_input()
os.system(['clear','cls'][os.name == 'nt'])
# restart myself
python = sys.executable
os.execl(python, python, * sys.argv)
|
{
"content_hash": "b628689fe2560f669e303f556e5acacb",
"timestamp": "",
"source": "github",
"line_count": 27,
"max_line_length": 86,
"avg_line_length": 21.25925925925926,
"alnum_prop": 0.6637630662020906,
"repo_name": "thisismyrobot/gedit-pytts",
"id": "7afe49b839911944a4fb91ecfaf49d5caed2db83",
"size": "574",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "runtests.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "62864"
}
],
"symlink_target": ""
}
|
"""
Test lldb watchpoint that uses '-s size' to watch a pointed location with size.
"""
import re
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class HelloWatchLocationTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
NO_DEBUG_INFO_TESTCASE = True
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Our simple source filename.
self.source = 'main.cpp'
# Find the line number to break inside main().
self.line = line_number(
self.source, '// Set break point at this line.')
# This is for verifying that watch location works.
self.violating_func = "do_bad_thing_with_location"
# Build dictionary to have unique executable names for each test
# method.
self.exe_name = self.testMethodName
self.d = {'CXX_SOURCES': self.source, 'EXE': self.exe_name}
# Most of the MIPS boards provide only one H/W watchpoints, and S/W
# watchpoints are not supported yet
@expectedFailureAll(triple=re.compile('^mips'))
# SystemZ and PowerPC also currently supports only one H/W watchpoint
@expectedFailureAll(archs=['powerpc64le', 's390x'])
@skipIfDarwin
def test_hello_watchlocation(self):
"""Test watching a location with '-s size' option."""
self.build(dictionary=self.d)
self.setTearDownCleanup(dictionary=self.d)
exe = self.getBuildArtifact(self.exe_name)
self.runCmd("file " + exe, CURRENT_EXECUTABLE_SET)
# Add a breakpoint to set a watchpoint when stopped on the breakpoint.
lldbutil.run_break_set_by_file_and_line(
self, None, self.line, num_expected_locations=1, loc_exact=False)
# Run the program.
self.runCmd("run", RUN_SUCCEEDED)
# We should be stopped again due to the breakpoint.
# The stop reason of the thread should be breakpoint.
self.expect("thread list", STOPPED_DUE_TO_BREAKPOINT,
substrs=['stopped',
'stop reason = breakpoint'])
# Now let's set a write-type watchpoint pointed to by 'g_char_ptr'.
self.expect(
"watchpoint set expression -w write -s 1 -- g_char_ptr",
WATCHPOINT_CREATED,
substrs=[
'Watchpoint created',
'size = 1',
'type = w'])
# Get a hold of the watchpoint id just created, it is used later on to
# match the watchpoint id which is expected to be fired.
match = re.match(
"Watchpoint created: Watchpoint (.*):",
self.res.GetOutput().splitlines()[0])
if match:
expected_wp_id = int(match.group(1), 0)
else:
self.fail("Grokking watchpoint id faailed!")
self.runCmd("expr unsigned val = *g_char_ptr; val")
self.expect(self.res.GetOutput().splitlines()[0], exe=False,
endstr=' = 0')
self.runCmd("watchpoint set expression -w write -s 4 -- &threads[0]")
# Use the '-v' option to do verbose listing of the watchpoint.
# The hit count should be 0 initially.
self.expect("watchpoint list -v",
substrs=['hit_count = 0'])
self.runCmd("process continue")
# We should be stopped again due to the watchpoint (write type), but
# only once. The stop reason of the thread should be watchpoint.
self.expect("thread list", STOPPED_DUE_TO_WATCHPOINT,
substrs=['stopped',
'stop reason = watchpoint %d' % expected_wp_id])
# Switch to the thread stopped due to watchpoint and issue some
# commands.
self.switch_to_thread_with_stop_reason(lldb.eStopReasonWatchpoint)
self.runCmd("thread backtrace")
self.expect("frame info",
substrs=[self.violating_func])
# Use the '-v' option to do verbose listing of the watchpoint.
# The hit count should now be 1.
self.expect("watchpoint list -v",
substrs=['hit_count = 1'])
self.runCmd("thread backtrace all")
|
{
"content_hash": "3ffbad90647eb6ab84a71358d7f7442a",
"timestamp": "",
"source": "github",
"line_count": 109,
"max_line_length": 79,
"avg_line_length": 38.944954128440365,
"alnum_prop": 0.6061248527679624,
"repo_name": "endlessm/chromium-browser",
"id": "55bf929b25fcf0eea6a84b14920331d9f01d6113",
"size": "4245",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "third_party/llvm/lldb/test/API/commands/watchpoints/hello_watchlocation/TestWatchLocation.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
import logging
from horizon.test import helpers as test
from selenium.common import exceptions as selenium_exceptions
class BrowserTests(test.SeleniumTestCase):
def test_jasmine(self):
url = "%s%s" % (self.live_server_url, "/jasmine/")
self.selenium.get(url)
wait = self.ui.WebDriverWait(self.selenium, 10)
def jasmine_done(driver):
text = driver.find_element_by_id("jasmine-testresult").text
return "Tests completed" in text
wait.until(jasmine_done)
failed_elem = self.selenium.find_element_by_class_name("failed")
failed = int(failed_elem.text)
if failed:
self.log_failure_messages()
self.assertEqual(failed, 0)
def log_failure_messages(self):
logger = logging.getLogger('selenium')
logger.error("Errors found during jasmine test:")
fail_elems = self.selenium.find_elements_by_class_name("fail")
for elem in fail_elems:
try:
module = elem.find_element_by_class_name("module-name").text
except selenium_exceptions.NoSuchElementException:
continue
message = elem.find_element_by_class_name("test-message").text
source = elem.find_element_by_tag_name("pre").text
logger.error("Module: %s, message: %s, source: %s" % (
module, message, source))
|
{
"content_hash": "62da92e624bcfbd319ea229754717add",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 76,
"avg_line_length": 38,
"alnum_prop": 0.6237553342816501,
"repo_name": "openstack/watcher-dashboard",
"id": "189326858758b1a889144bf1c6a3d8b5f720c08b",
"size": "1979",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "watcher_dashboard/test/selenium.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "HTML",
"bytes": "34377"
},
{
"name": "JavaScript",
"bytes": "2386"
},
{
"name": "Python",
"bytes": "181244"
},
{
"name": "SCSS",
"bytes": "131"
},
{
"name": "Shell",
"bytes": "21121"
}
],
"symlink_target": ""
}
|
def extractWwwMoonsTavernCom(item):
'''
Parser for 'www.moons-tavern.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False
|
{
"content_hash": "520625e32df45be9b73305850491021f",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 104,
"avg_line_length": 26.19047619047619,
"alnum_prop": 0.6272727272727273,
"repo_name": "fake-name/ReadableWebProxy",
"id": "77d8e68d50502a612f61e7b571c9490dac21e5d0",
"size": "551",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "WebMirror/management/rss_parser_funcs/feed_parse_extractWwwMoonsTavernCom.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "105811"
},
{
"name": "Dockerfile",
"bytes": "1178"
},
{
"name": "HTML",
"bytes": "119737"
},
{
"name": "JavaScript",
"bytes": "3006524"
},
{
"name": "Jupyter Notebook",
"bytes": "148075"
},
{
"name": "Mako",
"bytes": "1454"
},
{
"name": "Python",
"bytes": "5264346"
},
{
"name": "Shell",
"bytes": "1059"
}
],
"symlink_target": ""
}
|
"""
Forward messages sent to the configured email gateway to Zulip.
For zulip.com, messages to that address go to the Inbox of emailgateway@zulip.com.
Zulip voyager configurations will differ.
Messages meant for Zulip have a special recipient form of
<stream name>+<regenerable stream token>@streams.zulip.com
This pattern is configurable via the EMAIL_GATEWAY_PATTERN settings.py
variable.
This script can be used via two mechanisms:
1) Run this in a cronjob every N minutes if you have configured Zulip to poll
an external IMAP mailbox for messages. The script will then connect to
your IMAP server and batch-process all messages.
We extract and validate the target stream from information in the
recipient address and retrieve, forward, and archive the message.
2) Alternatively, configure your MTA to execute this script on message
receipt with the contents of the message piped to standard input. The
script will queue the message for processing. In this mode of invocation,
you should pass the destination email address in the ORIGINAL_RECIPIENT
environment variable.
In Postfix, you can express that via an /etc/aliases entry like this:
|/usr/bin/env python /home/zulip/deployments/current/manage.py email_mirror
"""
from __future__ import absolute_import
from __future__ import print_function
import six
from typing import Any, List, Generator
from argparse import ArgumentParser
import os
import logging
import sys
import posix
from django.conf import settings
from django.core.management.base import BaseCommand
from zerver.lib.queue import queue_json_publish
from zerver.lib.email_mirror import logger, process_message, \
extract_and_validate, ZulipEmailForwardError, \
mark_missed_message_address_as_used, is_missed_message_address
import email
from email.message import Message
from imaplib import IMAP4_SSL
## Setup ##
log_format = "%(asctime)s: %(message)s"
logging.basicConfig(format=log_format)
formatter = logging.Formatter(log_format)
file_handler = logging.FileHandler(settings.EMAIL_MIRROR_LOG_PATH)
file_handler.setFormatter(formatter)
logger.setLevel(logging.DEBUG)
logger.addHandler(file_handler)
def get_imap_messages():
# type: () -> Generator[Message, None, None]
mbox = IMAP4_SSL(settings.EMAIL_GATEWAY_IMAP_SERVER, settings.EMAIL_GATEWAY_IMAP_PORT)
mbox.login(settings.EMAIL_GATEWAY_LOGIN, settings.EMAIL_GATEWAY_PASSWORD)
try:
mbox.select(settings.EMAIL_GATEWAY_IMAP_FOLDER)
try:
status, num_ids_data = mbox.search(None, 'ALL') # type: bytes, List[bytes]
for msgid in num_ids_data[0].split():
status, msg_data = mbox.fetch(msgid, '(RFC822)')
msg_as_bytes = msg_data[0][1]
if six.PY2:
message = email.message_from_string(msg_as_bytes)
else:
message = email.message_from_bytes(msg_as_bytes)
yield message
mbox.store(msgid, '+FLAGS', '\\Deleted')
mbox.expunge()
finally:
mbox.close()
finally:
mbox.logout()
class Command(BaseCommand):
help = __doc__
def add_arguments(self, parser):
# type: (ArgumentParser) -> None
parser.add_argument('recipient', metavar='<recipient>', type=str, nargs='?', default=None,
help="original recipient")
def handle(self, *args, **options):
# type: (*Any, **str) -> None
rcpt_to = os.environ.get("ORIGINAL_RECIPIENT", options['recipient'])
if rcpt_to is not None:
if is_missed_message_address(rcpt_to):
try:
mark_missed_message_address_as_used(rcpt_to)
except ZulipEmailForwardError:
print("5.1.1 Bad destination mailbox address: Bad or expired missed message address.")
exit(posix.EX_NOUSER) # type: ignore # There are no stubs for posix in python 3
else:
try:
extract_and_validate(rcpt_to)
except ZulipEmailForwardError:
print("5.1.1 Bad destination mailbox address: Please use the address specified "
"in your Streams page.")
exit(posix.EX_NOUSER) # type: ignore # There are no stubs for posix in python 3
# Read in the message, at most 25MiB. This is the limit enforced by
# Gmail, which we use here as a decent metric.
msg_text = sys.stdin.read(25*1024*1024)
if len(sys.stdin.read(1)) != 0:
# We're not at EOF, reject large mail.
print("5.3.4 Message too big for system: Max size is 25MiB")
exit(posix.EX_DATAERR) # type: ignore # There are no stubs for posix in python 3
queue_json_publish(
"email_mirror",
{
"message": msg_text,
"rcpt_to": rcpt_to
},
lambda x: None
)
else:
# We're probably running from cron, try to batch-process mail
if (not settings.EMAIL_GATEWAY_BOT or not settings.EMAIL_GATEWAY_LOGIN or
not settings.EMAIL_GATEWAY_PASSWORD or not settings.EMAIL_GATEWAY_IMAP_SERVER or
not settings.EMAIL_GATEWAY_IMAP_PORT or not settings.EMAIL_GATEWAY_IMAP_FOLDER):
print("Please configure the Email Mirror Gateway in /etc/zulip/, "
"or specify $ORIGINAL_RECIPIENT if piping a single mail.")
exit(1)
for message in get_imap_messages():
process_message(message)
|
{
"content_hash": "1e85322e3f8d4f62918aed6eeac53712",
"timestamp": "",
"source": "github",
"line_count": 144,
"max_line_length": 106,
"avg_line_length": 40.06944444444444,
"alnum_prop": 0.634315424610052,
"repo_name": "krtkmj/zulip",
"id": "6f147f21d1e16cf1efb2222379d43f9df9a66457",
"size": "5793",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "zerver/management/commands/email_mirror.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "230591"
},
{
"name": "Groovy",
"bytes": "5516"
},
{
"name": "HTML",
"bytes": "440665"
},
{
"name": "JavaScript",
"bytes": "1412579"
},
{
"name": "Nginx",
"bytes": "1229"
},
{
"name": "PHP",
"bytes": "18929"
},
{
"name": "Pascal",
"bytes": "1113"
},
{
"name": "Perl",
"bytes": "401825"
},
{
"name": "Puppet",
"bytes": "86728"
},
{
"name": "Python",
"bytes": "2692439"
},
{
"name": "Ruby",
"bytes": "249738"
},
{
"name": "Shell",
"bytes": "34307"
}
],
"symlink_target": ""
}
|
from __future__ import unicode_literals, absolute_import, division
__version_info__ = (0, 2, 4)
__version__ = '.'.join(map(str, __version_info__))
|
{
"content_hash": "7cf2c2e38d848e13289b8014c55b26f9",
"timestamp": "",
"source": "github",
"line_count": 4,
"max_line_length": 66,
"avg_line_length": 37,
"alnum_prop": 0.6418918918918919,
"repo_name": "concordusapps/strudel",
"id": "7d2695abd566a83e641ceecacd83fb8593505ba9",
"size": "172",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "strudel/_version.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "19121"
}
],
"symlink_target": ""
}
|
"""
A server process manager providing different locking strategies to processes
accessing a shared resource. See strategies for concrete implementations.
"""
from multiprocessing.managers import BaseManager
from .strategies import (no_starve, writer_preference, LOCK_STRATEGY_NO_STARVE,
LOCK_STRATEGY_WRITER_PREFERENCE)
class SWMRSync(object):
def __init__(self):
self.__strategy = writer_preference
def set_strategy(self, strategy):
if strategy == LOCK_STRATEGY_NO_STARVE:
self.__strategy = no_starve
elif strategy == LOCK_STRATEGY_WRITER_PREFERENCE:
self.__strategy = writer_preference
else:
self.__strategy = None
raise Exception('Unknown locking strategy %s' % strategy)
def start_read(self, name):
return self.__strategy.start_read(name)
def end_read(self, name):
return self.__strategy.end_read(name)
def start_write(self, name):
return self.__strategy.start_write(name)
def end_write(self, name):
return self.__strategy.end_write(name)
class SWMRSyncManager(BaseManager):
pass
SWMRSyncManager.register('SWMRSync', SWMRSync)
def start_sync_manager():
"""
Start a server process which holds the SWMRSync object.
Other processes can manipulate (mainly acquire and release locks) it using
a proxy
:return: A SWMRSync proxy
"""
manager = SWMRSyncManager()
manager.start()
return manager.SWMRSync()
# All forked children have to access SWMRSync object using the SWMR_SYNC proxy.
# It is important that this module is imported before the child processes are
# forked to ensure that the manager is started by the parent process.
SWMR_SYNC = start_sync_manager()
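# Illustrative usage only (not part of the original module): a hypothetical
# worker that takes and releases a read lock through the shared proxy. The
# resource name 'mydata.h5' is an assumption made for this example.
def _example_reader():
    SWMR_SYNC.start_read('mydata.h5')
    try:
        pass  # read the shared resource here
    finally:
        SWMR_SYNC.end_read('mydata.h5')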
|
{
"content_hash": "17d72b352a9943b0f2e4b660f90bac60",
"timestamp": "",
"source": "github",
"line_count": 60,
"max_line_length": 79,
"avg_line_length": 29.683333333333334,
"alnum_prop": 0.6855699045480067,
"repo_name": "meteotest/hurray",
"id": "e87db0740d83284d2125a745b24d491e0ce93931",
"size": "3306",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "hurray/swmr/lock.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "391421"
},
{
"name": "Shell",
"bytes": "291"
}
],
"symlink_target": ""
}
|
from django.test import TestCase
from tastypie.http import HttpAccepted, HttpBadRequest, HttpConflict,\
HttpCreated, HttpGone, HttpMethodNotAllowed, HttpNoContent, HttpNotFound,\
HttpNotImplemented, HttpNotModified, HttpSeeOther, HttpTooManyRequests,\
HttpUnauthorized
class HttpTestCase(TestCase):
def test_various_statuses(self):
created = HttpCreated(location='http://example.com/thingy/1/')
self.assertEqual(created.status_code, 201)
self.assertEqual(created['Location'], 'http://example.com/thingy/1/')
# Regression.
created_2 = HttpCreated()
self.assertEqual(created_2.status_code, 201)
self.assertEqual(created_2['Location'], '')
accepted = HttpAccepted()
self.assertEqual(accepted.status_code, 202)
no_content = HttpNoContent()
self.assertEqual(no_content.status_code, 204)
see_other = HttpSeeOther()
self.assertEqual(see_other.status_code, 303)
not_modified = HttpNotModified()
self.assertEqual(not_modified.status_code, 304)
bad_request = HttpBadRequest()
self.assertEqual(bad_request.status_code, 400)
unauthorized = HttpUnauthorized()
self.assertEqual(unauthorized.status_code, 401)
not_found = HttpNotFound()
self.assertEqual(not_found.status_code, 404)
not_allowed = HttpMethodNotAllowed()
self.assertEqual(not_allowed.status_code, 405)
conflict = HttpConflict()
self.assertEqual(conflict.status_code, 409)
gone = HttpGone()
self.assertEqual(gone.status_code, 410)
toomanyrequests = HttpTooManyRequests()
self.assertEqual(toomanyrequests.status_code, 429)
not_implemented = HttpNotImplemented()
self.assertEqual(not_implemented.status_code, 501)
|
{
"content_hash": "b1b9734caadd1c4a3771d6521a3d9126",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 78,
"avg_line_length": 45.65,
"alnum_prop": 0.6861993428258488,
"repo_name": "ipsosante/django-tastypie",
"id": "f1724e13a8dba51305f4ae50bfd0c91ea93cf945",
"size": "1911",
"binary": false,
"copies": "9",
"ref": "refs/heads/master",
"path": "tests/core/tests/http.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "HTML",
"bytes": "988"
},
{
"name": "Python",
"bytes": "803225"
},
{
"name": "Shell",
"bytes": "1162"
}
],
"symlink_target": ""
}
|
from django.contrib import admin
from NutmegCRM.apps.crm.models import Customer
# Register your models here.
class CustomerAdmin(admin.ModelAdmin):
list_display = ['first_name']
list_filter = ['first_name', 'created', 'last_name']
search_fields = ['first_name', 'last_name', 'email']
    date_hierarchy = 'created'
save_on_top = True
#prepopulated_fields = {"slug": ("title",)}
admin.site.register(Customer, CustomerAdmin)
|
{
"content_hash": "f26777cb6f71cf81fd5db732699e6d85",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 56,
"avg_line_length": 31.714285714285715,
"alnum_prop": 0.6936936936936937,
"repo_name": "Justasic/NutmegCRM",
"id": "2c134035d09c4d1c01a63c59f3c1e84de601cc1a",
"size": "444",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "NutmegCRM/apps/crm/admin.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "15052"
},
{
"name": "HTML",
"bytes": "11947"
},
{
"name": "JavaScript",
"bytes": "8085"
},
{
"name": "Python",
"bytes": "18687"
}
],
"symlink_target": ""
}
|
import sys
from twisted.internet.defer import Deferred
def got_poem(poem):
print poem
from twisted.internet import reactor
reactor.stop()
def poem_failed(err):
print >>sys.stderr, 'poem download failed'
print >>sys.stderr, 'I am terribly sorry'
print >>sys.stderr, 'try again later?'
from twisted.internet import reactor
reactor.stop()
d = Deferred()
d.addCallbacks(got_poem, poem_failed)
from twisted.internet import reactor
reactor.callWhenRunning(d.callback, 'Another short poem.')
reactor.run()
#Another short poem.
|
{
"content_hash": "43462cee2b989c0d971bf8acfcece842",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 58,
"avg_line_length": 20.071428571428573,
"alnum_prop": 0.7206405693950177,
"repo_name": "tidalmelon/twisted-intro",
"id": "7b8c71ca95701663fe89033a4799dfa2e0ca8d21",
"size": "562",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "twisted-deferred/defer-8.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Erlang",
"bytes": "2523"
},
{
"name": "Haskell",
"bytes": "3262"
},
{
"name": "Makefile",
"bytes": "137"
},
{
"name": "Python",
"bytes": "135832"
},
{
"name": "Shell",
"bytes": "86"
}
],
"symlink_target": ""
}
|
class Transaction(object):
def __init__(self, database, isolation_level):
self.database = database
self.isolation_level = isolation_level
def __enter__(self):
self.database.start_transaction(self.isolation_level)
def __exit__(self, exception_type, exception, traceback):
if exception:
self.database.rollback()
else:
self.database.commit()
|
{
"content_hash": "14d360d6827ecf2ed28cefe310e35563",
"timestamp": "",
"source": "github",
"line_count": 14,
"max_line_length": 61,
"avg_line_length": 29.857142857142858,
"alnum_prop": 0.6220095693779905,
"repo_name": "hugollm/rebel",
"id": "d437565d41bf371c0509f8b63f1f1f29ecbc7d1e",
"size": "418",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rebel/transaction.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "27378"
},
{
"name": "Shell",
"bytes": "728"
}
],
"symlink_target": ""
}
|
"""Functions that aid with generating text from templates and maps."""
import re
# POSIX shell variable syntax:
# Expansions with unset var
# ${var}=
# ${var+WORD}=
# ${var:+WORD}=
# ${var-WORD}=WORD
# ${var:-WORD}=WORD
# Expansions with empty var
# ${var}=
# ${var+WORD}=WORD
# ${var:+WORD}=
# ${var-WORD}=
# ${var:-WORD}=WORD
# Expansions with var=SET
# ${var}=SET
# ${var+WORD}=WORD
# ${var:+WORD}=WORD
# ${var-WORD}=SET
# ${var:-WORD}=SET
# This expression replaces markers in template text with the value
# obtained by looking up the marker in a dictionary.
# %{id} = value
_substIdPattern = re.compile("%{(?P<id>\w+)}")
# This expression performs conditional substitution: if the expression
# provided evaluates to true in a given context, then one value is
# substituted, otherwise the alternative value is substituted.
# %{?<cond>??<true>?:<false>?}
# %{?1 == 2??true?:false?}
_substConditionalPattern = re.compile("%{\?(?P<expr>.+?)\?\?(?P<true>.*?)(\?:(?P<false>.*?))?\?}", re.MULTILINE + re.DOTALL)
# This expression tests whether an identifier is defined to a non-None
# value in the context; if so, it replaces the marker with template
# text. In that replacement text, the value ?@ is replaced by the
# test expression. Contrast POSIX shell ${ID+subst}${ID-subst}
# Note: NOT by the value of the test expression. If no replacement
# text is given, the replacement '%{?@}' is used, which replaces it
# with the value of the test expression.
# %{?<id>?+<yessubst>?-<nosubst>?}
# %{?maybe_text?+?@ is defined to be %{?@}?}
_substIfDefinedPattern = re.compile("%{\?(?P<id>\w+)(\?\+(?P<repl>.*?))?(\?\-(?P<ndrepl>.*?))?\?}", re.MULTILINE + re.DOTALL)
# The pattern which, if present in the body of an IfDefined block, is
# replaced by the test expression.
_substDefinedBodyPattern = re.compile("\?@")
def _bodyIfDefinedPattern (match_object, dictionary):
global _substDefinedBodyPattern
id = match_object.group('id')
repl = match_object.group('repl')
ndrepl = match_object.group('ndrepl')
value = dictionary.get(id, None)
if value is not None:
if repl:
return _substDefinedBodyPattern.sub(id, repl)
if ndrepl:
return ''
return _substDefinedBodyPattern.sub(id, '%{?@}')
else:
if ndrepl:
return _substDefinedBodyPattern.sub(id, ndrepl)
return ''
def _bodyConditionalPattern (match_object, dictionary):
global _substDefinedBodyPattern
expr = match_object.group('expr')
true = match_object.group('true')
false = match_object.group('false')
value = None
try:
value = eval(expr, dictionary)
except Exception, e:
return '%%{EXCEPTION: %s}' % (e,)
if value:
return _substDefinedBodyPattern.sub(expr, true)
if false is not None:
return _substDefinedBodyPattern.sub(expr, false)
return ''
def replaceInText (text, **dictionary):
global _substIfDefinedPattern
global _substConditionalPattern
global _substIdPattern
global _substDefinedBodyPattern
rv = text
rv = _substIfDefinedPattern.sub(lambda _x: _bodyIfDefinedPattern(_x, dictionary), rv)
rv = _substConditionalPattern.sub(lambda _x: _bodyConditionalPattern(_x, dictionary), rv)
rv = _substIdPattern.sub(
lambda _x,_map=dictionary:
_map.get(_x.group('id'), '%%{MISSING:%s}' % (_x.group('id'),))
, rv)
return rv
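# Illustrative usage only (not part of the original module): exercises the
# plain, if-defined and conditional markers; the marker names 'greeting',
# 'name' and 'count' are assumptions made for this example.
def _example_usage ():
    assert replaceInText('%{greeting}, %{?name?+?@=%{name}?-anonymous?}!',
                         greeting='Hello', name='world') == 'Hello, name=world!'
    assert replaceInText('%{?count > 1??many?:one?}', count=3) == 'many'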
|
{
"content_hash": "69bb856a355bc181355dcd9bb0de2e85",
"timestamp": "",
"source": "github",
"line_count": 97,
"max_line_length": 125,
"avg_line_length": 35.391752577319586,
"alnum_prop": 0.6481211768132829,
"repo_name": "jonfoster/pyxb1",
"id": "74e56705223276b453b8518291a6dbdfa31e15bc",
"size": "4021",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "pyxb/utils/templates.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "1564427"
},
{
"name": "Shell",
"bytes": "18946"
}
],
"symlink_target": ""
}
|
"""
Talon.One API
    Use the Talon.One API to integrate with your application and to manage applications and campaigns: - Use the operations in the [Integration API section](#integration-api) to integrate with our platform - Use the operations in the [Management API section](#management-api) to manage applications and campaigns. ## Determining the base URL of the endpoints The API is available at the same hostname as your Campaign Manager deployment. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerSession](https://docs.talon.one/integration-api/#operation/updateCustomerSessionV2) endpoint is `https://mycompany.talon.one/v2/customer_sessions/{Id}` # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from talon_one.configuration import Configuration
class LoyaltyProjection(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'projections': 'list[LoyaltyProjectionData]',
'total_expiring_points': 'float',
'total_activating_points': 'float'
}
attribute_map = {
'projections': 'projections',
'total_expiring_points': 'totalExpiringPoints',
'total_activating_points': 'totalActivatingPoints'
}
def __init__(self, projections=None, total_expiring_points=None, total_activating_points=None, local_vars_configuration=None): # noqa: E501
"""LoyaltyProjection - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._projections = None
self._total_expiring_points = None
self._total_activating_points = None
self.discriminator = None
if projections is not None:
self.projections = projections
self.total_expiring_points = total_expiring_points
self.total_activating_points = total_activating_points
@property
def projections(self):
"""Gets the projections of this LoyaltyProjection. # noqa: E501
:return: The projections of this LoyaltyProjection. # noqa: E501
:rtype: list[LoyaltyProjectionData]
"""
return self._projections
@projections.setter
def projections(self, projections):
"""Sets the projections of this LoyaltyProjection.
:param projections: The projections of this LoyaltyProjection. # noqa: E501
:type: list[LoyaltyProjectionData]
"""
self._projections = projections
@property
def total_expiring_points(self):
"""Gets the total_expiring_points of this LoyaltyProjection. # noqa: E501
Sum of points to be expired by the projection date set in the query parameter. # noqa: E501
:return: The total_expiring_points of this LoyaltyProjection. # noqa: E501
:rtype: float
"""
return self._total_expiring_points
@total_expiring_points.setter
def total_expiring_points(self, total_expiring_points):
"""Sets the total_expiring_points of this LoyaltyProjection.
Sum of points to be expired by the projection date set in the query parameter. # noqa: E501
:param total_expiring_points: The total_expiring_points of this LoyaltyProjection. # noqa: E501
:type: float
"""
if self.local_vars_configuration.client_side_validation and total_expiring_points is None: # noqa: E501
raise ValueError("Invalid value for `total_expiring_points`, must not be `None`") # noqa: E501
self._total_expiring_points = total_expiring_points
@property
def total_activating_points(self):
"""Gets the total_activating_points of this LoyaltyProjection. # noqa: E501
Sum of points to be active by the projection date set in the query parameter. # noqa: E501
:return: The total_activating_points of this LoyaltyProjection. # noqa: E501
:rtype: float
"""
return self._total_activating_points
@total_activating_points.setter
def total_activating_points(self, total_activating_points):
"""Sets the total_activating_points of this LoyaltyProjection.
Sum of points to be active by the projection date set in the query parameter. # noqa: E501
:param total_activating_points: The total_activating_points of this LoyaltyProjection. # noqa: E501
:type: float
"""
if self.local_vars_configuration.client_side_validation and total_activating_points is None: # noqa: E501
raise ValueError("Invalid value for `total_activating_points`, must not be `None`") # noqa: E501
self._total_activating_points = total_activating_points
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, LoyaltyProjection):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, LoyaltyProjection):
return True
return self.to_dict() != other.to_dict()
|
{
"content_hash": "cc227c36212692e2a71077ca42f4d06a",
"timestamp": "",
"source": "github",
"line_count": 176,
"max_line_length": 732,
"avg_line_length": 38.44886363636363,
"alnum_prop": 0.6383921974286981,
"repo_name": "talon-one/talon_one.py",
"id": "816e16d07c09af7d73f718c70a86d51419262eb7",
"size": "6784",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "talon_one/models/loyalty_projection.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "238"
},
{
"name": "Python",
"bytes": "5139586"
},
{
"name": "Shell",
"bytes": "1826"
}
],
"symlink_target": ""
}
|