code
stringlengths 1
199k
|
|---|
import logging


def setup_logging():
    """Configure the root logger with a single DEBUG-level stream handler.

    Any previously attached handlers are removed first, so calling this
    repeatedly does not duplicate log output.

    @return: the configured root logger
    """
    logger = logging.getLogger()
    # Clear any old handlers.  Iterate over a *copy*: the original code
    # iterated the live ``logger.handlers`` list while removing from it,
    # which skips every other handler when more than one is attached.
    for handler in list(logger.handlers):
        logger.removeHandler(handler)
    logger.setLevel(logging.DEBUG)
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)s - %(message)s', "%Y-%m-%d %H:%M:%S")
    ch.setFormatter(formatter)
    logger.addHandler(ch)
    return logger
|
import os

try:
    from setuptools import setup, find_packages, Extension
except ImportError:
    # Bootstrap setuptools on systems that do not ship it.
    from ez_setup import use_setuptools
    use_setuptools()
    from setuptools import setup, find_packages, Extension

import os.path

# Package sources live under ./src next to this setup script.
src_folder = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'src')

setup(
    name='meaningtoolws',
    version="0.1",
    description='Meaningtool Web Services Python Client',
    author='Popego Team',
    author_email='contact@meaningtool.com',
    url='',
    install_requires=[],
    tests_require=[
        'nose',
    ],
    package_dir={'': 'src'},
    packages=find_packages(where=src_folder, exclude=['test', 'test.*']),
    include_package_data=True,
    test_suite='nose.collector',
    entry_points="",
    zip_safe=False,
)
|
import numpy
import os
import numpy
import os
from nmt import train
def main(job_id, params):
    """Launch one NMT training run and return its validation error.

    @param job_id: identifier assigned by the experiment driver (unused here)
    @param params: dict mapping hyper-parameter names to one-element lists
    @return: validation error reported by ``train``
    """
    print(params)
    # Hyper-parameters arrive wrapped in one-element lists; unwrap them.
    options = dict(
        saveto=params['model'][0],
        reload_=params['reload'][0],
        dim_word=params['dim_word'][0],
        dim=params['dim'][0],
        n_words=params['n-words'][0],
        n_words_src=params['n-words'][0],
        decay_c=params['decay-c'][0],
        clip_c=params['clip-c'][0],
        lrate=params['learning-rate'][0],
        optimizer=params['optimizer'][0],
        use_dropout=params['use-dropout'][0],
    )
    # Fixed training-schedule settings for this experiment.
    options.update(
        patience=1000,
        maxlen=50,
        batch_size=32,
        valid_batch_size=32,
        validFreq=100,
        dispFreq=100,
        saveFreq=1000,
        sampleFreq=1000,
        # shuffle_each_epoch=True,
        overwrite=False,
    )
    # Corpus locations (hard-coded for this experiment machine).
    options['datasets'] = ['/home/chenhd/data/zh2en/tree/corpus.ch',
                           '/home/chenhd/data/zh2en/tree/corpus.en']
    options['valid_datasets'] = ['/home/chenhd/data/zh2en/devntest/MT02/MT02.src',
                                 '/home/chenhd/data/zh2en/devntest/MT02/reference0']
    options['dictionaries'] = ['/home/chenhd/data/zh2en/tree/corpus.ch.pkl',
                               '/home/chenhd/data/zh2en/tree/corpus.en.pkl']
    options['treeset'] = ['/home/chenhd/data/zh2en/tree/corpus.ch.tree',
                          '/home/chenhd/data/zh2en/devntest/MT02/MT02.ce.tree']
    return train(**options)
if __name__ == '__main__':
    # Default experiment configuration.  Every value is wrapped in a
    # one-element list because main() unwraps each entry with [0].
    main(0, {
        'model': ['model_hal.npz'],
        'dim_word': [512],
        'dim': [1024],
        'n-words': [30000],
        'optimizer': ['adadelta'],
        'decay-c': [0.],
        'clip-c': [1.],
        'use-dropout': [False],
        'learning-rate': [0.0001],
        'reload': [True]})
|
import os

# Paths derived from this file's location; the project is named after the
# directory containing this settings module.
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
PROJECT_NAME = os.path.basename(PROJECT_ROOT)

# Debug switches.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
INTERNAL_IPS = ()
if DEBUG:
    TEMPLATE_STRING_IF_INVALID = ''

# Caching (local memory backend, keys prefixed with the project name).
CACHE_BACKEND = 'locmem://'
CACHE_MIDDLEWARE_KEY_PREFIX = '%s_' % PROJECT_NAME
CACHE_MIDDLEWARE_SECONDS = 600

# Recipients of error notifications.
ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS

# E-mail delivery.
DEFAULT_FROM_EMAIL = 'from-mail@example.com'
SERVER_EMAIL = 'error-notify@example.com'
EMAIL_SUBJECT_PREFIX = '[%s] ' % PROJECT_NAME
EMAIL_HOST = 'localhost'
EMAIL_PORT = 25
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = False

# Authentication URLs.
LOGIN_URL = '/accounts/login/'
LOGOUT_URL = '/accounts/logout/'
LOGIN_REDIRECT_URL = '/'

# Database: SQLite file in the project root (development defaults).
DATABASE_ENGINE = 'sqlite3'
DATABASE_NAME = os.path.join(PROJECT_ROOT, 'dev.db')
DATABASE_USER = ''
DATABASE_PASSWORD = ''
DATABASE_HOST = ''
DATABASE_PORT = ''

# Locale / i18n.
TIME_ZONE = 'Europe/Berlin'
LANGUAGE_CODE = 'de'
LANGUAGES = (
    ('en', 'English'),
    ('de', 'German'),
)
USE_I18N = True
SITE_ID = 1

# Media files.
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'site_media')
MEDIA_URL = '/media/'
ADMIN_MEDIA_PREFIX = '/django_admin_media/'

ROOT_URLCONF = '%s.urls' % PROJECT_NAME

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.admin',
    'django.contrib.comments',
    'tagging',
    'ticker',
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.middleware.http.ConditionalGetMiddleware',
    'django.middleware.common.CommonMiddleware',
)

TEMPLATE_CONTEXT_PROCESSORS = (
    'django.core.context_processors.auth',
    'django.core.context_processors.debug',
    'django.core.context_processors.i18n',
    'django.core.context_processors.media',
    'django.core.context_processors.request',
)

TEMPLATE_DIRS = (
    os.path.join(PROJECT_ROOT, 'templates'),
)

TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.load_template_source',
    'django.template.loaders.app_directories.load_template_source',
)
# Generate a SECRET_KEY on first run and cache it in secret.txt; reuse it
# on later runs so sessions/signatures stay valid across restarts.
try:
    SECRET_KEY
except NameError:
    SECRET_FILE = os.path.join(PROJECT_ROOT, 'secret.txt')
    try:
        SECRET_KEY = open(SECRET_FILE).read().strip()
    except IOError:
        try:
            from random import choice
            # NOTE(review): ``random`` is not cryptographically strong;
            # consider ``secrets.choice`` for new deployments.
            SECRET_KEY = ''.join([choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)])
            # ``open`` instead of the Python-2-only ``file`` builtin; the
            # context manager guarantees the handle is closed.
            with open(SECRET_FILE, 'w') as secret:
                secret.write(SECRET_KEY)
        except IOError:
            # BUG FIX: the original constructed this Exception but never
            # raised it, silently leaving SECRET_KEY undefined.
            raise Exception('Please create a %s file with random characters to generate your secret key!' % SECRET_FILE)

# Allow machine-specific overrides without editing this file.
try:
    from local_settings import *
except ImportError:
    pass
|
from gdb_test import AssertEquals
import gdb_test
def test(gdb):
    """Run the debuggee to a breakpoint and verify the MI stack views."""
    gdb.Command('break leaf_call')
    gdb.ResumeAndExpectStop('continue', 'breakpoint-hit')
    # Innermost three frames should be leaf_call <- nested_calls <- main.
    frames = gdb.Command('-stack-list-frames 0 2')
    for depth, func in enumerate(('leaf_call', 'nested_calls', 'main')):
        AssertEquals(frames['stack'][depth]['frame']['func'], func)
    # First argument of the two innermost frames: 2, then 1.
    args = gdb.Command('-stack-list-arguments 1 0 1')
    for depth, value in enumerate(('2', '1')):
        AssertEquals(args['stack-args'][depth]['frame']['args'][0]['value'], value)
    gdb.Command('return')
    gdb.ResumeAndExpectStop('finish', 'function-finished')
    AssertEquals(gdb.Eval('global_var'), '1')
if __name__ == '__main__':
    # Run this scenario against the 'stack_trace' debuggee binary.
    gdb_test.RunTest(test, 'stack_trace')
|
import sys, os, os.path
from subprocess import call

# Directory of this script and its parent (the Django project root).
cur_dir = os.path.dirname(os.path.realpath(__file__))
parent_dir = os.path.dirname(cur_dir)

# Run each test stage in order and collect its exit status.
statuses = [call(["echo", "Running python unit tests via nose..."])]
for cmd in (
        [os.path.join(parent_dir, "manage.py"), "test", "lab.tests"],
        [os.path.join(cur_dir, "prepare_tests.py")],
        ["/usr/bin/env", "phantomjs", os.path.join(cur_dir, "jasmine.js")]):
    statuses.append(call(cmd, env=os.environ.copy()))

# Exit with the first non-zero status, or 0 when everything passed.
final_status = next((status for status in statuses if status != 0), 0)
sys.exit(final_status)
|
"""
Class for easily interacting with virtual machines
(c) 2015 Massachusetts Institute of Technology
"""
import multiprocessing
import os
import logging
logger = logging.getLogger(__name__)
from subprocess import call
import lophi.globals as G
from lophi.machine import Machine
from lophi.sensors.memory.virtual import MemorySensorVirtual
from lophi.sensors.disk.virtual import DiskSensorVirtual
from lophi.sensors.control.virtual import ControlSensorVirtual
from lophi.sensors.cpu.virtual import CPUSensorVirtual
from lophi.sensors.network.virtual import NetworkSensorVirtual
# Serialize libvirt operations across worker processes.
libvirt_mutex = multiprocessing.Lock()
"""
These are the strings in the template that have to be replaced for a new VM
"""
# Map of machine type -> libvirt domain XML template.  The %%...%%
# placeholders are substituted by generate_new_xen_xml().
VIRSH_TEMPLATE = {}
VIRSH_TEMPLATE[G.MACHINE_TYPES.XEN] = """<domain type='xen'>
<name>%%VMNAME%%</name>
<uuid></uuid>
<memory>%%MEM_SIZE%%</memory>
<currentMemory>%%MEM_SIZE%%</currentMemory>
<vcpu>%%CPU_COUNT%%</vcpu>
<os>
<type>hvm</type>
<loader>/usr/lib/xen-default/boot/hvmloader</loader>
<boot dev='hd'/>
</os>
<features>
<acpi/>
<apic/>
<pae/>
</features>
<clock offset='localtime'>
<timer name='hpet' present='no'/>
</clock>
<on_poweroff>destroy</on_poweroff>
<on_reboot>restart</on_reboot>
<on_crash>restart</on_crash>
<devices>
<emulator>/usr/lib/xen-default/bin/qemu-dm</emulator>
<disk type='file' device='disk'>
<driver name='tap' type='qcow2'/>
<source file='%%DISKIMG%%'/>
<target dev='xvda' bus='xen'/>
</disk>
<interface type='network'>
<mac address='%%MACADDR%%'/>
<source network='network-lophi'/>
<filterref filter='isolated-lophi'>
<parameter name='GATEWAY_MAC' value='aa:bb:cc:dd:ee:ff'/>
</filterref>
</interface>
<serial type='pty'>
<target port='0'/>
</serial>
<console type='pty'>
<target type='serial' port='0'/>
</console>
<input type='tablet' bus='usb'/>
<input type='mouse' bus='ps2'/>
<graphics type='vnc' port='-1' autoport='yes' keymap='en-us'/>
<sound model='es1370'/>
</devices>
</domain>
"""
# KVM variant of the same template (IDE disk + cdrom, VGA, virtio balloon).
VIRSH_TEMPLATE[G.MACHINE_TYPES.KVM] = """<domain type='kvm'>
<name>%%VMNAME%%</name>
<uuid></uuid>
<memory>%%MEM_SIZE%%</memory>
<currentMemory>%%MEM_SIZE%%</currentMemory>
<vcpu>%%CPU_COUNT%%</vcpu>
<os>
<type arch='x86_64' machine='pc-i440fx-trusty'>hvm</type>
<boot dev='hd'/>
</os>
<features>
<acpi/>
<apic/>
<pae/>
</features>
<clock offset='localtime'/>
<on_poweroff>destroy</on_poweroff>
<on_reboot>restart</on_reboot>
<on_crash>restart</on_crash>
<devices>
<emulator>/usr/bin/kvm-spice</emulator>
<disk type='file' device='disk'>
<driver name='qemu' type='qcow2'/>
<source file='%%DISKIMG%%'/>
<target dev='hda' bus='ide'/>
<address type='drive' controller='0' bus='0' unit='0'/>
</disk>
<disk type='file' device='cdrom'>
<driver name='qemu' type='raw'/>
<target dev='hdc' bus='ide'/>
<readonly/>
<address type='drive' controller='0' bus='1' unit='0'/>
</disk>
<controller type='ide' index='0'>
<address type='pci' domain='0x0000' bus='0x00' slot='0x01' function='0x1'/>
</controller>
<interface type='network'>
<mac address='%%MACADDR%%'/>
<source network='network-lophi'/>
<address type='pci' domain='0x0000' bus='0x00' slot='0x03' function='0x0'/>
<filterref filter='isolated-lophi'>
<parameter name='GATEWAY_MAC' value='aa:bb:cc:dd:ee:ff'/>
</filterref>
</interface>
<serial type='pty'>
<target port='0'/>
</serial>
<console type='pty'>
<target type='serial' port='0'/>
</console>
<input type='tablet' bus='usb'/>
<input type='mouse' bus='ps2'/>
<graphics type='vnc' port='-1' autoport='yes'/>
<sound model='ich6'>
<address type='pci' domain='0x0000' bus='0x00' slot='0x04' function='0x0'/>
</sound>
<video>
<model type='vga' vram='9216' heads='1'/>
<address type='pci' domain='0x0000' bus='0x00' slot='0x02' function='0x0'/>
</video>
<memballoon model='virtio'>
<address type='pci' domain='0x0000' bus='0x00' slot='0x05' function='0x0'/>
</memballoon>
</devices>
</domain>
"""
class REPLACE_STRINGS:
    """Placeholder tokens substituted into a VIRSH_TEMPLATE by generate_new_xen_xml()."""
    vm_name = '%%VMNAME%%'
    disk_img = '%%DISKIMG%%'
    mac_addr = '%%MACADDR%%'
    usb_img = '%%USBIMAGE%%'
    memory_size = '%%MEM_SIZE%%'
    cpu_count = '%%CPU_COUNT%%'
def generate_new_xen_xml(VIRSH_TEMPLATE, vm_name,
                         disk_img,
                         mac_addr,
                         memory_size=1048576,  # 1GB of memory
                         cpu_count=1):
    """
    Fill a libvirt domain XML template with per-VM values.

    @param VIRSH_TEMPLATE: template text containing %%...%% placeholders
    @param vm_name: domain name for the new VM
    @param disk_img: path to the VM's disk image
    @param mac_addr: MAC address for the VM's network interface
    @param memory_size: memory size placeholder value (default 1048576)
    @param cpu_count: number of virtual CPUs
    @return: the template with every placeholder substituted
    """
    substitutions = (
        (REPLACE_STRINGS.vm_name, vm_name),
        (REPLACE_STRINGS.disk_img, disk_img),
        (REPLACE_STRINGS.mac_addr, mac_addr),
        (REPLACE_STRINGS.memory_size, str(memory_size)),
        (REPLACE_STRINGS.cpu_count, str(cpu_count)),
    )
    xml = VIRSH_TEMPLATE
    for token, value in substitutions:
        xml = xml.replace(token, value)
    return xml
class VirtualMachine(Machine):
    """A LO-PHI machine backed by a libvirt virtual machine (Xen or KVM)."""

    # Name used for the libvirt snapshot that LO-PHI creates and restores.
    SNAPSHOT_NAME = "lophisnapshot"
    SNAPSHOT_XML = "\n<domainsnapshot>\n" \
                   "\t<name>%s</name>\n" \
                   "\t<description>LOPHI Snapshot</description>\n" \
                   "</domainsnapshot>\n" % (SNAPSHOT_NAME)
    #/home/lophi/projects/lophi_software/usb.img'/
    # Device XML for attaching a USB disk image (%%USBIMAGE%% placeholder).
    USB_XML = "<disk type='file' device='disk'>" \
              " <driver name='qemu' type='raw'/>" \
              " <source file='%%USBIMAGE%%'/> " \
              " <target dev='sda' bus = 'usb'/> " \
              " <alias name='usb-disk0'/> " \
              "</disk>"
    # Next MAC address to hand out; incremented by __get_new_mac per VM.
    MAC_ADDR = 18090670686208
    def __init__(self, vm_name,
                 vm_type=G.MACHINE_TYPES.KVM,
                 static_mac=None,
                 memory_size=1073741824,
                 cpu_count=1,
                 force_new=False,
                 volatility_profile=None,
                 **kargs):
        """
        Build the VM description and attach all virtual sensors.

        @param vm_name: libvirt domain name for this VM
        @param vm_type: member of G.MACHINE_TYPES (KVM by default)
        @param static_mac: fixed MAC address, or None to generate a new one
        @param memory_size: memory size for the VM (default 1073741824;
            units not stated here -- TODO confirm against Machine/libvirt)
        @param cpu_count: number of virtual CPUs
        @param force_new: when True and the domain does not yet exist,
            lophi_init() is run to create its config
        @param volatility_profile: Volatility profile name for this VM
        @param kargs: extra flags; 'require_mutex' and 'force_new' are read
        """
        # Initialize our state variables
        self.type = vm_type
        self.MACHINE_TYPE = vm_type
        # Ad-hoc config object handed to Machine.__init__ below.
        class MachineConfig():
            # Name
            name = vm_name
            # DISK
            disk = os.path.join(G.DIR_ROOT,G.DIR_VM_OUTPUT,vm_name+".qcow2")
            disk_base = None
            # Config
            vm_config = os.path.join(G.DIR_ROOT,G.DIR_VM_OUTPUT,vm_name+".xml")
        config = MachineConfig()
        # MAC
        if static_mac is None:
            config.__dict__['mac_addr'] = self.__get_new_mac()
        else:
            config.__dict__['mac_addr'] = static_mac
        config.__dict__['vm_name'] = vm_name
        config.__dict__['memory_size'] = memory_size
        config.__dict__['cpu_count'] = cpu_count
        config.__dict__['volatility_profile'] = volatility_profile
        Machine.__init__(self, config)
        # Add all of our sensors
        # Control sensor must be added first to interact with libvirt
        self.add_sensor(ControlSensorVirtual(vm_name,vm_type))
        # What state are we in?
        state = self.control.get_state()
        # UKNOWN is does not exist
        if force_new and state is None:
            self.lophi_init()
        elif state != G.SENSOR_CONTROL.POWER_STATUS.UNKNOWN:
            logger.debug("VM (%s) already exists."%self.config.name)
        # Add all of our sensors to this VM.  Prefer the disk file reported
        # by libvirt; fall back to our configured path.
        vm_disk = self.disk_get_filename()
        if vm_disk is not None:
            self.add_sensor(DiskSensorVirtual(vm_disk))
        else:
            self.add_sensor(DiskSensorVirtual(self.config.disk))
        self.add_sensor(CPUSensorVirtual(config.vm_name))
        self.add_sensor(MemorySensorVirtual(config.vm_name))
        net_iface = self.network_get_interface()
        if net_iface is not None:
            self.add_sensor(NetworkSensorVirtual(net_iface))
        else:
            logger.warn("No network intface exists for %s"%self.config.vm_name)
        # Do we need to mutex these accesses?
        self.REQUIRE_MUTEX = False
        if "require_mutex" in kargs and kargs['require_mutex']:
            self.REQUIRE_MUTEX = True
        # Force a completely fresh instance?
        # NOTE(review): force_new is both a named parameter (used above) and
        # read again from kargs here; a caller passing force_new=True as a
        # keyword only hits the first path -- confirm intended semantics.
        if "force_new" in kargs and kargs['force_new']:
            # Poweroff existing machine
            self.control.power_off()
def __get_new_mac(self):
# Convert to hex
mac = hex(VirtualMachine.MAC_ADDR)[2:]
# Increment
VirtualMachine.MAC_ADDR += 1
# Break into hex sections
out = []
for x in range(6):
out.append(mac[x*2:x*2+2])
# return properly formatted MAC
rtn = ":".join(out)
logger.debug("Generated MAC: %s"%rtn)
return rtn
def lophi_init(self, force_new=False):
"""
This will get our machine ready to be used with LO-PHI
"""
# Check for sensor
if self.control is None:
logger.error("No control sensor has been defined for "%self.config.name)
return
# Create our VM config
logger.debug("Creating a VM config file for %s..." % self.config.name)
self.config_create()
def set_volatility_profile(self, profile_name):
"""
Set the profile of this machine.
In a physical system this will change the pxe image that it restores
from, if one exists.
In a virtual system, this will change which base disk image we use.
@param profile_name: Profile name of system, based on Volatility's
naming scheme.
"""
logger.debug("Setting volatility profile for %s to %s...",self.config.name,
profile_name)
self.config.__dict__['volatility_profile'] = profile_name
# Check for sensor
if not self._has_sensor("control"):
logger.error("No control sensor has been defined for "%self.config.name)
return False
if self.images_map is None:
logger.error("No image map found for %s. (%s)"%(self.config.name,
profile_name))
return False
# do we have this profile?
if profile_name.lower() in self.images_map:
# Update our disk
base_filename = self.images_map[profile_name.lower()]
base_path = os.path.join(G.DIR_ROOT, G.DIR_DISK_IMAGES, base_filename)
self.config.__dict__['disk_base'] = base_path
logger.debug("Updated base filename for %s to %s."%(self.config.name,
self.config.disk_base))
return True
else:
logger.error("No virtual disk image exists for this profile (%s)"%profile_name)
return False
    def has_lophi_snapshot(self):
        """
        Check to see if a LO-PHI snapshot exists; if so, we consider
        this machine ready to go.

        @return: control sensor's answer, or None when no control sensor
        """
        # Check for sensor
        if not self._has_sensor("control"):
            return
        return self.control.has_lophi_snapshot()
    """
    Actuation Functions
    """
    # Defined in Machine
    """
    Memory Functions
    """
    # Read and write defined in Machine
    def memory_get_size(self):
        """
        Retrieve the allocated memory for this SUT.
        @return: Memory size of the SUT in bytes.
        """
        # Check for sensor
        # NOTE(review): guards on the "memory" sensor but then queries the
        # *control* sensor below -- confirm which sensor should answer.
        if not self._has_sensor("memory"):
            return
        return self.control.memory_get_size()
    """
    Power functions
    """
    # Defined in Machine
    def power_on(self):
        """
        Power on the machine, creating the domain from our config when
        libvirt could not start an existing one.
        """
        # Check for sensor
        if not self._has_sensor("control"):
            return
        if not self.control.power_on():
            self.machine_create()
        # Update state variable
        self.MACHINE_STATE = G.MACHINE_STATES['STARTED']
    """
    Machine Control Functions
    """
def machine_create(self, paused=False):
"""
Creates a new VM from the specified config file.
"""
if not self._has_sensor("control"):
return
# Read our XML config and create a new machine
xml_config = self.config_read()
rtn = self.control.machine_create(xml_config,paused)
# Now that the machine is created our network should be setup
net_iface = self.network_get_interface()
if net_iface is not None:
self.add_sensor(NetworkSensorVirtual(net_iface))
else:
logger.warn("No network intface exists for %s"%self.config.vm_name)
return rtn
    def machine_pause(self):
        """
        Pause a machine
        """
        # Check for sensor
        if not self._has_sensor("control"):
            return
        self.control.machine_pause()
        self.MACHINE_STATE = G.MACHINE_STATES['PAUSED']
    def machine_resume(self):
        """
        Resume a paused machine
        """
        # Check for sensor
        if not self._has_sensor("control"):
            return
        self.MACHINE_STATE = G.MACHINE_STATES['RESUMING']
        self.control.machine_resume()
        self.MACHINE_STATE = G.MACHINE_STATES['STARTED']
        # Resuming dirties the disk relative to any snapshot.
        self.DISK_STATE = G.DISK_STATES['DIRTY']
    """
    Snapshot functions
    """
    def machine_snapshot(self):
        """
        Takes a snapshot of the VM and freezes it.

        @return: result of the control sensor's machine_snapshot
        """
        # Check for sensor
        if not self._has_sensor("control"):
            return
        self.MACHINE_STATE = G.MACHINE_STATES['SNAPSHOTTING']
        rtn = self.control.machine_snapshot()
        # Update state variable
        self.MACHINE_STATE = G.MACHINE_STATES['OFF']
        return rtn
    def machine_snapshot_restore(self):
        """
        Restore a previously snapshotted version of the machine.
        """
        # Check for sensor
        if not self._has_sensor("control"):
            return
        return self.control.machine_snapshot_restore()
    def machine_save(self):
        """
        Suspends machine and saves state to a file.
        """
        # Check for sensor
        if not self._has_sensor("control"):
            return
        self.MACHINE_STATE = G.MACHINE_STATES['SNAPSHOTTING']
        self.control.machine_save(self.config.disk_snapshot)
        self.MACHINE_STATE = G.MACHINE_STATES['OFF']
    def machine_restore(self, paused=False):
        """
        Restore a machine from our saved state and start it.

        @param paused: leave the machine paused after restoring when True
        """
        # Check for sensor
        if not self._has_sensor("control"):
            return
        self.control.machine_restore(self.config.disk_snapshot, paused)
        self.MACHINE_STATE = G.MACHINE_STATES['STARTED']
        self.DISK_STATE = G.DISK_STATES['DIRTY']
"""
Disk management functions
@TODO: Store our configuration variables in libvirt so that
our control sensor can access them
"""
def disk_revert(self):
"""
Overwrite the disk with a backup of our original
"""
logger.debug("Reverting disk of %s..." % self.config.name)
if self.config.disk_base is not None:
self.disk_create_cow(self.config.disk_base)
return True
else:
logger.error("No base disk image found for %s."%self.config.name)
return False
def disk_backup(self):
"""
Create a backup of the disk to revert to later
"""
call(["cp", self.config.disk, self.config.disk_orig])
""" Virtual Machine Only """
def disk_create_cow(self, base_filename):
"""
Will generate a qcow disk for this VM
"""
logger.debug("Creating qcow disk from %s for %s..."%(base_filename,
self.config.name))
self.config.__dict__['disk_base'] = base_filename
call(["qemu-img", "create", "-b%s" % base_filename, "-fqcow2", self.config.disk])
def disk_get_filename(self):
"""
Retrieve the filename of the disk used by this SUT.
@return: Filename of backing disk.
"""
# Check for sensor
if not self._has_sensor("control"):
return
filename = self.control.disk_get_filename()
return filename
"""
VM Config functions
"""
def config_create(self):
"""
Create our config file on disk to be loaded into Xen
"""
logger.debug("Creating new machine. (%s)"%self.config.mac_addr)
config = generate_new_xen_xml(VIRSH_TEMPLATE[self.type],
self.config.vm_name,
self.config.disk,
self.config.mac_addr)
f = open(self.config.vm_config, "w+")
f.write(config)
f.close()
def config_read(self):
"""
Read our xml config content
"""
if not os.path.exists(self.config.vm_config):
self.config_create()
f = open(self.config.vm_config, "r")
config_content = f.read()
f.close()
return config_content
"""
Network Functions
"""
def network_get_ip(self):
"""
Return the IP of the virtual machine using only the MAC address
@return: IP address if it can be determined or None
"""
lease_file = "/var/lib/libvirt/dnsmasq/default.leases"
if os.path.exists(lease_file):
logger.debug("Found lease file, looking for NAT'ed leases.")
f = open(lease_file,"r")
for line in f:
columns = line.split()
mac_addr = columns[1]
ip_addr = columns[2]
if mac_addr == self.config.mac_addr:
return ip_addr
logger.debug("Nothing in lease file, Trying arp...")
import subprocess
import sys
cmd="arp -an"
p=subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
output, errors = p.communicate()
if output is not None :
if sys.platform in ['linux','linux2']:
for line in output.split("\n"):
columns = line.split()
if len(columns) > 3 and self.config.mac_addr in columns[3]:
return columns[1][1:-1]
else:
logger.debug("No lease file found.")
# Try the default which is our dhcp server
return None
def network_get_interface(self):
"""
Dump the xml configuration and get the network interface assigned
to this virtual machine.
@return: Interface name or None
"""
return self.control.network_get_interface()
"""
Miscellanous Funcitons
"""
def screenshot(self,filename,vol_uri=None):
"""
Screenshot the display of the machine and save it to a file.
@param filename: Filename to save screenshot data to.
@param vol_uri: UNIMPLEMENTED Just here to be compatible with Physical
"""
return self.control.screenshot(filename)
class VirtualMachineCreator(multiprocessing.Process):
    """
    This class is meant to aide generating snapshots of VM's by launching
    a separate process to snapshot numerous VM's in parallel
    """
    def __init__(self, machine, **kargs):
        """
        Initialize all of our VM params for future reference

        @param machine: VirtualMachine to configure and snapshot
        """
        self.machine = machine
        # Init our multiprocess
        multiprocessing.Process.__init__(self)
    def set_sleep_time(self, sleep_time):
        """
        Set the number of seconds that we should sleep before we take a
        snapshot

        NOTE(review): run() reads self.sleep_time unconditionally, so this
        must be called before start().
        """
        self.sleep_time = sleep_time
    def run(self):
        """
        Run the appropriate commands to start the VM and snapshot it.
        """
        # Create our VM config
        print "* Creating a VM config file for %s..." % self.machine.config.name
        self.machine.config_create()
        # Create our disk image
        print "* Cloning %s to qcow disk..." % self.machine.config.disk_orig
        # NOTE(review): disk_create_cow requires a base_filename argument;
        # this call passes none and would raise TypeError -- confirm.
        self.machine.disk_create_cow()
        # Create our VM instance
        print "* Creating our VM instance..."
        self.machine.machine_create()
        self.machine.machine_resume()
        # Sleep for the allotted time
        from time import sleep
        print "* Sleeping for %d seconds. See you in a few..." % (self.sleep_time)
        sleep(self.sleep_time)
        # Create our disk image
        print "* Snapshotting %s..." % (self.machine.config.vm_name)
        self.machine.machine_save()
        # Store a copy in our orig directory for reverting
        print "* Backing up disk..."
        self.machine.disk_backup()
        # Scan our disk image
        print "* Converting disk back to raw to scan..."
        # NOTE(review): scan_raw_img is not defined in this class or module;
        # presumably provided by a subclass -- verify.
        self.scan_raw_img()
        print "* Done."
|
from nose.plugins.attrib import attr
from microscopes.irm.definition import model_definition as irm_definition
from microscopes.irm.model import (
initialize as irm_initialize,
bind as irm_bind,
)
from microscopes.mixture.definition import model_definition as mm_definition
from microscopes.mixture.model import (
initialize as mm_initialize,
bind as mm_bind,
)
from microscopes.common.rng import rng
from microscopes.common.relation.dataview import (
numpy_dataview as relation_numpy_dataview,
sparse_2d_dataview as sparse_relation_dataview,
)
from microscopes.common.recarray.dataview import (
numpy_dataview as rec_numpy_dataview,
)
from microscopes.models import bb, bbnc, gp
from microscopes.kernels.gibbs import assign, assign_resample
from microscopes.kernels.slice import theta
import numpy as np
import numpy.ma as ma
import itertools as it
import operator as op
import time
from scipy.sparse import coo_matrix
from nose.tools import assert_almost_equals
from microscopes.common.testutil import (
assert_1d_lists_almost_equals,
assert_discrete_dist_approx,
permutation_iter,
permutation_canonical,
scores_to_probs,
)
def _tocsr(raw):
n, m = raw.shape
def indices():
for i, j in it.product(range(n), range(m)):
if not raw.mask[i, j]:
yield i, j
data = [raw[i, j] for i, j in indices()]
i = list(map(op.itemgetter(0), indices()))
j = list(map(op.itemgetter(1), indices()))
return coo_matrix((data, (i, j)), shape=raw.shape).tocsr()
def test_compare_to_mixture_model():
    """A 1-domain IRM over a binary relation must agree with the equivalent
    mixture model: same sufficient statistics, assignment scores, and
    (approximately) the same per-group value scores."""
    r = rng()
    N, D = 4, 5
    # Random sparse binary data, as a relation matrix and as a recarray.
    Y = np.random.uniform(size=(N, D)) > 0.8
    Y_rec = np.array([tuple(y) for y in Y], dtype=[('', bool)] * D)
    mm_view = rec_numpy_dataview(Y_rec)
    irm_view = relation_numpy_dataview(Y)
    mm_def = mm_definition(N, [bb] * D)
    irm_def = irm_definition([N, D], [((0, 1), bb)])
    # Pick a random clustering of the N entities shared by both models.
    perms = list(permutation_iter(N))
    assignment = perms[np.random.randint(0, len(perms))]
    mm_s = mm_initialize(mm_def, mm_view, r=r, assignment=assignment)
    irm_s = irm_initialize(irm_def,
                           [irm_view],
                           r=r,
                           domain_assignments=[
                               assignment,
                               range(D),
                           ])
    def assert_suff_stats_equal():
        # Group structure and per-(group, feature) counts must match.
        assert set(mm_s.groups()) == set(irm_s.groups(0))
        assert irm_s.groups(1) == range(D)
        groups = mm_s.groups()
        for g in groups:
            for i in xrange(D):
                a = mm_s.get_suffstats(g, i)
                b = irm_s.get_suffstats(0, [g, i])
                if b is None:
                    b = {'heads': 0L, 'tails': 0L}
                assert a['heads'] == b['heads'] and a['tails'] == b['tails']
    assert_suff_stats_equal()
    assert_almost_equals(
        mm_s.score_assignment(), irm_s.score_assignment(0), places=3)
    bound_mm_s = mm_bind(mm_s, mm_view)
    bound_irm_s = irm_bind(irm_s, 0, [irm_view])
    # XXX: doesn't really have to be true, just is true of impl
    assert not bound_mm_s.empty_groups()
    assert not bound_irm_s.empty_groups()
    # Mutating operations must track each other exactly.
    bound_mm_s.create_group(r)
    bound_irm_s.create_group(r)
    gid_a = bound_mm_s.remove_value(0, r)
    gid_b = bound_irm_s.remove_value(0, r)
    assert gid_a == gid_b
    assert_suff_stats_equal()
    x0, y0 = bound_mm_s.score_value(0, r)
    x1, y1 = bound_irm_s.score_value(0, r)
    assert x0 == x1  # XXX: not really a requirement
    # XXX: should really normalize and then check
    for a, b in zip(y0, y1):
        assert_almost_equals(a, b, places=2)
def test_dense_vs_sparse():
    """Dense and sparse dataviews over the same masked relation must induce
    the same brute-force posterior over cluster assignments."""
    # XXX: really belongs in irm test cases, but kernels has a nice cluster
    # enumeration iterator
    r = rng()
    n = 5
    raw = ma.array(
        np.random.choice(np.arange(20), size=(n, n)),
        mask=np.random.choice([False, True], size=(n, n)))
    dense = [relation_numpy_dataview(raw)]
    sparse = [sparse_relation_dataview(_tocsr(raw))]
    domains = [n]
    relations = [((0, 0), gp)]
    defn = irm_definition(domains, relations)
    def score_fn(data):
        # Joint score (assignment prior + likelihood) of one clustering.
        def f(assignments):
            s = irm_initialize(defn, data, r=r, domain_assignments=assignments)
            assign = sum(s.score_assignment(i)
                         for i in xrange(len(assignments)))
            likelihood = s.score_likelihood(r)
            return assign + likelihood
        return f
    # Enumerate every canonical clustering of each domain.
    product_assignments = tuple(map(list, map(permutation_iter, domains)))
    dense_posterior = scores_to_probs(
        np.array(map(score_fn(dense), it.product(*product_assignments))))
    sparse_posterior = scores_to_probs(
        np.array(map(score_fn(sparse), it.product(*product_assignments))))
    assert_1d_lists_almost_equals(dense_posterior, sparse_posterior, places=3)
def _test_convergence(domains,
                      data,
                      reg_relations,
                      brute_relations,
                      kernel,
                      burnin_niters=10000,
                      skip=10,
                      ntries=50,
                      nsamples=1000,
                      places=2):
    """Run ``kernel`` on an IRM over ``data`` and check that, after burn-in,
    sampled cluster assignments match the brute-force enumerated posterior.

    @param domains: list of domain sizes
    @param data: list of dataviews, one per relation
    @param reg_relations: relation definitions for the sampled model
    @param brute_relations: relation definitions for the enumerated model
    @param kernel: callable (bound_state, rng) performing one sweep
    @param burnin_niters: sweeps discarded before sampling
    @param skip: sweeps between retained samples
    @param ntries: attempts allowed by assert_discrete_dist_approx
    @param nsamples: samples drawn per attempt
    @param places: KL-divergence decimal places required
    """
    r = rng()
    reg_defn = irm_definition(domains, reg_relations)
    brute_defn = irm_definition(domains, brute_relations)
    def score_fn(assignments):
        # Joint (assignment prior + likelihood) score of a full clustering.
        s = irm_initialize(
            brute_defn, data, r=r,
            domain_assignments=assignments)
        assign = sum(s.score_assignment(i) for i in xrange(len(assignments)))
        likelihood = s.score_likelihood(r)
        return assign + likelihood
    # Exact posterior by enumerating every canonical clustering.
    product_assignments = tuple(map(list, map(permutation_iter, domains)))
    posterior = scores_to_probs(
        np.array(map(score_fn, it.product(*product_assignments))))
    s = irm_initialize(reg_defn, data, r=r)
    bounded_states = [irm_bind(s, i, data) for i in xrange(len(domains))]
    # burnin
    start = time.time()
    last = start
    for i in xrange(burnin_niters):
        for bs in bounded_states:
            kernel(bs, r)
        if not ((i + 1) % 1000):
            print 'burning finished iteration', (i + 1), \
                'in', (time.time() - last), 'seconds'
            last = time.time()
    print 'finished burnin of', burnin_niters, \
        'iters in', (time.time() - start), 'seconds'
    # Map each canonical assignment tuple to its index in ``posterior``.
    idmap = {C: i for i, C in enumerate(it.product(*product_assignments))}
    #print idmap
    def sample_fn():
        # Advance ``skip`` sweeps, then report the current clustering's id.
        for _ in xrange(skip):
            for bs in bounded_states:
                kernel(bs, r)
        key = tuple(tuple(permutation_canonical(bs.assignments()))
                    for bs in bounded_states)
        return idmap[key]
    assert_discrete_dist_approx(
        sample_fn, posterior,
        ntries=ntries, nsamples=nsamples,
        kl_places=places)
# Convergence scenarios: each builds random masked relation data for a small
# model and delegates the statistical check to _test_convergence().
@attr('slow')
def test_one_binary():
    # 1 domain, 1 binary relation
    domains = [4]
    def mk_relations(model):
        return [((0, 0), model)]
    relsize = (domains[0], domains[0])
    data = [relation_numpy_dataview(
        ma.array(
            np.random.choice([False, True], size=relsize),
            mask=np.random.choice([False, True], size=relsize)))]
    _test_convergence(
        domains, data, mk_relations(bb), mk_relations(bb), assign)
@attr('slow')
def test_one_binary_sparse():
    # 1 domain, 1 binary relation (sparse dataview over the same shape)
    domains = [4]
    def mk_relations(model):
        return [((0, 0), model)]
    relsize = (domains[0], domains[0])
    raw = ma.array(
        np.random.choice([False, True], size=relsize),
        mask=np.random.choice([False, True], size=relsize))
    data = [sparse_relation_dataview(_tocsr(raw))]
    _test_convergence(
        domains, data, mk_relations(bb), mk_relations(bb), assign)
@attr('slow')
def test_one_binary_nonconj_kernel():
    # 1 domain, 1 binary relation, using the resampling kernel
    domains = [4]
    def mk_relations(model):
        return [((0, 0), model)]
    relsize = (domains[0], domains[0])
    data = [relation_numpy_dataview(
        ma.array(
            np.random.choice([False, True], size=relsize),
            mask=np.random.choice([False, True], size=relsize)))]
    kernel = lambda s, r: assign_resample(s, 10, r)
    _test_convergence(
        domains, data, mk_relations(bb), mk_relations(bb), kernel)
@attr('slow')
def test_two_binary():
    # 1 domain, 2 binary relations
    domains = [4]
    def mk_relations(model):
        return [((0, 0), model), ((0, 0), model)]
    relsize = (domains[0], domains[0])
    data = [
        relation_numpy_dataview(
            ma.array(
                np.random.choice([False, True], size=relsize),
                mask=np.random.choice([False, True], size=relsize))),
        relation_numpy_dataview(
            ma.array(
                np.random.choice([False, True], size=relsize),
                mask=np.random.choice([False, True], size=relsize))),
    ]
    _test_convergence(
        domains, data, mk_relations(bb), mk_relations(bb), assign)
@attr('slow')
def test_one_binary_one_ternary():
    # 1 domain, 1 binary, 1 ternary
    domains = [4]
    def mk_relations(model):
        return [((0, 0), model), ((0, 0, 0), model)]
    relsize2 = (domains[0], domains[0])
    relsize3 = (domains[0], domains[0], domains[0])
    data = [
        relation_numpy_dataview(
            ma.array(
                np.random.choice([False, True], size=relsize2),
                mask=np.random.choice([False, True], size=relsize2))),
        relation_numpy_dataview(
            ma.array(
                np.random.choice(
                    [False, True], size=relsize3),
                mask=np.random.choice([False, True], size=relsize3))),
    ]
    _test_convergence(
        domains, data, mk_relations(bb), mk_relations(bb), assign)
@attr('slow')
def test_one_binary_nonconj():
    # 1 domain, 1 binary relation, nonconj likelihood (bbnc) vs brute bb
    domains = [3]
    def mk_relations(model):
        return [((0, 0), model)]
    relsize = (domains[0], domains[0])
    data = [relation_numpy_dataview(
        ma.array(
            np.random.choice([False, True], size=relsize),
            mask=np.random.random(size=relsize) > 0.8))]
    def mkparam():
        return {'p': 0.05}
    params = {0: mkparam()}
    def kernel(s, r):
        # Resample assignments, then slice-sample the likelihood params.
        assign_resample(s, 10, r)
        theta(s, r, tparams=params)
    _test_convergence(
        domains, data, mk_relations(bbnc), mk_relations(bb), kernel)
@attr('slow')
def test_two_domain_two_binary():
    # 2 domains, 2 binary relations (one within-domain, one across)
    domains = [3, 4]
    def mk_relations(model):
        return [((0, 0), model), ((1, 0), model)]
    relsize00 = (domains[0], domains[0])
    relsize10 = (domains[1], domains[0])
    data = [
        relation_numpy_dataview(
            ma.array(
                np.random.choice([False, True], size=relsize00),
                mask=np.random.choice([False, True], size=relsize00))),
        relation_numpy_dataview(
            ma.array(
                np.random.choice([False, True], size=relsize10),
                mask=np.random.choice([False, True], size=relsize10))),
    ]
    _test_convergence(
        domains, data, mk_relations(bb), mk_relations(bb), assign)
|
# --- SWIG runtime preamble (machine-generated; do not edit by hand) ---
from sys import version_info as _swig_python_version_info
if _swig_python_version_info < (2, 7, 0):
    raise RuntimeError("Python 2.7 or later required")
# Import the compiled extension module: relatively when this file is loaded
# as part of a package, absolutely when imported as a top-level module.
if __package__ or "." in __name__:
    from . import _bilinearform
else:
    import _bilinearform
# Python 2/3 compatibility alias for the builtins module.
try:
    import builtins as __builtin__
except ImportError:
    import __builtin__
# C-level factories that wrap raw extension functions as fast
# instance/static methods (used to rebind the stub defs below).
_swig_new_instance_method = _bilinearform.SWIG_PyInstanceMethod_New
_swig_new_static_method = _bilinearform.SWIG_PyStaticMethod_New
def _swig_repr(self):
try:
strthis = "proxy of " + self.this.__repr__()
except __builtin__.Exception:
strthis = ""
return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
def _swig_setattr_nondynamic_instance_variable(set):
def set_instance_attr(self, name, value):
if name == "thisown":
self.this.own(value)
elif name == "this":
set(self, name, value)
elif hasattr(self, name) and isinstance(getattr(type(self), name), property):
set(self, name, value)
else:
raise AttributeError("You cannot add instance attributes to %s" % self)
return set_instance_attr
def _swig_setattr_nondynamic_class_variable(set):
def set_class_attr(cls, name, value):
if hasattr(cls, name) and not isinstance(getattr(cls, name), property):
set(cls, name, value)
else:
raise AttributeError("You cannot add class attributes to %s" % cls)
return set_class_attr
def _swig_add_metaclass(metaclass):
"""Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass"""
def wrapper(cls):
return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy())
return wrapper
class _SwigNonDynamicMeta(type):
    """Meta class to enforce nondynamic attributes (no new attributes) for a class"""
    # Route class-attribute assignment through the SWIG guard so that new
    # class attributes cannot be added after class creation.
    __setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__)
import weakref
import mfem._par.globals
import mfem._par.array
import mfem._par.mem_manager
import mfem._par.fespace
import mfem._par.vector
import mfem._par.coefficient
import mfem._par.matrix
import mfem._par.operators
import mfem._par.symmat
import mfem._par.intrules
import mfem._par.sparsemat
import mfem._par.densemat
import mfem._par.eltrans
import mfem._par.fe
import mfem._par.geom
import mfem._par.fe_base
import mfem._par.fe_fixed_order
import mfem._par.element
import mfem._par.table
import mfem._par.hash
import mfem._par.fe_h1
import mfem._par.fe_nd
import mfem._par.fe_rt
import mfem._par.fe_l2
import mfem._par.fe_nurbs
import mfem._par.fe_pos
import mfem._par.fe_ser
import mfem._par.mesh
import mfem._par.sort_pairs
import mfem._par.ncmesh
import mfem._par.vtk
import mfem._par.vertex
import mfem._par.gridfunc
import mfem._par.bilininteg
import mfem._par.fe_coll
import mfem._par.lininteg
import mfem._par.linearform
import mfem._par.nonlininteg
import mfem._par.doftrans
import mfem._par.handle
import mfem._par.hypre
import mfem._par.restriction
# mfem::AssemblyLevel enum values, re-exported from the extension module
# at module level for Python callers.
AssemblyLevel_LEGACY = _bilinearform.AssemblyLevel_LEGACY
AssemblyLevel_LEGACYFULL = _bilinearform.AssemblyLevel_LEGACYFULL
AssemblyLevel_FULL = _bilinearform.AssemblyLevel_FULL
AssemblyLevel_ELEMENT = _bilinearform.AssemblyLevel_ELEMENT
AssemblyLevel_PARTIAL = _bilinearform.AssemblyLevel_PARTIAL
AssemblyLevel_NONE = _bilinearform.AssemblyLevel_NONE
class BilinearForm(mfem._par.matrix.Matrix):
    r"""Proxy of C++ mfem::BilinearForm class."""
    # NOTE: each Python ``def`` below is immediately rebound to a direct
    # C-level method via _swig_new_instance_method; the def exists mainly to
    # carry the docstring.  Methods with extra Python-side bookkeeping
    # (the Add*Integrator family, SpMat, EnableHybridization) stay as plain
    # Python functions so the bookkeeping actually runs.
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr

    def __init__(self, *args):
        r"""
        __init__(BilinearForm self) -> BilinearForm
        __init__(BilinearForm self, FiniteElementSpace f) -> BilinearForm
        __init__(BilinearForm self, FiniteElementSpace f, BilinearForm bf, int ps=0) -> BilinearForm
        """
        # Pass ``self`` to C++ only for Python subclasses (SWIG director
        # support); plain BilinearForm instances pass None.
        if self.__class__ == BilinearForm:
            _self = None
        else:
            _self = self
        _bilinearform.BilinearForm_swiginit(self, _bilinearform.new_BilinearForm(_self, *args))

    def Size(self):
        r"""Size(BilinearForm self) -> int"""
        return _bilinearform.BilinearForm_Size(self)
    Size = _swig_new_instance_method(_bilinearform.BilinearForm_Size)

    def SetAssemblyLevel(self, assembly_level):
        r"""SetAssemblyLevel(BilinearForm self, mfem::AssemblyLevel assembly_level)"""
        return _bilinearform.BilinearForm_SetAssemblyLevel(self, assembly_level)
    SetAssemblyLevel = _swig_new_instance_method(_bilinearform.BilinearForm_SetAssemblyLevel)

    def GetAssemblyLevel(self):
        r"""GetAssemblyLevel(BilinearForm self) -> mfem::AssemblyLevel"""
        return _bilinearform.BilinearForm_GetAssemblyLevel(self)
    GetAssemblyLevel = _swig_new_instance_method(_bilinearform.BilinearForm_GetAssemblyLevel)

    def GetHybridization(self):
        r"""GetHybridization(BilinearForm self) -> Hybridization *"""
        return _bilinearform.BilinearForm_GetHybridization(self)
    GetHybridization = _swig_new_instance_method(_bilinearform.BilinearForm_GetHybridization)

    def EnableStaticCondensation(self):
        r"""EnableStaticCondensation(BilinearForm self)"""
        return _bilinearform.BilinearForm_EnableStaticCondensation(self)
    EnableStaticCondensation = _swig_new_instance_method(_bilinearform.BilinearForm_EnableStaticCondensation)

    def StaticCondensationIsEnabled(self):
        r"""StaticCondensationIsEnabled(BilinearForm self) -> bool"""
        return _bilinearform.BilinearForm_StaticCondensationIsEnabled(self)
    StaticCondensationIsEnabled = _swig_new_instance_method(_bilinearform.BilinearForm_StaticCondensationIsEnabled)

    def SCFESpace(self):
        r"""SCFESpace(BilinearForm self) -> FiniteElementSpace"""
        return _bilinearform.BilinearForm_SCFESpace(self)
    SCFESpace = _swig_new_instance_method(_bilinearform.BilinearForm_SCFESpace)

    def EnableHybridization(self, constr_space, constr_integ, ess_tdof_list):
        r"""EnableHybridization(BilinearForm self, FiniteElementSpace constr_space, BilinearFormIntegrator constr_integ, intArray ess_tdof_list)"""
        val = _bilinearform.BilinearForm_EnableHybridization(self, constr_space, constr_integ, ess_tdof_list)
        # Keep a Python-side reference so the integrator is not garbage
        # collected while the form is alive.
        if not hasattr(self, "_integrators"): self._integrators = []
        self._integrators.append(constr_integ)
        # this will be deleted by Hybridization destructor
        constr_integ.thisown = 0
        return val

    def UsePrecomputedSparsity(self, ps=1):
        r"""UsePrecomputedSparsity(BilinearForm self, int ps=1)"""
        return _bilinearform.BilinearForm_UsePrecomputedSparsity(self, ps)
    UsePrecomputedSparsity = _swig_new_instance_method(_bilinearform.BilinearForm_UsePrecomputedSparsity)

    def UseSparsity(self, *args):
        r"""
        UseSparsity(BilinearForm self, int * I, int * J, bool isSorted)
        UseSparsity(BilinearForm self, SparseMatrix A)
        """
        return _bilinearform.BilinearForm_UseSparsity(self, *args)
    UseSparsity = _swig_new_instance_method(_bilinearform.BilinearForm_UseSparsity)

    def AllocateMatrix(self):
        r"""AllocateMatrix(BilinearForm self)"""
        return _bilinearform.BilinearForm_AllocateMatrix(self)
    AllocateMatrix = _swig_new_instance_method(_bilinearform.BilinearForm_AllocateMatrix)

    def GetDBFI(self):
        r"""GetDBFI(BilinearForm self) -> mfem::Array< mfem::BilinearFormIntegrator * > *"""
        return _bilinearform.BilinearForm_GetDBFI(self)
    GetDBFI = _swig_new_instance_method(_bilinearform.BilinearForm_GetDBFI)

    def GetBBFI(self):
        r"""GetBBFI(BilinearForm self) -> mfem::Array< mfem::BilinearFormIntegrator * > *"""
        return _bilinearform.BilinearForm_GetBBFI(self)
    GetBBFI = _swig_new_instance_method(_bilinearform.BilinearForm_GetBBFI)

    def GetBBFI_Marker(self):
        r"""GetBBFI_Marker(BilinearForm self) -> mfem::Array< mfem::Array< int > * > *"""
        return _bilinearform.BilinearForm_GetBBFI_Marker(self)
    GetBBFI_Marker = _swig_new_instance_method(_bilinearform.BilinearForm_GetBBFI_Marker)

    def GetFBFI(self):
        r"""GetFBFI(BilinearForm self) -> mfem::Array< mfem::BilinearFormIntegrator * > *"""
        return _bilinearform.BilinearForm_GetFBFI(self)
    GetFBFI = _swig_new_instance_method(_bilinearform.BilinearForm_GetFBFI)

    def GetBFBFI(self):
        r"""GetBFBFI(BilinearForm self) -> mfem::Array< mfem::BilinearFormIntegrator * > *"""
        return _bilinearform.BilinearForm_GetBFBFI(self)
    GetBFBFI = _swig_new_instance_method(_bilinearform.BilinearForm_GetBFBFI)

    def GetBFBFI_Marker(self):
        r"""GetBFBFI_Marker(BilinearForm self) -> mfem::Array< mfem::Array< int > * > *"""
        return _bilinearform.BilinearForm_GetBFBFI_Marker(self)
    GetBFBFI_Marker = _swig_new_instance_method(_bilinearform.BilinearForm_GetBFBFI_Marker)

    def __call__(self, i, j):
        r"""__call__(BilinearForm self, int i, int j) -> double const &"""
        return _bilinearform.BilinearForm___call__(self, i, j)
    __call__ = _swig_new_instance_method(_bilinearform.BilinearForm___call__)

    def Elem(self, *args):
        r"""
        Elem(BilinearForm self, int i, int j) -> double
        Elem(BilinearForm self, int i, int j) -> double const &
        """
        return _bilinearform.BilinearForm_Elem(self, *args)
    Elem = _swig_new_instance_method(_bilinearform.BilinearForm_Elem)

    def Mult(self, x, y):
        r"""Mult(BilinearForm self, Vector x, Vector y)"""
        return _bilinearform.BilinearForm_Mult(self, x, y)
    Mult = _swig_new_instance_method(_bilinearform.BilinearForm_Mult)

    def FullMult(self, x, y):
        r"""FullMult(BilinearForm self, Vector x, Vector y)"""
        return _bilinearform.BilinearForm_FullMult(self, x, y)
    FullMult = _swig_new_instance_method(_bilinearform.BilinearForm_FullMult)

    def AddMult(self, x, y, a=1.0):
        r"""AddMult(BilinearForm self, Vector x, Vector y, double const a=1.0)"""
        return _bilinearform.BilinearForm_AddMult(self, x, y, a)
    AddMult = _swig_new_instance_method(_bilinearform.BilinearForm_AddMult)

    def FullAddMult(self, x, y):
        r"""FullAddMult(BilinearForm self, Vector x, Vector y)"""
        return _bilinearform.BilinearForm_FullAddMult(self, x, y)
    FullAddMult = _swig_new_instance_method(_bilinearform.BilinearForm_FullAddMult)

    def AddMultTranspose(self, x, y, a=1.0):
        r"""AddMultTranspose(BilinearForm self, Vector x, Vector y, double const a=1.0)"""
        return _bilinearform.BilinearForm_AddMultTranspose(self, x, y, a)
    AddMultTranspose = _swig_new_instance_method(_bilinearform.BilinearForm_AddMultTranspose)

    def FullAddMultTranspose(self, x, y):
        r"""FullAddMultTranspose(BilinearForm self, Vector x, Vector y)"""
        return _bilinearform.BilinearForm_FullAddMultTranspose(self, x, y)
    FullAddMultTranspose = _swig_new_instance_method(_bilinearform.BilinearForm_FullAddMultTranspose)

    def MultTranspose(self, x, y):
        r"""MultTranspose(BilinearForm self, Vector x, Vector y)"""
        return _bilinearform.BilinearForm_MultTranspose(self, x, y)
    MultTranspose = _swig_new_instance_method(_bilinearform.BilinearForm_MultTranspose)

    def InnerProduct(self, x, y):
        r"""InnerProduct(BilinearForm self, Vector x, Vector y) -> double"""
        return _bilinearform.BilinearForm_InnerProduct(self, x, y)
    InnerProduct = _swig_new_instance_method(_bilinearform.BilinearForm_InnerProduct)

    def Inverse(self):
        r"""Inverse(BilinearForm self) -> MatrixInverse"""
        return _bilinearform.BilinearForm_Inverse(self)
    Inverse = _swig_new_instance_method(_bilinearform.BilinearForm_Inverse)

    def Finalize(self, skip_zeros=1):
        r"""Finalize(BilinearForm self, int skip_zeros=1)"""
        return _bilinearform.BilinearForm_Finalize(self, skip_zeros)
    Finalize = _swig_new_instance_method(_bilinearform.BilinearForm_Finalize)

    def SpMat(self, *args):
        r"""
        SpMat(BilinearForm self) -> SparseMatrix
        SpMat(BilinearForm self) -> SparseMatrix
        """
        val = _bilinearform.BilinearForm_SpMat(self, *args)
        # Keep the returned SparseMatrix proxy alive alongside the form.
        if not hasattr(self, "_spmat"): self._spmat = []
        self._spmat.append(val)
        #val.thisown=0
        return val

    def LoseMat(self):
        r"""LoseMat(BilinearForm self) -> SparseMatrix"""
        return _bilinearform.BilinearForm_LoseMat(self)
    LoseMat = _swig_new_instance_method(_bilinearform.BilinearForm_LoseMat)

    def SpMatElim(self, *args):
        r"""
        SpMatElim(BilinearForm self) -> SparseMatrix
        SpMatElim(BilinearForm self) -> SparseMatrix
        """
        return _bilinearform.BilinearForm_SpMatElim(self, *args)
    SpMatElim = _swig_new_instance_method(_bilinearform.BilinearForm_SpMatElim)

    def AddDomainIntegrator(self, *args):
        r"""
        AddDomainIntegrator(BilinearForm self, BilinearFormIntegrator bfi)
        AddDomainIntegrator(BilinearForm self, BilinearFormIntegrator bfi, intArray elem_marker)
        """
        # Keep a Python-side reference to the integrator; ownership remains
        # with Python (UseExternalIntegrators tells MFEM not to delete it).
        if not hasattr(self, "_integrators"): self._integrators = []
        bfi = args[0]
        self._integrators.append(bfi)
        self.UseExternalIntegrators()
        #bfi.thisown=0
        return _bilinearform.BilinearForm_AddDomainIntegrator(self, *args)

    def AddBoundaryIntegrator(self, *args):
        r"""
        AddBoundaryIntegrator(BilinearForm self, BilinearFormIntegrator bfi)
        AddBoundaryIntegrator(BilinearForm self, BilinearFormIntegrator bfi, intArray bdr_marker)
        """
        # Same Python-side ownership bookkeeping as AddDomainIntegrator.
        if not hasattr(self, "_integrators"): self._integrators = []
        bfi = args[0]
        self._integrators.append(bfi)
        self.UseExternalIntegrators()
        #bfi.thisown=0
        return _bilinearform.BilinearForm_AddBoundaryIntegrator(self, *args)

    def AddInteriorFaceIntegrator(self, bfi):
        r"""AddInteriorFaceIntegrator(BilinearForm self, BilinearFormIntegrator bfi)"""
        # Same Python-side ownership bookkeeping as AddDomainIntegrator.
        if not hasattr(self, "_integrators"): self._integrators = []
        self._integrators.append(bfi)
        self.UseExternalIntegrators()
        #bfi.thisown=0
        return _bilinearform.BilinearForm_AddInteriorFaceIntegrator(self, bfi)

    def AddBdrFaceIntegrator(self, *args):
        r"""
        AddBdrFaceIntegrator(BilinearForm self, BilinearFormIntegrator bfi)
        AddBdrFaceIntegrator(BilinearForm self, BilinearFormIntegrator bfi, intArray bdr_marker)
        """
        if not hasattr(self, "_integrators"): self._integrators = []
        bfi = args[0]
        self._integrators.append(bfi)
        self.UseExternalIntegrators()
        # NOTE(review): unlike the other Add*Integrator methods above (where
        # thisown=0 is commented out), ownership is transferred to C++ here.
        # Confirm this asymmetry is intentional.
        bfi.thisown=0
        return _bilinearform.BilinearForm_AddBdrFaceIntegrator(self, *args)

    def Assemble(self, skip_zeros=1):
        r"""Assemble(BilinearForm self, int skip_zeros=1)"""
        return _bilinearform.BilinearForm_Assemble(self, skip_zeros)
    Assemble = _swig_new_instance_method(_bilinearform.BilinearForm_Assemble)

    def AssembleDiagonal(self, diag):
        r"""AssembleDiagonal(BilinearForm self, Vector diag)"""
        return _bilinearform.BilinearForm_AssembleDiagonal(self, diag)
    AssembleDiagonal = _swig_new_instance_method(_bilinearform.BilinearForm_AssembleDiagonal)

    def GetProlongation(self):
        r"""GetProlongation(BilinearForm self) -> Operator"""
        return _bilinearform.BilinearForm_GetProlongation(self)
    GetProlongation = _swig_new_instance_method(_bilinearform.BilinearForm_GetProlongation)

    def GetRestriction(self):
        r"""GetRestriction(BilinearForm self) -> Operator"""
        return _bilinearform.BilinearForm_GetRestriction(self)
    GetRestriction = _swig_new_instance_method(_bilinearform.BilinearForm_GetRestriction)

    def GetOutputProlongation(self):
        r"""GetOutputProlongation(BilinearForm self) -> Operator"""
        return _bilinearform.BilinearForm_GetOutputProlongation(self)
    GetOutputProlongation = _swig_new_instance_method(_bilinearform.BilinearForm_GetOutputProlongation)

    def GetOutputRestrictionTranspose(self):
        r"""GetOutputRestrictionTranspose(BilinearForm self) -> Operator"""
        return _bilinearform.BilinearForm_GetOutputRestrictionTranspose(self)
    GetOutputRestrictionTranspose = _swig_new_instance_method(_bilinearform.BilinearForm_GetOutputRestrictionTranspose)

    def GetOutputRestriction(self):
        r"""GetOutputRestriction(BilinearForm self) -> Operator"""
        return _bilinearform.BilinearForm_GetOutputRestriction(self)
    GetOutputRestriction = _swig_new_instance_method(_bilinearform.BilinearForm_GetOutputRestriction)

    def RecoverFEMSolution(self, X, b, x):
        r"""RecoverFEMSolution(BilinearForm self, Vector X, Vector b, Vector x)"""
        return _bilinearform.BilinearForm_RecoverFEMSolution(self, X, b, x)
    RecoverFEMSolution = _swig_new_instance_method(_bilinearform.BilinearForm_RecoverFEMSolution)

    def ComputeElementMatrices(self):
        r"""ComputeElementMatrices(BilinearForm self)"""
        return _bilinearform.BilinearForm_ComputeElementMatrices(self)
    ComputeElementMatrices = _swig_new_instance_method(_bilinearform.BilinearForm_ComputeElementMatrices)

    def FreeElementMatrices(self):
        r"""FreeElementMatrices(BilinearForm self)"""
        return _bilinearform.BilinearForm_FreeElementMatrices(self)
    FreeElementMatrices = _swig_new_instance_method(_bilinearform.BilinearForm_FreeElementMatrices)

    def ComputeElementMatrix(self, i, elmat):
        r"""ComputeElementMatrix(BilinearForm self, int i, DenseMatrix elmat)"""
        return _bilinearform.BilinearForm_ComputeElementMatrix(self, i, elmat)
    ComputeElementMatrix = _swig_new_instance_method(_bilinearform.BilinearForm_ComputeElementMatrix)

    def ComputeBdrElementMatrix(self, i, elmat):
        r"""ComputeBdrElementMatrix(BilinearForm self, int i, DenseMatrix elmat)"""
        return _bilinearform.BilinearForm_ComputeBdrElementMatrix(self, i, elmat)
    ComputeBdrElementMatrix = _swig_new_instance_method(_bilinearform.BilinearForm_ComputeBdrElementMatrix)

    def AssembleElementMatrix(self, *args):
        r"""
        AssembleElementMatrix(BilinearForm self, int i, DenseMatrix elmat, int skip_zeros=1)
        AssembleElementMatrix(BilinearForm self, int i, DenseMatrix elmat, intArray vdofs, int skip_zeros=1)
        """
        return _bilinearform.BilinearForm_AssembleElementMatrix(self, *args)
    AssembleElementMatrix = _swig_new_instance_method(_bilinearform.BilinearForm_AssembleElementMatrix)

    def AssembleBdrElementMatrix(self, *args):
        r"""
        AssembleBdrElementMatrix(BilinearForm self, int i, DenseMatrix elmat, int skip_zeros=1)
        AssembleBdrElementMatrix(BilinearForm self, int i, DenseMatrix elmat, intArray vdofs, int skip_zeros=1)
        """
        return _bilinearform.BilinearForm_AssembleBdrElementMatrix(self, *args)
    AssembleBdrElementMatrix = _swig_new_instance_method(_bilinearform.BilinearForm_AssembleBdrElementMatrix)

    def EliminateEssentialBC(self, *args):
        r"""
        EliminateEssentialBC(BilinearForm self, intArray bdr_attr_is_ess, Vector sol, Vector rhs, mfem::Operator::DiagonalPolicy dpolicy=DIAG_ONE)
        EliminateEssentialBC(BilinearForm self, intArray bdr_attr_is_ess, mfem::Operator::DiagonalPolicy dpolicy=DIAG_ONE)
        """
        return _bilinearform.BilinearForm_EliminateEssentialBC(self, *args)
    EliminateEssentialBC = _swig_new_instance_method(_bilinearform.BilinearForm_EliminateEssentialBC)

    def EliminateEssentialBCDiag(self, bdr_attr_is_ess, value):
        r"""EliminateEssentialBCDiag(BilinearForm self, intArray bdr_attr_is_ess, double value)"""
        return _bilinearform.BilinearForm_EliminateEssentialBCDiag(self, bdr_attr_is_ess, value)
    EliminateEssentialBCDiag = _swig_new_instance_method(_bilinearform.BilinearForm_EliminateEssentialBCDiag)

    def EliminateVDofs(self, *args):
        r"""
        EliminateVDofs(BilinearForm self, intArray vdofs, Vector sol, Vector rhs, mfem::Operator::DiagonalPolicy dpolicy=DIAG_ONE)
        EliminateVDofs(BilinearForm self, intArray vdofs, mfem::Operator::DiagonalPolicy dpolicy=DIAG_ONE)
        """
        return _bilinearform.BilinearForm_EliminateVDofs(self, *args)
    EliminateVDofs = _swig_new_instance_method(_bilinearform.BilinearForm_EliminateVDofs)

    def EliminateEssentialBCFromDofs(self, *args):
        r"""
        EliminateEssentialBCFromDofs(BilinearForm self, intArray ess_dofs, Vector sol, Vector rhs, mfem::Operator::DiagonalPolicy dpolicy=DIAG_ONE)
        EliminateEssentialBCFromDofs(BilinearForm self, intArray ess_dofs, mfem::Operator::DiagonalPolicy dpolicy=DIAG_ONE)
        """
        return _bilinearform.BilinearForm_EliminateEssentialBCFromDofs(self, *args)
    EliminateEssentialBCFromDofs = _swig_new_instance_method(_bilinearform.BilinearForm_EliminateEssentialBCFromDofs)

    def EliminateEssentialBCFromDofsDiag(self, ess_dofs, value):
        r"""EliminateEssentialBCFromDofsDiag(BilinearForm self, intArray ess_dofs, double value)"""
        return _bilinearform.BilinearForm_EliminateEssentialBCFromDofsDiag(self, ess_dofs, value)
    EliminateEssentialBCFromDofsDiag = _swig_new_instance_method(_bilinearform.BilinearForm_EliminateEssentialBCFromDofsDiag)

    def EliminateVDofsInRHS(self, vdofs, x, b):
        r"""EliminateVDofsInRHS(BilinearForm self, intArray vdofs, Vector x, Vector b)"""
        return _bilinearform.BilinearForm_EliminateVDofsInRHS(self, vdofs, x, b)
    EliminateVDofsInRHS = _swig_new_instance_method(_bilinearform.BilinearForm_EliminateVDofsInRHS)

    def FullInnerProduct(self, x, y):
        r"""FullInnerProduct(BilinearForm self, Vector x, Vector y) -> double"""
        return _bilinearform.BilinearForm_FullInnerProduct(self, x, y)
    FullInnerProduct = _swig_new_instance_method(_bilinearform.BilinearForm_FullInnerProduct)

    def Update(self, nfes=None):
        r"""Update(BilinearForm self, FiniteElementSpace nfes=None)"""
        return _bilinearform.BilinearForm_Update(self, nfes)
    Update = _swig_new_instance_method(_bilinearform.BilinearForm_Update)

    def GetFES(self):
        r"""GetFES(BilinearForm self) -> FiniteElementSpace"""
        return _bilinearform.BilinearForm_GetFES(self)
    GetFES = _swig_new_instance_method(_bilinearform.BilinearForm_GetFES)

    def FESpace(self, *args):
        r"""
        FESpace(BilinearForm self) -> FiniteElementSpace
        FESpace(BilinearForm self) -> FiniteElementSpace
        """
        return _bilinearform.BilinearForm_FESpace(self, *args)
    FESpace = _swig_new_instance_method(_bilinearform.BilinearForm_FESpace)

    def SetDiagonalPolicy(self, policy):
        r"""SetDiagonalPolicy(BilinearForm self, mfem::Operator::DiagonalPolicy policy)"""
        return _bilinearform.BilinearForm_SetDiagonalPolicy(self, policy)
    SetDiagonalPolicy = _swig_new_instance_method(_bilinearform.BilinearForm_SetDiagonalPolicy)

    def UseExternalIntegrators(self):
        r"""UseExternalIntegrators(BilinearForm self)"""
        return _bilinearform.BilinearForm_UseExternalIntegrators(self)
    UseExternalIntegrators = _swig_new_instance_method(_bilinearform.BilinearForm_UseExternalIntegrators)
    __swig_destroy__ = _bilinearform.delete_BilinearForm

    def FormLinearSystem(self, *args):
        r"""
        FormLinearSystem(BilinearForm self, intArray ess_tdof_list, Vector x, Vector b, OperatorHandle A, Vector X, Vector B, int copy_interior=0)
        FormLinearSystem(BilinearForm self, intArray ess_tdof_list, Vector x, Vector b, SparseMatrix A, Vector X, Vector B, int copy_interior=0)
        FormLinearSystem(BilinearForm self, intArray ess_tdof_list, Vector x, Vector b, HypreParMatrix A, Vector X, Vector B, int copy_interior=0)
        """
        return _bilinearform.BilinearForm_FormLinearSystem(self, *args)
    FormLinearSystem = _swig_new_instance_method(_bilinearform.BilinearForm_FormLinearSystem)

    def FormSystemMatrix(self, *args):
        r"""
        FormSystemMatrix(BilinearForm self, intArray ess_tdof_list, OperatorHandle A)
        FormSystemMatrix(BilinearForm self, intArray ess_tdof_list, SparseMatrix A)
        FormSystemMatrix(BilinearForm self, intArray ess_tdof_list, HypreParMatrix A)
        """
        return _bilinearform.BilinearForm_FormSystemMatrix(self, *args)
    FormSystemMatrix = _swig_new_instance_method(_bilinearform.BilinearForm_FormSystemMatrix)

    def __disown__(self):
        # Hand ownership of the C++ object to the C++ side and return a weak
        # proxy so Python no longer keeps it alive (SWIG director pattern).
        self.this.disown()
        _bilinearform.disown_BilinearForm(self)
        return weakref.proxy(self)

# Register the proxy class with the SWIG runtime.
_bilinearform.BilinearForm_swigregister(BilinearForm)
class MixedBilinearForm(mfem._par.matrix.Matrix):
r"""Proxy of C++ mfem::MixedBilinearForm class."""
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
def __init__(self, *args):
r"""
__init__(MixedBilinearForm self, FiniteElementSpace tr_fes, FiniteElementSpace te_fes) -> MixedBilinearForm
__init__(MixedBilinearForm self, FiniteElementSpace tr_fes, FiniteElementSpace te_fes, MixedBilinearForm mbf) -> MixedBilinearForm
"""
_bilinearform.MixedBilinearForm_swiginit(self, _bilinearform.new_MixedBilinearForm(*args))
def Elem(self, *args):
r"""
Elem(MixedBilinearForm self, int i, int j) -> double
Elem(MixedBilinearForm self, int i, int j) -> double const &
"""
return _bilinearform.MixedBilinearForm_Elem(self, *args)
Elem = _swig_new_instance_method(_bilinearform.MixedBilinearForm_Elem)
def Mult(self, x, y):
r"""Mult(MixedBilinearForm self, Vector x, Vector y)"""
return _bilinearform.MixedBilinearForm_Mult(self, x, y)
Mult = _swig_new_instance_method(_bilinearform.MixedBilinearForm_Mult)
def AddMult(self, x, y, a=1.0):
r"""AddMult(MixedBilinearForm self, Vector x, Vector y, double const a=1.0)"""
return _bilinearform.MixedBilinearForm_AddMult(self, x, y, a)
AddMult = _swig_new_instance_method(_bilinearform.MixedBilinearForm_AddMult)
def MultTranspose(self, x, y):
r"""MultTranspose(MixedBilinearForm self, Vector x, Vector y)"""
return _bilinearform.MixedBilinearForm_MultTranspose(self, x, y)
MultTranspose = _swig_new_instance_method(_bilinearform.MixedBilinearForm_MultTranspose)
def AddMultTranspose(self, x, y, a=1.0):
r"""AddMultTranspose(MixedBilinearForm self, Vector x, Vector y, double const a=1.0)"""
return _bilinearform.MixedBilinearForm_AddMultTranspose(self, x, y, a)
AddMultTranspose = _swig_new_instance_method(_bilinearform.MixedBilinearForm_AddMultTranspose)
def Inverse(self):
r"""Inverse(MixedBilinearForm self) -> MatrixInverse"""
return _bilinearform.MixedBilinearForm_Inverse(self)
Inverse = _swig_new_instance_method(_bilinearform.MixedBilinearForm_Inverse)
def Finalize(self, skip_zeros=1):
r"""Finalize(MixedBilinearForm self, int skip_zeros=1)"""
return _bilinearform.MixedBilinearForm_Finalize(self, skip_zeros)
Finalize = _swig_new_instance_method(_bilinearform.MixedBilinearForm_Finalize)
def GetBlocks(self, blocks):
r"""GetBlocks(MixedBilinearForm self, mfem::Array2D< mfem::SparseMatrix * > & blocks)"""
return _bilinearform.MixedBilinearForm_GetBlocks(self, blocks)
GetBlocks = _swig_new_instance_method(_bilinearform.MixedBilinearForm_GetBlocks)
def SpMat(self, *args):
r"""
SpMat(MixedBilinearForm self) -> SparseMatrix
SpMat(MixedBilinearForm self) -> SparseMatrix
"""
val = _bilinearform.MixedBilinearForm_SpMat(self, *args)
if not hasattr(self, "_spmat"): self._spmat = []
self._spmat.append(val)
val.thisown=0
return val
def LoseMat(self):
r"""LoseMat(MixedBilinearForm self) -> SparseMatrix"""
return _bilinearform.MixedBilinearForm_LoseMat(self)
LoseMat = _swig_new_instance_method(_bilinearform.MixedBilinearForm_LoseMat)
def AddDomainIntegrator(self, bfi):
r"""AddDomainIntegrator(MixedBilinearForm self, BilinearFormIntegrator bfi)"""
if not hasattr(self, "_integrators"): self._integrators = []
self._integrators.append(bfi)
bfi.thisown=0
return _bilinearform.MixedBilinearForm_AddDomainIntegrator(self, bfi)
def AddBoundaryIntegrator(self, *args):
r"""
AddBoundaryIntegrator(MixedBilinearForm self, BilinearFormIntegrator bfi)
AddBoundaryIntegrator(MixedBilinearForm self, BilinearFormIntegrator bfi, intArray bdr_marker)
"""
if not hasattr(self, "_integrators"): self._integrators = []
bfi = args[0]
self._integrators.append(bfi)
bfi.thisown=0
return _bilinearform.MixedBilinearForm_AddBoundaryIntegrator(self, *args)
def AddTraceFaceIntegrator(self, bfi):
r"""AddTraceFaceIntegrator(MixedBilinearForm self, BilinearFormIntegrator bfi)"""
if not hasattr(self, "_integrators"): self._integrators = []
self._integrators.append(bfi)
bfi.thisown=0
return _bilinearform.MixedBilinearForm_AddTraceFaceIntegrator(self, bfi)
def AddBdrTraceFaceIntegrator(self, *args):
r"""
AddBdrTraceFaceIntegrator(MixedBilinearForm self, BilinearFormIntegrator bfi)
AddBdrTraceFaceIntegrator(MixedBilinearForm self, BilinearFormIntegrator bfi, intArray bdr_marker)
"""
if not hasattr(self, "_integrators"): self._integrators = []
bfi = args[0]
self._integrators.append(bfi)
bfi.thisown=0
return _bilinearform.MixedBilinearForm_AddBdrTraceFaceIntegrator(self, *args)
def GetDBFI(self):
r"""GetDBFI(MixedBilinearForm self) -> mfem::Array< mfem::BilinearFormIntegrator * > *"""
return _bilinearform.MixedBilinearForm_GetDBFI(self)
GetDBFI = _swig_new_instance_method(_bilinearform.MixedBilinearForm_GetDBFI)
def GetBBFI(self):
r"""GetBBFI(MixedBilinearForm self) -> mfem::Array< mfem::BilinearFormIntegrator * > *"""
return _bilinearform.MixedBilinearForm_GetBBFI(self)
GetBBFI = _swig_new_instance_method(_bilinearform.MixedBilinearForm_GetBBFI)
def GetBBFI_Marker(self):
r"""GetBBFI_Marker(MixedBilinearForm self) -> mfem::Array< mfem::Array< int > * > *"""
return _bilinearform.MixedBilinearForm_GetBBFI_Marker(self)
GetBBFI_Marker = _swig_new_instance_method(_bilinearform.MixedBilinearForm_GetBBFI_Marker)
def GetTFBFI(self):
r"""GetTFBFI(MixedBilinearForm self) -> mfem::Array< mfem::BilinearFormIntegrator * > *"""
return _bilinearform.MixedBilinearForm_GetTFBFI(self)
GetTFBFI = _swig_new_instance_method(_bilinearform.MixedBilinearForm_GetTFBFI)
def GetBTFBFI(self):
r"""GetBTFBFI(MixedBilinearForm self) -> mfem::Array< mfem::BilinearFormIntegrator * > *"""
return _bilinearform.MixedBilinearForm_GetBTFBFI(self)
GetBTFBFI = _swig_new_instance_method(_bilinearform.MixedBilinearForm_GetBTFBFI)
def GetBTFBFI_Marker(self):
r"""GetBTFBFI_Marker(MixedBilinearForm self) -> mfem::Array< mfem::Array< int > * > *"""
return _bilinearform.MixedBilinearForm_GetBTFBFI_Marker(self)
GetBTFBFI_Marker = _swig_new_instance_method(_bilinearform.MixedBilinearForm_GetBTFBFI_Marker)
def SetAssemblyLevel(self, assembly_level):
r"""SetAssemblyLevel(MixedBilinearForm self, mfem::AssemblyLevel assembly_level)"""
return _bilinearform.MixedBilinearForm_SetAssemblyLevel(self, assembly_level)
SetAssemblyLevel = _swig_new_instance_method(_bilinearform.MixedBilinearForm_SetAssemblyLevel)
def Assemble(self, skip_zeros=1):
r"""Assemble(MixedBilinearForm self, int skip_zeros=1)"""
return _bilinearform.MixedBilinearForm_Assemble(self, skip_zeros)
Assemble = _swig_new_instance_method(_bilinearform.MixedBilinearForm_Assemble)
def AssembleDiagonal_ADAt(self, D, diag):
r"""AssembleDiagonal_ADAt(MixedBilinearForm self, Vector D, Vector diag)"""
return _bilinearform.MixedBilinearForm_AssembleDiagonal_ADAt(self, D, diag)
AssembleDiagonal_ADAt = _swig_new_instance_method(_bilinearform.MixedBilinearForm_AssembleDiagonal_ADAt)
def GetProlongation(self):
r"""GetProlongation(MixedBilinearForm self) -> Operator"""
return _bilinearform.MixedBilinearForm_GetProlongation(self)
GetProlongation = _swig_new_instance_method(_bilinearform.MixedBilinearForm_GetProlongation)
def GetRestriction(self):
r"""GetRestriction(MixedBilinearForm self) -> Operator"""
return _bilinearform.MixedBilinearForm_GetRestriction(self)
GetRestriction = _swig_new_instance_method(_bilinearform.MixedBilinearForm_GetRestriction)
def GetOutputProlongation(self):
r"""GetOutputProlongation(MixedBilinearForm self) -> Operator"""
return _bilinearform.MixedBilinearForm_GetOutputProlongation(self)
GetOutputProlongation = _swig_new_instance_method(_bilinearform.MixedBilinearForm_GetOutputProlongation)
def GetOutputRestriction(self):
r"""GetOutputRestriction(MixedBilinearForm self) -> Operator"""
return _bilinearform.MixedBilinearForm_GetOutputRestriction(self)
GetOutputRestriction = _swig_new_instance_method(_bilinearform.MixedBilinearForm_GetOutputRestriction)
def ConformingAssemble(self):
r"""ConformingAssemble(MixedBilinearForm self)"""
return _bilinearform.MixedBilinearForm_ConformingAssemble(self)
ConformingAssemble = _swig_new_instance_method(_bilinearform.MixedBilinearForm_ConformingAssemble)
def ComputeElementMatrix(self, i, elmat):
r"""ComputeElementMatrix(MixedBilinearForm self, int i, DenseMatrix elmat)"""
return _bilinearform.MixedBilinearForm_ComputeElementMatrix(self, i, elmat)
ComputeElementMatrix = _swig_new_instance_method(_bilinearform.MixedBilinearForm_ComputeElementMatrix)
def ComputeBdrElementMatrix(self, i, elmat):
r"""ComputeBdrElementMatrix(MixedBilinearForm self, int i, DenseMatrix elmat)"""
return _bilinearform.MixedBilinearForm_ComputeBdrElementMatrix(self, i, elmat)
ComputeBdrElementMatrix = _swig_new_instance_method(_bilinearform.MixedBilinearForm_ComputeBdrElementMatrix)
def AssembleElementMatrix(self, *args):
r"""
AssembleElementMatrix(MixedBilinearForm self, int i, DenseMatrix elmat, int skip_zeros=1)
AssembleElementMatrix(MixedBilinearForm self, int i, DenseMatrix elmat, intArray trial_vdofs, intArray test_vdofs, int skip_zeros=1)
"""
return _bilinearform.MixedBilinearForm_AssembleElementMatrix(self, *args)
AssembleElementMatrix = _swig_new_instance_method(_bilinearform.MixedBilinearForm_AssembleElementMatrix)
def AssembleBdrElementMatrix(self, *args):
r"""
AssembleBdrElementMatrix(MixedBilinearForm self, int i, DenseMatrix elmat, int skip_zeros=1)
AssembleBdrElementMatrix(MixedBilinearForm self, int i, DenseMatrix elmat, intArray trial_vdofs, intArray test_vdofs, int skip_zeros=1)
"""
return _bilinearform.MixedBilinearForm_AssembleBdrElementMatrix(self, *args)
AssembleBdrElementMatrix = _swig_new_instance_method(_bilinearform.MixedBilinearForm_AssembleBdrElementMatrix)
def EliminateTrialDofs(self, bdr_attr_is_ess, sol, rhs):
r"""EliminateTrialDofs(MixedBilinearForm self, intArray bdr_attr_is_ess, Vector sol, Vector rhs)"""
return _bilinearform.MixedBilinearForm_EliminateTrialDofs(self, bdr_attr_is_ess, sol, rhs)
EliminateTrialDofs = _swig_new_instance_method(_bilinearform.MixedBilinearForm_EliminateTrialDofs)
    def EliminateEssentialBCFromTrialDofs(self, marked_vdofs, sol, rhs):
        r"""EliminateEssentialBCFromTrialDofs(MixedBilinearForm self, intArray marked_vdofs, Vector sol, Vector rhs)"""
        return _bilinearform.MixedBilinearForm_EliminateEssentialBCFromTrialDofs(self, marked_vdofs, sol, rhs)
    # SWIG-generated: rebind to the builtin wrapper; shadows the def above.
    EliminateEssentialBCFromTrialDofs = _swig_new_instance_method(_bilinearform.MixedBilinearForm_EliminateEssentialBCFromTrialDofs)
    def EliminateTestDofs(self, bdr_attr_is_ess):
        r"""EliminateTestDofs(MixedBilinearForm self, intArray bdr_attr_is_ess)"""
        return _bilinearform.MixedBilinearForm_EliminateTestDofs(self, bdr_attr_is_ess)
    # SWIG-generated: rebind to the builtin wrapper; shadows the def above.
    EliminateTestDofs = _swig_new_instance_method(_bilinearform.MixedBilinearForm_EliminateTestDofs)
    def FormRectangularSystemMatrix(self, trial_tdof_list, test_tdof_list, A):
        r"""FormRectangularSystemMatrix(MixedBilinearForm self, intArray trial_tdof_list, intArray test_tdof_list, OperatorHandle A)"""
        return _bilinearform.MixedBilinearForm_FormRectangularSystemMatrix(self, trial_tdof_list, test_tdof_list, A)
    # SWIG-generated: rebind to the builtin wrapper; shadows the def above.
    FormRectangularSystemMatrix = _swig_new_instance_method(_bilinearform.MixedBilinearForm_FormRectangularSystemMatrix)
    def FormRectangularLinearSystem(self, trial_tdof_list, test_tdof_list, x, b, A, X, B):
        r"""FormRectangularLinearSystem(MixedBilinearForm self, intArray trial_tdof_list, intArray test_tdof_list, Vector x, Vector b, OperatorHandle A, Vector X, Vector B)"""
        return _bilinearform.MixedBilinearForm_FormRectangularLinearSystem(self, trial_tdof_list, test_tdof_list, x, b, A, X, B)
    # SWIG-generated: rebind to the builtin wrapper; shadows the def above.
    FormRectangularLinearSystem = _swig_new_instance_method(_bilinearform.MixedBilinearForm_FormRectangularLinearSystem)
    def Update(self):
        r"""Update(MixedBilinearForm self)"""
        return _bilinearform.MixedBilinearForm_Update(self)
    # SWIG-generated: rebind to the builtin wrapper; shadows the def above.
    Update = _swig_new_instance_method(_bilinearform.MixedBilinearForm_Update)
    def TrialFESpace(self, *args):
        r"""
        TrialFESpace(MixedBilinearForm self) -> FiniteElementSpace
        TrialFESpace(MixedBilinearForm self) -> FiniteElementSpace
        """
        # Const and non-const C++ overloads collapse to one Python entry.
        return _bilinearform.MixedBilinearForm_TrialFESpace(self, *args)
    # SWIG-generated: rebind to the builtin wrapper; shadows the def above.
    TrialFESpace = _swig_new_instance_method(_bilinearform.MixedBilinearForm_TrialFESpace)
    def TestFESpace(self, *args):
        r"""
        TestFESpace(MixedBilinearForm self) -> FiniteElementSpace
        TestFESpace(MixedBilinearForm self) -> FiniteElementSpace
        """
        # Const and non-const C++ overloads collapse to one Python entry.
        return _bilinearform.MixedBilinearForm_TestFESpace(self, *args)
    # SWIG-generated: rebind to the builtin wrapper; shadows the def above.
    TestFESpace = _swig_new_instance_method(_bilinearform.MixedBilinearForm_TestFESpace)
__swig_destroy__ = _bilinearform.delete_MixedBilinearForm
_bilinearform.MixedBilinearForm_swigregister(MixedBilinearForm)
class DiscreteLinearOperator(MixedBilinearForm):
    r"""Proxy of C++ mfem::DiscreteLinearOperator class."""
    # SWIG-generated proxy class; code order (def followed by rebinding
    # assignments) must be preserved.
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
    __repr__ = _swig_repr
    def __init__(self, domain_fes, range_fes):
        r"""__init__(DiscreteLinearOperator self, FiniteElementSpace domain_fes, FiniteElementSpace range_fes) -> DiscreteLinearOperator"""
        _bilinearform.DiscreteLinearOperator_swiginit(self, _bilinearform.new_DiscreteLinearOperator(domain_fes, range_fes))
    def AddDomainInterpolator(self, di):
        r"""AddDomainInterpolator(DiscreteLinearOperator self, DiscreteInterpolator di)"""
        # Keep a Python-side reference and hand ownership to C++ so the
        # interpolator is not garbage-collected while this operator lives.
        if not hasattr(self, "_integrators"): self._integrators = []
        self._integrators.append(di)
        di.thisown=0
        return _bilinearform.DiscreteLinearOperator_AddDomainInterpolator(self, di)
    def AddTraceFaceInterpolator(self, di):
        r"""AddTraceFaceInterpolator(DiscreteLinearOperator self, DiscreteInterpolator di)"""
        # Same ownership-transfer pattern as AddDomainInterpolator.
        if not hasattr(self, "_integrators"): self._integrators = []
        self._integrators.append(di)
        di.thisown=0
        return _bilinearform.DiscreteLinearOperator_AddTraceFaceInterpolator(self, di)
    def GetDI(self):
        r"""GetDI(DiscreteLinearOperator self) -> mfem::Array< mfem::BilinearFormIntegrator * > *"""
        return _bilinearform.DiscreteLinearOperator_GetDI(self)
    # SWIG-generated rebinding; shadows the def above.
    GetDI = _swig_new_instance_method(_bilinearform.DiscreteLinearOperator_GetDI)
    def SetAssemblyLevel(self, assembly_level):
        r"""SetAssemblyLevel(DiscreteLinearOperator self, mfem::AssemblyLevel assembly_level)"""
        return _bilinearform.DiscreteLinearOperator_SetAssemblyLevel(self, assembly_level)
    SetAssemblyLevel = _swig_new_instance_method(_bilinearform.DiscreteLinearOperator_SetAssemblyLevel)
    def Assemble(self, skip_zeros=1):
        r"""Assemble(DiscreteLinearOperator self, int skip_zeros=1)"""
        return _bilinearform.DiscreteLinearOperator_Assemble(self, skip_zeros)
    Assemble = _swig_new_instance_method(_bilinearform.DiscreteLinearOperator_Assemble)
    def GetOutputRestrictionTranspose(self):
        r"""GetOutputRestrictionTranspose(DiscreteLinearOperator self) -> Operator"""
        return _bilinearform.DiscreteLinearOperator_GetOutputRestrictionTranspose(self)
    GetOutputRestrictionTranspose = _swig_new_instance_method(_bilinearform.DiscreteLinearOperator_GetOutputRestrictionTranspose)
    # Generated destructor hook for the wrapped C++ object.
    __swig_destroy__ = _bilinearform.delete_DiscreteLinearOperator
# Register the proxy class with the SWIG runtime type system.
_bilinearform.DiscreteLinearOperator_swigregister(DiscreteLinearOperator)
|
import os, ConfigParser
class Configuration(object):
    """General settings for niprov.
    Individual settings are documented as follows;
    **setting** *= default_value*
    *type* - Explanation.
    The settings can be changed in the configuration file, or in code.
    All settings:
    """
    database_type = 'file'
    """str: Type of backend in which to store provenance. Currently only 'file'
    or 'MongoDB'
    """
    database_url = '~/provenance.json'
    """str: URL of the database. If ``database-type`` is ``file``, this is the
    path to the file."""
    dryrun = False
    """bool: Do not execute commands or make lasting changes to the
    provenance database."""
    verbosity = 'info'
    """string: Level of information to report. One of 'debug','info','warning',
    'error'. Any level includes higher levels, i.e. 'info' will log messages of
    that are deemed 'info', 'warning' or 'error'. """
    discover_file_extensions = ['.PAR','.dcm','.fif','.cnt']
    """list: Discover uses this to determine which files to include.
    Not strictly extensions, can be any string that appears in the file name.
    Use comma's to separate items."""
    attach = False
    """bool: Attach provenance to image files. For nifti files for instance,
    this means inserting a header extension with serialized provenance. See
    'attach_format' to configure which data format is used."""
    attach_format = 'json'
    """string: Format in which to attach provenance to the file. One of 'json',
    or 'xml'.
    For example, if set to 'json' and the 'attach' option is True, this will
    add a header extension to nifti files created with the relevant provenance
    data in json format."""
    user = ''
    """string: Name of the user creating provenance. If not provided, will
    be determined based on OS information or as passed as an argument to the
    provenance operation. See also :py:mod:`niprov.users`"""
    def __init__(self, configFilePath='~/niprov.cfg'):
        """Load settings, overriding the class-level defaults with values
        from the ``[main]`` section of the file at *configFilePath*, if it
        exists. The public class attributes double as the settings schema:
        each option is coerced to the type of its default value.
        """
        configFilePath = os.path.expanduser(configFilePath)
        if os.path.isfile(configFilePath):
            keys = [k for k in dir(self) if k[0] != '_']
            defaults = {k:getattr(self, k) for k in keys}
            types = {k:type(defaults[k]) for k in keys}
            parser = ConfigParser.SafeConfigParser()
            parser.read(configFilePath)
            for key in keys:
                if not parser.has_option('main', key):
                    val = defaults[key]
                elif types[key] is str:
                    val = parser.get('main', key)
                elif types[key] is bool:
                    val = parser.getboolean('main', key)
                elif types[key] is list:
                    items = parser.get('main', key).split(',')
                    # Fix: the original used "i is not ''", an identity
                    # comparison that is unreliable for strings; use
                    # inequality to drop empty items.
                    val = [i.strip() for i in items if i != '']
                else:
                    # Fix: previously 'val' could be left unbound (or
                    # stale from the prior iteration) for defaults of an
                    # unhandled type; fall back to the default instead.
                    val = defaults[key]
                setattr(self, key, val)
|
import requests
class Cachet(object):
    """Thin client for the Cachet status-page REST API.

    All methods return the raw :class:`requests.Response`; callers are
    responsible for checking status codes and decoding JSON.
    """
    def __init__(self, url, apiToken):
        # Base API URL (no trailing slash) and the X-Cachet-Token value.
        self.url = url
        self.apiToken = apiToken
    def __getRequest(self, path):
        # NOTE(review): GET requests are sent without the token header --
        # confirm all GET endpoints used here are public.
        return requests.get(self.url + path)
    def __postRequest(self, path, data):
        return requests.post(self.url + path, data, headers={'X-Cachet-Token': self.apiToken})
    def __putRequest(self, path, data):
        return requests.put(self.url + path, data, headers={'X-Cachet-Token': self.apiToken})
    def __delRequest(self, path):
        return requests.delete(self.url + path, headers={'X-Cachet-Token': self.apiToken})
    def ping(self):
        '''API test endpoint.
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        return self.__getRequest('/ping')
    def getComponents(self):
        '''Return all components that have been created.
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        return self.__getRequest('/components')
    def getComponentsByID(self, id):
        '''Return a single component.
        :param id: Component ID
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        return self.__getRequest('/components/%s' % id)
    def postComponents(self, name, status, **kwargs):
        '''Create a new component.
        :param name: Name of the component
        :param status: Status of the component; 1-4
        :param description: (optional) Description of the component
        :param link: (optional) A hyperlink to the component
        :param order: (optional) Order of the component
        :param group_id: (optional) The group id that the component is within
        :param enabled: (optional)
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        kwargs['name'] = name
        kwargs['status'] = status
        return self.__postRequest('/components', kwargs)
    def putComponentsByID(self, id, **kwargs):
        '''Updates a component.
        :param id: Component ID
        :param name: (optional) Name of the component
        :param status: (optional) Status of the component; 1-4
        :param link: (optional) A hyperlink to the component
        :param order: (optional) Order of the component
        :param group_id: (optional) The group id that the component is within
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        return self.__putRequest('/components/%s' % id, kwargs)
    def deleteComponentsByID(self, id):
        '''Delete a component.
        :param id: Component ID
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        return self.__delRequest('/components/%s' % id)
    def getComponentsGroups(self):
        '''Return all component groups.
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        return self.__getRequest('/components/groups')
    def getComponentsGroupsByID(self, id):
        '''Return a single component group.
        :param id: ID of the group you want to fetch
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        return self.__getRequest('/components/groups/%s' % id)
    def postComponentsGroups(self, name, **kwargs):
        '''Create a new component group.
        :param name: Name of the component group
        :param order: (optional) Order of the component group
        :param collapsed: (optional) Whether to collapse the group by default
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        kwargs['name'] = name
        return self.__postRequest('/components/groups', kwargs)
    def putComponentsGroupsByID(self, id, **kwargs):
        '''Update a component group.
        :param id: Component group to update
        :param name: (optional) Name of the component group
        :param order: (optional) Order of the group
        :param collapsed: (optional) Whether to collapse the group by default
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        return self.__putRequest('/components/groups/%s' % id, kwargs)
    def deleteComponentsGroupsByID(self, id):
        '''Delete a component group.
        :param id: Component group to delete
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        # Fix: this previously issued a GET (__getRequest), which fetched
        # the group instead of deleting it; a DELETE request is required.
        return self.__delRequest('/components/groups/%s' % id)
    def getIncidents(self):
        '''Return all incidents.
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        return self.__getRequest('/incidents')
    def getIncidentsByID(self, id):
        '''Returns a single incident.
        :param id: Incident ID
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        return self.__getRequest('/incidents/%s' % id)
    def postIncidents(self, name, message, status, visible, **kwargs):
        '''Create a new incident.
        :param name: Name of the incident
        :param message: A message (supporting Markdown) to explain more.
        :param status: Status of the incident.
        :param visible: Whether the incident is publicly visible.
        :param component_id: (optional) Component to update.
        :param component_status: (optional) The status to update the given component with.
        :param notify: (optional) Whether to notify subscribers.
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        kwargs['name'] = name
        kwargs['message'] = message
        kwargs['status'] = status
        kwargs['visible'] = visible
        return self.__postRequest('/incidents', kwargs)
    def putIncidentsByID(self, id, **kwargs):
        '''Update an incident.
        :param id: ID of the incident to update.
        :param name: (optional) Name of the incident
        :param message: (optional) A message (supporting Markdown) to explain more.
        :param status: (optional) Status of the incident.
        :param visible: (optional) Whether the incident is publicly visible.
        :param component_id: (optional) Component to update.
        :param notify: (optional) Whether to notify subscribers.
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        return self.__putRequest('/incidents/%s' % id, kwargs)
    def deleteIncidentsByID(self, id):
        '''Delete an incident.
        :param id: Incident ID
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        return self.__delRequest('/incidents/%s' % id)
    def getMetrics(self):
        '''Returns all metrics that have been setup.
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        return self.__getRequest('/metrics')
    def postMetrics(self, name, suffix, description, default_value, **kwargs):
        '''Create a new metric.
        :param name: Name of metric
        :param suffix: Measurements in
        :param description: Description of what the metric is measuring
        :param default_value: The default value to use when a point is added
        :param display_chart: (optional) Whether to display the chart on the status page
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        kwargs['name'] = name
        kwargs['suffix'] = suffix
        kwargs['description'] = description
        kwargs['default_value'] = default_value
        return self.__postRequest('/metrics', kwargs)
    def getMetricsByID(self, id):
        '''Returns a single metric, without points.
        :param id: Metric ID
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        return self.__getRequest('/metrics/%s' % id)
    def deleteMetricsByID(self, id):
        '''Delete a metric.
        :param id: Metric ID
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        return self.__delRequest('/metrics/%s' % id)
    def getMetricsPointsByID(self, id):
        '''Return a list of metric points.
        :param id: Metric ID
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        return self.__getRequest('/metrics/%s/points' % id)
    def postMetricsPointsByID(self, id, value, **kwargs):
        '''Add a metric point to a given metric.
        :param id: Metric ID
        :param value: Value to plot on the metric graph
        :param timestamp: Unix timestamp of the point was measured
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        kwargs['value'] = value
        return self.__postRequest('/metrics/%s/points' % id, kwargs)
    def deleteMetricsPointsByID(self, id, point_id):
        '''Delete a metric point.
        :param id: Metric ID
        :param point_id: Metric Point ID
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        return self.__delRequest('/metrics/%s/points/%s' % (id, point_id))
    def getSubscribers(self):
        '''Returns all subscribers.
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        return self.__getRequest('/subscribers')
    def postSubscribers(self, email, **kwargs):
        '''Create a new subscriber.
        :param email: Email address to subscribe
        :param verify: (optional) Whether to send verification email
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        kwargs['email'] = email
        return self.__postRequest('/subscribers', kwargs)
    def deleteSubscribersByID(self, id):
        '''Delete a subscriber.
        :param id: ID of the subscriber to delete
        :return: :class:`Response <Response>` object
        :rtype: requests.Response
        '''
        return self.__delRequest('/subscribers/%s' % id)
|
__author__ = 'v-lshen'
from dev.python.Serverlet import *
from EchoCodeDefinition import *
from dev.python.RpcStream import *
class EchoServer:
    """Registers and serves the RPC_ECHO endpoint for the RPC demo."""
    def open_servive(self):
        # NOTE(review): 'open_servive' looks like a typo for 'open_service',
        # but renaming would break existing callers; left as-is.
        Serverlet.register_rpc_handler(RPC_ECHO, 'RPC_ECHO', self.on_echo)
    @staticmethod
    def on_echo(rpc_request_content, rpc_response):
        # server receive echo request
        # Decode the request with format 'f6s'; the result is currently
        # unused -- presumably kept for its side effects or as a sanity
        # check. TODO confirm whether it can be dropped.
        unpacked_data = RpcStream.read('f6s', rpc_request_content)
        rpc_stream = RpcStream()
        # Build the reply payload: two fixed byte strings, then the int
        # 2130706433 (127.0.0.1 encoded as a 32-bit integer) and port 8101.
        rpc_stream.write('7s', b'ccccccc')
        rpc_stream.write('9s', b'localhost')
        rpc_stream.write('i', 2130706433)
        rpc_stream.write('i', 8101)
        response_content = rpc_stream.get_content()
        Serverlet.rpc_reply(rpc_response, response_content)
        return
    @staticmethod
    def close_service():
        # NOTE(review): 'dsn_rpc_unregiser_handler' spelling comes from the
        # external Native API; do not "fix" it here.
        Native.dsn_rpc_unregiser_handler(RPC_ECHO)
        InterOpLookupTable.dict_release()
|
"""Various utility functions for dealing with images, such as
reading images as numpy arrays, resizing images or making them sharper."""
import os.path
import cv2
import numpy
import exifread
from utils import file_cache
@file_cache
def read_image(image_path, ROI=None):
    """Read an image file and return it as a grayscale matrix.

    :param image_path: path to the image file
    :type image_path: str
    :param ROI: optional (x, y, w, h) region of interest
    :returns: numpy.ndarray
    """
    bgr = read_color_image(image_path, ROI)
    grayscale = cv2.cvtColor(bgr, cv2.COLOR_BGR2GRAY)
    return grayscale
@file_cache
def read_color_image(image_path, ROI=None):
    """Read an image from a file in BGR format

    :param image_path: path to the image file
    :type image_path: str
    :param ROI: optional (x, y, w, h) region of interest
    :returns: numpy.ndarray
    :raises TypeError: if image_path is not a string
    :raises IOError: if the file is missing or cannot be decoded
    """
    # isinstance accepts a tuple of types; this replaces the chained
    # isinstance(...) or isinstance(...) checks ('unicode' is Python 2).
    if not isinstance(image_path, (str, unicode)):
        raise TypeError("image_path should be a string, not %s" %
                        type(image_path))
    if not os.path.isfile(image_path):
        raise IOError("Image not found: %s" % image_path)
    image = cv2.imread(image_path)
    # cv2.imread signals failure by returning None, not by raising.
    if image is None:
        raise IOError("Unable to read image: %s" % image_path)
    return extract_ROI(image_path, image, ROI)
def extract_ROI(image_path, image, ROI):
    """Extract the region of interest out of an image

    :param image_path: path to the image file (used only in error messages)
    :type image_path: str
    :param image: the image matrix, shape (height, width, channels)
    :type image: numpy.ndarray
    :param ROI: (x, y, w, h) rectangle, or None for the whole image
    :returns: numpy.ndarray -- the region of interest
    :raises TypeError: if ROI does not have exactly 4 entries
    :raises ValueError: if the rectangle falls outside the image
    """
    if ROI is None:
        return image
    if len(ROI) != 4:
        raise TypeError("ROI needs to be of length 4")
    x, y, w, h = ROI
    height, width, _ = image.shape
    if x < 0 or y < 0 or x + w > width or y + h > height:
        raise ValueError("Invalid dimensions for ROI for image: %s"
                         % image_path)
    # Fix: numpy images are indexed [row, col] == [y, x]. The original
    # sliced image[x:x + w, y:y + h], which contradicts the bounds check
    # above and crops the wrong region whenever x != y or w != h.
    return image[y:y + h, x:x + w]
def resize(image, size):
    """Resizes an image matrix so that the longest side
    of an image is the specified size at maximum.

    :param image: the image matrix (grayscale: shape (height, width))
    :type image: numpy.ndarray
    :param size: the maximum size for one side of the image
    :type size: int
    :returns: numpy.ndarray
    """
    height, width = image.shape
    # Already small enough; return unchanged.
    if max(height, width) <= size:
        return image
    ratio = max(height, width) / float(size)
    height /= ratio
    width /= ratio
    # Fix: cv2.resize takes dsize as (width, height); the original passed
    # (height, width), which transposed the output dimensions for any
    # non-square image.
    return cv2.resize(image, (int(width), int(height)))
def read_exif_tags(image_path):
    """Parse and return the EXIF tags of an image file.

    :param image_path: path to the image file
    :type image_path: str
    :returns: dict -- the exif tags
    """
    with open(image_path, 'rb') as image_file:
        tags = exifread.process_file(image_file, details=False)
    return tags
def reduce_colors(image, colors):
    """Reduces the number of colors in a given image to certain
    amount. The algorithm uses the k-nearest neighbors method to
    do this. The given image must have colors, meaning three color
    channels. The algorithm is taken from
    "http://docs.opencv.org/3.0-beta/doc/py_tutorials/py_ml/py_kmeans
    /py_kmeans_opencv/py_kmeans_opencv.html"
    :param image: the image to process (must have three channels)
    :type image: numpy.ndarray
    :param colors: how many colors the final image should have
    :type colors: int
    :returns: numpy.ndarray
    """
    # Flatten to an (N, 3) float32 sample matrix, as cv2.kmeans requires.
    Z = image.reshape((-1, 3)).astype(numpy.float32)
    # Stop after 10 iterations or once cluster centers move less than 1.0.
    criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 10, 1.0)
    ret, label, center = cv2.kmeans(data=Z, K=colors, criteria=criteria,
                                    attempts=10, flags=cv2.KMEANS_PP_CENTERS,
                                    bestLabels=None)
    # Replace each pixel with its cluster center (as uint8) and restore
    # the original image shape.
    center = numpy.uint8(center)
    return center[label.flatten()].reshape(image.shape)
def sharpen(image):
    """Sharpens an image via unsharp masking: subtract a Gaussian-blurred
    copy (weight -0.5) from an amplified original (weight 1.5).

    :param image: the image matrix
    :type image: numpy.ndarray
    :returns: numpy.ndarray
    """
    smoothed = cv2.GaussianBlur(image, (5, 5), 0)
    sharpened = cv2.addWeighted(image, 1.5, smoothed, -0.5, 0)
    return sharpened
def logarithmic_transformation2D(array_2D):
    """Performs a logarithmic transformation of a matrix, scaled so that
    the transformed maximum maps to 1.

    :param array_2D: a numpy matrix
    :type array_2D: numpy.ndarray
    :returns: numpy.ndarray
    """
    peak = numpy.amax(array_2D)
    scale = 1 / numpy.log(1 + numpy.abs(peak))
    return numpy.log(1 + numpy.abs(array_2D)) * scale
def count_magnitude_spectrum(image):
    """Returns the magnitude spectrum of an image: the 2D FFT, shifted so
    the zero frequency sits in the center, then log-transformed.

    :param image: the image matrix
    :type image: numpy.ndarray
    :returns: numpy.ndarray
    """
    shifted = numpy.fft.fftshift(numpy.fft.fft2(image))
    return logarithmic_transformation2D(shifted)
|
from __future__ import unicode_literals
import os
from arpeggio import *
from arpeggio import RegExMatch as _
# --- PEG grammar rules (arpeggio style). Each function returns its rule
# definition: tuples are sequences, lists are ordered choices, and '_' is
# arpeggio.RegExMatch. The return-value structure IS the grammar, so the
# shape of these expressions must not be altered. ---
def comment(): return [_("//.*"), _("/\*.*\*/")]
# NOTE(review): the block-comment regex above uses '.' without DOTALL, so a
# '/* ... */' comment spanning multiple lines presumably will not match --
# confirm whether multi-line comments are intended to be supported.
def literal(): return _(r'\d*\.\d*|\d+|".*?"')
def symbol(): return _(r"\w+")
def operator(): return _(r"\+|\-|\*|\/|\=\=")
def operation(): return symbol, operator, [literal, functioncall]
def expression(): return [literal, operation, functioncall]
def expressionlist(): return expression, ZeroOrMore(",", expression)
def returnstatement(): return Kwd("return"), expression
def ifstatement(): return Kwd("if"), "(", expression, ")", block, Kwd("else"), block
def statement(): return [ifstatement, returnstatement], ";"
def block(): return "{", OneOrMore(statement), "}"
def parameterlist(): return "(", symbol, ZeroOrMore(",", symbol), ")"
def functioncall(): return symbol, "(", expressionlist, ")"
def function(): return Kwd("function"), symbol, parameterlist, block
# Root rule of the grammar.
def simpleLanguage(): return function
def main(debug=False):
    """Parse the bundled 'program.simple' test program.

    :param debug: when True, arpeggio writes .dot (graphviz) files for the
                  parser model and the parse tree.
    :returns: the resulting parse tree
    """
    # Load test program from file; 'with' guarantees the file handle is
    # closed (the original open() call leaked it).
    current_dir = os.path.dirname(__file__)
    with open(os.path.join(current_dir, 'program.simple')) as program_file:
        test_program = program_file.read()
    # Parser instantiation. simpleLanguage is the definition of the root rule
    # and comment is a grammar rule for comments.
    parser = ParserPython(simpleLanguage, comment, debug=debug)
    # Return the tree so callers and tests can inspect it (it was
    # previously computed and discarded).
    return parser.parse(test_program)
if __name__ == "__main__":
    # In debug mode dot (graphviz) files for the parser model
    # and the parse tree will be created for visualization.
    # Check the current folder for the generated .dot files.
    main(debug=True)
|
from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
class EmailBackend(ModelBackend):
    """A django.contrib.auth backend that authenticates the user based on its
    email address instead of the username.
    """
    def authenticate(self, email=None, password=None):
        """Authenticate user using its email address instead of username.

        Returns the ``User`` when the password matches; returns ``None``
        when no user has this email, and falls through (implicitly
        returning ``None``) when the password check fails.

        NOTE(review): if several users share an email address,
        ``User.objects.get`` raises ``MultipleObjectsReturned``, which
        propagates to the caller -- confirm emails are unique here.
        """
        try:
            user = User.objects.get(email=email)
            if user.check_password(password):
                return user
        except User.DoesNotExist:
            return None
|
"""Create and parse 'send'-type messages."""
import struct
from . import (util, config, exceptions, czarcoin, util)
FORMAT = '>QQ'  # big-endian: asset_id (uint64) followed by quantity (uint64)
LENGTH = 8 + 8  # byte length of a packed FORMAT payload
ID = 0  # message-type identifier for 'send' messages
def validate(db, source, destination, asset, quantity):
    """Collect validation problems for a proposed send; an empty list
    means the send is valid."""
    problems = []
    # The native asset cannot be sent via this message type; the check
    # exists only for parsing.
    if asset == config.CZR:
        problems.append('cannot send czarcoins')
    if not isinstance(quantity, int):
        problems.append('quantity must be in satoshis')
        # A non-int cannot be compared below, so stop here.
        return problems
    if quantity < 0:
        problems.append('negative quantity')
    return problems
def compose (db, source, destination, asset, quantity):
    """Build the (source, destinations, data) triple for a send transaction.

    For the native asset (CZR) the value travels in the transaction output
    itself and no data payload is produced; for any other asset the
    quantity is encoded into a packed data payload instead.
    """
    cursor = db.cursor()
    # Just send CZR?
    if asset == config.CZR:
        return (source, [(destination, quantity)], None)
    #quantity must be in int satoshi (not float, string, etc)
    if not isinstance(quantity, int):
        raise exceptions.SendError('quantity must be an int (in satoshi)')
    # Only for outgoing (incoming will overburn).
    balances = list(cursor.execute('''SELECT * FROM balances WHERE (address = ? AND asset = ?)''', (source, asset)))
    if not balances or balances[0]['quantity'] < quantity:
        raise exceptions.SendError('insufficient funds')
    problems = validate(db, source, destination, asset, quantity)
    if problems: raise exceptions.SendError(problems)
    asset_id = util.asset_id(asset)
    # Payload layout: prefix + message-type id + packed (asset_id, quantity).
    data = config.PREFIX + struct.pack(config.TXTYPE_FORMAT, ID)
    data += struct.pack(FORMAT, asset_id, quantity)
    cursor.close()
    # Destination amount is None: the asset moves via the data payload.
    return (source, [(destination, None)], data)
def parse (db, tx, message):
    """Parse a 'send' message payload and apply it to the database.

    Unpacks (asset_id, quantity), clamps oversends to the sender's
    balance, debits the source and credits the destination when valid,
    and records the outcome (with its status) in the 'sends' table.
    """
    cursor = db.cursor()
    # Unpack message.
    try:
        assert len(message) == LENGTH
        asset_id, quantity = struct.unpack(FORMAT, message)
        asset = util.asset_name(asset_id)
        status = 'valid'
    except (AssertionError, struct.error) as e:
        asset, quantity = None, None
        status = 'invalid: could not unpack'
    if status == 'valid':
        # Oversend
        # Clamp the quantity to the available balance instead of failing.
        cursor.execute('''SELECT * FROM balances \
                          WHERE (address = ? AND asset = ?)''', (tx['source'], asset))
        balances = cursor.fetchall()
        if not balances:
            status = 'invalid: insufficient funds'
        elif balances[0]['quantity'] < quantity:
            quantity = min(balances[0]['quantity'], quantity)
    if status == 'valid':
        # For SQLite3
        # Cap to the largest integer SQLite can store.
        quantity = min(quantity, config.MAX_INT)
        problems = validate(db, tx['source'], tx['destination'], asset, quantity)
        if problems: status = 'invalid: ' + '; '.join(problems)
    if status == 'valid':
        # Move the funds: debit sender, credit receiver.
        util.debit(db, tx['block_index'], tx['source'], asset, quantity, event=tx['tx_hash'])
        util.credit(db, tx['block_index'], tx['destination'], asset, quantity, event=tx['tx_hash'])
    # Add parsed transaction to message-type–specific table.
    bindings = {
        'tx_index': tx['tx_index'],
        'tx_hash': tx['tx_hash'],
        'block_index': tx['block_index'],
        'source': tx['source'],
        'destination': tx['destination'],
        'asset': asset,
        'quantity': quantity,
        'status': status,
    }
    sql='insert into sends values(:tx_index, :tx_hash, :block_index, :source, :destination, :asset, :quantity, :status)'
    cursor.execute(sql, bindings)
    cursor.close()
|
import Cookie
import os
from datetime import datetime, timedelta
import time
from beaker.crypto import hmac as HMAC, hmac_sha1 as SHA1, sha1
from beaker import crypto, util
from beaker.cache import clsmap
from beaker.exceptions import BeakerException, InvalidCryptoBackendError
from base64 import b64encode, b64decode
__all__ = ['SignedCookie', 'Session']
# Prefer uuid4 for session ids; fall back to a hash of time/pid/random
# entropy on platforms without the uuid module.
try:
    import uuid
    def _session_id():
        # 32-char hex string from a random UUID.
        return uuid.uuid4().hex
except ImportError:
    import random
    if hasattr(os, 'getpid'):
        getpid = os.getpid
    else:
        def getpid():
            # Platform without getpid(): contribute nothing to the seed.
            return ''
    def _session_id():
        # Seed string mixes wall-clock time, a fresh object id, a random
        # float and the process id.
        id_str = "%f%s%f%s" % (
                    time.time(),
                    id({}),
                    random.random(),
                    getpid()
                )
        # NB: nothing against second parameter to b64encode, but it seems
        # to be slower than simple chained replacement
        if util.py3k:
            raw_id = b64encode(sha1(id_str.encode('ascii')).digest())
            # NOTE(review): str() of a bytes object yields "b'...'" on
            # Python 3 -- presumably a decode was intended; confirm.
            return str(raw_id.replace(b'+', b'-').replace(b'/', b'_').rstrip(b'='))
        else:
            raw_id = b64encode(sha1(id_str).digest())
            # URL-safe alphabet, padding stripped.
            return raw_id.replace('+', '-').replace('/', '_').rstrip('=')
class SignedCookie(Cookie.BaseCookie):
    """Extends python cookie to give digital signature support"""
    def __init__(self, secret, input=None):
        # HMAC key used to sign and verify every cookie value.
        self.secret = secret.encode('UTF-8')
        Cookie.BaseCookie.__init__(self, input)
    def value_decode(self, val):
        """Verify the leading 40-char HMAC-SHA1 hex signature; return
        (payload, raw) on success or (None, raw) on failure."""
        val = val.strip('"')
        sig = HMAC.new(self.secret, val[40:].encode('UTF-8'), SHA1).hexdigest()
        # Avoid timing attacks
        # Constant-time comparison: accumulate mismatches instead of
        # returning at the first differing character.
        invalid_bits = 0
        input_sig = val[:40]
        if len(sig) != len(input_sig):
            return None, val
        for a, b in zip(sig, input_sig):
            invalid_bits += a != b
        if invalid_bits:
            return None, val
        else:
            return val[40:], val
    def value_encode(self, val):
        # Prepend the hex signature to the payload.
        sig = HMAC.new(self.secret, val.encode('UTF-8'), SHA1).hexdigest()
        return str(val), ("%s%s" % (sig, val))
class Session(dict):
"""Session object that uses container package for storage.
:param invalidate_corrupt: How to handle corrupt data when loading. When
set to True, then corrupt data will be silently
invalidated and a new session created,
otherwise invalid data will cause an exception.
:type invalidate_corrupt: bool
:param use_cookies: Whether or not cookies should be created. When set to
False, it is assumed the user will handle storing the
session on their own.
:type use_cookies: bool
:param type: What data backend type should be used to store the underlying
session data
:param key: The name the cookie should be set to.
:param timeout: How long session data is considered valid. This is used
regardless of the cookie being present or not to determine
whether session data is still valid.
:type timeout: int
:param cookie_expires: Expiration date for cookie
:param cookie_domain: Domain to use for the cookie.
:param cookie_path: Path to use for the cookie.
:param secure: Whether or not the cookie should only be sent over SSL.
:param httponly: Whether or not the cookie should only be accessible by
the browser not by JavaScript.
:param encrypt_key: The key to use for the local session encryption, if not
provided the session will not be encrypted.
:param validate_key: The key used to sign the local encrypted session
"""
    def __init__(self, request, id=None, invalidate_corrupt=False,
                 use_cookies=True, type=None, data_dir=None,
                 key='beaker.session.id', timeout=None, cookie_expires=True,
                 cookie_domain=None, cookie_path='/', secret=None,
                 secure=False, namespace_class=None, httponly=False,
                 encrypt_key=None, validate_key=None, **namespace_args):
        """Set up the session: pick a storage backend, read the session id
        from the request cookie (signed when a secret is given), and either
        start a fresh session or load the existing one."""
        # Backend selection: explicit type wins; otherwise file-backed when
        # a data_dir is given, in-memory as the last resort.
        if not type:
            if data_dir:
                self.type = 'file'
            else:
                self.type = 'memory'
        else:
            self.type = type
        self.namespace_class = namespace_class or clsmap[self.type]
        self.namespace_args = namespace_args
        self.request = request
        self.data_dir = data_dir
        self.key = key
        self.timeout = timeout
        self.use_cookies = use_cookies
        self.cookie_expires = cookie_expires
        # Default cookie domain/path
        self._domain = cookie_domain
        self._path = cookie_path
        self.was_invalidated = False
        self.secret = secret
        self.secure = secure
        self.httponly = httponly
        self.encrypt_key = encrypt_key
        self.validate_key = validate_key
        self.id = id
        self.accessed_dict = {}
        self.invalidate_corrupt = invalidate_corrupt
        if self.use_cookies:
            cookieheader = request.get('cookie', '')
            if secret:
                # Signed cookies; fall back to an empty cookie jar when the
                # incoming header cannot be parsed.
                try:
                    self.cookie = SignedCookie(secret, input=cookieheader)
                except Cookie.CookieError:
                    self.cookie = SignedCookie(secret, input=None)
            else:
                self.cookie = Cookie.SimpleCookie(input=cookieheader)
            # Prefer an explicitly supplied id over the cookie's value.
            if not self.id and self.key in self.cookie:
                self.id = self.cookie[self.key].value
        self.is_new = self.id is None
        if self.is_new:
            self._create_id()
            self['_accessed_time'] = self['_creation_time'] = time.time()
        else:
            try:
                self.load()
            except Exception, e:
                # Corrupt persisted data: optionally discard it and start
                # over instead of propagating the error.
                if invalidate_corrupt:
                    util.warn(
                        "Invalidating corrupt session %s; "
                        "error was: %s.  Set invalidate_corrupt=False "
                        "to propagate this exception." % (self.id, e))
                    self.invalidate()
                else:
                    raise
    def has_key(self, name):
        # Python 2 dict-compat shim; equivalent to ``name in self``.
        return name in self
    def _set_cookie_values(self, expires=None):
        """Populate the outgoing cookie morsel (domain, secure, httponly,
        path, expiry) for this session id."""
        self.cookie[self.key] = self.id
        if self._domain:
            self.cookie[self.key]['domain'] = self._domain
        if self.secure:
            self.cookie[self.key]['secure'] = True
        self._set_cookie_http_only()
        self.cookie[self.key]['path'] = self._path
        self._set_cookie_expires(expires)
    def _set_cookie_expires(self, expires):
        """Resolve and apply the cookie expiry.

        When *expires* is None it is derived from ``self.cookie_expires``:
        True -> session cookie (no expiry header), False -> effectively
        never expires (2038 sentinel), timedelta -> now + delta,
        datetime -> used as-is. Returns the resolved datetime or None.
        """
        if expires is None:
            if self.cookie_expires is not True:
                if self.cookie_expires is False:
                    # 0x7FFFFFFF: max 32-bit timestamp (2038-01-19).
                    expires = datetime.fromtimestamp( 0x7FFFFFFF )
                elif isinstance(self.cookie_expires, timedelta):
                    expires = datetime.utcnow() + self.cookie_expires
                elif isinstance(self.cookie_expires, datetime):
                    expires = self.cookie_expires
                else:
                    raise ValueError("Invalid argument for cookie_expires: %s"
                                     % repr(self.cookie_expires))
            else:
                expires = None
        if expires is not None:
            # Ensure a morsel exists before setting its attribute.
            if not self.cookie or self.key not in self.cookie:
                self.cookie[self.key] = self.id
            self.cookie[self.key]['expires'] = \
                expires.strftime("%a, %d-%b-%Y %H:%M:%S GMT" )
        return expires
    def _update_cookie_out(self, set_cookie=True):
        # Render the cookie (without the "Set-Cookie:" prefix) into the
        # request dict so the response middleware can emit the header.
        self.request['cookie_out'] = self.cookie[self.key].output(header='')
        self.request['set_cookie'] = set_cookie
    def _set_cookie_http_only(self):
        """Set the HttpOnly flag, tolerating Python versions whose Cookie
        module does not recognize the attribute."""
        try:
            if self.httponly:
                self.cookie[self.key]['httponly'] = True
        except Cookie.CookieError, e:
            # Re-raise anything other than the missing-attribute error.
            if 'Invalid Attribute httponly' not in str(e):
                raise
            util.warn('Python 2.6+ is required to use httponly')
def _create_id(self, set_new=True):
self.id = _session_id()
if set_new:
self.is_new = True
self.last_accessed = None
if self.use_cookies:
self._set_cookie_values()
sc = set_new == False
self._update_cookie_out(set_cookie=sc)
    @property
    def created(self):
        # Unix timestamp recorded when the session was first created.
        return self['_creation_time']
def _set_domain(self, domain):
self['_domain'] = domain
self.cookie[self.key]['domain'] = domain
self._update_cookie_out()
    def _get_domain(self):
        # Cookie domain currently in effect (may be None).
        return self._domain
    domain = property(_get_domain, _set_domain)
    def _set_path(self, path):
        # Persist the path both in session data and on the instance, then
        # mirror it onto the cookie and re-render the outgoing header.
        self['_path'] = self._path = path
        self.cookie[self.key]['path'] = path
        self._update_cookie_out()
    def _get_path(self):
        # Cookie path currently in effect.
        return self._path
    path = property(_get_path, _set_path)
    def _encrypt_data(self, session_data=None):
        """Serialize, encipher, and base64 the session dict.

        Output format when an encrypt_key is set: an 8-char base64 nonce
        followed by the base64 AES ciphertext of the pickled data. The
        AES key is derived from encrypt_key + (validate_key + nonce).
        Without an encrypt_key the pickled data is only base64-encoded.
        """
        session_data = session_data or self.copy()
        if self.encrypt_key:
            # 6 random bytes -> 8 base64 chars used as the nonce
            nonce = b64encode(os.urandom(6))[:8]
            encrypt_key = crypto.generateCryptoKeys(self.encrypt_key,
                                                    self.validate_key + nonce, 1)
            data = util.pickle.dumps(session_data, 2)
            return nonce + b64encode(crypto.aesEncrypt(data, encrypt_key))
        else:
            data = util.pickle.dumps(session_data, 2)
            return b64encode(data)
    def _decrypt_data(self, session_data):
        """Bas64, decipher, then un-serialize the data for the session
        dict.

        Mirrors ``_encrypt_data``: the first 8 characters are the nonce,
        the rest is base64 AES ciphertext. Returns None (so callers treat
        the session as empty) when decryption/unpickling fails and
        ``invalidate_corrupt`` is set.
        """
        if self.encrypt_key:
            try:
                nonce = session_data[:8]
                encrypt_key = crypto.generateCryptoKeys(self.encrypt_key,
                                                        self.validate_key + nonce, 1)
                payload = b64decode(session_data[8:])
                data = crypto.aesDecrypt(payload, encrypt_key)
            except:
                # As much as I hate a bare except, we get some insane errors
                # here that get tossed when crypto fails, so we raise the
                # 'right' exception
                if self.invalidate_corrupt:
                    return None
                else:
                    raise
            try:
                return util.pickle.loads(data)
            except:
                # Deliberate bare except: corrupt pickles can raise many
                # unrelated exception types.
                if self.invalidate_corrupt:
                    return None
                else:
                    raise
        else:
            data = b64decode(session_data)
            return util.pickle.loads(data)
    def _delete_cookie(self):
        # Force the browser to drop the cookie by re-sending it with an
        # expiry one year in the past.
        self.request['set_cookie'] = True
        expires = datetime.utcnow() - timedelta(365)
        self._set_cookie_values(expires)
        self._update_cookie_out()
    def delete(self):
        """Deletes the session from the persistent storage, and sends
        an expired cookie out"""
        if self.use_cookies:
            self._delete_cookie()
        # Emptying the dict causes save() to remove the stored namespace.
        self.clear()
    def invalidate(self):
        """Invalidates this session, creates a new session id, returns
        to the is_new state"""
        self.clear()
        self.was_invalidated = True
        self._create_id()
        # Reload so the dict is re-seeded with fresh creation/access times.
        self.load()
    def load(self):
        "Loads the data from this session from persistent storage"
        self.namespace = self.namespace_class(self.id,
                                              data_dir=self.data_dir,
                                              digest_filenames=False,
                                              **self.namespace_args)
        now = time.time()
        if self.use_cookies:
            self.request['set_cookie'] = True
        # Hold the read lock for the whole load; released in the finally.
        self.namespace.acquire_read_lock()
        timed_out = False
        try:
            self.clear()
            try:
                session_data = self.namespace['session']
                if (session_data is not None and self.encrypt_key):
                    session_data = self._decrypt_data(session_data)
                # Memcached always returns a key, its None when its not
                # present
                if session_data is None:
                    session_data = {
                        '_creation_time': now,
                        '_accessed_time': now
                    }
                    self.is_new = True
            except (KeyError, TypeError):
                # Missing or malformed namespace entry -> start fresh.
                session_data = {
                    '_creation_time': now,
                    '_accessed_time': now
                }
                self.is_new = True
            # _decrypt_data may also have returned None for corrupt data.
            if session_data is None or len(session_data) == 0:
                session_data = {
                    '_creation_time': now,
                    '_accessed_time': now
                }
                self.is_new = True
            if self.timeout is not None and \
               now - session_data['_accessed_time'] > self.timeout:
                timed_out = True
            else:
                # Properly set the last_accessed time, which is different
                # than the *currently* _accessed_time
                if self.is_new or '_accessed_time' not in session_data:
                    self.last_accessed = None
                else:
                    self.last_accessed = session_data['_accessed_time']
                # Update the current _accessed_time
                session_data['_accessed_time'] = now
                # Set the path if applicable
                if '_path' in session_data:
                    self._path = session_data['_path']
                self.update(session_data)
                # Snapshot for save(accessed_only=True) / revert()
                self.accessed_dict = session_data.copy()
        finally:
            self.namespace.release_read_lock()
        if timed_out:
            # Invalidate outside the lock: invalidate() re-enters load().
            self.invalidate()
    def save(self, accessed_only=False):
        """Saves the data for this session to persistent storage
        If accessed_only is True, then only the original data loaded
        at the beginning of the request will be saved, with the updated
        last accessed time.
        """
        # Look to see if its a new session that was only accessed
        # Don't save it under that case
        if accessed_only and self.is_new:
            return None
        # this session might not have a namespace yet or the session id
        # might have been regenerated
        if not hasattr(self, 'namespace') or self.namespace.namespace != self.id:
            self.namespace = self.namespace_class(
                self.id,
                data_dir=self.data_dir,
                digest_filenames=False,
                **self.namespace_args)
        self.namespace.acquire_write_lock(replace=True)
        try:
            if accessed_only:
                # Persist only the snapshot taken at load() time.
                data = dict(self.accessed_dict.items())
            else:
                data = dict(self.items())
            if self.encrypt_key:
                data = self._encrypt_data(data)
            # Save the data
            if not data and 'session' in self.namespace:
                # Empty session (e.g. after delete()) -> drop stored entry.
                del self.namespace['session']
            else:
                self.namespace['session'] = data
        finally:
            self.namespace.release_write_lock()
        if self.use_cookies and self.is_new:
            self.request['set_cookie'] = True
    def revert(self):
        """Revert the session to its original state from its first
        access in the request"""
        self.clear()
        # accessed_dict was snapshotted by load() before any mutation.
        self.update(self.accessed_dict)
    def regenerate_id(self):
        """
        Creates a new session id, retains all session data.

        It is a good security practice to regenerate the id after a
        client elevates privileges (mitigates session fixation).
        """
        self._create_id(set_new=False)
    # TODO: I think both these methods should be removed. They're from
    # the original mod_python code i was ripping off but they really
    # have no use here.
    def lock(self):
        """Locks this session against other processes/threads. This is
        automatic when load/save is called.
        ***use with caution*** and always with a corresponding 'unlock'
        inside a "finally:" block, as a stray lock typically cannot be
        unlocked without shutting down the whole application.
        """
        self.namespace.acquire_write_lock()
    def unlock(self):
        """Unlocks this session against other processes/threads. This
        is automatic when load/save is called.
        ***use with caution*** and always within a "finally:" block, as
        a stray lock typically cannot be unlocked without shutting down
        the whole application.
        """
        self.namespace.release_write_lock()
class CookieSession(Session):
    """Pure cookie-based session
    Options recognized when using cookie-based sessions are slightly
    more restricted than general sessions.
    :param key: The name the cookie should be set to.
    :param timeout: How long session data is considered valid. This is used
                    regardless of the cookie being present or not to determine
                    whether session data is still valid.
    :type timeout: int
    :param cookie_expires: Expiration date for cookie
    :param cookie_domain: Domain to use for the cookie.
    :param cookie_path: Path to use for the cookie.
    :param secure: Whether or not the cookie should only be sent over SSL.
    :param httponly: Whether or not the cookie should only be accessible by
                     the browser not by JavaScript.
    :param encrypt_key: The key to use for the local session encryption, if not
                        provided the session will not be encrypted.
    :param validate_key: The key used to sign the local encrypted session
    """
    def __init__(self, request, key='beaker.session.id', timeout=None,
                 cookie_expires=True, cookie_domain=None, cookie_path='/',
                 encrypt_key=None, validate_key=None, secure=False,
                 httponly=False, **kwargs):
        # Cookie-only sessions require AES since all data lives client-side.
        if not crypto.has_aes and encrypt_key:
            raise InvalidCryptoBackendError("No AES library is installed, can't generate "
                                            "encrypted cookie-only Session.")
        self.request = request
        self.key = key
        self.timeout = timeout
        self.cookie_expires = cookie_expires
        self.encrypt_key = encrypt_key
        self.validate_key = validate_key
        self.request['set_cookie'] = False
        self.secure = secure
        self.httponly = httponly
        self._domain = cookie_domain
        self._path = cookie_path
        try:
            cookieheader = request['cookie']
        except KeyError:
            cookieheader = ''
        if validate_key is None:
            raise BeakerException("No validate_key specified for Cookie only "
                                  "Session.")
        try:
            # SignedCookie verifies the HMAC signature of incoming values.
            self.cookie = SignedCookie(validate_key, input=cookieheader)
        except Cookie.CookieError:
            self.cookie = SignedCookie(validate_key, input=None)
        self['_id'] = _session_id()
        self.is_new = True
        # If we have a cookie, load it
        if self.key in self.cookie and self.cookie[self.key].value is not None:
            self.is_new = False
            try:
                cookie_data = self.cookie[self.key].value
                self.update(self._decrypt_data(cookie_data))
                self._path = self.get('_path', '/')
            except:
                # Corrupt/forged cookie payloads are silently discarded.
                pass
            # NOTE(review): if the decrypt above failed, '_accessed_time'
            # may be missing here and this lookup would raise KeyError --
            # confirm whether callers rely on that.
            if self.timeout is not None and time.time() - \
               self['_accessed_time'] > self.timeout:
                self.clear()
            self.accessed_dict = self.copy()
            self._create_cookie()
    def created(self):
        # Epoch timestamp stored when the cookie payload was first built.
        return self['_creation_time']
    created = property(created)
    def id(self):
        # Session id kept inside the (signed) cookie payload itself.
        return self['_id']
    id = property(id)
    def _set_domain(self, domain):
        self['_domain'] = domain
        self._domain = domain
    def _get_domain(self):
        return self._domain
    domain = property(_get_domain, _set_domain)
    def _set_path(self, path):
        self['_path'] = self._path = path
    def _get_path(self):
        return self._path
    path = property(_get_path, _set_path)
    def save(self, accessed_only=False):
        """Saves the data for this session to persistent storage"""
        if accessed_only and self.is_new:
            return
        if accessed_only:
            # Roll back to the state captured right after load.
            self.clear()
            self.update(self.accessed_dict)
        self._create_cookie()
    def expire(self):
        """Delete the 'expires' attribute on this Session, if any."""
        self.pop('_expires', None)
    def _create_cookie(self):
        """Serialize the session dict into the outgoing signed cookie."""
        if '_creation_time' not in self:
            self['_creation_time'] = time.time()
        if '_id' not in self:
            self['_id'] = _session_id()
        self['_accessed_time'] = time.time()
        val = self._encrypt_data()
        # Browsers commonly cap cookies at ~4KB.
        if len(val) > 4064:
            raise BeakerException("Cookie value is too long to store")
        self.cookie[self.key] = val
        if '_expires' in self:
            expires = self['_expires']
        else:
            expires = None
        expires = self._set_cookie_expires(expires)
        if expires is not None:
            self['_expires'] = expires
        if '_domain' in self:
            self.cookie[self.key]['domain'] = self['_domain']
        elif self._domain:
            self.cookie[self.key]['domain'] = self._domain
        if self.secure:
            self.cookie[self.key]['secure'] = True
        self._set_cookie_http_only()
        self.cookie[self.key]['path'] = self.get('_path', '/')
        self.request['cookie_out'] = self.cookie[self.key].output(header='')
        self.request['set_cookie'] = True
    def delete(self):
        """Delete the cookie, and clear the session"""
        # Send a delete cookie request
        self._delete_cookie()
        self.clear()
    def invalidate(self):
        """Clear the contents and start a new session"""
        self.clear()
        self['_id'] = _session_id()
class SessionObject(object):
    """Session proxy/lazy creator
    This object proxies access to the actual session object, so that in
    the case that the session hasn't been used before, it will be
    setup. This avoid creating and loading the session from persistent
    storage unless its actually used during the request.
    """
    def __init__(self, environ, **params):
        # Direct __dict__ writes avoid triggering our own __setattr__,
        # which would proxy to (and thus eagerly create) the session.
        self.__dict__['_params'] = params
        self.__dict__['_environ'] = environ
        self.__dict__['_sess'] = None
        self.__dict__['_headers'] = {}
    def _session(self):
        """Lazy initial creation of session object"""
        if self.__dict__['_sess'] is None:
            params = self.__dict__['_params']
            environ = self.__dict__['_environ']
            self.__dict__['_headers'] = req = {'cookie_out': None}
            req['cookie'] = environ.get('HTTP_COOKIE')
            if params.get('type') == 'cookie':
                self.__dict__['_sess'] = CookieSession(req, **params)
            else:
                self.__dict__['_sess'] = Session(req, use_cookies=True,
                                                 **params)
        return self.__dict__['_sess']
    def __getattr__(self, attr):
        return getattr(self._session(), attr)
    def __setattr__(self, attr, value):
        setattr(self._session(), attr, value)
    def __delattr__(self, name):
        self._session().__delattr__(name)
    def __getitem__(self, key):
        return self._session()[key]
    def __setitem__(self, key, value):
        self._session()[key] = value
    def __delitem__(self, key):
        self._session().__delitem__(key)
    def __repr__(self):
        return self._session().__repr__()
    def __iter__(self):
        """Only works for proxying to a dict"""
        return iter(self._session().keys())
    def __contains__(self, key):
        return key in self._session()
    def has_key(self, key):
        # Backwards-compatible alias for __contains__.
        return key in self._session()
    def get_by_id(self, id):
        """Loads a session given a session ID"""
        params = self.__dict__['_params']
        session = Session({}, use_cookies=False, id=id, **params)
        if session.is_new:
            # id not found in storage -> a brand new session was created;
            # report "no such session" instead.
            return None
        return session
    def save(self):
        # Mark dirty; the actual write happens in persist().
        self.__dict__['_dirty'] = True
    def delete(self):
        self.__dict__['_dirty'] = True
        self._session().delete()
    def persist(self):
        """Persist the session to the storage
        If its set to autosave, then the entire session will be saved
        regardless of if save() has been called. Otherwise, just the
        accessed time will be updated if save() was not called, or
        the session will be saved if save() was called.
        """
        if self.__dict__['_params'].get('auto'):
            self._session().save()
        else:
            if self.__dict__.get('_dirty'):
                self._session().save()
            else:
                self._session().save(accessed_only=True)
    def dirty(self):
        # True when save()/delete() was called during this request.
        return self.__dict__.get('_dirty', False)
    def accessed(self):
        """Returns whether or not the session has been accessed"""
        return self.__dict__['_sess'] is not None
|
import re
from helpers.common import *
class Parser:
    """
    Parses tag references and tag definitions. Used for ranking
    """
    @staticmethod
    def extract_member_exp(line_to_symbol, source):
        """
        Extract receiver object e.g. receiver.mtd()
        Strip away brackets and operators.

        Scans *line_to_symbol* backwards (right to left), keeping
        identifier characters and skipping balanced bracket groups, and
        returns the member-expression parts split on the language's
        dereference operators (e.g. '.', '->', '::').

        TODO:HIGH: Add base lang defs + Python/Ruby/C++/Java/C#/PHP overrides (should be very similar)
        TODO: comment and string support (eat as may contain brackets. add them to context - js['prop1']['prop-of-prop1'])
        """
        lang = get_lang_setting(source)
        if not lang:
            return [line_to_symbol]
        # Get per-language syntax regex of brackets, splitters etc.
        mbr_exp = lang.get('member_exp')
        if mbr_exp is None:
            return [line_to_symbol]
        lstStop = mbr_exp.get('stop', [])
        if (not lstStop):
            print('warning!: language has member_exp setting but it is ineffective: Must have "stop" key with array of regex to stop search backward from identifier')
            return [line_to_symbol]
        lstClose = mbr_exp.get('close', [])
        reClose = concat_re(lstClose)
        lstOpen = mbr_exp.get('open', [])
        reOpen = concat_re(lstOpen)
        lstIgnore = mbr_exp.get('ignore', [])
        reIgnore = concat_re(lstIgnore)
        if len(lstOpen) != len(lstClose):
            print('warning!: extract_member_exp: settings lstOpen must match lstClose')
        # Maps each opening token to its expected closing token.
        matchOpenClose = dict(zip(lstOpen, lstClose))
        # Construct | regex from all open and close strings with capture (..)
        splex = concat_re(lstOpen + lstClose + lstIgnore + lstStop)
        reStop = concat_re(lstStop)
        splex = "({0}|{1})".format(splex, reIgnore)
        # re.split with a capturing group keeps the delimiters in the list.
        splat = re.split(splex, line_to_symbol)
        #print('splat=%s' % splat)
        # Stack iter reverse(splat) for detecting unbalanced e.g 'func(obj.yyy'
        # while skipping balanced brackets in getSlow(a && b).mtd()
        stack = []
        lstMbr = []
        insideExp = False
        for cur in reversed(splat):
            # Scan backwards from the symbol: If alpha-numeric - keep it. If
            # Closing bracket e.g ] or ) or } --> push into stack
            if re.match(reClose, cur):
                stack.append(cur)
                insideExp = True
            # If opening bracket --> match it from top-of-stack: If stack empty
            # - stop else If match pop-and-continue else stop scanning +
            # warning
            elif re.match(reOpen, cur):
                # '(' with no matching ')' --> func(obj.yyy case --> return obj.yyy
                if len(stack) == 0:
                    break
                tokClose = stack.pop()
                tokCloseCur = matchOpenClose.get(cur)
                if tokClose != tokCloseCur:
                    print(
                        'non-matching brackets at the same nesting level: %s %s' %
                        (tokCloseCur, tokClose))
                    break
                insideExp = False
            # If white space --> stop. Do not stop for whitespace inside
            # open-close brackets nested expression
            elif re.match(reStop, cur):
                if not insideExp:
                    break
            elif re.match(reIgnore, cur):
                pass
            else:
                # Prepend cur's characters (slice-assign on a list inserts
                # each character, preserving order since we scan backwards).
                lstMbr[0:0] = cur
        strMbrExp = "".join(lstMbr)
        lstSplit = mbr_exp.get('splitters', [])
        reSplit = concat_re(lstSplit)
        # Split member deref per-lang (-> and :: in PHP and C++) - use base if
        # not found
        arrMbrParts = list(filter(None, re.split(reSplit, strMbrExp)))
        # print('arrMbrParts=%s' % arrMbrParts)
        return arrMbrParts
|
"""
TxHandler
This class should be instantiated once per client.
It takes an InvCollector and a TxStore, and an optional Tx validator.
When a new Tx object is noted by the InvCollector, this object will
fetch it, validate it, then store it in the TxStore and tell the
InvCollector to advertise it to other peers.
When a new peer comes online, invoke add_peer.
This object will then watch for mempool and getdata messages
and handle them appropriately.
"""
import asyncio
import logging
from pycoinnet.InvItem import InvItem, ITEM_TYPE_TX
class TxHandler:
    def __init__(self, inv_collector, tx_store, tx_validator=lambda tx: True):
        """Wire the handler to an InvCollector and a TxStore.

        :param inv_collector: source of newly-seen inventory items
        :param tx_store: mapping of tx hash -> transaction
        :param tx_validator: callable(tx) -> bool; default accepts all
        """
        self.inv_collector = inv_collector
        self.q = inv_collector.new_inv_item_queue()
        self.tx_store = tx_store
        # Background task that fetches/validates/stores announced txs.
        self._validator_handle = asyncio.Task(self._run(tx_validator))
    def add_peer(self, peer):
        """
        Call this method when a peer comes online and you want to keep its mempool
        in sync with this mempool.
        """
        @asyncio.coroutine
        def _run_getdata(next_message):
            # Serve "getdata" requests from our tx_store; reply "notfound"
            # for anything we don't have.
            while True:
                name, data = yield from next_message()
                inv_items = data["items"]
                not_found = []
                txs_found = []
                for inv_item in inv_items:
                    if inv_item.item_type != ITEM_TYPE_TX:
                        continue
                    tx = self.tx_store.get(inv_item.data)
                    if tx:
                        txs_found.append(tx)
                    else:
                        not_found.append(inv_item)
                if not_found:
                    peer.send_msg("notfound", items=not_found)
                for tx in txs_found:
                    peer.send_msg("tx", tx=tx)
        @asyncio.coroutine
        def _run_mempool(next_message):
            # Answer a single "mempool" request with an inv of everything
            # we currently hold.
            try:
                name, data = yield from next_message()
                inv_items = [InvItem(ITEM_TYPE_TX, tx.hash()) for tx in self.tx_store.values()]
                logging.debug("sending inv of %d item(s) in response to mempool", len(inv_items))
                if len(inv_items) > 0:
                    peer.send_msg("inv", items=inv_items)
                # then we exit. We don't need to handle this message more than once.
            except EOFError:
                # Peer went away; nothing to do.
                pass
        next_getdata = peer.new_get_next_message_f(lambda name, data: name == 'getdata')
        peer.add_task(_run_getdata(next_getdata))
        next_mempool = peer.new_get_next_message_f(lambda name, data: name == 'mempool')
        peer.add_task(_run_mempool(next_mempool))
        # Ask the new peer for its mempool so the stores converge.
        peer.send_msg("mempool")
    def add_tx(self, tx):
        """
        Add a transaction to the mempool and advertise it to peers so it can
        propagate throughout the network.
        """
        the_hash = tx.hash()
        if the_hash not in self.tx_store:
            self.tx_store[the_hash] = tx
            self.inv_collector.advertise_item(InvItem(ITEM_TYPE_TX, the_hash))
    @asyncio.coroutine
    def _run(self, tx_validator):
        # Drain the inv-item queue forever, delegating fetch/validate/store
        # of each announced transaction to the inv_collector.
        while True:
            inv_item = yield from self.q.get()
            if inv_item.item_type != ITEM_TYPE_TX:
                continue
            self.inv_collector.fetch_validate_store_item_async(inv_item, self.tx_store, tx_validator)
|
from math import sqrt, exp
from CoolProp.CoolProp import Props
import numpy as np
import matplotlib.pyplot as plt
from scipy.odr import *
from math import log
# Lennard-Jones energy parameters epsilon/k_B [K], keyed by CoolProp fluid
# name. Consolidated into single literals instead of scattered post-hoc
# assignments.
E_K = {'REFPROP-Ammonia': 386,
       'REFPROP-Argon': 143.2,
       'REFPROP-Propane': 263.88,
       'REFPROP-R32': 289.65,
       'REFPROP-R245fa': 329.72,
       }
# Lennard-Jones size parameters sigma [nm], same keys as E_K.
SIGMA = {'REFPROP-Ammonia': 0.2957,
         'REFPROP-Argon': 0.335,
         'REFPROP-Propane': 0.49748,
         'REFPROP-R32': 0.4098,
         'REFPROP-R245fa': 0.5529,
         }
def viscosity_dilute(fluid, T, e_k, sigma):
    """
    T in [K], e_k in [K], sigma in [nm]
    viscosity returned is in [Pa-s]
    """
    Tstar = T / e_k
    molemass = Props(fluid, 'molemass')
    if fluid in ('Propane', 'REFPROP-Propane'):
        # Propane-specific dilute-gas correlation (a[2] and a[4] are zero).
        a = [0.25104574, -0.47271238, 0, 0.060836515, 0]
        lnT = log(Tstar)
        theta_star = exp(a[0] + a[1] * lnT + a[3] * lnT ** 3)
        return 0.021357 * sqrt(molemass * T) / (sigma ** 2 * theta_star) / 1e6
    # From Neufeld, 1972, Journal of Chemical Physics - checked coefficients
    omega_2_2 = (1.16145 * Tstar ** -0.14874
                 + 0.52487 * exp(-0.77320 * Tstar)
                 + 2.16178 * exp(-2.43787 * Tstar))
    # Using the leading constant from McLinden, 2000 since the leading term
    # from Huber 2003 gives crazy values
    return 26.692e-3 * sqrt(molemass * T) / (sigma ** 2 * omega_2_2) / 1e6
def viscosity_linear(fluid, T, rho, e_k, sigma):
    """
    Implements the method of Vogel 1998 (Propane) for the linear part
    """
    N_A = 6.02214129e23  # Avogadro constant [1/mol]
    molemass = Props(fluid, 'molemass')
    Tstar = T / e_k
    b = [-19.572881, 219.73999, -1015.3226, 2471.01251, -3375.1717,
         2491.6597, -787.26086, 14.085455, -0.34664158]
    # Polynomial part uses b[0]..b[6]; the last two coefficients carry
    # their own exponents.
    poly = sum(b[i] * Tstar ** (-0.25 * i) for i in range(7))
    B_eta_star = poly + b[7] * Tstar ** -2.5 + b[8] * Tstar ** -5.5  # [no units]
    B_eta = N_A * (sigma / 1e9) ** 3 * B_eta_star  # [m3/mol]
    return viscosity_dilute(fluid, T, e_k, sigma) * B_eta * rho / molemass * 1000
from PDSim.misc.datatypes import Collector
# Collectors accumulate sample points via the << operator.
RHO = Collector()
TT = Collector()
DELTA = Collector()
TAU = Collector()
VV = Collector()     # total viscosity from REFPROP
VV0 = Collector()    # dilute-gas contribution
VV1 = Collector()    # linear (density-initial) contribution
VVH = Collector()    # residual: total minus dilute minus linear
fluid = 'REFPROP-R32'
Tc = Props(fluid, 'Tcrit')
rhoc = Props(fluid, 'rhocrit')
# Sample the liquid region on a T x rho grid up to just below critical.
for T in np.linspace(290, Props(fluid, 'Tcrit') - 0.1, 100):
    rhoV = Props('D', 'T', T, 'Q', 1, fluid)
    rhoL = Props('D', 'T', T, 'Q', 0, fluid)
    rhomax = Props('D', 'T', Props(fluid, 'Tmin'), 'Q', 0, fluid)
    for rho in list(np.linspace(rhoL, rhomax, 100)):  # +list(np.linspace(rhoV,0.0001,100)):
        # for rho in list(np.linspace(rhoV,0.0001,100)):
        mu_0 = viscosity_dilute(fluid, T, E_K[fluid], SIGMA[fluid])
        mu_1 = viscosity_linear(fluid, T, rho, E_K[fluid], SIGMA[fluid])
        mu = Props('V', 'T', T, 'D', rho, fluid)
        VV << mu
        VV0 << mu_0
        VV1 << mu_1
        VVH << mu - mu_0 - mu_1
        TT << T
        RHO << rho
        DELTA << rho / rhoc   # reduced density
        TAU << Tc / T         # inverse reduced temperature
def f_RHS(E, DELTA_TAU, VV):
    """Fit model: sum over i in 2..4, j in 0..2 of E[k] * delta^i / tau^j.

    DELTA_TAU is a 2 x N array (row 0: delta, row 1: tau); VV holds the
    target values, used only for the printed mean-percent-error diagnostic.
    """
    delta = DELTA_TAU[0, :]
    tau = DELTA_TAU[1, :]
    total = 0
    idx = 0
    for i in range(2, 5):
        for j in range(3):
            total += E[idx] * delta**i / tau**j
            idx += 1
    # Diagnostic: mean absolute percentage deviation from the targets.
    print('%s %%' % np.mean(np.abs(((total / VV - 1) * 100))))
    return total
# NOTE(review): log_muH is computed but never used below -- confirm intent.
log_muH = np.log(VVH.v().T)
# Explanatory variables: 2 x N array of (delta, tau) pairs.
x = np.c_[DELTA.v().T, TAU.v().T].T
y = VVH.v()
linear = Model(f_RHS, extra_args=(y,))
mydata = Data(x, y)
# NOTE(review): beta0 has 17 entries but f_RHS only consumes 9 (i=2..4,
# j=0..2) -- the extras are inert; confirm this is intentional.
myodr = ODR(mydata, linear, beta0=np.array([0.1] * 17),)
myoutput = myodr.run()
E = myoutput.beta
print(E)
# Fitted vs. target residual viscosity.
plt.plot(y.T, f_RHS(E, x, y))
plt.show()
|
import CatalogItem
from toontown.toonbase import ToontownGlobals
from toontown.toonbase import TTLocalizer
from otp.otpbase import OTPLocalizer
from direct.interval.IntervalGlobal import *
from direct.gui.DirectGui import *
class CatalogNametagItem(CatalogItem.CatalogItem):
    """Catalog item that changes the style/font of a toon's nametag.

    ``nametagStyle`` selects the font; style 100 is the unpaid/default
    nametag. Unreachable code after the returns in ``getName`` and
    ``getBasePrice`` (leftover per-style branches) has been removed;
    behavior is unchanged.
    """
    sequenceNumber = 0

    def makeNewItem(self, nametagStyle):
        # Record the chosen style before base-class initialization.
        self.nametagStyle = nametagStyle
        CatalogItem.CatalogItem.makeNewItem(self)

    def getPurchaseLimit(self):
        # Each nametag style can only be purchased once.
        return 1

    def reachedPurchaseLimit(self, avatar):
        # Already somewhere in the purchase/gift/award pipeline?
        if self in avatar.onOrder or self in avatar.mailboxContents or self in avatar.onGiftOrder or self in avatar.awardMailboxContents or self in avatar.onAwardOrder:
            return 1
        # Already wearing this style?
        if avatar.nametagStyle == self.nametagStyle:
            return 1
        return 0

    def getAcceptItemErrorText(self, retcode):
        if retcode == ToontownGlobals.P_ItemAvailable:
            return TTLocalizer.CatalogAcceptNametag
        return CatalogItem.CatalogItem.getAcceptItemErrorText(self, retcode)

    def saveHistory(self):
        return 1

    def getTypeName(self):
        return TTLocalizer.NametagTypeName

    def getName(self):
        """Return the localized display name for this nametag style."""
        if self.nametagStyle == 100:
            name = TTLocalizer.UnpaidNameTag
        else:
            name = TTLocalizer.NametagFontNames[self.nametagStyle]
            # Some locales put the "Nametag" label before the font name.
            if TTLocalizer.NametagReverse:
                name = TTLocalizer.NametagLabel + name
            else:
                name = name + TTLocalizer.NametagLabel
        return name

    def recordPurchase(self, avatar, optional):
        if avatar:
            avatar.b_setNametagStyle(self.nametagStyle)
        return ToontownGlobals.P_ItemAvailable

    def getPicture(self, avatar):
        """Build the catalog preview: the local toon's name in this font."""
        frame = self.makeFrame()
        if self.nametagStyle == 100:
            inFont = ToontownGlobals.getToonFont()
        else:
            inFont = ToontownGlobals.getNametagFont(self.nametagStyle)
        nameTagDemo = DirectLabel(parent=frame, relief=None, pos=(0, 0, 0.24), scale=0.5, text=localAvatar.getName(), text_fg=(1.0, 1.0, 1.0, 1), text_shadow=(0, 0, 0, 1), text_font=inFont, text_wordwrap=9)
        self.hasPicture = True
        return (frame, None)

    def output(self, store = -1):
        return 'CatalogNametagItem(%s%s)' % (self.nametagStyle, self.formatOptionalData(store))

    def compareTo(self, other):
        return self.nametagStyle - other.nametagStyle

    def getHashContents(self):
        return self.nametagStyle

    def getBasePrice(self):
        # Flat price for every style (the old per-style branches below the
        # return were unreachable and have been removed).
        return 500

    def decodeDatagram(self, di, versionNumber, store):
        CatalogItem.CatalogItem.decodeDatagram(self, di, versionNumber, store)
        self.nametagStyle = di.getUint16()

    def encodeDatagram(self, dg, store):
        CatalogItem.CatalogItem.encodeDatagram(self, dg, store)
        dg.addUint16(self.nametagStyle)

    def isGift(self):
        return 0

    def getBackSticky(self):
        itemType = 1
        numSticky = 4
        return (itemType, numSticky)
|
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        # Changing field 'Note.content' from TextField to HTMLField
        db.alter_column('notes_note', 'content', self.gf('tinymce.models.HTMLField')())
    def backwards(self, orm):
        # Changing field 'Note.content' back to a plain TextField
        db.alter_column('notes_note', 'content', self.gf('django.db.models.fields.TextField')())
    # Frozen ORM snapshot generated by South -- do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'notes.note': {
            'Meta': {'ordering': "['-last_updated']", 'object_name': 'Note'},
            'content': ('tinymce.models.HTMLField', [], {'blank': 'True'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
            'public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        }
    }
    complete_apps = ['notes']
|
import vim # pylint: disable=import-error
import shlex
import os
import clang.cindex as cindex
# Debug log written next to the current working directory when DEBUG_FILE is on.
LOG_FILENAME = "grayout.log"
# Cache of cindex.CompilationDatabase objects keyed by compile_commands.json path.
DB_CACHE = {}
# Cache of (args, cwd) tuples keyed by absolute source filename.
ARGS_CACHE = {}
DEBUG = False
DEBUG_FILE = False
def printdebug(*args, info=False):
    """Join *args* like print() and emit when debugging is enabled.

    With DEBUG on, everything is printed; otherwise only info=True
    messages appear (prefixed). DEBUG_FILE additionally appends every
    message to LOG_FILENAME.
    """
    text = " ".join(str(arg) for arg in args)
    if DEBUG:
        print(text)
    elif info:
        print("grayout.vim:", text)
    if DEBUG_FILE:
        with open(LOG_FILENAME, "a") as f:
            f.write(text + "\n")
def find_config_file(path, searchname):
    """Walk upward from *path* to the filesystem root looking for *searchname*.

    Returns the full path of the first match, or None if the root is
    reached without finding the file.
    """
    rootpath = os.path.abspath(os.sep)  # should work this way on windows, too
    while True:
        candidate = os.path.join(path, searchname)
        printdebug("Searching", candidate)
        if os.path.isfile(candidate):
            printdebug("Found config file", candidate)
            return candidate
        if path == rootpath:
            printdebug("No config file found")
            return None
        path = os.path.dirname(path)
def get_current_args():
    """Returns a tuple (filename, compiler args).

    Resolution order: per-file cache, compile_commands.json, .grayout.conf,
    then the g:grayout_default_args vim variable. The result is cached as
    (args, cwd) keyed by the real path of the current buffer.
    """
    filename = os.path.realpath(vim.current.buffer.name)
    dirname = os.path.dirname(filename)
    cwd = ""
    printdebug("Querying arguments for", filename)
    # Cached entries were already run through prepare_args().
    args, cwd = ARGS_CACHE.get(filename, (None, None))
    if args is not None:
        printdebug("Found file in cache")
        return filename, args
    args, cwd = load_compilation_database(filename, dirname)
    if args is None:
        args, cwd = load_grayout_conf(dirname)
        if args is not None:
            printdebug("Using .grayout.conf", info=True)
        else:
            args = list(vim.eval("g:grayout_default_args"))
            printdebug("Falling back to `g:grayout_default_args`", info=True)
    args = prepare_args(args)
    ARGS_CACHE[filename] = (args, cwd)
    return filename, args
def load_compilation_database(filename, dirname):
    """Look up the compiler arguments for *filename* in a compile_commands.json.

    Returns a tuple (args, cwd), or (None, None) when no database exists
    or it has no entry for the file. Databases are cached in DB_CACHE.
    """
    global DB_CACHE
    cfg = find_config_file(dirname, "compile_commands.json")
    if cfg:
        db: cindex.CompilationDatabase = DB_CACHE.get(cfg, None)
        if db is None:
            cfgdir = os.path.dirname(cfg)
            db = cindex.CompilationDatabase.fromDirectory(cfgdir)
            DB_CACHE[cfg] = db
            printdebug("CompilationDatabase loaded and stored in cache")
        else:
            printdebug("CompilationDatabase found in cache")
        # getCompileCommands() yields no commands when the file has no
        # entry -- guard BEFORE indexing (the old code did `[0]` first,
        # which raised instead of falling through to the (None, None) path).
        cmds = db.getCompileCommands(filename)
        if cmds:
            cmd: cindex.CompileCommand = cmds[0]
            printdebug(cmd.directory, cmd.filename)
            # Strip last argument because it's the source filename
            return list(cmd.arguments)[:-1], cmd.directory
    printdebug("No command found in database")
    return None, None
def load_grayout_conf(dirname):
    """Returns a tuple (args, cwd) from the nearest .grayout.conf, else (None, None)."""
    cfg = find_config_file(dirname, ".grayout.conf")
    if not cfg:
        return None, None
    with open(cfg, "r") as f:
        content = f.read()
    return shlex.split(content), os.path.dirname(cfg)
def prepare_args(args):
    """Strip output/mode arguments that would break a syntax-only run."""
    # (flag, whether the flag consumes a following value)
    for flag, takes_value in (("-o", True), ("-c", False),
                              ("-S", False), ("-v", False)):
        strip_arg(args, flag, takes_value)
    return args
def strip_arg(args, name, stripnext):
    """Remove the first occurrence of *name* from *args* in place.

    When *stripnext* is true, the argument following the flag (e.g. the
    filename after "-o") is removed as well. Absent flags are a no-op.
    """
    if name not in args:
        return
    pos = args.index(name)
    del args[pos]
    if stripnext and pos < len(args):
        del args[pos]
|
"""
heat exchangers
"""
from math import log, ceil
import pandas as pd
import numpy as np
from cea.constants import HEAT_CAPACITY_OF_WATER_JPERKGK
from cea.technologies.constants import MAX_NODE_FLOW
from cea.analysis.costs.equations import calc_capex_annualized, calc_opex_annualized
__author__ = "Thuy-An Nguyen"
__copyright__ = "Copyright 2015, Architecture and Building Systems - ETH Zurich"
__credits__ = ["Thuy-An Nguyen", "Tim Vollrath", "Jimeno A. Fonseca"]
__license__ = "MIT"
__version__ = "0.1"
__maintainer__ = "Daren Thomas"
__email__ = "cea@arch.ethz.ch"
__status__ = "Production"
def calc_Cinv_HEX(Q_design_W, locator, config, technology_type):
    """Investment cost of a heat exchanger (based on A+W cost of oil boilers).

    :param Q_design_W: design load of the heat exchanger [W]
    :param locator: input locator used to find the conversion-systems database
    :param config: CEA configuration (unused here, kept for interface parity)
    :param technology_type: code of the HEX technology in the database
    :return: tuple (Capex_a_HEX_USD, Opex_fixed_HEX_USD, Capex_HEX_USD) —
             annualized capex, fixed yearly opex and total investment cost
    """
    if Q_design_W <= 0:
        return 0.0, 0.0, 0.0
    db = pd.read_excel(locator.get_database_conversion_systems(), sheet_name="HEX")
    db = db[db['code'] == technology_type]
    # Loads below the smallest capacity in the database are costed at
    # that minimum capacity.
    smallest_cap = db.iloc[0]['cap_min']
    if Q_design_W < smallest_cap:
        Q_design_W = smallest_cap
    row = db[(db['cap_min'] <= Q_design_W) & (db['cap_max'] > Q_design_W)].iloc[0]
    # Cost curve: a + b*Q^c + (d + e*Q)*ln(Q)
    InvC = (row['a'] + row['b'] * Q_design_W ** row['c']
            + (row['d'] + row['e'] * Q_design_W) * log(Q_design_W))
    Capex_a_HEX_USD = calc_capex_annualized(InvC, row['IR_%'], row['LT_yr'])
    Opex_fixed_HEX_USD = InvC * (row['O&M_%'] / 100)
    return Capex_a_HEX_USD, Opex_fixed_HEX_USD, InvC
def calc_Cinv_HEX_hisaka(network_info):
    """
    Calculates costs of all substation heat exchangers in a network.
    Used in thermal_network_optimization.

    :param network_info: network data object exposing the locator, the
        network type/name, building names and disconnected-building indices
    :return: tuple (Capex_a, Opex_a_fixed) — annualized capex and fixed
        yearly opex summed over all substation heat exchangers
    """
    ## read in cost values from database
    HEX_prices = pd.read_excel(network_info.locator.get_database_conversion_systems(),
                               sheet_name="HEX", index_col=0)
    a = HEX_prices['a']['District substation heat exchanger']
    b = HEX_prices['b']['District substation heat exchanger']
    c = HEX_prices['c']['District substation heat exchanger']
    d = HEX_prices['d']['District substation heat exchanger']
    e = HEX_prices['e']['District substation heat exchanger']
    Inv_IR = HEX_prices['IR_%']['District substation heat exchanger']
    Inv_LT = HEX_prices['LT_yr']['District substation heat exchanger']
    Inv_OM = HEX_prices['O&M_%']['District substation heat exchanger'] / 100
    ## list node id of all substations
    # read in nodes list
    all_nodes = pd.read_csv(network_info.locator.get_thermal_network_node_types_csv_file(network_info.network_type,
                                                                                         network_info.network_name))
    Capex_a = 0.0
    Opex_a_fixed = 0.0
    substation_node_id_list = []
    # add buildings to node id list
    for building in network_info.building_names:
        # check if building is connected to network
        if building not in network_info.building_names[network_info.disconnected_buildings_index]:
            # NOTE(review): assumes exactly one node row matches the building;
            # int() raises otherwise — confirm with node file format.
            node_id = int(np.where(all_nodes['Building'] == building)[0])
            substation_node_id_list.append(all_nodes['Name'][node_id])
    # add plants to node id list
    plant_id_list = np.where(all_nodes['Type'] == 'Plant')[0]
    # find plant nodes
    for plant_id in plant_id_list:
        substation_node_id_list.append('NODE' + str(plant_id))
    # Nominal node mass flows are the same file for every substation, so
    # read it once here instead of once per loop iteration (was re-read
    # inside the loop below).
    node_flows = pd.read_csv(
        network_info.locator.get_nominal_node_mass_flow_csv_file(network_info.network_type,
                                                                 network_info.network_name))
    ## calculate costs of hex at substations
    for node_id in substation_node_id_list:
        # find design condition node mcp
        node_flow = max(node_flows[node_id])
        if node_flow > 0:
            # if the Q_design is below the lowest capacity available for the technology, then it is replaced by the least
            # capacity for the corresponding technology from the database # TODO: add minimum capacity to cost function
            # Split into several HEXs if flows are too high
            if node_flow <= MAX_NODE_FLOW:
                mcp_sub = node_flow * HEAT_CAPACITY_OF_WATER_JPERKGK
                Capex_substation_hex = a + b * mcp_sub ** c + d * np.log(mcp_sub) + e * mcp_sub * np.log(mcp_sub)
            else:
                # we need to split into several identical HEXs; all units
                # are equal, so cost is number_of_HEXs times the unit cost
                number_of_HEXs = int(ceil(node_flow / MAX_NODE_FLOW))
                nodeflow_nom = node_flow / number_of_HEXs
                mcp_sub = nodeflow_nom * HEAT_CAPACITY_OF_WATER_JPERKGK
                Capex_substation_hex = number_of_HEXs * (
                    a + b * mcp_sub ** c + d * np.log(mcp_sub) + e * mcp_sub * np.log(mcp_sub))
            Capex_a_substation_hex = calc_capex_annualized(Capex_substation_hex, Inv_IR, Inv_LT)
            Opex_fixed_substation_hex = Capex_substation_hex * Inv_OM
            # aggregate all substation costs in a network
            Capex_a = Capex_a + Capex_a_substation_hex
            Opex_a_fixed = Opex_a_fixed + Opex_fixed_substation_hex
    return Capex_a, Opex_a_fixed
|
import unittest
import sqlite3 as sqlite
def func_returntext():
    """Scalar SQL function fixture: returns a plain byte string."""
    return "foo"
def func_returnunicode():
    """Scalar SQL function fixture: returns a unicode string (Python 2)."""
    return u"bar"
def func_returnint():
    """Scalar SQL function fixture: returns an int."""
    return 42
def func_returnfloat():
    """Scalar SQL function fixture: returns a float."""
    return 3.14
def func_returnnull():
    """Scalar SQL function fixture: returns None (SQL NULL)."""
    return None
def func_returnblob():
    """Scalar SQL function fixture: returns a blob (Python 2 buffer)."""
    return buffer("blob")
def func_raiseexception():
    """Scalar SQL function fixture: deliberately raises ZeroDivisionError."""
    5 // 0
def func_isstring(v):
    """Report whether SQLite passed the value in as unicode (Python 2)."""
    return type(v) is unicode
def func_isint(v):
    """Report whether SQLite passed the value in as an int."""
    return type(v) is int
def func_isfloat(v):
    """Report whether SQLite passed the value in as a float."""
    return type(v) is float
def func_isnone(v):
    """Report whether SQLite passed the value in as None (SQL NULL)."""
    return type(v) is type(None)
def func_isblob(v):
    """Report whether SQLite passed the value in as a blob (Python 2 buffer)."""
    return type(v) is buffer
class AggrNoStep:
    """Aggregate fixture deliberately missing the step() method."""
    def __init__(self):
        pass
    def finalize(self):
        return 1
class AggrNoFinalize:
    """Aggregate fixture deliberately missing the finalize() method."""
    def __init__(self):
        pass
    def step(self, x):
        pass
class AggrExceptionInInit:
    """Aggregate fixture whose constructor raises ZeroDivisionError."""
    def __init__(self):
        5 // 0
    def step(self, x):
        pass
    def finalize(self):
        pass
class AggrExceptionInStep:
    """Aggregate fixture whose step() raises ZeroDivisionError."""
    def __init__(self):
        pass
    def step(self, x):
        5 // 0
    def finalize(self):
        return 42
class AggrExceptionInFinalize:
    """Aggregate fixture whose finalize() raises ZeroDivisionError."""
    def __init__(self):
        pass
    def step(self, x):
        pass
    def finalize(self):
        5 // 0
class AggrCheckType:
    """Aggregate fixture: step(type_name, value) records (as 0/1) whether
    the value arrived with the expected Python 2 type."""
    def __init__(self):
        self.val = None
    def step(self, whichType, val):
        # Map the type-name argument to the expected Python 2 type
        # (unicode and buffer do not exist in Python 3).
        theType = {"str": unicode, "int": int, "float": float, "None": type(None), "blob": buffer}
        self.val = int(theType[whichType] is type(val))
    def finalize(self):
        return self.val
class AggrSum:
    """Aggregate fixture: accumulates its inputs into a float sum."""
    def __init__(self):
        # Start from a float so integer inputs still sum to a float.
        self.val = 0.0
    def step(self, val):
        self.val = self.val + val
    def finalize(self):
        return self.val
class FunctionTests(unittest.TestCase):
    """Tests for user-defined scalar SQL functions registered with
    Connection.create_function() (Python 2 sqlite3/pysqlite)."""
    def setUp(self):
        # Fresh in-memory DB per test with all fixture functions registered.
        self.con = sqlite.connect(":memory:")
        self.con.create_function("returntext", 0, func_returntext)
        self.con.create_function("returnunicode", 0, func_returnunicode)
        self.con.create_function("returnint", 0, func_returnint)
        self.con.create_function("returnfloat", 0, func_returnfloat)
        self.con.create_function("returnnull", 0, func_returnnull)
        self.con.create_function("returnblob", 0, func_returnblob)
        self.con.create_function("raiseexception", 0, func_raiseexception)
        self.con.create_function("isstring", 1, func_isstring)
        self.con.create_function("isint", 1, func_isint)
        self.con.create_function("isfloat", 1, func_isfloat)
        self.con.create_function("isnone", 1, func_isnone)
        self.con.create_function("isblob", 1, func_isblob)
    def tearDown(self):
        self.con.close()
    def CheckFuncErrorOnCreate(self):
        # A negative arg count other than -1 must be rejected.
        try:
            self.con.create_function("bla", -100, lambda x: 2*x)
            self.fail("should have raised an OperationalError")
        except sqlite.OperationalError:
            pass
    def CheckFuncRefCount(self):
        # The connection must keep its own reference to the callback.
        def getfunc():
            def f():
                return 1
            return f
        f = getfunc()
        globals()["foo"] = f
        # self.con.create_function("reftest", 0, getfunc())
        self.con.create_function("reftest", 0, f)
        cur = self.con.cursor()
        cur.execute("select reftest()")
    def CheckFuncReturnText(self):
        cur = self.con.cursor()
        cur.execute("select returntext()")
        val = cur.fetchone()[0]
        self.assertEqual(type(val), unicode)
        self.assertEqual(val, "foo")
    def CheckFuncReturnUnicode(self):
        cur = self.con.cursor()
        cur.execute("select returnunicode()")
        val = cur.fetchone()[0]
        self.assertEqual(type(val), unicode)
        self.assertEqual(val, u"bar")
    def CheckFuncReturnInt(self):
        cur = self.con.cursor()
        cur.execute("select returnint()")
        val = cur.fetchone()[0]
        self.assertEqual(type(val), int)
        self.assertEqual(val, 42)
    def CheckFuncReturnFloat(self):
        cur = self.con.cursor()
        cur.execute("select returnfloat()")
        val = cur.fetchone()[0]
        self.assertEqual(type(val), float)
        # Allow for floating-point round-trip imprecision around 3.14.
        if val < 3.139 or val > 3.141:
            self.fail("wrong value")
    def CheckFuncReturnNull(self):
        cur = self.con.cursor()
        cur.execute("select returnnull()")
        val = cur.fetchone()[0]
        self.assertEqual(type(val), type(None))
        self.assertEqual(val, None)
    def CheckFuncReturnBlob(self):
        cur = self.con.cursor()
        cur.execute("select returnblob()")
        val = cur.fetchone()[0]
        self.assertEqual(type(val), buffer)
        self.assertEqual(val, buffer("blob"))
    def CheckFuncException(self):
        # Exceptions in the callback surface as OperationalError.
        cur = self.con.cursor()
        try:
            cur.execute("select raiseexception()")
            cur.fetchone()
            self.fail("should have raised OperationalError")
        except sqlite.OperationalError, e:
            self.assertEqual(e.args[0], 'user-defined function raised exception')
    def CheckParamString(self):
        cur = self.con.cursor()
        cur.execute("select isstring(?)", ("foo",))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)
    def CheckParamInt(self):
        cur = self.con.cursor()
        cur.execute("select isint(?)", (42,))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)
    def CheckParamFloat(self):
        cur = self.con.cursor()
        cur.execute("select isfloat(?)", (3.14,))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)
    def CheckParamNone(self):
        cur = self.con.cursor()
        cur.execute("select isnone(?)", (None,))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)
    def CheckParamBlob(self):
        cur = self.con.cursor()
        cur.execute("select isblob(?)", (buffer("blob"),))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)
class AggregateTests(unittest.TestCase):
    """Tests for user-defined aggregate functions registered with
    Connection.create_aggregate() (Python 2 sqlite3/pysqlite)."""
    def setUp(self):
        # One-row table covering every SQLite storage class.
        self.con = sqlite.connect(":memory:")
        cur = self.con.cursor()
        cur.execute("""
            create table test(
                t text,
                i integer,
                f float,
                n,
                b blob
                )
            """)
        cur.execute("insert into test(t, i, f, n, b) values (?, ?, ?, ?, ?)",
            ("foo", 5, 3.14, None, buffer("blob"),))
        self.con.create_aggregate("nostep", 1, AggrNoStep)
        self.con.create_aggregate("nofinalize", 1, AggrNoFinalize)
        self.con.create_aggregate("excInit", 1, AggrExceptionInInit)
        self.con.create_aggregate("excStep", 1, AggrExceptionInStep)
        self.con.create_aggregate("excFinalize", 1, AggrExceptionInFinalize)
        self.con.create_aggregate("checkType", 2, AggrCheckType)
        self.con.create_aggregate("mysum", 1, AggrSum)
    def tearDown(self):
        #self.cur.close()
        #self.con.close()
        pass
    def CheckAggrErrorOnCreate(self):
        # A negative arg count other than -1 must be rejected.
        try:
            self.con.create_function("bla", -100, AggrSum)
            self.fail("should have raised an OperationalError")
        except sqlite.OperationalError:
            pass
    def CheckAggrNoStep(self):
        # XXX it's better to raise OperationalError in order to stop
        # the query earlier.
        cur = self.con.cursor()
        try:
            cur.execute("select nostep(t) from test")
            self.fail("should have raised an OperationalError")
        except sqlite.OperationalError, e:
            self.assertEqual(e.args[0], "user-defined aggregate's 'step' method raised error")
    def CheckAggrNoFinalize(self):
        cur = self.con.cursor()
        try:
            cur.execute("select nofinalize(t) from test")
            val = cur.fetchone()[0]
            self.fail("should have raised an OperationalError")
        except sqlite.OperationalError, e:
            self.assertEqual(e.args[0], "user-defined aggregate's 'finalize' method raised error")
    def CheckAggrExceptionInInit(self):
        cur = self.con.cursor()
        try:
            cur.execute("select excInit(t) from test")
            val = cur.fetchone()[0]
            self.fail("should have raised an OperationalError")
        except sqlite.OperationalError, e:
            self.assertEqual(e.args[0], "user-defined aggregate's '__init__' method raised error")
    def CheckAggrExceptionInStep(self):
        cur = self.con.cursor()
        try:
            cur.execute("select excStep(t) from test")
            val = cur.fetchone()[0]
            self.fail("should have raised an OperationalError")
        except sqlite.OperationalError, e:
            self.assertEqual(e.args[0], "user-defined aggregate's 'step' method raised error")
    def CheckAggrExceptionInFinalize(self):
        cur = self.con.cursor()
        try:
            cur.execute("select excFinalize(t) from test")
            val = cur.fetchone()[0]
            self.fail("should have raised an OperationalError")
        except sqlite.OperationalError, e:
            self.assertEqual(e.args[0], "user-defined aggregate's 'finalize' method raised error")
    def CheckAggrCheckParamStr(self):
        cur = self.con.cursor()
        cur.execute("select checkType('str', ?)", ("foo",))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)
    def CheckAggrCheckParamInt(self):
        cur = self.con.cursor()
        cur.execute("select checkType('int', ?)", (42,))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)
    def CheckAggrCheckParamFloat(self):
        cur = self.con.cursor()
        cur.execute("select checkType('float', ?)", (3.14,))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)
    def CheckAggrCheckParamNone(self):
        cur = self.con.cursor()
        cur.execute("select checkType('None', ?)", (None,))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)
    def CheckAggrCheckParamBlob(self):
        cur = self.con.cursor()
        cur.execute("select checkType('blob', ?)", (buffer("blob"),))
        val = cur.fetchone()[0]
        self.assertEqual(val, 1)
    def CheckAggrCheckAggrSum(self):
        cur = self.con.cursor()
        cur.execute("delete from test")
        cur.executemany("insert into test(i) values (?)", [(10,), (20,), (30,)])
        cur.execute("select mysum(i) from test")
        val = cur.fetchone()[0]
        self.assertEqual(val, 60)
def authorizer_cb(action, arg1, arg2, dbname, source):
    """Authorizer fixture: allow only SELECT statements, and deny any
    access that touches table 't2' or column 'c2'."""
    if action != sqlite.SQLITE_SELECT:
        return sqlite.SQLITE_DENY
    if arg1 == 't2' or arg2 == 'c2':
        return sqlite.SQLITE_DENY
    return sqlite.SQLITE_OK
class AuthorizerTests(unittest.TestCase):
    """Tests for Connection.set_authorizer(): denied accesses must raise
    a DatabaseError whose text ends in 'prohibited'."""
    def setUp(self):
        self.con = sqlite.connect(":memory:")
        self.con.executescript("""
            create table t1 (c1, c2);
            create table t2 (c1, c2);
            insert into t1 (c1, c2) values (1, 2);
            insert into t2 (c1, c2) values (4, 5);
            """)
        # For our security test:
        self.con.execute("select c2 from t2")
        self.con.set_authorizer(authorizer_cb)
    def tearDown(self):
        pass
    def CheckTableAccess(self):
        # Whole-table access to t2 is denied by the authorizer.
        try:
            self.con.execute("select * from t2")
        except sqlite.DatabaseError, e:
            if not e.args[0].endswith("prohibited"):
                self.fail("wrong exception text: %s" % e.args[0])
            return
        self.fail("should have raised an exception due to missing privileges")
    def CheckColumnAccess(self):
        # Column c2 is denied even on the otherwise-allowed table t1.
        try:
            self.con.execute("select c2 from t1")
        except sqlite.DatabaseError, e:
            if not e.args[0].endswith("prohibited"):
                self.fail("wrong exception text: %s" % e.args[0])
            return
        self.fail("should have raised an exception due to missing privileges")
def suite():
    """Collect the Check* tests from all three test-case classes."""
    case_classes = (FunctionTests, AggregateTests, AuthorizerTests)
    return unittest.TestSuite(
        [unittest.makeSuite(cls, "Check") for cls in case_classes])
def test():
    """Run the whole suite with a plain text runner."""
    unittest.TextTestRunner().run(suite())
# Allow running this test module directly as a script.
if __name__ == "__main__":
    test()
|
""" %prog [options] modules_or_packages
Check that module(s) satisfy a coding standard (and more !).
%prog --help
Display this help message and exit.
%prog --help-msg <msg-id>[,<msg-id>]
Display help messages about given message identifiers and exit.
"""
from __future__ import print_function
import collections
import contextlib
import operator
import os
try:
import multiprocessing
except ImportError:
multiprocessing = None
import sys
import tokenize
import warnings
import six
import astroid
from astroid.__pkginfo__ import version as astroid_version
from astroid import modutils
from pylint import checkers
from pylint import interfaces
from pylint import reporters
from pylint import exceptions
from pylint import utils
from pylint import config
from pylint.__pkginfo__ import version
from pylint.reporters.ureports import nodes as report_nodes
MANAGER = astroid.MANAGER
def _get_new_args(message):
location = (
message.abspath,
message.path,
message.module,
message.obj,
message.line,
message.column,
)
return (
message.msg_id,
message.symbol,
location,
message.msg,
message.confidence,
)
def _get_python_path(filepath):
dirname = os.path.realpath(os.path.expanduser(filepath))
if not os.path.isdir(dirname):
dirname = os.path.dirname(dirname)
while True:
if not os.path.exists(os.path.join(dirname, "__init__.py")):
return dirname
old_dirname = dirname
dirname = os.path.dirname(dirname)
if old_dirname == dirname:
return os.getcwd()
return None
def _merge_stats(stats):
merged = {}
by_msg = collections.Counter()
for stat in stats:
message_stats = stat.pop('by_msg', {})
by_msg.update(message_stats)
for key, item in six.iteritems(stat):
if key not in merged:
merged[key] = item
else:
if isinstance(item, dict):
merged[key].update(item)
else:
merged[key] = merged[key] + item
merged['by_msg'] = by_msg
return merged
@contextlib.contextmanager
def _patch_sysmodules():
    """Temporarily install this module as sys.modules['__main__'].

    Context manager that permits running pylint, on Windows, with -m switch
    and with --jobs, as in 'python -2 -m pylint .. --jobs'.
    For more details why this is needed,
    see Python issue http://bugs.python.org/issue10845.
    """
    mock_main = __name__ != '__main__' # -m switch
    if mock_main:
        sys.modules['__main__'] = sys.modules[__name__]
    try:
        yield
    finally:
        # Drop the alias again so a real __main__ is not shadowed.
        if mock_main:
            sys.modules.pop('__main__')
MSGS = {
'F0001': ('%s',
'fatal',
'Used when an error occurred preventing the analysis of a \
module (unable to find it for instance).'),
'F0002': ('%s: %s',
'astroid-error',
'Used when an unexpected error occurred while building the '
'Astroid representation. This is usually accompanied by a '
'traceback. Please report such errors !'),
'F0010': ('error while code parsing: %s',
'parse-error',
'Used when an exception occurred while building the Astroid '
'representation which could be handled by astroid.'),
'I0001': ('Unable to run raw checkers on built-in module %s',
'raw-checker-failed',
'Used to inform that a built-in module has not been checked '
'using the raw checkers.'),
'I0010': ('Unable to consider inline option %r',
'bad-inline-option',
'Used when an inline option is either badly formatted or can\'t '
'be used inside modules.'),
'I0011': ('Locally disabling %s (%s)',
'locally-disabled',
'Used when an inline option disables a message or a messages '
'category.'),
'I0012': ('Locally enabling %s (%s)',
'locally-enabled',
'Used when an inline option enables a message or a messages '
'category.'),
'I0013': ('Ignoring entire file',
'file-ignored',
'Used to inform that the file will not be checked'),
'I0020': ('Suppressed %s (from line %d)',
'suppressed-message',
'A message was triggered on a line, but suppressed explicitly '
'by a disable= comment in the file. This message is not '
'generated for messages that are ignored due to configuration '
'settings.'),
'I0021': ('Useless suppression of %s',
'useless-suppression',
'Reported when a message is explicitly disabled for a line or '
'a block of code, but never triggered.'),
'I0022': ('Pragma "%s" is deprecated, use "%s" instead',
'deprecated-pragma',
'Some inline pylint options have been renamed or reworked, '
'only the most recent form should be used. '
'NOTE:skip-all is only available with pylint >= 0.26',
{'old_names': [('I0014', 'deprecated-disable-all')]}),
'E0001': ('%s',
'syntax-error',
'Used when a syntax error is raised for a module.'),
'E0011': ('Unrecognized file option %r',
'unrecognized-inline-option',
'Used when an unknown inline option is encountered.'),
'E0012': ('Bad option value %r',
'bad-option-value',
'Used when a bad value for an inline option is encountered.'),
}
if multiprocessing is not None:
    class ChildLinter(multiprocessing.Process):
        """Worker process used with --jobs > 1: pulls files/modules from a
        task queue, lints each one, and pushes the results back on a
        result queue."""
        def run(self):
            # pylint: disable=no-member, unbalanced-tuple-unpacking
            tasks_queue, results_queue, self._config = self._args
            self._config["jobs"] = 1  # Child does not parallelize any further.
            self._python3_porting_mode = self._config.pop(
                'python3_porting_mode', None)
            self._plugins = self._config.pop('plugins', None)
            # Run linter for received files/modules until the 'STOP' sentinel.
            for file_or_module in iter(tasks_queue.get, 'STOP'):
                try:
                    result = self._run_linter(file_or_module[0])
                    results_queue.put(result)
                except Exception as ex:
                    # Report the failure on stderr but keep the worker alive;
                    # an empty dict signals the parent that this file failed.
                    print("internal error with sending report for module %s" %
                          file_or_module, file=sys.stderr)
                    print(ex, file=sys.stderr)
                    results_queue.put({})
        def _run_linter(self, file_or_module):
            """Lint one file/module with a fresh PyLinter and return a
            picklable result tuple."""
            linter = PyLinter()
            # Register standard checkers.
            linter.load_default_plugins()
            # Load command line plugins.
            if self._plugins:
                linter.load_plugin_modules(self._plugins)
            linter.load_configuration_from_config(self._config)
            linter.set_reporter(reporters.CollectingReporter())
            # Enable the Python 3 checker mode. This option is
            # passed down from the parent linter up to here, since
            # the Python 3 porting flag belongs to the Run class,
            # instead of the Linter class.
            if self._python3_porting_mode:
                linter.python3_porting_mode()
            # Run the checks.
            linter.check(file_or_module)
            msgs = [_get_new_args(m) for m in linter.reporter.messages]
            return (file_or_module, linter.file_state.base_name, linter.current_name,
                    msgs, linter.stats, linter.msg_status)
class PyLinter(config.OptionsManagerMixIn,
utils.MessagesHandlerMixIn,
utils.ReportsHandlerMixIn,
checkers.BaseTokenChecker):
"""lint Python modules using external checkers.
This is the main checker controlling the other ones and the reports
generation. It is itself both a raw checker and an astroid checker in order
to:
* handle message activation / deactivation at the module level
* handle some basic but necessary stats'data (number of classes, methods...)
IDE plugin developers: you may have to call
`astroid.builder.MANAGER.astroid_cache.clear()` across runs if you want
to ensure the latest code version is actually checked.
"""
__implements__ = (interfaces.ITokenChecker, )
name = 'master'
priority = 0
level = 0
msgs = MSGS
    @staticmethod
    def make_options():
        """Return the master option definitions as a tuple of
        (option-name, optdict) pairs consumed by the options machinery."""
        return (('ignore',
                 {'type' : 'csv', 'metavar' : '<file>[,<file>...]',
                  'dest' : 'black_list', 'default' : ('CVS',),
                  'help' : 'Add files or directories to the blacklist. '
                           'They should be base names, not paths.'}),
                ('ignore-patterns',
                 {'type' : 'regexp_csv', 'metavar' : '<pattern>[,<pattern>...]',
                  'dest' : 'black_list_re', 'default' : (),
                  'help' : 'Add files or directories matching the regex patterns to the'
                           ' blacklist. The regex matches against base names, not paths.'}),
                ('persistent',
                 {'default': True, 'type' : 'yn', 'metavar' : '<y_or_n>',
                  'level': 1,
                  'help' : 'Pickle collected data for later comparisons.'}),
                ('load-plugins',
                 {'type' : 'csv', 'metavar' : '<modules>', 'default' : (),
                  'level': 1,
                  'help' : 'List of plugins (as comma separated values of '
                           'python modules names) to load, usually to register '
                           'additional checkers.'}),
                ('output-format',
                 {'default': 'text', 'type': 'string', 'metavar' : '<format>',
                  'short': 'f',
                  'group': 'Reports',
                  'help' : 'Set the output format. Available formats are text,'
                           ' parseable, colorized, json and msvs (visual studio).'
                           'You can also give a reporter class, eg mypackage.mymodule.'
                           'MyReporterClass.'}),
                ('reports',
                 {'default': False, 'type' : 'yn', 'metavar' : '<y_or_n>',
                  'short': 'r',
                  'group': 'Reports',
                  'help' : 'Tells whether to display a full report or only the '
                           'messages'}),
                ('evaluation',
                 {'type' : 'string', 'metavar' : '<python_expression>',
                  'group': 'Reports', 'level': 1,
                  'default': '10.0 - ((float(5 * error + warning + refactor + '
                             'convention) / statement) * 10)',
                  'help' : 'Python expression which should return a note less '
                           'than 10 (10 is the highest note). You have access '
                           'to the variables errors warning, statement which '
                           'respectively contain the number of errors / '
                           'warnings messages and the total number of '
                           'statements analyzed. This is used by the global '
                           'evaluation report (RP0004).'}),
                ('score',
                 {'default': True, 'type': 'yn', 'metavar': '<y_or_n>',
                  'short': 's',
                  'group': 'Reports',
                  'help': 'Activate the evaluation score.'}),
                ('confidence',
                 {'type' : 'multiple_choice', 'metavar': '<levels>',
                  'default': '',
                  'choices': [c.name for c in interfaces.CONFIDENCE_LEVELS],
                  'group': 'Messages control',
                  'help' : 'Only show warnings with the listed confidence levels.'
                           ' Leave empty to show all. Valid levels: %s' % (
                               ', '.join(c.name for c in interfaces.CONFIDENCE_LEVELS),)}),
                ('enable',
                 {'type' : 'csv', 'metavar': '<msg ids>',
                  'short': 'e',
                  'group': 'Messages control',
                  'help' : 'Enable the message, report, category or checker with the '
                           'given id(s). You can either give multiple identifier '
                           'separated by comma (,) or put this option multiple time '
                           '(only on the command line, not in the configuration file '
                           'where it should appear only once). '
                           'See also the "--disable" option for examples. '}),
                ('disable',
                 {'type' : 'csv', 'metavar': '<msg ids>',
                  'short': 'd',
                  'group': 'Messages control',
                  'help' : 'Disable the message, report, category or checker '
                           'with the given id(s). You can either give multiple identifiers'
                           ' separated by comma (,) or put this option multiple times '
                           '(only on the command line, not in the configuration file '
                           'where it should appear only once).'
                           'You can also use "--disable=all" to disable everything first '
                           'and then reenable specific checks. For example, if you want '
                           'to run only the similarities checker, you can use '
                           '"--disable=all --enable=similarities". '
                           'If you want to run only the classes checker, but have no '
                           'Warning level messages displayed, use'
                           '"--disable=all --enable=classes --disable=W"'}),
                ('msg-template',
                 {'type' : 'string', 'metavar': '<template>',
                  'group': 'Reports',
                  'help' : ('Template used to display messages. '
                            'This is a python new-style format string '
                            'used to format the message information. '
                            'See doc for all details')
                 }),
                ('jobs',
                 {'type' : 'int', 'metavar': '<n-processes>',
                  'short': 'j',
                  'default': 1,
                  'help' : '''Use multiple processes to speed up Pylint.''',
                 }),
                ('unsafe-load-any-extension',
                 {'type': 'yn', 'metavar': '<yn>', 'default': False, 'hide': True,
                  'help': ('Allow loading of arbitrary C extensions. Extensions'
                           ' are imported into the active Python interpreter and'
                           ' may run arbitrary code.')}),
                ('extension-pkg-whitelist',
                 {'type': 'csv', 'metavar': '<pkg[,pkg]>', 'default': [],
                  'help': ('A comma-separated list of package or module names'
                           ' from where C extensions may be loaded. Extensions are'
                           ' loading into the active Python interpreter and may run'
                           ' arbitrary code')}),
                ('suggestion-mode',
                 {'type': 'yn', 'metavar': '<yn>', 'default': True,
                  'help': ('When enabled, pylint would attempt to guess common '
                           'misconfiguration and emit user-friendly hints instead '
                           'of false-positive error messages')}),
               )
option_groups = (
('Messages control', 'Options controlling analysis messages'),
('Reports', 'Options related to output formatting and reporting'),
)
    def __init__(self, options=(), reporter=None, option_groups=(),
                 pylintrc=None):
        """Initialize the linter.

        :param options: extra (name, optdict) option definitions from the caller
        :param reporter: reporter instance to install (default reporter is
            loaded later by load_default_plugins when omitted)
        :param option_groups: extra option groups for --help output
        :param pylintrc: path of the configuration file to use
        """
        # some stuff has to be done before ancestors initialization...
        #
        # messages store / checkers / reporter / astroid manager
        self.msgs_store = utils.MessagesStore()
        self.reporter = None
        self._reporter_name = None
        self._reporters = {}
        self._checkers = collections.defaultdict(list)
        self._pragma_lineno = {}
        self._ignore_file = False
        # visit variables
        self.file_state = utils.FileState()
        self.current_name = None
        self.current_file = None
        self.stats = None
        # init options
        self._external_opts = options
        self.options = options + PyLinter.make_options()
        self.option_groups = option_groups + PyLinter.option_groups
        self._options_methods = {
            'enable': self.enable,
            'disable': self.disable}
        self._bw_options_methods = {'disable-msg': self.disable,
                                    'enable-msg': self.enable}
        full_version = '%%prog %s, \nastroid %s\nPython %s' % (
            version, astroid_version, sys.version)
        utils.MessagesHandlerMixIn.__init__(self)
        utils.ReportsHandlerMixIn.__init__(self)
        super(PyLinter, self).__init__(
            usage=__doc__,
            version=full_version,
            config_file=pylintrc or config.PYLINTRC)
        checkers.BaseTokenChecker.__init__(self)
        # provided reports
        self.reports = (('RP0001', 'Messages by category',
                         report_total_messages_stats),
                        ('RP0002', '% errors / warnings by module',
                         report_messages_by_module_stats),
                        ('RP0003', 'Messages',
                         report_messages_stats),
                       )
        # The linter is itself a checker (for pragmas/file-level options).
        self.register_checker(self)
        self._dynamic_plugins = set()
        self._python3_porting_mode = False
        self._error_mode = False
        self.load_provider_defaults()
        if reporter:
            self.set_reporter(reporter)
def load_default_plugins(self):
checkers.initialize(self)
reporters.initialize(self)
# Make sure to load the default reporter, because
# the option has been set before the plugins had been loaded.
if not self.reporter:
self._load_reporter()
def load_plugin_modules(self, modnames):
"""take a list of module names which are pylint plugins and load
and register them
"""
for modname in modnames:
if modname in self._dynamic_plugins:
continue
self._dynamic_plugins.add(modname)
module = modutils.load_module_from_name(modname)
module.register(self)
def _load_reporter(self):
name = self._reporter_name.lower()
if name in self._reporters:
self.set_reporter(self._reporters[name]())
else:
try:
reporter_class = self._load_reporter_class()
except (ImportError, AttributeError):
raise exceptions.InvalidReporterError(name)
else:
self.set_reporter(reporter_class())
def _load_reporter_class(self):
qname = self._reporter_name
module = modutils.load_module_from_name(
modutils.get_module_part(qname))
class_name = qname.split('.')[-1]
reporter_class = getattr(module, class_name)
return reporter_class
    def set_reporter(self, reporter):
        """set the reporter used to display messages and reports

        Also gives the reporter a back-reference to this linter.
        """
        self.reporter = reporter
        reporter.linter = self
    def set_option(self, optname, value, action=None, optdict=None):
        """overridden from config.OptionsProviderMixin to handle some
        special options
        """
        # enable/disable (and their deprecated -msg forms) are handled by
        # calling the corresponding method for each id rather than storing
        # the value.
        if optname in self._options_methods or \
                optname in self._bw_options_methods:
            if value:
                try:
                    meth = self._options_methods[optname]
                except KeyError:
                    # deprecated "(dis|en)able-msg" spelling
                    meth = self._bw_options_methods[optname]
                    warnings.warn('%s is deprecated, replace it by %s' % (optname,
                                                                          optname.split('-')[0]),
                                  DeprecationWarning)
                value = utils._check_csv(value)
                if isinstance(value, (list, tuple)):
                    for _id in value:
                        meth(_id, ignore_unknown=True)
                else:
                    meth(value)
            return # no need to call set_option, disable/enable methods do it
        elif optname == 'output-format':
            self._reporter_name = value
            # If the reporters are already available, load
            # the reporter class.
            if self._reporters:
                self._load_reporter()
        try:
            checkers.BaseTokenChecker.set_option(self, optname,
                                                 value, action, optdict)
        except config.UnsupportedAction:
            print('option %s can\'t be read from config file' % \
                  optname, file=sys.stderr)
    def register_reporter(self, reporter_class):
        # Index the reporter class by its `name` attribute so that
        # --output-format can look it up later.
        self._reporters[reporter_class.name] = reporter_class
def report_order(self):
reports = sorted(self._reports, key=lambda x: getattr(x, 'name', ''))
try:
# Remove the current reporter and add it
# at the end of the list.
reports.pop(reports.index(self))
except ValueError:
pass
else:
reports.append(self)
return reports
# checkers manipulation methods ############################################
    def register_checker(self, checker):
        """register a new checker

        checker is an object implementing IRawChecker or / and IAstroidChecker

        Registers the checker's reports and options, loads its message
        definitions and defaults, and honours an optional `enabled` flag.
        """
        assert checker.priority <= 0, 'checker priority can\'t be >= 0'
        self._checkers[checker.name].append(checker)
        for r_id, r_title, r_cb in checker.reports:
            self.register_report(r_id, r_title, r_cb, checker)
        self.register_options_provider(checker)
        if hasattr(checker, 'msgs'):
            self.msgs_store.register_messages(checker)
        checker.load_defaults()
        # Register the checker, but disable all of its messages.
        # TODO(cpopa): we should have a better API for this.
        if not getattr(checker, 'enabled', True):
            self.disable(checker.name)
def disable_noerror_messages(self):
for msgcat, msgids in six.iteritems(self.msgs_store._msgs_by_category):
# enable only messages with 'error' severity and above ('fatal')
if msgcat in ['E', 'F']:
for msgid in msgids:
self.enable(msgid)
else:
for msgid in msgids:
self.disable(msgid)
def disable_reporters(self):
"""disable all reporters"""
for _reporters in six.itervalues(self._reports):
for report_id, _, _ in _reporters:
self.disable_report(report_id)
    def error_mode(self):
        """error mode: enable only errors; no reports, no persistent

        When python3-porting mode is also active, only the porting
        checker's errors remain enabled (plus whatever the config file's
        'disable' setting removes).
        """
        self._error_mode = True
        self.disable_noerror_messages()
        self.disable('miscellaneous')
        if self._python3_porting_mode:
            self.disable('all')
            for msg_id in self._checker_messages('python3'):
                if msg_id.startswith('E'):
                    self.enable(msg_id)
            # re-apply the config file's explicit 'disable' list on top
            config_parser = self.cfgfile_parser
            if config_parser.has_option('MESSAGES CONTROL', 'disable'):
                value = config_parser.get('MESSAGES CONTROL', 'disable')
                self.global_set_option('disable', value)
        else:
            self.disable('python3')
        self.set_option('reports', False)
        self.set_option('persistent', False)
        self.set_option('score', False)
    def python3_porting_mode(self):
        """Disable all other checkers and enable Python 3 warnings."""
        self.disable('all')
        self.enable('python3')
        if self._error_mode:
            # The error mode was activated, using the -E flag.
            # So we'll need to enable only the errors from the
            # Python 3 porting checker.
            for msg_id in self._checker_messages('python3'):
                if msg_id.startswith('E'):
                    self.enable(msg_id)
                else:
                    self.disable(msg_id)
        # re-apply an explicit 'disable' option from the config file so it
        # still wins over the blanket enable above
        config_parser = self.cfgfile_parser
        if config_parser.has_option('MESSAGES CONTROL', 'disable'):
            value = config_parser.get('MESSAGES CONTROL', 'disable')
            self.global_set_option('disable', value)
        self._python3_porting_mode = True
# block level option handling #############################################
#
# see func_block_disable_msg.py test case for expected behaviour
    def process_tokens(self, tokens):
        """process tokens from the current module to search for module/block
        level options (inline '# pylint: ...' pragmas in comments)
        """
        control_pragmas = {'disable', 'enable'}
        for (tok_type, content, start, _, _) in tokens:
            # pragmas only live in comments
            if tok_type != tokenize.COMMENT:
                continue
            match = utils.OPTION_RGX.search(content)
            if match is None:
                continue
            # whole-file suppression pragmas short-circuit everything
            if match.group(1).strip() == "disable-all" or \
                    match.group(1).strip() == 'skip-file':
                if match.group(1).strip() == "disable-all":
                    self.add_message('deprecated-pragma', line=start[0],
                                     args=('disable-all', 'skip-file'))
                self.add_message('file-ignored', line=start[0])
                self._ignore_file = True
                return
            # anything else must look like 'option=value'
            try:
                opt, value = match.group(1).split('=', 1)
            except ValueError:
                self.add_message('bad-inline-option', args=match.group(1).strip(),
                                 line=start[0])
                continue
            opt = opt.strip()
            if opt in self._options_methods or opt in self._bw_options_methods:
                try:
                    meth = self._options_methods[opt]
                except KeyError:
                    meth = self._bw_options_methods[opt]
                    # found a "(dis|en)able-msg" pragma deprecated suppression
                    self.add_message('deprecated-pragma', line=start[0],
                                     args=(opt, opt.replace('-msg', '')))
                for msgid in utils._splitstrip(value):
                    # Add the line where a control pragma was encountered.
                    if opt in control_pragmas:
                        self._pragma_lineno[msgid] = start[0]
                    try:
                        # 'disable=all' is the modern spelling of skip-file
                        if (opt, msgid) == ('disable', 'all'):
                            self.add_message('deprecated-pragma', line=start[0],
                                             args=('disable=all', 'skip-file'))
                            self.add_message('file-ignored', line=start[0])
                            self._ignore_file = True
                            return
                        meth(msgid, 'module', start[0])
                    except exceptions.UnknownMessageError:
                        self.add_message('bad-option-value', args=msgid, line=start[0])
            else:
                self.add_message('unrecognized-inline-option', args=opt, line=start[0])
# code checking methods ###################################################
def get_checkers(self):
"""return all available checkers as a list"""
return [self] + [c for _checkers in six.itervalues(self._checkers)
for c in _checkers if c is not self]
    def prepare_checkers(self):
        """return checkers needed for activated messages and reports"""
        if not self.config.reports:
            self.disable_reporters()
        # get needed checkers: a checker is kept if at least one of its
        # messages or reports is enabled
        neededcheckers = [self]
        for checker in self.get_checkers()[1:]:
            messages = set(msg for msg in checker.msgs
                           if self.is_message_enabled(msg))
            if (messages or
                    any(self.report_is_enabled(r[0]) for r in checker.reports)):
                neededcheckers.append(checker)
        # Sort checkers by priority
        neededcheckers = sorted(neededcheckers,
                                key=operator.attrgetter('priority'),
                                reverse=True)
        return neededcheckers
# pylint: disable=unused-argument
@staticmethod
def should_analyze_file(modname, path, is_argument=False):
"""Returns whether or not a module should be checked.
This implementation returns True for all python source file, indicating
that all files should be linted.
Subclasses may override this method to indicate that modules satisfying
certain conditions should not be linted.
:param str modname: The name of the module to be checked.
:param str path: The full path to the source code of the module.
:param bool is_argument: Whetter the file is an argument to pylint or not.
Files which respect this property are always
checked, since the user requested it explicitly.
:returns: True if the module should be checked.
:rtype: bool
"""
if is_argument:
return True
return path.endswith('.py')
# pylint: enable=unused-argument
    def check(self, files_or_modules):
        """main checking entry: check a list of files or modules from their
        name.
        """
        # initialize msgs_state now that all messages have been registered into
        # the store
        for msg in self.msgs_store.messages:
            if not msg.may_be_emitted():
                self._msgs_state[msg.msgid] = False
        # accept a single name as well as a list/tuple of names
        if not isinstance(files_or_modules, (list, tuple)):
            files_or_modules = (files_or_modules,)
        if self.config.jobs == 1:
            self._do_check(files_or_modules)
        else:
            # multi-job run: fan the work out to child linter processes
            with _patch_sysmodules():
                self._parallel_check(files_or_modules)
    def _get_jobs_config(self):
        """Build the option mapping to hand to child linters (parallel runs).

        Deprecated options and externally-registered options are filtered out.
        """
        child_config = collections.OrderedDict()
        filter_options = {'long-help'}
        filter_options.update((opt_name for opt_name, _ in self._external_opts))
        for opt_providers in six.itervalues(self._all_options):
            for optname, optdict, val in opt_providers.options_and_values():
                if optdict.get('deprecated'):
                    continue
                if optname not in filter_options:
                    child_config[optname] = utils._format_option_value(
                        optdict, val)
        # non-option state the children must inherit
        child_config['python3_porting_mode'] = self._python3_porting_mode
        child_config['plugins'] = self._dynamic_plugins
        return child_config
    def _parallel_task(self, files_or_modules):
        """Generator: spawn child linters, feed them files, yield results.

        Exits the process with status 32 after draining if receiving a
        result failed.
        """
        # Prepare configuration for child linters.
        child_config = self._get_jobs_config()
        children = []
        manager = multiprocessing.Manager()
        tasks_queue = manager.Queue()
        results_queue = manager.Queue()
        # Send files to child linters.
        expanded_files = self.expand_files(files_or_modules)
        # do not start more jobs than needed
        for _ in range(min(self.config.jobs, len(expanded_files))):
            child_linter = ChildLinter(args=(tasks_queue, results_queue,
                                             child_config))
            child_linter.start()
            children.append(child_linter)
        for files_or_module in expanded_files:
            path = files_or_module['path']
            tasks_queue.put([path])
        # collect results from child linters
        failed = False
        for _ in expanded_files:
            try:
                result = results_queue.get()
            except Exception as ex:
                print("internal error while receiving results from child linter",
                      file=sys.stderr)
                print(ex, file=sys.stderr)
                failed = True
                break
            yield result
        # Stop child linters and wait for their completion.
        for _ in range(self.config.jobs):
            tasks_queue.put('STOP')
        for child in children:
            child.join()
        if failed:
            print("Error occurred, stopping the linter.", file=sys.stderr)
            sys.exit(32)
    def _parallel_check(self, files_or_modules):
        """Run the check with multiple jobs and merge the children's results."""
        # Reset stats.
        self.open()
        all_stats = []
        module = None
        for result in self._parallel_task(files_or_modules):
            if not result:
                continue
            (
                _,
                self.file_state.base_name,
                module,
                messages,
                stats,
                msg_status
            ) = result
            # replay the child's messages through our reporter
            for msg in messages:
                msg = utils.Message(*msg)
                self.set_current_module(module)
                self.reporter.handle_message(msg)
            all_stats.append(stats)
            self.msg_status |= msg_status
        self.stats = _merge_stats(all_stats)
        # current_name ends up as the last module processed
        self.current_name = module
        # Insert stats data to local checkers.
        for checker in self.get_checkers():
            if checker is not self:
                checker.stats = self.stats
    def _do_check(self, files_or_modules):
        """Single-process check: open checkers, lint each module, close them."""
        walker = utils.PyLintASTWalker(self)
        _checkers = self.prepare_checkers()
        tokencheckers = [c for c in _checkers
                         if interfaces.implements(c, interfaces.ITokenChecker)
                         and c is not self]
        rawcheckers = [c for c in _checkers
                       if interfaces.implements(c, interfaces.IRawChecker)]
        # notify global begin
        for checker in _checkers:
            checker.open()
            if interfaces.implements(checker, interfaces.IAstroidChecker):
                walker.add_checker(checker)
        # build ast and check modules or packages
        for descr in self.expand_files(files_or_modules):
            modname, filepath, is_arg = descr['name'], descr['path'], descr['isarg']
            if not self.should_analyze_file(modname, filepath, is_argument=is_arg):
                continue
            self.set_current_module(modname, filepath)
            # get the module representation
            ast_node = self.get_ast(filepath, modname)
            if ast_node is None:
                continue
            # XXX to be correct we need to keep module_msgs_state for every
            # analyzed module (the problem stands with localized messages which
            # are only detected in the .close step)
            self.file_state = utils.FileState(descr['basename'])
            self._ignore_file = False
            # fix the current file (if the source file was not available or
            # if it's actually a c extension)
            self.current_file = ast_node.file # pylint: disable=maybe-no-member
            self.check_astroid_module(ast_node, walker, rawcheckers, tokencheckers)
            # warn about spurious inline messages handling
            spurious_messages = self.file_state.iter_spurious_suppression_messages(self.msgs_store)
            for msgid, line, args in spurious_messages:
                self.add_message(msgid, line, None, args)
        # notify global end
        self.stats['statement'] = walker.nbstatements
        for checker in reversed(_checkers):
            checker.close()
def expand_files(self, modules):
"""get modules and errors from a list of modules and handle errors
"""
result, errors = utils.expand_modules(modules, self.config.black_list,
self.config.black_list_re)
for error in errors:
message = modname = error["mod"]
key = error["key"]
self.set_current_module(modname)
if key == "fatal":
message = str(error["ex"]).replace(os.getcwd() + os.sep, '')
self.add_message(key, args=message)
return result
def set_current_module(self, modname, filepath=None):
"""set the name of the currently analyzed module and
init statistics for it
"""
if not modname and filepath is None:
return
self.reporter.on_set_current_module(modname, filepath)
self.current_name = modname
self.current_file = filepath or modname
self.stats['by_module'][modname] = {}
self.stats['by_module'][modname]['statement'] = 0
for msg_cat in six.itervalues(utils.MSG_TYPES):
self.stats['by_module'][modname][msg_cat] = 0
    def get_ast(self, filepath, modname):
        """return a ast(roid) representation for a module

        On failure a message is emitted and None is returned (implicitly).
        """
        try:
            return MANAGER.ast_from_file(filepath, modname, source=True)
        except astroid.AstroidSyntaxError as ex:
            # syntax errors carry the underlying SyntaxError in ex.error
            self.add_message('syntax-error',
                             line=getattr(ex.error, 'lineno', 0),
                             args=str(ex.error))
        except astroid.AstroidBuildingException as ex:
            self.add_message('parse-error', args=ex)
        except Exception as ex: # pylint: disable=broad-except
            # anything unexpected is an internal astroid error: show the
            # traceback and report it as such
            import traceback
            traceback.print_exc()
            self.add_message('astroid-error', args=(ex.__class__, ex))
    def check_astroid_module(self, ast_node, walker,
                             rawcheckers, tokencheckers):
        """Check a module from its astroid representation.

        Returns None on tokenization failure, False when the file is
        ignored via an inline pragma, True otherwise.
        """
        try:
            tokens = utils.tokenize_module(ast_node)
        except tokenize.TokenError as ex:
            self.add_message('syntax-error', line=ex.args[1][0], args=ex.args[0])
            return None
        if not ast_node.pure_python:
            self.add_message('raw-checker-failed', args=ast_node.name)
        else:
            #assert astroid.file.endswith('.py')
            # invoke ITokenChecker interface on self to fetch module/block
            # level options
            self.process_tokens(tokens)
            if self._ignore_file:
                return False
            # walk ast to collect line numbers
            self.file_state.collect_block_lines(self.msgs_store, ast_node)
            # run raw and tokens checkers
            for checker in rawcheckers:
                checker.process_module(ast_node)
            for checker in tokencheckers:
                checker.process_tokens(tokens)
        # generate events to astroid checkers
        walker.walk(ast_node)
        return True
# IAstroidChecker interface #################################################
def open(self):
"""initialize counters"""
self.stats = {'by_module' : {},
'by_msg' : {},
}
MANAGER.always_load_extensions = self.config.unsafe_load_any_extension
MANAGER.extension_package_whitelist.update(
self.config.extension_pkg_whitelist)
for msg_cat in six.itervalues(utils.MSG_TYPES):
self.stats[msg_cat] = 0
    def generate_reports(self):
        """close the whole package /module, it's time to make reports !

        if persistent run, pickle results for later comparison
        """
        # Display whatever messages are left on the reporter.
        self.reporter.display_messages(report_nodes.Section())
        if self.file_state.base_name is not None:
            # load previous results if any
            previous_stats = config.load_results(self.file_state.base_name)
            # XXX code below needs refactoring to be more reporter agnostic
            self.reporter.on_close(self.stats, previous_stats)
            if self.config.reports:
                sect = self.make_reports(self.stats, previous_stats)
            else:
                sect = report_nodes.Section()
            if self.config.reports:
                self.reporter.display_reports(sect)
            self._report_evaluation()
            # save results if persistent run
            if self.config.persistent:
                config.save_results(self.stats, self.file_state.base_name)
        else:
            # no module was actually analyzed: close with empty previous stats
            self.reporter.on_close(self.stats, {})
    def _report_evaluation(self):
        """make the global evaluation report"""
        # check with at least check 1 statements (usually 0 when there is a
        # syntax error preventing pylint from further processing)
        previous_stats = config.load_results(self.file_state.base_name)
        if self.stats['statement'] == 0:
            return
        # get a global note for the code by evaluating the user-configurable
        # 'evaluation' expression against the stats mapping; the expression
        # comes from the user's own configuration, not untrusted input
        evaluation = self.config.evaluation
        try:
            note = eval(evaluation, {}, self.stats) # pylint: disable=eval-used
        except Exception as ex: # pylint: disable=broad-except
            msg = 'An exception occurred while rating: %s' % ex
        else:
            self.stats['global_note'] = note
            msg = 'Your code has been rated at %.2f/10' % note
            # show the delta against the previous run when one is recorded
            pnote = previous_stats.get('global_note')
            if pnote is not None:
                msg += ' (previous run: %.2f/10, %+.2f)' % (pnote, note - pnote)
        if self.config.score:
            sect = report_nodes.EvaluationSection(msg)
            self.reporter.display_reports(sect)
def report_total_messages_stats(sect, stats, previous_stats):
    """make total errors / warnings report"""
    # header row followed by one row per message category
    rows = ['type', 'number', 'previous', 'difference']
    rows += checkers.table_lines_from_stats(stats, previous_stats,
                                            ('convention', 'refactor',
                                             'warning', 'error'))
    sect.append(report_nodes.Table(children=rows, cols=4, rheaders=1))
def report_messages_stats(sect, stats, _):
    """make messages type report"""
    if not stats['by_msg']:
        # nothing was detected: skip this report entirely
        raise exceptions.EmptyReportError()
    # most frequent messages first; informational ('I') messages excluded
    by_count = sorted(((count, msg_id)
                       for msg_id, count in six.iteritems(stats['by_msg'])
                       if not msg_id.startswith('I')),
                      reverse=True)
    table = ('message id', 'occurrences')
    for count, msg_id in by_count:
        table += (msg_id, str(count))
    sect.append(report_nodes.Table(children=table, cols=2, rheaders=1))
def report_messages_by_module_stats(sect, stats, _):
    """make errors / warnings by modules report"""
    if len(stats['by_module']) == 1:
        # don't print this report when we are analysing a single module
        raise exceptions.EmptyReportError()
    # per-module percentage of each message type's global total
    by_mod = collections.defaultdict(dict)
    for m_type in ('fatal', 'error', 'warning', 'refactor', 'convention'):
        total = stats[m_type]
        for module in six.iterkeys(stats['by_module']):
            mod_total = stats['by_module'][module][m_type]
            if total == 0:
                percent = 0
            else:
                percent = float((mod_total)*100) / total
            by_mod[module][m_type] = percent
    # sort modules by (error, warning, refactor, convention) share, worst first
    sorted_result = []
    for module, mod_info in six.iteritems(by_mod):
        sorted_result.append((mod_info['error'],
                              mod_info['warning'],
                              mod_info['refactor'],
                              mod_info['convention'],
                              module))
    sorted_result.sort()
    sorted_result.reverse()
    lines = ['module', 'error', 'warning', 'refactor', 'convention']
    for line in sorted_result:
        # Don't report clean modules.
        if all(entry == 0 for entry in line[:-1]):
            continue
        lines.append(line[-1])
        for val in line[:-1]:
            lines.append('%.2f' % val)
    # only the header row left means every module was clean
    if len(lines) == 5:
        raise exceptions.EmptyReportError()
    sect.append(report_nodes.Table(children=lines, cols=5, rheaders=1))
class ArgumentPreprocessingError(Exception):
    """Error raised while preprocessing command-line arguments."""
def preprocess_options(args, search_for):
    """look for some options (keys of <search_for>) which have to be processed
    before others

    values of <search_for> are (callback, takearg) pairs; when an option is
    found, callback(option, value) is invoked and the option (plus its
    consumed value, if any) is removed from `args` in place.

    :param list args: command-line arguments, modified in place
    :param dict search_for: maps option name to a (callback, takearg) pair
    :raises ArgumentPreprocessingError: if an option needing a value has
        none, or an option taking no value is given one
    """
    i = 0
    while i < len(args):
        arg = args[i]
        if not arg.startswith('--'):
            i += 1
            continue
        # split '--opt=val' into its parts; '--opt' alone has no inline value
        try:
            option, val = arg[2:].split('=', 1)
        except ValueError:
            option, val = arg[2:], None
        if option not in search_for:
            i += 1
            continue
        cb, takearg = search_for[option]
        del args[i]
        if takearg and val is None:
            # the value is the next argument: consume it as well
            if i >= len(args) or args[i].startswith('-'):
                msg = 'Option %s expects a value' % option
                raise ArgumentPreprocessingError(msg)
            val = args[i]
            del args[i]
        elif not takearg and val is not None:
            # fixed grammar of this error message ("doesn't expects" -> "doesn't expect")
            msg = "Option %s doesn't expect a value" % option
            raise ArgumentPreprocessingError(msg)
        cb(option, val)
@contextlib.contextmanager
def fix_import_path(args):
    """Prepare sys.path for running the linter checks.

    Within this context each of the given arguments is importable: the
    corresponding directories (deduplicated, in argument order) are
    prepended to sys.path, followed by the current directory.  The
    original sys.path is restored on exit.
    """
    original = list(sys.path)
    additions = []
    for arg in args:
        directory = _get_python_path(arg)
        if directory not in additions:
            additions.append(directory)
    sys.path[:] = additions + ["."] + sys.path
    try:
        yield
    finally:
        sys.path[:] = original
class Run(object):
    """helper class to use as main for pylint :

    run(*sys.argv[1:])
    """
    LinterClass = PyLinter
    option_groups = (
        ('Commands', 'Options which are actually commands. Options in this \
group are mutually exclusive.'),
        )
    def __init__(self, args, reporter=None, exit=True):
        """Build the linter, apply configuration, run the check and exit.

        :param list args: command-line arguments
        :param reporter: optional reporter instance overriding the default
        :param bool exit: when True (default), sys.exit() with the linter's
            message status once checking is done
        """
        self._rcfile = None
        self._plugins = []
        try:
            # handle the few options that must be processed before regular
            # option parsing (they influence how parsing itself happens)
            preprocess_options(args, {
                # option: (callback, takearg)
                'init-hook': (cb_init_hook, True),
                'rcfile': (self.cb_set_rcfile, True),
                'load-plugins': (self.cb_add_plugins, True),
                })
        except ArgumentPreprocessingError as ex:
            print(ex, file=sys.stderr)
            sys.exit(32)
        self.linter = linter = self.LinterClass((
            ('rcfile',
             {'action' : 'callback', 'callback' : lambda *args: 1,
              'type': 'string', 'metavar': '<file>',
              'help' : 'Specify a configuration file.'}),
            ('init-hook',
             {'action' : 'callback', 'callback' : lambda *args: 1,
              'type' : 'string', 'metavar': '<code>',
              'level': 1,
              'help' : 'Python code to execute, usually for sys.path '
                       'manipulation such as pygtk.require().'}),
            ('help-msg',
             {'action' : 'callback', 'type' : 'string', 'metavar': '<msg-id>',
              'callback' : self.cb_help_message,
              'group': 'Commands',
              'help' : 'Display a help message for the given message id and '
                       'exit. The value may be a comma separated list of message ids.'}),
            ('list-msgs',
             {'action' : 'callback', 'metavar': '<msg-id>',
              'callback' : self.cb_list_messages,
              'group': 'Commands', 'level': 1,
              'help' : "Generate pylint's messages."}),
            # NOTE(review): this help text looks copy-pasted from 'list-msgs';
            # the callback actually lists confidence levels — confirm and fix
            # the wording upstream.
            ('list-conf-levels',
             {'action' : 'callback',
              'callback' : cb_list_confidence_levels,
              'group': 'Commands', 'level': 1,
              'help' : "Generate pylint's messages."}),
            ('full-documentation',
             {'action' : 'callback', 'metavar': '<msg-id>',
              'callback' : self.cb_full_documentation,
              'group': 'Commands', 'level': 1,
              'help' : "Generate pylint's full documentation."}),
            ('generate-rcfile',
             {'action' : 'callback', 'callback' : self.cb_generate_config,
              'group': 'Commands',
              'help' : 'Generate a sample configuration file according to '
                       'the current configuration. You can put other options '
                       'before this one to get them in the generated '
                       'configuration.'}),
            ('generate-man',
             {'action' : 'callback', 'callback' : self.cb_generate_manpage,
              'group': 'Commands',
              'help' : "Generate pylint's man page.", 'hide': True}),
            ('errors-only',
             {'action' : 'callback', 'callback' : self.cb_error_mode,
              'short': 'E',
              'help' : 'In error mode, checkers without error messages are '
                       'disabled and for others, only the ERROR messages are '
                       'displayed, and no reports are done by default'''}),
            ('py3k',
             {'action' : 'callback', 'callback' : self.cb_python3_porting_mode,
              'help' : 'In Python 3 porting mode, all checkers will be '
                       'disabled and only messages emitted by the porting '
                       'checker will be displayed'}),
            ), option_groups=self.option_groups, pylintrc=self._rcfile)
        # register standard checkers
        linter.load_default_plugins()
        # load command line plugins
        linter.load_plugin_modules(self._plugins)
        # add some help section
        linter.add_help_section('Environment variables', config.ENV_HELP, level=1)
        # pylint: disable=bad-continuation
        linter.add_help_section('Output',
                                'Using the default text output, the message format is : \n'
                                ' \n'
                                ' MESSAGE_TYPE: LINE_NUM:[OBJECT:] MESSAGE \n'
                                ' \n'
                                'There are 5 kind of message types : \n'
                                ' * (C) convention, for programming standard violation \n'
                                ' * (R) refactor, for bad code smell \n'
                                ' * (W) warning, for python specific problems \n'
                                ' * (E) error, for probable bugs in the code \n'
                                ' * (F) fatal, if an error occurred which prevented pylint from doing further\n'
                                'processing.\n'
                                , level=1)
        linter.add_help_section('Output status code',
                                'Pylint should leave with following status code: \n'
                                ' * 0 if everything went fine \n'
                                ' * 1 if a fatal message was issued \n'
                                ' * 2 if an error message was issued \n'
                                ' * 4 if a warning message was issued \n'
                                ' * 8 if a refactor message was issued \n'
                                ' * 16 if a convention message was issued \n'
                                ' * 32 on usage error \n'
                                ' \n'
                                'status 1 to 16 will be bit-ORed so you can know which different categories has\n'
                                'been issued by analysing pylint output status code\n',
                                level=1)
        # read configuration
        linter.disable('I')
        linter.enable('c-extension-no-member')
        linter.read_config_file()
        config_parser = linter.cfgfile_parser
        # run init hook, if present, before loading plugins
        if config_parser.has_option('MASTER', 'init-hook'):
            cb_init_hook('init-hook',
                         utils._unquote(config_parser.get('MASTER',
                                                          'init-hook')))
        # is there some additional plugins in the file configuration, in
        if config_parser.has_option('MASTER', 'load-plugins'):
            plugins = utils._splitstrip(
                config_parser.get('MASTER', 'load-plugins'))
            linter.load_plugin_modules(plugins)
        # now we can load file config and command line, plugins (which can
        # provide options) have been registered
        linter.load_config_file()
        if reporter:
            # if a custom reporter is provided as argument, it may be overridden
            # by file parameters, so re-set it here, but before command line
            # parsing so it's still overrideable by command line option
            linter.set_reporter(reporter)
        try:
            args = linter.load_command_line_configuration(args)
        except SystemExit as exc:
            if exc.code == 2: # bad options
                exc.code = 32
            raise
        if not args:
            print(linter.help())
            sys.exit(32)
        if linter.config.jobs < 0:
            print("Jobs number (%d) should be greater than 0"
                  % linter.config.jobs, file=sys.stderr)
            sys.exit(32)
        if linter.config.jobs > 1 or linter.config.jobs == 0:
            if multiprocessing is None:
                print("Multiprocessing library is missing, "
                      "fallback to single process", file=sys.stderr)
                linter.set_option("jobs", 1)
            else:
                # jobs == 0 means "use one job per CPU"
                if linter.config.jobs == 0:
                    linter.config.jobs = multiprocessing.cpu_count()
        # insert current working directory to the python path to have a correct
        # behaviour
        with fix_import_path(args):
            linter.check(args)
            linter.generate_reports()
        if exit:
            sys.exit(self.linter.msg_status)
    def cb_set_rcfile(self, name, value):
        """callback for option preprocessing (i.e. before option parsing)"""
        self._rcfile = value
    def cb_add_plugins(self, name, value):
        """callback for option preprocessing (i.e. before option parsing)"""
        self._plugins.extend(utils._splitstrip(value))
    def cb_error_mode(self, *args, **kwargs):
        """error mode:

        * disable all but error messages
        * disable the 'miscellaneous' checker which can be safely deactivated
          in debug
        * disable reports
        * do not save execution information
        """
        self.linter.error_mode()
    def cb_generate_config(self, *args, **kwargs):
        """optik callback for sample config file generation"""
        self.linter.generate_config(skipsections=('COMMANDS',))
        sys.exit(0)
    def cb_generate_manpage(self, *args, **kwargs):
        """optik callback for man page generation"""
        from pylint import __pkginfo__
        self.linter.generate_manpage(__pkginfo__)
        sys.exit(0)
    def cb_help_message(self, option, optname, value, parser):
        """optik callback for printing some help about a particular message"""
        self.linter.msgs_store.help_message(utils._splitstrip(value))
        sys.exit(0)
    def cb_full_documentation(self, option, optname, value, parser):
        """optik callback for printing full documentation"""
        self.linter.print_full_documentation()
        sys.exit(0)
    def cb_list_messages(self, option, optname, value, parser): # FIXME
        """optik callback for printing available messages"""
        self.linter.msgs_store.list_messages()
        sys.exit(0)
    def cb_python3_porting_mode(self, *args, **kwargs):
        """Activate only the python3 porting checker."""
        self.linter.python3_porting_mode()
def cb_list_confidence_levels(option, optname, value, parser):
    """Print every confidence level pylint knows about, then exit."""
    for confidence in interfaces.CONFIDENCE_LEVELS:
        print('%-18s: %s' % confidence)
    sys.exit(0)
def cb_init_hook(optname, value):
    """exec arbitrary code to set sys.path for instance"""
    # NOTE(review): runs user-supplied code verbatim (from the command line
    # or the rcfile's 'init-hook'); never feed this untrusted input.
    exec(value) # pylint: disable=exec-used
# Command-line entry point: lint the files/modules given as arguments.
if __name__ == '__main__':
    Run(sys.argv[1:])
|
"""Thread module emulating a subset of Java's threading model."""
import sys as _sys
try:
import thread
except ImportError:
del _sys.modules[__name__]
raise
import warnings
from collections import deque as _deque
from time import time as _time, sleep as _sleep
from traceback import format_exc as _format_exc
__all__ = ['activeCount', 'active_count', 'Condition', 'currentThread',
'current_thread', 'enumerate', 'Event',
'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Thread',
'Timer', 'setprofile', 'settrace', 'local', 'stack_size']
_start_new_thread = thread.start_new_thread
_allocate_lock = thread.allocate_lock
_get_ident = thread.get_ident
ThreadError = thread.error
del thread
warnings.filterwarnings('ignore', category=DeprecationWarning,
module='threading', message='sys.exc_clear')
_VERBOSE = False
# _Verbose provides optional debug tracing for the synchronization classes.
# Under "python -O" (__debug__ is False) it collapses to a no-op so the
# tracing costs nothing in optimized runs.
if __debug__:
    class _Verbose(object):
        def __init__(self, verbose=None):
            # fall back to the module-wide _VERBOSE flag when unspecified
            if verbose is None:
                verbose = _VERBOSE
            self.__verbose = verbose
        def _note(self, format, *args):
            """Write a formatted debug note, tagged with the thread's name."""
            if self.__verbose:
                format = format % args
                # Issue #4188: calling current_thread() can incur an infinite
                # recursion if it has to create a DummyThread on the fly.
                ident = _get_ident()
                try:
                    name = _active[ident].name
                except KeyError:
                    name = "<OS thread %d>" % ident
                format = "%s: %s\n" % (name, format)
                _sys.stderr.write(format)
else:
    # Disable this when using "python -O"
    class _Verbose(object):
        def __init__(self, verbose=None):
            pass
        def _note(self, *args):
            pass
# Hooks installed via setprofile()/settrace(); each new thread passes them
# to sys.setprofile()/sys.settrace() before its run() method is called.
_profile_hook = None
_trace_hook = None
def setprofile(func):
    """Install *func* as the profile function for threads started from this
    module.

    Each new thread hands the function to sys.setprofile() before its run()
    method is invoked.
    """
    global _profile_hook
    _profile_hook = func
def settrace(func):
    """Install *func* as the trace function for threads started from this
    module.

    Each new thread hands the function to sys.settrace() before its run()
    method is invoked.
    """
    global _trace_hook
    _trace_hook = func
# Lock() is simply the low-level primitive lock factory from the thread module.
Lock = _allocate_lock
def RLock(*args, **kwargs):
    """Factory function that returns a new reentrant lock.

    A reentrant lock may be acquired repeatedly by the thread that already
    owns it without blocking; that thread must release it once for every
    acquisition before other threads can take it.
    """
    return _RLock(*args, **kwargs)
class _RLock(_Verbose):
    """A reentrant lock must be released by the thread that acquired it. Once a
    thread has acquired a reentrant lock, the same thread may acquire it
    again without blocking; the thread must release it once for each time it
    has acquired it.
    """
    def __init__(self, verbose=None):
        _Verbose.__init__(self, verbose)
        # __block is the underlying primitive lock; __owner/__count track
        # which thread holds it and at what recursion depth
        self.__block = _allocate_lock()
        self.__owner = None
        self.__count = 0
    def __repr__(self):
        owner = self.__owner
        try:
            # show the owning thread's name when it is a known Thread
            owner = _active[owner].name
        except KeyError:
            pass
        return "<%s owner=%r count=%d>" % (
                self.__class__.__name__, owner, self.__count)
    def acquire(self, blocking=1):
        """Acquire a lock, blocking or non-blocking.

        When invoked without arguments: if this thread already owns the lock,
        increment the recursion level by one, and return immediately. Otherwise,
        if another thread owns the lock, block until the lock is unlocked. Once
        the lock is unlocked (not owned by any thread), then grab ownership, set
        the recursion level to one, and return. If more than one thread is
        blocked waiting until the lock is unlocked, only one at a time will be
        able to grab ownership of the lock. There is no return value in this
        case.

        When invoked with the blocking argument set to true, do the same thing
        as when called without arguments, and return true.

        When invoked with the blocking argument set to false, do not block. If a
        call without an argument would block, return false immediately;
        otherwise, do the same thing as when called without arguments, and
        return true.
        """
        me = _get_ident()
        # re-entrant path: we already own the lock, just bump the depth
        if self.__owner == me:
            self.__count = self.__count + 1
            if __debug__:
                self._note("%s.acquire(%s): recursive success", self, blocking)
            return 1
        rc = self.__block.acquire(blocking)
        if rc:
            self.__owner = me
            self.__count = 1
            if __debug__:
                self._note("%s.acquire(%s): initial success", self, blocking)
        else:
            if __debug__:
                self._note("%s.acquire(%s): failure", self, blocking)
        return rc
    __enter__ = acquire
    def release(self):
        """Release a lock, decrementing the recursion level.

        If after the decrement it is zero, reset the lock to unlocked (not owned
        by any thread), and if any other threads are blocked waiting for the
        lock to become unlocked, allow exactly one of them to proceed. If after
        the decrement the recursion level is still nonzero, the lock remains
        locked and owned by the calling thread.

        Only call this method when the calling thread owns the lock. A
        RuntimeError is raised if this method is called when the lock is
        unlocked.

        There is no return value.
        """
        if self.__owner != _get_ident():
            raise RuntimeError("cannot release un-acquired lock")
        self.__count = count = self.__count - 1
        if not count:
            # recursion level hit zero: actually free the primitive lock
            self.__owner = None
            self.__block.release()
            if __debug__:
                self._note("%s.release(): final release", self)
        else:
            if __debug__:
                self._note("%s.release(): non-final release", self)
    def __exit__(self, t, v, tb):
        self.release()
    # Internal methods used by condition variables
    def _acquire_restore(self, count_owner):
        # re-acquire the primitive lock, then restore the saved recursion
        # depth and owner recorded by _release_save()
        count, owner = count_owner
        self.__block.acquire()
        self.__count = count
        self.__owner = owner
        if __debug__:
            self._note("%s._acquire_restore()", self)
    def _release_save(self):
        # fully release the lock regardless of recursion depth, returning
        # the (count, owner) state needed to restore it later
        if __debug__:
            self._note("%s._release_save()", self)
        count = self.__count
        self.__count = 0
        owner = self.__owner
        self.__owner = None
        self.__block.release()
        return (count, owner)
    def _is_owned(self):
        return self.__owner == _get_ident()
def Condition(*args, **kwargs):
    """Factory function that returns a new condition variable object.

    A condition variable lets one or more threads sleep until another
    thread notifies them.  The optional lock argument, when given and not
    None, must be a Lock or RLock and is used as the underlying lock;
    otherwise a fresh RLock is created for that purpose.
    """
    return _Condition(*args, **kwargs)
class _Condition(_Verbose):
    """Condition variables allow one or more threads to wait until they are
    notified by another thread.

    All state (the waiter list) is protected by the underlying lock, which
    callers must hold around wait()/notify().
    """
    def __init__(self, lock=None, verbose=None):
        _Verbose.__init__(self, verbose)
        if lock is None:
            lock = RLock()
        self.__lock = lock
        # Export the lock's acquire() and release() methods
        self.acquire = lock.acquire
        self.release = lock.release
        # If the lock defines _release_save() and/or _acquire_restore(),
        # these override the default implementations (which just call
        # release() and acquire() on the lock). Ditto for _is_owned().
        try:
            self._release_save = lock._release_save
        except AttributeError:
            pass
        try:
            self._acquire_restore = lock._acquire_restore
        except AttributeError:
            pass
        try:
            self._is_owned = lock._is_owned
        except AttributeError:
            pass
        # One private "waiter" lock per thread currently blocked in wait().
        self.__waiters = []
    def __enter__(self):
        return self.__lock.__enter__()
    def __exit__(self, *args):
        return self.__lock.__exit__(*args)
    def __repr__(self):
        return "<Condition(%s, %d)>" % (self.__lock, len(self.__waiters))
    def _release_save(self):
        self.__lock.release()           # No state to save
    def _acquire_restore(self, x):
        self.__lock.acquire()           # Ignore saved state
    def _is_owned(self):
        # Return True if lock is owned by current_thread.
        # This method is called only if __lock doesn't have _is_owned().
        # Probe with a non-blocking acquire: if it succeeds, nobody (including
        # us) held the lock, so undo the acquire and report not-owned.
        if self.__lock.acquire(0):
            self.__lock.release()
            return False
        else:
            return True
    def wait(self, timeout=None):
        """Wait until notified or until a timeout occurs.
        If the calling thread has not acquired the lock when this method is
        called, a RuntimeError is raised.
        This method releases the underlying lock, and then blocks until it is
        awakened by a notify() or notifyAll() call for the same condition
        variable in another thread, or until the optional timeout occurs. Once
        awakened or timed out, it re-acquires the lock and returns.
        When the timeout argument is present and not None, it should be a
        floating point number specifying a timeout for the operation in seconds
        (or fractions thereof).
        When the underlying lock is an RLock, it is not released using its
        release() method, since this may not actually unlock the lock when it
        was acquired multiple times recursively. Instead, an internal interface
        of the RLock class is used, which really unlocks it even when it has
        been recursively acquired several times. Another internal interface is
        then used to restore the recursion level when the lock is reacquired.
        """
        if not self._is_owned():
            raise RuntimeError("cannot wait on un-acquired lock")
        # Each waiter blocks on its own freshly-allocated lock, acquired
        # twice: the second acquire blocks until notify() releases it.
        waiter = _allocate_lock()
        waiter.acquire()
        self.__waiters.append(waiter)
        saved_state = self._release_save()
        try:    # restore state no matter what (e.g., KeyboardInterrupt)
            if timeout is None:
                waiter.acquire()
                if __debug__:
                    self._note("%s.wait(): got it", self)
            else:
                # Balancing act:  We can't afford a pure busy loop, so we
                # have to sleep; but if we sleep the whole timeout time,
                # we'll be unresponsive.  The scheme here sleeps very
                # little at first, longer as time goes on, but never longer
                # than 20 times per second (or the timeout time remaining).
                endtime = _time() + timeout
                delay = 0.0005 # 500 us -> initial delay of 1 ms
                while True:
                    gotit = waiter.acquire(0)
                    if gotit:
                        break
                    remaining = endtime - _time()
                    if remaining <= 0:
                        break
                    delay = min(delay * 2, remaining, .05)
                    _sleep(delay)
                if not gotit:
                    if __debug__:
                        self._note("%s.wait(%s): timed out", self, timeout)
                    # Timed out: withdraw our waiter lock; notify() may have
                    # removed it concurrently, hence the tolerated ValueError.
                    try:
                        self.__waiters.remove(waiter)
                    except ValueError:
                        pass
                else:
                    if __debug__:
                        self._note("%s.wait(%s): got it", self, timeout)
        finally:
            self._acquire_restore(saved_state)
    def notify(self, n=1):
        """Wake up one or more threads waiting on this condition, if any.
        If the calling thread has not acquired the lock when this method is
        called, a RuntimeError is raised.
        This method wakes up at most n of the threads waiting for the condition
        variable; it is a no-op if no threads are waiting.
        """
        if not self._is_owned():
            raise RuntimeError("cannot notify on un-acquired lock")
        __waiters = self.__waiters
        waiters = __waiters[:n]
        if not waiters:
            if __debug__:
                self._note("%s.notify(): no waiters", self)
            return
        self._note("%s.notify(): notifying %d waiter%s", self, n,
                   n!=1 and "s" or "")
        # Releasing a waiter's lock unblocks its wait(); also drop it from
        # the waiter list (it may already be gone if the wait timed out).
        for waiter in waiters:
            waiter.release()
            try:
                __waiters.remove(waiter)
            except ValueError:
                pass
    def notifyAll(self):
        """Wake up all threads waiting on this condition.
        If the calling thread has not acquired the lock when this method
        is called, a RuntimeError is raised.
        """
        self.notify(len(self.__waiters))
    # PEP 8 style alias; notifyAll is the historical spelling.
    notify_all = notifyAll
def Semaphore(*args, **kwargs):
    """A factory function that returns a new semaphore.
    Semaphores manage a counter representing the number of release() calls minus
    the number of acquire() calls, plus an initial value. The acquire() method
    blocks if necessary until it can return without making the counter
    negative. If not given, value defaults to 1.
    """
    # Factory kept for API compatibility; the implementation is _Semaphore.
    return _Semaphore(*args, **kwargs)
class _Semaphore(_Verbose):
    """Semaphores manage a counter representing the number of release() calls
    minus the number of acquire() calls, plus an initial value. The acquire()
    method blocks if necessary until it can return without making the counter
    negative. If not given, value defaults to 1.
    """
    # After Tim Peters' semaphore class, but not quite the same (no maximum)
    def __init__(self, value=1, verbose=None):
        if value < 0:
            raise ValueError("semaphore initial value must be >= 0")
        _Verbose.__init__(self, verbose)
        # The counter is protected by a condition built on a plain
        # (non-reentrant) Lock.
        self.__cond = Condition(Lock())
        self.__value = value
    def acquire(self, blocking=1):
        """Acquire a semaphore, decrementing the internal counter by one.
        When invoked without arguments: if the internal counter is larger than
        zero on entry, decrement it by one and return immediately. If it is zero
        on entry, block, waiting until some other thread has called release() to
        make it larger than zero. This is done with proper interlocking so that
        if multiple acquire() calls are blocked, release() will wake exactly one
        of them up. The implementation may pick one at random, so the order in
        which blocked threads are awakened should not be relied on. There is no
        return value in this case.
        When invoked with blocking set to true, do the same thing as when called
        without arguments, and return true.
        When invoked with blocking set to false, do not block. If a call without
        an argument would block, return false immediately; otherwise, do the
        same thing as when called without arguments, and return true.
        """
        rc = False
        with self.__cond:
            # while/else: the else branch runs only when the loop condition
            # becomes false (counter > 0); a non-blocking caller breaks out
            # and skips the decrement, leaving rc False.
            while self.__value == 0:
                if not blocking:
                    break
                if __debug__:
                    self._note("%s.acquire(%s): blocked waiting, value=%s",
                               self, blocking, self.__value)
                self.__cond.wait()
            else:
                self.__value = self.__value - 1
                if __debug__:
                    self._note("%s.acquire: success, value=%s",
                               self, self.__value)
                rc = True
        return rc
    # 'with sem:' acquires on entry and releases on exit.
    __enter__ = acquire
    def release(self):
        """Release a semaphore, incrementing the internal counter by one.
        When the counter is zero on entry and another thread is waiting for it
        to become larger than zero again, wake up that thread.
        """
        with self.__cond:
            self.__value = self.__value + 1
            if __debug__:
                self._note("%s.release: success, value=%s",
                           self, self.__value)
            self.__cond.notify()
    def __exit__(self, t, v, tb):
        self.release()
def BoundedSemaphore(*args, **kwargs):
    """A factory function that returns a new bounded semaphore.
    A bounded semaphore checks to make sure its current value doesn't exceed its
    initial value. If it does, ValueError is raised. In most situations
    semaphores are used to guard resources with limited capacity.
    If the semaphore is released too many times it's a sign of a bug. If not
    given, value defaults to 1.
    Like regular semaphores, bounded semaphores manage a counter representing
    the number of release() calls minus the number of acquire() calls, plus an
    initial value. The acquire() method blocks if necessary until it can return
    without making the counter negative. If not given, value defaults to 1.
    """
    # Factory kept for API compatibility; the implementation is
    # _BoundedSemaphore.
    return _BoundedSemaphore(*args, **kwargs)
class _BoundedSemaphore(_Semaphore):
    """A bounded semaphore checks to make sure its current value doesn't exceed
    its initial value. If it does, ValueError is raised. In most situations
    semaphores are used to guard resources with limited capacity.
    """
    def __init__(self, value=1, verbose=None):
        _Semaphore.__init__(self, value, verbose)
        # Remember the starting value so release() can detect over-release.
        self._initial_value = value
    def release(self):
        """Release a semaphore, incrementing the internal counter by one.
        When the counter is zero on entry and another thread is waiting for it
        to become larger than zero again, wake up that thread.
        If the number of releases exceeds the number of acquires,
        raise a ValueError.
        """
        # The bound check and the increment must happen atomically under the
        # semaphore's condition lock.  The previous implementation read
        # _Semaphore__value *before* taking the lock and then called
        # _Semaphore.release(), so a concurrent acquire()/release() could
        # slip in between and an over-release could go undetected (this is
        # the same fix Python 3 applies in BoundedSemaphore.release()).
        with self._Semaphore__cond:
            if self._Semaphore__value >= self._initial_value:
                raise ValueError("Semaphore released too many times")
            self._Semaphore__value = self._Semaphore__value + 1
            if __debug__:
                self._note("%s.release: success, value=%s",
                           self, self._Semaphore__value)
            self._Semaphore__cond.notify()
def Event(*args, **kwargs):
    """A factory function that returns a new event.
    Events manage a flag that can be set to true with the set() method and reset
    to false with the clear() method. The wait() method blocks until the flag is
    true.
    """
    # Factory kept for API compatibility; the implementation is _Event.
    return _Event(*args, **kwargs)
class _Event(_Verbose):
    """An event manages a flag that can be set to true with the set() method
    and reset to false with the clear() method. The wait() method blocks until
    the flag is true.  The flag is protected by a condition on a plain Lock.
    """
    # After Tim Peters' event class (without is_posted())
    def __init__(self, verbose=None):
        _Verbose.__init__(self, verbose)
        self.__cond = Condition(Lock())
        self.__flag = False
    def _reset_internal_locks(self):
        # private!  called by Thread._reset_internal_locks by _after_fork()
        # Re-running Condition.__init__ replaces the (possibly locked) lock
        # inherited across fork() with a fresh one.
        self.__cond.__init__()
    def isSet(self):
        'Return true if and only if the internal flag is true.'
        return self.__flag
    # PEP 8 style alias for isSet.
    is_set = isSet
    def set(self):
        """Set the internal flag to true.
        All threads waiting for the flag to become true are awakened. Threads
        that call wait() once the flag is true will not block at all.
        """
        self.__cond.acquire()
        try:
            self.__flag = True
            self.__cond.notify_all()
        finally:
            self.__cond.release()
    def clear(self):
        """Reset the internal flag to false.
        Subsequently, threads calling wait() will block until set() is called to
        set the internal flag to true again.
        """
        self.__cond.acquire()
        try:
            self.__flag = False
        finally:
            self.__cond.release()
    def wait(self, timeout=None):
        """Block until the internal flag is true.
        If the internal flag is true on entry, return immediately. Otherwise,
        block until another thread calls set() to set the flag to true, or until
        the optional timeout occurs.
        When the timeout argument is present and not None, it should be a
        floating point number specifying a timeout for the operation in seconds
        (or fractions thereof).
        This method returns the internal flag on exit, so it will always return
        True except if a timeout is given and the operation times out.
        """
        self.__cond.acquire()
        try:
            if not self.__flag:
                self.__cond.wait(timeout)
            return self.__flag
        finally:
            self.__cond.release()
_counter = 0
def _newname(template="Thread-%d"):
global _counter
_counter = _counter + 1
return template % _counter
# Guards both thread registries below; every read/write of _active/_limbo
# happens under this lock.
_active_limbo_lock = _allocate_lock()
_active = {}    # maps thread id to Thread object (threads that have started)
_limbo = {}     # threads whose start() was called but have not yet run
class Thread(_Verbose):
    """A class that represents a thread of control.
    This class can be safely subclassed in a limited fashion.
    """
    # Set True by __init__; guards against use of an un-initialized instance.
    __initialized = False
    # Need to store a reference to sys.exc_info for printing
    # out exceptions when a thread tries to use a global var. during interp.
    # shutdown and thus raises an exception about trying to perform some
    # operation on/with a NoneType
    __exc_info = _sys.exc_info
    # Keep sys.exc_clear too to clear the exception just before
    # allowing .join() to return.
    __exc_clear = _sys.exc_clear
    def __init__(self, group=None, target=None, name=None,
                 args=(), kwargs=None, verbose=None):
        """This constructor should always be called with keyword arguments. Arguments are:
        *group* should be None; reserved for future extension when a ThreadGroup
        class is implemented.
        *target* is the callable object to be invoked by the run()
        method. Defaults to None, meaning nothing is called.
        *name* is the thread name. By default, a unique name is constructed of
        the form "Thread-N" where N is a small decimal number.
        *args* is the argument tuple for the target invocation. Defaults to ().
        *kwargs* is a dictionary of keyword arguments for the target
        invocation. Defaults to {}.
        If a subclass overrides the constructor, it must make sure to invoke
        the base class constructor (Thread.__init__()) before doing anything
        else to the thread.
        """
        assert group is None, "group argument must be None for now"
        _Verbose.__init__(self, verbose)
        if kwargs is None:
            kwargs = {}
        self.__target = target
        self.__name = str(name or _newname())
        self.__args = args
        self.__kwargs = kwargs
        # Daemon status is inherited from the creating thread (see
        # _set_daemon); _MainThread/_DummyThread override it.
        self.__daemonic = self._set_daemon()
        self.__ident = None
        # Set once the OS thread has actually begun running (__bootstrap).
        self.__started = Event()
        self.__stopped = False
        # Condition used by join() to wait for __stopped.
        self.__block = Condition(Lock())
        self.__initialized = True
        # sys.stderr is not stored in the class like
        # sys.exc_info since it can be changed between instances
        self.__stderr = _sys.stderr
    def _reset_internal_locks(self):
        # private!  Called by _after_fork() to reset our internal locks as
        # they may be in an invalid state leading to a deadlock or crash.
        if hasattr(self, '_Thread__block'): # DummyThread deletes self.__block
            self.__block.__init__()
        self.__started._reset_internal_locks()
    @property
    def _block(self):
        # used by a unittest
        return self.__block
    def _set_daemon(self):
        # Overridden in _MainThread and _DummyThread
        return current_thread().daemon
    def __repr__(self):
        assert self.__initialized, "Thread.__init__() was not called"
        status = "initial"
        if self.__started.is_set():
            status = "started"
        if self.__stopped:
            status = "stopped"
        if self.__daemonic:
            status += " daemon"
        if self.__ident is not None:
            status += " %s" % self.__ident
        return "<%s(%s, %s)>" % (self.__class__.__name__, self.__name, status)
    def start(self):
        """Start the thread's activity.
        It must be called at most once per thread object. It arranges for the
        object's run() method to be invoked in a separate thread of control.
        This method will raise a RuntimeError if called more than once on the
        same thread object.
        """
        if not self.__initialized:
            raise RuntimeError("thread.__init__() not called")
        if self.__started.is_set():
            raise RuntimeError("threads can only be started once")
        if __debug__:
            self._note("%s.start(): starting thread", self)
        # Park the thread in _limbo until __bootstrap moves it to _active.
        with _active_limbo_lock:
            _limbo[self] = self
        try:
            _start_new_thread(self.__bootstrap, ())
        except Exception:
            # OS-level thread creation failed: undo the limbo registration.
            with _active_limbo_lock:
                del _limbo[self]
            raise
        # Don't return until the new thread has really begun executing.
        self.__started.wait()
    def run(self):
        """Method representing the thread's activity.
        You may override this method in a subclass. The standard run() method
        invokes the callable object passed to the object's constructor as the
        target argument, if any, with sequential and keyword arguments taken
        from the args and kwargs arguments, respectively.
        """
        try:
            if self.__target:
                self.__target(*self.__args, **self.__kwargs)
        finally:
            # Avoid a refcycle if the thread is running a function with
            # an argument that has a member that points to the thread.
            del self.__target, self.__args, self.__kwargs
    def __bootstrap(self):
        # Wrapper around the real bootstrap code that ignores
        # exceptions during interpreter cleanup.  Those typically
        # happen when a daemon thread wakes up at an unfortunate
        # moment, finds the world around it destroyed, and raises some
        # random exception *** while trying to report the exception in
        # __bootstrap_inner() below ***.  Those random exceptions
        # don't help anybody, and they confuse users, so we suppress
        # them.  We suppress them only when it appears that the world
        # indeed has already been destroyed, so that exceptions in
        # __bootstrap_inner() during normal business hours are properly
        # reported.  Also, we only suppress them for daemonic threads;
        # if a non-daemonic encounters this, something else is wrong.
        try:
            self.__bootstrap_inner()
        except:
            if self.__daemonic and _sys is None:
                return
            raise
    def _set_ident(self):
        self.__ident = _get_ident()
    def __bootstrap_inner(self):
        try:
            self._set_ident()
            self.__started.set()
            # Promote ourselves from _limbo to _active now that we run.
            with _active_limbo_lock:
                _active[self.__ident] = self
                del _limbo[self]
            if __debug__:
                self._note("%s.__bootstrap(): thread started", self)
            if _trace_hook:
                self._note("%s.__bootstrap(): registering trace hook", self)
                _sys.settrace(_trace_hook)
            if _profile_hook:
                self._note("%s.__bootstrap(): registering profile hook", self)
                _sys.setprofile(_profile_hook)
            try:
                self.run()
            except SystemExit:
                if __debug__:
                    self._note("%s.__bootstrap(): raised SystemExit", self)
            except:
                if __debug__:
                    self._note("%s.__bootstrap(): unhandled exception", self)
                # If sys.stderr is no more (most likely from interpreter
                # shutdown) use self.__stderr.  Otherwise still use sys (as in
                # _sys) in case sys.stderr was redefined since the creation of
                # self.
                if _sys:
                    _sys.stderr.write("Exception in thread %s:\n%s\n" %
                                      (self.name, _format_exc()))
                else:
                    # Do the best job possible w/o a huge amt. of code to
                    # approximate a traceback (code ideas from
                    # Lib/traceback.py)
                    exc_type, exc_value, exc_tb = self.__exc_info()
                    try:
                        print>>self.__stderr, (
                            "Exception in thread " + self.name +
                            " (most likely raised during interpreter shutdown):")
                        print>>self.__stderr, (
                            "Traceback (most recent call last):")
                        while exc_tb:
                            print>>self.__stderr, (
                                '  File "%s", line %s, in %s' %
                                (exc_tb.tb_frame.f_code.co_filename,
                                    exc_tb.tb_lineno,
                                    exc_tb.tb_frame.f_code.co_name))
                            exc_tb = exc_tb.tb_next
                        print>>self.__stderr, ("%s: %s" % (exc_type, exc_value))
                    # Make sure that exc_tb gets deleted since it is a memory
                    # hog; deleting everything else is just for thoroughness
                    finally:
                        del exc_type, exc_value, exc_tb
            else:
                if __debug__:
                    self._note("%s.__bootstrap(): normal return", self)
            finally:
                # Prevent a race in
                # test_threading.test_no_refcycle_through_target when
                # the exception keeps the target alive past when we
                # assert that it's dead.
                self.__exc_clear()
        finally:
            with _active_limbo_lock:
                self.__stop()
                try:
                    # We don't call self.__delete() because it also
                    # grabs _active_limbo_lock.
                    del _active[_get_ident()]
                except:
                    pass
    def __stop(self):
        # Mark the thread stopped and wake any join()ers.
        # DummyThreads delete self.__block, but they have no waiters to
        # notify anyway (join() is forbidden on them).
        if not hasattr(self, '_Thread__block'):
            return
        self.__block.acquire()
        self.__stopped = True
        self.__block.notify_all()
        self.__block.release()
    def __delete(self):
        "Remove current thread from the dict of currently running threads."
        # Notes about running with dummy_thread:
        #
        # Must take care to not raise an exception if dummy_thread is being
        # used (and thus this module is being used as an instance of
        # dummy_threading).  dummy_thread.get_ident() always returns -1 since
        # there is only one thread if dummy_thread is being used.  Thus
        # len(_active) is always <= 1 here, and any Thread instance created
        # overwrites the (if any) thread currently registered in _active.
        #
        # An instance of _MainThread is always created by 'threading'.  This
        # gets overwritten the instant an instance of Thread is created; both
        # threads return -1 from dummy_thread.get_ident() and thus have the
        # same key in the dict.  So when the _MainThread instance created by
        # 'threading' tries to clean itself up when atexit calls this method
        # it gets a KeyError if another Thread instance was created.
        #
        # This all means that KeyError from trying to delete something from
        # _active if dummy_threading is being used is a red herring.  But
        # since it isn't if dummy_threading is *not* being used then don't
        # hide the exception.
        try:
            with _active_limbo_lock:
                del _active[_get_ident()]
                # There must not be any python code between the previous line
                # and after the lock is released.  Otherwise a tracing function
                # could try to acquire the lock again in the same thread, (in
                # current_thread()), and would block.
        except KeyError:
            if 'dummy_threading' not in _sys.modules:
                raise
    def join(self, timeout=None):
        """Wait until the thread terminates.
        This blocks the calling thread until the thread whose join() method is
        called terminates -- either normally or through an unhandled exception
        or until the optional timeout occurs.
        When the timeout argument is present and not None, it should be a
        floating point number specifying a timeout for the operation in seconds
        (or fractions thereof). As join() always returns None, you must call
        isAlive() after join() to decide whether a timeout happened -- if the
        thread is still alive, the join() call timed out.
        When the timeout argument is not present or None, the operation will
        block until the thread terminates.
        A thread can be join()ed many times.
        join() raises a RuntimeError if an attempt is made to join the current
        thread as that would cause a deadlock. It is also an error to join() a
        thread before it has been started and attempts to do so raises the same
        exception.
        """
        if not self.__initialized:
            raise RuntimeError("Thread.__init__() not called")
        if not self.__started.is_set():
            raise RuntimeError("cannot join thread before it is started")
        if self is current_thread():
            raise RuntimeError("cannot join current thread")
        if __debug__:
            if not self.__stopped:
                self._note("%s.join(): waiting until thread stops", self)
        self.__block.acquire()
        try:
            if timeout is None:
                # Untimed join: wait until __stop() sets the flag.
                while not self.__stopped:
                    self.__block.wait()
                if __debug__:
                    self._note("%s.join(): thread stopped", self)
            else:
                # Timed join: recompute the remaining delay on every wakeup.
                deadline = _time() + timeout
                while not self.__stopped:
                    delay = deadline - _time()
                    if delay <= 0:
                        if __debug__:
                            self._note("%s.join(): timed out", self)
                        break
                    self.__block.wait(delay)
                else:
                    if __debug__:
                        self._note("%s.join(): thread stopped", self)
        finally:
            self.__block.release()
    @property
    def name(self):
        """A string used for identification purposes only.
        It has no semantics. Multiple threads may be given the same name. The
        initial name is set by the constructor.
        """
        assert self.__initialized, "Thread.__init__() not called"
        return self.__name
    @name.setter
    def name(self, name):
        assert self.__initialized, "Thread.__init__() not called"
        self.__name = str(name)
    @property
    def ident(self):
        """Thread identifier of this thread or None if it has not been started.
        This is a nonzero integer. See the thread.get_ident() function. Thread
        identifiers may be recycled when a thread exits and another thread is
        created. The identifier is available even after the thread has exited.
        """
        assert self.__initialized, "Thread.__init__() not called"
        return self.__ident
    def isAlive(self):
        """Return whether the thread is alive.
        This method returns True just before the run() method starts until just
        after the run() method terminates. The module function enumerate()
        returns a list of all alive threads.
        """
        assert self.__initialized, "Thread.__init__() not called"
        return self.__started.is_set() and not self.__stopped
    # PEP 8 style alias for isAlive.
    is_alive = isAlive
    @property
    def daemon(self):
        """A boolean value indicating whether this thread is a daemon thread (True) or not (False).
        This must be set before start() is called, otherwise RuntimeError is
        raised. Its initial value is inherited from the creating thread; the
        main thread is not a daemon thread and therefore all threads created in
        the main thread default to daemon = False.
        The entire Python program exits when no alive non-daemon threads are
        left.
        """
        assert self.__initialized, "Thread.__init__() not called"
        return self.__daemonic
    @daemon.setter
    def daemon(self, daemonic):
        if not self.__initialized:
            raise RuntimeError("Thread.__init__() not called")
        if self.__started.is_set():
            raise RuntimeError("cannot set daemon status of active thread");
        self.__daemonic = daemonic
    # camelCase accessors kept for backward compatibility with old code.
    def isDaemon(self):
        return self.daemon
    def setDaemon(self, daemonic):
        self.daemon = daemonic
    def getName(self):
        return self.name
    def setName(self, name):
        self.name = name
def Timer(*args, **kwargs):
    """Factory function to create a Timer object.
    Timers call a function after a specified number of seconds:
        t = Timer(30.0, f, args=[], kwargs={})
        t.start()
        t.cancel()     # stop the timer's action if it's still waiting
    """
    # Factory kept for API compatibility; the implementation is _Timer.
    return _Timer(*args, **kwargs)
class _Timer(Thread):
    """Call a function after a specified number of seconds:
    t = Timer(30.0, f, args=[], kwargs={})
    t.start()
    t.cancel() # stop the timer's action if it's still waiting
    """
    def __init__(self, interval, function, args=None, kwargs=None):
        """interval is in seconds; args/kwargs are forwarded to function.

        The defaults were mutable ([] / {}), which shares one list/dict
        object across every _Timer instance that relies on the default;
        use None sentinels instead.  Callers passing explicit sequences
        and dicts behave exactly as before.
        """
        Thread.__init__(self)
        self.interval = interval
        self.function = function
        self.args = args if args is not None else []
        self.kwargs = kwargs if kwargs is not None else {}
        self.finished = Event()
    def cancel(self):
        """Stop the timer if it hasn't finished yet"""
        self.finished.set()
    def run(self):
        # Interruptible sleep: cancel() sets the event and ends the wait
        # early, in which case the callback is skipped.
        self.finished.wait(self.interval)
        if not self.finished.is_set():
            self.function(*self.args, **self.kwargs)
        self.finished.set()
class _MainThread(Thread):
    # Represents the interpreter's initial thread; constructed once at module
    # import and registered in _active immediately (it is already running).
    def __init__(self):
        Thread.__init__(self, name="MainThread")
        self._Thread__started.set()
        self._set_ident()
        with _active_limbo_lock:
            _active[_get_ident()] = self
    def _set_daemon(self):
        # The main thread is never daemonic.
        return False
    def _exitfunc(self):
        # Interpreter-shutdown hook: mark ourselves stopped, then join every
        # remaining non-daemon thread before deregistering.
        self._Thread__stop()
        t = _pickSomeNonDaemonThread()
        if t:
            if __debug__:
                self._note("%s: waiting for other threads", self)
        while t:
            t.join()
            t = _pickSomeNonDaemonThread()
        if __debug__:
            self._note("%s: exiting", self)
        self._Thread__delete()
def _pickSomeNonDaemonThread():
    """Return some live non-daemon thread, or None if none remain."""
    for candidate in enumerate():
        if candidate.daemon or not candidate.is_alive():
            continue
        return candidate
    return None
class _DummyThread(Thread):
    # Stand-in Thread object for threads of control that were NOT created by
    # this module (see currentThread()); immortal and always daemonic.
    def __init__(self):
        Thread.__init__(self, name=_newname("Dummy-%d"))
        # Thread.__block consumes an OS-level locking primitive, which
        # can never be used by a _DummyThread.  Since a _DummyThread
        # instance is immortal, that's bad, so release this resource.
        del self._Thread__block
        self._Thread__started.set()
        self._set_ident()
        with _active_limbo_lock:
            _active[_get_ident()] = self
    def _set_daemon(self):
        # Always daemonic: foreign threads must not block interpreter exit.
        return True
    def join(self, timeout=None):
        assert False, "cannot join a dummy thread"
def currentThread():
    """Return the current Thread object, corresponding to the caller's thread of control.
    If the caller's thread of control was not created through the threading
    module, a dummy thread object with limited functionality is returned.
    """
    try:
        return _active[_get_ident()]
    except KeyError:
        ##print "current_thread(): no current thread for", _get_ident()
        # Foreign thread: wrap it in a _DummyThread (which registers itself
        # in _active as a side effect of construction).
        return _DummyThread()
# PEP 8 style alias; currentThread is the historical spelling.
current_thread = currentThread
def activeCount():
    """Return the number of Thread objects currently alive.
    The returned count is equal to the length of the list returned by
    enumerate().
    """
    # Count both running threads and those started but not yet running.
    with _active_limbo_lock:
        return len(_active) + len(_limbo)
# PEP 8 style alias; activeCount is the historical spelling.
active_count = activeCount
def _enumerate():
    # Same as enumerate(), but without the lock. Internal use only.
    # Callers must either hold _active_limbo_lock or tolerate a racy view.
    return _active.values() + _limbo.values()
def enumerate():
    """Return a list of all Thread objects currently alive.
    The list includes daemonic threads, dummy thread objects created by
    current_thread(), and the main thread. It excludes terminated threads and
    threads that have not yet been started.

    NOTE: intentionally shadows the builtin enumerate() inside this module.
    """
    with _active_limbo_lock:
        return _active.values() + _limbo.values()
from thread import stack_size
# Instantiate the main-thread object now (it registers itself in _active);
# the interpreter calls _shutdown at exit, which per _exitfunc joins all
# remaining non-daemon threads.
_shutdown = _MainThread()._exitfunc
# get thread-local implementation, either from the thread
# module, or from the python fallback
try:
    from thread import _local as local
except ImportError:
    from _threading_local import local
def _after_fork():
    # This function is called by Python/ceval.c:PyEval_ReInitThreads which
    # is called from PyOS_AfterFork.  Here we cleanup threading module state
    # that should not exist after a fork.
    # Reset _active_limbo_lock, in case we forked while the lock was held
    # by another (non-forked) thread.  http://bugs.python.org/issue874900
    global _active_limbo_lock
    _active_limbo_lock = _allocate_lock()
    # fork() only copied the current thread; clear references to others.
    new_active = {}
    current = current_thread()
    with _active_limbo_lock:
        for thread in _active.itervalues():
            # Any lock/condition variable may be currently locked or in an
            # invalid state, so we reinitialize them.
            if hasattr(thread, '_reset_internal_locks'):
                thread._reset_internal_locks()
            if thread is current:
                # There is only one active thread. We reset the ident to
                # its new value since it can have changed.
                ident = _get_ident()
                thread._Thread__ident = ident
                new_active[ident] = thread
            else:
                # All the others are already stopped.
                thread._Thread__stop()
        # Rebuild the registries so only the surviving thread remains.
        _limbo.clear()
        _active.clear()
        _active.update(new_active)
        assert len(_active) == 1
def _test():
    """Self-test: producers and consumers exercising a bounded queue."""
    class BoundedQueue(_Verbose):
        # Classic monitor: one mutex, two condition variables (reader/writer).
        def __init__(self, limit):
            _Verbose.__init__(self)
            self.mon = RLock()
            self.rc = Condition(self.mon)   # signalled when an item arrives
            self.wc = Condition(self.mon)   # signalled when space frees up
            self.limit = limit
            self.queue = _deque()
        def put(self, item):
            self.mon.acquire()
            while len(self.queue) >= self.limit:
                self._note("put(%s): queue full", item)
                self.wc.wait()
            self.queue.append(item)
            self._note("put(%s): appended, length now %d",
                       item, len(self.queue))
            self.rc.notify()
            self.mon.release()
        def get(self):
            self.mon.acquire()
            while not self.queue:
                self._note("get(): queue empty")
                self.rc.wait()
            item = self.queue.popleft()
            self._note("get(): got %s, %d left", item, len(self.queue))
            self.wc.notify()
            self.mon.release()
            return item
    class ProducerThread(Thread):
        def __init__(self, queue, quota):
            Thread.__init__(self, name="Producer")
            self.queue = queue
            self.quota = quota
        def run(self):
            from random import random
            counter = 0
            while counter < self.quota:
                counter = counter + 1
                self.queue.put("%s.%d" % (self.name, counter))
                _sleep(random() * 0.00001)
    class ConsumerThread(Thread):
        def __init__(self, queue, count):
            Thread.__init__(self, name="Consumer")
            self.queue = queue
            self.count = count
        def run(self):
            while self.count > 0:
                item = self.queue.get()
                print item
                self.count = self.count - 1
    # NP producers each put NI items; one consumer drains all NP*NI of them
    # through a queue bounded at QL entries.
    NP = 3
    QL = 4
    NI = 5
    Q = BoundedQueue(QL)
    P = []
    for i in range(NP):
        t = ProducerThread(Q, NI)
        t.name = ("Producer-%d" % (i+1))
        P.append(t)
    C = ConsumerThread(Q, NI*NP)
    for t in P:
        t.start()
        _sleep(0.000001)
    C.start()
    for t in P:
        t.join()
    C.join()
if __name__ == '__main__':
    _test()  # run the producer/consumer self-test when executed directly
|
from swgpy.object import *
def create(kernel):
    """Build the static Naboo improved-lattice long wall object."""
    obj = Static()
    obj.template = "object/static/structure/naboo/shared_nboo_imprv_lattice_wall_long_s01.iff"
    obj.attribute_template_id = -1
    obj.stfName("obj_n","unknown_object")
    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
    return obj
|
from __future__ import print_function
class Solution(object):
    def ladderLength(self, beginWord, endWord, wordList):
        """
        Breadth-first search over single-letter edits: each BFS level is one
        transformation step, so the first time endWord is reached the path
        length is minimal.  Returns 0 when no ladder exists.
        :type beginWord: str
        :type endWord: str
        :type wordList: List[str]
        :rtype: int
        """
        distance, cur, visited, lookup = 0, [beginWord], set([beginWord]), set(wordList)
        while cur:
            next_queue = []
            for word in cur:
                if word == endWord:
                    return distance + 1
                # range instead of xrange: this file already imports
                # print_function for 2/3 compatibility, and xrange does not
                # exist on Python 3; words are short, so the cost is nil.
                for i in range(len(word)):
                    for j in 'abcdefghijklmnopqrstuvwxyz':
                        candidate = word[:i] + j + word[i + 1:]
                        if candidate not in visited and candidate in lookup:
                            # Mark as visited on enqueue to avoid duplicates.
                            next_queue.append(candidate)
                            visited.add(candidate)
            distance += 1
            cur = next_queue
        return 0
if __name__ == "__main__":
    # Demo: 0 (no ladder without "cog"), then 5 ("hit"->...->"cog").
    print(Solution().ladderLength("hit", "cog", set(["hot", "dot", "dog", "lot", "log"])))
    print(Solution().ladderLength("hit", "cog", set(["hot", "dot", "dog", "lot", "log", "cog"])))
|
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class RedditAccount(ProviderAccount):
    def to_str(self):
        """Display the Reddit username when present, else the base rendering."""
        fallback = super(RedditAccount, self).to_str()
        return self.account.extra_data.get("name", fallback)
class RedditProvider(OAuth2Provider):
    id = "reddit"
    name = "Reddit"
    account_class = RedditAccount

    def extract_uid(self, data):
        # Reddit usernames are unique, so the name doubles as the uid.
        return data["name"]

    def extract_common_fields(self, data):
        return {"username": data.get("name")}

    def get_default_scope(self):
        # "identity" is the minimum scope needed to read the user's name.
        return ["identity"]
provider_classes = [RedditProvider]  # presumably consumed by allauth's provider registry — confirm against allauth docs
|
"""
"""
from qtpy.QtWidgets import QListWidget, QListWidgetItem
|
from rpython.jit.metainterp.test.test_virtual import VirtualTests, VirtualMiscTests
from rpython.jit.backend.x86.test.test_basic import Jit386Mixin
# Minimal old-style class used as the instance type allocated by the
# virtual-object tests below.
class MyClass:
    pass
class TestsVirtual(Jit386Mixin, VirtualTests):
    # for the individual tests see
    # ====> ../../../metainterp/test/test_virtual.py
    # Hooks consumed by VirtualTests: the expected allocation op name, the
    # field-name prefix used for instance attributes, and an allocator.
    _new_op = 'new_with_vtable'
    _field_prefix = 'inst_'
    @staticmethod
    def _new():
        return MyClass()
class TestsVirtualMisc(Jit386Mixin, VirtualMiscTests):
    # for the individual tests see
    # ====> ../../../metainterp/test/test_virtual.py
    # All test cases are inherited; only the x86 backend mixin is added.
    pass
|
"""
Premium Question
"""
from bisect import bisect_left
from collections import defaultdict
import sys
__author__ = 'Daniel'
class WordDistance(object):
    def __init__(self, words):
        """
        Pre-index each word to the (sorted, ascending) list of positions at
        which it occurs in *words*.
        :type words: list[str]
        """
        self.word_dict = defaultdict(list)
        for i, w in enumerate(words):
            self.word_dict[w].append(i)

    def shortest(self, word1, word2):
        """
        Return the smallest index distance between any occurrence of word1
        and any occurrence of word2.
        :type word1: str
        :type word2: str
        :rtype: int
        """
        # float('inf') instead of sys.maxint: sys.maxint does not exist on
        # Python 3, and any real distance replaces the sentinel anyway.
        mini = float('inf')
        # Hoist the repeated dict lookup out of the loop.
        positions2 = self.word_dict[word2]
        for i in self.word_dict[word1]:
            # The nearest word2 occurrences are the bisect insertion point
            # and its immediate left neighbour.
            idx = bisect_left(positions2, i)
            for nei in (-1, 0):
                if 0 <= idx + nei < len(positions2):
                    mini = min(mini, abs(i - positions2[idx + nei]))
        return mini
|
r"""
.. dialect:: mysql+pyodbc
:name: PyODBC
:dbapi: pyodbc
:connectstring: mysql+pyodbc://<username>:<password>@<dsnname>
:url: https://pypi.org/project/pyodbc/
.. note::
The PyODBC for MySQL dialect is **not tested as part of
SQLAlchemy's continuous integration**.
The recommended MySQL dialects are mysqlclient and PyMySQL.
However, if you want to use the mysql+pyodbc dialect and require
full support for ``utf8mb4`` characters (including supplementary
characters like emoji) be sure to use a current release of
MySQL Connector/ODBC and specify the "ANSI" (**not** "Unicode")
version of the driver in your DSN or connection string.
Pass through exact pyodbc connection string::
import urllib
connection_string = (
'DRIVER=MySQL ODBC 8.0 ANSI Driver;'
'SERVER=localhost;'
'PORT=3307;'
'DATABASE=mydb;'
'UID=root;'
'PWD=(whatever);'
'charset=utf8mb4;'
)
params = urllib.parse.quote_plus(connection_string)
connection_uri = "mysql+pyodbc:///?odbc_connect=%s" % params
""" # noqa
import re
import sys
from .base import MySQLDialect
from .base import MySQLExecutionContext
from .types import TIME
from ... import util
from ...connectors.pyodbc import PyODBCConnector
from ...sql.sqltypes import Time
class _pyodbcTIME(TIME):
    """TIME variant for pyodbc, which already returns datetime.time."""
    def result_processor(self, dialect, coltype):
        # pyodbc hands back a datetime.time object directly, so the row
        # value can simply be passed through unchanged.
        def _passthrough(value):
            return value
        return _passthrough
class MySQLExecutionContext_pyodbc(MySQLExecutionContext):
    def get_lastrowid(self):
        """Return MySQL's LAST_INSERT_ID() for the current connection.

        Uses a throwaway cursor; closed in a finally block so it is not
        leaked if the SELECT raises (the original closed it only on the
        success path).
        """
        cursor = self.create_cursor()
        try:
            cursor.execute("SELECT LAST_INSERT_ID()")
            lastrowid = cursor.fetchone()[0]
        finally:
            cursor.close()
        return lastrowid
class MySQLDialect_pyodbc(PyODBCConnector, MySQLDialect):
    """MySQL dialect speaking ODBC through the pyodbc DBAPI."""

    supports_statement_cache = True
    colspecs = util.update_copy(MySQLDialect.colspecs, {Time: _pyodbcTIME})
    supports_unicode_statements = True
    execution_ctx_cls = MySQLExecutionContext_pyodbc
    pyodbc_driver_name = "MySQL"

    def _detect_charset(self, connection):
        """Sniff out the character set in use for connection results."""
        # Prefer 'character_set_results' for the current connection over the
        # value in the driver.  SET NAMES or individual variable SETs will
        # change the charset without updating the driver's view of the world.
        #
        # If it's decided that issuing that sort of SQL leaves you SOL, then
        # this can prefer the driver value.
        rs = connection.exec_driver_sql(
            "SHOW VARIABLES LIKE 'character_set%%'"
        )
        opts = {row[0]: row[1] for row in self._compat_fetchall(rs)}
        for key in ("character_set_connection", "character_set"):
            if opts.get(key, None):
                return opts[key]
        util.warn(
            "Could not detect the connection character set. "
            "Assuming latin1."
        )
        return "latin1"

    def _extract_error_code(self, exception):
        """Extract the numeric MySQL error code from a driver exception.

        Returns the code as an int, or None when no "(NNNN)" pattern is
        present in the exception arguments.
        """
        m = re.compile(r"\((\d+)\)").search(str(exception.args))
        # Bug fix: search() returns None when the pattern does not match;
        # the previous code called m.group(1) unconditionally, raising
        # AttributeError instead of returning None.
        if m is None:
            return None
        return int(m.group(1))

    def on_connect(self):
        """Wrap the parent on_connect to configure pyodbc Unicode handling."""
        super_ = super(MySQLDialect_pyodbc, self).on_connect()

        def on_connect(conn):
            if super_ is not None:
                super_(conn)
            # declare Unicode encoding for pyodbc as per
            # https://github.com/mkleehammer/pyodbc/wiki/Unicode
            pyodbc_SQL_CHAR = 1  # pyodbc.SQL_CHAR
            pyodbc_SQL_WCHAR = -8  # pyodbc.SQL_WCHAR
            if sys.version_info.major > 2:
                conn.setdecoding(pyodbc_SQL_CHAR, encoding="utf-8")
                conn.setdecoding(pyodbc_SQL_WCHAR, encoding="utf-8")
                conn.setencoding(encoding="utf-8")
            else:
                conn.setdecoding(pyodbc_SQL_CHAR, encoding="utf-8")
                conn.setdecoding(pyodbc_SQL_WCHAR, encoding="utf-8")
                conn.setencoding(str, encoding="utf-8")
                conn.setencoding(unicode, encoding="utf-8")  # noqa: F821

        return on_connect


dialect = MySQLDialect_pyodbc
|
"""
Functional test
Deletion Epic
Storyboard is defined within the comments of the program itself
"""
import unittest
from flask import url_for
from biblib.tests.stubdata.stub_data import UserShop, LibraryShop
from biblib.tests.base import TestCaseDatabase, MockEmailService
class TestDeletionEpic(TestCaseDatabase):
    """
    Base class used to test the Deletion Epic
    """
    def test_deletion_epic(self):
        """
        Carries out the epic 'Deletion', where a user wants to delete their
        libraries that they have created
        :return: no return
        """
        # The librarian makes
        # 1. two different libraries on her account
        # 2. decides she wants to delete one
        # 3. decides she wants to delete the next one too
        # She then checks that they were deleted
        # Load stub data: one user and two distinct libraries
        stub_user = UserShop()
        stub_library_1 = LibraryShop()
        stub_library_2 = LibraryShop()
        # Creates the first library
        url = url_for('userview')
        response = self.client.post(
            url,
            data=stub_library_1.user_view_post_data_json,
            headers=stub_user.headers
        )
        library_name_1 = response.json['name']
        self.assertEqual(response.status_code, 200, response)
        self.assertTrue('name' in response.json)
        self.assertTrue(library_name_1 == stub_library_1.name)
        # Creates the second library
        url = url_for('userview')
        response = self.client.post(
            url,
            data=stub_library_2.user_view_post_data_json,
            headers=stub_user.headers
        )
        library_name_2 = response.json['name']
        self.assertEqual(response.status_code, 200, response)
        self.assertTrue('name' in response.json)
        self.assertTrue(library_name_2 == stub_library_2.name)
        # Check the two libraries are not the same
        self.assertNotEqual(library_name_1,
                            library_name_2,
                            'Name should be unique: {0} == {1}'
                            .format(library_name_1, library_name_2))
        # Fetches the user's libraries so their ids are known
        url = url_for('userview')
        with MockEmailService(stub_user, end_type='uid'):
            response = self.client.get(
                url,
                headers=stub_user.headers
            )
        self.assertTrue(len(response.json['libraries']) == 2)
        library_id_1 = response.json['libraries'][0]['id']
        library_id_2 = response.json['libraries'][1]['id']
        # Deletes the second library
        url = url_for('documentview', library=library_id_2)
        response = self.client.delete(
            url,
            headers=stub_user.headers
        )
        self.assertEqual(response.status_code, 200)
        # Checks that only one library remains
        url = url_for('userview')
        with MockEmailService(stub_user, end_type='uid'):
            response = self.client.get(
                url,
                headers=stub_user.headers
            )
        self.assertTrue(len(response.json['libraries']) == 1)
        # Deletes the first library
        url = url_for('documentview', library=library_id_1)
        response = self.client.delete(
            url,
            headers=stub_user.headers
        )
        self.assertEqual(response.status_code, 200)
        # Checks that no libraries remain
        url = url_for('userview')
        with MockEmailService(stub_user, end_type='uid'):
            response = self.client.get(
                url,
                headers=stub_user.headers
            )
        self.assertTrue(len(response.json['libraries']) == 0)
# Allow running this module directly as a test script
if __name__ == '__main__':
    unittest.main(verbosity=2)
|
import sys

# Pick the native text type: `unicode` on Python 2, `str` on Python 3.
if sys.version_info < (3,):
    str_cls = unicode  # noqa: F821 - only evaluated on Python 2
else:
    str_cls = str
class NonCleanExitError(Exception):
    """
    When an subprocess does not exit cleanly
    :param returncode:
        The command line integer return code of the subprocess
    """

    def __init__(self, returncode):
        self.returncode = returncode

    def __unicode__(self):
        # Render the return code with the platform's text type.
        return str_cls(self.returncode)

    def __str__(self):
        # Python 2 expects bytes from __str__; Python 3 expects text.
        if sys.version_info >= (3,):
            return self.__unicode__()
        return self.__bytes__()

    def __bytes__(self):
        return self.__unicode__().encode('utf-8')
|
from __future__ import unicode_literals
from .. import Provider as BaseProvider
class Provider(BaseProvider):
    """Faker job provider for the Persian (fa_IR) locale."""
    # Persian job titles sampled uniformly by the base provider's job().
    jobs = [
        "هنرپیشه",
        "ناخدا",
        "بخشدار",
        "خیاط",
        "گلهدار",
        "باغدار",
        "مؤذن",
        "ساربان",
        "آشپز",
        "دندانپزشک",
        "نجار",
        "چوپان",
        "خانهدار",
        "شورا",
        "نویسنده",
        "گارسون",
        "استاد",
        "فروشنده",
        "شیشهساز",
        "مدیر",
        "نقاش ساختمان",
        "قایقران",
        "رفتگر",
        "وزیر",
        "خلبان",
        "آرایشگر",
        "روحانی",
        "متخصص",
        "فوتبالیست",
        "قصاب",
        "ساعتساز",
        "بقال",
        "تلفنچی",
        "تاجر",
        "عینکساز",
        "خوشنویس",
        "جنگلبان",
        "معلم",
        "مهندس",
        "راننده",
        "آذین گر",
        "نظامی",
        "نانوا",
        "فرماندار",
        "دانشآموز",
        "دانشجو",
        "تعمیرکار",
        "کشاورز",
        "هنرمند",
        "معاون",
        "بانکدار",
        "آهنگر",
        "رئیس",
        "سرتیپ",
        "سرایدار",
        "کارمند",
        "مربی",
        "سرهنگ",
        "غواص",
        "پزشک",
        "دربان",
        "آتشنشان",
        "ماهیگیر",
        "میوهفروش",
        "نگهبان",
        "پاسدار",
        "قاضی",
        "وکیل",
        "کارگر",
        "شهردار",
        "معدنچی",
        "پرستار",
        "افسر",
        "عکاس",
        "لولهکش",
        "بازیگر",
        "باربر",
        "رئیسجمهور",
        "نخستوزیر",
        "روانشناس",
        "خبرنگار",
        "بازنشسته",
        "مجسمهساز",
        "گروهبان",
        "مغازهدار",
        "خواننده",
        "سرباز",
        "سخنران",
        "جراح",
        "سفالگر",
        "جهانگرد",
        "جوشکار",
        "چشمپزشک",
        "گزارشگر",
        "خطاط",
    ]
|
from granoclient import Grano
# Quick exploration script (Python 2: print statements) that lists the
# schemata and entities of the 'openinterests' grano project.
# NOTE(review): the API key is hard-coded below -- move it to an
# environment variable or config file before sharing this script.
client = Grano(api_key='7a8badec6052a81d5')
project = client.get('openinterests')
for schema in project.schemata:
    # Example of how a schema could be relabelled and saved, kept for reference:
    #if schema.name == 'person':
    #    schema.label = 'Natural person'
    #    schema.save()
    print schema.label
for entity in project.entities:
    print entity
|
# Scrapy project settings for the go_fun_with_tiger crawler.
BOT_NAME = 'go_fun_with_tiger'
SPIDER_MODULES = ['go_fun_with_tiger.spiders']
NEWSPIDER_MODULE = 'go_fun_with_tiger.spiders'
# Enable Scrapy's built-in image pipeline so spiders can download images.
ITEM_PIPELINES = {'scrapy.pipelines.images.ImagesPipeline': 1}
# NOTE(review): absolute path at the filesystem root -- confirm this is
# really the intended image download directory.
IMAGES_STORE = '/Calvin_and_Hobbes'
|
from conans.model.ref import PackageReference, ConanFileReference
import os
from conans.util.files import rmdir
import shutil
from conans.errors import ConanException
from conans.client.loader_parse import load_conanfile_class
from conans.client.proxy import ConanProxy
def _prepare_sources(client_cache, user_io, remote_manager, reference):
    """Ensure the recipe sources for `reference` are available locally.

    Loads the conanfile class to read its short_paths attribute, asks the
    remote proxy to fetch any missing recipe sources, and returns the
    short_paths flag so callers can resolve source directories.
    """
    remote_proxy = ConanProxy(client_cache, user_io, remote_manager, None)
    conan_file_path = client_cache.conanfile(reference)
    conanfile = load_conanfile_class(conan_file_path)
    remote_proxy.complete_recipe_sources(conanfile, reference,
                                         short_paths=conanfile.short_paths)
    return conanfile.short_paths
def _get_package_ids(client_cache, reference, package_ids):
if not package_ids:
return []
if package_ids is True:
packages = client_cache.packages(reference)
if os.path.exists(packages):
package_ids = os.listdir(packages)
else:
package_ids = []
return package_ids
def cmd_copy(reference, user_channel, package_ids, client_cache, user_io, remote_manager,
             force=False):
    """Copy a recipe (and optionally binaries) to another user/channel.

    param package_ids: Falsey=do not copy binaries. True=All existing. []=list of ids
    """
    # Parse the source reference and make sure its sources exist locally.
    src_ref = ConanFileReference.loads(reference)
    short_paths = _prepare_sources(client_cache, user_io, remote_manager, src_ref)
    # Resolve which binary package ids (if any) should be copied too.
    package_ids = _get_package_ids(client_cache, src_ref, package_ids)
    package_copy(src_ref, user_channel, package_ids, client_cache, user_io,
                 short_paths, force)
def package_copy(src_ref, user_channel, package_ids, paths, user_io,
                 short_paths=False, force=False):
    """Copy the export, export-sources and selected binary packages of
    src_ref into a new reference under user_channel.

    Prompts before overwriting existing destinations unless force=True.
    """
    dest_ref = ConanFileReference.loads("%s/%s@%s" % (src_ref.name,
                                                      src_ref.version,
                                                      user_channel))
    # Copy export
    export_origin = paths.export(src_ref)
    if not os.path.exists(export_origin):
        raise ConanException("'%s' doesn't exist" % str(src_ref))
    export_dest = paths.export(dest_ref)
    if os.path.exists(export_dest):
        # Ask before clobbering an existing destination export
        if not force and not user_io.request_boolean("'%s' already exist. Override?"
                                                     % str(dest_ref)):
            return
        rmdir(export_dest)
    shutil.copytree(export_origin, export_dest, symlinks=True)
    user_io.out.info("Copied %s to %s" % (str(src_ref), str(dest_ref)))
    # Copy export sources (overwritten unconditionally once export was accepted)
    export_sources_origin = paths.export_sources(src_ref, short_paths)
    export_sources_dest = paths.export_sources(dest_ref, short_paths)
    if os.path.exists(export_sources_dest):
        rmdir(export_sources_dest)
    shutil.copytree(export_sources_origin, export_sources_dest, symlinks=True)
    user_io.out.info("Copied sources %s to %s" % (str(src_ref), str(dest_ref)))
    # Copy packages
    for package_id in package_ids:
        package_origin = PackageReference(src_ref, package_id)
        package_dest = PackageReference(dest_ref, package_id)
        package_path_origin = paths.package(package_origin, short_paths)
        package_path_dest = paths.package(package_dest, short_paths)
        if os.path.exists(package_path_dest):
            # Per-package confirmation; skipping one does not abort the rest
            if not force and not user_io.request_boolean("Package '%s' already exist."
                                                         " Override?" % str(package_id)):
                continue
            rmdir(package_path_dest)
        shutil.copytree(package_path_origin, package_path_dest, symlinks=True)
        user_io.out.info("Copied %s to %s" % (str(package_id), str(dest_ref)))
|
from swgpy.object import *
def create(kernel):
    """Build the tangible object for the base skirt wearable template."""
    obj = Tangible()
    obj.template = "object/tangible/wearables/base/shared_base_skirt.iff"
    obj.attribute_template_id = 11
    obj.stfName("wearables_name", "skirt_s03")
    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####
    return obj
|
"""D3D retracer generator."""
import sys
from dllretrace import DllRetracer as Retracer
from specs.stdapi import API
from specs.d3d import ddraw, HWND
from specs.ddraw import DDCREATE_LPGUID
class D3DRetracer(Retracer):
    """Code generator emitting the C++ retracer for the ddraw/D3D7 API.

    All print() calls write C++ source to stdout; the emitted strings must
    stay byte-exact since they are compiled downstream.
    """
    def retraceApi(self, api):
        # Emit module-level state shared by the generated callbacks.
        print('// Swizzling mapping for lock addresses')
        print('static std::map<void *, void *> _maps;')
        print()
        # TODO: Keep a table of windows
        print('static HWND g_hWnd;')
        print()
        Retracer.retraceApi(self, api)
    def invokeInterfaceMethod(self, interface, method):
        """Emit the C++ body that replays one interface method call."""
        # keep track of the last used device for state dumping
        if interface.name in ('IDirect3DDevice7',):
            if method.name == 'Release':
                print(r'    if (call.ret->toUInt() == 0) {')
                print(r'        d3d7Dumper.unbindDevice(_this);')
                print(r'    }')
            else:
                print(r'    d3d7Dumper.bindDevice(_this);')
        # create windows as neccessary
        hWndArg = method.getArgByType(HWND)
        if hWndArg is not None:
            # FIXME: Try to guess the window size (e.g., from IDirectDrawSurface7::Blt)
            print(r'    if (!g_hWnd) {')
            print(r'        g_hWnd = d3dretrace::createWindow(512, 512);')
            print(r'    }')
            print(r'    %s = g_hWnd;' % hWndArg.name)
        if method.name == 'Lock':
            # Reset _DONOTWAIT flags. Otherwise they may fail, and we have no
            # way to cope with it (other than retry).
            mapFlagsArg = method.getArgByName('dwFlags')
            if mapFlagsArg is not None:
                print(r'    dwFlags &= ~DDLOCK_DONOTWAIT;')
                print(r'    dwFlags |= DDLOCK_WAIT;')
        Retracer.invokeInterfaceMethod(self, interface, method)
        if method.name == 'CreateDevice':
            # Device creation failure is unrecoverable for the retrace.
            print(r'    if (FAILED(_result)) {')
            print(r'        exit(1);')
            print(r'    }')
        # notify frame has been completed
        # process events after presents
        if interface.name == 'IDirectDrawSurface7' and method.name == 'Blt':
            print(r'    DDSCAPS2 ddsCaps;')
            print(r'    if (SUCCEEDED(_this->GetCaps(&ddsCaps)) &&')
            print(r'        (ddsCaps.dwCaps & DDSCAPS_PRIMARYSURFACE)) {')
            print(r'        retrace::frameComplete(call);')
            print(r'        d3dretrace::processEvents();')
            print(r'    }')
        if method.name == 'Lock':
            # Remember the locked pointer so Unlock can release the region.
            print('    VOID *_pbData = NULL;')
            print('    size_t _MappedSize = 0;')
            # FIXME: determine the mapping size
            #print '    _getMapInfo(_this, %s, _pbData, _MappedSize);' % ', '.join(method.argNames()[:-1])
            print('    if (_MappedSize) {')
            print('        _maps[_this] = _pbData;')
            # TODO: check pitches match
            print('    } else {')
            print('        return;')
            print('    }')
        if method.name == 'Unlock':
            print('    VOID *_pbData = 0;')
            print('    _pbData = _maps[_this];')
            print('    if (_pbData) {')
            print('        retrace::delRegionByPointer(_pbData);')
            print('        _maps[_this] = 0;')
            print('    }')
    def extractArg(self, function, arg, arg_type, lvalue, rvalue):
        """Emit argument extraction; DDCREATE_* GUID flags need a cast."""
        # Handle DDCREATE_* flags
        if arg.type is DDCREATE_LPGUID:
            print('    if (%s.toArray()) {' % rvalue)
            Retracer.extractArg(self, function, arg, arg_type, lvalue, rvalue)
            print('    } else {')
            print('        %s = static_cast<%s>(%s.toPointer());' % (lvalue, arg_type, rvalue))
            print('    }')
            return
        Retracer.extractArg(self, function, arg, arg_type, lvalue, rvalue)
def main():
    """Emit the complete C++ source of the ddraw retracer to stdout."""
    print(r'#include <string.h>')
    print()
    print(r'#include <iostream>')
    print()
    print(r'#include "d3dretrace.hpp"')
    print()
    api = API()
    print(r'#include "d3dimports.hpp"')
    api.addModule(ddraw)
    print()
    print('''static d3dretrace::D3DDumper<IDirect3DDevice7> d3d7Dumper;''')
    print()
    retracer = D3DRetracer()
    retracer.table_name = 'd3dretrace::ddraw_callbacks'
    retracer.retraceApi(api)
# Generator entry point when run as a script
if __name__ == '__main__':
    main()
|
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test the ability to configure the $GSCOM construction variable.
"""
import TestSCons
_python_ = TestSCons._python_
test = TestSCons.TestSCons()
test.write('mygs.py', """
import sys
outfile = open(sys.argv[1], 'wb')
for f in sys.argv[2:]:
infile = open(f, 'rb')
for l in filter(lambda l: l != '/*gs*/\\n', infile.readlines()):
outfile.write(l)
sys.exit(0)
""")
test.write('SConstruct', """
env = Environment(tools=['default', 'gs'],
GSCOM = r'%(_python_)s mygs.py $TARGET $SOURCES')
env.PDF(target = 'aaa', source = 'aaa.ps')
""" % locals())
test.write('aaa.ps', "aaa.ps\n/*gs*/\n")
test.run(arguments = '.')
test.must_match('aaa.pdf', "aaa.ps\n")
test.pass_test()
|
import os, sys, argparse
import numpy as np
from tabulate import tabulate
import moving
from cvguipy import cvgui, trajstorage
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="A script to compute CLEAR MOT tracking performance metrics.")
    parser.add_argument('databaseFilename', nargs='+', help="Name(s) of the database file(s) containing trajectories to evaluate.")
    parser.add_argument('-o', '--homography-file', dest='homography', help="Name of the homography file.", required = True)
    parser.add_argument('-a', '--annotation-file', dest='annotationFile', help="Name of the database file containing annotations.", required=True)
    parser.add_argument('-t', '--annotation-table', dest='annotationTable', help="Name of the table in the annotation database. If not specified, the latest table is used.")
    parser.add_argument('-m', '--matching-distance', dest='matchDistance', type=float, default=10, help="Matching distance for computing tracking performance.")
    args = parser.parse_args()
    # open the annotation database and load the homography
    adb = trajstorage.CVsqlite(args.annotationFile)
    hom = np.loadtxt(args.homography)
    # NOTE This method of inverting the homography leaves out the conversion
    # from homogeneous coordinates (i.e. dividing by w), but it matches
    # TrafficIntelligence. It works, but should look into it sometime.
    invHom = np.linalg.inv(hom)
    if args.annotationTable is None:
        # if no table name specified, get the latest annotations in the database
        adb.getLatestAnnotation()
        annotationTable = adb.latestannotations
    else:
        # if they did give a table name, make sure it exists
        annotationTable = args.annotationTable
        if not adb.hasTable(annotationTable):
            print("Table '{}' does not exist! Exiting!".format(annotationTable))
            sys.exit(1)
    # build the bounding box table for the annotations and load them, computing
    # the centroid trajectory too
    print("Using annotations in table {} ...".format(annotationTable))
    adb.createBoundingBoxTable(annotationTable, invHom)
    adb.loadAnnotations()
    for a in adb.annotations:
        a.computeCentroidTrajectory(hom)
    # get the first and last frame numbers of the annotations
    frameNums = adb.getFrameList()
    firstFrame = frameNums[0]
    lastFrame = frameNums[-1]
    # loop over the databases and compute performance
    clearMOT = []
    for dbf in args.databaseFilename:
        # open the database and load the trajectories
        print("Loading objects from database {} ...".format(dbf))
        db = trajstorage.CVsqlite(dbf)
        db.loadObjects()
        # compute CLEAR MOT metrics
        motp, mota, mt, mme, fpt, gt = moving.computeClearMOT(adb.annotations, db.objects, args.matchDistance, firstFrame, lastFrame)
        # store results in a list of lists (one row per evaluated database)
        clearMOT.append([dbf, mota, motp, mt, mme, fpt, gt])
    # print the results in a table
    heads = ['File',
             'Accuracy',
             'Precision',
             'Missed GT Frames',
             'Mismatches',
             'False Alarm Frames',
             'GT Frames']
    print()
    print(tabulate(clearMOT, headers=heads, tablefmt='grid'))
    # TODO do we want to plot the results? or is the table enough?
    sys.exit(0)
|
from django.test import TestCase
from django.contrib.auth.models import User
from pinax.apps.tasks.models import Task, TaskHistory, Nudge
class TestTask(TestCase):
    """Tests for Task state transitions and denudging."""

    fixtures = ["test_tasks.json"]

    def setUp(self):
        # Two tasks and two users loaded from the fixture; the expected
        # nudge counts below must match the fixture contents.
        self.task = Task.objects.get(pk__exact=1)
        self.other_task = Task.objects.get(pk__exact=2)
        self.user_admin = User.objects.get(username__exact="admin")
        self.user_joe = User.objects.get(username__exact="joe")
        self.task_nudge_count = 2
        self.other_task_nudge_count = 1

    def tearDown(self):
        pass

    def test_allowable_states(self):
        """
        Doing some simple assertions based off states
        """
        # NOTE: assertEquals is a deprecated alias (removed in Python 3.12);
        # assertEqual is the supported spelling and behaves identically.
        # TODO: Add more state checks
        # Task is just created
        states = self.task.allowable_states(self.user_admin)
        self.assertEqual(states,
                         [("1", "leave open")])
        # Now we assign it. This is what the assignee sees
        self.task.assignee = self.user_joe
        self.task.save()
        states = self.task.allowable_states(self.user_joe)
        self.assertEqual(states, [("1", "leave open")])
        # this is what the creator sees
        states = self.task.allowable_states(self.user_admin)
        self.assertEqual(states, [("1", "leave open")])
        # Task is now moved to in-progress. this is what the assignee can see.
        self.task.state = "4"
        self.task.save()
        states = self.task.allowable_states(self.user_joe)
        self.assertEqual(states, [
            ("4", "still in progress"),
            ("5", "discussion needed"),
            ("8", "fix needs review")
        ])

    def test_denudge(self):
        """
        We check that:
        1. We have nudges across multiple tasks.
        2. After denudging the task, the task has no more nudges
        3. After denudging the task, the other task still has nudges
        """
        # we have nudges across multiple tasks including our sample task
        self.assertEqual(len(self.task.task_nudge.all()), self.task_nudge_count)
        self.assertEqual(len(self.other_task.task_nudge.all()), self.other_task_nudge_count)
        # now we denudge our task
        self.task.denudge()
        # Our task should have no nudges
        self.assertEqual(len(self.task.task_nudge.all()), 0)
        # The other task should have its original number of nudges
        self.assertEqual(len(self.other_task.task_nudge.all()), self.other_task_nudge_count)
class TestTaskHistory(TestCase):
    """Tests that TaskHistory records who made each change."""

    fixtures = ["test_tasks.json"]

    def setUp(self):
        self.task = Task.objects.get(pk__exact=1)
        self.user_admin = User.objects.get(username__exact="admin")
        self.user_joe = User.objects.get(username__exact="joe")

    def tearDown(self):
        pass

    def test_history(self):
        """
        let's see if history tracks user changes if done against the task
        """
        # NOTE: assertEquals is a deprecated alias (removed in Python 3.12);
        # assertEqual is the supported spelling and behaves identically.
        # we have admin assign joe to the task.
        self.task.assignee = self.user_joe
        self.task.save()
        self.task.save_history(change_owner=self.user_admin)
        # fetch the history
        history = self.task.history_task.all()[0]
        # The task assignee should be joe
        self.assertEqual(history.assignee, self.user_joe)
        # the person who made the change was admin
        self.assertEqual(history.owner, self.user_admin)

    def test_change_history_by_non_creator(self):
        """
        In CPC task 173 non-comment changes by users besides the task
        creator don't save the current user. This checks to see that the
        history is changed accurately.
        """
        # we have joe assign himself to a task that admin created
        self.task.assignee = self.user_joe
        self.task.save()
        self.task.save_history(change_owner=self.user_joe)
        # fetch the history
        history = self.task.history_task.all()[0]
        # the person who made the change was joe
        self.assertEqual(history.owner, self.user_joe)
|
from __future__ import division
from decimal import Decimal
import yaml
import datetime
from flask import (current_app, request, render_template, Blueprint, jsonify,
g, session, Response, abort)
from flask.ext.babel import gettext
from .models import (Block, ShareSlice, UserSettings, make_upper_lower, Credit,
Payout, DeviceSlice, Transaction)
from . import db, root, cache, currencies, algos, locations, babel
from .exceptions import InvalidAddressException
from .utils import (verify_message, collect_user_stats, get_pool_hashrate,
get_alerts, resort_recent_visit, CommandException,
anon_users, collect_pool_stats, get_past_chain_profit,
orphan_percentage, pool_share_tracker)
# Blueprint holding every public-facing route of the pool frontend.
main = Blueprint('main', __name__)
@main.route("/")
def home():
    """Render the landing page with payout options and chain profit."""
    ctx = {
        'payout_currencies': currencies.buyable_currencies,
        'past_chain_profit': get_past_chain_profit(),
        'locations': locations,
        'default_curr': current_app.config['pool_payout_currency'],
    }
    return render_template('home.html', **ctx)
@main.route("/configuration_guide")
def configuration_guide():
    """Render the miner configuration guide page."""
    ctx = {
        'payout_currencies': currencies.buyable_currencies,
        'locations': locations,
        'past_chain_profit': get_past_chain_profit(),
        'default_curr': current_app.config['pool_payout_currency'],
    }
    return render_template('config_guide_wrapper.html', **ctx)
@main.route("/faq")
def faq():
    """Render the FAQ page from the bundled YAML file."""
    # Bug fix: use a context manager so the file handle is closed, and
    # safe_load because yaml.load without an explicit Loader is unsafe
    # and deprecated.
    with open(root + '/static/yaml/faq.yaml') as f:
        faq = yaml.safe_load(f)
    return render_template('faq.html', faq=faq)
@main.route("/news")
def news():
    """Render the news page from the bundled YAML file."""
    # Bug fix: close the file via a context manager and use safe_load
    # instead of the unsafe/deprecated bare yaml.load.
    with open(root + '/static/yaml/news.yaml') as f:
        news = yaml.safe_load(f)
    return render_template('news.html', news=news)
@main.route("/merge_blocks", defaults={"q": Block.merged == True})
@main.route("/blocks", defaults={"q": Block.merged == False})
@main.route("/blocks/<currency>")
def blocks(q=None, currency=None):
    """List found blocks, newest first, 100 per page.

    q: SQLAlchemy filter injected via route defaults to split merge-mined
       from regular blocks; None on the per-currency route.
    currency: optional currency key; when it maps to a known algo, extra
       orphan-percentage and share-tracker stats are computed.
    """
    page = int(request.args.get('page', 0))
    if page < 0:
        page = 0
    offset = page * 100
    blocks = Block.query.order_by(Block.found_at.desc())
    if q is not None:
        blocks = blocks.filter(q)
    currency_data = None
    try:
        algo = currencies[currency].algo
    except KeyError:
        algo = None
    if currency and algo is not None:
        # Gather orphan % and share stats over 1, 7 and 30 day windows
        currency_data = {}
        for i in [1, 7, 30]:
            td = datetime.timedelta(days=i)
            orphan_perc = orphan_percentage(currency, timedelta=td)
            share_tracker = pool_share_tracker(
                algo.key,
                user=("pool_currency", ),
                worker=(currency, ),
                timedelta=td)
            currency_data['{} days'.format(i)] = [orphan_perc, share_tracker]
        blocks = blocks.filter_by(currency=currency)
    elif currency:
        # Unknown currency key: render an empty listing
        blocks = []
    if blocks:
        blocks = blocks.offset(offset).limit(100)
    return render_template('blocks.html', blocks=blocks, page=page,
                           currency=currency,
                           currency_data=currency_data)
@main.route("/networks")
def networks():
    """ A page to display current information about each of the networks we are
    mining on. """
    network_data = {}
    for curr in currencies.itervalues():
        # Only show networks that have fresh cached data available.
        cached = cache.get("{}_data".format(curr.key))
        if cached:
            network_data[curr] = cached
    return render_template('networks.html', network_data=network_data)
@main.route("/leaderboard")
def leaderboard():
    """Show the precomputed leaderboard (a dict keyed by username)."""
    # One tab per active algorithm, plus an aggregate "Normalized" view.
    algos_disp = [(a.display, a.key) for a in algos.active_algos()]
    algos_disp.append(('Normalized', 'normalized'))
    # The leaderboard is built by a background job; fall back to empty.
    # (Removed a dead `users = {}` assignment that was immediately
    # overwritten by this cache lookup.)
    users = cache.get("leaderboard") or {}
    return render_template('leaderboard.html', users=users, algos=algos_disp)
@main.route("/<user_address>/account", defaults={'type': 'payout'})
@main.route("/<user_address>/aggr_account", defaults={'type': 'credit'})
def account(user_address, type):
    """Show a user's payout or credit history, 100 rows per page.

    The `type` value is injected by the route defaults: /account lists
    payouts, /aggr_account lists per-block credits.
    """
    page = int(request.args.get('page', 0))
    if page < 0:
        page = 0
    offset = page * 100
    if type == "payout":
        payouts = (Payout.query.filter_by(user=user_address).
                   order_by(Payout.created_at.desc()).limit(100).offset(offset))
        return render_template('account.html', payouts=payouts, page=page,
                               table="payout_table.html")
    else:
        credits = (Credit.query.filter_by(user=user_address).join(Credit.block).
                   order_by(Block.found_at.desc()).limit(100).offset(offset))
        return render_template('account.html', credits=credits, page=page,
                               table="credit_table.html")
@main.route("/transaction/<txid>")
def transaction_detail(txid):
    """Render the detail page for one payout transaction (None if unknown)."""
    transaction = Transaction.query.filter_by(txid=txid).first()
    return render_template('transaction_details.html', tx=transaction)
@main.route("/block/<blockhash>")
def block_detail(blockhash):
    """Render the detail page for a found block, looked up by its hash."""
    found = db.session.query(Block).filter_by(hash=blockhash).first()
    return render_template('block_details.html', block=found)
@main.route("/<address>/<worker>")
def worker_detail(address, worker):
    """Render the stats page for a single worker of a user address."""
    return render_template('worker_detail.html', username=address, worker=worker)
@main.route("/pool_stats")
def pool_stats():
    """Render aggregate pool statistics gathered by collect_pool_stats()."""
    stats = collect_pool_stats()
    return render_template('pool_stats.html', **stats)
@main.route("/pool_stats/block_tabs/<string:algo>")
def block_stats_tab(algo):
    # Persist which block-stats tab (algo) the user last selected.
    session['block_stats_tab'] = algo
    return Response('success')
@babel.localeselector
def get_locale():
    """Select the session language, honoring an explicit ?lang= override.

    Falls back to the best Accept-Language match when no language is
    stored in the session yet; always returns session['lang'].
    """
    new_language = request.args.get('lang')
    if new_language:
        if str(new_language) in current_app.config['available_locales'].keys():
            session['lang'] = new_language
        else:
            # Add an alert if unable to match the passed in locale
            alert = {'severity': 'danger', 'key': -1, 'date': 'Check your URL',
                     'title': 'Currency code not matched!', 'notify': 'all'}
            # Bug fix: list.append returns None, so the old one-liner
            # (`g.alerts = [alert] if not g.alerts else g.alerts.append(alert)`)
            # wiped g.alerts whenever it already held entries.
            if g.alerts:
                g.alerts.append(alert)
            else:
                g.alerts = [alert]
    elif 'lang' not in session:
        locales = current_app.config['available_locales'].keys()
        session['lang'] = request.accept_languages.best_match(locales)
    return session['lang']
@main.before_request
def add_pool_stats():
    """Populate flask.g with the per-request stats most templates need."""
    session.permanent = True
    g.algos = {k: v for k, v in algos.iteritems() if v.enabled is True}
    g.hashrates = {a: get_pool_hashrate(a) for a in g.algos}
    # Dictionary keyed by algo
    g.miner_count = cache.get('total_miners') or {}
    g.anon_users = anon_users()
    # Get alerts.  Bug fix: the old one-liner assigned the result of
    # list.append (always None) back to g.alerts whenever alerts already
    # existed, destroying them.
    yaml_alerts = get_alerts()
    if 'alerts' in g:
        # NOTE(review): assuming get_alerts() returns a list of alert
        # dicts; extend keeps g.alerts a flat list -- confirm with utils.
        g.alerts.extend(yaml_alerts)
    else:
        g.alerts = yaml_alerts
    get_locale()
@main.route("/close/<int:id>")
def close_alert(id):
    """Record an alert id as dismissed in the visitor's session."""
    dismissed = session.get('dismissed_alerts', [])
    session['dismissed_alerts'] = dismissed + [id]
    return Response('success')
@main.route("/stats")
def user_stats():
    # Landing page where a visitor enters an address to look up stats.
    return render_template('stats.html')
@main.route("/exc_test")
def exception():
    """Deliberately raise to exercise the 500 handler and error logging."""
    current_app.logger.warn("Exception test!")
    raise Exception()
    return ""  # unreachable by design; kept so the view body has a return
@main.route("/stats/<user_address>")
def user_dashboard(user_address):
    """Render the per-user stats dashboard, tracking recently viewed users."""
    # Do some checking to make sure the address is valid + payable
    try:
        currencies.lookup_payable_addr(user_address)
    except Exception:
        return render_template(
            'invalid_address.html',
            allowed_currencies=currencies.buyable_currencies)
    stats = collect_user_stats(user_address)
    # reorganize/create the recently viewed; counts how often each address
    # was opened this session so the list can be sorted by popularity
    recent = session.get('recent_user_counts', {})
    recent.setdefault(user_address, 0)
    recent[user_address] += 1
    session['recent_user_counts'] = recent
    resort_recent_visit(recent)
    return render_template('user_stats.html',
                           username=user_address,
                           **stats)
@main.route("/<user_address>/clear")
def address_clear(user_address=None):
    """Remove an address from the recently-viewed list; return what's left."""
    # Bug fix: the session key written by user_dashboard is
    # 'recent_user_counts'; the old code read 'recent_users_counts' (so the
    # delete never matched) and returned session['recent_users'] (KeyError).
    recent = session.get('recent_user_counts', {})
    try:
        del recent[user_address]
    except KeyError:
        pass
    # Write the pruned dict back so the change persists in the session.
    session['recent_user_counts'] = recent
    resort_recent_visit(recent)
    return jsonify(recent=session['recent_user_counts'])
@main.route("/api/<typ>")
def address_stats(typ):
    """JSON time-series API for share rates ("shares") or device stats.

    Query args: address (required, comma separated), worker, algos,
    share_types, stat (devices only), span (index into the slice config).
    Returns stamped series plus a display scale chosen from the peak value.
    """
    kwargs = {'worker': tuple()}
    kwargs['user'] = request.args['address'].split(",")
    if 'worker' in request.args:
        kwargs['worker'] = request.args["worker"].split(",")
    if typ == "devices":
        cls = DeviceSlice
        kwargs['stat_val'] = [DeviceSlice.to_db[request.args['stat']]]
    else:
        cls = ShareSlice
        algo = request.args.get('algos')
        kwargs['algo'] = tuple()
        if algo:
            kwargs['algo'] = algo.split(",")
        kwargs['share_type'] = request.args.get("share_types", "acc").split(",")
    span = int(request.args.get("span", 0))
    # store all the raw data of we've grabbed
    workers = {}
    # Span config determines slice granularity and the window to fetch
    span_config = ShareSlice.span_config[span]
    step = span_config['slice']
    res = make_upper_lower(trim=span_config['slice'],
                           span=span_config['window'],
                           clip=span_config['slice'] * 2,
                           fmt="both")
    lower, upper, lower_stamp, upper_stamp = res
    workers = cls.get_span(lower=lower,
                           slice_size=span,
                           upper=upper,
                           stamp=True,
                           **kwargs)
    # Track the peak so an appropriate display unit can be chosen below
    highest_value = 0
    for worker in workers:
        d = worker['data']
        # Set the label for this data series
        if typ == "shares":
            d['label'] = "{} ({})".format(d['worker'] or "[unnamed]", d['algo'])
            # Convert accepted shares into a hashrate for the chart
            hps = current_app.config['algos'][d['algo']]['hashes_per_share']
            for idx in worker['values']:
                worker['values'][idx] *= hps / step.total_seconds()
                if worker['values'][idx] > highest_value:
                    highest_value = worker['values'][idx]
        else:
            d['label'] = d['device']
            for idx in worker['values']:
                if worker['values'][idx] > highest_value:
                    highest_value = worker['values'][idx]
    if typ == "shares" or kwargs['stat_val'][0] == 0:
        # Pick the largest hashrate unit that keeps the peak above 1
        scales = {1000: "KH/s", 1000000: "MH/s", 1000000000: "GH/s"}
        scale_label = "H/s"
        scale = 1
        for amnt, label in scales.iteritems():
            if amnt > scale and highest_value > amnt:
                scale = amnt
                scale_label = label
    else:
        scale_label = "Temperature"
        scale = 1
    return jsonify(start=lower_stamp,
                   end=upper_stamp,
                   step=step.total_seconds(),
                   scale=scale,
                   scale_label=scale_label,
                   workers=workers)
@main.errorhandler(Exception)
def handle_error(error):
    """Blueprint-wide catch-all: log the exception and render the 500 page."""
    current_app.logger.exception(error)
    return render_template("500.html", no_header=True)
def handle_message(address, curr):
    """Verify a signed settings-change message posted from the settings form.

    Returns a (result message, bootstrap alert class) pair; result is None
    when the request was not a POST.
    """
    alert_cls = "danger"
    result = None
    vals = request.form
    if request.method == "POST":
        try:
            verify_message(address, curr, vals['message'], vals['signature'])
        except CommandException as e:
            # Expected validation failures are shown to the user verbatim
            result = "Error: {}".format(e)
            # lets just log all errors people are getting
            current_app.logger.info(
                "Command exception in Command validation",
                exc_info=True)
        except Exception as e:
            current_app.logger.info(
                "Unhandled exception in Command validation",
                exc_info=True)
            result = "An unhandled error occurred: {}".format(e)
        else:
            result = "Successfully changed!"
            alert_cls = "success"
    return result, alert_cls
@main.route("/validate_address", methods=['POST'])
def validate_address():
    """ An endpoint that allows us to validate that addresses meet different
    types of requirements.
    Input is a json dictionary with:
        currency: the three letter code, or the string 'Any'
        address: the address string
        type: the type of address it must be
    Return value is like {'LTC': True} where LTC is currency provided
    """
    def validate(address, typ, currency):
        # Decode the base58 version byte; invalid addresses fail outright
        try:
            ver = currencies.validate_bc_address(address)
        except InvalidAddressException:
            return False
        # Pick the currency list matching the requested address class
        if typ == 'buyable':
            lst = currencies.buyable_currencies
        elif typ == 'sellable':
            lst = currencies.sellable_currencies
        elif typ == 'unsellable':
            lst = currencies.unsellable_currencies
        elif typ == 'unbuyable':
            lst = currencies.unbuyable_currencies
        else:
            abort(400)
        # The version byte must belong to the named currency (or any)
        for curr in lst:
            if ver in curr.address_version:
                if curr.key == currency or currency == 'Any':
                    return True
        return False
    data = request.json
    if validate(data['address'], data['type'], data['currency']):
        return jsonify({data['currency']: True})
    else:
        return jsonify({data['currency']: False})
@main.route("/settings/<user_address>", methods=['POST', 'GET'])
def settings(user_address):
    """Render (GET) or update (POST) the settings page for a user address.

    The address must belong to a payable currency; otherwise an error page
    listing the allowed currencies is rendered instead.
    """
    # Do some checking to make sure the address is valid + payable
    try:
        curr = currencies.lookup_payable_addr(user_address)
    except Exception:
        return render_template(
            'invalid_address.html',
            allowed_currencies=currencies.buyable_currencies)
    # Processes a signed settings-change message on POST; no-op on GET.
    result, alert_cls = handle_message(user_address, curr)
    user = UserSettings.query.filter_by(user=user_address).first()
    # Currencies that cannot be sold but can still be mined.
    unsellable_mineable = [c for c in currencies.unsellable_currencies
                           if c not in currencies.unmineable_currencies]
    # NOTE(review): user_currency=curr.name vs user_currency_name=curr.key
    # looks swapped -- confirm against the template before changing.
    return render_template("user_settings.html",
                           username=user_address,
                           result=result,
                           alert_cls=alert_cls,
                           user_currency=curr.name,
                           user_currency_name=curr.key,
                           user=user,
                           buyable_currencies=currencies.buyable_currencies,
                           sellable_currencies=currencies.sellable_currencies,
                           unsellable_currencies=unsellable_mineable)
@main.route("/crontabs")
def crontabs():
    """Show the last-run metadata each cron job recorded in the cache."""
    prefix = "cron_last_run_"
    client = cache.cache._client
    # One hash per cron job, keyed by the job name (prefix stripped).
    stats = {key[len(prefix):]: client.hgetall(key)
             for key in client.keys("{}*".format(prefix))}
    return render_template("crontabs.html", stats=stats)
|
import site
import getopt, glob, sys
from PIL import Image
# Python 2 command-line front-end: print usage when run without arguments.
if len(sys.argv) == 1:
    print "PIL File 0.4/2003-09-30 -- identify image files"
    print "Usage: pilfile [option] files..."
    print "Options:"
    print "  -f  list supported file formats"
    print "  -i  show associated info and tile data"
    print "  -v  verify file headers"
    print "  -q  quiet, don't warn for unidentified/missing/broken files"
    sys.exit(1)

# Parse the option flags; -D is undocumented (raises Image debug level).
try:
    opt, args = getopt.getopt(sys.argv[1:], "fqivD")
except getopt.error, v:
    print v
    sys.exit(1)
# Behaviour flags toggled by the command-line options below.
verbose = quiet = verify = 0

for o, a in opt:
    if o == "-f":
        # Force plugin registration so Image.ID is fully populated,
        # then list the supported format identifiers and exit.
        Image.init()
        id = Image.ID[:]
        id.sort()
        print "Supported formats:"
        for i in id:
            print i,
        sys.exit(1)
    elif o == "-i":
        verbose = 1
    elif o == "-q":
        quiet = 1
    elif o == "-v":
        verify = 1
    elif o == "-D":
        Image.DEBUG = Image.DEBUG + 1
def globfix(files):
    """Expand shell wildcards on Windows, where the shell leaves them alone.

    On every other platform the shell has already expanded wildcards, so
    the argument list is returned unchanged.
    """
    if sys.platform != "win32":
        return files
    expanded = []
    for name in files:
        if glob.has_magic(name):
            expanded.extend(glob.glob(name))
        else:
            expanded.append(name)
    return expanded
# Identify every file named on the command line, printing format,
# size and mode; optionally dump info/tile data (-i) and verify (-v).
for file in globfix(args):
    try:
        im = Image.open(file)
        print "%s:" % file, im.format, "%dx%d" % im.size, im.mode,
        if verbose:
            print im.info, im.tile,
        print
        if verify:
            try:
                im.verify()
            except:
                if not quiet:
                    print "failed to verify image",
                    print "(%s:%s)" % (sys.exc_type, sys.exc_value)
    except IOError, v:
        # Image.open raises IOError for missing/unidentifiable files.
        if not quiet:
            print file, "failed:", v
    except:
        # Anything else is unexpected; dump the traceback unless quiet.
        import traceback
        if not quiet:
            print file, "failed:", "unexpected error"
            traceback.print_exc(file=sys.stdout)
|
from toee import *
from utilities import *
from Co8 import *
def OnBeginSpellCast( spell ):
	# Engine hook: fires when casting starts; trace caster/targets and
	# show the transmutation swirl on the caster.
	print "Disintegrate OnBeginSpellCast"
	print "spell.target_list=", spell.target_list
	print "spell.caster=", spell.caster, " caster.level= ", spell.caster_level
	game.particles( "sp-transmutation-conjure", spell.caster )
def OnSpellEffect( spell ):
	# Intentionally a no-op: all effects are applied in OnEndProjectile.
	print "Disintegrate OnSpellEffect"
def OnBeginRound( spell ):
	# Intentionally a no-op: Disintegrate resolves instantly.
	print "Disintegrate OnBeginRound"
def OnBeginProjectile( spell, projectile, index_of_target ):
	# Attach the green ray particle system to the projectile object.
	print "Disintegrate OnBeginProjectile"
	#spell.proj_partsys_id = game.particles( 'sp-Ray of Frost', projectile )
	projectile.obj_set_int( obj_f_projectile_part_sys_id, game.particles( 'sp-Disintegrate', projectile ) )
def OnEndProjectile( spell, projectile, index_of_target ):
	# Engine hook: the ray has arrived -- resolve the touch attack,
	# saving throw and damage, then end the spell.
	print "Disintegrate OnEndProjectile"

	# 2d6 force damage per caster level, capped at 40d6.
	damage_dice = dice_new( "1d6" )
	damage_dice.num = min( 40, spell.caster_level * 2 )

	spell.duration = 0
	is_immune_to_crit = 0
	changed_con = 0
	target_item = spell.target_list[0]

	game.particles_end( projectile.obj_get_int( obj_f_projectile_part_sys_id ) )
	return_val = spell.caster.perform_touch_attack( target_item.obj )
	if return_val & D20CAF_HIT:
		game.particles( 'sp-Disintegrate-Hit', target_item.obj )
		# NOTE(review): 14629/14621/14604 are presumably proto IDs for
		# conjured objects (e.g. wall/sphere effects) -- confirm.
		if (target_item.obj.name == 14629 or target_item.obj.name == 14621 or target_item.obj.name == 14604) and not is_spell_flag_set(target_item.obj, OSF_IS_FLAMING_SPHERE):

			# check for Otiluke's Resilient Sphere
			if target_item.obj.d20_query_has_spell_condition(sp_Otilukes_Resilient_Sphere) == 1:
				target_item.obj.condition_add_with_args( 'sp-Break Enchantment', spell.id, spell.duration, 0 )
				game.particles( 'sp-Otilukes Resilient Sphere-END', target_item.obj.location )
			else:
				# Conjured objects are simply destroyed outright.
				game.particles( 'sp-Stoneskin', target_item.obj.location )
				target_item.obj.destroy()

		else:
			# hit
			if target_item.obj.is_category_type( mc_type_construct ) or target_item.obj.is_category_type( mc_type_undead ):
				# Give crit-immune critters a valid CON so the kill/damage
				# pipeline works; restored at the bottom of this function.
				if target_item.obj.stat_base_get(stat_constitution) < 0:
					target_item.obj.stat_base_set(stat_constitution, 10)
					changed_con = 1
				is_immune_to_crit = 1
			elif target_item.obj.is_category_type( mc_type_plant ) or target_item.obj.is_category_type( mc_type_ooze ) or target_item.obj.is_category_type( mc_type_elemental ):
				is_immune_to_crit = 1
			#elif return_val == 2:
			#	damage_dice.num = damage_dice.num * 2 # handled internally now

			# A successful Fortitude save reduces the damage to 5d6.
			if target_item.obj.saving_throw_spell( spell.dc, D20_Save_Fortitude, D20STD_F_NONE, spell.caster, spell.id ):
				damage_dice.num = 5
				#if return_val == 2 and is_immune_to_crit == 0:
				#	damage_dice.num = 10
				target_item.obj.float_mesfile_line( 'mes\\spell.mes', 30001 )

			target_item.obj.spell_damage_weaponlike( spell.caster, D20DT_FORCE, damage_dice, D20DAP_UNSPECIFIED, 100, D20A_CAST_SPELL, spell.id , return_val, index_of_target)
			if target_item.obj.stat_level_get(stat_hp_current) < 1:
				# Disintegrated to dust: kill without leaving a normal
				# corpse (Animate Dead condition presumably handles the
				# remains -- TODO confirm intent).
				target_item.obj.critter_kill_by_effect()
				target_item.obj.condition_add_with_args( 'sp-Animate Dead', spell.id, spell.duration, 3 )
				game.particles( 'sp-Stoneskin', target_item.obj )

				# check for Otiluke's Resilient Sphere
				if target_item.obj.d20_query_has_spell_condition(sp_Otilukes_Resilient_Sphere) == 1:
					target_item.obj.condition_add_with_args( 'sp-Break Enchantment', spell.id, spell.duration, 0 )
					game.particles( 'sp-Otilukes Resilient Sphere-END', target_item.obj )

	else:
		# missed
		target_item.obj.float_mesfile_line( 'mes\\spell.mes', 30007 )
		game.particles( 'Fizzle', target_item.obj )

	if changed_con == 1:
		# Restore the sentinel "no CON" value set above.
		target_item.obj.stat_base_set(stat_constitution, -1)

	spell.target_list.remove_target( target_item.obj )
	spell.spell_end( spell.id )
def OnEndSpellCast( spell ):
	# Engine hook: nothing to clean up for this spell.
	print "Disintegrate OnEndSpellCast"
|
import asyncio
import pytest
from pydf import AsyncPydf
from .utils import pdf_text
def test_async_pdf_gen():
    """Generate a trivial PDF and check both the magic number and the text."""
    generator = AsyncPydf()
    loop = asyncio.get_event_loop()
    coro = generator.generate_pdf('<html><body>Is this thing on?</body></html>')
    content = loop.run_until_complete(coro)
    # Every PDF file starts with the %PDF magic marker.
    assert content[:4] == b'%PDF'
    assert pdf_text(content) == 'Is this thing on?\n\n\x0c'
def test_invalid_argument():
    """An unknown wkhtmltopdf option should surface as a RuntimeError."""
    generator = AsyncPydf()
    loop = asyncio.get_event_loop()
    with pytest.raises(RuntimeError) as exc_info:
        coro = generator.generate_pdf('hello', foobar='broken')
        loop.run_until_complete(coro)
    assert 'error running wkhtmltopdf, command' in str(exc_info)
|
import re
def get_inputs(inputFile="encoded.txt"):
    """Read an encoded file and return a ``(message, key)`` pair.

    The first line holds space-separated ``<char> <code>`` pairs written by
    encode(); the rest of the file is the encoded message.  The returned
    key is inverted (code -> original character) ready for decode().
    """
    # 'with' guarantees the handle is closed (the original leaked it).
    with open(inputFile, 'r') as f:
        header = f.readline().strip().split()
        message = f.read()
    key = {}
    # Header alternates char, code; invert so codes look up characters.
    for char, code in zip(header[::2], header[1::2]):
        key[code] = char
    return (message, key)
def decode(args):
    """Decode ``args = (message, key)`` where key maps code -> character.

    Scans the message left to right, greedily matching the shortest code;
    non-code characters pass through unchanged, while stray g/G characters
    that never complete a code are dropped.
    """
    message, key = args[0], args[1]
    codes = key.keys()
    decoded = []
    lo, hi = 0, 1
    while hi <= len(message):
        chunk = message[lo:hi]
        if chunk in codes:
            decoded.append(key[chunk])
            lo = hi
        elif message[lo] not in "gG":
            # Ordinary character (space, punctuation...) -- copy it over.
            decoded.append(message[lo])
            lo = hi
        hi += 1
    return ''.join(decoded)
def encode(inputFile="input.txt", outputFile="encoded.txt"):
    """Encode alphabetic characters of inputFile as g/G bit strings.

    Each distinct letter gets a fixed-width binary index rendered with
    'g' for 0 and 'G' for 1.  The output file holds a header line of
    "<char> <code> " pairs followed by the encoded text.
    """
    # 'with' closes the handles; the original leaked both files and
    # never flushed the output explicitly.
    with open(inputFile, 'r') as fi:
        text = fi.read()
    # Bits needed to give every distinct word character a unique code
    # (raw string avoids the invalid "\W" escape warning).
    nbits = len(set(re.sub(r"\W", "", text))).bit_length()
    key = {}
    outputText = []
    for char in text:
        if char.isalpha() and char not in key:
            key[char] = '{:0{length}b}'.format(
                len(key), length=nbits).replace("0", "g").replace("1", "G")
        outputText.append(key[char] if char.isalpha() else char)
    outputText = ''.join(outputText)
    outputCode = ''.join("{} {} ".format(alpha, code)
                         for alpha, code in key.items()) + '\n'
    with open(outputFile, 'w') as fo:
        fo.write(''.join((outputCode, outputText)))
|
"""Test trait types of the widget packages."""
from unittest import TestCase
from traitlets import HasTraits
from traitlets.tests.test_traitlets import TraitTestBase
from ipywidgets import Color, EventfulDict, EventfulList
class ColorTrait(HasTraits):
    """Fixture class exposing a single Color trait for TestColor."""
    value = Color("black")
class TestColor(TraitTestBase):
    """Drive TraitTestBase's generic good/bad value checks for Color."""
    obj = ColorTrait()

    # Values the trait must accept...
    _good_values = ["blue", "#AA0", "#FFFFFF"]
    # ...and values it must reject.
    _bad_values = ["vanilla", "blues"]
class TestEventful(TestCase):
    """Exercise the callback hooks of EventfulList and EventfulDict."""

    def test_list(self):
        """Does the EventfulList work?"""
        events = []

        class Holder(HasTraits):
            x = EventfulList(list('abc'))

        holder = Holder()
        holder.x.on_events(lambda i, x: events.append('insert'),
                           lambda i, x: events.append('set'),
                           lambda i: events.append('del'),
                           lambda: events.append('reverse'),
                           lambda *p, **k: events.append('sort'))
        holder.x.remove('c')      # ab
        holder.x.insert(0, 'z')   # zab
        del holder.x[1]           # zb
        holder.x.reverse()        # bz
        holder.x[1] = 'o'         # bo
        holder.x.append('a')      # boa
        holder.x.sort()           # abo

        # Were the correct events captured?
        self.assertEqual(
            events,
            ['del', 'insert', 'del', 'reverse', 'set', 'set', 'sort'])
        # Is the output correct?
        self.assertEqual(holder.x, list('abo'))

    def test_dict(self):
        """Does the EventfulDict work?"""
        events = []

        class Holder(HasTraits):
            x = EventfulDict({c: c for c in 'abc'})

        holder = Holder()
        holder.x.on_events(lambda k, v: events.append('add'),
                           lambda k, v: events.append('set'),
                           lambda k: events.append('del'))
        del holder.x['c']    # ab
        holder.x['z'] = 1    # abz
        holder.x['z'] = 'z'  # abz
        holder.x.pop('a')    # bz

        # Were the correct events captured?
        self.assertEqual(events, ['del', 'add', 'set', 'del'])
        # Is the output correct?
        self.assertEqual(holder.x, {c: c for c in 'bz'})
|
import sqlite3
import postgresql.driver as pg_driver
import optparse
def copy(name, lite, pg, src, dst):
    """Copy all rows of one table from the sqlite cursor into postgres.

    name: label printed for progress.
    lite: sqlite cursor; src['query'] is a paged SELECT with LIMIT/OFFSET
          placeholders fed through str.format.
    pg:   postgres connection; dst holds the target 'table', its id
          'seq'uence name and the prepared INSERT 'query'.

    The destination table is emptied first; after the last page the
    sequence is advanced to the total number of rows copied.
    """
    print("[+] {0}".format(name))
    pg.execute("DELETE FROM {0}".format(dst['table']))
    offset = 0
    limit = 10000
    insert = pg.prepare(dst['query'])
    while True:
        # Dead "r = 0" placeholder removed; count the fetched page directly.
        rows = lite.execute(src['query'].format(limit, offset)).fetchall()
        count = len(rows)
        insert.load_rows(rows)
        if count != limit:
            # Short page == last page; bump the sequence if we copied anything.
            if offset + count != 0:
                pg.execute(
                    "SELECT setval('{0}',{1})".format(dst['seq'], offset + count))
            break
        offset += limit
# Map of copyable table name -> (source SELECT spec, destination spec).
# Each source query is paged via LIMIT/OFFSET str.format placeholders;
# each destination spec names the postgres table, its id sequence and a
# prepared INSERT statement.
cando = {
    'connections' : ({
        # FIXME postgres does not know connection_type pending
        # connection_type is an enum, so this may get messy
        'query' : """SELECT
  connection,
  connection_type,
  connection_transport,
  datetime(connection_timestamp, 'unixepoch') || ' UTC' AS connection_timestamp,
  connection_parent,
  connection_root,
  ifnull(nullif(local_host,''),'0.0.0.0'),
  local_port,
  ifnull(nullif(remote_host,''),'0.0.0.0'),
  remote_port,
  connection_protocol,
  remote_hostname FROM connections WHERE connection_type != 'pending' LIMIT {:d} OFFSET {:d} \n"""
        },
        {
            'table' : 'dionaea.connections',
            'seq' : "dionaea.connections_connection_seq",
            'query' : """INSERT INTO dionaea.connections
  (connection,
  connection_type,
  connection_transport,
  connection_timestamp,
  connection_parent,
  connection_root,
  local_host,
  local_port,
  remote_host,
  remote_port,
  connection_protocol,
  remote_hostname)
 VALUES
  ($1,$2,$3,$4::text::timestamp,$5,$6,$7::text::inet,$8,$9::text::inet,$10,$11,$12)""",
        }),
    'dcerpcbinds': ({
        'query' : """SELECT
  dcerpcbind,
  connection,
  dcerpcbind_uuid,
  dcerpcbind_transfersyntax FROM dcerpcbinds LIMIT {:d} OFFSET {:d} \n"""
        },
        {
            'table' : 'dionaea.dcerpcbinds',
            'seq' : "dionaea.dcerpcbinds_dcerpcbind_seq",
            'query' : """INSERT INTO dionaea.dcerpcbinds
  (dcerpcbind,
  connection,
  dcerpcbind_uuid,
  dcerpcbind_transfersyntax)
 VALUES
  ($1,$2,$3,$4)""",
        }),
    'dcerpcrequests' : ({
        'query' : """SELECT
  dcerpcrequest,
  connection,
  dcerpcrequest_uuid,
  dcerpcrequest_opnum FROM dcerpcrequests LIMIT {:d} OFFSET {:d}"""
        },
        { 'table' : 'dionaea.dcerpcrequests',
          'seq' : "dionaea.dcerpcrequests_dcerpcrequest_seq",
          'query' : """INSERT INTO dionaea.dcerpcrequests
  (dcerpcrequest,
  connection,
  dcerpcrequest_uuid,
  dcerpcrequest_opnum)
 VALUES
  ($1,$2,$3,$4)""",
        }),
    'dcerpcservices' : ({
        'query' : """SELECT
  dcerpcservice,
  dcerpcservice_uuid,
  dcerpcservice_name FROM dcerpcservices LIMIT {:d} OFFSET {:d}"""
        },
        { 'table' : 'dionaea.dcerpcservices',
          'seq' : "dionaea.dcerpcservices_dcerpcservice_seq",
          'query' : """INSERT INTO dionaea.dcerpcservices
  (dcerpcservice,
  dcerpcservice_uuid,
  dcerpcservice_name)
 VALUES
  ($1,$2,$3)""",
        }),
    'dcerpcserviceops' : ({
        'query' : """SELECT
  dcerpcserviceop,
  dcerpcservice,
  dcerpcserviceop_name,
  dcerpcserviceop_opnum,
  dcerpcserviceop_vuln
  FROM dcerpcserviceops LIMIT {:d} OFFSET {:d}"""
        },
        { 'table' : 'dionaea.dcerpcserviceops',
          'seq' : "dionaea.dcerpcserviceops_dcerpcserviceop_seq",
          'query' : """INSERT INTO dionaea.dcerpcserviceops
  (dcerpcserviceop,
  dcerpcservice,
  dcerpcserviceop_name,
  dcerpcserviceop_opnum,
  dcerpcserviceop_vuln)
 VALUES
  ($1,$2,$3,$4,$5)""",
        }),
    'downloads' : ({
        'query' : """SELECT
  download,
  connection,
  download_md5_hash,
  download_url FROM downloads LIMIT {:d} OFFSET {:d}"""
        },
        { 'table' : 'dionaea.downloads',
          # Fix: previously pointed at the dcerpcrequests sequence
          # (copy-paste error), so setval() advanced the wrong sequence.
          'seq' : "dionaea.downloads_download_seq",
          'query' : """INSERT INTO dionaea.downloads
  (download,
  connection,
  download_md5_hash,
  download_url)
 VALUES
  ($1,$2,$3,$4)""",
        }),
    'emu_profiles' : ({
        'query' : """SELECT
  emu_profile,
  connection,
  emu_profile_json FROM emu_profiles LIMIT {:d} OFFSET {:d}"""
        },
        { 'table' : 'dionaea.emu_profiles',
          'seq' : "dionaea.emu_profiles_emu_profile_seq",
          'query' : """INSERT INTO dionaea.emu_profiles
  (emu_profile,
  connection,
  emu_profile_json)
 VALUES
  ($1,$2,$3)""",
        }),
    'emu_services' : ({
        # NOTE(review): "emu_serivce" looks like a typo for emu_service,
        # but the sqlite schema may share it -- confirm before changing.
        'query' : """SELECT
  emu_serivce,
  connection,
  emu_service_url FROM emu_services LIMIT {:d} OFFSET {:d}"""
        },
        { 'table' : 'dionaea.emu_services',
          'seq' : "dionaea.emu_services_emu_service_seq",
          'query' : """INSERT INTO dionaea.emu_services
  (emu_service,
  connection,
  emu_service_url)
 VALUES
  ($1,$2,$3)""",
        }),
    'offers' : ({
        'query' : """SELECT
  offer,
  connection,
  offer_url FROM offers LIMIT {:d} OFFSET {:d}"""
        },
        { 'table' : 'dionaea.offers',
          'seq' : "dionaea.offers_offer_seq",
          'query' : """INSERT INTO dionaea.offers
  (offer,
  connection,
  offer_url)
 VALUES
  ($1,$2,$3)""",
        }),
    'p0fs' : (
        { 'query' : """SELECT
  p0f,
  connection,
  p0f_genre,
  p0f_link,
  p0f_detail,
  p0f_uptime,
  p0f_tos,
  p0f_dist,
  p0f_nat,
  p0f_fw FROM p0fs LIMIT {:d} OFFSET {:d}"""
        },
        { 'table' : 'dionaea.p0fs',
          'seq' : "dionaea.p0fs_p0f_seq",
          'query' : """INSERT INTO dionaea.p0fs
  ( p0f,
  connection,
  p0f_genre,
  p0f_link,
  p0f_detail,
  p0f_uptime,
  p0f_tos,
  p0f_dist,
  p0f_nat,
  p0f_fw)
 VALUES
  ($1,$2,$3,$4,$5,$6,$7,$8,$9,$10)""",
        }),
    'virustotals': (
        { 'query' : """SELECT
  virustotal,
  virustotal_md5_hash,
  datetime(virustotal_timestamp, 'unixepoch') || ' UTC' AS virustotal_timestamp,
  virustotal_permalink
  FROM virustotals LIMIT {:d} OFFSET {:d}"""
        },
        { 'table' : 'dionaea.virustotals',
          'seq' : "dionaea.virustotals_virustotal_seq",
          'query' : """INSERT INTO dionaea.virustotals
  (
  virustotal,
  virustotal_md5_hash,
  virustotal_timestamp,
  virustotal_permalink
  )
 VALUES
  ($1,$2,$3::text::timestamptz,$4)""",
        }),
    'virustotalscans': (
        { 'query' : """SELECT
  virustotalscan,
  virustotal,
  virustotalscan_scanner,
  nullif(virustotalscan_result,'')
  FROM virustotalscans LIMIT {:d} OFFSET {:d}"""
        },
        { 'table' : 'dionaea.virustotalscans',
          'seq' : "dionaea.virustotalscans_virustotalscan_seq",
          'query' : """INSERT INTO dionaea.virustotalscans
  (
  virustotalscan,
  virustotal,
  virustotalscan_scanner,
  virustotalscan_result
  )
 VALUES
  ($1,$2,$3,$4)""",
        }),
    # x
    'mssql_fingerprints': (
        { 'query' : """SELECT
  mssql_fingerprint,
  connection,
  mssql_fingerprint_hostname,
  mssql_fingerprint_appname,
  mssql_fingerprint_cltintname FROM mssql_fingerprints LIMIT {:d} OFFSET {:d}"""
        },
        { 'table' : 'dionaea.mssql_fingerprints',
          'seq' : "dionaea.mssql_fingerprints_mssql_fingerprint_seq",
          'query' : """INSERT INTO dionaea.mssql_fingerprints
  (
  mssql_fingerprint,
  connection,
  mssql_fingerprint_hostname,
  mssql_fingerprint_appname,
  mssql_fingerprint_cltintname
  )
 VALUES
  ($1,$2,$3,$4,$5)""",
        }),
    'mssql_commands': (
        { 'query' : """SELECT
  mssql_command,
  connection,
  mssql_command_status,
  mssql_command_cmd FROM mssql_commands LIMIT {:d} OFFSET {:d}"""
        },
        { 'table' : 'dionaea.mssql_commands',
          'seq' : "dionaea.mssql_commands_mssql_command_seq",
          'query' : """INSERT INTO dionaea.mssql_commands
  (
  mssql_command,
  connection,
  mssql_command_status,
  mssql_command_cmd
  )
 VALUES
  ($1,$2,$3,$4)""",
        }),
    'logins': (
        { 'query' : """SELECT
  login,
  connection,
  login_username,
  login_password FROM logins LIMIT {:d} OFFSET {:d}"""
        },
        { 'table' : 'dionaea.logins',
          'seq' : "dionaea.logins_login_seq",
          'query' : """INSERT INTO dionaea.logins
  (
  login,
  connection,
  login_username,
  login_password
  )
 VALUES
  ($1,$2,$3,$4)""",
        })
}
if __name__ == "__main__":
    # Command-line entry point: connect to both databases and copy the
    # tables named as positional arguments.
    p = optparse.OptionParser()
    p.add_option('-s', '--database-host', dest='database_host',
        help='localhost:5432', type="string", action="store")
    p.add_option('-d', '--database', dest='database',
        help='for example xmpp', type="string", action="store")
    p.add_option('-u', '--database-user', dest='database_user',
        help='for example xmpp', type="string", action="store")
    p.add_option('-p', '--database-password', dest='database_password',
        help='the database users password', type="string", action="store")
    p.add_option('-f', '--sqlite-file', dest='sqlite_file',
        help='path to sqlite db', type="string", action="store")
    (options, args) = p.parse_args()

    # NOTE(review): execution continues after printing the usage line, so
    # with no args the script still opens both database connections
    # (and crashes if -f/-d etc. were omitted) -- probably wants an exit.
    if len(args) == 0:
        print("use {} as args".format( ' '.join(cando.keys()) ) )

    db = {}
    db['sqlite'] = {}
    db['sqlite']['dbh'] = sqlite3.connect(options.sqlite_file)
    db['sqlite']['cursor'] = db['sqlite']['dbh'].cursor()

    db['pg'] = {}
    db['pg']['dbh'] = pg_driver.connect(
        user = options.database_user,
        password = options.database_password,
        database = options.database,
        host = options.database_host,
        port = 5432)

    # Copy each requested table; unknown names are silently ignored.
    for i in args:
        if i in cando:
            copy(i,
                db['sqlite']['cursor'],
                db['pg']['dbh'],
                cando[i][0],
                cando[i][1])
|
import heapq
import itertools
import struct
import weakref
from ctypes import *
from ctypes.wintypes import HANDLE
from comtypes import IUnknown, IServiceProvider, COMError
import comtypes.client
import comtypes.client.lazybind
import oleacc
import UIAHandler
from comInterfaces.Accessibility import *
from comInterfaces.IAccessible2Lib import *
from logHandler import log
import JABHandler
import eventHandler
import winUser
import api
import NVDAObjects.IAccessible
import NVDAObjects.window
import appModuleHandler
import mouseHandler
import controlTypes
import keyboardHandler
# Hard limits on how many queued winEvents are processed.
MAX_WINEVENTS=500
MAX_WINEVENTS_PER_THREAD=10

# IAccessible accNavigate relation codes (IAccessible2 extensions).
NAVRELATION_LABEL_FOR=0x1002
NAVRELATION_LABELLED_BY=0x1003
NAVRELATION_NODE_CHILD_OF=0x1005
NAVRELATION_EMBEDS=0x1009

# IAccessible2 relation type strings.
IA2_RELATION_FLOWS_FROM = "flowsFrom"
IA2_RELATION_FLOWS_TO = "flowsTo"

# winEvents that signal menu mode starting/ending; only the newest one
# is kept by OrderedWinEventLimiter.
MENU_EVENTIDS=(winUser.EVENT_SYSTEM_MENUSTART,winUser.EVENT_SYSTEM_MENUEND,winUser.EVENT_SYSTEM_MENUPOPUPSTART,winUser.EVENT_SYSTEM_MENUPOPUPEND)
class OrderedWinEventLimiter(object):
	"""Collects and limits winEvents based on whether they are focus changes, or just generic (all other ones).
	Only allow a max of L{maxFocusItems}, if more are added then the oldest focus event is removed to make room.
	Only allow one event for one specific object at a time, though push it further forward in time if a duplicate tries to get added. This is true for both generic and focus events.
	"""

	def __init__(self,maxFocusItems=3):
		"""
		@param maxFocusItems: the amount of focus changed events allowed to be queued.
		@type maxFocusItems: integer
		"""
		self.maxFocusItems=maxFocusItems
		# Caches map (eventID,window,objectID,childID,threadID) -> sequence
		# number; re-adding a key pushes it later in time.
		self._focusEventCache={}
		self._genericEventCache={}
		self._eventHeap=[]
		# Monotonic counter used to keep events in arrival order.
		self._eventCounter=itertools.count()
		# Only the newest menu-mode event is remembered.
		self._lastMenuEvent=None

	def addEvent(self,eventID,window,objectID,childID,threadID):
		"""Adds a winEvent to the limiter.
		@param eventID: the winEvent type
		@type eventID: integer
		@param window: the window handle of the winEvent
		@type window: integer
		@param objectID: the objectID of the winEvent
		@type objectID: integer
		@param childID: the childID of the winEvent
		@type childID: integer
		@param threadID: the threadID of the winEvent
		@type threadID: integer
		"""
		if eventID==winUser.EVENT_OBJECT_FOCUS:
			if objectID in (winUser.OBJID_SYSMENU,winUser.OBJID_MENU) and childID==0:
				# This is a focus event on a menu bar itself, which is just silly. Ignore it.
				return
			#We do not need a focus event on an object if we already got a foreground event for it
			if (winUser.EVENT_SYSTEM_FOREGROUND,window,objectID,childID,threadID) in self._focusEventCache:
				return
			self._focusEventCache[(eventID,window,objectID,childID,threadID)]=next(self._eventCounter)
			return
		elif eventID==winUser.EVENT_SYSTEM_FOREGROUND:
			# Foreground supersedes any pending focus event for the same object.
			self._focusEventCache.pop((winUser.EVENT_OBJECT_FOCUS,window,objectID,childID,threadID),None)
			self._focusEventCache[(eventID,window,objectID,childID,threadID)]=next(self._eventCounter)
		elif eventID==winUser.EVENT_OBJECT_SHOW:
			# A show cancels out a pending hide for the same object (and vice versa).
			k=(winUser.EVENT_OBJECT_HIDE,window,objectID,childID,threadID)
			if k in self._genericEventCache:
				del self._genericEventCache[k]
				return
		elif eventID==winUser.EVENT_OBJECT_HIDE:
			k=(winUser.EVENT_OBJECT_SHOW,window,objectID,childID,threadID)
			if k in self._genericEventCache:
				del self._genericEventCache[k]
				return
		elif eventID in MENU_EVENTIDS:
			self._lastMenuEvent=(next(self._eventCounter),eventID,window,objectID,childID,threadID)
			return
		self._genericEventCache[(eventID,window,objectID,childID,threadID)]=next(self._eventCounter)

	def flushEvents(self):
		"""Returns a list of winEvents (tuples of eventID,window,objectID,childID) that have been added, though due to limiting, it will not necessarily be all the winEvents that were originally added. They are definitely garenteed to be in the correct order though.
		"""
		if self._lastMenuEvent is not None:
			heapq.heappush(self._eventHeap,self._lastMenuEvent)
			self._lastMenuEvent=None
		g=self._genericEventCache
		self._genericEventCache={}
		threadCounters={}
		# Walk generic events newest-first so each thread keeps its most
		# recent events.
		# NOTE(review): the ">" comparison admits MAX_WINEVENTS_PER_THREAD+1
		# events per thread -- confirm whether ">=" was intended.
		for k,v in sorted(g.iteritems(),key=lambda item: item[1],reverse=True):
			threadCount=threadCounters.get(k[-1],0)
			if threadCount>MAX_WINEVENTS_PER_THREAD:
				continue
			heapq.heappush(self._eventHeap,(v,)+k)
			threadCounters[k[-1]]=threadCount+1
		f=self._focusEventCache
		self._focusEventCache={}
		# Keep only the newest maxFocusItems focus/foreground events.
		for k,v in sorted(f.iteritems(),key=lambda item: item[1])[0-self.maxFocusItems:]:
			heapq.heappush(self._eventHeap,(v,)+k)
		e=self._eventHeap
		self._eventHeap=[]
		r=[]
		for count in xrange(len(e)):
			# Strip the ordering counter (front) and threadID (back).
			event=heapq.heappop(e)[1:-1]
			r.append(event)
		return r
# Module-wide limiter instance and cache of live NVDAObjects keyed weakly.
winEventLimiter=OrderedWinEventLimiter()
liveNVDAObjectTable=weakref.WeakValueDictionary()

# Maps MSAA (oleacc) role constants, IAccessible2 role constants and some
# string roles to NVDA's internal controlTypes roles.
IAccessibleRolesToNVDARoles={
	oleacc.ROLE_SYSTEM_WINDOW:controlTypes.ROLE_WINDOW,
	oleacc.ROLE_SYSTEM_CLIENT:controlTypes.ROLE_PANE,
	oleacc.ROLE_SYSTEM_TITLEBAR:controlTypes.ROLE_TITLEBAR,
	oleacc.ROLE_SYSTEM_DIALOG:controlTypes.ROLE_DIALOG,
	oleacc.ROLE_SYSTEM_PANE:controlTypes.ROLE_PANE,
	oleacc.ROLE_SYSTEM_CHECKBUTTON:controlTypes.ROLE_CHECKBOX,
	oleacc.ROLE_SYSTEM_RADIOBUTTON:controlTypes.ROLE_RADIOBUTTON,
	oleacc.ROLE_SYSTEM_STATICTEXT:controlTypes.ROLE_STATICTEXT,
	oleacc.ROLE_SYSTEM_TEXT:controlTypes.ROLE_EDITABLETEXT,
	oleacc.ROLE_SYSTEM_PUSHBUTTON:controlTypes.ROLE_BUTTON,
	oleacc.ROLE_SYSTEM_MENUBAR:controlTypes.ROLE_MENUBAR,
	oleacc.ROLE_SYSTEM_MENUITEM:controlTypes.ROLE_MENUITEM,
	oleacc.ROLE_SYSTEM_MENUPOPUP:controlTypes.ROLE_POPUPMENU,
	oleacc.ROLE_SYSTEM_COMBOBOX:controlTypes.ROLE_COMBOBOX,
	oleacc.ROLE_SYSTEM_LIST:controlTypes.ROLE_LIST,
	oleacc.ROLE_SYSTEM_LISTITEM:controlTypes.ROLE_LISTITEM,
	oleacc.ROLE_SYSTEM_GRAPHIC:controlTypes.ROLE_GRAPHIC,
	oleacc.ROLE_SYSTEM_HELPBALLOON:controlTypes.ROLE_HELPBALLOON,
	oleacc.ROLE_SYSTEM_TOOLTIP:controlTypes.ROLE_TOOLTIP,
	oleacc.ROLE_SYSTEM_LINK:controlTypes.ROLE_LINK,
	oleacc.ROLE_SYSTEM_OUTLINE:controlTypes.ROLE_TREEVIEW,
	oleacc.ROLE_SYSTEM_OUTLINEITEM:controlTypes.ROLE_TREEVIEWITEM,
	# NOTE(review): ROLE_SYSTEM_OUTLINEBUTTON appears again further down
	# mapped to ROLE_TREEVIEWBUTTON; that later entry wins in this literal.
	oleacc.ROLE_SYSTEM_OUTLINEBUTTON:controlTypes.ROLE_TREEVIEWITEM,
	oleacc.ROLE_SYSTEM_PAGETAB:controlTypes.ROLE_TAB,
	oleacc.ROLE_SYSTEM_PAGETABLIST:controlTypes.ROLE_TABCONTROL,
	oleacc.ROLE_SYSTEM_SLIDER:controlTypes.ROLE_SLIDER,
	oleacc.ROLE_SYSTEM_PROGRESSBAR:controlTypes.ROLE_PROGRESSBAR,
	oleacc.ROLE_SYSTEM_SCROLLBAR:controlTypes.ROLE_SCROLLBAR,
	oleacc.ROLE_SYSTEM_STATUSBAR:controlTypes.ROLE_STATUSBAR,
	oleacc.ROLE_SYSTEM_TABLE:controlTypes.ROLE_TABLE,
	oleacc.ROLE_SYSTEM_CELL:controlTypes.ROLE_TABLECELL,
	oleacc.ROLE_SYSTEM_COLUMN:controlTypes.ROLE_TABLECOLUMN,
	oleacc.ROLE_SYSTEM_ROW:controlTypes.ROLE_TABLEROW,
	oleacc.ROLE_SYSTEM_TOOLBAR:controlTypes.ROLE_TOOLBAR,
	oleacc.ROLE_SYSTEM_COLUMNHEADER:controlTypes.ROLE_TABLECOLUMNHEADER,
	oleacc.ROLE_SYSTEM_ROWHEADER:controlTypes.ROLE_TABLEROWHEADER,
	oleacc.ROLE_SYSTEM_SPLITBUTTON:controlTypes.ROLE_SPLITBUTTON,
	oleacc.ROLE_SYSTEM_BUTTONDROPDOWN:controlTypes.ROLE_DROPDOWNBUTTON,
	oleacc.ROLE_SYSTEM_SEPARATOR:controlTypes.ROLE_SEPARATOR,
	oleacc.ROLE_SYSTEM_DOCUMENT:controlTypes.ROLE_DOCUMENT,
	oleacc.ROLE_SYSTEM_ANIMATION:controlTypes.ROLE_ANIMATION,
	oleacc.ROLE_SYSTEM_APPLICATION:controlTypes.ROLE_APPLICATION,
	oleacc.ROLE_SYSTEM_GROUPING:controlTypes.ROLE_GROUPING,
	oleacc.ROLE_SYSTEM_PROPERTYPAGE:controlTypes.ROLE_PROPERTYPAGE,
	oleacc.ROLE_SYSTEM_ALERT:controlTypes.ROLE_ALERT,
	oleacc.ROLE_SYSTEM_BORDER:controlTypes.ROLE_BORDER,
	oleacc.ROLE_SYSTEM_BUTTONDROPDOWNGRID:controlTypes.ROLE_DROPDOWNBUTTONGRID,
	oleacc.ROLE_SYSTEM_CARET:controlTypes.ROLE_CARET,
	oleacc.ROLE_SYSTEM_CHARACTER:controlTypes.ROLE_CHARACTER,
	oleacc.ROLE_SYSTEM_CHART:controlTypes.ROLE_CHART,
	oleacc.ROLE_SYSTEM_CURSOR:controlTypes.ROLE_CURSOR,
	oleacc.ROLE_SYSTEM_DIAGRAM:controlTypes.ROLE_DIAGRAM,
	oleacc.ROLE_SYSTEM_DIAL:controlTypes.ROLE_DIAL,
	oleacc.ROLE_SYSTEM_DROPLIST:controlTypes.ROLE_DROPLIST,
	oleacc.ROLE_SYSTEM_BUTTONMENU:controlTypes.ROLE_MENUBUTTON,
	oleacc.ROLE_SYSTEM_EQUATION:controlTypes.ROLE_EQUATION,
	oleacc.ROLE_SYSTEM_GRIP:controlTypes.ROLE_GRIP,
	oleacc.ROLE_SYSTEM_HOTKEYFIELD:controlTypes.ROLE_HOTKEYFIELD,
	oleacc.ROLE_SYSTEM_INDICATOR:controlTypes.ROLE_INDICATOR,
	oleacc.ROLE_SYSTEM_SPINBUTTON:controlTypes.ROLE_SPINBUTTON,
	oleacc.ROLE_SYSTEM_SOUND:controlTypes.ROLE_SOUND,
	oleacc.ROLE_SYSTEM_WHITESPACE:controlTypes.ROLE_WHITESPACE,
	oleacc.ROLE_SYSTEM_IPADDRESS:controlTypes.ROLE_IPADDRESS,
	oleacc.ROLE_SYSTEM_OUTLINEBUTTON:controlTypes.ROLE_TREEVIEWBUTTON,
	oleacc.ROLE_SYSTEM_CLOCK:controlTypes.ROLE_CLOCK,
	#IAccessible2 roles
	IA2_ROLE_UNKNOWN:controlTypes.ROLE_UNKNOWN,
	IA2_ROLE_CANVAS:controlTypes.ROLE_CANVAS,
	IA2_ROLE_CAPTION:controlTypes.ROLE_CAPTION,
	IA2_ROLE_CHECK_MENU_ITEM:controlTypes.ROLE_CHECKMENUITEM,
	IA2_ROLE_COLOR_CHOOSER:controlTypes.ROLE_COLORCHOOSER,
	IA2_ROLE_DATE_EDITOR:controlTypes.ROLE_DATEEDITOR,
	IA2_ROLE_DESKTOP_ICON:controlTypes.ROLE_DESKTOPICON,
	IA2_ROLE_DESKTOP_PANE:controlTypes.ROLE_DESKTOPPANE,
	IA2_ROLE_DIRECTORY_PANE:controlTypes.ROLE_DIRECTORYPANE,
	IA2_ROLE_EDITBAR:controlTypes.ROLE_EDITBAR,
	IA2_ROLE_EMBEDDED_OBJECT:controlTypes.ROLE_EMBEDDEDOBJECT,
	IA2_ROLE_ENDNOTE:controlTypes.ROLE_ENDNOTE,
	IA2_ROLE_FILE_CHOOSER:controlTypes.ROLE_FILECHOOSER,
	IA2_ROLE_FONT_CHOOSER:controlTypes.ROLE_FONTCHOOSER,
	IA2_ROLE_FOOTER:controlTypes.ROLE_FOOTER,
	IA2_ROLE_FOOTNOTE:controlTypes.ROLE_FOOTNOTE,
	IA2_ROLE_FORM:controlTypes.ROLE_FORM,
	IA2_ROLE_FRAME:controlTypes.ROLE_FRAME,
	IA2_ROLE_GLASS_PANE:controlTypes.ROLE_GLASSPANE,
	IA2_ROLE_HEADER:controlTypes.ROLE_HEADER,
	IA2_ROLE_HEADING:controlTypes.ROLE_HEADING,
	IA2_ROLE_ICON:controlTypes.ROLE_ICON,
	IA2_ROLE_IMAGE_MAP:controlTypes.ROLE_IMAGEMAP,
	IA2_ROLE_INPUT_METHOD_WINDOW:controlTypes.ROLE_INPUTWINDOW,
	IA2_ROLE_INTERNAL_FRAME:controlTypes.ROLE_INTERNALFRAME,
	IA2_ROLE_LABEL:controlTypes.ROLE_LABEL,
	IA2_ROLE_LAYERED_PANE:controlTypes.ROLE_LAYEREDPANE,
	IA2_ROLE_NOTE:controlTypes.ROLE_NOTE,
	IA2_ROLE_OPTION_PANE:controlTypes.ROLE_OPTIONPANE,
	IA2_ROLE_PAGE:controlTypes.ROLE_PAGE,
	IA2_ROLE_PARAGRAPH:controlTypes.ROLE_PARAGRAPH,
	IA2_ROLE_RADIO_MENU_ITEM:controlTypes.ROLE_RADIOMENUITEM,
	IA2_ROLE_REDUNDANT_OBJECT:controlTypes.ROLE_REDUNDANTOBJECT,
	IA2_ROLE_ROOT_PANE:controlTypes.ROLE_ROOTPANE,
	IA2_ROLE_RULER:controlTypes.ROLE_RULER,
	IA2_ROLE_SCROLL_PANE:controlTypes.ROLE_SCROLLPANE,
	IA2_ROLE_SECTION:controlTypes.ROLE_SECTION,
	IA2_ROLE_SHAPE:controlTypes.ROLE_SHAPE,
	IA2_ROLE_SPLIT_PANE:controlTypes.ROLE_SPLITPANE,
	IA2_ROLE_TEAR_OFF_MENU:controlTypes.ROLE_TEAROFFMENU,
	IA2_ROLE_TERMINAL:controlTypes.ROLE_TERMINAL,
	IA2_ROLE_TEXT_FRAME:controlTypes.ROLE_TEXTFRAME,
	IA2_ROLE_TOGGLE_BUTTON:controlTypes.ROLE_TOGGLEBUTTON,
	IA2_ROLE_VIEW_PORT:controlTypes.ROLE_VIEWPORT,
	#some common string roles
	"frame":controlTypes.ROLE_FRAME,
	"iframe":controlTypes.ROLE_INTERNALFRAME,
	"page":controlTypes.ROLE_PAGE,
	"form":controlTypes.ROLE_FORM,
	"div":controlTypes.ROLE_SECTION,
	"li":controlTypes.ROLE_LISTITEM,
	"ul":controlTypes.ROLE_LIST,
	"tbody":controlTypes.ROLE_TABLEBODY,
	"browser":controlTypes.ROLE_WINDOW,
	"h1":controlTypes.ROLE_HEADING1,
	"h2":controlTypes.ROLE_HEADING2,
	"h3":controlTypes.ROLE_HEADING3,
	"h4":controlTypes.ROLE_HEADING4,
	"h5":controlTypes.ROLE_HEADING5,
	"h6":controlTypes.ROLE_HEADING6,
	"p":controlTypes.ROLE_PARAGRAPH,
	"hbox":controlTypes.ROLE_BOX,
	"embed":controlTypes.ROLE_EMBEDDEDOBJECT,
	"object":controlTypes.ROLE_EMBEDDEDOBJECT,
	"applet":controlTypes.ROLE_EMBEDDEDOBJECT,
}
# Maps MSAA state bit flags to NVDA controlTypes states.
IAccessibleStatesToNVDAStates={
	# NOTE(review): STATE_SYSTEM_TRAVERSED is listed twice in this literal
	# (identical value both times, so it is harmless).
	oleacc.STATE_SYSTEM_TRAVERSED:controlTypes.STATE_VISITED,
	oleacc.STATE_SYSTEM_UNAVAILABLE:controlTypes.STATE_UNAVAILABLE,
	oleacc.STATE_SYSTEM_FOCUSED:controlTypes.STATE_FOCUSED,
	oleacc.STATE_SYSTEM_SELECTED:controlTypes.STATE_SELECTED,
	oleacc.STATE_SYSTEM_BUSY:controlTypes.STATE_BUSY,
	oleacc.STATE_SYSTEM_PRESSED:controlTypes.STATE_PRESSED,
	oleacc.STATE_SYSTEM_CHECKED:controlTypes.STATE_CHECKED,
	oleacc.STATE_SYSTEM_MIXED:controlTypes.STATE_HALFCHECKED,
	oleacc.STATE_SYSTEM_READONLY:controlTypes.STATE_READONLY,
	oleacc.STATE_SYSTEM_EXPANDED:controlTypes.STATE_EXPANDED,
	oleacc.STATE_SYSTEM_COLLAPSED:controlTypes.STATE_COLLAPSED,
	oleacc.STATE_SYSTEM_OFFSCREEN:controlTypes.STATE_OFFSCREEN,
	oleacc.STATE_SYSTEM_INVISIBLE:controlTypes.STATE_INVISIBLE,
	oleacc.STATE_SYSTEM_TRAVERSED:controlTypes.STATE_VISITED,
	oleacc.STATE_SYSTEM_LINKED:controlTypes.STATE_LINKED,
	oleacc.STATE_SYSTEM_HASPOPUP:controlTypes.STATE_HASPOPUP,
	oleacc.STATE_SYSTEM_PROTECTED:controlTypes.STATE_PROTECTED,
	oleacc.STATE_SYSTEM_SELECTABLE:controlTypes.STATE_SELECTABLE,
	oleacc.STATE_SYSTEM_FOCUSABLE:controlTypes.STATE_FOCUSABLE,
}

# Maps IAccessible2 state bit flags to NVDA controlTypes states.
IAccessible2StatesToNVDAStates={
	IA2_STATE_REQUIRED:controlTypes.STATE_REQUIRED,
	IA2_STATE_DEFUNCT:controlTypes.STATE_DEFUNCT,
	#IA2_STATE_STALE:controlTypes.STATE_DEFUNCT,
	IA2_STATE_INVALID_ENTRY:controlTypes.STATE_INVALID_ENTRY,
	IA2_STATE_MODAL:controlTypes.STATE_MODAL,
	IA2_STATE_SUPPORTS_AUTOCOMPLETION:controlTypes.STATE_AUTOCOMPLETE,
	IA2_STATE_MULTI_LINE:controlTypes.STATE_MULTILINE,
	IA2_STATE_ICONIFIED:controlTypes.STATE_ICONIFIED,
	IA2_STATE_EDITABLE:controlTypes.STATE_EDITABLE,
}

# Handles of registered winEvent hooks (filled in elsewhere).
winEventHookIDs=[]
def normalizeIAccessible(pacc,childID=0):
	"""Return *pacc* as an IAccessible, upgraded to IAccessible2 if possible.

	Raises RuntimeError when the object cannot even provide IAccessible.
	"""
	if not isinstance(pacc,IAccessible):
		try:
			pacc=pacc.QueryInterface(IAccessible)
		except COMError:
			raise RuntimeError("%s Not an IAccessible"%pacc)
	# #2558: IAccessible2 doesn't support simple children.
	# Therefore, it doesn't make sense to use IA2 if the child ID is non-0.
	if childID==0 and not isinstance(pacc,IAccessible2):
		try:
			s=pacc.QueryInterface(IServiceProvider)
			pacc2=s.QueryService(IAccessible._iid_,IAccessible2)
			if not pacc2:
				# QueryService should fail if IA2 is not supported, but some applications such as AIM 7 misbehave and return a null COM pointer.
				# Treat this as if QueryService failed.
				raise ValueError
			pacc=pacc2
		except:
			# IA2 unavailable; fall back to the plain IAccessible.
			pass
	return pacc
def accessibleObjectFromEvent(window,objectID,childID):
	"""Resolve a winEvent to a normalized (IAccessible, childID) pair.

	Returns None (and logs a debug warning) when resolution fails.
	"""
	try:
		pacc,childID=oleacc.AccessibleObjectFromEvent(window,objectID,childID)
	except Exception as e:
		log.debugWarning("oleacc.AccessibleObjectFromEvent with window %s, objectID %s and childID %s: %s"%(window,objectID,childID,e))
		return None
	return (normalizeIAccessible(pacc,childID),childID)
def accessibleObjectFromPoint(x,y):
	"""Return the (IAccessible, childID) pair at screen point, or None."""
	try:
		pacc, child = oleacc.AccessibleObjectFromPoint(x, y)
	except:
		return None
	return (normalizeIAccessible(pacc,child),child)
def windowFromAccessibleObject(ia):
	"""Return the window handle hosting *ia*, or 0 when it can't be found."""
	try:
		return oleacc.WindowFromAccessibleObject(ia)
	except:
		return 0
def accessibleChildren(ia,startIndex,numChildren):
	"""Fetch children of an IAccessible as (IAccessible, childID) tuples.
	Null children (a server bug) are filtered out, and raw COM pointers are
	normalized before being returned.
	"""
	children=[]
	for rawChild in oleacc.AccessibleChildren(ia,startIndex,numChildren):
		if rawChild is None:
			# This is a bug in the server.
			# Filtering these out here makes life easier for the caller.
			continue
		if isinstance(rawChild,(comtypes.client.lazybind.Dispatch,comtypes.client.dynamic._Dispatch,IUnknown)):
			children.append((normalizeIAccessible(rawChild),0))
		elif isinstance(rawChild,int):
			# A simple child ID on the parent accessible.
			children.append((ia,rawChild))
		else:
			children.append(rawChild)
	return children
def accFocus(ia):
	"""Return the focused descendant of an IAccessible via IAccessible::get_accFocus.
	@return: an (IAccessible, childID) tuple for the focus, or None if there is
	no focus or it could not be retrieved.
	"""
	try:
		res=ia.accFocus
		if isinstance(res,comtypes.client.lazybind.Dispatch) or isinstance(res,comtypes.client.dynamic._Dispatch) or isinstance(res,IUnknown):
			# The focus is a full accessible object in its own right.
			new_ia=normalizeIAccessible(res)
			new_child=0
		elif res==0:
			# #3005: Don't call accChild for CHILDID_SELF.
			new_ia=ia
			new_child=res
		elif isinstance(res,int):
			# accFocus can return a child ID even when there is actually an IAccessible for that child; e.g. Lotus Symphony.
			try:
				new_ia=ia.accChild(res)
			except:
				new_ia=None
			if new_ia:
				new_ia=normalizeIAccessible(new_ia)
				new_child=0
			else:
				# No separate object; keep the parent with the simple child ID.
				new_ia=ia
				new_child=res
		else:
			return None
		return (new_ia,new_child)
	except:
		return None
def accHitTest(ia,x,y):
	"""Return the accessible under the screen point (x, y), descending
	recursively into child IAccessibles.
	@return: an (IAccessible, childID) tuple, or None if hit testing failed.
	"""
	try:
		res=ia.accHitTest(x,y)
	except COMError:
		return None
	if isinstance(res,comtypes.client.lazybind.Dispatch) or isinstance(res,comtypes.client.dynamic._Dispatch) or isinstance(res,IUnknown):
		# accHitTest returned a full object: recurse into it for the deepest hit.
		# The recursive call already yields an (IAccessible, childID) tuple (or
		# None), so it must be returned as-is. The previous code appended ",0",
		# producing a nested ((ia,child),0) tuple — or a truthy (None,0) on
		# failure — inconsistent with every sibling helper in this module.
		return accHitTest(normalizeIAccessible(res),x,y)
	elif isinstance(res,int):
		# A simple child of this accessible.
		return ia,res
	return None
def accChild(ia,child):
	"""Resolve a child ID to an (IAccessible, childID) pair via IAccessible::get_accChild.
	@return: the resolved pair, or None on failure.
	"""
	try:
		res=ia.accChild(child)
		if not res:
			# No separate object for this child; keep the parent/childID pair.
			return (ia,child)
		if isinstance(res,(comtypes.client.lazybind.Dispatch,comtypes.client.dynamic._Dispatch,IUnknown)):
			return normalizeIAccessible(res),0
	except:
		pass
	return None
def accParent(ia,child):
	"""Return the parent of an (IAccessible, childID) pair.
	For a simple child (non-0 childID) the parent is the same IAccessible with
	childID 0; otherwise IAccessible::get_accParent is used.
	@return: an (IAccessible, childID) tuple, or None on failure.
	"""
	try:
		if not child:
			res=ia.accParent
			if isinstance(res,comtypes.client.lazybind.Dispatch) or isinstance(res,comtypes.client.dynamic._Dispatch) or isinstance(res,IUnknown):
				new_ia=normalizeIAccessible(res)
				new_child=0
			else:
				raise ValueError("no IAccessible interface")
		else:
			# A simple child: its parent is the containing IAccessible itself.
			new_ia=ia
			new_child=0
		return (new_ia,new_child)
	except:
		return None
def accNavigate(ia,child,direction):
	"""Navigate from an (IAccessible, childID) pair in the given MSAA direction.
	@param direction: an MSAA navigation constant (NAVDIR_*)
	@return: an (IAccessible, childID) tuple for the target, or None (implicitly)
	if navigation failed or returned an unexpected value.
	"""
	res=None
	try:
		res=ia.accNavigate(direction,child)
		if isinstance(res,int):
			# A simple child of the same IAccessible.
			new_ia=ia
			new_child=res
		elif isinstance(res,comtypes.client.lazybind.Dispatch) or isinstance(res,comtypes.client.dynamic._Dispatch) or isinstance(res,IUnknown):
			new_ia=normalizeIAccessible(res)
			new_child=0
		else:
			raise RuntimeError
		return (new_ia,new_child)
	except:
		# Fall through, implicitly returning None.
		pass
# Maps win event IDs to the NVDA event names they are translated into.
# Win events not listed here are ignored by winEventToNVDAEvent.
winEventIDsToNVDAEventNames={
	winUser.EVENT_SYSTEM_DESKTOPSWITCH:"desktopSwitch",
	winUser.EVENT_SYSTEM_FOREGROUND:"gainFocus",
	winUser.EVENT_SYSTEM_ALERT:"alert",
	winUser.EVENT_SYSTEM_MENUSTART:"menuStart",
	winUser.EVENT_SYSTEM_MENUEND:"menuEnd",
	winUser.EVENT_SYSTEM_MENUPOPUPSTART:"menuStart",
	winUser.EVENT_SYSTEM_MENUPOPUPEND:"menuEnd",
	winUser.EVENT_SYSTEM_SCROLLINGSTART:"scrollingStart",
	winUser.EVENT_SYSTEM_SWITCHEND:"switchEnd",
	winUser.EVENT_OBJECT_FOCUS:"gainFocus",
	winUser.EVENT_OBJECT_SHOW:"show",
	winUser.EVENT_OBJECT_HIDE:"hide",
	winUser.EVENT_OBJECT_DESTROY:"destroy",
	winUser.EVENT_OBJECT_DESCRIPTIONCHANGE:"descriptionChange",
	winUser.EVENT_OBJECT_LOCATIONCHANGE:"locationChange",
	winUser.EVENT_OBJECT_NAMECHANGE:"nameChange",
	winUser.EVENT_OBJECT_SELECTION:"selection",
	winUser.EVENT_OBJECT_SELECTIONADD:"selectionAdd",
	winUser.EVENT_OBJECT_SELECTIONREMOVE:"selectionRemove",
	winUser.EVENT_OBJECT_SELECTIONWITHIN:"selectionWithIn",
	winUser.EVENT_OBJECT_STATECHANGE:"stateChange",
	winUser.EVENT_OBJECT_VALUECHANGE:"valueChange",
	IA2_EVENT_TEXT_CARET_MOVED:"caret",
	IA2_EVENT_DOCUMENT_LOAD_COMPLETE:"documentLoadComplete",
	IA2_EVENT_OBJECT_ATTRIBUTE_CHANGED:"IA2AttributeChange",
}
def winEventToNVDAEvent(eventID,window,objectID,childID,useCache=True):
	"""Tries to convert a win event ID to an NVDA event name, and instantiate or fetch an NVDAObject for the win event parameters.
	@param eventID: the win event ID (type)
	@type eventID: integer
	@param window: the win event's window handle
	@type window: integer
	@param objectID: the win event's object ID
	@type objectID: integer
	@param childID: the win event's childID
	@type childID: integer
	@param useCache: C{True} to use the L{liveNVDAObjectTable} cache when retrieving an NVDAObject, C{False} if the cache should not be used.
	@type useCache: boolean
	@returns: the NVDA event name and the NVDAObject the event is for, or None if the event cannot be translated
	@rtype: tuple of string and L{NVDAObjects.IAccessible.IAccessible}
	"""
	NVDAEventName=winEventIDsToNVDAEventNames.get(eventID,None)
	if not NVDAEventName:
		return None
	#Ignore any events with invalid window handles
	if not window or not winUser.isWindow(window):
		return None
	#Make sure this window does not have a ghost window if possible
	if NVDAObjects.window.GhostWindowFromHungWindow and NVDAObjects.window.GhostWindowFromHungWindow(window):
		return None
	#We do not support MSAA object proxied from native UIA
	if UIAHandler.handler and UIAHandler.handler.isUIAWindow(window):
		return None
	obj=None
	if useCache:
		#See if we already know an object by this win event info
		obj=liveNVDAObjectTable.get((window,objectID,childID),None)
	#If we don't yet have the object, then actually instantiate it.
	if not obj:
		obj=NVDAObjects.IAccessible.getNVDAObjectFromEvent(window,objectID,childID)
	#At this point if we don't have an object then we can't do any more
	if not obj:
		return None
	#SDM MSAA objects sometimes don't contain enough information to be useful
	#Sometimes there is a real window that does, so try to get the SDMChild property on the NVDAObject, and if successful use that as obj instead.
	if 'bosa_sdm' in obj.windowClassName:
		SDMChild=getattr(obj,'SDMChild',None)
		if SDMChild: obj=SDMChild
	return (NVDAEventName,obj)
def winEventCallback(handle,eventID,window,objectID,childID,threadID,timestamp):
	"""Low-level WINEVENTPROC callback registered via setWinEventHook.
	Filters out unsupported and unwanted win events and feeds the remainder to
	winEventLimiter for later processing in pumpAll.
	"""
	try:
		#Ignore all object IDs from alert onwards (sound, nativeom etc) as we don't support them
		if objectID<=winUser.OBJID_ALERT:
			return
		#Ignore all locationChange events except ones for the caret
		if eventID==winUser.EVENT_OBJECT_LOCATIONCHANGE and objectID!=winUser.OBJID_CARET:
			return
		if eventID==winUser.EVENT_OBJECT_DESTROY:
			processDestroyWinEvent(window,objectID,childID)
			return
		#Change window objIDs to client objIDs for better reporting of objects
		if (objectID==0) and (childID==0):
			objectID=winUser.OBJID_CLIENT
		#Ignore events with invalid window handles
		isWindow = winUser.isWindow(window) if window else 0
		if window==0 or (not isWindow and eventID in (winUser.EVENT_SYSTEM_SWITCHSTART,winUser.EVENT_SYSTEM_SWITCHEND,winUser.EVENT_SYSTEM_MENUEND,winUser.EVENT_SYSTEM_MENUPOPUPEND)):
			window=winUser.getDesktopWindow()
		elif not isWindow:
			return
		# Negative child IDs: Gecko can fire these on an inner MozillaWindowClass
		# window; walk up to the nearest non-popup Mozilla ancestor and use it.
		if childID<0:
			tempWindow=window
			while tempWindow and not winUser.getWindowStyle(tempWindow)&winUser.WS_POPUP and winUser.getClassName(tempWindow)=="MozillaWindowClass":
				tempWindow=winUser.getAncestor(tempWindow,winUser.GA_PARENT)
			if tempWindow and winUser.getClassName(tempWindow).startswith('Mozilla'):
				window=tempWindow
		windowClassName=winUser.getClassName(window)
		#At the moment we can't handle show, hide or reorder events on Mozilla Firefox Location bar,as there are just too many of them
		#Ignore show, hide and reorder on MozillaDropShadowWindowClass windows.
		if windowClassName.startswith('Mozilla') and eventID in (winUser.EVENT_OBJECT_SHOW,winUser.EVENT_OBJECT_HIDE,winUser.EVENT_OBJECT_REORDER) and childID<0:
			#Mozilla Gecko can sometimes fire win events on a catch-all window which isn't really the real window
			#Move up the ancestry to find the real mozilla Window and use that
			if winUser.getClassName(window)=='MozillaDropShadowWindowClass':
				return
		#We never want to see foreground events for the Program Manager or Shell (task bar)
		if eventID==winUser.EVENT_SYSTEM_FOREGROUND and windowClassName in ("Progman","Shell_TrayWnd"):
			return
		if windowClassName=="MSNHiddenWindowClass":
			# HACK: Events get fired by this window in Windows Live Messenger 2009 when it starts.
			# If we send a WM_NULL to this window at this point (which happens in accessibleObjectFromEvent), Messenger will silently exit (#677).
			# Therefore, completely ignore these events, which is useless to us anyway.
			return
		winEventLimiter.addEvent(eventID,window,objectID,childID,threadID)
	except:
		log.error("winEventCallback", exc_info=True)
def processGenericWinEvent(eventID,window,objectID,childID):
	"""Converts the win event to an NVDA event,
	Checks to see if this NVDAObject equals the current focus.
	If all goes well, then the event is queued and we return True
	@param eventID: a win event ID (type)
	@type eventID: integer
	@param window: a win event's window handle
	@type window: integer
	@param objectID: a win event's object ID
	@type objectID: integer
	@param childID: a win event's child ID
	@type childID: integer
	@returns: True if the event was processed, False otherwise.
	@rtype: boolean
	"""
	#Notify appModuleHandler of this new window
	appModuleHandler.update(winUser.getWindowThreadProcessID(window)[0])
	#Handle particular events for the special MSAA caret object just as if they were for the focus object
	focus=eventHandler.lastQueuedFocusObject
	if focus and objectID==winUser.OBJID_CARET and eventID in (winUser.EVENT_OBJECT_LOCATIONCHANGE,winUser.EVENT_OBJECT_SHOW):
		NVDAEvent=("caret",focus)
	else:
		NVDAEvent=winEventToNVDAEvent(eventID,window,objectID,childID)
		if not NVDAEvent:
			return False
	if NVDAEvent[0]=="nameChange" and objectID==winUser.OBJID_CURSOR:
		# A mouse cursor name change updates the reported mouse shape instead of queueing an event.
		mouseHandler.updateMouseShape(NVDAEvent[1].name)
		# The event was handled; honour the documented boolean contract.
		# (Previously this was a bare ``return``, yielding None instead of True.)
		return True
	if NVDAEvent[1]==focus:
		# Reuse the cached focus object so state is shared with the focus.
		NVDAEvent=(NVDAEvent[0],focus)
	eventHandler.queueEvent(*NVDAEvent)
	return True
def processFocusWinEvent(window,objectID,childID,force=False):
	"""checks to see if the focus win event is not the same as the existing focus,
	then converts the win event to an NVDA event (instantiating an NVDA Object) then calls processFocusNVDAEvent. If all is ok it returns True.
	@param window: a win event's window handle
	@type window: integer
	@param objectID: a win event's object ID
	@type objectID: integer
	@param childID: a win event's child ID
	@type childID: integer
	@param force: If True, the shouldAllowIAccessibleFocusEvent property of the object is ignored.
	@type force: boolean
	@returns: True if the focus is valid and was handled, False otherwise.
	@rtype: boolean
	"""
	windowClassName=winUser.getClassName(window)
	# Generally, we must ignore focus on child windows of SDM windows as we only want the SDM MSAA events.
	# However, we don't want to ignore focus if the child ID isn't 0,
	# as this is a child control and the SDM MSAA events don't handle child controls.
	if childID==0 and not windowClassName.startswith('bosa_sdm') and winUser.getClassName(winUser.getAncestor(window,winUser.GA_PARENT)).startswith('bosa_sdm'):
		return False
	rootWindow=winUser.getAncestor(window,winUser.GA_ROOT)
	# If this window is not within the foreground window and this window or its root window is not a popup window, and this window's root window is not the highest in the z-order
	if not winUser.isDescendantWindow(winUser.getForegroundWindow(),window) and not (winUser.getWindowStyle(window) & winUser.WS_POPUP or winUser.getWindowStyle(rootWindow)&winUser.WS_POPUP) and winUser.getPreviousWindow(rootWindow)!=0:
		# This is a focus event from a background window, so ignore it.
		return False
	#Notify appModuleHandler of this new foreground window
	appModuleHandler.update(winUser.getWindowThreadProcessID(window)[0])
	#If Java access bridge is running, and this is a java window, then pass it to java and forget about it
	if JABHandler.isRunning and JABHandler.isJavaWindow(window):
		JABHandler.event_enterJavaWindow(window)
		return True
	#Convert the win event to an NVDA event
	NVDAEvent=winEventToNVDAEvent(winUser.EVENT_OBJECT_FOCUS,window,objectID,childID,useCache=False)
	if not NVDAEvent:
		return False
	eventName,obj=NVDAEvent
	if (childID==0 and obj.IAccessibleRole==oleacc.ROLE_SYSTEM_LIST) or (objectID==winUser.OBJID_CLIENT and "SysListView32" in obj.windowClassName):
		# Some controls incorrectly fire focus on child ID 0, even when there is a child with focus.
		# Ask the control for its real focused child and use that instead.
		try:
			realChildID=obj.IAccessibleObject.accFocus
		except:
			realChildID=None
		if isinstance(realChildID,int) and realChildID>0 and realChildID!=childID:
			realObj=NVDAObjects.IAccessible.IAccessible(IAccessibleObject=obj.IAccessibleObject,IAccessibleChildID=realChildID,event_windowHandle=window,event_objectID=objectID,event_childID=realChildID)
			if realObj:
				obj=realObj
	return processFocusNVDAEvent(obj,force=force)
def processFocusNVDAEvent(obj,force=False):
	"""Processes a focus NVDA event.
	If the focus event is valid, it is queued.
	@param obj: the NVDAObject the focus event is for
	@type obj: L{NVDAObjects.NVDAObject}
	@param force: If True, the shouldAllowIAccessibleFocusEvent property of the object is ignored.
	@type force: boolean
	@return: C{True} if the focus event is valid and was queued, C{False} otherwise.
	@rtype: boolean
	"""
	if not force and isinstance(obj,NVDAObjects.IAccessible.IAccessible):
		focus=eventHandler.lastQueuedFocusObject
		# Suppress events that duplicate the focus we already queued.
		if isinstance(focus,NVDAObjects.IAccessible.IAccessible) and focus.isDuplicateIAccessibleEvent(obj):
			return True
		if not obj.shouldAllowIAccessibleFocusEvent:
			return False
	eventHandler.queueEvent('gainFocus',obj)
	return True
class SecureDesktopNVDAObject(NVDAObjects.window.Desktop):
	"""Placeholder NVDAObject given focus while a secure desktop (e.g. UAC) is active."""

	def findOverlayClasses(self,clsList):
		clsList.append(SecureDesktopNVDAObject)
		return clsList

	def _get_name(self):
		# Translators: Message to indicate User Account Control (UAC) or other secure desktop screen is active.
		return _("Secure Desktop")

	def _get_role(self):
		return controlTypes.ROLE_PANE

	def event_gainFocus(self):
		super(SecureDesktopNVDAObject, self).event_gainFocus()
		# After handling the focus, NVDA should sleep while the secure desktop is active.
		self.sleepMode = self.SLEEP_FULL
def processDesktopSwitchWinEvent(window,objectID,childID):
	"""Process a desktopSwitch win event.
	Distinguishes switching back to the normal input desktop (focus is
	corrected shortly afterwards) from switching to a secure desktop (a
	placeholder SecureDesktopNVDAObject gains focus).
	"""
	# OpenInputDesktop succeeds only when the input desktop is not secure.
	hDesk=windll.user32.OpenInputDesktop(0, False, 0)
	if hDesk!=0:
		windll.user32.CloseDesktop(hDesk)
		import wx
		# Allow the desktop switch to settle before locating the real focus.
		wx.CallLater(200, _correctFocus)
	else:
		# Switching to a secure desktop.
		# We don't receive key up events for any keys down before switching to a secure desktop,
		# so clear our recorded modifiers.
		keyboardHandler.currentModifiers.clear()
		obj=SecureDesktopNVDAObject(windowHandle=window)
		eventHandler.executeEvent("gainFocus",obj)
def _correctFocus():
	"""Queue a gainFocus event for the object that currently has system focus."""
	eventHandler.queueEvent("gainFocus",api.getDesktopObject().objectWithFocus())
def processForegroundWinEvent(window,objectID,childID):
	"""checks to see if the foreground win event is not the same as the existing focus or any of its parents,
	then converts the win event to an NVDA event (instantiating an NVDA Object) and then checks the NVDAObject against the existing focus object.
	If all is ok it queues the foreground event to NVDA and returns True.
	@param window: a win event's window handle
	@type window: integer
	@param objectID: a win event's object ID
	@type objectID: integer
	@param childID: a win event's child ID
	@type childID: integer
	@returns: True if the foreground was processed, False otherwise.
	@rtype: boolean
	"""
	#Ignore foreground events on windows that aren't the current foreground window
	if window!=winUser.getForegroundWindow():
		return False
	# If there is a pending gainFocus, it will handle the foreground object.
	oldFocus=eventHandler.lastQueuedFocusObject
	#If this foreground win event's window is an ancestor of the existing focus's window, then ignore it
	if isinstance(oldFocus,NVDAObjects.window.Window) and winUser.isDescendantWindow(window,oldFocus.windowHandle):
		return False
	#If the existing focus has the same win event params as these, then ignore this event
	if isinstance(oldFocus,NVDAObjects.IAccessible.IAccessible) and window==oldFocus.event_windowHandle and objectID==oldFocus.event_objectID and childID==oldFocus.event_childID:
		return False
	#Notify appModuleHandler of this new foreground window
	appModuleHandler.update(winUser.getWindowThreadProcessID(window)[0])
	#If Java access bridge is running, and this is a java window, then pass it to java and forget about it
	if JABHandler.isRunning and JABHandler.isJavaWindow(window):
		JABHandler.event_enterJavaWindow(window)
		return True
	#Convert the win event to an NVDA event
	NVDAEvent=winEventToNVDAEvent(winUser.EVENT_SYSTEM_FOREGROUND,window,objectID,childID,useCache=False)
	if not NVDAEvent:
		return False
	eventHandler.queueEvent(*NVDAEvent)
	return True
def processShowWinEvent(window,objectID,childID):
	"""Process a show win event, converting and queueing it for supported windows.
	Show events flood from most controls, so only tooltip, IMM candidate and
	notification bar windows are handled.
	"""
	supportedClasses=("Frame Notification Bar","tooltips_class32","mscandui21.candidate","mscandui40.candidate","MSCandUIWindow_Candidate")
	windowClass=winUser.getClassName(window)
	if windowClass in supportedClasses and objectID==winUser.OBJID_CLIENT:
		NVDAEvent=winEventToNVDAEvent(winUser.EVENT_OBJECT_SHOW,window,objectID,childID)
		if NVDAEvent:
			eventHandler.queueEvent(*NVDAEvent)
def processDestroyWinEvent(window,objectID,childID):
	"""Process a destroy win event.
	This removes the object associated with the event parameters from L{liveNVDAObjectTable} if such an object exists.
	"""
	try:
		del liveNVDAObjectTable[(window,objectID,childID)]
	except KeyError:
		# Nothing cached for these event parameters; nothing to clean up.
		pass
	#Specific support for input method MSAA candidate lists.
	#When their window is destroyed we must correct focus to its parent - which could be a composition string
	# so can't use generic focus correction. (#2695)
	focus=api.getFocusObject()
	from NVDAObjects.IAccessible.mscandui import BaseCandidateItem
	if objectID==0 and childID==0 and isinstance(focus,BaseCandidateItem) and window==focus.windowHandle and not eventHandler.isPendingEvents("gainFocus"):
		obj=focus.parent
		if obj:
			eventHandler.queueEvent("gainFocus",obj)
def processMenuStartWinEvent(eventID, window, objectID, childID, validFocus):
	"""Process a menuStart win event.
	@param validFocus: whether a valid focus event was already processed this cycle
	@postcondition: Focus will be directed to the menu if appropriate.
	"""
	if validFocus:
		lastFocus=eventHandler.lastQueuedFocusObject
		if isinstance(lastFocus,NVDAObjects.IAccessible.IAccessible) and lastFocus.IAccessibleRole in (oleacc.ROLE_SYSTEM_MENUPOPUP, oleacc.ROLE_SYSTEM_MENUITEM):
			# Focus has already been set to a menu or menu item, so we don't need to handle the menuStart.
			return
	NVDAEvent = winEventToNVDAEvent(eventID, window, objectID, childID)
	if not NVDAEvent:
		return
	eventName, obj = NVDAEvent
	if obj.IAccessibleRole != oleacc.ROLE_SYSTEM_MENUPOPUP:
		# menuStart on anything other than a menu is silly.
		return
	# Force the focus so shouldAllowIAccessibleFocusEvent cannot suppress it.
	processFocusNVDAEvent(obj, force=True)
def processFakeFocusWinEvent(eventID, window, objectID, childID):
	"""Process a fake focus win event.
	@postcondition: The focus will be found and an event generated for it if appropriate.
	"""
	# A suitable event for faking the focus has been received with no focus event, so we probably need to find the focus and fake it.
	# However, it is possible that the focus event has simply been delayed, so wait a bit and only do it if the focus hasn't changed yet.
	import wx
	wx.CallLater(50, _fakeFocus, api.getFocusObject())
def _fakeFocus(oldFocus):
	"""Fire a focus event for the system focus object, unless the focus has
	already changed since the fake focus was scheduled.
	@param oldFocus: the focus object at scheduling time
	"""
	if oldFocus is not api.getFocusObject():
		# The focus moved in the meantime; a real event will handle it.
		return
	currentFocus = api.getDesktopObject().objectWithFocus()
	if currentFocus:
		processFocusNVDAEvent(currentFocus)
# ctypes function pointer wrapping winEventCallback with the WINEVENTPROC signature.
cWinEventCallback=WINFUNCTYPE(None,c_int,c_int,c_int,c_int,c_int,c_int,c_int)(winEventCallback)
# IAccPropServices COM object, created in initialize(); stays None if unavailable.
accPropServices=None
def initialize():
	"""Create the accPropServices COM object and register all win event hooks.
	Hook handles are stored in winEventHookIDs for later removal in terminate().
	"""
	global accPropServices
	try:
		accPropServices=comtypes.client.CreateObject(CAccPropServices)
	except (WindowsError,COMError) as e:
		# Not fatal: getIAccIdentity falls back to manual unpacking.
		log.debugWarning("AccPropServices is not available: %s"%e)
	# Iterate the mapping directly; calling .keys() built a needless list.
	for eventType in winEventIDsToNVDAEventNames:
		hookID=winUser.setWinEventHook(eventType,eventType,0,cWinEventCallback,0,0,0)
		if hookID:
			winEventHookIDs.append(hookID)
		else:
			log.error("initialize: could not register callback for event %s (%s)"%(eventType,winEventIDsToNVDAEventNames[eventType]))
def _processPendingFocusWinEvents(focusWinEvents):
	"""Process queued focus/foreground win events, newest first, until one is accepted.
	@param focusWinEvents: queued (eventID, window, objectID, childID, ...) focus tuples
	@return: True if one of the events produced a valid focus, False otherwise.
	"""
	for focusWinEvent in reversed(focusWinEvents):
		procFunc=processForegroundWinEvent if focusWinEvent[0]==winUser.EVENT_SYSTEM_FOREGROUND else processFocusWinEvent
		if procFunc(*(focusWinEvent[1:])):
			return True
	return False
def pumpAll():
	"""Flush the win event limiter and dispatch the collected events to NVDA."""
	#Receive all the winEvents from the limiter for this cycle
	winEvents=winEventLimiter.flushEvents()
	focusWinEvents=[]
	validFocus=False
	fakeFocusEvent=None
	for winEvent in winEvents[0-MAX_WINEVENTS:]:
		#We want to only pass on one focus event to NVDA, but we always want to use the most recent possible one
		if winEvent[0] in (winUser.EVENT_OBJECT_FOCUS,winUser.EVENT_SYSTEM_FOREGROUND):
			focusWinEvents.append(winEvent)
			continue
		else:
			# A non-focus event: flush any queued focus events first.
			# (This loop was previously duplicated inline here and after the loop.)
			if _processPendingFocusWinEvents(focusWinEvents):
				validFocus=True
			focusWinEvents=[]
		if winEvent[0]==winUser.EVENT_SYSTEM_DESKTOPSWITCH:
			processDesktopSwitchWinEvent(*winEvent[1:])
		elif winEvent[0]==winUser.EVENT_OBJECT_SHOW:
			processShowWinEvent(*winEvent[1:])
		elif winEvent[0] in MENU_EVENTIDS+(winUser.EVENT_SYSTEM_SWITCHEND,):
			# If there is no valid focus event, we may need to use this to fake the focus later.
			fakeFocusEvent=winEvent
		else:
			processGenericWinEvent(*winEvent)
	# Handle any focus events still queued at the end of the cycle.
	if _processPendingFocusWinEvents(focusWinEvents):
		validFocus=True
	if fakeFocusEvent:
		# Try this as a last resort.
		if fakeFocusEvent[0] in (winUser.EVENT_SYSTEM_MENUSTART, winUser.EVENT_SYSTEM_MENUPOPUPSTART):
			# menuStart needs to be handled specially and might act even if there was a valid focus event.
			processMenuStartWinEvent(*fakeFocusEvent, validFocus=validFocus)
		elif not validFocus:
			# Other fake focus events only need to be handled if there was no valid focus event.
			processFakeFocusWinEvent(*fakeFocusEvent)
def terminate():
	"""Unregister every win event hook that initialize() installed."""
	for hookID in winEventHookIDs:
		winUser.unhookWinEvent(hookID)
def getIAccIdentity(pacc,childID):
	"""Decompose an IAccessible's identity string into its win event parameters.
	Uses IAccPropServices when available; otherwise unpacks the identity string
	layout manually.
	@return: a dict with windowHandle and childID, plus objectID or menuHandle.
	"""
	IAccIdentityObject=pacc.QueryInterface(IAccIdentity)
	stringPtr,stringSize=IAccIdentityObject.getIdentityString(childID)
	try:
		if accPropServices:
			hwnd,objectID,childID=accPropServices.DecomposeHwndIdentityString(stringPtr,stringSize)
			# objectID comes back unsigned; reinterpret as signed (OBJID_* values are negative).
			return dict(windowHandle=hwnd,objectID=c_int(objectID).value,childID=childID)
		# Manual fallback.
		# NOTE(review): layout assumed to be (flags, hwnd, objectID-or-menuHandle, childID),
		# with flag bit 2 selecting a menu handle — confirm against the oleacc documentation.
		stringPtr=cast(stringPtr,POINTER(c_char*stringSize))
		fields=struct.unpack('IIiI',stringPtr.contents.raw)
		d={}
		d['childID']=fields[3]
		if fields[0]&2:
			d['menuHandle']=fields[2]
		else:
			d['objectID']=fields[2]
		d['windowHandle']=fields[1]
		return d
	finally:
		# The identity string is COM-allocated; always free it.
		windll.ole32.CoTaskMemFree(stringPtr)
def findGroupboxObject(obj):
	"""Find the group box (if any) that visually contains the given NVDAObject.
	Walks backwards through previous sibling windows looking for a visible
	Button window with the BS_GROUPBOX style whose bounds enclose obj's location.
	@return: the group box NVDAObject, or None (implicitly) if there is none.
	"""
	prevWindow=winUser.getPreviousWindow(obj.windowHandle)
	while prevWindow:
		if winUser.getClassName(prevWindow)=="Button" and winUser.getWindowStyle(prevWindow)&winUser.BS_GROUPBOX and winUser.isWindowVisible(prevWindow):
			groupObj=NVDAObjects.IAccessible.getNVDAObjectFromEvent(prevWindow,winUser.OBJID_CLIENT,0)
			try:
				(left,top,width,height)=obj.location
				(groupLeft,groupTop,groupWidth,groupHeight)=groupObj.location
			except:
				# One of the objects has no usable location; give up.
				return
			# Only accept the candidate if it is a grouping and fully encloses obj.
			if groupObj.IAccessibleRole==oleacc.ROLE_SYSTEM_GROUPING and left>=groupLeft and (left+width)<=(groupLeft+groupWidth) and top>=groupTop and (top+height)<=(groupTop+groupHeight):
				return groupObj
		prevWindow=winUser.getPreviousWindow(prevWindow)
def getRecursiveTextFromIAccessibleTextObject(obj,startOffset=0,endOffset=-1):
	"""Return the text of an IAccessibleText object, recursing into embedded objects.
	Embedded object characters (U+FFFC) are replaced with the recursive text of
	the corresponding hyperlink object. If the text is empty or whitespace, the
	object's MSAA name, value and description are joined and returned instead.
	"""
	if not isinstance(obj,IAccessibleText):
		try:
			textObject=obj.QueryInterface(IAccessibleText)
		except:
			textObject=None
	else:
		textObject=obj
	if not isinstance(obj,IAccessible):
		try:
			accObject=obj.QueryInterface(IAccessible)
		except:
			return ""
	else:
		accObject=obj
	try:
		text=textObject.text(startOffset,endOffset)
	except:
		text=None
	if not text or text.isspace():
		# No usable text: fall back to the MSAA name/value/description.
		try:
			name=accObject.accName(0)
		except:
			name=None
		try:
			value=accObject.accValue(0)
		except:
			value=None
		try:
			description=accObject.accDescription(0)
		except:
			description=None
		return " ".join([x for x in [name,value,description] if x and not x.isspace()])
	try:
		hypertextObject=accObject.QueryInterface(IAccessibleHypertext)
	except:
		# No hypertext support: embedded objects cannot be resolved, return the raw text.
		return text
	textList=[]
	for i in xrange(len(text)):
		t=text[i]
		# 0xFFFC is the Unicode object replacement character, used by IA2 for embedded objects.
		if ord(t)==0xFFFC:
			try:
				childTextObject=hypertextObject.hyperlink(hypertextObject.hyperlinkIndex(i+startOffset)).QueryInterface(IAccessible)
				t=" %s "%getRecursiveTextFromIAccessibleTextObject(childTextObject)
			except:
				pass
		textList.append(t)
	return "".join(textList).replace('  ',' ')
def splitIA2Attribs(attribsString):
	"""Split an IAccessible2 attributes string into a dict of attribute keys and values.
	An invalid attributes string does not cause an error, but strange results may be returned.
	Subattributes are handled. Subattribute keys and values are placed into a dict which becomes the value of the attribute.
	@param attribsString: The IAccessible2 attributes string to convert.
	@type attribsString: str
	@return: A dict of the attribute keys and values, where values are strings or dicts.
	@rtype: {str: str or {str: str}}
	"""
	attribsDict = {}
	buf = []  # characters of the token currently being accumulated
	attrKey = ""
	subKey = ""
	subDict = {}
	escapeNext = False
	for ch in attribsString:
		if escapeNext:
			# The previous character was a backslash: take this one literally.
			buf.append(ch)
			escapeNext = False
		elif ch == "\\":
			escapeNext = True
		elif ch == ":":
			# End of an attribute key; the value follows.
			attrKey = "".join(buf)
			buf = []
		elif ch == "=":
			# End of a subattribute key; its value follows.
			subKey = "".join(buf)
			buf = []
		elif ch == ",":
			# End of one subattribute; store it if a subattribute key was seen.
			if subKey:
				subDict[subKey] = "".join(buf)
				subKey = ""
				buf = []
		elif ch == ";":
			# End of an attribute.
			if subKey:
				# Store the final pending subattribute.
				subDict[subKey] = "".join(buf)
				subKey = ""
			if subDict:
				# This attribute had subattributes: its value is the sub-dict.
				attribsDict[attrKey] = subDict
				subDict = {}
			elif attrKey:
				attribsDict[attrKey] = "".join(buf)
				attrKey = ""
			buf = []
		else:
			buf.append(ch)
	# Handle a final attribute not terminated by a semicolon.
	if subKey:
		subDict[subKey] = "".join(buf)
	if subDict:
		attribsDict[attrKey] = subDict
	elif attrKey:
		attribsDict[attrKey] = "".join(buf)
	return attribsDict
def isMarshalledIAccessible(IAccessibleObject):
	"""Looks at the location of the first function in the IAccessible object's vtable (IUnknown::AddRef) to see if it was implemented in oleacc.dll (its local) or ole32.dll (its marshalled)."""
	if not isinstance(IAccessibleObject,IAccessible):
		raise TypeError("object should be of type IAccessible, not %s"%IAccessibleObject)
	buf=create_unicode_buffer(1024)
	# Dereference the COM pointer to reach its vtable, then read the first slot (IUnknown::AddRef).
	addr=POINTER(c_void_p).from_address(super(comtypes._compointer_base,IAccessibleObject).value).contents.value
	handle=HANDLE()
	# Flags value 6: FROM_ADDRESS|UNCHANGED_REFCOUNT — find the module containing that code address.
	windll.kernel32.GetModuleHandleExW(6,addr,byref(handle))
	windll.kernel32.GetModuleFileNameW(handle,buf,1024)
	# If AddRef does not live in oleacc.dll, the object is a marshalled proxy.
	return not buf.value.lower().endswith('oleacc.dll')
|
import urllib2
import urllib
from socket import error as SocketError
from ase.test import NotAvailable

dest = 'demo.ascii'
src = 'http://inac.cea.fr/L_Sim/V_Sim/files/' + dest
try:
    # Probe the URL first so an unreachable file raises before downloading.
    response = urllib2.urlopen(src)
    # The probe handle was previously leaked; close it explicitly.
    response.close()
    urllib.urlretrieve(src, filename=dest)
except (urllib2.URLError, SocketError):
    raise NotAvailable('Retrieval of ' + src + ' failed')
from ase.io import read
a = read(dest, format='v_sim')
|
from enum import IntEnum, auto
class YoungLaplaceParam(IntEnum):
    """Indices of the Young–Laplace fit parameters within the parameter vector.

    Values are written out explicitly (instead of ``auto()``) so the ordering
    contract is visible at a glance.
    """
    BOND = 0
    RADIUS = 1
    APEX_X = 2
    APEX_Y = 3
    ROTATION = 4
|
"""
Boolean geometry utilities.
"""
from __future__ import absolute_import
import __init__
from fabmetheus_utilities.vector3 import Vector3
from fabmetheus_utilities import archive
from fabmetheus_utilities import euclidean
from fabmetheus_utilities import gcodec
from fabmetheus_utilities import settings
import math
import os
import sys
import traceback
__author__ = 'Enrique Perez (perez_enrique@yahoo.com)'
__credits__ = 'Art of Illusion <http://www.artofillusion.org/>'
__date__ = "$Date: 2008/02/05 $"
__license__ = 'GPL 3.0'
globalModuleFunctionsDictionary = {}
def addAttributeWord(evaluatorWords, word):
	"Add attribute word and remainder if the word starts with a dot, otherwise add the word."
	# Iterative form of the original recursion: repeatedly peel off one
	# '.segment' at a time until no further dot split is possible.
	while True:
		if len(word) < 2 or word[0] != '.':
			evaluatorWords.append(word)
			return
		splitIndex = word.find('.', 1)
		if splitIndex < 0:
			evaluatorWords.append(word)
			return
		evaluatorWords.append(word[: splitIndex])
		word = word[splitIndex :]
def addQuoteWord(evaluatorWords, word):
	"Add quote word and remainder if the word starts with a quote character or dollar sign, otherwise add the word."
	if len(word) < 2:
		evaluatorWords.append(word)
		return
	leadCharacter = word[0]
	if leadCharacter == '$':
		dotIndex = word.find('.', 1)
		if dotIndex > -1:
			# Split a $variable from its attribute access.
			evaluatorWords.append(word[: dotIndex])
			evaluatorWords.append(word[dotIndex :])
			return
	if leadCharacter not in ('"', "'"):
		evaluatorWords.append(word)
		return
	closingQuoteIndex = word.find(leadCharacter, 1)
	if closingQuoteIndex < 0 or closingQuoteIndex == len(word) - 1:
		# Unterminated quote, or the quote closes the word: keep it whole.
		evaluatorWords.append(word)
		return
	splitIndex = closingQuoteIndex + 1
	evaluatorWords.append(word[: splitIndex])
	evaluatorWords.append(word[splitIndex :])
def addPrefixDictionary(dictionary, keys, value):
	"Add prefixed key values to dictionary."
	# Strip leading underscores from each key before storing the shared value.
	dictionary.update((key.lstrip('_'), value) for key in keys)
def addToPathsRecursively(paths, vector3Lists):
	"Add to vector3 paths recursively."
	# A bare Vector3 becomes its own single-point path.
	if vector3Lists.__class__ == Vector3:
		paths.append([ vector3Lists ])
		return
	# Otherwise walk the nested lists: sublists recurse into their own paths,
	# while Vector3 elements extend the path at this level.
	path = []
	for vector3List in vector3Lists:
		if vector3List.__class__ == list:
			addToPathsRecursively(paths, vector3List)
		elif vector3List.__class__ == Vector3:
			path.append(vector3List)
	if len(path) > 0:
		paths.append(path)
def addVector3ToXMLElement(key, vector3, xmlElement):
	"Add vector3 to xml element."
	# Store the components as a bracketed triple string, e.g. [1,2,3].
	xmlElement.attributeDictionary[key] = '[{0},{1},{2}]'.format(vector3.x, vector3.y, vector3.z)
def compareExecutionOrderAscending(module, otherModule):
	"Get comparison in order to sort modules in ascending execution order."
	# Primary key: globalExecutionOrder; tie-break on the module name.
	if module.globalExecutionOrder != otherModule.globalExecutionOrder:
		return -1 if module.globalExecutionOrder < otherModule.globalExecutionOrder else 1
	if module.__name__ < otherModule.__name__:
		return -1
	if module.__name__ > otherModule.__name__:
		return 1
	return 0
def convertToPaths(dictionary):
	'Recursively convert any XMLElements to paths.'
	# Vector3-like values are leaves; nothing to convert.
	if dictionary.__class__ == Vector3 or dictionary.__class__.__name__ == 'Vector3Index':
		return
	for key in getKeys(dictionary):
		value = dictionary[key]
		if value.__class__.__name__ == 'XMLElement':
			# Replace the element with the float lists of its paths, if it has an object.
			if value.object != None:
				dictionary[key] = getFloatListListsByPaths( value.object.getPaths() )
		else:
			convertToPaths(dictionary[key])
def executeLeftOperations( evaluators, operationLevel ):
	"Evaluate the expression value from the numeric and operation evaluators."
	# Negative indices are used so the loop tolerates evaluators being removed
	# during execution: the real index is recomputed from the current length on
	# every iteration. The final evaluator is deliberately excluded.
	for negativeIndex in xrange( - len(evaluators), - 1 ):
		evaluatorIndex = negativeIndex + len(evaluators)
		evaluators[evaluatorIndex].executeLeftOperation( evaluators, evaluatorIndex, operationLevel )
def executePairOperations( evaluators, operationLevel ):
	"Evaluate the expression value from the numeric and operation evaluators."
	# Same negative-index scheme as executeLeftOperations, so in-place removal
	# of evaluators is tolerated; the first and last evaluators are excluded
	# because a pair operation needs a neighbour on each side.
	for negativeIndex in xrange( 1 - len(evaluators), - 1 ):
		evaluatorIndex = negativeIndex + len(evaluators)
		evaluators[evaluatorIndex].executePairOperation( evaluators, evaluatorIndex, operationLevel )
def getArchivableObjectAddToParent( archivableClass, xmlElement ):
	"Get the archivable object and add it to the parent object."
	archivableObject = archivableClass()
	# Link the object and its xml element in both directions.
	archivableObject.xmlElement = xmlElement
	xmlElement.object = archivableObject
	archivableObject.setToObjectAttributeDictionary()
	# Register with the parent element's list of archivable objects.
	xmlElement.parent.object.archivableObjects.append(archivableObject)
	return archivableObject
def getBracketEvaluators(bracketBeginIndex, bracketEndIndex, evaluators):
	'Get the bracket evaluators.'
	# Evaluate only the evaluators strictly inside the bracket pair.
	return getEvaluatedExpressionValueEvaluators( evaluators[ bracketBeginIndex + 1 : bracketEndIndex ] )
def getBracketsExist(evaluators):
	"Evaluate the expression value."
	# Find the innermost matched bracket pair, execute it, and report whether a
	# pair was found. bracketBeginIndex tracks the most recently seen opener.
	bracketBeginIndex = None
	for negativeIndex in xrange( - len(evaluators), 0 ):
		bracketEndIndex = negativeIndex + len(evaluators)
		evaluatorEnd = evaluators[ bracketEndIndex ]
		evaluatorWord = evaluatorEnd.word
		if evaluatorWord in ['(', '[', '{']:
			bracketBeginIndex = bracketEndIndex
		elif evaluatorWord in [')', ']', '}']:
			if bracketBeginIndex == None:
				print('Warning, bracketBeginIndex in evaluateBrackets in evaluate is None.')
				print('This may be because the brackets are not balanced.')
				print(evaluators)
				# Drop the unbalanced closer so evaluation can continue.
				del evaluators[ bracketEndIndex ]
				return
			evaluators[ bracketBeginIndex ].executeBracket(bracketBeginIndex, bracketEndIndex, evaluators)
			# Clear the opener's word so this pair is not matched again.
			evaluators[ bracketBeginIndex ].word = None
			return True
	return False
def getBracketValuesDeleteEvaluator(bracketBeginIndex, bracketEndIndex, evaluators):
 'Collect the values between the brackets, then delete the consumed evaluators.'
 innerEvaluators = getBracketEvaluators(bracketBeginIndex, bracketEndIndex, evaluators)
 bracketValues = [innerEvaluator.value for innerEvaluator in innerEvaluators]
 # Remove everything after the opening bracket up to and including the closer.
 del evaluators[ bracketBeginIndex + 1 : bracketEndIndex + 1 ]
 return bracketValues
def getCumulativeVector3(prefix, vector3, xmlElement):
 "Accumulate rectangular, cylindrical, polar and spherical prefixed offsets into one vector3."
 total = getVector3ByPrefix(prefix + 'rectangular', vector3, xmlElement)
 cylindrical = getVector3ByPrefix(prefix + 'cylindrical', Vector3(), xmlElement)
 if not cylindrical.getIsDefault():
  # x is treated as the radius and y as the angle in degrees.
  planar = euclidean.getWiddershinsUnitPolar(math.radians(cylindrical.y)) * cylindrical.x
  total += Vector3(planar.real, planar.imag, cylindrical.z)
 polar = getVector3ByPrefix(prefix + 'polar', Vector3(), xmlElement)
 if not polar.getIsDefault():
  planar = euclidean.getWiddershinsUnitPolar(math.radians(polar.y)) * polar.x
  total += Vector3(planar.real, planar.imag)
 spherical = getVector3ByPrefix(prefix + 'spherical', Vector3(), xmlElement)
 if not spherical.getIsDefault():
  radius = spherical.x
  elevation = euclidean.getWiddershinsUnitPolar(math.radians(spherical.z)) * radius
  azimuth = euclidean.getWiddershinsUnitPolar(math.radians(spherical.y)) * elevation.real
  total += Vector3(azimuth.real, azimuth.imag, elevation.imag)
 return total
def getDictionarySplitWords(dictionary, value):
 "Split value into words, first padding every dictionary key with spaces."
 # Quoted values are kept whole.
 if getIsQuoted(value):
  return [value]
 for dictionaryKey in dictionary.keys():
  value = value.replace(dictionaryKey, ' ' + dictionaryKey + ' ')
 return value.split()
def getEndIndexConvertEquationValue( bracketEndIndex, evaluatorIndex, evaluators ):
 'Get the bracket end index and convert the equation value evaluators into a string.'
 # Only applies to an EvaluatorValue token named 'equation.*' followed by a
 # colon; everything up to the next ',' or '}' is then fused back into one
 # string evaluator so the equation text survives verbatim.
 evaluator = evaluators[evaluatorIndex]
 if evaluator.__class__ != EvaluatorValue:
  return bracketEndIndex
 if not evaluator.word.startswith('equation.'):
  return bracketEndIndex
 if evaluators[ evaluatorIndex + 1 ].word != ':':
  return bracketEndIndex
 valueBeginIndex = evaluatorIndex + 2
 equationValueString = ''
 for valueEvaluatorIndex in xrange( valueBeginIndex, len(evaluators) ):
  valueEvaluator = evaluators[ valueEvaluatorIndex ]
  if valueEvaluator.word == ',' or valueEvaluator.word == '}':
   if equationValueString == '':
    return bracketEndIndex
   else:
    # Replace the fused tokens with a single string value evaluator.
    evaluators[ valueBeginIndex ] = EvaluatorValue( equationValueString )
    valueDeleteIndex = valueBeginIndex + 1
    del evaluators[ valueDeleteIndex : valueEvaluatorIndex ]
    # Shift the bracket end index left by the number of deleted evaluators.
    return bracketEndIndex - valueEvaluatorIndex + valueDeleteIndex
  equationValueString += valueEvaluator.word
 return bracketEndIndex
def getEvaluatedBooleanDefault( defaultBoolean, key, xmlElement ):
 "Get the evaluated value as a boolean, falling back to defaultBoolean when the key is absent."
 if key not in xmlElement.attributeDictionary:
  return defaultBoolean
 return euclidean.getBooleanFromValue(getEvaluatedValueObliviously(key, xmlElement))
def getEvaluatedDictionary( evaluationKeys, xmlElement ):
 "Evaluate the attribute dictionary, restricted to evaluationKeys when any are given."
 evaluatedDictionary = {}
 evaluateEverything = (len(evaluationKeys) == 0)
 for key in xmlElement.attributeDictionary.keys():
  if evaluateEverything or key in evaluationKeys:
   value = getEvaluatedValueObliviously(key, xmlElement)
   if value == None:
    # Record the failure under a marked key instead of dropping it.
    valueString = str( xmlElement.attributeDictionary[key] )
    print('Warning, getEvaluatedDictionary in evaluate can not get a value for:')
    print( valueString )
    evaluatedDictionary[key + '__Warning__'] = 'Can not evaluate: ' + valueString.replace('"', ' ').replace( "'", ' ')
   else:
    evaluatedDictionary[key] = value
 return evaluatedDictionary
def getEvaluatedExpressionValue(value, xmlElement):
 "Evaluate the expression value, returning None and printing a warning on failure."
 try:
  splitWords = getEvaluatorSplitWords(value)
  return getEvaluatedExpressionValueBySplitLine(splitWords, xmlElement)
 except:
  # Deliberate best-effort: any parse or evaluation failure is reported, not raised.
  print('Warning, in getEvaluatedExpressionValue in evaluate could not get a value for:')
  print(value)
  traceback.print_exc(file=sys.stdout)
  return None
def getEvaluatedExpressionValueBySplitLine(words, xmlElement):
 "Evaluate the expression value from already split words."
 evaluators = []
 for wordIndex, word in enumerate(words):
  # Peek at the following word; the evaluator factory may need it.
  nextWord = ''
  followingIndex = wordIndex + 1
  if followingIndex < len(words):
   nextWord = words[followingIndex]
  evaluator = getEvaluator(evaluators, nextWord, word, xmlElement)
  if evaluator != None:
   evaluators.append(evaluator)
 # Collapse bracket pairs until none remain.
 while getBracketsExist(evaluators):
  pass
 valueEvaluators = getEvaluatedExpressionValueEvaluators(evaluators)
 if len(valueEvaluators) > 0:
  return valueEvaluators[0].value
 return None
def getEvaluatedExpressionValueEvaluators(evaluators):
 "Evaluate the expression value from the numeric and operation evaluators."
 # Application order: center operations, then right-acting operations
 # (scanned from the end), left-acting at level 200, pair operations at
 # descending levels 80..15, left at 13, pairs at 12, level 10 applied
 # directly below, and finally level 0 pairs.
 for evaluatorIndex, evaluator in enumerate(evaluators):
  evaluator.executeCenterOperation(evaluators, evaluatorIndex)
 # End-relative offsets keep indexes valid while evaluators are deleted.
 for negativeIndex in xrange( 1 - len(evaluators), 0 ):
  evaluatorIndex = negativeIndex + len(evaluators)
  evaluators[evaluatorIndex].executeRightOperation(evaluators, evaluatorIndex)
 executeLeftOperations( evaluators, 200 )
 for operationLevel in [ 80, 60, 40, 20, 15 ]:
  executePairOperations( evaluators, operationLevel )
 executeLeftOperations( evaluators, 13 )
 executePairOperations( evaluators, 12 )
 # Level 10 is applied inline so the final evaluator is included, unlike
 # executePairOperations which stops before the last element.
 for negativeIndex in xrange( - len(evaluators), 0 ):
  evaluatorIndex = negativeIndex + len(evaluators)
  evaluators[evaluatorIndex].executePairOperation( evaluators, evaluatorIndex, 10 )
 executePairOperations( evaluators, 0 )
 return evaluators
def getEvaluatedFloat(key, xmlElement):
 "Evaluate the attribute under key as a float, or None when the key is absent."
 if key not in xmlElement.attributeDictionary:
  return None
 return euclidean.getFloatFromValue(getEvaluatedValueObliviously(key, xmlElement))
def getEvaluatedFloatByKeys(defaultFloat, keys, xmlElement):
 "Fold each key's evaluated float over the default; later keys override earlier ones."
 for key in keys:
  defaultFloat = getEvaluatedFloatDefault(defaultFloat, key, xmlElement)
 return defaultFloat
def getEvaluatedFloatDefault( defaultFloat, key, xmlElement ):
 "Evaluate the attribute under key as a float, falling back to defaultFloat."
 evaluatedFloat = getEvaluatedFloat(key, xmlElement)
 if evaluatedFloat != None:
  return evaluatedFloat
 return defaultFloat
def getEvaluatedFloatOne(key, xmlElement):
 "Evaluate the attribute under key as a float, defaulting to 1.0."
 return getEvaluatedFloatDefault( 1.0, key, xmlElement )
def getEvaluatedFloatZero(key, xmlElement):
 "Evaluate the attribute under key as a float, defaulting to 0.0."
 return getEvaluatedFloatDefault( 0.0, key, xmlElement )
def getEvaluatedInt(key, xmlElement):
 "Evaluate the attribute under key as an int, or None when absent or unparseable."
 if key not in xmlElement.attributeDictionary:
  return None
 try:
  return getIntFromFloatString(getEvaluatedValueObliviously(key, xmlElement))
 except:
  # Best-effort: report the bad attribute and fall through to None.
  print('Warning, could not evaluate the int.')
  print(key)
  print(xmlElement.attributeDictionary[key])
 return None
def getEvaluatedIntByKeys(defaultInt, keys, xmlElement):
 "Fold each key's evaluated int over the default; later keys override earlier ones."
 for key in keys:
  defaultInt = getEvaluatedIntDefault(defaultInt, key, xmlElement)
 return defaultInt
def getEvaluatedIntDefault(defaultInt, key, xmlElement):
 "Evaluate the attribute under key as an int, falling back to defaultInt."
 evaluatedInt = getEvaluatedInt(key, xmlElement)
 if evaluatedInt != None:
  return evaluatedInt
 return defaultInt
def getEvaluatedIntOne(key, xmlElement):
 "Evaluate the attribute under key as an int, defaulting to 1."
 return getEvaluatedIntDefault( 1, key, xmlElement )
def getEvaluatedIntZero(key, xmlElement):
 "Evaluate the attribute under key as an int, defaulting to 0."
 return getEvaluatedIntDefault( 0, key, xmlElement )
def getEvaluatedLinkValue(word, xmlElement):
 "Get the value of a link word, evaluating it when it looks like an expression."
 # Empty text maps to None, not to the empty string.
 if word == '':
  return None
 if getStartsWithCurlyEqualRoundSquare(word):
  return getEvaluatedExpressionValue(word, xmlElement)
 return word
def getEvaluatedString(key, xmlElement):
 "Evaluate the attribute under key as a string, or None when the key is absent."
 if key not in xmlElement.attributeDictionary:
  return None
 return str(getEvaluatedValueObliviously(key, xmlElement))
def getEvaluatedStringDefault(defaultString, key, xmlElement):
 "Evaluate the attribute under key as a string, falling back to defaultString."
 evaluatedString = getEvaluatedString(key, xmlElement)
 if evaluatedString != None:
  return evaluatedString
 return defaultString
def getEvaluatedValue(key, xmlElement):
 "Evaluate the attribute under key, or None when the key is absent."
 if key not in xmlElement.attributeDictionary:
  return None
 return getEvaluatedValueObliviously(key, xmlElement)
def getEvaluatedValueObliviously(key, xmlElement):
 "Evaluate the attribute under key, assuming it exists; id and name are returned verbatim."
 value = str(xmlElement.attributeDictionary[key]).strip()
 # Identifier attributes are never treated as expressions.
 if key in ('id', 'name'):
  return value
 return getEvaluatedLinkValue(value, xmlElement)
def getEvaluator(evaluators, nextWord, word, xmlElement):
 "Get the evaluator."
 # Dispatch order: operator words, quoted string literals, $-literals,
 # dotted attribute/module words, identifiers (local, function element, or
 # bare word), and finally numeric tokens.
 global globalSplitDictionary
 if word in globalSplitDictionary:
  return globalSplitDictionary[word](word, xmlElement)
 firstCharacter = word[: 1]
 if firstCharacter == "'" or firstCharacter == '"':
  if len(word) > 1:
   if firstCharacter == word[-1]:
    # Matched quotes: the contents are a plain string value.
    return EvaluatorValue(word[1 : -1])
 if firstCharacter == '$':
  # $word passes through as a literal value without the dollar sign.
  return EvaluatorValue(word[1 :])
 dotIndex = word.find('.')
 if dotIndex > -1 and len(word) > 1:
  if dotIndex == 0 and word[1].isalpha():
   # Leading dot: attribute access.
   return EvaluatorAttribute(word, xmlElement)
  if dotIndex > 0:
   untilDot = word[: dotIndex]
   if untilDot in globalModuleEvaluatorDictionary:
    # module.function style word.
    return globalModuleEvaluatorDictionary[untilDot](word, xmlElement)
 if firstCharacter.isalpha() or firstCharacter == '_':
  # Identifier: check the innermost function's locals, then a function
  # element by import id; otherwise keep the bare word as a string value.
  functions = xmlElement.getXMLProcessor().functions
  if len(functions) > 0:
   if word in functions[-1].localDictionary:
    return EvaluatorLocal(word, xmlElement)
  functionElement = xmlElement.getXMLElementByImportID(word)
  if functionElement != None:
   if functionElement.className == 'function':
    return EvaluatorFunction( word, functionElement )
  return EvaluatorValue(word)
 return EvaluatorNumeric(word, xmlElement)
def getEvaluatorSplitWords(value):
 "Get split words for evaluators."
 # A leading '=' only marks the text as an expression; strip it.
 if value.startswith('='):
  value = value[len('=') :]
 if len(value) < 1:
  return []
 global globalDictionaryOperatorBegin
 # First pass: replace each quoted string with a unique placeholder token so
 # operator splitting cannot touch quoted text; the originals are kept in
 # quoteDictionary and restored after splitting.
 uniqueQuoteIndex = 0
 word = ''
 quoteString = None
 quoteDictionary = {}
 for characterIndex in xrange(len(value)):
  character = value[characterIndex]
  if character == '"' or character == "'":
   if quoteString == None:
    # Opening quote: start collecting the quoted text.
    quoteString = ''
   elif quoteString != None:
    if character == quoteString[: 1]:
     # Matching closing quote: stash the full quoted text under a token.
     uniqueQuoteIndex = getUniqueQuoteIndex(uniqueQuoteIndex, value)
     uniqueToken = getTokenByNumber(uniqueQuoteIndex)
     quoteDictionary[uniqueToken] = quoteString + character
     character = uniqueToken
     quoteString = None
  if quoteString == None:
   word += character
  else:
   quoteString += character
 # Split on the words of globalDictionaryOperatorBegin first, then split the
 # remaining pieces on globalSplitDictionaryOperator.
 beginSplitWords = getDictionarySplitWords(globalDictionaryOperatorBegin, word)
 global globalSplitDictionaryOperator
 evaluatorSplitWords = []
 for beginSplitWord in beginSplitWords:
  if beginSplitWord in globalDictionaryOperatorBegin:
   evaluatorSplitWords.append(beginSplitWord)
  else:
   evaluatorSplitWords += getDictionarySplitWords(globalSplitDictionaryOperator, beginSplitWord)
 # Restore the quoted strings hidden behind placeholder tokens.
 for evaluatorSplitWordIndex, evaluatorSplitWord in enumerate(evaluatorSplitWords):
  for quoteDictionaryKey in quoteDictionary.keys():
   if quoteDictionaryKey in evaluatorSplitWord:
    evaluatorSplitWords[evaluatorSplitWordIndex] = evaluatorSplitWord.replace(quoteDictionaryKey, quoteDictionary[quoteDictionaryKey])
 # Final passes hand each word to addQuoteWord then addAttributeWord.
 evaluatorTransitionWords = []
 for evaluatorSplitWord in evaluatorSplitWords:
  addQuoteWord(evaluatorTransitionWords, evaluatorSplitWord)
 evaluatorSplitWords = []
 for evaluatorTransitionWord in evaluatorTransitionWords:
  addAttributeWord(evaluatorSplitWords, evaluatorTransitionWord)
 return evaluatorSplitWords
def getFloatListFromBracketedString( bracketedString ):
 "Parse a bracketed, comma separated string into a list of floats."
 if not getIsBracketed( bracketedString ):
  return None
 strippedString = bracketedString.strip()
 for bracketCharacter in '[]()':
  strippedString = strippedString.replace(bracketCharacter, '')
 if len(strippedString) < 1:
  return []
 floatList = []
 for word in strippedString.split(','):
  evaluatedFloat = euclidean.getFloatFromValue(word)
  # Unparseable entries are silently skipped.
  if evaluatedFloat != None:
   floatList.append( evaluatedFloat )
 return floatList
def getFloatListListsByPaths(paths):
 'Get a float list for every point of every path.'
 floatListLists = []
 for path in paths:
  floatListList = []
  for point in path:
   floatListList.append( point.getFloatList() )
  # Bug fix: floatListList was never appended, so the function always
  # returned an empty list no matter how many paths were given.
  floatListLists.append( floatListList )
 return floatListLists
def getFromCreationEvaluatorPlugins( namePathDictionary, xmlElement ):
 "Get the matching plugins, but only when the xmlElement came from the creation evaluator."
 if not getEvaluatedBooleanDefault( False, '_fromCreationEvaluator', xmlElement ):
  return []
 return getMatchingPlugins( namePathDictionary, xmlElement )
def getKeys(repository):
 'Get the iteration keys of a list, tuple or dict repository, or None for anything else.'
 containerType = repository.__class__
 if containerType in (list, tuple):
  return range(len(repository))
 if containerType == dict:
  return repository.keys()
 return None
def getIntFromFloatString(value):
 "Convert a numeric string to an int, rounding float text; empty text gives None."
 floatText = str(value).strip()
 if floatText == '':
  return None
 # Without a decimal point the original value converts directly.
 if '.' not in floatText:
  return int(value)
 return int(round(float(floatText)))
def getIsBracketed(word):
 "Determine whether the word is wrapped in matching round or square brackets."
 if len(word) < 2:
  return False
 openCharacter = word[0]
 closeCharacter = word[-1]
 return (openCharacter == '(' and closeCharacter == ')') or (openCharacter == '[' and closeCharacter == ']')
def getIsQuoted(word):
 "Determine whether the word is wrapped in matching single or double quotes."
 if len(word) < 2:
  return False
 openCharacter = word[0]
 closeCharacter = word[-1]
 return (openCharacter == '"' and closeCharacter == '"') or (openCharacter == "'" and closeCharacter == "'")
def getLayerThickness(xmlElement):
 "Get the cascade layer thickness, defaulting to 0.4."
 return xmlElement.getCascadeFloat(0.4, 'layerThickness')
def getMatchingPlugins( namePathDictionary, xmlElement ):
 "Load the plugins whose names appear, dotted, among the attribute keys."
 matchingPlugins = []
 # Copy so each plugin is loaded at most once even with repeated keys.
 remainingNamePaths = namePathDictionary.copy()
 for key in xmlElement.attributeDictionary:
  dotIndex = key.find('.')
  if dotIndex > - 1:
   keyUntilDot = key[: dotIndex]
   if keyUntilDot in remainingNamePaths:
    pluginModule = gcodec.getModuleWithPath( remainingNamePaths[ keyUntilDot ] )
    del remainingNamePaths[ keyUntilDot ]
    if pluginModule != None:
     matchingPlugins.append( pluginModule )
 return matchingPlugins
def getNextChildIndex(xmlElement):
 "Get the index just after this element within its parent's children."
 siblings = xmlElement.parent.children
 for childIndex, sibling in enumerate(siblings):
  if sibling == xmlElement:
   return childIndex + 1
 # Element not found among the children: append position.
 return len(siblings)
def getPathByKey(key, xmlElement):
 "Get a single path from the attribute under key."
 if key not in xmlElement.attributeDictionary:
  return []
 linkText = str(xmlElement.attributeDictionary[key]).strip()
 evaluatedLinkValue = getEvaluatedLinkValue(linkText, xmlElement)
 if evaluatedLinkValue.__class__ == list:
  return getPathByList(evaluatedLinkValue)
 xmlElementObject = getXMLElementObject(evaluatedLinkValue)
 if xmlElementObject == None:
  return []
 # Only the first path of the linked object is returned.
 return xmlElementObject.getPaths()[0]
def getPathByList( vertexList ):
 "Convert a list (or list of lists) of floats into a path of Vector3 points."
 if len( vertexList ) < 1:
  return Vector3()
 # A flat float list is treated as a single vertex.
 if vertexList[0].__class__ != list:
  vertexList = [ vertexList ]
 path = []
 for floatList in vertexList:
  path.append( getVector3ByFloatList( floatList, Vector3() ) )
 return path
def getPathByPrefix( path, prefix, xmlElement ):
 "Overlay the prefixed path, start and end attributes onto the given path."
 if len(path) < 2:
  print('Warning, bug, path is too small in evaluate in setPathByPrefix.')
  return
 pathByKey = getPathByKey( prefix + 'path', xmlElement )
 if len( pathByKey ) < len(path):
  # Overwrite only the leading points, keeping the rest of the original path.
  for pointIndex, point in enumerate(pathByKey):
   path[pointIndex] = point
 else:
  path = pathByKey
 path[0] = getVector3ByPrefix( prefix + 'start', path[0], xmlElement )
 path[-1] = getVector3ByPrefix( prefix + 'end', path[-1], xmlElement )
 return path
def getPathsByKey(key, xmlElement):
 "Get every path referenced by the attribute under key."
 if key not in xmlElement.attributeDictionary:
  return []
 linkText = str(xmlElement.attributeDictionary[key]).strip()
 evaluatedLinkValue = getEvaluatedLinkValue(linkText, xmlElement)
 if evaluatedLinkValue.__class__ in (dict, list):
  convertToPaths(evaluatedLinkValue)
  return getPathsByLists(evaluatedLinkValue)
 xmlElementObject = getXMLElementObject(evaluatedLinkValue)
 if xmlElementObject == None:
  return []
 return xmlElementObject.getPaths()
def getPathsByKeys(keys, xmlElement):
 "Concatenate the paths found under every key."
 pathsByKeys = []
 for key in keys:
  pathsByKeys.extend(getPathsByKey(key, xmlElement))
 return pathsByKeys
def getPathsByLists(vertexLists):
 "Build paths from nested vertex lists."
 vector3Lists = getVector3ListsRecursively(vertexLists)
 paths = []
 addToPathsRecursively( paths, vector3Lists )
 return paths
def getPrecision(xmlElement):
 "Get the cascade precision, defaulting to 0.1."
 return xmlElement.getCascadeFloat(0.1, 'precision')
def getSheetThickness(xmlElement):
 "Get the cascade sheet thickness, defaulting to 3.0."
 return xmlElement.getCascadeFloat(3.0, 'sheetThickness')
def getSidesBasedOnPrecision(radius, xmlElement):
 "Get the number of polygon sides implied by the cascade precision."
 precision = getPrecision(xmlElement)
 return math.ceil(math.sqrt(0.5 * radius * math.pi * math.pi / precision))
def getSidesMinimumThreeBasedOnPrecision(radius, xmlElement):
 "Get the number of polygon sides implied by the precision, never fewer than three."
 return max(getSidesBasedOnPrecision(radius, xmlElement), 3.0)
def getSidesMinimumThreeBasedOnPrecisionSides(radius, xmlElement):
 "Get the polygon side count, at least three, overridable by a sides attribute."
 defaultSides = getSidesMinimumThreeBasedOnPrecision(radius, xmlElement)
 return getEvaluatedFloatDefault(defaultSides, 'sides', xmlElement)
def getSplitDictionary():
 "Build the split dictionary from the operator tables plus the keyword evaluators."
 global globalSplitDictionaryOperator
 global globalDictionaryOperatorBegin
 splitDictionary = globalSplitDictionaryOperator.copy()
 splitDictionary.update( globalDictionaryOperatorBegin )
 # Keyword evaluators accept both lower and title case spellings.
 keywordEvaluators = {
  'and': EvaluatorAnd,
  'false': EvaluatorFalse,
  'False': EvaluatorFalse,
  'or': EvaluatorOr,
  'not': EvaluatorNot,
  'true': EvaluatorTrue,
  'True': EvaluatorTrue,
  'none': EvaluatorNone,
  'None': EvaluatorNone}
 splitDictionary.update(keywordEvaluators)
 return splitDictionary
def getStartsWithCurlyEqualRoundSquare(word):
 "Determine whether the word starts with a curly bracket, equals sign, round or square bracket."
 return word.startswith(('{', '=', '(', '['))
def getTokenByNumber(number):
 "Get the placeholder token for a number."
 return '_' + str(number) + '_'
def getTransformedPathByKey(key, xmlElement):
 "Get the transformed path referenced by the attribute under key."
 if key not in xmlElement.attributeDictionary:
  return []
 word = str(xmlElement.attributeDictionary[key]).strip()
 evaluatedLinkValue = getEvaluatedLinkValue(word, xmlElement)
 if evaluatedLinkValue.__class__ == list:
  return getPathByList(evaluatedLinkValue)
 # Bug fix: this was getXMLElementObject(evaluatedLinkValueClass), an
 # undefined name that raised NameError whenever the link resolved to an
 # element (compare the identical flow in getPathByKey above).
 xmlElementObject = getXMLElementObject(evaluatedLinkValue)
 if xmlElementObject == None:
  return []
 return xmlElementObject.getTransformedPaths()[0]
def getTransformedPathByPrefix( path, prefix, xmlElement ):
 "Overlay the prefixed transformed path, start and end attributes onto the given path."
 if len(path) < 2:
  print('Warning, bug, path is too small in evaluate in setPathByPrefix.')
  return
 pathByKey = getTransformedPathByKey( prefix + 'path', xmlElement )
 if len( pathByKey ) < len(path):
  # Overwrite only the leading points, keeping the rest of the original path.
  for pointIndex, point in enumerate(pathByKey):
   path[pointIndex] = point
 else:
  path = pathByKey
 path[0] = getVector3ByPrefix( prefix + 'start', path[0], xmlElement )
 path[-1] = getVector3ByPrefix( prefix + 'end', path[-1], xmlElement )
 return path
def getTransformedPathsByKey(key, xmlElement):
 "Get every transformed path referenced by the attribute under key."
 if key not in xmlElement.attributeDictionary:
  return []
 linkText = str(xmlElement.attributeDictionary[key]).strip()
 evaluatedLinkValue = getEvaluatedLinkValue(linkText, xmlElement)
 if evaluatedLinkValue.__class__ in (dict, list):
  convertToPaths(evaluatedLinkValue)
  return getPathsByLists(evaluatedLinkValue)
 xmlElementObject = getXMLElementObject(evaluatedLinkValue)
 if xmlElementObject == None:
  return []
 return xmlElementObject.getTransformedPaths()
def getUniqueQuoteIndex( uniqueQuoteIndex, word ):
 "Advance the quote index until its token does not appear in word."
 candidateIndex = uniqueQuoteIndex + 1
 while getTokenByNumber(candidateIndex) in word:
  candidateIndex += 1
 return candidateIndex
def getUniqueToken(word):
 'Get a short token guaranteed not to occur in word.'
 markerCharacters = '@#!'
 # Try the bare marker characters first.
 for markerCharacter in markerCharacters:
  if markerCharacter not in word:
   return markerCharacter
 # All present: append increasing numbers until a free token is found.
 suffixNumber = 0
 while True:
  for markerCharacter in markerCharacters:
   candidate = markerCharacter + str(suffixNumber)
   if candidate not in word:
    return candidate
  suffixNumber += 1
def getVector3ByDictionary( dictionary, vector3 ):
 "Overlay the x, y and z entries of a dictionary onto vector3."
 for axisKey in ('x', 'y', 'z'):
  if axisKey in dictionary:
   # Only allocate a vector once an axis is actually present.
   vector3 = getVector3IfNone(vector3)
   setattr(vector3, axisKey, euclidean.getFloatFromValue(dictionary[axisKey]))
 return vector3
def getVector3ByDictionaryListValue(value, vector3):
 "Derive a vector3 from a Vector3, dictionary, list or scalar value."
 valueClass = value.__class__
 if valueClass == Vector3 or valueClass.__name__ == 'Vector3Index':
  return value
 if valueClass == dict:
  return getVector3ByDictionary(value, vector3)
 if valueClass == list:
  return getVector3ByFloatList(value, vector3)
 # A scalar sets all three axes to the same value.
 floatFromValue = euclidean.getFloatFromValue(value)
 if floatFromValue == None:
  return vector3
 vector3.setToXYZ(floatFromValue, floatFromValue, floatFromValue)
 return vector3
def getVector3ByFloatList(floatList, vector3):
 "Overlay up to three floats from floatList onto the x, y and z of vector3."
 for axisIndex, axisKey in enumerate(('x', 'y', 'z')):
  if len(floatList) > axisIndex:
   # Only allocate a vector once a component is actually present.
   vector3 = getVector3IfNone(vector3)
   setattr(vector3, axisKey, euclidean.getFloatFromValue(floatList[axisIndex]))
 return vector3
def getVector3ByMultiplierPrefix( multiplier, prefix, vector3, xmlElement ):
 "Read the prefixed vector3 at multiplier scale; unchanged attributes leave vector3 untouched."
 if multiplier == 0.0:
  return vector3
 scaledVector3 = vector3 * multiplier
 vector3ByPrefix = getVector3ByPrefix( prefix, scaledVector3.copy(), xmlElement )
 # No prefixed attribute changed anything, so keep the original vector.
 if vector3ByPrefix == scaledVector3:
  return vector3
 return vector3ByPrefix / multiplier
def getVector3ByMultiplierPrefixes( multiplier, prefixes, vector3, xmlElement ):
 "Apply getVector3ByMultiplierPrefix for each prefix in turn."
 for multiplierPrefix in prefixes:
  vector3 = getVector3ByMultiplierPrefix( multiplier, multiplierPrefix, vector3, xmlElement )
 return vector3
def getVector3ByPrefix(prefix, vector3, xmlElement):
 "Overlay the prefix, prefix.x, prefix.y and prefix.z attributes onto vector3."
 value = getEvaluatedValue(prefix, xmlElement)
 if value != None:
  vector3 = getVector3ByDictionaryListValue(value, vector3)
 for axisKey in ('x', 'y', 'z'):
  axisValue = getEvaluatedFloat(prefix + '.' + axisKey, xmlElement)
  if axisValue != None:
   # Only allocate a vector once an axis attribute is actually present.
   vector3 = getVector3IfNone(vector3)
   setattr(vector3, axisKey, axisValue)
 return vector3
def getVector3ByPrefixes( prefixes, vector3, xmlElement ):
 "Apply getVector3ByPrefix for each prefix in turn."
 for singlePrefix in prefixes:
  vector3 = getVector3ByPrefix(singlePrefix, vector3, xmlElement)
 return vector3
def getVector3FromXMLElement(xmlElement):
 "Build a vector3 from the x, y and z attributes plus any cumulative offsets."
 baseVector3 = Vector3(
  getEvaluatedFloatZero('x', xmlElement),
  getEvaluatedFloatZero('y', xmlElement),
  getEvaluatedFloatZero('z', xmlElement))
 return getCumulativeVector3('', baseVector3, xmlElement)
def getVector3IfNone(vector3):
 "Return the given vector3, or a new origin Vector3 when it is None."
 if vector3 == None:
  return Vector3()
 return vector3
def getVector3ListsRecursively(floatLists):
 "Recursively convert nested float lists into nested Vector3 lists."
 if len(floatLists) < 1:
  return Vector3()
 firstElement = floatLists[0]
 # Already converted: return as-is.
 if firstElement.__class__ == Vector3:
  return floatLists
 # A flat float list becomes a single vector.
 if firstElement.__class__ != list:
  return getVector3ByFloatList(floatLists, Vector3())
 return [getVector3ListsRecursively(floatList) for floatList in floatLists]
def getVector3RemoveByPrefix(prefix, vector3, xmlElement):
 "Read the prefixed vector3, then strip the prefixed attributes from the dictionary."
 prefixedVector3 = getVector3ByPrefix(prefix, vector3, xmlElement)
 euclidean.removePrefixFromDictionary( xmlElement.attributeDictionary, prefix )
 return prefixedVector3
def getVisibleObjects(archivableObjects):
 "Filter the archivable objects down to the visible ones."
 return [archivableObject for archivableObject in archivableObjects if archivableObject.getVisible()]
def getXMLElementByKey(key, xmlElement):
 "Get the xml element linked by the attribute under key, or None with a warning."
 if key not in xmlElement.attributeDictionary:
  return None
 linkText = str(xmlElement.attributeDictionary[key]).strip()
 evaluatedLinkValue = getEvaluatedLinkValue(linkText, xmlElement)
 if evaluatedLinkValue.__class__.__name__ == 'XMLElement':
  return evaluatedLinkValue
 print('Warning, could not get XMLElement in getXMLElementByKey in evaluate for:')
 print(key)
 print(evaluatedLinkValue)
 print(xmlElement)
 return None
def getXMLElementObject(evaluatedLinkValue):
 "Get the object behind an XMLElement link value, or None with a warning."
 className = evaluatedLinkValue.__class__.__name__
 if className != 'XMLElement':
  print('Warning, could not get XMLElement in getXMLElementObject in evaluate for:')
  print(evaluatedLinkValue)
  return None
 if evaluatedLinkValue.object == None:
  print('Warning, evaluatedLinkValue.object is None in getXMLElementObject in evaluate for:')
  print(evaluatedLinkValue)
  return None
 return evaluatedLinkValue.object
def getXMLElementsByKey(key, xmlElement):
 "Get the xml elements linked by the attribute under key."
 if key not in xmlElement.attributeDictionary:
  return []
 linkText = str(xmlElement.attributeDictionary[key]).strip()
 evaluatedLinkValue = getEvaluatedLinkValue(linkText, xmlElement)
 if evaluatedLinkValue.__class__.__name__ == 'XMLElement':
  return [evaluatedLinkValue]
 if evaluatedLinkValue.__class__ == list:
  return evaluatedLinkValue
 print('Warning, could not get XMLElements in getXMLElementsByKey in evaluate for:')
 print(key)
 print(evaluatedLinkValue)
 print(xmlElement)
 # NOTE(review): siblings return [] on failure, this returns None — confirm callers tolerate it.
 return None
def processArchivable(archivableClass, xmlElement):
 "Create an archivable object for the element, then process the element's children."
 if xmlElement == None:
  return
 getArchivableObjectAddToParent(archivableClass, xmlElement)
 xmlElement.getXMLProcessor().processChildren(xmlElement)
def processCondition(xmlElement):
 "Process the xml element condition."
 xmlProcessor = xmlElement.getXMLProcessor()
 # Lazily attach the module object that holds the parsed condition words.
 if xmlElement.object == None:
  xmlElement.object = ModuleXMLElement(xmlElement)
 if xmlElement.object.conditionSplitWords == None:
  return
 # Conditions only make sense inside a function body.
 if len(xmlProcessor.functions ) < 1:
  print('Warning, "in" element is not in a function in processCondition in evaluate for:')
  print(xmlElement)
  return
 # A positive integer result selects the children; otherwise the else branch.
 if int( getEvaluatedExpressionValueBySplitLine( xmlElement.object.conditionSplitWords, xmlElement ) ) > 0:
  xmlProcessor.functions[-1].processChildren(xmlElement)
 else:
  xmlElement.object.processElse(xmlElement)
def setAttributeDictionaryByArguments(argumentNames, arguments, xmlElement):
 "Copy each positional argument into the attribute dictionary under its matching name."
 for argumentIndex in range(len(arguments)):
  xmlElement.attributeDictionary[argumentNames[argumentIndex]] = arguments[argumentIndex]
class Evaluator:
 'Base evaluator class.'
 # Holds one token of a parsed expression: its source word and, once
 # evaluated, its value.  The execute* hooks are no-ops here and are
 # overridden by operator subclasses; the key/list/dict/string helpers
 # implement subscript lookup on an evaluated value.
 def __init__(self, word, xmlElement):
  'Set value to none.'
  self.value = None
  self.word = word
 def __repr__(self):
  "Get the string representation of this Evaluator."
  return '%s: %s, %s' % ( self.__class__.__name__, self.word, self.value )
 def executeBracket( self, bracketBeginIndex, bracketEndIndex, evaluators ):
  'Execute the bracket.'
  # No-op in the base class; bracket evaluators override this.
  pass
 def executeCenterOperation(self, evaluators, evaluatorIndex):
  'Execute operator which acts on the center.'
  pass
 def executeDictionary(self, dictionary, evaluators, keys, evaluatorIndex, nextEvaluator):
  'Execute the dictionary.'
  # Consumes this evaluator and stores the looked-up value(s) on nextEvaluator.
  del evaluators[evaluatorIndex]
  enumeratorKeys = euclidean.getEnumeratorKeys(dictionary, keys)
  if enumeratorKeys.__class__ == list:
   # Multiple keys: collect the values into a list, warning on misses.
   nextEvaluator.value = []
   for enumeratorKey in enumeratorKeys:
    if enumeratorKey in dictionary:
     nextEvaluator.value.append(dictionary[enumeratorKey])
    else:
     print('Warning, key in executeKey in Evaluator in evaluate is not in for:')
     print(enumeratorKey)
     print(dictionary)
   return
  if enumeratorKeys in dictionary:
   nextEvaluator.value = dictionary[enumeratorKeys]
  else:
   print('Warning, key in executeKey in Evaluator in evaluate is not in for:')
   print(enumeratorKeys)
   print(dictionary)
 def executeFunction(self, evaluators, evaluatorIndex, nextEvaluator):
  'Execute the function.'
  pass
 def executeKey(self, evaluators, keys, evaluatorIndex, nextEvaluator):
  'Execute the key index.'
  # Dispatch on the type of self.value: str, list, dict, objects exposing
  # _getAccessibleDictionary, and finally XMLElement attribute lookup.
  if self.value.__class__ == str:
   self.executeString(evaluators, keys, evaluatorIndex, nextEvaluator)
   return
  if self.value.__class__ == list:
   self.executeList(evaluators, keys, evaluatorIndex, nextEvaluator)
   return
  if self.value.__class__ == dict:
   self.executeDictionary(self.value, evaluators, keys, evaluatorIndex, nextEvaluator)
   return
  getAccessibleDictionaryFunction = getattr(self.value, '_getAccessibleDictionary', None)
  if getAccessibleDictionaryFunction != None:
   self.executeDictionary(getAccessibleDictionaryFunction(), evaluators, keys, evaluatorIndex, nextEvaluator)
   return
  if self.value.__class__.__name__ != 'XMLElement':
   return
  del evaluators[evaluatorIndex]
  enumeratorKeys = euclidean.getEnumeratorKeys(self.value.attributeDictionary, keys)
  if enumeratorKeys.__class__ == list:
   nextEvaluator.value = []
   for enumeratorKey in enumeratorKeys:
    if enumeratorKey in self.value.attributeDictionary:
     # Attribute values are themselves expressions and must be evaluated.
     nextEvaluator.value.append(getEvaluatedExpressionValue(self.value.attributeDictionary[enumeratorKey], self.value))
    else:
     print('Warning, key in executeKey in Evaluator in evaluate is not in for:')
     print(enumeratorKey)
     print(self.value.attributeDictionary)
   return
  if enumeratorKeys in self.value.attributeDictionary:
   nextEvaluator.value = getEvaluatedExpressionValue(self.value.attributeDictionary[enumeratorKeys], self.value)
  else:
   print('Warning, key in executeKey in Evaluator in evaluate is not in for:')
   print(enumeratorKeys)
   print(self.value.attributeDictionary)
 def executeLeftOperation(self, evaluators, evaluatorIndex, operationLevel):
  'Execute operator which acts from the left.'
  pass
 def executeList(self, evaluators, keys, evaluatorIndex, nextEvaluator):
  'Execute the key index.'
  del evaluators[evaluatorIndex]
  enumeratorKeys = euclidean.getEnumeratorKeys(self.value, keys)
  if enumeratorKeys.__class__ == list:
   # Multiple indexes: collect the elements into a list, warning on misses.
   nextEvaluator.value = []
   for enumeratorKey in enumeratorKeys:
    intKey = euclidean.getIntFromValue(enumeratorKey)
    if self.getIsInRange(intKey):
     nextEvaluator.value.append(self.value[intKey])
    else:
     print('Warning, key in executeList in Evaluator in evaluate is not in for:')
     print(enumeratorKey)
     print(self.value)
   return
  intKey = euclidean.getIntFromValue(enumeratorKeys)
  if self.getIsInRange(intKey):
   nextEvaluator.value = self.value[intKey]
  else:
   print('Warning, key in executeList in Evaluator in evaluate is not in for:')
   print(enumeratorKeys)
   print(self.value)
 def executePairOperation(self, evaluators, evaluatorIndex, operationLevel):
  'Operate on two evaluators.'
  pass
 def executeRightOperation( self, evaluators, evaluatorIndex ):
  'Execute operator which acts from the right.'
  pass
 def executeString(self, evaluators, keys, evaluatorIndex, nextEvaluator):
  'Execute the string.'
  del evaluators[evaluatorIndex]
  enumeratorKeys = euclidean.getEnumeratorKeys(self.value, keys)
  if enumeratorKeys.__class__ == list:
   # Multiple indexes concatenate the selected characters.
   nextEvaluator.value = ''
   for enumeratorKey in enumeratorKeys:
    intKey = euclidean.getIntFromValue(enumeratorKey)
    if self.getIsInRange(intKey):
     nextEvaluator.value += self.value[intKey]
    else:
     print('Warning, key in executeString in Evaluator in evaluate is not in for:')
     print(enumeratorKey)
     print(self.value)
   return
  intKey = euclidean.getIntFromValue(enumeratorKeys)
  if self.getIsInRange(intKey):
   nextEvaluator.value = self.value[intKey]
  else:
   print('Warning, key in executeString in Evaluator in evaluate is not in for:')
   print(enumeratorKeys)
   print(self.value)
 def getIsInRange(self, keyIndex):
  'Determine if the keyIndex is in range.'
  # None means the key could not be converted to an int.
  if keyIndex == None:
   return False
  # Negative indexes are allowed down to -len, mirroring Python indexing.
  return keyIndex >= -len(self.value) and keyIndex < len(self.value)
class EvaluatorAddition(Evaluator):
	'Evaluator for the binary addition operator; also the base class for the other binary operators.'
	def executePairOperation(self, evaluators, evaluatorIndex, operationLevel):
		'Operate on two evaluators when the addition precedence level (20) is reached.'
		if operationLevel == 20:
			self.executePair(evaluators, evaluatorIndex)
	def executePair( self, evaluators, evaluatorIndex ):
		'Combine the evaluators on either side of this operator, storing the result on the left.'
		leftIndex = evaluatorIndex - 1
		rightIndex = evaluatorIndex + 1
		if leftIndex < 0:
			# No left operand; warn and drop this operator.
			print('Warning, no leftKey in executePair in EvaluatorAddition for:')
			print(evaluators)
			print(evaluatorIndex)
			print(self)
			del evaluators[evaluatorIndex]
			return
		if rightIndex >= len(evaluators):
			# No right operand; warn and drop this operator.
			print('Warning, no rightKey in executePair in EvaluatorAddition for:')
			print(evaluators)
			print(evaluatorIndex)
			print(self)
			del evaluators[evaluatorIndex]
			return
		# Fold the pair into the left evaluator, then remove the operator and the right operand.
		# (An unused local copy of the right value was removed here.)
		evaluators[leftIndex].value = self.getOperationValue(evaluators[leftIndex].value, evaluators[rightIndex].value)
		del evaluators[ evaluatorIndex : evaluatorIndex + 2 ]
	def getEvaluatedValues(self, enumerable, keys, value):
		'Apply the operation between a scalar value and each keyed element of enumerable.'
		if enumerable.__class__ == dict:
			evaluatedValues = {}
			for key in keys:
				evaluatedValues[key] = self.getOperationValue(value, enumerable[key])
			return evaluatedValues
		evaluatedValues = []
		for key in keys:
			evaluatedValues.append(self.getOperationValue(value, enumerable[key]))
		return evaluatedValues
	def getOperationValue(self, leftValue, rightValue):
		'Get operation value, recursing elementwise into dictionaries and lists.'
		leftKeys = getKeys(leftValue)
		rightKeys = getKeys(rightValue)
		if leftKeys == None and rightKeys == None:
			# Both sides are scalars; apply the operator directly.
			return self.getValueFromValuePair(leftValue, rightValue)
		if leftKeys == None:
			return self.getEvaluatedValues(rightValue, rightKeys, leftValue)
		if rightKeys == None:
			return self.getEvaluatedValues(leftValue, leftKeys, rightValue)
		if leftKeys != rightKeys:
			# Elementwise operation requires matching key sets on both sides.
			print('Warning, the leftKeys are different from the rightKeys in getOperationValue in EvaluatorAddition for:')
			print('leftValue')
			print(leftValue)
			print(leftKeys)
			print('rightValue')
			print(rightValue)
			print(rightKeys)
			print(self)
			return None
		if leftValue.__class__ == dict or rightValue.__class__ == dict:
			evaluatedValues = {}
			for leftKey in leftKeys:
				evaluatedValues[leftKey] = self.getOperationValue(leftValue[leftKey], rightValue[leftKey])
			return evaluatedValues
		evaluatedValues = []
		for leftKey in leftKeys:
			evaluatedValues.append(self.getOperationValue(leftValue[leftKey], rightValue[leftKey]))
		return evaluatedValues
	def getValueFromValuePair(self, leftValue, rightValue):
		'Add two values.'
		return leftValue + rightValue
class EvaluatorEqual(EvaluatorAddition):
	'Evaluator for the == comparison operator.'
	def executePairOperation(self, evaluators, evaluatorIndex, operationLevel):
		'Apply the comparison at precedence level 15.'
		if operationLevel != 15:
			return
		self.executePair(evaluators, evaluatorIndex)
	def getBooleanFromValuePair(self, leftValue, rightValue):
		'Return whether the two values are equal.'
		return leftValue == rightValue
	def getValueFromValuePair(self, leftValue, rightValue):
		'Delegate to the boolean comparison.'
		return self.getBooleanFromValuePair(leftValue, rightValue)
class EvaluatorSubtraction(EvaluatorAddition):
	'Evaluator for binary subtraction and unary minus.'
	def executeLeft( self, evaluators, evaluatorIndex ):
		'Negate the value to the right when there is no left operand (unary minus).'
		previousIndex = evaluatorIndex - 1
		nextIndex = evaluatorIndex + 1
		if previousIndex >= 0 and evaluators[previousIndex].value != None:
			# There is a left operand, so this is binary subtraction, not unary minus.
			return
		operand = evaluators[nextIndex].value
		if operand == None:
			print('Warning, can not minus.')
			print( evaluators[nextIndex].word )
		else:
			evaluators[nextIndex].value = self.getNegativeValue(operand)
		del evaluators[evaluatorIndex]
	def executeLeftOperation(self, evaluators, evaluatorIndex, operationLevel):
		'Apply unary minus at precedence level 200.'
		if operationLevel != 200:
			return
		self.executeLeft(evaluators, evaluatorIndex)
	def getNegativeValue( self, value ):
		'Recursively negate a value, descending into dictionaries and lists in place.'
		keys = getKeys(value)
		if keys == None:
			return self.getValueFromSingleValue(value)
		for key in keys:
			value[key] = self.getNegativeValue(value[key])
		return value
	def getValueFromSingleValue( self, value ):
		'Negate a single value.'
		return -value
	def getValueFromValuePair(self, leftValue, rightValue):
		'Subtract rightValue from leftValue.'
		return leftValue - rightValue
class EvaluatorAnd(EvaluatorAddition):
	'Evaluator for the logical and operator.'
	def executePairOperation(self, evaluators, evaluatorIndex, operationLevel):
		'Apply the operator at precedence level 12.'
		if operationLevel != 12:
			return
		self.executePair(evaluators, evaluatorIndex)
	def getBooleanFromValuePair(self, leftValue, rightValue):
		'Logically and the two values.'
		return leftValue and rightValue
	def getValueFromValuePair(self, leftValue, rightValue):
		'Delegate to the boolean operation.'
		return self.getBooleanFromValuePair(leftValue, rightValue)
class EvaluatorAttribute(Evaluator):
	'Evaluator for a dotted attribute of the value to its left, such as .length or .keys.'
	def executeFunction(self, evaluators, evaluatorIndex, nextEvaluator):
		'Call the bound attribute with the arguments gathered by the bracket evaluator.'
		if self.value == None:
			print('Warning, executeFunction in EvaluatorAttribute in evaluate can not get a self.value for:')
			print(evaluatorIndex)
			print(evaluators)
			print(self)
			return
		nextEvaluator.value = self.value(*nextEvaluator.arguments)
		del evaluators[evaluatorIndex]
	def executeRightOperation( self, evaluators, evaluatorIndex ):
		'Bind the named attribute of the evaluator to the left, then consume that evaluator.'
		attributeName = self.word[1 :]
		previousIndex = evaluatorIndex - 1
		previousEvaluator = evaluators[previousIndex]
		if previousEvaluator.value.__class__ == dict:
			from fabmetheus_utilities.geometry.geometry_utilities.evaluate_enumerables import dictionary_attribute
			self.value = dictionary_attribute._getAccessibleAttribute(attributeName, previousEvaluator.value)
		elif previousEvaluator.value.__class__ == list:
			from fabmetheus_utilities.geometry.geometry_utilities.evaluate_enumerables import list_attribute
			self.value = list_attribute._getAccessibleAttribute(attributeName, previousEvaluator.value)
		elif previousEvaluator.value.__class__ == str:
			from fabmetheus_utilities.geometry.geometry_utilities.evaluate_enumerables import string_attribute
			self.value = string_attribute._getAccessibleAttribute(attributeName, previousEvaluator.value)
		else:
			# Bug fix: the original called getattr(..., None)(attributeName) directly,
			# which raised TypeError when the object had no _getAccessibleAttribute,
			# bypassing the warning below.
			getAccessibleAttribute = getattr(previousEvaluator.value, '_getAccessibleAttribute', None)
			self.value = None if getAccessibleAttribute == None else getAccessibleAttribute(attributeName)
		if self.value == None:
			print('Warning, EvaluatorAttribute in evaluate can not get a getAccessibleAttributeFunction for:')
			print(attributeName)
			print(previousEvaluator.value)
			print(self)
			return
		del evaluators[previousIndex]
class EvaluatorBracketCurly(Evaluator):
	'Evaluator for curly brackets, which build a dictionary from key : value pairs.'
	def executeBracket( self, bracketBeginIndex, bracketEndIndex, evaluators ):
		'Evaluate the bracket contents into KeyValue pairs and collect them as a dictionary.'
		# Convert any equation values inside the bracket, iterating backwards so indexes stay valid.
		for evaluatorIndex in xrange( bracketEndIndex - 3, bracketBeginIndex, - 1 ):
			bracketEndIndex = getEndIndexConvertEquationValue( bracketEndIndex, evaluatorIndex, evaluators )
		evaluatedExpressionValueEvaluators = getBracketEvaluators(bracketBeginIndex, bracketEndIndex, evaluators)
		self.value = {}
		for evaluatedExpressionValueEvaluator in evaluatedExpressionValueEvaluators:
			# Each inner evaluator holds a KeyValue; its keyTuple supplies one dict entry.
			keyValue = evaluatedExpressionValueEvaluator.value
			self.value[ keyValue.keyTuple[0] ] = keyValue.keyTuple[1]
		del evaluators[ bracketBeginIndex + 1: bracketEndIndex + 1 ]
class EvaluatorBracketRound(Evaluator):
	'Evaluator for round brackets, which group arguments and trigger function calls.'
	def __init__(self, word, xmlElement):
		'Initialize with no arguments and no value.'
		self.arguments = []
		self.value = None
		self.word = word
	def executeBracket( self, bracketBeginIndex, bracketEndIndex, evaluators ):
		'Collect the bracketed values as arguments; a single argument also becomes the value.'
		self.arguments = getBracketValuesDeleteEvaluator(bracketBeginIndex, bracketEndIndex, evaluators)
		if len(self.arguments) == 0:
			return
		self.value = self.arguments if len(self.arguments) > 1 else self.arguments[0]
	def executeRightOperation( self, evaluators, evaluatorIndex ):
		'Ask the evaluator to the left to execute itself as a function with these arguments.'
		previousIndex = evaluatorIndex - 1
		if previousIndex < 0:
			return
		evaluators[previousIndex].executeFunction(evaluators, previousIndex, self)
class EvaluatorBracketSquare(Evaluator):
	'Evaluator for square brackets, which build lists and index enumerables.'
	def executeBracket( self, bracketBeginIndex, bracketEndIndex, evaluators ):
		'Collect the bracketed values into a list.'
		self.value = getBracketValuesDeleteEvaluator(bracketBeginIndex, bracketEndIndex, evaluators)
	def executeRightOperation( self, evaluators, evaluatorIndex ):
		'Ask the evaluator to the left to index itself by this list of keys.'
		previousIndex = evaluatorIndex - 1
		if previousIndex < 0 or self.value.__class__ != list:
			return
		evaluators[previousIndex].executeKey(evaluators, self.value, previousIndex, self)
class EvaluatorComma(Evaluator):
	'Evaluator for the comma separator.'
	def executePairOperation(self, evaluators, evaluatorIndex, operationLevel):
		'At the lowest precedence level (0), commas are simply removed.'
		if operationLevel != 0:
			return
		del evaluators[evaluatorIndex]
class EvaluatorConcatenate(Evaluator):
	'Evaluator for the || concatenation operator.'
	def executePairOperation(self, evaluators, evaluatorIndex, operationLevel):
		'Concatenate the two neighbouring values at precedence level 80.'
		if operationLevel != 80:
			return
		leftIndex = evaluatorIndex - 1
		if leftIndex < 0:
			# No left operand; drop the operator.
			del evaluators[evaluatorIndex]
			return
		rightIndex = evaluatorIndex + 1
		if rightIndex >= len(evaluators):
			# No right operand; drop the left operand and the operator.
			del evaluators[ leftIndex : rightIndex ]
			return
		leftValue = evaluators[leftIndex].value
		rightValue = evaluators[rightIndex].value
		# With equal classes this condition admits list || list and str || str concatenation.
		if leftValue.__class__ == rightValue.__class__ and (leftValue.__class__ == list or rightValue.__class__ == str):
			evaluators[leftIndex].value = leftValue + rightValue
			del evaluators[ evaluatorIndex : evaluatorIndex + 2 ]
			return
		if leftValue.__class__ == list and rightValue.__class__ == int:
			# list || int repeats the list rightValue times (in place on leftValue).
			if rightValue > 0:
				originalList = leftValue[:]
				for copyIndex in xrange( rightValue - 1 ):
					leftValue += originalList
				evaluators[leftIndex].value = leftValue
				del evaluators[ evaluatorIndex : evaluatorIndex + 2 ]
			return
		if leftValue.__class__ == dict and rightValue.__class__ == dict:
			# dict || dict merges the right dictionary into the left one.
			leftValue.update(rightValue)
			evaluators[leftIndex].value = leftValue
			del evaluators[ evaluatorIndex : evaluatorIndex + 2 ]
			return
		# Unsupported combination: remove operands and operator entirely.
		del evaluators[ leftIndex : evaluatorIndex + 2 ]
class EvaluatorDictionary(Evaluator):
	'Evaluator for the colon, which joins a key and a value into a KeyValue.'
	def executePairOperation(self, evaluators, evaluatorIndex, operationLevel):
		'At precedence level 10, pair the neighbouring key and value into a KeyValue.'
		if operationLevel != 10:
			return
		leftEvaluatorIndex = evaluatorIndex - 1
		if leftEvaluatorIndex < 0:
			print('Warning, leftEvaluatorIndex is less than zero in EvaluatorDictionary for:')
			print(self)
			print(evaluators)
			return
		rightEvaluatorIndex = evaluatorIndex + 1
		if rightEvaluatorIndex >= len(evaluators):
			print('Warning, rightEvaluatorIndex too high in EvaluatorDictionary for:')
			print(rightEvaluatorIndex)
			print(self)
			print(evaluators)
			return
		# Store the pair on the right evaluator, then remove the key and the colon,
		# leaving only the KeyValue-bearing evaluator.
		evaluators[rightEvaluatorIndex].value = KeyValue(evaluators[leftEvaluatorIndex].value, evaluators[rightEvaluatorIndex].value)
		del evaluators[ leftEvaluatorIndex : rightEvaluatorIndex ]
class EvaluatorDivision(EvaluatorAddition):
	'Evaluator for the division operator.'
	def executePairOperation(self, evaluators, evaluatorIndex, operationLevel):
		'Apply the operator at precedence level 40.'
		if operationLevel != 40:
			return
		self.executePair(evaluators, evaluatorIndex)
	def getValueFromValuePair(self, leftValue, rightValue):
		'Divide leftValue by rightValue.'
		return leftValue / rightValue
class EvaluatorElement(Evaluator):
	'Element evaluator class: looks up attributes of geometry element plugin modules.'
	def __init__(self, word, xmlElement):
		'Set value to None and store the word and element.'
		self.value = None
		self.word = word
		self.xmlElement = xmlElement
	def executeCenterOperation(self, evaluators, evaluatorIndex):
		'Resolve the word of the form moduleName.attributeName against the element plugins.'
		dotIndex = self.word.find('.')
		if dotIndex < 0:
			# Bug fix: the warning previously printed the undefined name functionName,
			# raising a NameError instead of warning; print the offending word instead.
			print('Warning, EvaluatorElement in evaluate can not find the dot for:')
			print(self.word)
			print(self)
			return
		attributeName = self.word[dotIndex + 1 :]
		moduleName = self.word[: dotIndex]
		if moduleName in globalModuleFunctionsDictionary:
			# Use the cached accessible-attribute function for this module.
			self.value = globalModuleFunctionsDictionary[moduleName](attributeName, self.xmlElement)
			return
		pluginModule = None
		if moduleName in globalElementNameSet:
			pluginModule = gcodec.getModuleWithPath(archive.getElementsPath(moduleName))
		if pluginModule == None:
			print('Warning, EvaluatorElement in evaluate can not get a pluginModule for:')
			print(moduleName)
			print(self)
			return
		# Cache the module's accessor for later lookups.
		getAccessibleAttributeFunction = pluginModule._getAccessibleAttribute
		globalModuleFunctionsDictionary[moduleName] = getAccessibleAttributeFunction
		self.value = getAccessibleAttributeFunction(attributeName, self.xmlElement)
	def executeFunction(self, evaluators, evaluatorIndex, nextEvaluator):
		'Call the resolved attribute with the arguments gathered by the bracket evaluator.'
		if self.value == None:
			print('Warning, executeFunction in EvaluatorElement in evaluate can not get a self.value for:')
			print(evaluatorIndex)
			print(evaluators)
			print(self)
			return
		nextEvaluator.value = self.value(*nextEvaluator.arguments)
		del evaluators[evaluatorIndex]
class EvaluatorFalse(Evaluator):
	'Evaluator for the boolean literal false.'
	def __init__(self, word, xmlElement):
		'Store the word and fix the value at False.'
		self.word = word
		self.value = False
class EvaluatorFunction(Evaluator):
	'Function evaluator class: runs a user-defined function element with arguments.'
	def __init__(self, word, xmlElement):
		'Set value to None and store the word and element.'
		self.value = None
		self.word = word
		self.xmlElement = xmlElement
	def executeFunction(self, evaluators, evaluatorIndex, nextEvaluator):
		'Execute the function and store its return value in nextEvaluator.'
		if self.xmlElement.object == None:
			# Lazily compile the element's return expression into split words and cache it.
			if 'return' in self.xmlElement.attributeDictionary:
				value = self.xmlElement.attributeDictionary['return']
				self.xmlElement.object = getEvaluatorSplitWords(value)
			else:
				self.xmlElement.object = []
		self.function = Function( self.xmlElement.object, self.xmlElement )
		self.setFunctionLocalTable( nextEvaluator )
		nextEvaluator.value = self.function.getReturnValue()
		del evaluators[evaluatorIndex]
	def setFunctionLocalTable(self, nextEvaluator):
		'Fill the function local dictionary from the call arguments and declared parameters.'
		self.function.localDictionary['_arguments'] = nextEvaluator.arguments
		if len(nextEvaluator.arguments) > 0:
			firstArgument = nextEvaluator.arguments[0]
			if firstArgument.__class__ == dict:
				# A single dictionary argument becomes the entire local table.
				self.function.localDictionary = firstArgument
				return
		if 'parameters' not in self.function.xmlElement.attributeDictionary:
			return
		parameters = self.function.xmlElement.attributeDictionary['parameters'].strip()
		if parameters == '':
			return
		parameterWords = parameters.split(',')
		for parameterWordIndex, parameterWord in enumerate(parameterWords):
			strippedWord = parameterWord.strip()
			# Each parameter may carry a default as name=value.
			keyValue = KeyValue().getByEqual(strippedWord)
			if parameterWordIndex < len(nextEvaluator.arguments):
				# Positional argument supplied; it overrides any default.
				self.function.localDictionary[keyValue.keyTuple[0]] = nextEvaluator.arguments[parameterWordIndex]
			else:
				strippedValue = keyValue.keyTuple[1]
				if strippedValue == None:
					print('Warning there is no default parameter in getParameterValue for:')
					print(strippedWord)
					print(parameterWords)
					print(nextEvaluator.arguments)
					print( self.function.xmlElement.attributeDictionary )
				else:
					strippedValue = strippedValue.strip()
				self.function.localDictionary[keyValue.keyTuple[0].strip()] = strippedValue
		if len(nextEvaluator.arguments) > len(parameterWords):
			print('Warning there are too many function parameters for:')
			print( self.function.xmlElement.attributeDictionary )
			print(parameterWords)
			print(nextEvaluator.arguments)
class EvaluatorFundamental(EvaluatorAttribute):
	'Fundamental evaluator class: looks up attributes of fundamental plugin modules.'
	def executeCenterOperation(self, evaluators, evaluatorIndex):
		'Resolve the word of the form moduleName.attributeName against the fundamental plugins.'
		dotIndex = self.word.find('.')
		if dotIndex < 0:
			# Bug fix: the warning previously printed the undefined name functionName,
			# raising a NameError instead of warning; print the offending word instead.
			print('Warning, EvaluatorFundamental in evaluate can not find the dot for:')
			print(self.word)
			print(self)
			return
		attributeName = self.word[dotIndex + 1 :]
		moduleName = self.word[: dotIndex]
		if moduleName in globalModuleFunctionsDictionary:
			# Use the cached accessible-attribute function for this module.
			self.value = globalModuleFunctionsDictionary[moduleName](attributeName)
			return
		pluginModule = None
		if moduleName in globalFundamentalNameSet:
			pluginModule = gcodec.getModuleWithPath(archive.getFundamentalsPath(moduleName))
		else:
			# Some fundamental plugins are stored with a leading underscore.
			underscoredName = '_' + moduleName
			if underscoredName in globalFundamentalNameSet:
				pluginModule = gcodec.getModuleWithPath(archive.getFundamentalsPath(underscoredName))
		if pluginModule == None:
			print('Warning, EvaluatorFundamental in evaluate can not get a pluginModule for:')
			print(moduleName)
			print(self)
			return
		# Cache the module's accessor for later lookups.
		getAccessibleAttributeFunction = pluginModule._getAccessibleAttribute
		globalModuleFunctionsDictionary[moduleName] = getAccessibleAttributeFunction
		self.value = getAccessibleAttributeFunction(attributeName)
class EvaluatorGreaterEqual(EvaluatorEqual):
	'Evaluator for the >= comparison operator.'
	def getBooleanFromValuePair(self, leftValue, rightValue):
		'Return whether leftValue is greater than or equal to rightValue.'
		return leftValue >= rightValue
class EvaluatorGreater(EvaluatorEqual):
	'Evaluator for the > comparison operator.'
	def getBooleanFromValuePair(self, leftValue, rightValue):
		'Return whether leftValue is greater than rightValue.'
		return leftValue > rightValue
class EvaluatorLessEqual(EvaluatorEqual):
	'Evaluator for the <= comparison operator.'
	def getBooleanFromValuePair(self, leftValue, rightValue):
		'Return whether leftValue is less than or equal to rightValue.'
		return leftValue <= rightValue
class EvaluatorLess(EvaluatorEqual):
	'Evaluator for the < comparison operator.'
	def getBooleanFromValuePair(self, leftValue, rightValue):
		'Return whether leftValue is less than rightValue.'
		return leftValue < rightValue
class EvaluatorLocal(Evaluator):
	'Evaluator for a local variable of the innermost function.'
	def __init__(self, word, xmlElement):
		'Look up the word in the local dictionary of the function on top of the stack.'
		self.word = word
		self.value = None
		functions = xmlElement.getXMLProcessor().functions
		if not functions:
			return
		currentLocals = functions[-1].localDictionary
		if word in currentLocals:
			self.value = currentLocals[word]
class EvaluatorModulo(EvaluatorDivision):
	'Evaluator for the modulo operator.'
	def getValueFromValuePair(self, leftValue, rightValue):
		'Return leftValue modulo rightValue.'
		return leftValue % rightValue
class EvaluatorMultiplication(EvaluatorDivision):
	'Evaluator for the multiplication operator.'
	def getValueFromValuePair(self, leftValue, rightValue):
		'Return the product of the two values.'
		return leftValue * rightValue
class EvaluatorNone(Evaluator):
	'Evaluator for the literal None.'
	def __init__(self, word, xmlElement):
		'Store the stringified word and fix the value at None.'
		self.word = str(word)
		self.value = None
class EvaluatorNot(EvaluatorSubtraction):
	'Evaluator for the logical not operator.'
	def executeLeftOperation(self, evaluators, evaluatorIndex, operationLevel):
		'Apply logical not at precedence level 13.'
		if operationLevel != 13:
			return
		self.executeLeft(evaluators, evaluatorIndex)
	def getValueFromSingleValue( self, value ):
		'Return the logical negation of the value.'
		return not value
class EvaluatorNotEqual(EvaluatorEqual):
	'Evaluator for the != comparison operator.'
	def getBooleanFromValuePair(self, leftValue, rightValue):
		'Return whether the two values differ.'
		return leftValue != rightValue
class EvaluatorNumeric(Evaluator):
	'Evaluator for a numeric literal.'
	def __init__(self, word, xmlElement):
		'Parse the word as a float when it contains a dot, otherwise as an int.'
		self.value = None
		self.word = word
		try:
			if '.' in word:
				self.value = float(word)
			else:
				self.value = int(word)
		except (TypeError, ValueError):
			# Narrowed from a bare except: only conversion failures are expected here,
			# so genuine programming errors are no longer silently swallowed.
			print('Warning, in EvaluatorNumeric in evaluate could not get a numeric value for:')
			print(word)
			print(xmlElement)
class EvaluatorOr(EvaluatorAnd):
	'Evaluator for the logical or operator.'
	def getBooleanFromValuePair(self, leftValue, rightValue):
		'Logically or the two values.'
		return leftValue or rightValue
class EvaluatorPower(EvaluatorAddition):
	'Evaluator for the exponentiation operator.'
	def executePairOperation(self, evaluators, evaluatorIndex, operationLevel):
		'Apply the operator at precedence level 60.'
		if operationLevel != 60:
			return
		self.executePair(evaluators, evaluatorIndex)
	def getValueFromValuePair(self, leftValue, rightValue):
		'Return leftValue raised to the power of rightValue.'
		return leftValue ** rightValue
class EvaluatorTrue(Evaluator):
	'Evaluator for the boolean literal true.'
	def __init__(self, word, xmlElement):
		'Store the word and fix the value at True.'
		self.word = word
		self.value = True
class EvaluatorValue(Evaluator):
	'Evaluator holding an already-computed value.'
	def __init__(self, word):
		'Use the word itself as the value and its string form as the word.'
		self.word = str(word)
		self.value = word
class Function:
	"Class to get equation results; registers itself on the processor's function stack."
	def __init__( self, evaluatorSplitLine, xmlElement ):
		"Initialize and push this function onto the processor's function stack."
		self.evaluatorSplitLine = evaluatorSplitLine
		self.localDictionary = {}
		self.returnValue = None
		self.xmlElement = xmlElement
		self.xmlProcessor = xmlElement.getXMLProcessor()
		self.xmlProcessor.functions.append(self)
	def __repr__(self):
		"Get the string representation of this Function."
		return '%s, %s, %s' % ( self.evaluatorSplitLine, self.localDictionary, self.returnValue )
	def getReturnValue(self):
		"Get the return value, then pop this function from the stack."
		self.getReturnValueWithoutDeletion()
		self.reset()
		return self.returnValue
	def getReturnValueWithoutDeletion(self):
		"Get the return value without popping this function from the stack."
		if len( self.evaluatorSplitLine ) < 1:
			# No return expression: run the child elements for their side effects instead.
			self.shouldReturn = False
			self.processChildren(self.xmlElement)
		else:
			self.returnValue = getEvaluatedExpressionValueBySplitLine( self.evaluatorSplitLine, self.xmlElement )
		return self.returnValue
	def processChildren(self, xmlElement):
		"Process child elements until shouldReturn becomes true."
		for child in xmlElement.children:
			if self.shouldReturn:
				return
			self.xmlProcessor.processXMLElement( child )
	def reset(self):
		"Pop the most recent function from the processor's function stack."
		del self.xmlElement.getXMLProcessor().functions[-1]
class KeyValue:
	"Class to hold a key and a value as a tuple."
	def __init__( self, key = None, value = None ):
		"Initialize the keyTuple; chaining onto an existing KeyValue extends its tuple."
		if key.__class__ == KeyValue:
			self.keyTuple = key.keyTuple + ( value, )
			return
		self.keyTuple = ( key, value )
	def __repr__(self):
		"Get the string representation of this KeyValue."
		return str( self.keyTuple )
	def getByCharacter( self, character, line ):
		"Split line at the first occurrence of character; the value is None when the character is absent."
		# Renamed the local from the misleading dotIndex: this splitter is used for '=' too.
		characterIndex = line.find( character )
		if characterIndex < 0:
			self.keyTuple = ( line, None )
			return self
		self.keyTuple = ( line[: characterIndex], line[ characterIndex + 1 : ] )
		return self
	def getByDot(self, line):
		"Get the key and value split by a dot."
		return self.getByCharacter('.', line )
	def getByEqual(self, line):
		"Get the key and value split by an equals sign."
		# Bug fix: docstring previously said 'Get by dot.'
		return self.getByCharacter('=', line )
class ModuleXMLElement:
	"Class holding a condition expression and the optional else/elif element that follows."
	def __init__( self, xmlElement):
		"Parse the condition attribute and look for a following else or elif sibling."
		self.conditionSplitWords = None
		self.elseElement = None
		if 'condition' in xmlElement.attributeDictionary:
			self.conditionSplitWords = getEvaluatorSplitWords( xmlElement.attributeDictionary['condition'] )
		else:
			print('Warning, could not find the "condition" attribute in ModuleXMLElement in evaluate for:')
			print(xmlElement)
			return
		if len( self.conditionSplitWords ) < 1:
			self.conditionSplitWords = None
			print('Warning, could not get split words for the "condition" attribute in ModuleXMLElement in evaluate for:')
			print(xmlElement)
		# Look at the next sibling; only else/elif elements are attached to this one.
		nextIndex = getNextChildIndex(xmlElement)
		if nextIndex >= len( xmlElement.parent.children ):
			return
		nextXMLElement = xmlElement.parent.children[ nextIndex ]
		lowerClassName = nextXMLElement.className.lower()
		if lowerClassName != 'else' and lowerClassName != 'elif':
			return
		xmlProcessor = xmlElement.getXMLProcessor()
		if lowerClassName not in xmlProcessor.namePathDictionary:
			return
		# Load the plugin that knows how to process the else/elif element.
		self.pluginModule = gcodec.getModuleWithPath( xmlProcessor.namePathDictionary[ lowerClassName ] )
		if self.pluginModule == None:
			return
		self.elseElement = nextXMLElement
	def processElse( self, xmlElement):
		"Process the else statement, if an else element was found."
		if self.elseElement != None:
			self.pluginModule.processElse( self.elseElement)
globalCreationDictionary = archive.getGeometryDictionary('creation')
globalDictionaryOperatorBegin = {
'||' : EvaluatorConcatenate,
'==' : EvaluatorEqual,
'>=' : EvaluatorGreaterEqual,
'<=' : EvaluatorLessEqual,
'!=' : EvaluatorNotEqual,
'**' : EvaluatorPower }
globalModuleEvaluatorDictionary = {}
globalFundamentalNameSet = set(gcodec.getPluginFileNamesFromDirectoryPath(archive.getFundamentalsPath()))
addPrefixDictionary(globalModuleEvaluatorDictionary, globalFundamentalNameSet, EvaluatorFundamental)
globalElementNameSet = set(gcodec.getPluginFileNamesFromDirectoryPath(archive.getElementsPath()))
addPrefixDictionary(globalModuleEvaluatorDictionary, globalElementNameSet, EvaluatorElement)
globalSplitDictionaryOperator = {
'+' : EvaluatorAddition,
'{' : EvaluatorBracketCurly,
'}' : Evaluator,
'(' : EvaluatorBracketRound,
')' : Evaluator,
'[' : EvaluatorBracketSquare,
']' : Evaluator,
',' : EvaluatorComma,
':' : EvaluatorDictionary,
'/' : EvaluatorDivision,
'>' : EvaluatorGreater,
'<' : EvaluatorLess,
'%' : EvaluatorModulo,
'*' : EvaluatorMultiplication,
'-' : EvaluatorSubtraction }
globalSplitDictionary = getSplitDictionary() # must be after globalSplitDictionaryOperator
|
from bkr.server.model import RetentionTag, Product
from bkr.server.widgets import ProductWidget
from sqlalchemy.orm.exc import NoResultFound
import logging
log = logging.getLogger(__name__)
class Utility:
    # Keys recognised by the job widget in the 'vars' of an error response.
    _needs_product = 'NEEDS_PRODUCT'
    _needs_no_product = 'NEEDS_NO_PRODUCT'
    _needs_tag = 'NEEDS_TAG'

    @classmethod
    def update_retention_tag_and_product(cls, job, retentiontag, product):
        """Set both the retention tag and the product of job, validating compatibility.

        Returns {'success': True} on success, otherwise a dict with
        'success': False, a 'msg' and widget 'vars' describing the problem.
        """
        if retentiontag.requires_product() != bool(product):
            # Tag and product disagree: either the tag needs a product and none
            # was given, or a product was given to a tag that forbids one.
            # (Renamed the local from 'vars', which shadowed the builtin.)
            if retentiontag.requires_product():
                error_vars = {cls._needs_product: 1}
            else:
                error_vars = {cls._needs_no_product: 1}
            return {'success': False,
                    'msg': 'Incompatible product and tags',
                    'vars': error_vars}
        job.retention_tag = retentiontag
        job.product = product
        return {'success': True}

    @classmethod
    def update_retention_tag(cls, job, retentiontag):
        """Change only the retention tag of job, keeping its current product.

        Performs the logic needed to determine if changing a retention_tag is
        valid; returns an error dict fit for displaying in the widget.
        """
        the_product = job.product
        if retentiontag.requires_product() != bool(the_product):
            if retentiontag.requires_product():
                # The widget must also deselect the (absent) product.
                error_vars = {cls._needs_product: 1,
                              'INVALID_PRODUCTS': [ProductWidget.product_deselected]}
            else:
                error_vars = {cls._needs_no_product: 1}
            return {'success': False,
                    'msg': 'Incompatible product and tags',
                    'vars': error_vars}
        job.retention_tag = retentiontag
        return {'success': True}

    @classmethod
    def update_product(cls, job, product):
        """Change only the product of job, keeping its current retention tag.

        Returns an error dict listing the valid tags when the current tag and
        the new product are incompatible.
        """
        retentiontag = job.retention_tag
        if not retentiontag.requires_product() and product is not None:
            return {'success': False,
                    'msg': 'Current retention tag does not support a product',
                    'vars': {cls._needs_tag: 1,
                             'VALID_TAGS': [[tag.id, tag.tag] for tag in
                                            RetentionTag.list_by_requires_product()]}}
        if retentiontag.requires_product() and product is None:
            return {'success': False,
                    'msg': 'Current retention tag requires a product',
                    'vars': {cls._needs_tag: 1,
                             'VALID_TAGS': [[tag.id, tag.tag] for tag in
                                            RetentionTag.list_by_requires_product(False)]}}
        job.product = product
        return {'success': True}
|
"""
Controllers
"""
import copy
import logging
import os
from monty.json import MontyDecoder
from pymatgen.io.vasp.outputs import Vasprun
from pymatgen.analysis.transition_state import NEBAnalysis
from pymatgen.core.structure import Structure
from abipy.flowtk.utils import Directory, File
from abipy.flowtk import events
from abipy.flowtk.scheduler_error_parsers import (MemoryCancelError, MasterProcessMemoryCancelError,
SlaveProcessMemoryCancelError, TimeCancelError)
from abipy.abio.inputs import AbinitInput
from abiflows.core.mastermind_abc import Action
from abiflows.core.mastermind_abc import Controller
from abiflows.core.mastermind_abc import ControllerNote
from abiflows.core.mastermind_abc import ControlledItemType
from abiflows.core.mastermind_abc import PRIORITY_HIGH
from abiflows.core.mastermind_abc import PRIORITY_VERY_LOW
from abiflows.core.mastermind_abc import PRIORITY_LOWEST
logger = logging.getLogger(__name__)
class AbinitController(Controller):
    """
    General handler for abinit's events.
    Determines whether the calculation ended correctly or not and fixes errors (including unconverged) if Abinit
    error handlers are available.
    """
    is_handler = True
    is_validator = True
    can_validate = True
    # Only completed tasks are controlled; failed tasks were once handled too:
    #_controlled_item_types = [ControlledItemType.task_completed(), ControlledItemType.task_failed()]
    _controlled_item_types = [ControlledItemType.task_completed()]
def __init__(self, critical_events=None, handlers=None):
"""
Initializes the controller with the critical events that trigger the restart and a list of ErrorHandlers
Args:
critical_events: List of events that trigger a restart due to unconverged calculation
handlers: List of ErrorHandlers (pymatgen.io.abinit.events) used to handle specific events
"""
super().__init__()
critical_events = [] if critical_events is None else critical_events
handlers = [] if handlers is None else handlers
self.critical_events = critical_events if isinstance(critical_events, (list, tuple)) else [critical_events]
self.handlers = handlers if isinstance(handlers, (list, tuple)) else [handlers]
self.set_priority(PRIORITY_HIGH)
    def process(self, **kwargs):
        """
        Inspect the outputs of an abinit run and return the resulting ControllerNote.

        Required kwargs: abinit_input, abinit_output_filepath, abinit_log_filepath,
        abinit_mpi_abort_filepath, abinit_outdir_path.
        Optional kwargs: queue_adapter, structure.
        """
        for kw in ['abinit_input', 'abinit_output_filepath', 'abinit_log_filepath', 'abinit_mpi_abort_filepath',
                   'abinit_outdir_path']:
            if kw not in kwargs:
                raise ValueError("kwarg {} is required to process abinit results".format(kw))
        # Deep-copy mutable inputs so fixes applied here do not alter the caller's objects.
        queue_adapter = copy.deepcopy(kwargs.get('queue_adapter', None))
        abinit_input = copy.deepcopy(kwargs.get('abinit_input'))
        abinit_output_file = File(kwargs.get('abinit_output_filepath'))
        abinit_log_file = File(kwargs.get('abinit_log_filepath'))
        abinit_mpi_abort_file = File(kwargs.get('abinit_mpi_abort_filepath'))
        abinit_outdir_path = Directory(kwargs.get('abinit_outdir_path'))
        structure = kwargs.get('structure', None)
        note = ControllerNote(controller=self)
        # Initialize the actions for everything that is passed to kwargs
        actions = {}
        report = None
        try:
            report = self.get_event_report(abinit_log_file, abinit_mpi_abort_file)
        except Exception as exc:
            msg = "%s exception while parsing event_report:\n%s" % (self, exc)
            logger.critical(msg)
        # If the calculation is ok, parse the outputs
        if report is not None:
            # the calculation finished without errors
            if report.run_completed:
                # Check if the calculation converged.
                critical_events_found = report.filter_types(self.critical_events)
                if critical_events_found:
                    # self.history.log_unconverged()
                    # hook
                    # local_restart, restart_fw, stored_data = self.prepare_restart(fw_spec)
                    # num_restarts = self.restart_info.num_restarts if self.restart_info else 0
                    # if num_restarts < self.ftm.fw_policy.max_restarts:
                    #     if local_restart:
                    #         return None
                    #     else:
                    #         stored_data['final_state'] = 'Unconverged'
                    #         return FWAction(detours=restart_fw, stored_data=stored_data)
                    # else:
                    #     raise UnconvergedError(self, msg="Unconverged after {} restarts".format(num_restarts),
                    #                            abiinput=self.abiinput, restart_info=self.restart_info,
                    #                            history=self.history)
                    # Calculation did not converge. A simple restart is enough
                    note.state = ControllerNote.ERROR_RECOVERABLE
                    note.simple_restart()
                    if 'RelaxConvergenceWarning' in [e.name for e in critical_events_found]:
                        # Unconverged relaxations must restart from the last structure.
                        if structure is None:
                            raise ValueError('Structure should be present in the initial objects for restarting '
                                             'relaxation runs')
                        # Fake action ... TODO: make something better for the actions ...
                        actions['structure'] = Action(callable=Structure.__len__)
                    note.add_problem('Unconverged: {}'.format(', '.join(e.name for e in critical_events_found)))
                else:
                    # calculation converged
                    #TODO move to a different controler
                    # check if there are custom parameters that should be converged
                    # unconverged_params, reset_restart = self.check_parameters_convergence(fw_spec)
                    # if unconverged_params:
                    #     self.history.log_converge_params(unconverged_params, self.abiinput)
                    #     self.abiinput.set_vars(**unconverged_params)
                    #     local_restart, restart_fw, stored_data = self.prepare_restart(fw_spec, reset=reset_restart)
                    #     num_restarts = self.restart_info.num_restarts if self.restart_info else 0
                    #     if num_restarts < self.ftm.fw_policy.max_restarts:
                    #         if local_restart:
                    #             return None
                    #         else:
                    #             stored_data['final_state'] = 'Unconverged_parameters'
                    #             return FWAction(detours=restart_fw, stored_data=stored_data)
                    #     else:
                    #         raise UnconvergedParametersError(self, abiinput=self.abiinput,
                    #                                          restart_info=self.restart_info, history=self.history)
                    # else:
                    #     # everything is ok. conclude the task
                    #     # hook
                    #     update_spec, mod_spec, stored_data = self.conclude_task(fw_spec)
                    #     return FWAction(stored_data=stored_data, update_spec=update_spec, mod_spec=mod_spec)
                    note.state = ControllerNote.EVERYTHING_OK
                    note.is_valid = True
            elif report.errors:
                # Abinit reported problems
                # Check if the errors could be handled
                logger.debug('Found errors in report')
                # for error in report.errors:
                #     logger.debug(str(error))
                #     try:
                #         self.abi_errors.append(error)
                #     except AttributeError:
                #         self.abi_errors = [error]
                # ABINIT errors, try to handle them
                fixed, reset, abiinput_actions = self.fix_abicritical(report=report, abiinput=abinit_input,
                                                                      queue_adapter=queue_adapter,
                                                                      outdir=abinit_outdir_path)
                if fixed:
                    note.state = ControllerNote.ERROR_RECOVERABLE
                    if reset:
                        note.reset_restart()
                    else:
                        note.simple_restart()
                    actions['abinit_input'] = abiinput_actions
                    #TODO if the queue_adapter can be modified by the handlers return it
                    # actions['queue_adapter'] = queue_adapter_actions
                else:
                    msg = "Critical events couldn't be fixed by handlers."
                    logger.info(msg)
                    note.state = ControllerNote.ERROR_UNRECOVERABLE
                    for err in report.errors:
                        note.add_problem(err)
            else:
                # Calculation not completed but no errors. No fix could be applied in this controller
                note.state = ControllerNote.ERROR_UNRECOVERABLE
                note.add_problem('Abinit calculation not completed but no errors in report.')
        else:
            # report does not exist. No fix could be applied in this controller
            note.state = ControllerNote.NOTHING_FOUND
            note.add_problem('No Abinit report')
        # No errors from abinit. No fix could be applied at this stage.
        # The FW will be fizzled.
        # Try to save the stderr file for Fortran runtime errors.
        #TODO check if some cases could be handled here
        #     err_msg = None
        #     if self.stderr_file.exists:
        #         #TODO length should always be enough, but maybe it's worth cutting the message if it's too long
        #         err_msg = self.stderr_file.read()
        #         # It happened that the text file contained non utf-8 characters.
        #         # sanitize the text to avoid problems during database inserption
        #         err_msg.decode("utf-8", "ignore")
        #     logger.error("return code {}".format(self.returncode))
        #     raise AbinitRuntimeError(self, err_msg)
        note.actions = actions
        return note
@classmethod
def from_dict(cls, d):
dec = MontyDecoder()
import importlib
critical_events = []
for ced in d['critical_events']:
mod = importlib.import_module(ced['module'])
ce = getattr(mod, ced['name'])
critical_events.append(ce)
return cls(critical_events=critical_events,
# critical_events=dec.process_decoded(d['critical_events']),
handlers=dec.process_decoded(d['handlers']))
def as_dict(self):
critical_events = [{'module': ce.__module__, 'name': ce.__name__} for ce in self.critical_events]
return {'@class': self.__class__.__name__, '@module': self.__class__.__module__,
# 'critical_events': [ce.as_dict() for ce in self.critical_events],
'critical_events': critical_events,
'handlers': [er.as_dict() for er in self.handlers]
}
def get_event_report(self, ofile, mpiabort_file):
"""
Analyzes the main output file for possible Errors or Warnings.
Returns:
:class:`EventReport` instance or None if the main output file does not exist.
"""
parser = events.EventsParser()
if not ofile.exists:
if not mpiabort_file.exists:
return None
else:
# ABINIT abort file without log!
abort_report = parser.parse(mpiabort_file.path)
return abort_report
try:
report = parser.parse(ofile.path)
# Add events found in the ABI_MPIABORTFILE.
if mpiabort_file.exists:
logger.critical("Found ABI_MPIABORTFILE!")
abort_report = parser.parse(mpiabort_file.path)
if len(abort_report) == 0:
logger.warning("ABI_MPIABORTFILE but empty")
else:
if len(abort_report) != 1:
logger.critical("Found more than one event in ABI_MPIABORTFILE")
# Add it to the initial report only if it differs
# from the last one found in the main log file.
last_abort_event = abort_report[-1]
if report and last_abort_event != report[-1]:
report.append(last_abort_event)
else:
report.append(last_abort_event)
return report
#except parser.Error as exc:
except Exception as exc:
# Return a report with an error entry with info on the exception.
logger.critical("{}: Exception while parsing ABINIT events:\n {}".format(ofile, str(exc)))
return parser.report_exception(ofile.path, exc)
def fix_abicritical(self, report, abiinput, outdir, queue_adapter=None):
"""
method to fix crashes/error caused by abinit
Returns:
retcode: 1 if task has been fixed else 0.
reset: True if at least one of the corrections applied requires a reset
"""
if not self.handlers:
logger.info('Empty list of event handlers. Cannot fix abi_critical errors')
return 0, None, []
done = len(self.handlers) * [0]
corrections = []
for event in report:
for i, handler in enumerate(self.handlers):
if handler.can_handle(event) and not done[i]:
logger.info("handler", handler, "will try to fix", event)
try:
#TODO pass the queueadapter to the handlers? the output should be modified in that case
c = handler.handle_input_event(abiinput, outdir, event)
if c:
done[i] += 1
corrections.append(c)
except Exception as exc:
logger.critical(str(exc))
if corrections:
reset = any(c.reset for c in corrections)
# self.history.log_corrections(corrections)
# convert the actions applied on the input to Actions
actions = []
for c in corrections:
# remove vars as a first action, in case incopatible variables have been set.
if '_pop' in c.actions:
actions.append(Action(AbinitInput.remove_vars(c.actions['_pop'])))
if '_set' in c.actions:
actions.append(Action(AbinitInput.set_vars(c.actions['_set'])))
if '_update' in c.actions:
actions.append(Action(AbinitInput.set_vars(c.actions['_update'])))
if '_change_structure' in c.actions:
actions.append(Action(AbinitInput.set_structure(c.actions['_change_structure'])))
return 1, reset, actions
logger.info('We encountered AbiCritical events that could not be fixed')
return 0, None, []
@classmethod
def from_helper(cls, helper):
return cls(critical_events=helper.CRITICAL_EVENTS, handlers=None)
class QueueControllerMixin(object):
    """Mixin giving controllers access to the error report of the resource manager."""

    def get_queue_errors(self, **kwargs):
        """
        Parse the stderr/stdout files of the resource manager system.

        Recognized kwargs: 'qerr_filepath', 'qout_filepath', 'queue_adapter',
        'memory_policy'.

        Returns:
            The list of scheduler errors found, or None when neither output
            file has any content.
        """
        qerr_filepath = kwargs.get('qerr_filepath')
        qout_filepath = kwargs.get('qout_filepath')
        queue_adapter = kwargs.get('queue_adapter')
        memory_policy = kwargs.get('memory_policy')
        #TODO: deal with the memory policy in the scheduler parser (whether it is vmem or mem in PBS for example ...)

        def _read_if_present(path):
            # Return the file's content, or None when the path is missing/unset.
            if path is None or not os.path.exists(path):
                return None
            with open(path, "r") as fh:
                return fh.read()

        qerr_info = _read_if_present(qerr_filepath)
        qout_info = _read_if_present(qout_filepath)

        # Nothing to parse: no (non-empty) scheduler output at all.
        if not (qerr_info or qout_info):
            return None

        from pymatgen.io.abinit.scheduler_error_parsers import get_parser
        qtype = queue_adapter.QTYPE
        scheduler_parser = get_parser(qtype, err_file=qerr_filepath,
                                      out_file=qout_filepath)
        if scheduler_parser is None:
            raise ValueError('Cannot find scheduler_parser for qtype {}'.format(qtype))
        scheduler_parser.parse()
        return scheduler_parser.errors
class WalltimeController(Controller, QueueControllerMixin):
    """
    Controller for walltime infringements of the resource manager.
    """
    is_handler = True
    _controlled_item_types = [ControlledItemType.task_failed()]

    def __init__(self, max_timelimit=None, timelimit_increase=None):
        """
        Initializes the handler with the directory where the job was run, the standard output and error files
        of the queue manager and the queue adapter used.

        Args:
            max_timelimit: Maximum timelimit (in seconds).
            timelimit_increase: Amount of time (in seconds) to increase the timelimit.
        """
        super().__init__()
        self.max_timelimit = max_timelimit
        self.timelimit_increase = timelimit_increase
        self.priority = PRIORITY_VERY_LOW

    def as_dict(self):
        """Return an MSONable dict representation of the controller."""
        return {
            '@class': self.__class__.__name__,
            '@module': self.__class__.__module__,
            'max_timelimit': self.max_timelimit,
            'timelimit_increase': self.timelimit_increase,
        }

    @classmethod
    def from_dict(cls, d):
        """Rebuild the controller from its dict representation."""
        return cls(max_timelimit=d['max_timelimit'],
                   timelimit_increase=d['timelimit_increase'])

    @property
    def skip_remaining_handlers(self):
        return True

    @property
    def skip_lower_priority_controllers(self):
        return True

    def process(self, **kwargs):
        """Detect a walltime infringement and propose an increased timelimit."""
        adapter = kwargs.get('queue_adapter')
        if adapter is None:
            raise ValueError('No queue adapter passed to the WalltimeController')
        scheduler_errors = self.get_queue_errors(**kwargs)

        note = ControllerNote(controller=self)

        # Scan the scheduler errors (possibly None) for a timelimit error.
        timelimit_error = None
        for err in scheduler_errors or []:
            if isinstance(err, TimeCancelError):
                logger.debug('found timelimit error.')
                timelimit_error = err

        if timelimit_error is None:
            note.state = ControllerNote.NOTHING_FOUND
            return note

        note.add_problem('Task has been stopped due to timelimit')

        # Upper bound: explicit setting, or the hard limit of the queue.
        if self.max_timelimit is None:
            upper_limit = adapter.timelimit_hard
        else:
            upper_limit = self.max_timelimit
        # When timelimit_increase is not set, automatically take a tenth of the hard timelimit of the queue.
        if self.timelimit_increase is None:
            increment = adapter.timelimit_hard / 10
        else:
            increment = self.timelimit_increase

        current_limit = adapter.timelimit
        if current_limit == upper_limit:
            note.state = ControllerNote.ERROR_UNRECOVERABLE
            note.add_problem('Maximum timelimit has been reached, cannot increase further')
            return note

        # Increase the timelimit, capped at the maximum.
        proposed_limit = min(current_limit + increment, upper_limit)

        note.actions = {'queue_adapter': Action(callable=adapter.__class__.set_timelimit,
                                                timelimit=proposed_limit)}
        note.state = ControllerNote.ERROR_RECOVERABLE
        note.reset_restart()
        return note
class MemoryController(Controller, QueueControllerMixin):
    """
    Controller for memory infringements of the resource manager. The handler should be able to handle the possible
    overhead of the master process.
    """
    is_handler = True
    #_controlled_item_types = [ControlledItemType.task_failed()]
    _controlled_item_types = [ControlledItemType.task_completed(), ControlledItemType.task_failed()]

    def __init__(self, max_mem_per_proc_mb=8000, mem_per_proc_increase_mb=1000,
                 max_master_mem_overhead_mb=8000, master_mem_overhead_increase_mb=1000,
                 memory_policy='physical_memory'):
        """
        Initializes the handler with the directory where the job was run, the standard output and error files
        of the queue manager and the queue adapter used.

        Args:
            max_mem_per_proc_mb: Maximum memory per process in megabytes.
            mem_per_proc_increase_mb: Amount of memory to increase the memory per process in megabytes.
            max_master_mem_overhead_mb: Maximum overhead memory for the master process in megabytes.
            master_mem_overhead_increase_mb: Amount of memory to increase the overhead memory for the master process
                in megabytes.
            memory_policy: Policy for the memory (some weird clusters sometimes use the virtual memory to stop jobs
                that overcome some virtual memory limit)
        """
        super().__init__()
        self.max_mem_per_proc_mb = max_mem_per_proc_mb
        self.mem_per_proc_increase_mb = mem_per_proc_increase_mb
        self.max_master_mem_overhead_mb = max_master_mem_overhead_mb
        self.master_mem_overhead_increase_mb = master_mem_overhead_increase_mb
        self.memory_policy = memory_policy
        self.priority = PRIORITY_VERY_LOW

    @property
    def memory_policy(self):
        # Validated through the setter below.
        return self._memory_policy

    @memory_policy.setter
    def memory_policy(self, memory_policy):
        # Fail fast on unsupported policies. (Message fixed: "itshould" typo and
        # missing space before 'or' in the original string.)
        if memory_policy not in ['physical_memory', 'virtual_memory']:
            raise ValueError('Memory policy is "{}" in MemoryController while it should be either "physical_memory" '
                             'or "virtual_memory"'.format(memory_policy))
        self._memory_policy = memory_policy

    def as_dict(self):
        """Return an MSONable dict representation of the controller."""
        return {'@class': self.__class__.__name__,
                '@module': self.__class__.__module__,
                'max_mem_per_proc_mb': self.max_mem_per_proc_mb,
                'mem_per_proc_increase_mb': self.mem_per_proc_increase_mb,
                'max_master_mem_overhead_mb': self.max_master_mem_overhead_mb,
                'master_mem_overhead_increase_mb': self.master_mem_overhead_increase_mb,
                'memory_policy': self.memory_policy
                }

    @classmethod
    def from_dict(cls, d):
        """Rebuild the controller from its dict representation."""
        return cls(max_mem_per_proc_mb=d['max_mem_per_proc_mb'],
                   mem_per_proc_increase_mb=d['mem_per_proc_increase_mb'],
                   max_master_mem_overhead_mb=d['max_master_mem_overhead_mb'],
                   master_mem_overhead_increase_mb=d['master_mem_overhead_increase_mb'],
                   memory_policy=d['memory_policy'])

    @property
    def skip_remaining_handlers(self):
        return True

    @property
    def skip_lower_priority_controllers(self):
        return True

    def process(self, **kwargs):
        """
        Look for memory errors reported by the resource manager and, when found,
        propose increased memory settings on the queue adapter.

        Raises:
            ValueError: if no 'queue_adapter' is present in kwargs.
        """
        # Get the Queue Adapter and the queue errors
        queue_adapter = kwargs.get('queue_adapter', None)
        if queue_adapter is None:
            raise ValueError('No queue adapter passed to the MemoryController')
        queue_errors = self.get_queue_errors(memory_policy=self.memory_policy, **kwargs)
        # Create the Controller Note and the actions
        note = ControllerNote(controller=self)
        actions = {}
        # No errors found
        if not queue_errors:
            note.state = ControllerNote.NOTHING_FOUND
            return note
        # Get the memory error if there is one
        memory_error = None
        master_memory_error = None
        slave_memory_error = None
        for error in queue_errors:
            if isinstance(error, MemoryCancelError):
                logger.debug('found memory error.')
                memory_error = error
            elif isinstance(error, MasterProcessMemoryCancelError):
                logger.debug('found master memory error.')
                master_memory_error = error
            elif isinstance(error, SlaveProcessMemoryCancelError):
                logger.debug('found slave memory error.')
                slave_memory_error = error
        # No memory error found
        if all(err is None for err in [memory_error, master_memory_error, slave_memory_error]):
            note.state = ControllerNote.NOTHING_FOUND
            return note
        # TODO: allow the possibility to have multiple actions here ? If both the master and the slave gets the error ?
        if memory_error or slave_memory_error:
            note.add_problem('Task has been stopped due to memory infringement'
                             '{}'.format('' if memory_error else ' by a slave process'))
            old_mem_per_proc = queue_adapter.mem_per_proc
            if old_mem_per_proc == self.max_mem_per_proc_mb:
                note.state = ControllerNote.ERROR_UNRECOVERABLE
                note.add_problem('Maximum mem_per_proc has been reached, cannot increase further')
                return note
            # Increase, capped at the configured maximum.
            new_mem_per_proc = min(old_mem_per_proc + self.mem_per_proc_increase_mb, self.max_mem_per_proc_mb)
            actions['queue_adapter'] = Action(callable=queue_adapter.__class__.set_mem_per_proc,
                                              mem_mb=new_mem_per_proc)
        if master_memory_error:
            note.add_problem('Task has been stopped due to memory infringement by the master process')
            # BUG FIX: the original read self.queue_adapter.master_mem_overhead, but
            # the adapter is a local taken from kwargs — self has no such attribute,
            # so this branch always raised AttributeError.
            old_mem_overhead = queue_adapter.master_mem_overhead
            if old_mem_overhead == self.max_master_mem_overhead_mb:
                note.state = ControllerNote.ERROR_UNRECOVERABLE
                note.add_problem('Maximum master_mem_overhead has been reached, cannot increase further')
                return note
            new_mem_overhead = min(old_mem_overhead + self.master_mem_overhead_increase_mb,
                                   self.max_master_mem_overhead_mb)
            actions['queue_adapter'] = Action(callable=queue_adapter.__class__.set_master_mem_overhead,
                                              mem_mb=new_mem_overhead)
        # Set the actions to be performed, the state and the type of restart
        note.state = ControllerNote.ERROR_RECOVERABLE
        note.actions = actions
        note.reset_restart()
        return note
class AbinitZenobeSlaveMemoryController(Controller, QueueControllerMixin):
    """
    Controller for memory infringements of the resource manager. The handler should be able to handle the possible
    overhead of the master process. Specific to the Zenobe cluster: the failure is
    detected through the "dapl_conn_rc" marker left in the run/queue output files.
    """
    is_handler = True
    #_controlled_item_types = [ControlledItemType.task_failed()]
    _controlled_item_types = [ControlledItemType.task_completed(), ControlledItemType.task_failed()]

    def __init__(self, max_mem_per_proc_mb=8000, mem_per_proc_increase_mb=1000,
                 memory_policy='physical_memory'):
        """
        Initializes the handler with the directory where the job was run, the standard output and error files
        of the queue manager and the queue adapter used.

        Args:
            max_mem_per_proc_mb: Maximum memory per process in megabytes.
            mem_per_proc_increase_mb: Amount of memory to increase the memory per process in megabytes.
            memory_policy: Policy for the memory (some weird clusters sometimes use the virtual memory to stop jobs
                that overcome some virtual memory limit)
        """
        super().__init__()
        self.max_mem_per_proc_mb = max_mem_per_proc_mb
        self.mem_per_proc_increase_mb = mem_per_proc_increase_mb
        self.memory_policy = memory_policy
        self.priority = PRIORITY_VERY_LOW

    @property
    def memory_policy(self):
        return self._memory_policy

    @memory_policy.setter
    def memory_policy(self, memory_policy):
        # Fail fast on unsupported policies. (Message fixed: missing space
        # before 'or' in the original string.)
        if memory_policy not in ['physical_memory', 'virtual_memory']:
            raise ValueError('Memory policy is "{}" in MemoryController while it should be either "physical_memory" '
                             'or "virtual_memory"'.format(memory_policy))
        self._memory_policy = memory_policy

    def as_dict(self):
        """Return an MSONable dict representation of the controller."""
        return {'@class': self.__class__.__name__,
                '@module': self.__class__.__module__,
                'max_mem_per_proc_mb': self.max_mem_per_proc_mb,
                'mem_per_proc_increase_mb': self.mem_per_proc_increase_mb,
                'memory_policy': self.memory_policy
                }

    @classmethod
    def from_dict(cls, d):
        """Rebuild the controller from its dict representation."""
        return cls(max_mem_per_proc_mb=d['max_mem_per_proc_mb'],
                   mem_per_proc_increase_mb=d['mem_per_proc_increase_mb'],
                   memory_policy=d['memory_policy'])

    @property
    def skip_remaining_handlers(self):
        return True

    @property
    def skip_lower_priority_controllers(self):
        return True

    @staticmethod
    def _file_contains(filepath, marker):
        """Return True if filepath is set, exists, and contains marker on any line."""
        if not filepath or not os.path.exists(filepath):
            return False
        with open(filepath, 'r') as fh:
            return any(marker in line for line in fh)

    def process(self, **kwargs):
        """
        Search run.err, queue.qerr and queue.qout for the "dapl_conn_rc" marker
        and, when found, propose an increased mem_per_proc on the queue adapter.

        Raises:
            ValueError: if no 'queue_adapter' is present in kwargs.
        """
        # Get the Queue Adapter
        queue_adapter = kwargs.get('queue_adapter', None)
        if queue_adapter is None:
            raise ValueError('No queue adapter passed to the MemoryController')
        # BUG FIX: the original guarded the qerr/qout reads with run_err_filepath
        # (the wrong variable) and opened files without existence checks or
        # context managers. Each file is now checked independently and read safely.
        marker = 'dapl_conn_rc'
        found = (self._file_contains(kwargs.get('abinit_err_filepath', None), marker)
                 or self._file_contains(kwargs.get('qerr_filepath', None), marker)
                 or self._file_contains(kwargs.get('qout_filepath', None), marker))
        # Create the Controller Note and the actions
        note = ControllerNote(controller=self)
        actions = {}
        if not found:
            note.state = ControllerNote.NOTHING_FOUND
            return note
        note.add_problem('Task has been stopped due to memory infringement by a slave process '
                         '(dapl_conn_rc error on Zenobe ...)')
        old_mem_per_proc = queue_adapter.mem_per_proc
        if old_mem_per_proc == self.max_mem_per_proc_mb:
            note.state = ControllerNote.ERROR_UNRECOVERABLE
            note.add_problem('Maximum mem_per_proc has been reached, cannot increase further')
            return note
        # Increase, capped at the configured maximum.
        new_mem_per_proc = min(old_mem_per_proc + self.mem_per_proc_increase_mb, self.max_mem_per_proc_mb)
        actions['queue_adapter'] = Action(callable=queue_adapter.__class__.set_mem_per_proc,
                                          mem_mb=new_mem_per_proc)
        # Set the actions to be performed, the state and the type of restart
        note.state = ControllerNote.ERROR_RECOVERABLE
        note.actions = actions
        note.reset_restart()
        return note
# def process(self, **kwargs):
# # Create the Controller Note
# note = ControllerNote(controller=self)
# # Get the file paths for the stderr and stdout of the resource manager system, as well as the queue_adapter
# qerr_filepath = kwargs.get('qerr_filepath', None)
# qout_filepath = kwargs.get('qout_filepath', None)
# queue_adapter = kwargs.get('queue_adapter', None)
# if 'queue_adapter' is None:
# raise ValueError('WalltimeController should have access to the queue_adapter')
# if 'qerr_filepath' is None:
# raise ValueError('WalltimeController should have access to the qerr_filepath')
# if 'qout_filepath' is None:
# raise ValueError('WalltimeController should have access to the qout_filepath')
# # Initialize the actions for everything that is passed to kwargs
# actions = {}
# # Analyze the stderr and stdout files of the resource manager system.
# qerr_info = None
# qout_info = None
# if qerr_filepath is not None and os.path.exists(qerr_filepath):
# with open(qerr_filepath, "r") as f:
# qerr_info = f.read()
# if qout_filepath is not None and os.path.exists(qout_filepath):
# with open(qout_filepath, "r") as f:
# qout_info = f.read()
# if qerr_info or qout_info:
# from pymatgen.io.abinit.scheduler_error_parsers import get_parser
# qtype = queue_adapter.QTYPE
# scheduler_parser = get_parser(qtype, err_file=qerr_filepath,
# out_file=qout_filepath)
#
# if scheduler_parser is None:
# raise ValueError('Cannot find scheduler_parser for qtype {}'.format(qtype))
#
# scheduler_parser.parse()
# queue_errors = scheduler_parser.errors
#
# # Get the timelimit error if there is one
# timelimit_error = None
# for error in queue_errors:
# if isinstance(error, TimeCancelError):
# logger.debug('found timelimit error.')
# timelimit_error = error
# if timelimit_error is None:
# note.state = ControllerNote.NOTHING_FOUND
# return note
#
# if self.max_timelimit is None:
# max_timelimit = queue_adapter.timelimit_hard
# else:
# max_timelimit = self.max_timelimit
# # When timelimit_increase is not set, automatically take a tenth of the hard timelimit of the queue
# if self.timelimit_increase is None:
# timelimit_increase = queue_adapter.timelimit_hard / 10
# else:
# timelimit_increase = self.timelimit_increase
# old_timelimit = queue_adapter.timelimit
# if old_timelimit == max_timelimit:
# # raise ValueError('Cannot increase beyond maximum timelimit ({:d} seconds) set in '
# # 'WalltimeController. Hard time limit of '
# # 'the queue is {:d} seconds'.format(max_timelimit,
# # queue_adapter.timelimit_hard))
# note.state = ControllerNote.ERROR_UNRECOVERABLE
# return note
# new_timelimit = old_timelimit + timelimit_increase
# # If the new timelimit exceeds the max timelimit, just put it to the max timelimit
# if new_timelimit > max_timelimit:
# new_timelimit = max_timelimit
# actions['queue_adapter'] = Action(callable=queue_adapter.__class__.set_timelimit,
# timelimit=new_timelimit)
# note.state = ControllerNote.ERROR_RECOVERABLE
# else:
# note.state = ControllerNote.NOTHING_FOUND
# note.actions = actions
# note.simple_restart()
# return note
class UltimateMemoryController(Controller, QueueControllerMixin):
    """
    Controller for memory infringements of the resource manager. The handler should be able to handle the possible
    overhead of the master process.
    """
    is_handler = True
    #_controlled_item_types = [ControlledItemType.task_failed()]
    _controlled_item_types = [ControlledItemType.task_completed(), ControlledItemType.task_failed()]
    _only_unfinalized = True

    def __init__(self, max_mem_per_proc_mb=8000, mem_per_proc_increase_mb=1000,
                 max_master_mem_overhead_mb=8000, master_mem_overhead_increase_mb=1000,
                 memory_policy='physical_memory'):
        """
        Initializes the handler with the directory where the job was run, the standard output and error files
        of the queue manager and the queue adapter used.

        Args:
            max_mem_per_proc_mb: Maximum memory per process in megabytes.
            mem_per_proc_increase_mb: Amount of memory to increase the memory per process in megabytes.
            max_master_mem_overhead_mb: Maximum overhead memory for the master process in megabytes.
            master_mem_overhead_increase_mb: Amount of memory to increase the overhead memory for the master process
                in megabytes.
            memory_policy: Policy for the memory (some weird clusters sometimes use the virtual memory to stop jobs
                that overcome some virtual memory limit)
        """
        super().__init__()
        self.max_mem_per_proc_mb = max_mem_per_proc_mb
        self.mem_per_proc_increase_mb = mem_per_proc_increase_mb
        self.max_master_mem_overhead_mb = max_master_mem_overhead_mb
        self.master_mem_overhead_increase_mb = master_mem_overhead_increase_mb
        self.memory_policy = memory_policy
        self.priority = PRIORITY_LOWEST

    @property
    def memory_policy(self):
        # Validated through the setter below.
        return self._memory_policy

    @memory_policy.setter
    def memory_policy(self, memory_policy):
        # Only these two policies are understood by the scheduler parsers.
        if memory_policy not in ('physical_memory', 'virtual_memory'):
            raise ValueError('Memory policy is "{}" in UltimateMemoryController while it should be either '
                             '"physical_memory" or "virtual_memory"'.format(memory_policy))
        self._memory_policy = memory_policy

    def as_dict(self):
        """Return an MSONable dict representation of the controller."""
        return {
            '@class': self.__class__.__name__,
            '@module': self.__class__.__module__,
            'max_mem_per_proc_mb': self.max_mem_per_proc_mb,
            'mem_per_proc_increase_mb': self.mem_per_proc_increase_mb,
            'max_master_mem_overhead_mb': self.max_master_mem_overhead_mb,
            'master_mem_overhead_increase_mb': self.master_mem_overhead_increase_mb,
            'memory_policy': self.memory_policy,
        }

    @classmethod
    def from_dict(cls, d):
        """Rebuild the controller from its dict representation."""
        return cls(max_mem_per_proc_mb=d['max_mem_per_proc_mb'],
                   mem_per_proc_increase_mb=d['mem_per_proc_increase_mb'],
                   max_master_mem_overhead_mb=d['max_master_mem_overhead_mb'],
                   master_mem_overhead_increase_mb=d['master_mem_overhead_increase_mb'],
                   memory_policy=d['memory_policy'])

    @property
    def skip_remaining_handlers(self):
        return True

    @property
    def skip_lower_priority_controllers(self):
        return True

    def process(self, **kwargs):
        """Last-resort handler: assume an otherwise unexplained failure comes from
        a slave memory infringement and bump mem_per_proc on the adapter."""
        adapter = kwargs.get('queue_adapter')
        if adapter is None:
            raise ValueError('No queue adapter passed to the UltimateMemoryController')
        scheduler_errors = self.get_queue_errors(memory_policy=self.memory_policy, **kwargs)

        note = ControllerNote(controller=self)

        # Any recognizable scheduler error should have been handled by a
        # higher-priority controller before reaching this one.
        if scheduler_errors:
            raise ValueError('Error should have been caught by another controller ...')

        note.add_problem('Task has been stopped and we assume it is because of memory infringement by a slave process')
        current_mem = adapter.mem_per_proc
        if current_mem == self.max_mem_per_proc_mb:
            note.state = ControllerNote.ERROR_UNRECOVERABLE
            note.add_problem('Maximum mem_per_proc has been reached, cannot increase further')
            return note

        # Increase the memory per process, capped at the configured maximum.
        proposed_mem = min(current_mem + self.mem_per_proc_increase_mb, self.max_mem_per_proc_mb)
        note.actions = {'queue_adapter': Action(callable=adapter.__class__.set_mem_per_proc,
                                                mem_mb=proposed_mem)}
        note.state = ControllerNote.ERROR_RECOVERABLE
        note.reset_restart()
        return note
class SimpleValidatorController(Controller):
    """
    Simple validator controller to be applied after all other controllers (PRIORITY_LOWEST).
    This validator controller can be used when no "real" validator exists, but just handlers/monitors
    and that we suppose that if nothing is found by the handlers/monitors, then it means that it is ok.
    """
    can_validate = True
    _controlled_item_types = [ControlledItemType.task_completed()]

    def __init__(self):
        super().__init__()
        # Runs after every other controller.
        self.priority = PRIORITY_LOWEST

    @classmethod
    def from_dict(cls, d):
        """Rebuild the controller (no state to restore)."""
        return cls()

    def as_dict(self):
        """Return an MSONable dict representation (the controller is stateless)."""
        return {
            '@class': self.__class__.__name__,
            '@module': self.__class__.__module__,
        }

    @property
    def skip_remaining_handlers(self):
        return True

    @property
    def skip_lower_priority_controllers(self):
        return True

    @property
    def validated(self):
        return True

    def process(self, **kwargs):
        # Nothing to inspect: reaching this point means no other controller
        # complained, so the task is declared valid.
        note = ControllerNote(controller=self)
        note.state = ControllerNote.EVERYTHING_OK
        note.is_valid = True
        return note
class VaspXMLValidatorController(Controller):
    """
    Checks that a valid vasprun.xml was generated
    """
    can_validate = True
    _controlled_item_types = [ControlledItemType.task_completed()]

    def __init__(self):
        super().__init__()
        # Runs after every other controller.
        self.priority = PRIORITY_LOWEST

    @classmethod
    def from_dict(cls, d):
        """Rebuild the controller (no state to restore)."""
        return cls()

    def as_dict(self):
        """Return an MSONable dict representation (the controller is stateless)."""
        return {
            '@class': self.__class__.__name__,
            '@module': self.__class__.__module__,
        }

    @property
    def skip_remaining_handlers(self):
        return True

    @property
    def skip_lower_priority_controllers(self):
        return True

    @property
    def validated(self):
        return True

    def process(self, **kwargs):
        """Validate the run by attempting to parse the vasprun.xml file."""
        if 'vasprun_xml_file' not in kwargs:
            raise ValueError('kwarg "vasprun_xml_file" is required to validate vasprun.xml file')
        note = ControllerNote(controller=self)
        try:
            # Parsing succeeds only on a complete, well-formed vasprun.xml.
            Vasprun(kwargs['vasprun_xml_file'])
        except Exception:
            note.state = ControllerNote.ERROR_NOFIX
            note.is_valid = False
        else:
            note.state = ControllerNote.EVERYTHING_OK
            note.is_valid = True
        return note
class VaspNEBValidatorController(Controller):
    """
    Checks that a valid vasprun.xml was generated
    """
    can_validate = True
    _controlled_item_types = [ControlledItemType.task_completed()]

    def __init__(self):
        super().__init__()
        # Runs after every other controller.
        self.priority = PRIORITY_LOWEST

    @classmethod
    def from_dict(cls, d):
        """Rebuild the controller (no state to restore)."""
        return cls()

    def as_dict(self):
        """Return an MSONable dict representation (the controller is stateless)."""
        return {
            '@class': self.__class__.__name__,
            '@module': self.__class__.__module__,
        }

    @property
    def skip_remaining_handlers(self):
        return True

    @property
    def skip_lower_priority_controllers(self):
        return True

    @property
    def validated(self):
        return True

    def process(self, **kwargs):
        """Validate an NEB run by parsing the run directory with NEBAnalysis."""
        if 'run_dir' not in kwargs:
            raise ValueError('kwarg "run_dir" is required to validate NEB vasp calculations')
        if 'additional_vasp_wf_info' not in kwargs:
            raise ValueError('kwarg "additional_vasp_wf_info" is required to validate NEB vasp calculations')
        extra_info = kwargs['additional_vasp_wf_info']
        if 'terminal_start_run_dir' not in extra_info:
            raise ValueError('"terminal_start_run_dir" has to be in additional_vasp_wf_info is required '
                             'to validate NEB vasp calculations')
        if 'terminal_end_run_dir' not in extra_info:
            raise ValueError('"terminal_end_run_dir" has to be in additional_vasp_wf_info is required '
                             'to validate NEB vasp calculations')

        note = ControllerNote(controller=self)
        terminal_dirs = (extra_info['terminal_start_run_dir'], extra_info['terminal_end_run_dir'])
        try:
            # Parsing succeeds only for a complete, consistent NEB run.
            NEBAnalysis.from_dir(kwargs['run_dir'], relaxation_dirs=terminal_dirs)
        except Exception:
            note.state = ControllerNote.ERROR_NOFIX
            note.is_valid = False
        else:
            note.state = ControllerNote.EVERYTHING_OK
            note.is_valid = True
        return note
|
import attr
from cfme.infrastructure.config_management.config_systems import ConfigSystemsCollection
from cfme.modeling.base import BaseCollection
from cfme.modeling.base import BaseEntity
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.pretty import Pretty
from cfme.utils.wait import wait_for
@attr.s
class ConfigProfile(BaseEntity, Pretty):
    """Configuration profile object (foreman-side hostgroup)

    Args:
        name: Name of the profile
        manager: ConfigManager object which this profile is bound to
    """
    pretty_attrs = ['name', 'manager']

    name = attr.ib()
    manager = attr.ib()

    _collections = {"config_systems": ConfigSystemsCollection}

    @property
    def type(self):
        """Human-readable item type; Tower managers call profiles inventory groups."""
        if self.manager.type == "ansible_tower":
            return "Inventory Group"
        return "Configuration Profile"

    @property
    def config_systems(self):
        """Returns 'ConfigSystem' objects that are active under this profile"""
        return self.collections.config_systems.all()
@attr.s
class ConfigProfilesCollection(BaseCollection):
    """ Collection of ConfigProfiles (nested collection of ConfigManager) """
    ENTITY = ConfigProfile

    def all(self):
        """Returns 'ConfigProfile' configuration profiles (hostgroups) available on this manager"""
        view = navigate_to(self.parent, "Details")
        # The elements table may take a while to appear; keep refreshing until it does.
        wait_for(lambda: view.entities.elements.is_displayed, fail_func=view.toolbar.refresh.click,
                 handle_exception=True, num_sec=60, delay=5)

        is_tower = self.parent.ui_type == 'Ansible Tower'
        profiles = []
        for row in view.entities.elements:
            # Tower shows the profile name directly; Foreman uses the description column.
            name = row.name.text if is_tower else row.description.text
            # Skip the pseudo-profile that groups unassigned systems.
            if 'unassigned' in name.lower():
                continue
            profiles.append(self.instantiate(name=name, manager=self.parent))
        return profiles
|
"""A class to help start/stop the PyWebSocket server used by layout tests."""
import logging
import os
import sys
import time
from webkitpy.layout_tests.servers import http_server
from webkitpy.layout_tests.servers import http_server_base
_log = logging.getLogger(__name__)
_WS_LOG_NAME = 'pywebsocket.ws.log'
_WSS_LOG_NAME = 'pywebsocket.wss.log'
_DEFAULT_WS_PORT = 8880
_DEFAULT_WSS_PORT = 9323
class PyWebSocket(http_server.Lighttpd):
    """Starts and stops the standalone mod_pywebsocket server used by the
    layout tests, optionally over TLS."""

    def __init__(self, port_obj, output_dir, port=_DEFAULT_WS_PORT,
                 root=None, use_tls=False,
                 private_key=None, certificate=None, ca_certificate=None,
                 pidfile=None):
        """Args:
          output_dir: the absolute path to the layout test result directory
        """
        # NOTE(review): the parent class is always handed _DEFAULT_WS_PORT
        # here rather than the caller-supplied `port` -- confirm that this
        # mismatch is intentional before changing it.
        http_server.Lighttpd.__init__(self, port_obj, output_dir,
                                      port=_DEFAULT_WS_PORT,
                                      root=root)
        self._output_dir = output_dir
        self._pid_file = pidfile
        self._process = None

        self._port = port
        self._root = root
        self._use_tls = use_tls

        # Give the secure instance its own name so its pid and log files do
        # not collide with a concurrently running plain-ws instance.
        self._name = 'pywebsocket'
        if self._use_tls:
            self._name = 'pywebsocket_secure'

        # Fall back to the shared self-signed PEM when no key/cert is given.
        if private_key:
            self._private_key = private_key
        else:
            self._private_key = self._pem_file
        if certificate:
            self._certificate = certificate
        else:
            self._certificate = self._pem_file
        self._ca_certificate = ca_certificate
        if self._port:
            self._port = int(self._port)
        self._wsin = None
        self._wsout = None
        self._mappings = [{'port': self._port}]
        if not self._pid_file:
            self._pid_file = self._filesystem.join(self._runtime_path, '%s.pid' % self._name)

        # Webkit tests
        # FIXME: This is the wrong way to detect if we're in Chrome vs. WebKit!
        # The port objects are supposed to abstract this.
        if self._root:
            self._layout_tests = self._filesystem.abspath(self._root)
            self._web_socket_tests = self._filesystem.abspath(self._filesystem.join(self._root, 'http', 'tests', 'websocket', 'tests'))
        else:
            try:
                self._layout_tests = self._port_obj.layout_tests_dir()
                self._web_socket_tests = self._filesystem.join(self._layout_tests, 'http', 'tests', 'websocket', 'tests')
            except Exception:
                # Was a bare "except:", which also swallowed SystemExit and
                # KeyboardInterrupt. Only genuine errors should disable the
                # websocket tests; also set _layout_tests so later reads fail
                # predictably instead of raising AttributeError.
                self._layout_tests = None
                self._web_socket_tests = None

        if self._use_tls:
            self._log_prefix = _WSS_LOG_NAME
        else:
            self._log_prefix = _WS_LOG_NAME

    def _prepare_config(self):
        """Build the command line and environment for the pywebsocket process."""
        log_file_name = self._log_prefix
        # FIXME: Doesn't Executive have a devnull, so that we don't have to use os.devnull directly?
        self._wsin = open(os.devnull, 'r')

        error_log = self._filesystem.join(self._output_dir, log_file_name + "-err.txt")
        output_log = self._filesystem.join(self._output_dir, log_file_name + "-out.txt")
        self._wsout = self._filesystem.open_text_file_for_writing(output_log)

        from webkitpy.thirdparty import mod_pywebsocket
        python_interp = sys.executable
        # FIXME: Use self._filesystem.path_to_module(self.__module__) instead of __file__
        # I think this is trying to get the chrome directory? Doesn't the port object know that?
        pywebsocket_base = self._filesystem.join(self._filesystem.dirname(self._filesystem.dirname(self._filesystem.dirname(self._filesystem.abspath(__file__)))), 'thirdparty')
        pywebsocket_script = self._filesystem.join(pywebsocket_base, 'mod_pywebsocket', 'standalone.py')
        start_cmd = [
            python_interp, '-u', pywebsocket_script,
            '--server-host', 'localhost',
            '--port', str(self._port),
            # FIXME: Don't we have a self._port_obj.layout_test_path?
            '--document-root', self._filesystem.join(self._layout_tests, 'http', 'tests'),
            '--scan-dir', self._web_socket_tests,
            '--cgi-paths', '/websocket/tests',
            '--log-file', error_log,
        ]

        # A handler map lets tests route websocket paths to custom handlers.
        handler_map_file = self._filesystem.join(self._web_socket_tests, 'handler_map.txt')
        if self._filesystem.exists(handler_map_file):
            _log.debug('Using handler_map_file: %s' % handler_map_file)
            start_cmd.append('--websock-handlers-map-file')
            start_cmd.append(handler_map_file)
        else:
            _log.warning('No handler_map_file found')

        if self._use_tls:
            start_cmd.extend(['-t', '-k', self._private_key,
                              '-c', self._certificate])
            if self._ca_certificate:
                start_cmd.append('--tls-client-ca')
                start_cmd.append(self._ca_certificate)

        self._start_cmd = start_cmd
        server_name = self._filesystem.basename(pywebsocket_script)
        self._env = self._port_obj.setup_environ_for_server(server_name)
        # The bundled mod_pywebsocket must win over any system-installed copy.
        self._env['PYTHONPATH'] = (pywebsocket_base + os.path.pathsep + self._env.get('PYTHONPATH', ''))

    def _remove_stale_logs(self):
        """Best-effort cleanup of log files left over from a previous run."""
        try:
            self._remove_log_files(self._output_dir, self._log_prefix)
        except OSError as e:
            # "except OSError, e" updated to the `as` form (Python 2.6+/3).
            _log.warning('Failed to remove stale %s log files: %s' % (self._name, str(e)))

    def _spawn_process(self):
        """Launch the server process and record its pid; returns the pid."""
        _log.debug('Starting %s server, cmd="%s"' % (self._name, self._start_cmd))
        self._process = self._executive.popen(self._start_cmd, env=self._env, shell=False, stdin=self._wsin, stdout=self._wsout, stderr=self._executive.STDOUT)
        self._filesystem.write_text_file(self._pid_file, str(self._process.pid))
        return self._process.pid

    def _stop_running_server(self):
        """Stop the server, then close the redirected stdin/stdout handles."""
        super(PyWebSocket, self)._stop_running_server()

        if self._wsin:
            self._wsin.close()
            self._wsin = None
        if self._wsout:
            self._wsout.close()
            self._wsout = None
|
# py2exe packaging script: freezes buildbot into standalone Windows
# executables (the normal console script plus a Windows-service wrapper).
import sys
import os
import tempfile
import shutil
from os.path import dirname, join, abspath, splitext

# Locate the buildbot source tree relative to this script's location.
this_dir = abspath(dirname(__file__))
bb_root_dir = abspath(join(this_dir, "..", ".."))

from distutils.core import setup

# py2exe only bundles modules it can discover statically; these packages are
# loaded dynamically at runtime, so enumerate their submodules by hand.
includes = []
for package in ["buildbot.changes", "buildbot.process", "buildbot.status"]:
    __import__(package)
    p = sys.modules[package]
    for fname in os.listdir(p.__path__[0]):
        base, ext = splitext(fname)
        # Skip private modules (leading underscore) and non-Python files.
        if not fname.startswith("_") and ext == ".py":
            includes.append(p.__name__ + "." + base)
# Modules that are referenced only indirectly (reactor, service glue, ...).
includes.extend("""
    buildbot.scheduler
    buildbot.slave.bot
    buildbot.master
    twisted.internet.win32eventreactor
    twisted.web.resource""".split())
# py2exe expects its "includes" option as a comma-separated string.
includes = ",".join(includes)

py2exe_options = {"bundle_files": 1,
                  "includes": includes,
                  }

# Target: the regular 'buildbot' command-line entry point.
buildbot_target = {
    "script": join(bb_root_dir, "bin", "buildbot"),
}
# Target: the Windows-service wrapper, which parses its own command line.
service_target = {
    "modules": ["buildbot_service"],
    "cmdline_style": "custom",
}

# servicemanager's compiled event-message resource must ship alongside the
# executable so the Windows event log can render service messages; copy it
# to a temp file that py2exe can pick up as a data file.
import servicemanager
msg_file = join(tempfile.gettempdir(), "buildbot.msg")
shutil.copy(servicemanager.__file__, msg_file)

data_files = [
    ["", [msg_file]],
    ["", [join(bb_root_dir, "buildbot", "status", "web", "default.css")]],
    ["", [join(bb_root_dir, "buildbot", "buildbot.png")]],
]

try:
    setup(name="buildbot",
          # The buildbot script as a normal executable
          console=[buildbot_target],
          service=[service_target],
          options={'py2exe': py2exe_options},
          data_files = data_files,
          zipfile = "buildbot.library", # 'library.zip' invites trouble :)
          )
finally:
    # Always delete the temp copy, even if setup() fails.
    os.unlink(msg_file)
|
# Add-on registration metadata; Blender parses this dict to list the add-on
# in the preferences UI without importing the rest of the module.
bl_info = {"name": "blendmaxwell",
           "description": "Maxwell Render exporter for Blender",
           "author": "Jakub Uhlik",
           "version": (0, 4, 5),
           "blender": (2, 77, 0),
           "location": "Info header > render engine menu",
           "warning": "Alpha",
           "wiki_url": "https://github.com/uhlik/blendmaxwell/wiki",
           "tracker_url": "https://github.com/uhlik/blendmaxwell/issues",
           "category": "Render", }
# Standard Blender add-on reload idiom: on a script reload "bpy" is already
# bound in this namespace, so refresh every submodule in place rather than
# importing it a second time (which would keep the stale module objects).
if "bpy" in locals():
    import imp
    imp.reload(log)
    imp.reload(system)
    imp.reload(rfbin)
    imp.reload(mxs)
    imp.reload(maths)
    imp.reload(utils)
    imp.reload(engine)
    imp.reload(props)
    imp.reload(ops)
    imp.reload(ui)
    imp.reload(export)
    imp.reload(impmxs)
    imp.reload(tmpio)
else:
    # First import: pull in all submodules of the add-on package.
    from . import log
    from . import system
    from . import rfbin
    from . import mxs
    from . import maths
    from . import utils
    from . import engine
    from . import props
    from . import ops
    from . import ui
    from . import export
    from . import impmxs
    from . import tmpio
import os
import platform
import math
import bpy
from bpy.props import StringProperty, EnumProperty, BoolProperty
class MaxwellRenderPreferences(bpy.types.AddonPreferences):
    """Add-on preferences: tool paths, temp-file handling and default types."""
    bl_idname = __name__

    python_path = StringProperty(
        name="Python Path", default="", subtype='DIR_PATH', description="",
    )
    maxwell_path = StringProperty(
        name="Maxwell Render Directory", default="", subtype='DIR_PATH', description="",
    )
    advanced = BoolProperty(
        name="Advanced Settings", default=False,
    )
    tmp_dir_use = EnumProperty(
        name="Temp Files",
        items=[('BLEND_DIRECTORY', "Blend File Directory (Default)", ""),
               ('SPECIFIC_DIRECTORY', "Specific Directory", ""), ],
        default='BLEND_DIRECTORY', description="",
    )
    tmp_dir_path = StringProperty(
        name="Temp Files Directory", default="//", subtype='DIR_PATH', description="",
    )
    default_new_world_type = EnumProperty(
        name="Default World Type",
        items=[('NONE', "None", ""),
               ('PHYSICAL_SKY', "Physical Sky", ""),
               ('IMAGE_BASED', "Image Based", "")],
        default='PHYSICAL_SKY',
    )
    default_new_material_type = EnumProperty(
        name="Default Material Type",
        items=[('REFERENCE', "Reference", ""), ('CUSTOM', "Custom", ""),
               ('EMITTER', "Emitter", ""), ('AGS', "AGS", ""),
               ('OPAQUE', "Opaque", ""), ('TRANSPARENT', "Transparent", ""),
               ('METAL', "Metal", ""), ('TRANSLUCENT', "Translucent", ""),
               ('CARPAINT', "Carpaint", ""), ('HAIR', "Hair", ""), ],
        default='CUSTOM',
    )
    default_new_particles_type = EnumProperty(
        name="Default Particles Type",
        items=[('HAIR', "Hair", ""), ('PARTICLES', "Particles", ""),
               ('CLONER', "Cloner", ""), ('PARTICLE_INSTANCES', "Instances", ""),
               ('NONE', "None", "")],
        default='NONE',
    )

    def draw(self, context):
        layout = self.layout
        # The external python interpreter path is only configurable on OS X.
        if platform.system() == 'Darwin':
            layout.prop(self, "python_path")
        layout.prop(self, "maxwell_path")
        layout.label("Default Types:")
        row = layout.row()
        row.prop(self, "default_new_world_type", text="Environment", )
        row.prop(self, "default_new_material_type", text="Material", )
        row.prop(self, "default_new_particles_type", text="Particles", )
        layout.prop(self, 'advanced')
        if self.advanced:
            row = layout.row()
            split = row.split(percentage=0.333)
            split.prop(self, "tmp_dir_use", )
            split = split.split(percentage=1.0)
            split.prop(self, "tmp_dir_path", )
            # The specific-directory path only applies when that mode is chosen.
            if self.tmp_dir_use != 'SPECIFIC_DIRECTORY':
                split.enabled = False
def get_selected_panels():
    """Return the stock Blender UI panels that should stay visible for this engine."""
    # Panels are matched either by class name or by the module defining them.
    wanted = ['DATA_PT_camera_display', 'bl_ui.properties_data_mesh', 'bl_ui.properties_particle',
              'bl_ui.properties_render_layer', 'bl_ui.properties_texture', 'bl_ui.properties_scene',
              'bl_ui.properties_world', ]
    # Panels that would match above but are not applicable to Maxwell Render.
    rejected = ['RENDERLAYER_PT_layer_options', 'RENDERLAYER_PT_layer_passes', 'RENDERLAYER_UL_renderlayers',
                'SCENE_PT_color_management', 'RENDERLAYER_PT_views', 'WORLD_PT_ambient_occlusion', 'WORLD_PT_custom_props',
                'WORLD_PT_environment_lighting', 'WORLD_PT_gather', 'WORLD_PT_indirect_lighting', 'WORLD_PT_mist',
                'WORLD_PT_preview', 'WORLD_PT_world', 'WORLD_PT_context_world', 'PARTICLE_PT_context_particles', ]
    selected = []
    for panel in get_all_panels():
        matches = panel.__name__ in wanted or panel.__module__ in wanted
        excluded = panel.__name__ in rejected or panel.__module__ in rejected
        if matches and not excluded:
            selected.append(panel)
    return selected
def get_all_panels():
    """Collect every registered bpy type that declares BLENDER_RENDER compatibility."""
    compatible = []
    for type_name in dir(bpy.types):
        candidate = getattr(bpy.types, type_name)
        # Types without COMPAT_ENGINES are not panels we care about; the
        # getattr default keeps the membership test a single expression.
        if 'BLENDER_RENDER' in getattr(candidate, 'COMPAT_ENGINES', ()):
            compatible.append(candidate)
    return compatible
def get_default_presets():
def get_prefs():
a = os.path.split(os.path.split(os.path.realpath(__file__))[0])[1]
p = bpy.context.user_preferences.addons[a].preferences
return p
presets = {
'procedural_textures': {
'subdirs': False,
'defines': [],
'presets': {},
},
'exposure': {
'subdirs': False,
'defines': [
"import bpy",
"m = bpy.context.camera.maxwell_render",
"m.lock_exposure = False",
],
'presets': {
'dark_interior': {'shutter': 10.0, 'fstop': 5.6, },
'bright_interior': {'shutter': 50.0, 'fstop': 5.6, },
'overcast_exterior': {'shutter': 200.0, 'fstop': 5.6, },
'bright_exterior': {'shutter': 500.0, 'fstop': 5.6, },
'night_exterior': {'shutter': 20.0, 'fstop': 5.6, },
}
},
'environment': {
'subdirs': False,
'defines': [
"import bpy",
"m = bpy.context.world.maxwell_render",
],
'presets': {
'blue_clear_darker_horizon_mihai': {
'env_type': 'PHYSICAL_SKY', 'sky_type': 'PHYSICAL', 'sky_use_preset': False, 'sky_preset': '',
'sky_intensity': 1.0, 'sky_planet_refl': 8.0, 'sky_ozone': 1.3, 'sky_water': 0.1, 'sky_turbidity_coeff': 0.002,
'sky_wavelength_exp': 0.16, 'sky_reflectance': 50.0, 'sky_asymmetry': 0.0,
'sun_type': 'PHYSICAL', 'sun_power': 1.0, 'sun_radius_factor': 1.0, 'sun_color': (1.0, 1.0, 1.0), 'sun_temp': 5777.0,
'sun_location_type': 'LATLONG', 'sun_latlong_lat': 43.678, 'sun_latlong_lon': 3.617, 'sun_date': '02.06.2015', 'sun_time': '17:00',
'sun_latlong_gmt': 1.0, 'sun_latlong_gmt_auto': False, 'sun_latlong_ground_rotation': math.radians(187.147),
'sun_angles_zenith': math.radians(45.0), 'sun_angles_azimuth': math.radians(45.0),
'sun_dir_x': 0.0, 'sun_dir_y': 0.0, 'sun_dir_z': 1.0,
'ibl_intensity': 1.0, 'ibl_interpolation': False, 'ibl_screen_mapping': False,
'ibl_bg_type': 'HDR_IMAGE', 'ibl_bg_map': "", 'ibl_bg_intensity': 1.0, 'ibl_bg_scale_x': 1.0, 'ibl_bg_scale_y': 1.0, 'ibl_bg_offset_x': 0.0, 'ibl_bg_offset_y': 0.0,
'ibl_refl_type': 'SAME_AS_BG', 'ibl_refl_map': "", 'ibl_refl_intensity': 1.0, 'ibl_refl_scale_x': 1.0, 'ibl_refl_scale_y': 1.0, 'ibl_refl_offset_x': 0.0, 'ibl_refl_offset_y': 0.0,
'ibl_refr_type': 'SAME_AS_BG', 'ibl_refr_map': "", 'ibl_refr_intensity': 1.0, 'ibl_refr_scale_x': 1.0, 'ibl_refr_scale_y': 1.0, 'ibl_refr_offset_x': 0.0, 'ibl_refr_offset_y': 0.0,
'ibl_illum_type': 'SAME_AS_BG', 'ibl_illum_map': "", 'ibl_illum_intensity': 1.0, 'ibl_illum_scale_x': 1.0, 'ibl_illum_scale_y': 1.0, 'ibl_illum_offset_x': 0.0, 'ibl_illum_offset_y': 0.0,
},
'clear_sky_tom': {
'env_type': 'PHYSICAL_SKY', 'sky_type': 'PHYSICAL', 'sky_use_preset': False, 'sky_preset': '',
'sky_intensity': 1.0, 'sky_planet_refl': 1.0, 'sky_ozone': 1.0, 'sky_water': 2.0, 'sky_turbidity_coeff': 0.04,
'sky_wavelength_exp': 1.2, 'sky_reflectance': 20.0, 'sky_asymmetry': 0.9,
'sun_type': 'PHYSICAL', 'sun_power': 1.0, 'sun_radius_factor': 1.0, 'sun_color': (1.0, 1.0, 1.0), 'sun_temp': 5777.0,
'sun_location_type': 'LATLONG', 'sun_latlong_lat': 41.017, 'sun_latlong_lon': 28.967, 'sun_date': '23.05.2015', 'sun_time': '09:30',
'sun_latlong_gmt': 2.0, 'sun_latlong_gmt_auto': False, 'sun_latlong_ground_rotation': math.radians(0.0),
'sun_angles_zenith': math.radians(45.0), 'sun_angles_azimuth': math.radians(45.0),
'sun_dir_x': 0.0, 'sun_dir_y': 0.0, 'sun_dir_z': 1.0,
'ibl_intensity': 1.0, 'ibl_interpolation': False, 'ibl_screen_mapping': False,
'ibl_bg_type': 'HDR_IMAGE', 'ibl_bg_map': "", 'ibl_bg_intensity': 1.0, 'ibl_bg_scale_x': 1.0, 'ibl_bg_scale_y': 1.0, 'ibl_bg_offset_x': 0.0, 'ibl_bg_offset_y': 0.0,
'ibl_refl_type': 'SAME_AS_BG', 'ibl_refl_map': "", 'ibl_refl_intensity': 1.0, 'ibl_refl_scale_x': 1.0, 'ibl_refl_scale_y': 1.0, 'ibl_refl_offset_x': 0.0, 'ibl_refl_offset_y': 0.0,
'ibl_refr_type': 'SAME_AS_BG', 'ibl_refr_map': "", 'ibl_refr_intensity': 1.0, 'ibl_refr_scale_x': 1.0, 'ibl_refr_scale_y': 1.0, 'ibl_refr_offset_x': 0.0, 'ibl_refr_offset_y': 0.0,
'ibl_illum_type': 'SAME_AS_BG', 'ibl_illum_map': "", 'ibl_illum_intensity': 1.0, 'ibl_illum_scale_x': 1.0, 'ibl_illum_scale_y': 1.0, 'ibl_illum_offset_x': 0.0, 'ibl_illum_offset_y': 0.0,
},
'default': {
'env_type': 'PHYSICAL_SKY', 'sky_type': 'PHYSICAL', 'sky_use_preset': False, 'sky_preset': '',
'sky_intensity': 1.0, 'sky_planet_refl': 25.0, 'sky_ozone': 0.4, 'sky_water': 2.0, 'sky_turbidity_coeff': 0.04,
'sky_wavelength_exp': 1.2, 'sky_reflectance': 80.0, 'sky_asymmetry': 0.7,
'sun_type': 'PHYSICAL', 'sun_power': 1.0, 'sun_radius_factor': 1.0, 'sun_color': (1.0, 1.0, 1.0), 'sun_temp': 5777.0,
'sun_location_type': 'LATLONG', 'sun_latlong_lat': 40.0, 'sun_latlong_lon': 10.0, 'sun_date': '18.07.2015', 'sun_time': '12:30',
'sun_latlong_gmt': 0.0, 'sun_latlong_gmt_auto': False, 'sun_latlong_ground_rotation': math.radians(0.0),
'sun_angles_zenith': math.radians(45.0), 'sun_angles_azimuth': math.radians(45.0),
'sun_dir_x': 0.0, 'sun_dir_y': 0.0, 'sun_dir_z': 1.0,
'ibl_intensity': 1.0, 'ibl_interpolation': False, 'ibl_screen_mapping': False,
'ibl_bg_type': 'HDR_IMAGE', 'ibl_bg_map': "", 'ibl_bg_intensity': 1.0, 'ibl_bg_scale_x': 1.0, 'ibl_bg_scale_y': 1.0, 'ibl_bg_offset_x': 0.0, 'ibl_bg_offset_y': 0.0,
'ibl_refl_type': 'SAME_AS_BG', 'ibl_refl_map': "", 'ibl_refl_intensity': 1.0, 'ibl_refl_scale_x': 1.0, 'ibl_refl_scale_y': 1.0, 'ibl_refl_offset_x': 0.0, 'ibl_refl_offset_y': 0.0,
'ibl_refr_type': 'SAME_AS_BG', 'ibl_refr_map': "", 'ibl_refr_intensity': 1.0, 'ibl_refr_scale_x': 1.0, 'ibl_refr_scale_y': 1.0, 'ibl_refr_offset_x': 0.0, 'ibl_refr_offset_y': 0.0,
'ibl_illum_type': 'SAME_AS_BG', 'ibl_illum_map': "", 'ibl_illum_intensity': 1.0, 'ibl_illum_scale_x': 1.0, 'ibl_illum_scale_y': 1.0, 'ibl_illum_offset_x': 0.0, 'ibl_illum_offset_y': 0.0,
},
'hell_mihai': {
'env_type': 'PHYSICAL_SKY', 'sky_type': 'PHYSICAL', 'sky_use_preset': False, 'sky_preset': '',
'sky_intensity': 1.0, 'sky_planet_refl': 80.0, 'sky_ozone': 0.1, 'sky_water': 1.0, 'sky_turbidity_coeff': 0.06,
'sky_wavelength_exp': 80.0, 'sky_reflectance': 50.0, 'sky_asymmetry': 0.0,
'sun_type': 'DISABLED', 'sun_power': 1.0, 'sun_radius_factor': 1.0, 'sun_color': (1.0, 1.0, 1.0), 'sun_temp': 1050.0,
'sun_location_type': 'LATLONG', 'sun_latlong_lat': 43.678, 'sun_latlong_lon': 3.617, 'sun_date': '03.06.2015', 'sun_time': '17:00',
'sun_latlong_gmt': 1.0, 'sun_latlong_gmt_auto': False, 'sun_latlong_ground_rotation': math.radians(194.274),
'sun_angles_zenith': math.radians(45.0), 'sun_angles_azimuth': math.radians(45.0),
'sun_dir_x': 0.0, 'sun_dir_y': 0.0, 'sun_dir_z': 1.0,
'ibl_intensity': 1.0, 'ibl_interpolation': False, 'ibl_screen_mapping': False,
'ibl_bg_type': 'HDR_IMAGE', 'ibl_bg_map': "", 'ibl_bg_intensity': 1.0, 'ibl_bg_scale_x': 1.0, 'ibl_bg_scale_y': 1.0, 'ibl_bg_offset_x': 0.0, 'ibl_bg_offset_y': 0.0,
'ibl_refl_type': 'SAME_AS_BG', 'ibl_refl_map': "", 'ibl_refl_intensity': 1.0, 'ibl_refl_scale_x': 1.0, 'ibl_refl_scale_y': 1.0, 'ibl_refl_offset_x': 0.0, 'ibl_refl_offset_y': 0.0,
'ibl_refr_type': 'SAME_AS_BG', 'ibl_refr_map': "", 'ibl_refr_intensity': 1.0, 'ibl_refr_scale_x': 1.0, 'ibl_refr_scale_y': 1.0, 'ibl_refr_offset_x': 0.0, 'ibl_refr_offset_y': 0.0,
'ibl_illum_type': 'SAME_AS_BG', 'ibl_illum_map': "", 'ibl_illum_intensity': 1.0, 'ibl_illum_scale_x': 1.0, 'ibl_illum_scale_y': 1.0, 'ibl_illum_offset_x': 0.0, 'ibl_illum_offset_y': 0.0,
},
'pyramid': {
'env_type': 'PHYSICAL_SKY', 'sky_type': 'PHYSICAL', 'sky_use_preset': False, 'sky_preset': '',
'sky_intensity': 1.0, 'sky_planet_refl': 4.0, 'sky_ozone': 0.88, 'sky_water': 52.0, 'sky_turbidity_coeff': 0.077,
'sky_wavelength_exp': 1.0, 'sky_reflectance': 90.0, 'sky_asymmetry': 0.822,
'sun_type': 'PHYSICAL', 'sun_power': 1.5, 'sun_radius_factor': 1.0, 'sun_color': (1.0, 1.0, 1.0), 'sun_temp': 5234.7,
'sun_location_type': 'LATLONG', 'sun_latlong_lat': 29.867, 'sun_latlong_lon': 31.333, 'sun_date': '10.06.2015', 'sun_time': '17:28',
'sun_latlong_gmt': 1.0, 'sun_latlong_gmt_auto': False, 'sun_latlong_ground_rotation': math.radians(0.0),
'sun_angles_zenith': math.radians(45.0), 'sun_angles_azimuth': math.radians(45.0),
'sun_dir_x': 0.0, 'sun_dir_y': 0.0, 'sun_dir_z': 1.0,
'ibl_intensity': 1.0, 'ibl_interpolation': False, 'ibl_screen_mapping': False,
'ibl_bg_type': 'HDR_IMAGE', 'ibl_bg_map': "", 'ibl_bg_intensity': 1.0, 'ibl_bg_scale_x': 1.0, 'ibl_bg_scale_y': 1.0, 'ibl_bg_offset_x': 0.0, 'ibl_bg_offset_y': 0.0,
'ibl_refl_type': 'SAME_AS_BG', 'ibl_refl_map': "", 'ibl_refl_intensity': 1.0, 'ibl_refl_scale_x': 1.0, 'ibl_refl_scale_y': 1.0, 'ibl_refl_offset_x': 0.0, 'ibl_refl_offset_y': 0.0,
'ibl_refr_type': 'SAME_AS_BG', 'ibl_refr_map': "", 'ibl_refr_intensity': 1.0, 'ibl_refr_scale_x': 1.0, 'ibl_refr_scale_y': 1.0, 'ibl_refr_offset_x': 0.0, 'ibl_refr_offset_y': 0.0,
'ibl_illum_type': 'SAME_AS_BG', 'ibl_illum_map': "", 'ibl_illum_intensity': 1.0, 'ibl_illum_scale_x': 1.0, 'ibl_illum_scale_y': 1.0, 'ibl_illum_offset_x': 0.0, 'ibl_illum_offset_y': 0.0,
},
'san_francisco_fog': {
'env_type': 'PHYSICAL_SKY', 'sky_type': 'PHYSICAL', 'sky_use_preset': False, 'sky_preset': '',
'sky_intensity': 1.0, 'sky_planet_refl': 25.0, 'sky_ozone': 3.0, 'sky_water': 50.0, 'sky_turbidity_coeff': 0.5,
'sky_wavelength_exp': 1.2, 'sky_reflectance': 80.0, 'sky_asymmetry': 0.5,
'sun_type': 'PHYSICAL', 'sun_power': 1.0, 'sun_radius_factor': 1.0, 'sun_color': (1.0, 1.0, 1.0), 'sun_temp': 5777.0,
'sun_location_type': 'LATLONG', 'sun_latlong_lat': 37.617, 'sun_latlong_lon': -122.383, 'sun_date': '19.07.2015', 'sun_time': '14:58',
'sun_latlong_gmt': -8.0, 'sun_latlong_gmt_auto': False, 'sun_latlong_ground_rotation': math.radians(-198.0),
'sun_angles_zenith': math.radians(45.0), 'sun_angles_azimuth': math.radians(45.0),
'sun_dir_x': 0.0, 'sun_dir_y': 0.0, 'sun_dir_z': 1.0,
'ibl_intensity': 1.0, 'ibl_interpolation': False, 'ibl_screen_mapping': False,
'ibl_bg_type': 'HDR_IMAGE', 'ibl_bg_map': "", 'ibl_bg_intensity': 1.0, 'ibl_bg_scale_x': 1.0, 'ibl_bg_scale_y': 1.0, 'ibl_bg_offset_x': 0.0, 'ibl_bg_offset_y': 0.0,
'ibl_refl_type': 'SAME_AS_BG', 'ibl_refl_map': "", 'ibl_refl_intensity': 1.0, 'ibl_refl_scale_x': 1.0, 'ibl_refl_scale_y': 1.0, 'ibl_refl_offset_x': 0.0, 'ibl_refl_offset_y': 0.0,
'ibl_refr_type': 'SAME_AS_BG', 'ibl_refr_map': "", 'ibl_refr_intensity': 1.0, 'ibl_refr_scale_x': 1.0, 'ibl_refr_scale_y': 1.0, 'ibl_refr_offset_x': 0.0, 'ibl_refr_offset_y': 0.0,
'ibl_illum_type': 'SAME_AS_BG', 'ibl_illum_map': "", 'ibl_illum_intensity': 1.0, 'ibl_illum_scale_x': 1.0, 'ibl_illum_scale_y': 1.0, 'ibl_illum_offset_x': 0.0, 'ibl_illum_offset_y': 0.0,
},
'sunset_orange_mihai': {
'env_type': 'PHYSICAL_SKY', 'sky_type': 'PHYSICAL', 'sky_use_preset': False, 'sky_preset': '',
'sky_intensity': 1.0, 'sky_planet_refl': 32.0, 'sky_ozone': 0.425, 'sky_water': 3.0, 'sky_turbidity_coeff': 0.08,
'sky_wavelength_exp': 2.2, 'sky_reflectance': 85.0, 'sky_asymmetry': 0.813,
'sun_type': 'PHYSICAL', 'sun_power': 1.0, 'sun_radius_factor': 1.0, 'sun_color': (1.0, 1.0, 1.0), 'sun_temp': 5777.0,
'sun_location_type': 'LATLONG', 'sun_latlong_lat': 43.678, 'sun_latlong_lon': 3.617, 'sun_date': '03.06.2015', 'sun_time': '20:08',
'sun_latlong_gmt': 1.0, 'sun_latlong_gmt_auto': False, 'sun_latlong_ground_rotation': math.radians(49.0),
'sun_angles_zenith': math.radians(45.0), 'sun_angles_azimuth': math.radians(45.0),
'sun_dir_x': 0.0, 'sun_dir_y': 0.0, 'sun_dir_z': 1.0,
'ibl_intensity': 1.0, 'ibl_interpolation': False, 'ibl_screen_mapping': False,
'ibl_bg_type': 'HDR_IMAGE', 'ibl_bg_map': "", 'ibl_bg_intensity': 1.0, 'ibl_bg_scale_x': 1.0, 'ibl_bg_scale_y': 1.0, 'ibl_bg_offset_x': 0.0, 'ibl_bg_offset_y': 0.0,
'ibl_refl_type': 'SAME_AS_BG', 'ibl_refl_map': "", 'ibl_refl_intensity': 1.0, 'ibl_refl_scale_x': 1.0, 'ibl_refl_scale_y': 1.0, 'ibl_refl_offset_x': 0.0, 'ibl_refl_offset_y': 0.0,
'ibl_refr_type': 'SAME_AS_BG', 'ibl_refr_map': "", 'ibl_refr_intensity': 1.0, 'ibl_refr_scale_x': 1.0, 'ibl_refr_scale_y': 1.0, 'ibl_refr_offset_x': 0.0, 'ibl_refr_offset_y': 0.0,
'ibl_illum_type': 'SAME_AS_BG', 'ibl_illum_map': "", 'ibl_illum_intensity': 1.0, 'ibl_illum_scale_x': 1.0, 'ibl_illum_scale_y': 1.0, 'ibl_illum_offset_x': 0.0, 'ibl_illum_offset_y': 0.0,
},
}
},
'emitter': {
'subdirs': False,
'defines': [
"import bpy",
"mx = bpy.context.object.active_material.maxwell_render",
"m = mx.custom_layers.layers[mx.custom_layers.index].emitter",
],
'presets': {
'incandescent_lamp_40w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 2700.0,
'luminance': '0', 'luminance_power': 40.0, 'luminance_efficacy': 10.8, 'luminance_output': 430.0, },
'incandescent_lamp_60w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 2700.0,
'luminance': '0', 'luminance_power': 60.0, 'luminance_efficacy': 11.5, 'luminance_output': 690.0, },
'incandescent_lamp_100w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 2700.0,
'luminance': '0', 'luminance_power': 100.0, 'luminance_efficacy': 13.8, 'luminance_output': 1380.0, },
'compact_fluorescent_lamp_warm_7w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 3000.0,
'luminance': '0', 'luminance_power': 7.0, 'luminance_efficacy': 57.1, 'luminance_output': 399.7, },
'compact_fluorescent_lamp_warm_9w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 3000.0,
'luminance': '0', 'luminance_power': 9.0, 'luminance_efficacy': 66.7, 'luminance_output': 600.3, },
'compact_fluorescent_lamp_cold_7w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 5000.0,
'luminance': '0', 'luminance_power': 7.0, 'luminance_efficacy': 57.1, 'luminance_output': 399.7, },
'compact_fluorescent_lamp_cold_9w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 5000.0,
'luminance': '0', 'luminance_power': 9.0, 'luminance_efficacy': 66.7, 'luminance_output': 600.3, },
'tubular_fluorescent_lamp_warm_20w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 3000.0,
'luminance': '0', 'luminance_power': 20.0, 'luminance_efficacy': 51.5, 'luminance_output': 1030, },
'tubular_fluorescent_lamp_warm_40w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 3000.0,
'luminance': '0', 'luminance_power': 40.0, 'luminance_efficacy': 65.0, 'luminance_output': 2600, },
'tubular_fluorescent_lamp_warm_65w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 3000.0,
'luminance': '0', 'luminance_power': 65.0, 'luminance_efficacy': 63.0, 'luminance_output': 4095, },
'tubular_fluorescent_lamp_midrange_20w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 4500.0,
'luminance': '0', 'luminance_power': 20.0, 'luminance_efficacy': 51.5, 'luminance_output': 1030, },
'tubular_fluorescent_lamp_midrange_40w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 4500.0,
'luminance': '0', 'luminance_power': 40.0, 'luminance_efficacy': 65.0, 'luminance_output': 2600, },
'tubular_fluorescent_lamp_midrange_65w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 4500.0,
'luminance': '0', 'luminance_power': 65.0, 'luminance_efficacy': 63.0, 'luminance_output': 4095, },
'tubular_fluorescent_lamp_cold_20w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 6000.0,
'luminance': '0', 'luminance_power': 20.0, 'luminance_efficacy': 51.5, 'luminance_output': 1030, },
'tubular_fluorescent_lamp_cold_40w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 6000.0,
'luminance': '0', 'luminance_power': 40.0, 'luminance_efficacy': 65.0, 'luminance_output': 2600, },
'tubular_fluorescent_lamp_cold_65w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 6000.0,
'luminance': '0', 'luminance_power': 65.0, 'luminance_efficacy': 63.0, 'luminance_output': 4095, },
'high_pressure_mercury_lamp_250w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 3500.0,
'luminance': '0', 'luminance_power': 250.0, 'luminance_efficacy': 54.0, 'luminance_output': 13500.0, },
'high_pressure_mercury_lamp_400w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 3500.0,
'luminance': '0', 'luminance_power': 400.0, 'luminance_efficacy': 57.5, 'luminance_output': 23000.0, },
'high_pressure_mercury_lamp_700w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 3500.0,
'luminance': '0', 'luminance_power': 700.0, 'luminance_efficacy': 60.0, 'luminance_output': 42000.0, },
'high_pressure_sodium_lamp_250w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 2300.0,
'luminance': '0', 'luminance_power': 250.0, 'luminance_efficacy': 100.0, 'luminance_output': 25000.0, },
'high_pressure_sodium_lamp_400w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 2300.0,
'luminance': '0', 'luminance_power': 400.0, 'luminance_efficacy': 118.0, 'luminance_output': 47200.0, },
'high_pressure_sodium_lamp_1000w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 2300.0,
'luminance': '0', 'luminance_power': 1000.0, 'luminance_efficacy': 120.0, 'luminance_output': 120000.0, },
'low_pressure_sodium_lamp_55w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 1800.0,
'luminance': '0', 'luminance_power': 55.0, 'luminance_efficacy': 145.0, 'luminance_output': 7975.0, },
'low_pressure_sodium_lamp_135w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 1800.0,
'luminance': '0', 'luminance_power': 135.0, 'luminance_efficacy': 167.0, 'luminance_output': 22545.0, },
'low_pressure_sodium_lamp_180w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 1800.0,
'luminance': '0', 'luminance_power': 180.0, 'luminance_efficacy': 180.0, 'luminance_output': 32400.0, },
'tungsten_halogen_low_tension_lamp_20w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 3000.0,
'luminance': '0', 'luminance_power': 20.0, 'luminance_efficacy': 16.0, 'luminance_output': 320.0, },
'tungsten_halogen_low_tension_lamp_35w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 3000.0,
'luminance': '0', 'luminance_power': 35.0, 'luminance_efficacy': 17.0, 'luminance_output': 595.0, },
'tungsten_halogen_low_tension_lamp_50w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 3000.0,
'luminance': '0', 'luminance_power': 50.0, 'luminance_efficacy': 18.2, 'luminance_output': 910.0, },
'tungsten_halogen_tension_lamp_40w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 3000.0,
'luminance': '0', 'luminance_power': 40.0, 'luminance_efficacy': 12.2, 'luminance_output': 490.0, },
'tungsten_halogen_tension_lamp_60w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 3000.0,
'luminance': '0', 'luminance_power': 60.0, 'luminance_efficacy': 14.0, 'luminance_output': 840.0, },
'tungsten_halogen_tension_lamp_100w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 3000.0,
'luminance': '0', 'luminance_power': 100.0, 'luminance_efficacy': 16.0, 'luminance_output': 1600.0, },
'tungsten_halogen_tension_lamp_150w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 3000.0,
'luminance': '0', 'luminance_power': 150.0, 'luminance_efficacy': 17.0, 'luminance_output': 2550.0, },
'metal_halide_hmi_lamp_200w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 6000.0,
'luminance': '0', 'luminance_power': 200.0, 'luminance_efficacy': 80.0, 'luminance_output': 16000.0, },
'metal_halide_hmi_lamp_400w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 6000.0,
'luminance': '0', 'luminance_power': 400.0, 'luminance_efficacy': 82.5, 'luminance_output': 33000.0, },
'metal_halide_hmi_lamp_575w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 6000.0,
'luminance': '0', 'luminance_power': 575.0, 'luminance_efficacy': 85.2, 'luminance_output': 48990.0, },
'metal_halide_hmi_lamp_1200w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 6000.0,
'luminance': '0', 'luminance_power': 1200.0, 'luminance_efficacy': 91.6, 'luminance_output': 109920.0, },
'metal_halide_hmi_lamp_2500w': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 6000.0,
'luminance': '0', 'luminance_power': 2500.0, 'luminance_efficacy': 96.0, 'luminance_output': 240000.0, },
'candle': {'color': (255 / 255, 255 / 255, 255 / 255), 'color_black_body_enabled': True, 'color_black_body': 1200.0,
'luminance': '0', 'luminance_power': 40.0, 'luminance_efficacy': 0.3, 'luminance_output': 12.4, },
}
},
'grass_modifier': {
'subdirs': False,
'defines': [
"import bpy",
"m = bpy.context.object.maxwell_render.grass",
],
'presets': {
'wild_grass': {
'primitive_type': '1', 'points_per_blade': 8, 'density': 3000, 'density_map': "", 'seed': 0, 'length': 7.5, 'length_map': "", 'length_variation': 60.0,
'root_width': 6.0, 'tip_width': 2.5, 'direction_type': 0.0, 'initial_angle': math.radians(60.0), 'initial_angle_variation': 50.0, 'initial_angle_map': "",
'start_bend': 40.0, 'start_bend_variation': 25.0, 'start_bend_map': "", 'bend_radius': 5.0, 'bend_radius_variation': 50.0, 'bend_radius_map': "",
'bend_angle': math.radians(80.0), 'bend_angle_variation': 50.0, 'bend_angle_map': "", 'cut_off': 100.0, 'cut_off_variation': 0.0, 'cut_off_map': "",
'lod': False, 'lod_min_distance': 10.0, 'lod_max_distance': 50.0, 'lod_max_distance_density': 10.0, 'display_percent': 10.0, 'display_max_blades': 1000,
},
'golf_green': {
'primitive_type': '1', 'points_per_blade': 4, 'density': 150000, 'density_map': "", 'seed': 0, 'length': 1.5, 'length_map': "", 'length_variation': 20.0,
'root_width': 2.0, 'tip_width': 0.5, 'direction_type': 0.0, 'initial_angle': math.radians(70.0), 'initial_angle_variation': 50.0, 'initial_angle_map': "",
'start_bend': 40.0, 'start_bend_variation': 25.0, 'start_bend_map': "", 'bend_radius': 1.0, 'bend_radius_variation': 25.0, 'bend_radius_map': "",
'bend_angle': math.radians(80.0), 'bend_angle_variation': 50.0, 'bend_angle_map': "", 'cut_off': 65.0, 'cut_off_variation': 15.0, 'cut_off_map': "",
'lod': False, 'lod_min_distance': 10.0, 'lod_max_distance': 50.0, 'lod_max_distance_density': 10.0, 'display_percent': 10.0, 'display_max_blades': 1000,
},
'tall_big_grass': {
'primitive_type': '1', 'points_per_blade': 16, 'density': 2500, 'density_map': "", 'seed': 0, 'length': 40.0, 'length_map': "", 'length_variation': 30.0,
'root_width': 11.0, 'tip_width': 3.0, 'direction_type': 0.0, 'initial_angle': math.radians(80.0), 'initial_angle_variation': 25.0, 'initial_angle_map': "",
'start_bend': 30.0, 'start_bend_variation': 25.0, 'start_bend_map': "", 'bend_radius': 25.0, 'bend_radius_variation': 25.0, 'bend_radius_map': "",
'bend_angle': math.radians(80.0), 'bend_angle_variation': 25.0, 'bend_angle_map': "", 'cut_off': 100.0, 'cut_off_variation': 0.0, 'cut_off_map': "",
'lod': False, 'lod_min_distance': 10.0, 'lod_max_distance': 50.0, 'lod_max_distance_density': 10.0, 'display_percent': 10.0, 'display_max_blades': 1000,
},
'fast_generic_grass': {
'primitive_type': '1', 'points_per_blade': 6, 'density': 4500, 'density_map': "", 'seed': 0, 'length': 8.0, 'length_map': "", 'length_variation': 50.0,
'root_width': 5.0, 'tip_width': 2.0, 'direction_type': 0.0, 'initial_angle': math.radians(80.0), 'initial_angle_variation': 40.0, 'initial_angle_map': "",
'start_bend': 30.0, 'start_bend_variation': 50.0, 'start_bend_map': "", 'bend_radius': 10.0, 'bend_radius_variation': 30.0, 'bend_radius_map': "",
'bend_angle': math.radians(80.0), 'bend_angle_variation': 40.0, 'bend_angle_map': "", 'cut_off': 100.0, 'cut_off_variation': 0.0, 'cut_off_map': "",
'lod': False, 'lod_min_distance': 10.0, 'lod_max_distance': 50.0, 'lod_max_distance_density': 10.0, 'display_percent': 10.0, 'display_max_blades': 1000,
},
}
},
'material': {
'subdirs': True,
'defines': [
"import bpy",
"m = bpy.context.object.active_material.maxwell_render.extension",
" ",
"def texture(d):",
" mat = bpy.context.object.active_material",
" ts = mat.texture_slots",
" for i in range(len(ts)):",
" mat.texture_slots.clear(i)",
" slot = ts.create(0)",
" tex = bpy.data.textures.new(d['name'], 'IMAGE')",
" slot.texture = tex",
" image = bpy.data.images.load(d['path'])",
" tex.image = image",
" tm = tex.maxwell_render",
" tm.use_global_map = d['use_global_map']",
" tm.channel = d['channel']",
" tm.tiling_method = d['tiling_method']",
" tm.tiling_units = d['tiling_units']",
" tm.repeat = d['repeat']",
" tm.mirror_x = d['mirror_x']",
" tm.mirror_y = d['mirror_y']",
" tm.offset = d['offset']",
" tm.rotation = d['rotation']",
" tm.invert = d['invert']",
" tm.use_alpha = d['use_alpha']",
" tm.interpolation = d['interpolation']",
" tm.brightness = d['brightness']",
" tm.contrast = d['contrast']",
" tm.saturation = d['saturation']",
" tm.hue = d['hue']",
" tm.clamp = d['clamp']",
" tm.normal_mapping_flip_red = d['normal_mapping_flip_red']",
" tm.normal_mapping_flip_green = d['normal_mapping_flip_green']",
" tm.normal_mapping_full_range_blue = d['normal_mapping_full_range_blue']",
" return tex.name",
" ",
],
'presets': {
'opaque': {
'white_clay': {'opaque_color_type': False, 'opaque_color': (220 / 255, 220 / 255, 220 / 255), 'opaque_color_map': "", 'opaque_shininess_type': False,
'opaque_shininess': 40.0, 'opaque_shininess_map': "", 'opaque_roughness_type': False, 'opaque_roughness': 25.0, 'opaque_roughness_map': "",
'opaque_clearcoat': False, },
},
'transparent': {
'high_grade_glass': {'transparent_color_type': False, 'transparent_color': (182 / 255, 182 / 255, 182 / 255), 'transparent_color_map': "", 'transparent_ior': 1.51,
'transparent_transparency': 30, 'transparent_roughness_type': 0, 'transparent_roughness': 0, 'transparent_roughness_map': "",
'transparent_specular_tint': 0, 'transparent_dispersion': 0, 'transparent_clearcoat': False, },
'low_grade_glass': {'transparent_color_type': False, 'transparent_color': (204 / 255, 220 / 255, 194 / 255), 'transparent_color_map': "", 'transparent_ior': 1.51,
'transparent_transparency': 20, 'transparent_roughness_type': 0, 'transparent_roughness': 0, 'transparent_roughness_map': "",
'transparent_specular_tint': 0, 'transparent_dispersion': 0, 'transparent_clearcoat': False, },
'brown_bottle_glass': {'transparent_color_type': False, 'transparent_color': (236 / 255, 170 / 255, 0 / 255), 'transparent_color_map': "", 'transparent_ior': 1.51,
'transparent_transparency': 0.2, 'transparent_roughness_type': 0, 'transparent_roughness': 5, 'transparent_roughness_map': "",
'transparent_specular_tint': 0, 'transparent_dispersion': 0, 'transparent_clearcoat': False, },
'green_bottle_glass': {'transparent_color_type': False, 'transparent_color': (212 / 255, 236 / 255, 0 / 255), 'transparent_color_map': "", 'transparent_ior': 1.51,
'transparent_transparency': 0.2, 'transparent_roughness_type': 0, 'transparent_roughness': 5, 'transparent_roughness_map': "",
'transparent_specular_tint': 0, 'transparent_dispersion': 0, 'transparent_clearcoat': False, },
'turquoise_glassware': {'transparent_color_type': False, 'transparent_color': (115 / 255, 226 / 255, 234 / 255), 'transparent_color_map': "", 'transparent_ior': 1.51,
'transparent_transparency': 3, 'transparent_roughness_type': 0, 'transparent_roughness': 0, 'transparent_roughness_map': "",
'transparent_specular_tint': 0, 'transparent_dispersion': 0, 'transparent_clearcoat': False, },
'frosted_clear_glass': {'transparent_color_type': False, 'transparent_color': (182 / 255, 182 / 255, 182 / 255), 'transparent_color_map': "", 'transparent_ior': 1.51,
'transparent_transparency': 30, 'transparent_roughness_type': 0, 'transparent_roughness': 100, 'transparent_roughness_map': "",
'transparent_specular_tint': 0, 'transparent_dispersion': 0, 'transparent_clearcoat': False, },
'frosted_color_glass': {'transparent_color_type': False, 'transparent_color': (220 / 255, 0 / 255, 0 / 255), 'transparent_color_map': "", 'transparent_ior': 1.51,
'transparent_transparency': 30, 'transparent_roughness_type': 0, 'transparent_roughness': 100, 'transparent_roughness_map': "",
'transparent_specular_tint': 30, 'transparent_dispersion': 0, 'transparent_clearcoat': False, },
'water': {'transparent_color_type': False, 'transparent_color': (255 / 255, 255 / 255, 255 / 255), 'transparent_color_map': "", 'transparent_ior': 1.33,
'transparent_transparency': 30, 'transparent_roughness_type': 0, 'transparent_roughness': 0, 'transparent_roughness_map': "",
'transparent_specular_tint': 0, 'transparent_dispersion': 0, 'transparent_clearcoat': False, },
'ice': {'transparent_color_type': False, 'transparent_color': (255 / 255, 255 / 255, 255 / 255), 'transparent_color_map': "", 'transparent_ior': 1.33,
'transparent_transparency': 30, 'transparent_roughness_type': 0, 'transparent_roughness': 70, 'transparent_roughness_map': "",
'transparent_specular_tint': 0, 'transparent_dispersion': 0, 'transparent_clearcoat': True, },
'diamond': {'transparent_color_type': False, 'transparent_color': (220 / 255, 220 / 255, 220 / 255), 'transparent_color_map': "", 'transparent_ior': 2.41,
'transparent_transparency': 30, 'transparent_roughness_type': 0, 'transparent_roughness': 0, 'transparent_roughness_map': "",
'transparent_specular_tint': 0, 'transparent_dispersion': 44.7, 'transparent_clearcoat': False, },
'ruby': {'transparent_color_type': False, 'transparent_color': (162 / 255, 0 / 255, 0 / 255), 'transparent_color_map': "", 'transparent_ior': 1.76,
'transparent_transparency': 3, 'transparent_roughness_type': 0, 'transparent_roughness': 0, 'transparent_roughness_map': "",
'transparent_specular_tint': 0, 'transparent_dispersion': 27.8, 'transparent_clearcoat': False, },
'emerald': {'transparent_color_type': False, 'transparent_color': (0 / 255, 162 / 255, 0 / 255), 'transparent_color_map': "", 'transparent_ior': 1.57,
'transparent_transparency': 3, 'transparent_roughness_type': 0, 'transparent_roughness': 0, 'transparent_roughness_map': "",
'transparent_specular_tint': 0, 'transparent_dispersion': 0, 'transparent_clearcoat': False, },
},
'metal': {
'aluminium': {'metal_ior': "0", 'metal_color_type': False, 'metal_color': (167 / 255, 167 / 255, 168 / 255), 'metal_color_map': "", 'metal_tint': 0.0,
'metal_roughness_type': 0, 'metal_roughness': 30.0, 'metal_roughness_map': "", 'metal_anisotropy_type': 0, 'metal_anisotropy': 0,
'metal_anisotropy_map': "", 'metal_angle_type': 0, 'metal_angle': 0, 'metal_angle_map': "", 'metal_dust_type': 0, 'metal_dust': 0,
'metal_dust_map': "", 'metal_perforation_enabled': False, 'metal_perforation_map': "", },
'24k_jewelry_gold': {'metal_ior': "5", 'metal_color_type': False, 'metal_color': (206 / 255, 90 / 255, 7 / 255), 'metal_color_map': "", 'metal_tint': 0.0,
'metal_roughness_type': 0, 'metal_roughness': 0.0, 'metal_roughness_map': "", 'metal_anisotropy_type': 0, 'metal_anisotropy': 0,
'metal_anisotropy_map': "", 'metal_angle_type': 0, 'metal_angle': 0, 'metal_angle_map': "", 'metal_dust_type': 0, 'metal_dust': 0,
'metal_dust_map': "", 'metal_perforation_enabled': False, 'metal_perforation_map': "", },
'mirror': {'metal_ior': "8", 'metal_color_type': False, 'metal_color': (255 / 255, 255 / 255, 255 / 255), 'metal_color_map': "", 'metal_tint': 0.0,
'metal_roughness_type': 0, 'metal_roughness': 0.0, 'metal_roughness_map': "", 'metal_anisotropy_type': 0, 'metal_anisotropy': 0,
'metal_anisotropy_map': "", 'metal_angle_type': 0, 'metal_angle': 0, 'metal_angle_map': "", 'metal_dust_type': 0, 'metal_dust': 0,
'metal_dust_map': "", 'metal_perforation_enabled': False, 'metal_perforation_map': "", },
},
'translucent': {
'silicone_gel': {'translucent_scale': 8.0, 'translucent_ior': 1.3, 'translucent_color_type': False, 'translucent_color': (250 / 255, 245 / 255, 230 / 255),
'translucent_color_map': "", 'translucent_hue_shift': 0.0, 'translucent_invert_hue': True, 'translucent_vibrance': 11, 'translucent_density': 90,
'translucent_opacity': 50, 'translucent_roughness_type': False, 'translucent_roughness': 17, 'translucent_roughness_map': "",
'translucent_specular_tint': 0.0, 'translucent_clearcoat': False, 'translucent_clearcoat_ior': 1.3, },
'polyurethane': {'translucent_scale': 0.2, 'translucent_ior': 1.3, 'translucent_color_type': False, 'translucent_color': (236 / 255, 220 / 255, 122 / 255),
'translucent_color_map': "", 'translucent_hue_shift': 19, 'translucent_invert_hue': False, 'translucent_vibrance': 10, 'translucent_density': 20,
'translucent_opacity': 50, 'translucent_roughness_type': False, 'translucent_roughness': 35, 'translucent_roughness_map': "",
'translucent_specular_tint': 20, 'translucent_clearcoat': False, 'translucent_clearcoat_ior': 1.3, },
'orange_juice': {'translucent_scale': 0.8, 'translucent_ior': 1.3, 'translucent_color_type': False, 'translucent_color': (232 / 255, 169 / 255, 52 / 255),
'translucent_color_map': "", 'translucent_hue_shift': 34, 'translucent_invert_hue': False, 'translucent_vibrance': 45, 'translucent_density': 50,
'translucent_opacity': 50, 'translucent_roughness_type': False, 'translucent_roughness': 0, 'translucent_roughness_map': "",
'translucent_specular_tint': 20, 'translucent_clearcoat': False, 'translucent_clearcoat_ior': 1.3, },
'honey': {'translucent_scale': 0.7, 'translucent_ior': 1.3, 'translucent_color_type': False, 'translucent_color': (191 / 255, 121 / 255, 10 / 255),
'translucent_color_map': "", 'translucent_hue_shift': 0, 'translucent_invert_hue': False, 'translucent_vibrance': 30, 'translucent_density': 20,
'translucent_opacity': 50, 'translucent_roughness_type': False, 'translucent_roughness': 0, 'translucent_roughness_map': "",
'translucent_specular_tint': 0, 'translucent_clearcoat': False, 'translucent_clearcoat_ior': 1.3, },
'caoutchouc': {'translucent_scale': 0.3, 'translucent_ior': 1.2, 'translucent_color_type': False, 'translucent_color': (195 / 255, 147 / 255, 5 / 255),
'translucent_color_map': "", 'translucent_hue_shift': 20, 'translucent_invert_hue': False, 'translucent_vibrance': 20, 'translucent_density': 30,
'translucent_opacity': 50, 'translucent_roughness_type': False, 'translucent_roughness': 80, 'translucent_roughness_map': "",
'translucent_specular_tint': 20, 'translucent_clearcoat': False, 'translucent_clearcoat_ior': 1.3, },
'flint': {'translucent_scale': 1.5, 'translucent_ior': 1.3, 'translucent_color_type': False, 'translucent_color': (242 / 255, 234 / 255, 217 / 255),
'translucent_color_map': "", 'translucent_hue_shift': 0, 'translucent_invert_hue': True, 'translucent_vibrance': 7, 'translucent_density': 90,
'translucent_opacity': 75, 'translucent_roughness_type': False, 'translucent_roughness': 20, 'translucent_roughness_map': "",
'translucent_specular_tint': 0, 'translucent_clearcoat': False, 'translucent_clearcoat_ior': 1.3, },
'milk': {'translucent_scale': 0.5, 'translucent_ior': 1.3, 'translucent_color_type': False, 'translucent_color': (216 / 255, 210 / 255, 193 / 255),
'translucent_color_map': "", 'translucent_hue_shift': -10, 'translucent_invert_hue': True, 'translucent_vibrance': 8, 'translucent_density': 80,
'translucent_opacity': 80, 'translucent_roughness_type': False, 'translucent_roughness': 0, 'translucent_roughness_map': "",
'translucent_specular_tint': 0, 'translucent_clearcoat': False, 'translucent_clearcoat_ior': 1.3, },
'wax_red': {'translucent_scale': 0.2, 'translucent_ior': 1.3, 'translucent_color_type': False, 'translucent_color': (90 / 255, 1 / 255, 4 / 255),
'translucent_color_map': "", 'translucent_hue_shift': 0, 'translucent_invert_hue': False, 'translucent_vibrance': 25, 'translucent_density': 40,
'translucent_opacity': 50, 'translucent_roughness_type': False, 'translucent_roughness': 40, 'translucent_roughness_map': "",
'translucent_specular_tint': 20, 'translucent_clearcoat': False, 'translucent_clearcoat_ior': 1.3, },
'rubber_blue': {'translucent_scale': 1, 'translucent_ior': 1.3, 'translucent_color_type': False, 'translucent_color': (6 / 255, 36 / 255, 204 / 255),
'translucent_color_map': "", 'translucent_hue_shift': 10, 'translucent_invert_hue': False, 'translucent_vibrance': 25, 'translucent_density': 80,
'translucent_opacity': 50, 'translucent_roughness_type': False, 'translucent_roughness': 100, 'translucent_roughness_map': "",
'translucent_specular_tint': 80, 'translucent_clearcoat': False, 'translucent_clearcoat_ior': 1.3, },
'rubber_lime': {'translucent_scale': 1, 'translucent_ior': 1.3, 'translucent_color_type': False, 'translucent_color': (200 / 255, 233 / 255, 2 / 255),
'translucent_color_map': "", 'translucent_hue_shift': 10, 'translucent_invert_hue': False, 'translucent_vibrance': 10, 'translucent_density': 50,
'translucent_opacity': 50, 'translucent_roughness_type': False, 'translucent_roughness': 30, 'translucent_roughness_map': "",
'translucent_specular_tint': 80, 'translucent_clearcoat': False, 'translucent_clearcoat_ior': 1.3, },
'vapor': {'translucent_scale': 1, 'translucent_ior': 1.001, 'translucent_color_type': False, 'translucent_color': (255 / 255, 255 / 255, 255 / 255),
'translucent_color_map': "", 'translucent_hue_shift': 0, 'translucent_invert_hue': False, 'translucent_vibrance': 0, 'translucent_density': 30,
'translucent_opacity': 50, 'translucent_roughness_type': False, 'translucent_roughness': 0, 'translucent_roughness_map': "",
'translucent_specular_tint': 0, 'translucent_clearcoat': False, 'translucent_clearcoat_ior': 1.3, },
'smoke': {'translucent_scale': 1, 'translucent_ior': 1.001, 'translucent_color_type': False, 'translucent_color': (1 / 255, 1 / 255, 1 / 255),
'translucent_color_map': "", 'translucent_hue_shift': 0, 'translucent_invert_hue': False, 'translucent_vibrance': 0, 'translucent_density': 30,
'translucent_opacity': 50, 'translucent_roughness_type': False, 'translucent_roughness': 0, 'translucent_roughness_map': "",
'translucent_specular_tint': 0, 'translucent_clearcoat': False, 'translucent_clearcoat_ior': 1.3, },
},
'carpaint': {
'cherry_metallic': {'carpaint_color': (100 / 255, 0 / 255, 16 / 255), 'carpaint_metallic': 100.0, 'carpaint_topcoat': 50.0, },
},
'emitter': {
'incandescent_lamp_40w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 2700.0,
'emitter_luminance': '0', 'emitter_luminance_power': 40.0, 'emitter_luminance_efficacy': 10.8, 'emitter_luminance_output': 430.0, },
'incandescent_lamp_60w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 2700.0,
'emitter_luminance': '0', 'emitter_luminance_power': 60.0, 'emitter_luminance_efficacy': 11.5, 'emitter_luminance_output': 690.0, },
'incandescent_lamp_100w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 2700.0,
'emitter_luminance': '0', 'emitter_luminance_power': 100.0, 'emitter_luminance_efficacy': 13.8, 'emitter_luminance_output': 1380.0, },
'compact_fluorescent_lamp_warm_7w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 3000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 7.0, 'emitter_luminance_efficacy': 57.1, 'emitter_luminance_output': 399.7, },
'compact_fluorescent_lamp_warm_9w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 3000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 9.0, 'emitter_luminance_efficacy': 66.7, 'emitter_luminance_output': 600.3, },
'compact_fluorescent_lamp_cold_7w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 5000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 7.0, 'emitter_luminance_efficacy': 57.1, 'emitter_luminance_output': 399.7, },
'compact_fluorescent_lamp_cold_9w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 5000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 9.0, 'emitter_luminance_efficacy': 66.7, 'emitter_luminance_output': 600.3, },
'tubular_fluorescent_lamp_warm_20w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 3000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 20.0, 'emitter_luminance_efficacy': 51.5, 'emitter_luminance_output': 1030, },
'tubular_fluorescent_lamp_warm_40w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 3000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 40.0, 'emitter_luminance_efficacy': 65.0, 'emitter_luminance_output': 2600, },
'tubular_fluorescent_lamp_warm_65w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 3000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 65.0, 'emitter_luminance_efficacy': 63.0, 'emitter_luminance_output': 4095, },
'tubular_fluorescent_lamp_midrange_20w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 4500.0,
'emitter_luminance': '0', 'emitter_luminance_power': 20.0, 'emitter_luminance_efficacy': 51.5, 'emitter_luminance_output': 1030, },
'tubular_fluorescent_lamp_midrange_40w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 4500.0,
'emitter_luminance': '0', 'emitter_luminance_power': 40.0, 'emitter_luminance_efficacy': 65.0, 'emitter_luminance_output': 2600, },
'tubular_fluorescent_lamp_midrange_65w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 4500.0,
'emitter_luminance': '0', 'emitter_luminance_power': 65.0, 'emitter_luminance_efficacy': 63.0, 'emitter_luminance_output': 4095, },
'tubular_fluorescent_lamp_cold_20w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 6000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 20.0, 'emitter_luminance_efficacy': 51.5, 'emitter_luminance_output': 1030, },
'tubular_fluorescent_lamp_cold_40w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 6000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 40.0, 'emitter_luminance_efficacy': 65.0, 'emitter_luminance_output': 2600, },
'tubular_fluorescent_lamp_cold_65w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 6000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 65.0, 'emitter_luminance_efficacy': 63.0, 'emitter_luminance_output': 4095, },
'high_pressure_mercury_lamp_250w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 3500.0,
'emitter_luminance': '0', 'emitter_luminance_power': 250.0, 'emitter_luminance_efficacy': 54.0, 'emitter_luminance_output': 13500.0, },
'high_pressure_mercury_lamp_400w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 3500.0,
'emitter_luminance': '0', 'emitter_luminance_power': 400.0, 'emitter_luminance_efficacy': 57.5, 'emitter_luminance_output': 23000.0, },
'high_pressure_mercury_lamp_700w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 3500.0,
'emitter_luminance': '0', 'emitter_luminance_power': 700.0, 'emitter_luminance_efficacy': 60.0, 'emitter_luminance_output': 42000.0, },
'high_pressure_sodium_lamp_250w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 2300.0,
'emitter_luminance': '0', 'emitter_luminance_power': 250.0, 'emitter_luminance_efficacy': 100.0, 'emitter_luminance_output': 25000.0, },
'high_pressure_sodium_lamp_400w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 2300.0,
'emitter_luminance': '0', 'emitter_luminance_power': 400.0, 'emitter_luminance_efficacy': 118.0, 'emitter_luminance_output': 47200.0, },
'high_pressure_sodium_lamp_1000w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 2300.0,
'emitter_luminance': '0', 'emitter_luminance_power': 1000.0, 'emitter_luminance_efficacy': 120.0, 'emitter_luminance_output': 120000.0, },
'low_pressure_sodium_lamp_55w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 1800.0,
'emitter_luminance': '0', 'emitter_luminance_power': 55.0, 'emitter_luminance_efficacy': 145.0, 'emitter_luminance_output': 7975.0, },
'low_pressure_sodium_lamp_135w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 1800.0,
'emitter_luminance': '0', 'emitter_luminance_power': 135.0, 'emitter_luminance_efficacy': 167.0, 'emitter_luminance_output': 22545.0, },
'low_pressure_sodium_lamp_180w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 1800.0,
'emitter_luminance': '0', 'emitter_luminance_power': 180.0, 'emitter_luminance_efficacy': 180.0, 'emitter_luminance_output': 32400.0, },
'tungsten_halogen_low_tension_lamp_20w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 3000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 20.0, 'emitter_luminance_efficacy': 16.0, 'emitter_luminance_output': 320.0, },
'tungsten_halogen_low_tension_lamp_35w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 3000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 35.0, 'emitter_luminance_efficacy': 17.0, 'emitter_luminance_output': 595.0, },
'tungsten_halogen_low_tension_lamp_50w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 3000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 50.0, 'emitter_luminance_efficacy': 18.2, 'emitter_luminance_output': 910.0, },
'tungsten_halogen_tension_lamp_40w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 3000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 40.0, 'emitter_luminance_efficacy': 12.2, 'emitter_luminance_output': 490.0, },
'tungsten_halogen_tension_lamp_60w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 3000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 60.0, 'emitter_luminance_efficacy': 14.0, 'emitter_luminance_output': 840.0, },
'tungsten_halogen_tension_lamp_100w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 3000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 100.0, 'emitter_luminance_efficacy': 16.0, 'emitter_luminance_output': 1600.0, },
'tungsten_halogen_tension_lamp_150w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 3000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 150.0, 'emitter_luminance_efficacy': 17.0, 'emitter_luminance_output': 2550.0, },
'metal_halide_hmi_lamp_200w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 6000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 200.0, 'emitter_luminance_efficacy': 80.0, 'emitter_luminance_output': 16000.0, },
'metal_halide_hmi_lamp_400w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 6000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 400.0, 'emitter_luminance_efficacy': 82.5, 'emitter_luminance_output': 33000.0, },
'metal_halide_hmi_lamp_575w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 6000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 575.0, 'emitter_luminance_efficacy': 85.2, 'emitter_luminance_output': 48990.0, },
'metal_halide_hmi_lamp_1200w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 6000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 1200.0, 'emitter_luminance_efficacy': 91.6, 'emitter_luminance_output': 109920.0, },
'metal_halide_hmi_lamp_2500w': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 6000.0,
'emitter_luminance': '0', 'emitter_luminance_power': 2500.0, 'emitter_luminance_efficacy': 96.0, 'emitter_luminance_output': 240000.0, },
'candle': {'emitter_color': (255 / 255, 255 / 255, 255 / 255), 'emitter_color_black_body_enabled': True, 'emitter_color_black_body': 1200.0,
'emitter_luminance': '0', 'emitter_luminance_power': 40.0, 'emitter_luminance_efficacy': 0.3, 'emitter_luminance_output': 12.4, },
},
'hair': {
'black_hair': {
'hair_color_type': False,
'hair_color': (6 / 255, 6 / 255, 7 / 255),
'hair_color_map': '',
'hair_root_tip_map': '',
'hair_root_tip_weight_type': False,
'hair_root_tip_weight': 50.0,
'hair_root_tip_weight_map': '',
'hair_primary_highlight_strength': 40.0,
'hair_primary_highlight_spread': 36.0,
'hair_primary_highlight_tint': (245 / 255, 245 / 255, 255 / 255),
'hair_secondary_highlight_strength': 40.0,
'hair_secondary_highlight_spread': 45.0,
'hair_secondary_highlight_tint': (131 / 255, 135 / 255, 140 / 255),
},
'dark_brown': {
'hair_color_type': False,
'hair_color': (40 / 255, 22 / 255, 11 / 255),
'hair_color_map': '',
'hair_root_tip_map': '',
'hair_root_tip_weight_type': False,
'hair_root_tip_weight': 50.0,
'hair_root_tip_weight_map': '',
'hair_primary_highlight_strength': 65.0,
'hair_primary_highlight_spread': 36.0,
'hair_primary_highlight_tint': (255 / 255, 255 / 255, 255 / 255),
'hair_secondary_highlight_strength': 60.0,
'hair_secondary_highlight_spread': 45.0,
'hair_secondary_highlight_tint': (128 / 255, 74 / 255, 58 / 255),
},
'light_brown': {
'hair_color_type': True,
'hair_color': (55 / 255, 30 / 255, 15 / 255),
'hair_color_map': {
'name': 'light_brown-hair_color_map',
'path': os.path.join(get_prefs().maxwell_path, 'materials database', 'textures', 'hairTex_lightBrown.png'),
'use_global_map': 0, 'channel': 1, 'tiling_method': 'TILE_XY', 'tiling_units': '0', 'repeat': (3.0, 3.0, ), 'mirror_x': False, 'mirror_y': False,
'offset': (0.0, 0.0, ), 'rotation': 0.0, 'invert': False, 'use_alpha': False, 'interpolation': False,
'brightness': 3.0, 'contrast': 0.0, 'saturation': -4.0, 'hue': 0.0, 'clamp': (0, 255, ),
'normal_mapping_flip_red': False, 'normal_mapping_flip_green': True, 'normal_mapping_full_range_blue': False,
},
'hair_root_tip_map': '',
'hair_root_tip_weight_type': False,
'hair_root_tip_weight': 50.0,
'hair_root_tip_weight_map': '',
'hair_primary_highlight_strength': 55.0,
'hair_primary_highlight_spread': 40.0,
'hair_primary_highlight_tint': (255 / 255, 255 / 255, 255 / 255),
'hair_secondary_highlight_strength': 55.0,
'hair_secondary_highlight_spread': 55.0,
'hair_secondary_highlight_tint': (160 / 255, 116 / 255, 86 / 255),
},
'dark_blonde': {
'hair_color_type': True,
'hair_color': (55 / 255, 30 / 255, 15 / 255),
'hair_color_map': {
'name': 'dark_blonde-hair_color_map',
'path': os.path.join(get_prefs().maxwell_path, 'materials database', 'textures', 'hairTex_Blonde.png'),
'use_global_map': 0, 'channel': 1, 'tiling_method': 'TILE_XY', 'tiling_units': '0', 'repeat': (2.0, 2.0, ), 'mirror_x': False, 'mirror_y': False,
'offset': (0.0, 0.0, ), 'rotation': 0.0, 'invert': False, 'use_alpha': False, 'interpolation': False,
'brightness': -6.0, 'contrast': 25.0, 'saturation': -50.0, 'hue': -6.0, 'clamp': (0, 255, ),
'normal_mapping_flip_red': False, 'normal_mapping_flip_green': True, 'normal_mapping_full_range_blue': False,
},
'hair_root_tip_map': '',
'hair_root_tip_weight_type': False,
'hair_root_tip_weight': 50.0,
'hair_root_tip_weight_map': '',
'hair_primary_highlight_strength': 75.0,
'hair_primary_highlight_spread': 36.0,
'hair_primary_highlight_tint': (241 / 255, 235 / 255, 226 / 255),
'hair_secondary_highlight_strength': 85.0,
'hair_secondary_highlight_spread': 45.0,
'hair_secondary_highlight_tint': (226 / 255, 167 / 255, 139 / 255),
},
'hollywood_blonde': {
'hair_color_type': True,
'hair_color': (55 / 255, 30 / 255, 15 / 255),
'hair_color_map': {
'name': 'hollywood_blonde-hair_color_map',
'path': os.path.join(get_prefs().maxwell_path, 'materials database', 'textures', 'hairTex_Blonde.png'),
'use_global_map': 0, 'channel': 1, 'tiling_method': 'TILE_XY', 'tiling_units': '0', 'repeat': (2.0, 2.0, ), 'mirror_x': False, 'mirror_y': False,
'offset': (0.0, 0.0, ), 'rotation': 0.0, 'invert': False, 'use_alpha': False, 'interpolation': False,
'brightness': 25.0, 'contrast': -29.0, 'saturation': -7.0, 'hue': -5.0, 'clamp': (0, 255, ),
'normal_mapping_flip_red': False, 'normal_mapping_flip_green': True, 'normal_mapping_full_range_blue': False,
},
'hair_root_tip_map': '',
'hair_root_tip_weight_type': False,
'hair_root_tip_weight': 50.0,
'hair_root_tip_weight_map': '',
'hair_primary_highlight_strength': 100.0,
'hair_primary_highlight_spread': 90.0,
'hair_primary_highlight_tint': (241 / 255, 235 / 255, 226 / 255),
'hair_secondary_highlight_strength': 95.0,
'hair_secondary_highlight_spread': 75.0,
'hair_secondary_highlight_tint': (226 / 255, 192 / 255, 113 / 255),
},
'red_hair': {
'hair_color_type': True,
'hair_color': (55 / 255, 30 / 255, 15 / 255),
'hair_color_map': {
'name': 'red_hair-hair_color_map',
'path': os.path.join(get_prefs().maxwell_path, 'materials database', 'textures', 'hairTex_Red.png'),
'use_global_map': 0, 'channel': 1, 'tiling_method': 'TILE_XY', 'tiling_units': '0', 'repeat': (2.0, 2.0, ), 'mirror_x': False, 'mirror_y': False,
'offset': (0.0, 0.0, ), 'rotation': 0.0, 'invert': False, 'use_alpha': False, 'interpolation': False,
'brightness': 25.0, 'contrast': -29.0, 'saturation': -7.0, 'hue': -5.0, 'clamp': (0, 255, ),
'normal_mapping_flip_red': False, 'normal_mapping_flip_green': True, 'normal_mapping_full_range_blue': False,
},
'hair_root_tip_map': '',
'hair_root_tip_weight_type': False,
'hair_root_tip_weight': 50.0,
'hair_root_tip_weight_map': '',
'hair_primary_highlight_strength': 100.0,
'hair_primary_highlight_spread': 90.0,
'hair_primary_highlight_tint': (241 / 255, 235 / 255, 226 / 255),
'hair_secondary_highlight_strength': 95.0,
'hair_secondary_highlight_spread': 75.0,
'hair_secondary_highlight_tint': (226 / 255, 192 / 255, 113 / 255),
},
},
},
},
}
return presets
def setup():
    """Create the add-on's preset directory tree and write missing default
    preset files.

    For each preset category returned by get_default_presets(), any preset
    not already present on disk is written as a small python snippet (one
    "m.<name> = <value>" line per property, preceded by the category's
    'defines' lines). Existing preset files are never overwritten, so user
    edits survive re-registration.
    """
    # make all subdirs for presets
    pd = os.path.join(bpy.utils.user_resource('SCRIPTS'), "presets", "blendmaxwell")
    l = ['camera', 'channels', 'environment', 'exposure', 'material', 'render', ]
    for d in l:
        p = os.path.join(pd, d)
        if(not os.path.exists(p)):
            os.makedirs(p)
    defaults = get_default_presets()
    for subdir, presets in defaults.items():
        preset_subdir = os.path.join("blendmaxwell", subdir)
        preset_directory = os.path.join(bpy.utils.user_resource('SCRIPTS'), "presets", preset_subdir)
        preset_paths = bpy.utils.preset_paths(preset_subdir)
        if(preset_directory not in preset_paths):
            if(not os.path.exists(preset_directory)):
                os.makedirs(preset_directory)
        # search for presets, .py file is considered as preset

        def walk(p):
            # Non-recursive listing: immediate files and dirs of p only
            # (note the 'break' after the first os.walk iteration).
            r = {'files': [], 'dirs': [], }
            for(root, dirs, files) in os.walk(p):
                r['files'].extend(files)
                r['dirs'].extend(dirs)
                break
            return r
        subdirs = presets['subdirs']
        defines = presets['defines']
        if(subdirs):
            # Category with one extra directory level: each key of
            # presets['presets'] names a subdirectory of preset files.
            for k, v in presets['presets'].items():
                found = []
                p = os.path.join(preset_directory, k)
                if(not os.path.exists(p)):
                    os.makedirs(p)
                c = walk(p)
                for f in c['files']:
                    if(f.endswith(".py")):
                        found.append(f[:-3])
                for k2, v2 in v.items():
                    if(k2 not in found):
                        e = "\n"
                        s = ""
                        for i in range(len(defines)):
                            s += defines[i] + e
                        for k3, v3 in v2.items():
                            if(type(v3) is dict):
                                # dict values describe textures; serialized
                                # through the texture() helper emitted in
                                # this category's 'defines'.
                                s += 'm.{} = texture({}){}'.format(k3, v3, e)
                            elif(type(v3) is str and v3 != ""):
                                s += 'm.{} = "{}"{}'.format(k3, v3, e)
                            elif(v3 == ""):
                                s += 'm.{} = ""{}'.format(k3, e)
                            else:
                                s += 'm.{} = {}{}'.format(k3, v3, e)
                        with open(os.path.join(p, "{}.py".format(k2)), mode='w', encoding='utf-8') as f:
                            f.write(s)
        else:
            # Flat category: preset files live directly in preset_directory.
            found = []
            p = preset_directory
            c = walk(p)
            for f in c['files']:
                if(f.endswith(".py")):
                    found.append(f[:-3])
            for k, v in presets['presets'].items():
                if(k not in found):
                    e = "\n"
                    s = ""
                    for i in range(len(defines)):
                        s += defines[i] + e
                    for k3, v3 in v.items():
                        if(type(v3) is str and v3 != ""):
                            s += 'm.{} = "{}"{}'.format(k3, v3, e)
                        elif(v3 == ""):
                            s += 'm.{} = ""{}'.format(k3, e)
                        else:
                            s += 'm.{} = {}{}'.format(k3, v3, e)
                    with open(os.path.join(p, "{}.py".format(k)), mode='w', encoding='utf-8') as f:
                        f.write(s)
def register():
    """Register the add-on classes, fill in platform-dependent default
    paths in the add-on preferences, write default presets and attach the
    Maxwell engine to the selected UI panels."""
    # bpy.utils.register_module(__name__, verbose=True)
    bpy.utils.register_module(__name__)
    # The add-on package name is the directory that holds this file.
    addon_name = os.path.split(os.path.split(os.path.realpath(__file__))[0])[1]
    prefs = bpy.context.user_preferences.addons[addon_name].preferences
    system = platform.system()

    def _default_python_path():
        # Per-platform default location of the external python interpreter.
        defaults = {'Darwin': '/Library/Frameworks/Python.framework/Versions/3.5/',
                    'Linux': '/usr/bin/',
                    'Windows': ""}
        if(system not in defaults):
            raise OSError("Unknown platform: {}.".format(system))
        return defaults[system]

    def _default_maxwell_path():
        # Per-platform default location of the Maxwell installation.
        if(system == 'Darwin'):
            return '/Applications/Maxwell 3/'
        if(system in ('Linux', 'Windows')):
            return os.environ.get("MAXWELL3_ROOT")
        raise OSError("Unknown platform: {}.".format(system))

    # Only fill in defaults when the user has not set the paths already.
    if(prefs.python_path == ''):
        prefs.python_path = _default_python_path()
    if(prefs.maxwell_path == ''):
        prefs.maxwell_path = _default_maxwell_path()
    setup()
    for panel in get_selected_panels():
        panel.COMPAT_ENGINES.add(engine.MaxwellRenderExportEngine.bl_idname)
def unregister():
    """Unregister the add-on classes and detach the Maxwell engine from
    the previously extended UI panels."""
    # bpy.utils.unregister_module(__name__, verbose=True)
    bpy.utils.unregister_module(__name__)
    engine_id = engine.MaxwellRenderExportEngine.bl_idname
    for panel in get_selected_panels():
        panel.COMPAT_ENGINES.remove(engine_id)
# Allow running this file directly (e.g. from Blender's text editor).
if __name__ == "__main__":
    register()
# oh, btw, run this from time to time..
# pep8 --ignore=W293,E501,E402 .
|
# Minimal HTTP client: performs a single request and prints the response
# headers. NOTE: Python 2 only (print statement, httplib module).
import sys
import httplib
if ( len(sys.argv) != 5 ):
    print "usage tinyWebClient.py [host] [port] [method] [path]"
else:
    # Positional arguments: host, port, HTTP method (e.g. GET) and path.
    host = sys.argv[1]
    port = sys.argv[2]
    method = sys.argv[3]
    path = sys.argv[4]
    info = (host, port)
    print("%s:%s" % info)
    # httplib accepts and parses the combined "host:port" form itself.
    conn = httplib.HTTPConnection("%s:%s" % info)
    conn.request(method, path)
    # .msg holds the response headers of the completed request.
    print(conn.getresponse().msg)
|
__author__ = 'Tom Schaul, tom@idsia.ch'
from pylab import figure, savefig, imshow, axes, axis, cm, show
from scipy import array, amin, amax
class ColorMap:
    """Render a matrix as a colormap image using matplotlib/pylab."""

    def __init__(self, mat, cmap=None, pixelspervalue=20, minvalue=None, maxvalue=None):
        """ Make a colormap image of a matrix

        :key mat: the matrix to be used for the colormap.
        :key cmap: matplotlib colormap; defaults to cm.hot.
        :key pixelspervalue: pixels rendered per matrix cell.
        :key minvalue: lower color limit; defaults to the matrix minimum.
        :key maxvalue: upper color limit; defaults to the matrix maximum.
        """
        # Identity check ("is None") instead of "== None": the idiomatic
        # form, and robust if array-like values are ever passed (where ==
        # would broadcast elementwise).
        if minvalue is None:
            minvalue = amin(mat)
        if maxvalue is None:
            maxvalue = amax(mat)
        if not cmap:
            cmap = cm.hot
        # figsize is in inches at the default 100 dpi; shape is (rows, cols)
        # while figsize wants (width, height), hence the [::-1] reversal.
        figsize = (array(mat.shape) / 100. * pixelspervalue)[::-1]
        self.fig = figure(figsize=figsize)
        axes([0, 0, 1, 1])  # Make the plot occupy the whole canvas
        axis('off')
        self.fig.set_size_inches(figsize)
        imshow(mat, cmap=cmap, clim=(minvalue, maxvalue), interpolation='nearest')

    def show(self):
        """ have the image popup """
        show()

    def save(self, filename):
        """ save colormap to file"""
        savefig(filename, fig=self.fig, facecolor='black', edgecolor='black')
|
"""
***************************************************************************
SelectByLocation.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
__revision__ = '$Format:%H$'
import os
from PyQt4.QtXml import QDomDocument
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.parameters import ParameterFile
from processing.core.parameters import ParameterRaster
from processing.core.outputs import OutputRaster
from processing.tools import dataobjects
from qgis.utils import iface
class SetRasterStyle(GeoAlgorithm):
    """Processing algorithm that applies a .qml style file to a raster layer.

    If the layer is not loaded yet it is loaded with the style attached;
    otherwise the symbology XML is read and pushed onto the live layer.
    """

    INPUT = 'INPUT'
    STYLE = 'STYLE'
    OUTPUT = 'OUTPUT'

    def defineCharacteristics(self):
        # Algorithm metadata plus its input raster, style file and output.
        self.name = 'Set style for raster layer'
        self.group = 'Raster general tools'
        self.addParameter(ParameterRaster(self.INPUT,
                                          self.tr('Raster layer')))
        self.addParameter(ParameterFile(self.STYLE,
                                        self.tr('Style file'), False, False, 'qml'))
        self.addOutput(OutputRaster(self.OUTPUT, self.tr('Styled'), True))

    def processAlgorithm(self, progress):
        filename = self.getParameterValue(self.INPUT)
        layer = dataobjects.getObjectFromUri(filename)
        style = self.getParameterValue(self.STYLE)
        if layer is None:
            # Layer not in the project yet: load it with the style applied.
            dataobjects.load(filename, os.path.basename(filename), style=style)
            self.getOutputFromName(self.OUTPUT).open = False
        else:
            # Layer already loaded: parse the QML and apply the symbology.
            with open(style) as f:
                xml = f.read()  # simpler than "".join(f.readlines())
            d = QDomDocument()
            d.setContent(xml)
            n = d.firstChild()
            layer.readSymbology(n, '')
            self.setOutputValue(self.OUTPUT, filename)
            iface.mapCanvas().refresh()
            iface.legendInterface().refreshLayerSymbology(layer)
|
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Schema cleanup: drops the SuspicionScore, SuspiciousFeature,
    # UserDetail and UserSuspicionScore models. Constraints and FK fields
    # are removed first so the DeleteModel operations can run.
    # NOTE(review): destructive - data in these tables is lost on apply.

    dependencies = [
        ('feature', '0009_remove_feature_user_detail'),
        ('changeset', '0029_suspiciousfeature_timestamp'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='suspicionscore',
            unique_together=set([]),
        ),
        migrations.RemoveField(
            model_name='suspicionscore',
            name='changeset',
        ),
        migrations.RemoveField(
            model_name='suspiciousfeature',
            name='changeset',
        ),
        migrations.RemoveField(
            model_name='suspiciousfeature',
            name='reasons',
        ),
        migrations.AlterUniqueTogether(
            name='usersuspicionscore',
            unique_together=set([]),
        ),
        migrations.RemoveField(
            model_name='usersuspicionscore',
            name='user',
        ),
        migrations.RemoveField(
            model_name='changeset',
            name='user_detail',
        ),
        migrations.DeleteModel(
            name='SuspicionScore',
        ),
        migrations.DeleteModel(
            name='SuspiciousFeature',
        ),
        migrations.DeleteModel(
            name='UserDetail',
        ),
        migrations.DeleteModel(
            name='UserSuspicionScore',
        ),
    ]
|
import os
import netifaces
from mss.agent.lib.utils import grep, get_config_option
from mss.agent.managers.translation import TranslationManager
# Shortcut for the add-on's translation helper; used to localize all the
# labels/help strings built below.
_ = TranslationManager().translate
def get_config_info():
    """Return the setup script name and the argument names it expects.

    One group of per-interface argument names is produced for every
    ethernet ("eth*") interface, followed by the firewall switches.
    """
    suffixes = ("_name", "_type", "_method", "_addr", "_netmask",
                "_dns1", "_dns2", "_domain", "_gateway")
    args = [iface + suffix
            for iface in netifaces.interfaces()
            if iface.startswith("eth")
            for suffix in suffixes]
    args += ["fw_mss_lan", "fw_ssh_lan", "fw_mss_wan", "fw_ssh_wan"]
    return ("setup-network.sh", args)
def get_interfaces_config(config):
    """
    Dynamic configuration for ethernet interfaces

    Appends one form-field group per "eth*" interface (type, method,
    address, netmask, DNS, domain, gateway) plus a firewall section to
    *config*, and returns the extended list. Defaults are read back from
    the existing ifcfg-* file and the live interface state so the form
    reflects the current configuration.
    """
    CONFIG_DIR = "/etc/sysconfig/network-scripts"
    for interface in netifaces.interfaces():
        if interface.startswith("eth"):
            if_file = os.path.join(CONFIG_DIR, "ifcfg-%s" % interface)
            if_detail = netifaces.ifaddresses(interface)
            # "configured" = has an ifcfg file AND currently holds an IPv4
            # address.
            configured = os.path.exists(if_file) and netifaces.AF_INET in if_detail
            method = "none"
            addr = ""
            netmask = ""
            dns1 = ""
            dns2 = ""
            domain = ""
            gateway = ""
            if configured:
                if grep("BOOTPROTO=dhcp", if_file):
                    method = "dhcp"
                else:
                    method = "static"
                # Current address/netmask from the live interface; the rest
                # from the ifcfg file.
                addr = if_detail[netifaces.AF_INET][0]['addr']
                netmask = if_detail[netifaces.AF_INET][0]['netmask']
                dns1 = get_config_option(if_file, "DNS1")
                dns2 = get_config_option(if_file, "DNS2")
                domain = get_config_option(if_file, "DOMAIN")
                gateway = get_config_option(if_file, "GATEWAY")
            config.append({'slug': 'network',
                           'type': 'subtitle', 'label': interface})
            config.append({'slug': 'network', 'name': interface + '_name',
                           'type': 'text', 'hidden': 'yes', 'default': interface})
            # "lan"/"wan" values carry the interface number suffix (e.g.
            # eth0 -> lan0/wan0).
            config.append({'slug': 'network',
                           'name': interface + '_type',
                           'require': 'yes',
                           'label': _('Interface type', 'network'),
                           'help': _('Choose "External" if the interface is connected to the Internet. If the interface is connected to an internal network, choose "Internal"', 'network'),
                           'type': 'options',
                           'options': [
                               {'name': _('Internal network', 'network'), 'value': 'lan' + interface[-1]},
                               {'name': _('External network', 'network'), 'value': 'wan' + interface[-1]}
                           ]
                           })
            # 'toggle' lists the fields shown only for manual configuration.
            config.append({'slug': 'network',
                           'name': interface + '_method',
                           'default': method, 'require': 'yes',
                           'label': _('Configuration method', 'network'),
                           'type': 'options',
                           'options': [
                               {'name': _('No configuration', 'network'), 'value': 'none'},
                               {'name': _('DHCP configuration', 'network'), 'value': 'dhcp'},
                               {'name': _('Manual configuration', 'network'), 'value': 'static',
                                'toggle': [interface + '_addr', interface + '_netmask', interface + '_dns1',
                                           interface + '_dns2', interface + '_domain', interface + '_gateway']}
                           ]
                           })
            config.append({'slug': 'network',
                           'name': interface + '_addr',
                           'default': addr,
                           'label': _('IP Address', 'network'),
                           'type': 'text', 'validation': 'ip'})
            config.append({'slug': 'network',
                           'name': interface + '_netmask',
                           'default': netmask,
                           'label': _('Netmask', 'network'),
                           'type': 'text', 'validation': 'ip'})
            config.append({'slug': 'network',
                           'name': interface + '_dns1',
                           'default': dns1,
                           'label': _('First DNS server', 'network'),
                           'type': 'text', 'validation': 'ip'})
            config.append({'slug': 'network',
                           'name': interface + '_dns2',
                           'default': dns2,
                           'label': _('Second DNS server', 'network'),
                           'type': 'text', 'validation': 'ip'})
            config.append({'slug': 'network',
                           'name': interface + '_domain',
                           'default': domain,
                           'label': _('Domain search', 'network'),
                           'type': 'text', 'validation': 'fqdn'})
            config.append({'slug': 'network',
                           'name': interface + '_gateway',
                           'default': gateway,
                           'label': _('Gateway', 'network'),
                           'type': 'text', 'validation': 'ip'})
    # Firewall section (once, after all interfaces).
    config.append({'slug': 'network',
                   'type': 'subtitle', 'label': _('Firewall configuration', 'network')})
    config.append({'slug': 'network',
                   'name': 'fw_mss_lan',
                   'default': 'on',
                   'label': _('Allow access to Mandriva Server Setup from internal networks', 'network'),
                   'type': 'check'})
    config.append({'slug': 'network',
                   'name': 'fw_ssh_lan',
                   'default': 'on',
                   'label': _('Allow SSH access from internal networks', 'network'),
                   'type': 'check'})
    config.append({'slug': 'network',
                   'name': 'fw_mss_wan',
                   'default': 'off',
                   'label': _('Allow access to Mandriva Server Setup from external networks', 'network'),
                   'type': 'check'})
    config.append({'slug': 'network',
                   'name': 'fw_ssh_wan',
                   'default': 'on',
                   'label': _('Allow SSH access from external networks', 'network'),
                   'type': 'check'})
    return config
|
import unittest
import doctest
from trytond.model import descriptors
def suite():
    """Collect the doctests defined on trytond.model.descriptors."""
    tests = unittest.TestSuite()
    tests.addTest(doctest.DocTestSuite(descriptors))
    return tests
|
"""A demo of ankura functionality"""
from __future__ import print_function
import ankura
@ankura.util.memoize
@ankura.util.pickle_cache('newsgroups.pickle')
def get_newsgroups():
    """Retrieves the 20 newsgroups dataset"""
    datadir = '/local/jlund3/data/'
    news_glob = datadir + 'newsgroups-dedup/*/*'
    engl_stop = datadir + 'stopwords/english.txt'
    news_stop = datadir + 'stopwords/newsgroups.txt'
    name_stop = datadir + 'stopwords/malenames.txt'
    # Import the raw corpus, labeling each document by its directory name.
    corpus = ankura.read_glob(news_glob,
                              tokenizer=ankura.tokenize.news,
                              labeler=ankura.label.title_dirname)
    # Vocabulary pruning: stopwords, male names (collapsed to a single
    # <name> token), then rare and overly common words.
    for stopwords in (engl_stop, news_stop):
        corpus = ankura.filter_stopwords(corpus, stopwords)
    corpus = ankura.combine_words(corpus, name_stop, '<name>')
    corpus = ankura.filter_rarewords(corpus, 100)
    corpus = ankura.filter_commonwords(corpus, 1500)
    return corpus
def demo():
    """Runs the newsgroups demo"""
    corpus = get_newsgroups()
    # 20 anchors from 500 projection dimensions, then topic recovery.
    corpus_anchors = ankura.gramschmidt_anchors(corpus, 20, 500)
    corpus_topics = ankura.recover_topics(corpus, corpus_anchors)
    summaries = ankura.topic.topic_summary_tokens(corpus_topics, corpus, 20)
    for tokens in summaries:
        print(' '.join(tokens))
# Script entry point: run the demo when executed directly.
if __name__ == '__main__':
    demo()
|
"""
Template file used by the OPF Experiment Generator to generate the actual
description.py file by replacing $XXXXXXXX tokens with desired values.
This description.py file was generated by:
'~/nta/eng/lib/python2.6/site-packages/nupic/frameworks/opf/expGenerator/ExpGenerator.py'
"""
import os

from nupic.frameworks.opf.expdescriptionapi import ExperimentDescriptionAPI
from nupic.frameworks.opf.expdescriptionhelpers import (
    updateConfigFromSubConfig,
    applyValueGettersToContainer,
    DeferredDictLookup)
from nupic.frameworks.opf.clamodelcallbacks import *
from nupic.frameworks.opf.metrics import MetricSpec
from nupic.frameworks.opf.opfutils import (InferenceType,
                                           InferenceElement)
from nupic.support import aggregationDivide
from nupic.frameworks.opf.opftaskdriver import (
    IterationPhaseSpecLearnOnly,
    IterationPhaseSpecInferOnly,
    IterationPhaseSpecLearnAndInfer)
# Model configuration consumed by ExperimentDescriptionAPI below; the
# 'modelParams' sub-dictionary parameterizes the sensor, spatial pooler (SP),
# temporal pooler (TP) and classifier regions of the CLA model.
config = {
    # Type of model that the rest of these parameters apply to.
    'model': "CLA",
    # Version that specifies the format of the config.
    'version': 1,
    # Intermediate variables used to compute fields in modelParams and also
    # referenced from the control section.
    'aggregationInfo': { 'fields': [ ('numericFieldNameA', 'mean'),
                                     ('numericFieldNameB', 'sum'),
                                     ('categoryFieldNameC', 'first')],
                         'hours': 0},
    'predictAheadTime': None,
    # Model parameter dictionary.
    'modelParams': {
        # The type of inference that this model will perform
        'inferenceType': 'NontemporalAnomaly',
        'sensorParams': {
            # Sensor diagnostic output verbosity control;
            # if > 0: sensor region will print out on screen what it's sensing
            # at each step 0: silent; >=1: some info; >=2: more info;
            # >=3: even more info (see compute() in py/regions/RecordSensor.py)
            'verbosity' : 0,
            # Example:
            #   dsEncoderSchema = [
            #     DeferredDictLookup('__field_name_encoder'),
            #   ],
            #
            # (value generated from DS_ENCODER_SCHEMA)
            'encoders': {
                'f0': dict(fieldname='f0', n=100, name='f0', type='SDRCategoryEncoder', w=21),
            },
            # A dictionary specifying the period for automatically-generated
            # resets from a RecordSensor;
            #
            # None = disable automatically-generated resets (also disabled if
            # all of the specified values evaluate to 0).
            # Valid keys is the desired combination of the following:
            #   days, hours, minutes, seconds, milliseconds, microseconds, weeks
            #
            # Example for 1.5 days: sensorAutoReset = dict(days=1,hours=12),
            #
            # (value generated from SENSOR_AUTO_RESET)
            'sensorAutoReset' : None,
        },
        'spEnable': True,
        'spParams': {
            # SP diagnostic output verbosity control;
            # 0: silent; >=1: some info; >=2: more info;
            'spVerbosity' : 0,
            'globalInhibition': 1,
            # Number of cell columns in the cortical region (same number for
            # SP and TP)
            # (see also tpNCellsPerCol)
            'columnCount': 2048,
            'inputWidth': 0,
            # SP inhibition control (absolute value);
            # Maximum number of active columns in the SP region's output (when
            # there are more, the weaker ones are suppressed)
            'numActivePerInhArea': 40,
            'seed': 1956,
            # coincInputPoolPct
            # What percent of the columns's receptive field is available
            # for potential synapses. At initialization time, we will
            # choose coincInputPoolPct * (2*coincInputRadius+1)^2
            'coincInputPoolPct': 0.5,
            # The default connected threshold. Any synapse whose
            # permanence value is above the connected threshold is
            # a "connected synapse", meaning it can contribute to the
            # cell's firing. Typical value is 0.10. Cells whose activity
            # level before inhibition falls below minDutyCycleBeforeInh
            # will have their own internal synPermConnectedCell
            # threshold set below this default value.
            # (This concept applies to both SP and TP and so 'cells'
            # is correct here as opposed to 'columns')
            'synPermConnected': 0.1,
            'synPermActiveInc': 0.1,
            'synPermInactiveDec': 0.01,
        },
        # Controls whether TP is enabled or disabled;
        # TP is necessary for making temporal predictions, such as predicting
        # the next inputs. Without TP, the model is only capable of
        # reconstructing missing sensor inputs (via SP).
        'tpEnable' : True,
        'tpParams': {
            # TP diagnostic output verbosity control;
            # 0: silent; [1..6]: increasing levels of verbosity
            # (see verbosity in nta/trunk/py/nupic/research/TP.py and TP10X*.py)
            'verbosity': 0,
            # Number of cell columns in the cortical region (same number for
            # SP and TP)
            # (see also tpNCellsPerCol)
            'columnCount': 2048,
            # The number of cells (i.e., states), allocated per column.
            'cellsPerColumn': 32,
            'inputWidth': 2048,
            'seed': 1960,
            # Temporal Pooler implementation selector (see _getTPClass in
            # CLARegion.py).
            'temporalImp': 'cpp',
            # New Synapse formation count
            # NOTE: If None, use spNumActivePerInhArea
            #
            # TODO: need better explanation
            'newSynapseCount': 20,
            # Maximum number of synapses per segment
            #  > 0 for fixed-size CLA
            # -1 for non-fixed-size CLA
            #
            # TODO: for Ron: once the appropriate value is placed in TP
            # constructor, see if we should eliminate this parameter from
            # description.py.
            'maxSynapsesPerSegment': 32,
            # Maximum number of segments per cell
            #  > 0 for fixed-size CLA
            # -1 for non-fixed-size CLA
            #
            # TODO: for Ron: once the appropriate value is placed in TP
            # constructor, see if we should eliminate this parameter from
            # description.py.
            'maxSegmentsPerCell': 128,
            # Initial Permanence
            # TODO: need better explanation
            'initialPerm': 0.21,
            # Permanence Increment
            'permanenceInc': 0.1,
            # Permanence Decrement
            # If set to None, will automatically default to tpPermanenceInc
            # value.
            'permanenceDec' : 0.1,
            'globalDecay': 0.0,
            'maxAge': 0,
            # Minimum number of active synapses for a segment to be considered
            # during search for the best-matching segments.
            # None=use default
            # Replaces: tpMinThreshold
            'minThreshold': 12,
            # Segment activation threshold.
            # A segment is active if it has >= tpSegmentActivationThreshold
            # connected synapses that are active due to infActiveState
            # None=use default
            # Replaces: tpActivationThreshold
            'activationThreshold': 16,
            'outputType': 'normal',
            # "Pay Attention Mode" length. This tells the TP how many new
            # elements to append to the end of a learned sequence at a time.
            # Smaller values are better for datasets with short sequences,
            # higher values are better for datasets with long sequences.
            'pamLength': 1,
        },
        'clParams': {
            'regionName' : 'CLAClassifierRegion',
            # Classifier diagnostic output verbosity control;
            # 0: silent; [1..6]: increasing levels of verbosity
            'clVerbosity' : 0,
            # This controls how fast the classifier learns/forgets. Higher values
            # make it adapt faster and forget older patterns faster.
            'alpha': 0.001,
            # This is set after the call to updateConfigFromSubConfig and is
            # computed from the aggregationInfo and predictAheadTime.
            'steps': '1',
        },
        'trainSPNetOnlyIfRequested': False,
    },
}
# Apply sub-experiment overrides (if any), then resolve the prediction
# horizon into a discrete number of classifier steps.
updateConfigFromSubConfig(config)
if config['predictAheadTime'] is not None:
    predictionSteps = int(round(aggregationDivide(
        config['predictAheadTime'], config['aggregationInfo'])))
    assert (predictionSteps >= 1)
    config['modelParams']['clParams']['steps'] = str(predictionSteps)
# Resolve any DeferredDictLookup-style value getters left in the config.
applyValueGettersToContainer(config)
# Experiment control section: one anomaly task iterating the whole CSV
# dataset with combined learning and inference.
# NOTE(review): os.path is used below but 'import os' is not visible at the
# top of this script; it may only arrive via the star import - confirm.
control = dict(
    environment='opfExperiment',
    tasks = [
        {
            # Task label; this label string may be used for diagnostic logging and for
            # constructing filenames or directory pathnames for task-specific files, etc.
            'taskLabel' : "Anomaly",
            # Input stream specification per py/nupic/cluster/database/StreamDef.json.
            #
            'dataset' : {
                'info': 'test_NoProviders',
                'version': 1,
                'streams': [
                    {
                        'columns': ['*'],
                        'info': 'my simple dataset',
                        'source': 'file://'+os.path.join(os.path.dirname(__file__), 'data.csv'),
                    }
                ],
                # TODO: Aggregation is not supported yet by OpfRunExperiment.py
                #'aggregation' : config['aggregationInfo']
            },
            # Iteration count: maximum number of iterations. Each iteration corresponds
            # to one record from the (possibly aggregated) dataset. The task is
            # terminated when either number of iterations reaches iterationCount or
            # all records in the (possibly aggregated) database have been processed,
            # whichever occurs first.
            #
            # iterationCount of -1 = iterate over the entire dataset
            'iterationCount' : -1,
            # Task Control parameters for OPFTaskDriver (per opfTaskControlSchema.json)
            'taskControl' : {
                # Iteration cycle list consisting of opftaskdriver.IterationPhaseSpecXXXXX
                # instances.
                'iterationCycle' : [
                    #IterationPhaseSpecLearnOnly(1000),
                    IterationPhaseSpecLearnAndInfer(1000, inferenceArgs=None),
                    #IterationPhaseSpecInferOnly(10, inferenceArgs=None),
                ],
                'metrics' : [
                ],
                # Logged Metrics: A sequence of regular expressions that specify which of
                # the metrics from the Inference Specifications section MUST be logged for
                # every prediction. The regex's correspond to the automatically generated
                # metric labels. This is similar to the way the optimization metric is
                # specified in permutations.py.
                'loggedMetrics': ['.*grokScore.*'],
                # Callbacks for experimentation/research (optional)
                'callbacks' : {
                    # Callbacks to be called at the beginning of a task, before model iterations.
                    # Signature: callback(<reference to OPFExperiment>); returns nothing
                    'setup' : [],
                    # Callbacks to be called after every learning/inference iteration
                    # Signature: callback(<reference to OPFExperiment>); returns nothing
                    'postIter' : [],
                    # Callbacks to be called when the experiment task is finished
                    # Signature: callback(<reference to OPFExperiment>); returns nothing
                    'finish' : []
                }
            } # End of taskControl
        }, # End of task
    ]
)
# The object OpfRunExperiment loads to drive the experiment.
descriptionInterface = ExperimentDescriptionAPI(modelConfig=config,
                                                control=control)
|
from __future__ import division
import argparse
import esc_tools as et
DEBUG=1  # nonzero enables per-atom trace output in index_of_species_index
SPECIAL="h"  # suffix appended to the element symbol of the "special" species
# Map from atomic number Z to element symbol; Z = 0 is the sentinel symbol
# for the "unknown" (special) species. Entries from Z = 112 up carry the
# provisional IUPAC systematic symbols in use when this table was written.
elements = {
    1: "H", 2: "He", 3: "Li", 4: "Be", 5: "B", 6: "C", 7: "N",
    8: "O", 9: "F", 10: "Ne", 11: "Na", 12: "Mg", 13: "Al", 14: "Si",
    15: "P", 16: "S", 17: "Cl", 18: "Ar", 19: "K", 20: "Ca", 21: "Sc",
    22: "Ti", 23: "V", 24: "Cr", 25: "Mn", 26: "Fe", 27: "Co", 28: "Ni",
    29: "Cu", 30: "Zn", 31: "Ga", 32: "Ge", 33: "As", 34: "Se",
    35: "Br", 36: "Kr", 37: "Rb", 38: "Sr", 39: "Y", 40: "Zr", 41: "Nb",
    42: "Mo", 43: "Tc", 44: "Ru", 45: "Rh", 46: "Pd", 47: "Ag",
    48: "Cd", 49: "In", 50: "Sn", 51: "Sb", 52: "Te", 53: "I", 54: "Xe",
    55: "Cs", 56: "Ba", 57: "La", 58: "Ce", 59: "Pr", 60: "Nd",
    61: "Pm", 62: "Sm", 63: "Eu", 64: "Gd", 65: "Tb", 66: "Dy",
    67: "Ho", 68: "Er", 69: "Tm", 70: "Yb", 71: "Lu",
    72: "Hf", 73: "Ta", 74: "W", 75: "Re", 76: "Os", 77: "Ir", 78: "Pt",
    79: "Au", 80: "Hg", 81: "Tl", 82: "Pb", 83: "Bi", 84: "Po",
    85: "At", 86: "Rn", 87: "Fr", 88: "Ra", 89: "Ac", 90: "Th",
    91: "Pa", 92: "U", 93: "Np", 94: "Pu", 95: "Am", 96: "Cm", 97: "Bk",
    98: "Cf", 99: "Es", 100: "Fm", 101: "Md", 102: "No", 103: "Lr",
    # Fix: 109 was "Ds" (duplicating 110), which made the symbol->Z reverse
    # lookup for "Ds" ambiguous; Meitnerium's symbol is "Mt".
    104: "Rf", 105: "Db", 106: "Sg", 107: "Bh", 108: "Hs", 109: "Mt",
    110: "Ds", 111: "Rg", 112: "Uub", 113: "Uut", 114: "Uuq", 115: "Uup",
    116: "Uuh", 117: "Uus", 118: "Uuo", 0: "UKN"}
def getElementZ(elstr):
    """ Z = getElementZ(elstr)

    Given a string that contains either a Z number OR an element
    abbreviation like Cu, MG, whatever, generates and returns the
    appropriate integer Z. Unknown symbols print a warning and return 0
    (the "UKN" sentinel).
    """
    # Is it an integer?
    try:
        return int(elstr)
    except ValueError:
        pass
    # Not an integer: normalize case ("MG" -> "Mg") and reverse-search the
    # elements table in a single pass (the membership pre-check was
    # redundant with the loop).
    symbol = elstr.title()
    for key, value in elements.items():
        if symbol == value:
            return key
    # Parenthesized single-argument print: valid in both Python 2 and 3,
    # replacing the Python-2-only print statement.
    print("(libesc.getElementZ) Warning: Element %s is not in the elements dictionary. Returning 0 for element Z." % elstr)
    return 0
def remove_comments(lines, comment_delim="#", just_blanks=False):
    """ stripped = remove_comments(lines, comment_delim="#", just_blanks=False)

    Strip comments (whole-line and trailing) and blank lines from a
    sequence of data-file lines. Only one comment delimiter is handled per
    call - apply repeatedly for multiple comment styles. With
    just_blanks=True, only blank lines are removed and comments are kept.
    """
    if just_blanks:
        # Keep every non-blank line, whitespace-trimmed, comments included.
        return [line.strip() for line in lines if line.strip() != ""]
    kept = []
    for line in lines:
        core = line.strip()
        # Skip blank lines and lines that are entirely comment.
        if core == "" or core.startswith(comment_delim):
            continue
        # Drop any trailing comment and surrounding whitespace.
        kept.append(line.partition(comment_delim)[0].strip())
    return kept
def uniqify(sequence, trans=None):
    """ unique = uniqify(sequence, trans)

    Produces an order-preserved list of unique elements in the passed
    sequence. Supports a transform function so that passed elements
    can be transformed before comparison if necessary.
    """
    if trans is None:
        def trans(item):
            return item
    seen = set()
    unique = []
    for item in sequence:
        marker = trans(item)
        if marker not in seen:
            seen.add(marker)
            unique.append(item)
    return unique
def index_of_species_index(species, Z, n):
    """ i = index_of_species_index(species, Z, n)

    Returns the absolute index in the species list of the nth (0-based)
    occurrence of element Z, or -1 if there are fewer than n+1 such
    entries. (Docstring fixed: it previously omitted the 'species'
    parameter.)
    """
    si = -1
    for i, s in enumerate(species):
        if DEBUG:
            # Parenthesized single-argument print: valid in Python 2 and 3.
            # NOTE(review): the message prints n where Z might be intended.
            print("Testing atom %d, element %d to see if it matches %d." % (i, s, n))
        if s == Z:
            si += 1
            if si == n:
                return i
    # Didn't find it
    return -1
# Command-line driver: marks the nth atom of a given element in a Quantum
# ESPRESSO input file as a distinct "special" species (Z = 0) with its own
# pseudopotential, then writes <input>_special.in. Python 2 only (print
# statements).
if __name__ == '__main__':
    print "qe_special_atom version 1.0"
    print ""
    print "Written by Kane O'Donnell, October 2014"
    print ""
    # Parse the command line arguments
    parser = argparse.ArgumentParser(description="Make a named atom special in a qe input file.")
    parser.add_argument('INPUT', help="Input file name")
    parser.add_argument('Z', help="Element (e.g. C or 6) to make special.")
    parser.add_argument('n', type=int, help="Make the nth atom of element Z special. 1-based.")
    parser.add_argument('PSP', help="Name of pseudopotential for the special species.")
    args = parser.parse_args()
    # Convert from 1-based n and possibly-string Z to 0-based n and integer Z.
    n = args.n - 1
    z = getElementZ(args.Z)
    # Sanity check on inputs
    if n < 0:
        print "ERROR: It looks like you've specified a negative number for the atomic index. Try again. Exiting..."
        exit(0)
    if z == 0:
        print "ERROR: You've specified an unknown element - it has to be one of the ones in our universe, not imaginary ones! Try again. Exiting..."
        exit(0)
    # Read the input and check for any species "0" - i.e. if we already have special atoms.
    #f = open(args.INPUT, 'r')
    #lines = f.readlines()
    #f.close()
    #p, s, props = parse_cell(lines)
    a = et.Atoms(args.INPUT,"qe,input")
    if 0 in a.species:
        print "ERROR: There are unknown species in this file already - you already have at least one special atom. For safety reasons, cannot continue. Goodbye!"
        exit(0)
    si = index_of_species_index(a.species, z, n)
    if si == -1:
        print "ERROR: Didn't find atom %d of species %s. Exiting..." % (n, args.Z)
        exit(0)
    #write_new_cell(args.SEED, p, s, props, si)
    # Set species of the special atom to 0.
    a.species[si] = 0
    # One extra species type now exists; keep ntyp consistent.
    a.parameters["&system"]["ntyp"] = str(int(a.parameters["&system"]["ntyp"]) + 1)
    print a.parameters
    # Change the element dictionary itself so it writes the correct string.
    et.elements[0] = elements[z] + SPECIAL
    # Add an extra line to the atomic_species block (at the start).
    a.parameters["atomic_species"].append("%s 1.0 %s" % (et.elements[0], args.PSP))
    a.parameters["atomic_species"].insert(0, a.parameters["atomic_species"].pop())
    # Move the special atom to the beginning of the positions and species list.
    #a.positions.insert(0, a.positions.pop(si))
    #a.species.insert(0, a.species.pop(si))
    # Now tell the Atoms object to write itself.
    fname = ".".join(args.INPUT.split('.')[0:-1]) + "_special.in"
    a.writeQEInput(fname)
    print "Goodbye!"
    exit(0)
|
from ..filter import Filter
from ..filter_descriptor import FilterDescriptor
from ..io_descriptor import IODescriptor
from ...IO.image import Image
from ...IO.compressed_image import CompressedImage
class CompressImage(Filter):
    """Filter that converts an Image input into a CompressedImage output."""

    descriptor = FilterDescriptor("CompressImage", "Compresses an image.",
                                  inputs=[IODescriptor("input", "Uncompressed image.", Image)],
                                  outputs=[IODescriptor("output", "Compressed image.", CompressedImage)])

    def initialize(self):
        # Parenthesized single-argument print: valid in both Python 2 and 3,
        # replacing the Python-2-only print statement.
        print("Init %s" % self.name)

    def execute(self, time=0):
        im = self.get_input("input")
        if im:
            # Wrap the raw pixel data and carry the original header across.
            im2 = CompressedImage(im.get_image())
            im.copy_header(im2)
            self.set_output("output", im2)
|
"""Unit test for the SNES nonlinear solver"""
from __future__ import print_function
"""Solve the Yamabe PDE which arises in the differential geometry of
general relativity. http://arxiv.org/abs/1107.0360.
The Yamabe equation is highly nonlinear and supports many
solutions. However, only one of these is of physical relevance -- the
positive solution.
This unit test demonstrates the capability of the SNES solver to
accept bounds on the resulting solution. The plain Newton method
converges to an unphysical negative solution, while the SNES solution
with {sign: nonnegative} converges to the physical positive solution.
An alternative interface to SNESVI allows the user to set explicitly
more complex bounds as GenericVectors or Function.
"""
from dolfin import *
import pytest
import os
from dolfin_utils.test import *
# Fixtures that temporarily override global dolfin parameters per test.
parameter_degree = set_parameters_fixture("form_compiler.quadrature_degree", \
                                          [5])
parameter_backend = set_parameters_fixture("linear_algebra_backend", ["PETSc"])
@fixture
def mesh(datadir):
    # 2D "doughnut" mesh on which the Yamabe problem is posed.
    return Mesh(os.path.join(datadir, "doughnut.xml.gz"))
@fixture
def V(mesh):
    # Continuous piecewise-linear scalar function space.
    return FunctionSpace(mesh, "CG", 1)
@fixture
def bcs(V):
    # Dirichlet condition u = 1 on the whole boundary.
    return [DirichletBC(V, 1.0, "on_boundary")]
@fixture
def u(V):
    # Initial guess far below zero; this drives plain Newton toward the
    # unphysical negative solution (see tests below).
    u = Function(V)
    u.interpolate(Constant(-1000.0))
    return u
@fixture
def v(V):
    # Test function for the residual form F.
    return TestFunction(V)
@fixture
def F(u, v, mesh):
    # Residual form of the Yamabe equation with coefficient rho = 1/r^3.
    x = SpatialCoordinate(mesh)
    r = sqrt(x[0]**2 + x[1]**2)
    rho = 1.0/r**3
    return (8*inner(grad(u), grad(v))*dx + rho * inner(u**5, v)*dx \
            + (-1.0/8.0)*inner(u, v)*dx)
@fixture
def J(V, u, F):
    # Jacobian (Gateaux derivative) of F with respect to u.
    du = TrialFunction(V)
    return derivative(F, u, du)
@fixture
def lb(V):
    # Lower bound (0) for the SNES variational-inequality solve.
    return Function(interpolate(Constant(0.), V))
@fixture
def ub(V):
    # Upper bound (100) for the SNES variational-inequality solve.
    return Function(interpolate(Constant(100.), V))
@fixture
def newton_solver_parameters():
    # Plain (unbounded) Newton solve with an LU linear solver.
    return{"nonlinear_solver": "newton",
           "newton_solver": {"linear_solver": "lu",
                             "maximum_iterations": 100,
                             "report": False}}
@fixture
def snes_solver_parameters_sign():
    # SNES solve constrained to nonnegative solutions via the "sign" option.
    return {"nonlinear_solver": "snes",
            "snes_solver": {"linear_solver": "lu",
                            "maximum_iterations": 100,
                            "sign": "nonnegative",
                            "report": True}}
@fixture
def snes_solver_parameters_bounds():
    # SNES solve with default sign; bounds are passed explicitly to solve().
    return {"nonlinear_solver": "snes",
            "snes_solver": {"linear_solver": "lu",
                            "maximum_iterations": 100,
                            "sign": "default",
                            "report": True}}
@skip_if_not_PETSc
def test_snes_solver(F, bcs, u, snes_solver_parameters_sign, parameter_degree,\
                     parameter_backend):
    # SNES with sign=nonnegative must find the physical (positive) solution.
    u.interpolate(Constant(-1000.0))
    solve(F == 0, u, bcs, solver_parameters=snes_solver_parameters_sign)
    assert u.vector().min() >= 0
@skip_if_not_PETSc
def test_newton_solver(F, u, bcs, newton_solver_parameters, parameter_degree,\
                       parameter_backend):
    # The unconstrained Newton solver converges to the unphysical negative
    # branch from this initial guess -- the contrast to the SNES VI tests.
    u.interpolate(Constant(-1000.0))
    solve(F == 0, u, bcs, solver_parameters=newton_solver_parameters)
    assert u.vector().min() < 0
@skip_if_not_PETSc
def test_snes_solver_bound_functions(F, u, bcs, J, \
                                     snes_solver_parameters_bounds,
                                     lb, ub, parameter_degree, \
                                     parameter_backend):
    # Bounds supplied as Functions via the alternative SNESVI interface
    # (solver.solve(lb, ub)); the result must respect the lower bound 0.
    u.interpolate(Constant(-1000.0))
    problem = NonlinearVariationalProblem(F, u, bcs, J)
    solver = NonlinearVariationalSolver(problem)
    solver.parameters.update(snes_solver_parameters_bounds)
    solver.solve(lb, ub)
    assert u.vector().min() >= 0
@skip_if_not_PETSc
def test_snes_solver_bound_vectors(F, u, bcs, J, snes_solver_parameters_bounds,
                                   lb, ub, parameter_degree, \
                                   parameter_backend):
    # Same as test_snes_solver_bound_functions, but the bounds are passed as
    # GenericVectors instead of Functions.
    u.interpolate(Constant(-1000.0))
    problem = NonlinearVariationalProblem(F, u, bcs, J)
    solver = NonlinearVariationalSolver(problem)
    solver.parameters.update(snes_solver_parameters_bounds)
    solver.solve(lb.vector(), ub.vector())
    assert u.vector().min() >= 0
|
from distutils.core import setup, Extension

# C extension exposing the scrypt proof-of-work hash used by Vertcoin.
# NOTE(review): distutils is deprecated (removed in Python 3.12); consider
# migrating to setuptools.
vtc_scrypt_module = Extension('vtc_scrypt',
                              sources = ['scryptmodule.c',
                                         'scrypt.c'],
                              include_dirs=['.'], extra_compile_args=['-O2'])

setup (name = 'vtc_scrypt',
       version = '1.0',
       description = 'Bindings for scrypt proof of work used by Vertcoin',
       ext_modules = [vtc_scrypt_module])
|
"""
tkRAD - tkinter Rapid Application Development library
(c) 2013+ Raphaël SEBAN <motus@laposte.net>
This program is free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program.
If not, see: http://www.gnu.org/licenses/
"""
import tkinter as TK
from . import rad_widget_base as RW
class RADCanvas (RW.RADWidgetBase, TK.Canvas):
    r"""
    generic tkinter.Canvas + RADWidgetBase subclass;
    implements all tkRAD app-wide services by default;
    acts as a tkinter.Canvas widget;
    """
    # class constant defs
    # Default widget options; each instance gets a private copy in __init__,
    # so mutating one instance's CONFIG never affects the class default.
    CONFIG = {
        "borderwidth": 0,
        "highlightthickness": 0,
        "relief": TK.FLAT,
    } # end of CONFIG
    def __init__ (self, master=None, **kw):
        # default values
        # Shadow the class-level CONFIG with a per-instance copy merged with
        # the caller's keyword overrides.
        self.CONFIG = self.CONFIG.copy()
        self.CONFIG.update(kw)
        # super inits
        TK.Canvas.__init__(self, master)
        # _only_tk() (inherited from RADWidgetBase) presumably filters CONFIG
        # down to options tkinter.Canvas accepts -- confirm in rad_widget_base.
        self.configure(**self._only_tk(self.CONFIG))
        RW.RADWidgetBase.__init__(self, master, **self.CONFIG)
    # end def
|
__license__ = 'GPL v3'
__copyright__ = '2010, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import time
from functools import partial
from PyQt5.Qt import QTimer, QDialog, QDialogButtonBox, QCheckBox, QVBoxLayout, QLabel, Qt
from calibre.gui2 import error_dialog
from calibre.gui2.actions import InterfaceAction
class Choose(QDialog):
    """Modal dialog asking which of several editable formats to open.

    The chosen format ends up in ``self.fmt``; the "always ask" checkbox
    state is persisted to the tweak_book preferences on accept.
    """

    def __init__(self, fmts, parent=None):
        QDialog.__init__(self, parent)
        self.l = l = QVBoxLayout(self)
        self.setLayout(l)
        self.setWindowTitle(_('Choose format to edit'))
        self.la = la = QLabel(_(
            'This book has multiple formats that can be edited. Choose the format you want to edit.'))
        l.addWidget(la)
        self.rem = QCheckBox(_('Always ask when more than one format is available'))
        self.rem.setChecked(True)
        l.addWidget(self.rem)
        self.bb = bb = QDialogButtonBox(self)
        l.addWidget(bb)
        bb.accepted.connect(self.accept)
        bb.rejected.connect(self.reject)
        self.buts = buts = []
        # One accept-role button per format; clicking records the format via
        # chosen() and then accepts the dialog.
        for fmt in fmts:
            b = bb.addButton(fmt.upper(), bb.AcceptRole)
            b.clicked.connect(partial(self.chosen, fmt))
            buts.append(b)
        self.fmt = None
        self.resize(self.sizeHint())

    def chosen(self, fmt):
        # Slot: remember which format button was pressed.
        self.fmt = fmt

    def accept(self):
        # Persist the "always ask" preference before closing the dialog.
        from calibre.gui2.tweak_book import tprefs
        tprefs['choose_tweak_fmt'] = self.rem.isChecked()
        QDialog.accept(self)
class TweakEpubAction(InterfaceAction):
    """'Edit book' GUI action: opens the selected book in the ebook editor."""

    name = 'Tweak ePub'
    action_spec = (_('Edit book'), 'edit_book.png', _('Edit books in the EPUB or AZW formats'), _('T'))
    dont_add_to = frozenset(['context-menu-device'])
    action_type = 'current'
    accepts_drops = True

    def accept_enter_event(self, event, mime_data):
        # Only accept drags that originate from the calibre library view.
        if mime_data.hasFormat("application/calibre+from_library"):
            return True
        return False

    def accept_drag_move_event(self, event, mime_data):
        if mime_data.hasFormat("application/calibre+from_library"):
            return True
        return False

    def drop_event(self, event, mime_data):
        mime = 'application/calibre+from_library'
        if mime_data.hasFormat(mime):
            # Defer the actual handling so the drop event can complete first.
            self.dropped_ids = tuple(map(int, str(mime_data.data(mime)).split()))
            QTimer.singleShot(1, self.do_drop)
            return True
        return False

    def do_drop(self):
        # Edit only the first dropped book.
        book_ids = self.dropped_ids
        del self.dropped_ids
        if book_ids:
            self.do_tweak(book_ids[0])

    def genesis(self):
        self.qaction.triggered.connect(self.tweak_book)

    def tweak_book(self):
        # Entry point from the toolbar/menu action: edit the current row.
        row = self.gui.library_view.currentIndex()
        if not row.isValid():
            return error_dialog(self.gui, _('Cannot Edit book'),
                    _('No book selected'), show=True)
        book_id = self.gui.library_view.model().id(row)
        self.do_tweak(book_id)

    def do_tweak(self, book_id):
        """Launch the external ebook-edit process for `book_id`.

        Picks the format to edit, asking the user when several editable
        formats exist and the 'always ask' preference is enabled.
        """
        if self.gui.current_view() is not self.gui.library_view:
            return error_dialog(self.gui, _('Cannot edit book'), _(
                'Editing of books on the device is not supported'), show=True)
        from calibre.ebooks.oeb.polish.main import SUPPORTED
        db = self.gui.library_view.model().db
        fmts = db.formats(book_id, index_is_id=True) or ''
        fmts = [x.upper().strip() for x in fmts.split(',')]
        tweakable_fmts = set(fmts).intersection(SUPPORTED)
        if not tweakable_fmts:
            return error_dialog(self.gui, _('Cannot edit book'),
                    _('The book must be in the %s formats to edit.'
                        '\n\nFirst convert the book to one of these formats.') % (_(' or ').join(SUPPORTED)),
                    show=True)
        from calibre.gui2.tweak_book import tprefs
        tprefs.refresh()  # In case they were changed in a Tweak Book process
        if len(tweakable_fmts) > 1:
            if tprefs['choose_tweak_fmt']:
                # Ask the user which of the editable formats to open.
                d = Choose(sorted(tweakable_fmts, key=tprefs.defaults['tweak_fmt_order'].index), self.gui)
                if d.exec_() != d.Accepted:
                    return
                tweakable_fmts = {d.fmt}
            else:
                # Pick automatically by the user's preferred format order,
                # falling back to the default order.
                fmts = [f for f in tprefs['tweak_fmt_order'] if f in tweakable_fmts]
                if not fmts:
                    fmts = [f for f in tprefs.defaults['tweak_fmt_order'] if f in tweakable_fmts]
                tweakable_fmts = {fmts[0]}
        fmt = tuple(tweakable_fmts)[0]
        path = db.new_api.format_abspath(book_id, fmt)
        if path is None:
            return error_dialog(self.gui, _('File missing'), _(
                'The %s format is missing from the calibre library. You should run'
                ' library maintenance.') % fmt, show=True)
        tweak = 'ebook-edit'
        try:
            self.gui.setCursor(Qt.BusyCursor)
            if tprefs['update_metadata_from_calibre']:
                db.new_api.embed_metadata((book_id,), only_fmts={fmt})
            notify = '%d:%s:%s:%s' % (book_id, fmt, db.library_id, db.library_path)
            self.gui.job_manager.launch_gui_app(tweak, kwargs=dict(path=path, notify=notify))
            # NOTE(review): presumably gives the launched editor time to open
            # the file before the busy cursor is cleared -- confirm.
            time.sleep(2)
        finally:
            self.gui.unsetCursor()
|
test = {
'name': 'Question 3',
'points': 1,
'suites': [
{
'cases': [
{
'code': r"""
>>> connection.execute(query_q3).fetchall() == [('BERNIE 2016', 3),('CARSON AMERICA', 958),('CRUZ FOR PRESIDENT', 2000),('JEB 2016, INC.', 2000),('MARCO RUBIO FOR PRESIDENT', 2460),('KASICH FOR AMERICA INC', 3000),('DONALD J. TRUMP FOR PRESIDENT, INC.', 19000),('HILLARY FOR AMERICA', 61200),('4 MA PAC', None),('AAPI VICTORY FUND', None),('ABSOLUTE ENERGY PAC', None),('ACADIA HEALTHCARE COMPANY INC FEDPAC', None),('ACE CASH EXPRESS, INC. PAC', None),('ACTRIGHT', None),('ADAM SMITH FOR CONGRESS COMMITTEE', None),('ADVANCE AMERICA CASH ADVANCE CENTERS INC. PAC', None),('AES CORPORATION POLITICAL ACTION COMMITTEE; THE', None),('AFL-CIO COPE POLITICAL CONTRIBUTIONS COMMITTEE', None),('AIRCRAFT OWNERS AND PILOTS ASSOCIATION POLITICAL ACTION COMMITTEE', None),('AKIN GUMP STRAUSS HAUER & FELD LLP CIVIC ACTION COMMITTEE (AKA AGSH&F CIVIC ACTION COMMITT', None)]
True
""",
'hidden': False,
'locked': False
},
],
'scored': True,
'setup': '',
'teardown': '',
'type': 'doctest'
}
]
}
|
from itertools import product
import numpy as np
import bpy
from bpy.props import BoolVectorProperty, EnumProperty
from mathutils import Matrix
from sverchok.node_tree import SverchCustomTreeNode
from sverchok.utils.nodes_mixins.recursive_nodes import SvRecursiveNode
from sverchok.data_structure import dataCorrect, updateNode
EDGES = [
(0, 1), (1, 3), (3, 2), (2, 0), # bottom edges
(4, 5), (5, 7), (7, 6), (6, 4), # top edges
(0, 4), (1, 5), (2, 6), (3, 7) # sides
]
def generate_matrix(maxmin, dims, to_2d):
    """Matrix placing a unit box onto the bounding box.

    `maxmin` holds per-axis [max, min] pairs.  The result translates to the
    box center and scales each axis by the box extent; in 2D mode the Z
    translation is 0 and the Z scale is 1.
    """
    midpoint = [(mx + mn) * .5 for mx, mn in maxmin[:dims]]
    extent = [mx - mn for mx, mn in maxmin[:dims]]
    if to_2d:
        midpoint.append(0)
        extent.append(1)
    matrix = Matrix.Translation(midpoint)
    for axis, size in enumerate(extent):
        matrix[axis][axis] = size
    return matrix
def generate_mean_np(verts, dims, to_2d):
    """Mean of the first `dims` coordinates of a NumPy vertex array.

    Returns a single-element list holding the mean point; a trailing 0 is
    appended when a 2D result is requested.
    """
    mean = (verts[:, :dims].sum(axis=0) / len(verts)).tolist()
    if to_2d:
        mean.append(0)
    return [mean]
def generate_mean(verts, dims, to_2d):
    """Mean of the first `dims` coordinates of a list of vertices (pure Python)."""
    count = len(verts)
    mean = [sum(axis_vals) / count for axis_vals in zip(*verts)][:dims]
    if to_2d:
        mean.append(0)
    return [mean]
def bounding_box(verts,
                 box_dimensions='2D',
                 output_verts=True,
                 output_mat=True,
                 output_mean=True,
                 output_limits=True):
    '''
    verts expects a list of level 3 [[[0,0,0],[1,1,1]..],[]]
    returns per sublist:
        verts_out: vertices of the bounding box
        edges_out: edges of the bounding box
        mean_out: mean of all vertices
        mat_out: Matrix that would transform a box of 1 unit into the bbox
        *min_vals, Min X, Y and Z of the bounding box
        *max_vals, Max X, Y and Z of the bounding box
        *size_vals Size X, Y and Z of the bounding box
    '''
    verts_out = []
    edges_out = []
    edges = EDGES
    mat_out = []
    mean_out = []
    # One list per axis (X, Y, Z); only the first `dims` are filled.
    min_vals = [[], [], []]
    max_vals = [[], [], []]
    size_vals = [[], [], []]
    to_2d = box_dimensions == '2D'
    dims = int(box_dimensions[0])
    # The max/min pass is only needed when something derived from it is wanted.
    calc_maxmin = output_mat or output_verts or output_limits
    for vec in verts:
        if calc_maxmin:
            # Avoid re-copying inputs that are already NumPy arrays.
            if isinstance(vec, np.ndarray):
                np_vec = vec
            else:
                np_vec = np.array(vec)
            bbox_max = np.amax(np_vec, axis=0)
            bbox_min = np.amin(np_vec, axis=0)
            # maxmin: per-axis [max, min] pairs, as plain Python lists.
            maxmin = np.concatenate([bbox_max, bbox_min]).reshape(2,3).T.tolist()
        if output_verts:
            # Cartesian product of per-axis extremes yields the 8 corners;
            # the double reversal restores conventional corner ordering.
            out = list(product(*reversed(maxmin)))
            v_out = [l[::-1] for l in out[::-1]]
            if to_2d:
                # Keep only the 4 bottom corners, flattened to Z=0, and the
                # 4 bottom edges.
                verts_out.append([[v[0], v[1], 0] for v in v_out[:4]])
                edges = edges[:4]
            else:
                verts_out.append(v_out)
            edges_out.append(edges)
        if output_mat:
            mat_out.append(generate_matrix(maxmin, dims, to_2d))
        if output_mean:
            # Reuse np_vec when the max/min pass already built it.
            if calc_maxmin:
                mean_out.append(generate_mean_np(np_vec, dims, to_2d))
            else:
                if isinstance(vec, np.ndarray):
                    mean_out.append(generate_mean_np(vec, dims, to_2d))
                else:
                    mean_out.append(generate_mean(vec, dims, to_2d))
        if output_limits:
            for i in range(dims):
                min_vals[i].append([maxmin[i][1]])
                max_vals[i].append([maxmin[i][0]])
                size_vals[i].append([maxmin[i][0] - maxmin[i][1]])
    return (verts_out,
            edges_out,
            mean_out,
            mat_out,
            *min_vals,
            *max_vals,
            *size_vals)
class SvBBoxNodeMk3(bpy.types.Node, SverchCustomTreeNode, SvRecursiveNode):
    """
    Triggers: Bbox 2D or 3D
    Tooltip: Get vertices bounding box (vertices, sizes, center)
    """
    bl_idname = 'SvBBoxNodeMk3'
    bl_label = 'Bounding box'
    bl_icon = 'SHADING_BBOX'
    sv_icon = 'SV_BOUNDING_BOX'

    def update_sockets(self, context):
        # Show/hide the 9 Min/Max/Size output sockets (indices 4..12) to
        # match the toggle matrix and the selected dimensionality: a socket
        # is visible only when its toggle is on AND its axis < dims.
        bools = [self.min_list, self.max_list, self.size_list]
        dims = int(self.box_dimensions[0])
        for i in range(3):
            for j in range(3):
                out_index = 4 + j + 3*i
                hidden = self.outputs[out_index].hide_safe
                if bools[i][j] and j < dims:
                    if hidden:
                        self.outputs[out_index].hide_safe = False
                else:
                    self.outputs[out_index].hide_safe = True
        updateNode(self, context)

    # Per-axis visibility toggles for the limit output sockets.
    min_list: BoolVectorProperty(
        name='Min', description="Show Minimum values sockets", size=3, update=update_sockets)
    max_list: BoolVectorProperty(
        name='Max', description="Show Maximum values sockets", size=3, update=update_sockets)
    size_list: BoolVectorProperty(
        name='Size', description="Show Size values sockets", size=3, update=update_sockets)
    # NOTE(review): name is a typo of "implementation_modes"; kept as-is since
    # renaming is cosmetic only.
    implentation_modes = [
        ("2D", "2D", "Outputs Rectangle over XY plane", 0),
        ("3D", "3D", "Outputs standard bounding box", 1)]
    box_dimensions: EnumProperty(
        name='Implementation', items=implentation_modes,
        description='Choose calculation method',
        default="3D", update=update_sockets)

    def draw_buttons(self, context, layout):
        # 2D/3D switch plus a Min/Max/Size toggle matrix (one row per kind,
        # one column per visible axis).
        layout .prop(self, 'box_dimensions', expand=True)
        col = layout.column(align=True)
        titles = ["Min", "Max", "Size"]
        prop = ['min_list', 'max_list', 'size_list']
        dims = int(self.box_dimensions[0])
        for i in range(3):
            row = col.row(align=True)
            row.label(text=titles[i])
            row2 = row.row(align=True)
            for j in range(dims):
                row2 .prop(self, prop[i], index=j, text='XYZ'[j], toggle=True)

    def sv_init(self, context):
        # Socket order matters: indices 0-3 are Vertices/Edges/Mean/Center,
        # 4-12 are Min/Max/Size x XYZ (see update_sockets / process_data).
        son = self.outputs.new
        self.inputs.new('SvVerticesSocket', 'Vertices').is_mandatory = True
        son('SvVerticesSocket', 'Vertices')
        son('SvStringsSocket', 'Edges')
        son('SvVerticesSocket', 'Mean')
        son('SvMatrixSocket', 'Center')
        titles = ['Min', 'Max', 'Size']
        for j in range(3):
            for i in range(3):
                son('SvStringsSocket', titles[j] + ' ' + 'XYZ'[i])
        self.update_sockets(context)

    def migrate_from(self, old_node):
        # Mk2 nodes stored the 2D/3D choice under `dimensions`.
        self.box_dimensions = old_node.dimensions

    def process_data(self, params):
        # Only compute the outputs that are actually connected.
        verts = params[0]
        output_mat = self.outputs['Center'].is_linked
        output_mean = self.outputs['Mean'].is_linked
        output_verts = self.outputs['Vertices'].is_linked
        output_limits = any(s.is_linked for s in self.outputs[4:])
        return bounding_box(verts,
                            box_dimensions=self.box_dimensions,
                            output_verts=output_verts,
                            output_mat=output_mat,
                            output_mean=output_mean,
                            output_limits=output_limits)
def register():
    # Blender add-on hook: register the node class with bpy.
    bpy.utils.register_class(SvBBoxNodeMk3)
def unregister():
    # Blender add-on hook: remove the node class from bpy.
    bpy.utils.unregister_class(SvBBoxNodeMk3)
|
import json
from collections import ChainMap
# Fallback configuration used when a key is absent from the on-disk file.
CONFIG_DEFAULTS = {"verbose": "info", "backend": "local", "check_updates": True}
class FileConfig:
    """JSON-backed configuration with layered defaults.

    Lookups consult the user data loaded from ``path`` first and then the
    ``defaults`` mapping (via :class:`collections.ChainMap`).  Only the user
    layer is mutable and only it is written back to disk by :meth:`dump`.
    """

    def __init__(self, path, data, defaults=None):
        """
        :param path: Path of the backing JSON file (used by :meth:`dump`).
        :param data: User configuration mapping (the writable layer).
        :param defaults: Read-only fallback values.
        """
        self.path = path
        if defaults is None:
            defaults = {}
        # maps[0] is the user data; defaults sit behind it.
        self._data = ChainMap(data, defaults)
        self._validators = {}

    def validator(self, key):
        """Register a configuration key validator function.

        Usable as a decorator.  Returns the decorated function unchanged so
        the decorated name is not clobbered.
        """
        def _inner(func):
            self._validators[key] = func
            # BUGFIX: previously returned None, which replaced the decorated
            # function with None at the call site.
            return func
        return _inner

    def _validate_value(self, key, value):
        # Run the registered validator for `key`, if any (validators are
        # expected to raise on invalid values).
        if key in self._validators:
            self._validators[key](value)

    def get(self, key, default=None):
        """Return the validated value for `key`, or `default` when unset/None."""
        value = self._data.get(key)
        if value is None:
            return default
        self._validate_value(key, value)
        return value

    def __getitem__(self, key):
        value = self._data[key]
        self._validate_value(key, value)
        return value

    def __setitem__(self, key, value):
        self._validate_value(key, value)
        self._data[key] = value

    def __delitem__(self, key):
        # Only the user layer is mutable; deleting a key that exists solely
        # in the defaults layer is a no-op.  (BUGFIX: `del self._data[key]`
        # raised KeyError for defaults-only keys even though the membership
        # guard clearly intended deletion to be safe.)
        self._data.maps[0].pop(key, None)

    def __len__(self):
        return len(self._data)

    def __iter__(self):
        return iter(self._data)

    def dump(self):
        """Dump the user layer of the configuration to disk as JSON."""
        with open(str(self.path), "w+") as config_file:
            json.dump(dict(self._data.maps[0]), config_file, indent=4, sort_keys=True)

    @classmethod
    def load(cls, path, defaults=None):
        """Load configuration from a file.

        Reads configuration from `file` and returns a :class:`Config` instance
        with the configuration. The `defaults` will be merged into the
        configuration.  A missing file yields an empty user layer.

        :param path str: Path to the configuration file.
        :param defaults dict: A set of defaults to merge into the configuration.
        """
        try:
            with open(str(path)) as config_file:
                data = json.load(config_file)
        except FileNotFoundError:
            data = {}
        return cls(path=path, data=data, defaults=defaults)
def config_from_path(path):
    """Return a FileConfig loaded from ``path`` with the standard defaults."""
    loaded = FileConfig.load(path, defaults=CONFIG_DEFAULTS)
    return loaded
# Module-level singleton configuration for the current working directory.
# NOTE(review): performs file I/O at import time (a missing file falls back
# to an empty user layer).
config = config_from_path(".gwfconf.json")
|
"""
This module contains experimental code for using the (extend) UHSDR API
This program is free software: you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free Software
Foundation, either version 3 of the License, or (at your option) any later
version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
this program. If not, see <http://www.gnu.org/licenses/>.
"""
from __future__ import print_function
__author__ = "DB4PLE"
__copyright__ = "Copyright 2018, UHSDR project"
__credits__ = ["DB4PLE"]
__license__ = "GPLv3"
__status__ = "Prototype"
import sys
import os
class CatCmdFt817:
    """
    List of used/supported FT817 CAT command codes
    This list includes the officially known FT817 codes (including the "undocumented" ones)
    """
    READ_EEPROM = 0xBB   # read a 2-byte value from an EEPROM address
    WRITE_EEPROM = 0xBC  # write a 2-byte value to an EEPROM address
class CatCmd(CatCmdFt817):
    """
    This list extends the FT817 by the special UHSDR codes implemented only in the UHSDR dialect
    of FT817 CAT (which must be different from the FT817 ones)
    """
    # Identification opcode specific to UHSDR firmware (see catCommands.readUHSDR).
    UHSDR_ID = 0x42
    """
    this will return the bytes ['U', 'H' , 'S', 'D', 'R' ] and is used to identify an UHSDR with high enough firmware level
    """
class UhsdrConfigIndex:
    """
    this is a completely incomplete list of config value indicies as used in the UHSDR firmware
    we list only the absolutely necessary ids here for the moment (those must never change in different firmware versions, would break this code)
    """
    VER_MAJOR = 176          # firmware major version number
    VER_MINOR = 310          # firmware minor version number
    VER_BUILD = 171          # firmware build number
    NUMBER_OF_ENTRIES = 407  # count of config values exposed by the firmware
def eprint(*args, **kwargs):
    """
    a small function to print to stderr, used for error and logging messages;
    accepts the same arguments as print()
    """
    print(*args, file=sys.stderr, **kwargs)
class catSerial:
    """
    Low Level FT817 CAT protocol handling on a serial communication
    """
    def __init__(self, comObj):
        # comObj: any object with write(bytes) -> count and read(count) -> bytes
        # (typically a pyserial Serial instance).
        self.comObj = comObj

    def sendCommand(self, command):
        """Send a CAT command frame; True when all 5 bytes were written."""
        written = self.comObj.write(command)
        return written == 5

    def readResponse(self, count):
        """Read `count` bytes; returns (complete, payload)."""
        payload = self.comObj.read(count)
        return (len(payload) == count, payload)
class catCommands:
    """
    CAT API: direct access to CAT API actions; each logical API function has
    its direct counterpart in this class.

    We do not implement any extra control logic in here, just the call of the
    API function and returning the response.  This may be enriched to have all
    available CAT API functions, right now it is just what we need now.
    """
    def __init__(self, catObj):
        # catObj: low-level transport providing sendCommand()/readResponse()
        # (see catSerial).
        self.catObj = catObj

    def execute(self, cmd, count):
        """Send `cmd` and read a `count`-byte response.

        Returns (ok, response) where response is a bytearray; on send failure
        the response is empty.
        """
        if self.catObj.sendCommand(cmd):
            ok, res = self.catObj.readResponse(count)
            return ok, bytearray(res)
        return (False, bytearray([]))

    def readEEPROM(self, addr):
        """Read a 16-bit little-endian value from EEPROM address `addr`.

        Returns the value, or False on communication failure.  Beware that a
        legitimate value of 0 is also falsy; compare against False explicitly.
        """
        cmd = bytearray([(addr & 0xff00) >> 8, addr & 0xff, 0x00, 0x00, CatCmd.READ_EEPROM])
        ok, res = self.execute(cmd, 2)
        if ok:
            return res[1] * 256 + res[0]
        return ok

    def readUHSDR(self):
        """True when the rig answers the UHSDR identification command."""
        cmd = bytearray([0x00, 0x00, 0x00, 0x00, CatCmd.UHSDR_ID])
        ok, res = self.execute(cmd, 5)
        # BUGFIX: bytearray("UHSDR") raises TypeError on Python 3 (a str needs
        # an encoding); compare against a bytes literal instead.
        return res == bytearray(b"UHSDR")

    def writeEEPROM(self, addr, value16bit):
        """Write a 16-bit value (little-endian) to EEPROM address `addr`."""
        cmd = bytearray([(addr & 0xff00) >> 8, addr & 0xff,
                         (value16bit & 0xff) >> 0, (value16bit & 0xff00) >> 8,
                         CatCmd.WRITE_EEPROM])
        ok, res = self.execute(cmd, 1)
        return ok

    def readUHSDRConfig(self, index):
        """Read UHSDR configuration value `index` (mapped above 0x8000)."""
        return self.readEEPROM(index + 0x8000)

    def writeUHSDRConfig(self, index, value):
        """Write UHSDR configuration value `index` (mapped above 0x8000)."""
        return self.writeEEPROM(index + 0x8000, value)
class UhsdrConfig():
    """
    CONFIG MANAGEMENT: Handling of reading / writing TRX configurations, detection of TRX presence etc.
    This class represents high-level actions, should involve proper parameter checking etc.
    """
    def __init__(self, catObj):
        # catObj: a catCommands instance used for all CAT traffic.
        self.catObj = catObj
    def getVersion(self):
        """
        return firmware version as integer tuple (major,minor,build)
        or (False,False,False) if something goes wrong
        """
        # NOTE(review): readUHSDRConfig returns False per failing element, so
        # a partial failure yields a mixed tuple rather than (False,False,False).
        return (self.catObj.readUHSDRConfig(UhsdrConfigIndex.VER_MAJOR),
                self.catObj.readUHSDRConfig(UhsdrConfigIndex.VER_MINOR),
                self.catObj.readUHSDRConfig(UhsdrConfigIndex.VER_BUILD))
    def isUhsdrConnected(self):
        """
        we test if an UHSDR with extended API is connected by using an identification API call not present
        on a FT817 or older UHSDR / mcHF firmwares
        returns: True if a suitable TRX is connected, False otherwise
        """
        return self.catObj.readUHSDR()
    #
    def getConfigValueCount(self):
        # Number of configuration entries exposed by the connected firmware.
        return self.catObj.readUHSDRConfig(UhsdrConfigIndex.NUMBER_OF_ENTRIES)
    def getValue(self, index):
        # TODO: do some range checking here
        return self.catObj.readUHSDRConfig(index)
    def setValue(self, index, value):
        # TODO: do some range checking here
        # Write, then read back and compare to verify the value was stored.
        retval = False
        if self.catObj.writeUHSDRConfig(index, value):
            retval = value == self.getValue(index)
        return retval
    def configToJson(self):
        """
        read the configuration from TRX into a data dictionary
        returns tuple with boolean success state and read data
        right now the returned JSON structure is quite simple
        we store the version number as list of 3 integers under the 'version' key
        we store the date and time of backup as UTC under the 'when' key
        we store each configuration value as addr/value pair under the 'eeprom' key
        """
        from datetime import datetime
        retval = False
        valList = []
        self.data = {}
        self.data['version'] = self.getVersion()
        self.data['when'] = str(datetime.utcnow())
        self.data['eeprom'] = []
        numberOfValues = self.getConfigValueCount()
        for index in range(numberOfValues):
            val = self.getValue(index)
            valList.append(val)
            self.data['eeprom'].append({ 'addr' : index , 'value' : val })
        # NOTE(review): with `or`, retval is True whenever ANY value was read,
        # even if some reads returned False; `and` ("all reads succeeded and
        # there was at least one") looks like the intent -- confirm.
        retval = all(val is not False for val in valList) or len(valList) != 0
        return retval,self.data
    def jsonToConfig(self,data):
        """
        write the configuration in passed data dictionary to TRX
        returns tuple with boolean success state and human readable return msg
        data dictionary must conform to format generated by configToJson()
        """
        retval = True
        retmsg = "OK"
        # Consistency check: the entry count stored inside the dump (at index
        # NUMBER_OF_ENTRIES) must match the number of eeprom records present.
        if data['when'] != None and len(data['version']) == 3 and len(data['eeprom']) == data['eeprom'][UhsdrConfigIndex.NUMBER_OF_ENTRIES]['value']:
            numberOfValues = data['eeprom'][UhsdrConfigIndex.NUMBER_OF_ENTRIES]['value']
            # we do not restore index 0 as it contains EEPROM type. This is never changed during configuration backup, too dangerous
            for index in range(numberOfValues):
                addr = data['eeprom'][index]['addr']
                value = data['eeprom'][index]['value']
                if addr != 0:
                    if self.setValue(addr,value) == False:
                        retmsg = "Restoring value {} at addr {} failed".format(value,addr)
                        retval = False
                        break
        else:
            retmsg = "Configuration data failed consistency check"
            retval = False
        return retval,retmsg
|
"""
Django settings for wikicoding project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
import os

# Project root (two levels above this settings file).
BASE_DIR = os.path.dirname(os.path.dirname(__file__))

# SECURITY: secret key redacted; keep it out of version control.
SECRET_KEY = '<<redacted>>'

# Production settings: debugging disabled.
DEBUG = False
TEMPLATE_DEBUG = False

ALLOWED_HOSTS = ['.wikicoding.org']

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.sites', # django 1.6.2
    'django.contrib.humanize',
    # django-wiki and its dependencies.
    'django_nyt',
    'mptt',
    'sekizai',
    'sorl.thumbnail',
    'wiki',
    'wiki.plugins.attachments',
    'wiki.plugins.macros',
    'south',
    'django.contrib.sitemaps'
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

ROOT_URLCONF = 'wikicoding.urls'
WSGI_APPLICATION = 'wikicoding.wsgi.application'

# MySQL database; HOST/PORT are omitted, so Django falls back to the local
# socket / default port.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'wikicoding_db',
        'USER': '<<redacted>>',
        'PASSWORD': '<<redacted>>',
    }
}

# Internationalization / localization.
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

STATIC_URL = '/static/'
SITE_ID = 1

TEMPLATE_CONTEXT_PROCESSORS = (
    "django.contrib.auth.context_processors.auth",
    "django.core.context_processors.debug",
    "django.core.context_processors.i18n",
    "django.core.context_processors.media",
    "django.core.context_processors.request",
    "django.core.context_processors.static",
    "django.core.context_processors.tz",
    "django.contrib.messages.context_processors.messages",
    # Required by django-wiki templates.
    "sekizai.context_processors.sekizai",
)

# django-wiki: anonymous users may read/write/create but not upload;
# anonymous edits are logged with their IP address.
WIKI_ANONYMOUS_WRITE = True
WIKI_ANONYMOUS_CREATE = True
WIKI_ANONYMOUS_UPLOAD = False
WIKI_LOG_IPS_ANONYMOUS = True
# Site administrators who receive error notifications.  ADMINS must be a
# sequence of (name, email) PAIRS; the trailing comma is essential -- without
# it the expression collapses to a single 2-tuple of strings (BUGFIX).
ADMINS = (
    ('<<redacted>>', '<<redacted>>'),
)
# Absolute path where `collectstatic` gathers static files for serving.
STATIC_ROOT = os.path.join(BASE_DIR, "wiki/static/")
|
import unittest
from totalopenstation.formats import Point
from totalopenstation.formats.polar import BasePoint, PolarPoint
class TestPolar(unittest.TestCase):
    """Conversion of polar survey measurements to cartesian Points."""

    def setUp(self):
        # Two station base points; bp1 differs only in instrument height (ih).
        self.bp0 = BasePoint(x='0', y='0', z='0', ih='1.0', b_zero_st='0.0')
        self.bp1 = BasePoint(x='0', y='0', z='0', ih='1.324', b_zero_st='0.0')
        # p0/p1/p2 exercise the three supported angle units ('deg', 'gon',
        # 'dms') and both coordinate orders ('ENZ', 'NEZ').
        self.p0 = PolarPoint(angle_unit='deg',
                             z_angle_type='z',
                             dist_type='s',
                             dist=9,
                             angle=180,
                             z_angle=90,
                             th=0,
                             base_point=self.bp0,
                             pid=1,
                             text='Test Point',
                             coordorder='ENZ')
        self.p1 = PolarPoint(angle_unit='gon',
                             z_angle_type='z',
                             dist_type='s',
                             dist=24.567,
                             angle=34.120,
                             z_angle=100,
                             th=1.500,
                             base_point=self.bp0,
                             pid=2,
                             text='Real Point',
                             coordorder='NEZ')
        self.p2 = PolarPoint(angle_unit='dms',
                             z_angle_type='z',
                             dist_type='s',
                             dist=13.825,
                             angle=35.45100,
                             z_angle=91.17510,
                             th=1.300,
                             base_point=self.bp1,
                             pid=3,
                             text='Real Point',
                             coordorder='ENZ')

    def test_polar(self):
        # Reference cartesian coordinates for each fixture point.
        p0_test = Point(0.0, -9.0, 1.0)
        self.assertAlmostEqual(self.p0.to_point().x, p0_test.x)
        self.assertAlmostEqual(self.p0.to_point().y, p0_test.y)
        self.assertAlmostEqual(self.p0.to_point().z, p0_test.z)
        p1_test = Point(21.1222392859, 12.5454572076, -0.5)
        self.assertAlmostEqual(self.p1.to_point().x, p1_test.x)
        self.assertAlmostEqual(self.p1.to_point().y, p1_test.y)
        self.assertAlmostEqual(self.p1.to_point().z, p1_test.z)
        p2_test = Point(8.0757244, 11.21674196, -0.2890493)
        self.assertAlmostEqual(self.p2.to_point().x, p2_test.x)
        self.assertAlmostEqual(self.p2.to_point().y, p2_test.y)
        self.assertAlmostEqual(self.p2.to_point().z, p2_test.z)
|
from .exceptions import ReadError
from .parsers import ebml
from .mkv import MKV
from .parsers import ebml
import logging
import codecs
import os
import io
__all__ = ['Subtitle']
logger = logging.getLogger(__name__)
class Subtitle(object):
    """Subtitle extractor for Matroska Video File.

    Currently only SRT subtitles stored without lacing are supported.
    """
    def __init__(self, stream):
        """Read the available subtitles from a MKV file-like object."""
        self._stream = stream
        # Use the MKV class to parse the META information.
        mkv = MKV(stream)
        self._timecode_scale = mkv.info.timecode_scale
        self._subtitles = mkv.get_srt_subtitles_track_by_language()

    def has_subtitle(self, language):
        """Return True when an SRT track exists for `language`."""
        return language in self._subtitles

    def write_subtitle_to_stream(self, language):
        """Write a single subtitle to stream or return None if language not available"""
        if language in self._subtitles:
            subtitle = self._subtitles[language]
            # BUGFIX: this log call used to sit AFTER the return statement and
            # was never executed.
            logger.info("Writing subtitle for language %s to stream", language)
            return _write_track_to_srt_stream(self._stream, subtitle.number, self._timecode_scale)
        else:
            logger.info("Subtitle for language %s not found", language)

    def write_subtitles_to_stream(self):
        """Write all available subtitles as streams to a dictionary with language as the key"""
        subtitles = dict()
        for language in self._subtitles:
            subtitles[language] = self.write_subtitle_to_stream(language)
        return subtitles
def _write_track_to_srt_stream(mkv_stream, track, timecode_scale):
    """Render every blockgroup of `track` as SRT entries into a StringIO."""
    srt = io.StringIO()
    entry_no = 0
    for cluster in _parse_segment(mkv_stream, track):
        for group in cluster.blockgroups:
            entry_no += 1
            time_range = _print_time_range(timecode_scale, cluster.timecode,
                                           group.block.timecode, group.duration)
            text = codecs.decode(group.block.data.read(), 'utf-8')
            srt.write('%d\n%s\n%s\n\n' % (entry_no, time_range, text))
    return srt
def _parse_segment(stream, track):
    """Parse the MKV stream and return the Clusters holding `track` blockgroups."""
    stream.seek(0)
    specs = ebml.get_matroska_specs()
    # Find all level 1 Cluster elements and their subelements.  Speed up this
    # process by excluding all other currently known level 1 elements.
    segments = []
    try:
        segments = ebml.parse(stream, specs,
                              include_element_names=['Segment', 'Cluster', 'BlockGroup',
                                                     'Timecode', 'Block', 'BlockDuration', ],
                              max_level=3)
    except ReadError:
        # Truncated/unreadable stream.  (BUGFIX: `segments` used to be left
        # unbound here, so the loop below raised NameError instead of
        # degrading gracefully.)
        pass
    clusters = []
    if not segments:
        return clusters
    for cluster in segments[0].data:
        _parse_cluster(track, clusters, cluster)
    return clusters
def _parse_cluster(track, clusters, cluster):
    """Append a Cluster to `clusters` when `cluster` holds blockgroups of `track`."""
    groups = []
    cluster_timecode = None
    for element in cluster.data:
        if element.name == 'BlockGroup':
            _parse_blockgroup(track, groups, element)
        elif element.name == 'Timecode':
            cluster_timecode = element.data
    # Keep only clusters that carry both a timecode and relevant blockgroups.
    if groups and cluster_timecode is not None:
        clusters.append(Cluster(cluster_timecode, groups))
def _parse_blockgroup(track, blockgroups, blockgroup):
    """Append a BlockGroup to `blockgroups` when its Block belongs to `track`."""
    block = None
    duration = None
    for element in blockgroup.data:
        if element.name == 'Block':
            candidate = Block.fromelement(element)
            # Keep only blocks belonging to the requested track.
            block = candidate if candidate.track == track else None
        elif element.name == 'BlockDuration':
            duration = element.data
    if block is not None and duration is not None:
        blockgroups.append(BlockGroup(block, duration))
def _print_time_range(timecode_scale, clusterTimecode, blockTimecode, duration):
    """Format an SRT 'start --> end' time range for one block."""
    # timecode_scale is in nanoseconds per tick; convert to milliseconds.
    ms_per_tick = timecode_scale / 1000000
    start_ticks = clusterTimecode + blockTimecode
    start_ms = start_ticks * ms_per_tick
    end_ms = (start_ticks + duration) * ms_per_tick
    return _print_time(start_ms) + " --> " + _print_time(end_ms)
def _print_time(timeInMilleSeconds):
timeInSeconds, milleSeconds = divmod(timeInMilleSeconds, 1000)
timeInMinutes, seconds = divmod(timeInSeconds, 60)
hours, minutes = divmod(timeInMinutes, 60)
return '%d:%02d:%02d,%d' % (hours,minutes,seconds,milleSeconds)
class Cluster(object):
    """A parsed MKV Cluster: its timecode plus the BlockGroups of one track."""
    def __init__(self, timecode=None, blockgroups=None):
        self.timecode = timecode
        # BUGFIX: the old mutable default argument ([]) was shared across all
        # instances created without an explicit list; build a fresh one.
        self.blockgroups = [] if blockgroups is None else blockgroups
class BlockGroup(object):
    """Pairs a Block with its display duration (in cluster ticks)."""
    def __init__(self, block=None, duration=None):
        self.duration = duration
        self.block = block
class Block(object):
    """A parsed Matroska Block: track number, timecode, flags and payload."""
    def __init__(self, track=None, timecode=None, invisible=False, lacing=None, flags=None, data=None):
        self.track = track
        self.timecode = timecode
        self.invisible = invisible
        self.lacing = lacing
        self.flags = flags
        self.data = data

    @classmethod
    def fromelement(cls, element):
        """Decode a Block element's header and payload into a `cls` instance."""
        stream = element.data
        track = ebml.read_element_size(stream)
        timecode = ebml.read_element_integer(stream, 2)
        flags = ord(stream.read(1))
        invisible = bool(flags & 0x8)
        # Lacing lives in bits 1-2 of the flags byte (mask 0b110):
        # 0b11 = EBML, 0b10 = fixed-size, 0b01 = Xiph, 0b00 = none.
        # BUGFIX: the old `if flags & 0x6` test was truthy for ANY lacing, so
        # the fixed-size and Xiph branches were unreachable and every laced
        # block was misreported as EBML-laced.
        lace_bits = flags & 0x6
        if lace_bits == 0x6:
            lacing = 'EBML'
        elif lace_bits == 0x4:
            lacing = 'fixed-size'
        elif lace_bits == 0x2:
            lacing = 'Xiph'
        else:
            lacing = None
        if lacing:
            raise ReadError('Laced blocks are not implemented yet')
        data = ebml.read_element_binary(stream, element.size - stream.tell())
        # BUGFIX: construct with keyword arguments so subclasses with extra
        # __init__ parameters (e.g. SimpleBlock's `keyframe`) do not have the
        # positional arguments silently shifted onto the wrong fields.
        return cls(track=track, timecode=timecode, invisible=invisible,
                   lacing=lacing, flags=flags, data=data)

    def __repr__(self):
        return '<%s track=%d, timecode=%d, invisible=%d, lacing=%s>' % (self.__class__.__name__, self.track, self.timecode, self.invisible, self.lacing)
class SimpleBlock(Block):
    """A Matroska SimpleBlock: a Block with keyframe/discardable flags."""
    def __init__(self, track=None, timecode=None, keyframe=False, invisible=False, lacing=None, flags=None, data=None, discardable=False):
        super(SimpleBlock, self).__init__(track, timecode, invisible, lacing, flags, data)
        self.keyframe = keyframe
        self.discardable = discardable

    @classmethod
    def fromelement(cls, element):
        # BUGFIX: the @classmethod decorator was missing, so calling
        # SimpleBlock.fromelement(element) passed the element as `cls` and
        # crashed inside super().
        simpleblock = super(SimpleBlock, cls).fromelement(element)
        simpleblock.keyframe = bool(simpleblock.flags & 0x80)
        simpleblock.discardable = bool(simpleblock.flags & 0x1)
        return simpleblock

    def __repr__(self):
        return '<%s track=%d, timecode=%d, keyframe=%d, invisible=%d, lacing=%s, discardable=%d>' % (self.__class__.__name__, self.track, self.timecode, self.keyframe, self.invisible, self.lacing, self.discardable)
|
from configargparse import ArgParser
from tupa.scripts.export import load_model
def main():
    """Load each TUPA model named on the command line and re-save it with a suffix."""
    parser = ArgParser(description="Load TUPA model and save again to a different file.")
    parser.add_argument("models", nargs="+", help="model file basename(s) to load")
    parser.add_argument("-s", "--suffix", default=".1", help="filename suffix to append")
    opts = parser.parse_args()
    for basename in opts.models:
        loaded = load_model(basename)
        # Rename both the model and its classifier before saving the copy.
        loaded.filename += opts.suffix
        loaded.classifier.filename += opts.suffix
        loaded.save()
if __name__ == "__main__":
main()
|
import unittest
import numpy
from safe.common.geodesy import Point
class TestCase(unittest.TestCase):
    """Tests for great-circle distance and bearing between geodesy Points.

    Expected values come from real-world references and from
    http://www.movable-type.co.uk/scripts/latlong.html.
    """
    def setUp(self):
        self.eps = 0.001  # Accept 0.1 % relative error
        # Reference locations reused across tests.
        self.RSISE = Point(-35.27456, 149.12065)
        self.Home = Point(-35.25629, 149.12494)  # 28 Scrivener Street, ACT
        self.Syd = Point(-33.93479, 151.16794)  # Sydney Airport
        self.Nadi = Point(-17.75330, 177.45148)  # Nadi Airport
        self.Kobenhavn = Point(55.70248, 12.58364)  # Kobenhavn, Denmark
        self.Muncar = Point(-8.43, 114.33)  # Muncar, Indonesia
    def testBearingNorth(self):
        """Bearing due north (0 deg) correct within double precision
        """
        eps = 1.0e-12
        p1 = Point(0.0, 0.0)
        p2 = Point(1.0, 0.0)
        b = p1.bearing_to(p2)
        msg = 'Computed northward bearing: %d, Should have been: %d' % (b, 0)
        assert numpy.allclose(b, 0, rtol=eps, atol=eps), msg
    def testBearingSouth(self):
        """Bearing due south (180 deg) is correct within double precision
        """
        eps = 1.0e-12
        B = 180  # True bearing
        p1 = Point(0.0, 0.0)
        p2 = Point(1.0, 0.0)
        b = p2.bearing_to(p1)
        msg = 'Computed southward bearing %d. Expected %d' % (b, B)
        assert numpy.allclose(b, B, rtol=eps, atol=eps), msg
    def testBearingEast(self):
        """Bearing due east (90 deg) is correct within double precision
        """
        # FIX: docstring and message previously described this 90 degree
        # test as "due west (270 deg)" / "southward".
        eps = 1.0e-12
        B = 90  # True bearing
        p1 = Point(0.0, 0.0)
        p3 = Point(0.0, 1.0)
        b = p1.bearing_to(p3)
        msg = 'Computed eastward bearing %d. Expected %d' % (b, B)
        assert numpy.allclose(b, B, rtol=eps, atol=eps), msg
    def testBearingWest(self):
        """Bearing due west (270 deg) is correct within double precision
        """
        eps = 1.0e-12
        B = 270  # True bearing
        p1 = Point(0.0, 0.0)
        p3 = Point(0.0, 1.0)
        b = p3.bearing_to(p1)
        msg = 'Computed westward bearing %d. Expected %d' % (b, B)
        assert numpy.allclose(b, B, rtol=eps, atol=eps), msg
    def testRSISE2Home(self):
        """Distance and bearing of real example (RSISE -> Home) are correct
        """
        D = 2068.855  # True Distance to Home
        B = 11  # True Bearing to Home
        d = self.RSISE.distance_to(self.Home)
        msg = 'Dist from RSISE to Home %f. Expected %f' % (d, D)
        assert numpy.allclose(d, D, rtol=1.0e-6), msg
        b = self.RSISE.bearing_to(self.Home)
        msg = 'Bearing from RSISE to Home %i. Expected %i' % (b, B)
        assert b == B, msg
    def testRSISE2Sydney(self):
        """Distance and bearing of real example (RSISE -> Syd) are correct
        """
        D = 239407.67  # True Distance to Sydney Airport
        B = 52  # True Bearing to Sydney Airport
        d = self.RSISE.distance_to(self.Syd)
        msg = 'Dist from RSISE to Sydney airport %f. Expected %f' % (d, D)
        assert numpy.allclose(d, D, rtol=1.0e-6), msg
        b = self.RSISE.bearing_to(self.Syd)
        msg = 'Bearing from RSISE to Sydney airport %i. Expected %i' % (b, B)
        assert b == B, msg
    def testRSISE2Nadi(self):
        """Distance and bearing of real example (RSISE -> Nadi) are correct
        """
        D = 3406100  # True Distance to Nadi Airport
        B = 63  # True Bearing to Nadi Airport
        d = self.RSISE.distance_to(self.Nadi)
        msg = 'Dist from RSISE to Nadi airport %f. Expected %f' % (d, D)
        assert numpy.allclose(d, D, rtol=1.0e-4), msg
        b = self.RSISE.bearing_to(self.Nadi)
        msg = 'Bearing from RSISE to Nadi airport %i. Expected %i' % (b, B)
        assert b == B, msg
    def testRSISE2Kobenhavn(self):
        """Distance and bearing of real example (RSISE -> Kbh) are correct
        """
        D = 16025 * 1000  # True Distance to Kobenhavn
        B = 319  # True Bearing to Kobenhavn
        d = self.RSISE.distance_to(self.Kobenhavn)
        msg = 'Dist from RSISE to Kobenhavn %f. Expected %f' % (d, D)
        assert numpy.allclose(d, D, rtol=1.0e-3), msg
        b = self.RSISE.bearing_to(self.Kobenhavn)
        # FIX: message previously referred to "Nadi airport".
        msg = 'Bearing from RSISE to Kobenhavn %i. Expected %i' % (b, B)
        assert b == B, msg
    def testEarthquake2Muncar(self):
        """Distance and bearing of real example (quake -> Muncar) are correct
        """
        # Test data from http://www.movable-type.co.uk/scripts/latlong.html
        D = 151318  # True Distance [m]
        B = 26  # 26 19 42 / 26 13 57 # Bearing to between points (start, end)
        p1 = Point(latitude=-9.65, longitude=113.72)
        d = p1.distance_to(self.Muncar)
        msg = 'Dist to Muncar failed %f. Expected %f' % (d, D)
        assert numpy.allclose(d, D), msg
        b = p1.bearing_to(self.Muncar)
        msg = 'Bearing to Muncar %i. Expected %i' % (b, B)
        assert b == B, msg
    def test_equator_example(self):
        """Distance and bearing of real example (near equator) are correct
        """
        # Test data from http://www.movable-type.co.uk/scripts/latlong.html
        D = 11448.0959593  # True Distance [m]
        p1 = Point(latitude=-0.59, longitude=117.10)
        p2 = Point(latitude=-0.50, longitude=117.15)
        d = p1.distance_to(p2)
        msg = 'Dist to point failed %f. Expected %f' % (d, D)
        assert numpy.allclose(d, D, rtol=1.0e-3), msg
    def test_generate_circle(self):
        """A circle with a given radius can be generated correctly
        """
        # Generate a circle around Sydney airport with radius 3km
        radius = 3000
        C = self.Syd.generate_circle(radius)
        # Check distance around the circle
        # Note that not every point will be exactly 3000m
        # because the circle in defined in geographic coordinates
        for c in C:
            p = Point(c[1], c[0])
            d = self.Syd.distance_to(p)
            msg = ('Radius %f not within expected tolerance. Expected %d'
                   % (d, radius))
            assert numpy.allclose(d, radius, rtol=2.0e-1), msg
        # Store and view
        #from safe.storage.vector import Vector
        #Vector(geometry=[C],
        #       geometry_type='polygon').write_to_file('circle.shp')
        #Vector(geometry=C,
        #       geometry_type='point').write_to_file('circle_as_points.shp')
        #Vector(geometry=[[self.Syd.longitude, self.Syd.latitude]],
        #       geometry_type='point',
        #       data=None).write_to_file('center.shp')
if __name__ == '__main__':
    # Run every test method whose name starts with 'test'.
    suite = unittest.makeSuite(TestCase, 'test')
    unittest.TextTestRunner(verbosity=2).run(suite)
|
from .anime import ConfigAnime
from .backup import ConfigBackupRestore
from .general import ConfigGeneral
from .index import Config
from .notifications import ConfigNotifications
from .post_processing import ConfigPostProcessing
from .providers import ConfigProviders
from .search import ConfigSearch
from .shares import ConfigShares
from .subtitles import ConfigSubtitles
|
from constant import *
from lang import __, getDefaultLanguage
from pprint import pprint
from utils import *
import Queue as Q
import apt
import apt_pkg
import errno
import glib
import hashlib
import optparse
import os.path
import subprocess
import sys
import textwrap
import threading as td
import time
import urllib
import urllib2
import utils
import xmlrpclib
(ARIA2_MAJOR_VERSION, ARIA2_MINOR_VERSION, _) = utils.getAria2Version()
class Download(td.Thread):
def __init__(self, pkgName, rpcListenPort, updateCallback, finishCallback, messageCallback):
# Init.
td.Thread.__init__(self)
self.setDaemon(True) # make thread exit when main program exit
self.cache = apt.Cache()
self.pkgName = pkgName
self.rpcListenPort = rpcListenPort
self.updateCallback = updateCallback
self.finishCallback = finishCallback
self.messageCallback = messageCallback
self.server = xmlrpclib.ServerProxy('http://localhost:%s/rpc' % (self.rpcListenPort))
self.downloadStatus = {}
self.totalLength = 0
self.cacheLength = 0
self.progress = 0
self.archiveDir = apt_pkg.config.find_dir('Dir::Cache::Archives')
self.partialDir = os.path.join(self.archiveDir, "tealinux_software_center_cache", pkgName)
self.retryTicker = 0 # retry ticker
self.updateInterval = 1 # in seconds
self.signalChannel = Q.Queue()
self.maxConcurrentDownloads = 50 # maximum number of parallel downloads for every static (HTTP/FTP) URI,
# torrent and metalink.
self.metalinkServers = 5 # the number of servers to connect to simultaneously
self.maxConnectionPerServer = 10 # the maximum number of connections to one server for each download
self.minSplitSize = "1M" # minimum split size (1M - 1024M)
self.maxOverallDownloadLimit = "100K" # max overall download speed in bytes/sec
self.autoSaveInterval = 10 # time to auto save progress, in seconds
if not self.archiveDir:
raise Exception(('No archive dir is set.'
' Usually it is /var/cache/apt/archives/'))
def run(self):
'''Run'''
# Build command line.
cmdline = ['aria2c',
'--dir=%s' % (self.partialDir),
'--file-allocation=none',
'--auto-file-renaming=false',
'--summary-interval=0',
'--remote-time=true',
'--auto-save-interval=%s' % (self.autoSaveInterval),
'--max-concurrent-downloads=%s' % (self.maxConcurrentDownloads),
'--metalink-servers=%s' % (self.metalinkServers),
'--check-integrity=true',
'--disable-ipv6=true',
# '--max-overall-download-limit=%s' % (self.maxOverallDownloadLimit),
]
# Compatible with aria2c 1.12.x, damn Japanese, why change options every version? Damn you!
if ARIA2_MAJOR_VERSION >= 1 and ARIA2_MINOR_VERSION >= 12:
cmdline.append('--enable-rpc=true')
cmdline.append('--rpc-listen-port=%s' % (self.rpcListenPort))
else:
cmdline.append('--enable-xml-rpc=true')
cmdline.append('--xml-rpc-listen-port=%s' % (self.rpcListenPort))
# Add `max-connection-per-server` and `min-split-size` options if aria2c >= 1.10.x.
if ARIA2_MAJOR_VERSION >= 1 and ARIA2_MINOR_VERSION >= 10:
cmdline.append('--max-connection-per-server=%s' % (self.maxConnectionPerServer))
cmdline.append('--min-split-size=%s' % (self.minSplitSize))
# Make software center can work with aria2c 1.9.x.
if ARIA2_MAJOR_VERSION >= 1 and ARIA2_MINOR_VERSION <= 9:
cmdline.append("--no-conf")
cmdline.append("--continue")
else:
cmdline.append("--no-conf=true")
cmdline.append("--continue=true")
# Append proxy configuration.
# proxyString = utils.parseProxyString()
# if proxyString != None:
# cmdline.append("=".join(["--all-proxy", proxyString]))
# Start child process.
self.proc = subprocess.Popen(cmdline)
# Get process result.
result = DOWNLOAD_STATUS_FAILED
try:
result = self.download([self.pkgName])
self.server.aria2.shutdown()
except Exception, e:
self.messageCallback((__("% s: Download failed, please check your network link.") % self.pkgName))
self.updateCallback(self.pkgName, self.progress, __("Download failed"))
result = DOWNLOAD_STATUS_FAILED
print "Download error: ", e
# Kill child process.
killProcess(self.proc)
print self.proc.returncode
# Call callback.
self.finishCallback(self.pkgName, result)
def download(self, pkg_names):
# Mark packages.
for pkg_name in pkg_names:
if pkg_name in self.cache:
pkg = self.cache[pkg_name]
if not pkg.installed:
pkg.mark_install()
elif pkg.is_upgradable:
pkg.mark_upgrade()
else:
raise Exception('%s is not found' % pkg_name)
return self._get_changes()
def _get_changes(self):
pkgs = sorted(self.cache.get_changes(), key=lambda p: p.name)
if len(pkgs) != 0:
# Get total length.
self.totalLength = self.cache.required_download
# Get packages to download.
pkgs = [pkg for pkg in pkgs if not pkg.marked_delete and not self._file_downloaded(pkg)]
# Return DOWNLOAD_STATUS_DONT_NEED haven't packages need download,
if len(pkgs) == 0:
self.updateCallback(self.pkgName, 100, __("Download Finish"))
return DOWNLOAD_STATUS_DONT_NEED
# Otherwise download.
else:
return self._download(pkgs)
else:
# Return DOWNLOAD_STATUS_DONT_NEED if don't need download anything.
return DOWNLOAD_STATUS_DONT_NEED
def make_metalink(self, pkgs):
'''Make metalink.'''
lines = []
lines.append('<?xml version="1.0" encoding="UTF-8"?>')
lines.append('<metalink xmlns="urn:ietf:params:xml:ns:metalink">')
for pkg in pkgs:
version = pkg.candidate
hashtype, hashvalue = get_hash(version)
lines.append('<file name="{0}">'.format(get_filename(version)))
lines.append('<size>{0}</size>'.format(version.size))
if hashtype:
lines.append('<hash type="{0}">{1}</hash>'.format(hashtype, hashvalue))
for uri in version.uris:
# Debug.
print "Add link %s" % (uri)
lines.append('<url priority="1">{0}</url>'.format(uri))
lines.append('</file>')
lines.append('</metalink>')
return ''.join(lines)
def _download(self, pkgs):
# Update status.
self.updateCallback(self.pkgName, 0, __("Start Download"))
# Make metalink.
self.server.aria2.addMetalink(xmlrpclib.Binary(self.make_metalink(pkgs)))
# Download loop.
downloadCompleted = False
while not downloadCompleted:
# Sleep thread.
time.sleep(self.updateInterval)
# Stop download if reach retry times.
if self.retryTicker > DOWNLOAD_TIMEOUT:
self.messageCallback((__("% s: Download timeout, please check your network link.") % (self.pkgName)))
self.updateCallback(self.pkgName, self.progress, __("Download Timeout"))
return DOWNLOAD_STATUS_TIMEOUT
elif self.retryTicker > 0:
print "Retry (%s/%s)" % (self.retryTicker, DOWNLOAD_TIMEOUT)
# Stop download if received signal.
if not self.signalChannel.empty():
signal = self.signalChannel.get_nowait()
if signal == "STOP":
return DOWNLOAD_STATUS_STOP
elif signal == "PAUSE":
self.updateCallback(self.pkgName, self.progress, __("Download Pause"), APP_STATE_DOWNLOAD_PAUSE)
return DOWNLOAD_STATUS_PAUSE
# Otherwise wait download complete.
else:
# Get status list.
statusList = self.server.aria2.tellActive()
completedStatus = []
for status in statusList:
gid = status['gid']
self.downloadStatus[gid] = int(status['completedLength'])
completedStatus.append(status['status'] == 'complete')
# Get current download length.
currentLength = sum(self.downloadStatus.values())
# Get download speed.
if self.cacheLength == 0:
downloadSpeed = 0
else:
downloadSpeed = currentLength - self.cacheLength / self.updateInterval
# Store cache length.
self.cacheLength = currentLength
# Increases retry ticker if download speed is zero.
if downloadSpeed == 0:
self.retryTicker += 1
# Init retry ticker if download speed is not zero.
else:
self.retryTicker = 0
# Get progress.
if self.totalLength == 0:
self.progress = 0
else:
self.progress = int(currentLength * 100 / self.totalLength)
# Update status.
self.updateCallback(self.pkgName, self.progress, utils.formatFileSize(downloadSpeed) + "/s")
# Whether all download complete.
downloadCompleted = all(completedStatus)
link_success = True
# Link archives/pkgName/*.deb to archives/
for pkg in pkgs:
filename = get_filename(pkg.candidate)
dst = os.path.join(self.archiveDir, filename)
src = os.path.join(self.partialDir, filename)
ctrl_file = ''.join([src, '.aria2'])
# If control file exists, we assume download is not
# complete.
if os.path.exists(ctrl_file):
continue
try:
# Making hard link because aria2c needs file in
# partial directory to know download is complete
# in the next invocation.
os.rename(src, dst)
except OSError, e:
if e.errno != errno.ENOENT:
print "Failed to move archive file", e
link_success = False
# Return DOWNLOAD_STATUS_COMPLETE if link success.
if link_success:
self.updateCallback(self.pkgName, 100, __("Download Finish"))
# Send download count to server.
SendDownloadCount(self.pkgName).start()
return DOWNLOAD_STATUS_COMPLETE
# Otherwise return DOWNLOAD_STATUS_FAILED.
else:
return DOWNLOAD_STATUS_FAILED
def _file_downloaded(self, pkg):
# Check whether file has downloaded.
candidate = pkg.candidate
path = os.path.join(self.archiveDir, get_filename(candidate))
if not os.path.exists(path) or os.stat(path).st_size != candidate.size:
return False
# Hash check.
hash_type, hash_value = get_hash(pkg.candidate)
try:
return check_hash(path, hash_type, hash_value)
except IOError, e:
if e.errno != errno.ENOENT:
print "Failed to check hash", e
self.messageCallback((__("%s checkout failed.") % self.pkgName))
return False
class SendDownloadCount(td.Thread):
'''Send download count.'''
def __init__(self, pkgName):
'''Init for vote.'''
td.Thread.__init__(self)
self.setDaemon(True) # make thread exit when main program exit
self.pkgName = pkgName
def run(self):
'''Run'''
try:
args = {
'a' : 'd',
'n' : self.pkgName}
connection = urllib2.urlopen(
"%s/softcenter/v1/analytics" % (SERVER_ADDRESS),
data=urllib.urlencode(args),
timeout=POST_TIMEOUT
)
print "Send download count (%s) successful." % (self.pkgName)
except Exception, e:
print "Send download count (%s) failed." % (self.pkgName)
print "Error: ", e
def check_hash(path, hash_type, hash_value):
    '''Check hash value.

    Read the file at *path* in 4 KiB chunks, digest it with *hash_type*
    (any algorithm name hashlib.new accepts, e.g. "sha256") and return
    True when the hex digest equals *hash_value*.
    '''
    hash_fun = hashlib.new(hash_type)
    # FIX: open in binary mode -- package files are binary data and the
    # digest must be computed over raw bytes (text mode corrupts the read
    # on non-POSIX platforms and returns str on Python 3).
    with open(path, 'rb') as f:
        while True:
            chunk = f.read(4096)
            if not chunk:
                break
            hash_fun.update(chunk)
    return hash_fun.hexdigest() == hash_value
def get_hash(version):
    '''Return the strongest (hash_name, hash_value) pair carried by *version*.

    Preference order is sha256, then sha1, then md5; returns (None, None)
    when none of them is present.
    '''
    if version.sha256:
        return ("sha256", version.sha256)
    if version.sha1:
        return ("sha1", version.sha1)
    if version.md5:
        return ("md5", version.md5)
    return (None, None)
def get_filename(version):
    '''Return just the basename of the package file for *version*.'''
    _, name = os.path.split(version.filename)
    return name
class DownloadQueue(object):
'''Download queue'''
def __init__(self, updateCallback, finishCallback, failedCallback, messageCallback):
'''Init for download queue.'''
# Init.
self.maxConcurrentDownloads = 5 # max concurrent download
self.downloadingQueue = []
self.downloadingChannel = {}
self.portTicker = 7000
self.waitQueue = []
self.updateCallback = updateCallback
self.finishCallback = finishCallback
self.failedCallback = failedCallback
self.messageCallback = messageCallback
def startDownloadThread(self, pkgName):
'''Start download thread.'''
# Add in download list.
utils.addInList(self.downloadingQueue, pkgName)
# Start download thread.
self.portTicker += 1 # generate new rpc listen port
download = Download(pkgName, self.portTicker, self.updateCallback, self.finishDownloadCallback, self.messageCallback)
download.start()
# Add signal channel.
self.downloadingChannel[pkgName] = download
def addDownload(self, pkgName):
'''Add new download'''
if len(self.downloadingQueue) >= self.maxConcurrentDownloads:
utils.addInList(self.waitQueue, pkgName)
else:
self.startDownloadThread(pkgName)
def stopDownload(self, pkgName):
'''Stop download.'''
# Send pause signal if package at download list.
if pkgName in self.downloadingQueue:
if self.downloadingChannel.has_key(pkgName):
# Pause download.
self.downloadingChannel[pkgName].signalChannel.put('PAUSE')
else:
print "Impossible: downloadingChannel not key '%s'" % (pkgName)
# Otherwise just simple remove from download queue.
else:
utils.removeFromList(self.waitQueue, pkgName)
def finishDownloadCallback(self, pkgName, downloadStatus):
'''Finish download, start new download if have download in queue.'''
# Remove pkgName from download list.
utils.removeFromList(self.downloadingQueue, pkgName)
del self.downloadingChannel[pkgName]
# Call back if download success.
if downloadStatus in [DOWNLOAD_STATUS_COMPLETE, DOWNLOAD_STATUS_DONT_NEED]:
self.finishCallback(pkgName)
elif downloadStatus == DOWNLOAD_STATUS_FAILED:
self.failedCallback(pkgName)
elif downloadStatus == DOWNLOAD_STATUS_TIMEOUT:
print "Download %s timeout." % (pkgName)
self.failedCallback(pkgName)
elif downloadStatus == DOWNLOAD_STATUS_STOP:
print "Download %s stop." % (pkgName)
elif downloadStatus == DOWNLOAD_STATUS_PAUSE:
print "Download %s pause." % (pkgName)
# Start new download thread if download list's length is not reach max limit.
if len(self.downloadingQueue) < self.maxConcurrentDownloads and len(self.waitQueue) > 0:
self.startDownloadThread(self.waitQueue.pop(0))
def getDownloadPkgs(self):
'''Get download packages.'''
return self.downloadingQueue + self.waitQueue
def stopAllDownloads(self):
'''Stop all download task.'''
for channel in self.downloadingChannel.values():
channel.signalChannel.put('STOP')
killProcess(channel.proc) # must kill here, otherwise aria2c process exit even send STOP signal
|
"""
MAP Client, a program to generate detailed musculoskeletal models for OpenSim.
Copyright (C) 2012 University of Auckland
This file is part of MAP Client. (http://launchpad.net/mapclient)
MAP Client is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
MAP Client is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with MAP Client. If not, see <http://www.gnu.org/licenses/>.
"""
from mapclient.mountpoints.workflowstep import WorkflowStepMountPoint
class SkeletonStep(WorkflowStepMountPoint):
    """Bare-bones workflow step, intended as a template to copy when
    writing new steps.  Every hook is a no-op except the identifier.
    """
    def __init__(self, location):
        """Register the step under the name 'Skeleton' at *location*."""
        super(SkeletonStep, self).__init__('Skeleton', location)
    def configure(self):
        """The skeleton step needs no configuration."""
        pass
    def getIdentifier(self):
        """Fixed identifier for this step."""
        return 'skeleton'
    def setIdentifier(self, identifier):
        """Identifier is fixed; the assigned value is ignored."""
        pass
    def serialize(self):
        """Nothing to persist."""
        pass
    def deserialize(self, string):
        """Nothing to restore."""
        pass
|
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleError
from ansible.plugins.action import ActionBase
from ansible.template import Templar
from ansible.utils.boolean import boolean
class ActionModule(ActionBase):
    """Action plugin that templates its task arguments and returns them as
    new facts, coercing true/false-like strings to booleans.
    """

    TRANSFERS_FILES = False

    def run(self, tmp=None, task_vars=None):
        """Template each arg key against task_vars and return the facts.

        Returns a result dict with changed=True and ansible_facts mapping
        the templated keys to their (possibly boolean-coerced) values.
        """
        # FIX: the signature used a mutable default (task_vars=dict()),
        # which is shared across calls; create a fresh dict per call.
        if task_vars is None:
            task_vars = dict()
        templar = Templar(loader=self._loader, variables=task_vars)
        facts = dict()
        if self._task.args:
            for (k, v) in self._task.args.iteritems():
                k = templar.template(k)
                # Normalize boolean-looking strings so facts behave like
                # real booleans in later conditionals.
                if isinstance(v, basestring) and v.lower() in ('true', 'false', 'yes', 'no'):
                    v = boolean(v)
                facts[k] = v
        return dict(changed=True, ansible_facts=facts)
|
import IMP.core
import IMP.container
import IMP.algebra
import IMP.display
import sys

# Example: pack spheres into a box with Monte Carlo using IMP's
# incremental scoring (only moved particles are rescored each step).
IMP.setup_from_argv(sys.argv, "Optimize balls example")

# Use a tiny problem when run as part of IMP's quick test suite.
if IMP.get_is_quick_test():
    num_balls = 2
    num_mc_steps = 10
else:
    num_balls = 20
    num_mc_steps = 1000

m = IMP.Model()
# 30x30x30 box that both seeds the random positions and bounds the balls.
bb = IMP.algebra.BoundingBox3D(IMP.algebra.Vector3D(0, 0, 0),
                               IMP.algebra.Vector3D(30, 30, 30))
ps = []
for i in range(0, num_balls):
    # Each particle is a sphere of radius 10 placed randomly in the box.
    ps.append(IMP.Particle(m))
    d = IMP.core.XYZR.setup_particle(ps[-1])
    d.set_radius(10)
    d.set_coordinates(IMP.algebra.get_random_vector_in(bb))
    d.set_coordinates_are_optimized(True)
psl = IMP.container.ListSingletonContainer(m, ps)
# Penalize overlapping spheres...
r = IMP.core.ExcludedVolumeRestraint(psl, 1)
# ...and spheres straying outside the bounding box.
rb = IMP.container.SingletonsRestraint(
    IMP.core.BoundingBox3DSingletonScore(
        IMP.core.HarmonicUpperBound(0, 1), bb),
    psl)
mc = IMP.core.MonteCarlo(m)
# Incremental scoring: recompute only terms touched by the moved particle.
isf = IMP.core.IncrementalScoringFunction(m, ps, [r, rb])
mc.set_incremental_scoring_function(isf)
# One BallMover per particle (max step 5), tried in turn by SerialMover.
mvs = [IMP.core.BallMover(m, p, 5) for p in ps]
sm = IMP.core.SerialMover(mvs)
mc.add_mover(sm)
IMP.set_log_level(IMP.SILENT)
print("initial", isf.evaluate(False))
after = mc.optimize(num_mc_steps)
print("final", after)
# Dump the final configuration to a temporary Pymol file for inspection.
name = IMP.create_temporary_file_name("incremental_mc", ".pym")
w = IMP.display.PymolWriter(name)
for p in ps:
    g = IMP.core.XYZRGeometry(p)
    w.add_geometry(g)
print("pymol", name)
|
"""
AMP passport - grant abilities to a server.
Quickly checks permissions to operate in
specific capacities.
* Storage (proof & key-trust check)
* Encrypted Storage (longer proof & key-trust check)
* Publish (hidden server storing data)
* Subscribe (hidden server looking for data)
* Rendesvous (publish side load-balance IP)
* Courier (subscribe side load-balance IP)
* Sanctuary (recursive use of encrypted storage, off the books)
* Safe Passage (permission to connect with forwardable packets)
Permissions are granted/lost on:
* Behavior
* Manual Override
* Key Trust Depth
* Server Load
Could possibly introduce rate limits?
"""
from __future__ import absolute_import
from __future__ import unicode_literals
from twisted.protocols.amp import Command, DateTime
from ..types import ObjHash, ObjSig, ObjPubKey
"""
Storage applies a censor policy. If they want to avoid
that policy, they'll have to ask for an encryption permit
instead.
"""
class StoreRequest(Command):
    """AMP command asking the peer for permission to store (plain) data.

    Both sides exchange a pubkey, a start/ttl window, a hash and a
    signature.  NOTE(review): the exact signing/TTL semantics are not
    visible here -- confirm against the protocol design notes.
    """
    arguments = [(b'my_pubkey', ObjPubKey()),
                 (b'my_start', DateTime()),
                 (b'my_ttl', DateTime()),
                 (b'my_hash', ObjHash()),
                 (b'my_sig', ObjSig())]
    response = [(b'your_pubkey', ObjPubKey()),
                (b'your_start', DateTime()),
                (b'your_ttl', DateTime()),
                (b'your_hash', ObjHash()),
                (b'my_sig', ObjSig())]
"""
Encrypted storage applies an inverted sensor. It better
look like it's ben tripleSec encrypted, and we'll do it
again to keep the storage provider from indexing it.
"""
class StoreEncryptedRequest(Command):
    """AMP command asking for permission to store encrypted data
    (same field layout as StoreRequest)."""
    arguments = [(b'my_pubkey', ObjPubKey()),
                 (b'my_start', DateTime()),
                 (b'my_ttl', DateTime()),
                 (b'my_hash', ObjHash()),
                 (b'my_sig', ObjSig())]
    response = [(b'your_pubkey', ObjPubKey()),
                (b'your_start', DateTime()),
                (b'your_ttl', DateTime()),
                (b'your_hash', ObjHash()),
                (b'my_sig', ObjSig())]
"""
As a private server, a user will connect to them to find any
new content. This is where the actual data sits, if in
strict encryption mode. Otherwise, the rendesvous & courier
may be allowed to cache the request.
"""
class PublishRequest(Command):
    """AMP command requesting the publish permission (hidden server
    storing data); same field layout as StoreRequest."""
    arguments = [(b'my_pubkey', ObjPubKey()),
                 (b'my_start', DateTime()),
                 (b'my_ttl', DateTime()),
                 (b'my_hash', ObjHash()),
                 (b'my_sig', ObjSig())]
    response = [(b'your_pubkey', ObjPubKey()),
                (b'your_start', DateTime()),
                (b'your_ttl', DateTime()),
                (b'your_hash', ObjHash()),
                (b'my_sig', ObjSig())]
class SubscribeRequest(Command):
    """AMP command requesting the subscribe permission (hidden server
    looking for data); same field layout as StoreRequest."""
    arguments = [(b'my_pubkey', ObjPubKey()),
                 (b'my_start', DateTime()),
                 (b'my_ttl', DateTime()),
                 (b'my_hash', ObjHash()),
                 (b'my_sig', ObjSig())]
    response = [(b'your_pubkey', ObjPubKey()),
                (b'your_start', DateTime()),
                (b'your_ttl', DateTime()),
                (b'your_hash', ObjHash()),
                (b'my_sig', ObjSig())]
"""
The Rendesvous (or publishing) and Courier (or subscribing)
relays are public addresses that speak on behalf of a server.
"""
class RendesvousRequest(Command):
    """AMP command requesting the rendesvous permission (publish-side
    load-balance relay); same field layout as StoreRequest."""
    arguments = [(b'my_pubkey', ObjPubKey()),
                 (b'my_start', DateTime()),
                 (b'my_ttl', DateTime()),
                 (b'my_hash', ObjHash()),
                 (b'my_sig', ObjSig())]
    response = [(b'your_pubkey', ObjPubKey()),
                (b'your_start', DateTime()),
                (b'your_ttl', DateTime()),
                (b'your_hash', ObjHash()),
                (b'my_sig', ObjSig())]
class CourierRequest(Command):
    """AMP command requesting the courier permission (subscribe-side
    load-balance relay); same field layout as StoreRequest."""
    arguments = [(b'my_pubkey', ObjPubKey()),
                 (b'my_start', DateTime()),
                 (b'my_ttl', DateTime()),
                 (b'my_hash', ObjHash()),
                 (b'my_sig', ObjSig())]
    response = [(b'your_pubkey', ObjPubKey()),
                (b'your_start', DateTime()),
                (b'your_ttl', DateTime()),
                (b'your_hash', ObjHash()),
                (b'my_sig', ObjSig())]
"""
Sanctuary is an unlisted, encrypted server that
backs up, encrypted, to other servers. Safe Passage
is the permission to act as a tor-like relay
"""
class SanctuaryRequest(Command):
    """AMP command requesting the sanctuary permission (unlisted encrypted
    server backing up to other servers); same field layout as StoreRequest."""
    arguments = [(b'my_pubkey', ObjPubKey()),
                 (b'my_start', DateTime()),
                 (b'my_ttl', DateTime()),
                 (b'my_hash', ObjHash()),
                 (b'my_sig', ObjSig())]
    response = [(b'your_pubkey', ObjPubKey()),
                (b'your_start', DateTime()),
                (b'your_ttl', DateTime()),
                (b'your_hash', ObjHash()),
                (b'my_sig', ObjSig())]
class SafePassageRequest(Command):
    """AMP command requesting safe passage (permission to relay
    forwardable packets); same field layout as StoreRequest."""
    arguments = [(b'my_pubkey', ObjPubKey()),
                 (b'my_start', DateTime()),
                 (b'my_ttl', DateTime()),
                 (b'my_hash', ObjHash()),
                 (b'my_sig', ObjSig())]
    response = [(b'your_pubkey', ObjPubKey()),
                (b'your_start', DateTime()),
                (b'your_ttl', DateTime()),
                (b'your_hash', ObjHash()),
                (b'my_sig', ObjSig())]
class passportResponders(object):
    """Responders that grant each passport permission: a grant is recorded
    in redis when the requester's key-trust distance (neo4j.TrustWithin)
    is small enough.

    NOTE(review): the responder signatures here take (self, pubkey, ttl)
    or (self, pubkey), but the Command classes declare arguments named
    my_pubkey/my_start/my_ttl/my_hash/my_sig.  AMP invokes responders with
    keyword arguments matching the declared names, so these signatures
    look inconsistent with the commands.  The returned two-tuples also do
    not match the five-field response declarations, and the b'TBD' values
    are placeholders -- confirm intended wiring before relying on this.
    """
    # Backing stores, injected via __init__.
    redis = None
    neo4j = None
    def __init__(self, GivenRedis, GivenNeo4j):
        # would pull Neo4j online
        self.redis = GivenRedis
        self.neo4j = GivenNeo4j
    @StoreRequest.responder
    def StoreGrant(self, pubkey, ttl):
        """Grant plain-storage permission when trust distance <= 3."""
        # somehow store TTL?
        if self.neo4j.TrustWithin(3):
            self.redis.write(b'permissions:store', pubkey)
        signedStorage = b'TBD'
        ttl = b'TBD'
        return signedStorage, ttl
    @StoreEncryptedRequest.responder
    def StoreEncryptedGrant(self, pubkey, ttl):
        """Grant encrypted-storage permission when trust distance <= 2."""
        if self.neo4j.TrustWithin(2):
            self.redis.write(b'permissions:storeEncrypted', pubkey)
        signedStorage = b'TBD'
        ttl = b'TBD'
        return signedStorage, ttl
    @PublishRequest.responder
    def PublishGrant(self, pubkey, ttl):
        """Grant publish permission when trust distance <= 3."""
        if self.neo4j.TrustWithin(3):
            self.redis.write(b'permissions:publish', pubkey)
        signedPublish = b'TBD'
        ttl = b'TBD'
        return signedPublish, ttl
    @SubscribeRequest.responder
    def SubscribeGrant(self, pubkey, ttl):
        """Grant subscribe permission when trust distance <= 2."""
        if self.neo4j.TrustWithin(2):
            self.redis.write(b'permissions:subscribe', pubkey)
        signedSubscribe = b'TBD'
        ttl = b'TBD'
        return signedSubscribe, ttl
    @RendesvousRequest.responder
    def RendesvousGrant(self, pubkey, ttl):
        """Grant rendesvous relay permission when trust distance <= 1."""
        if self.neo4j.TrustWithin(1):
            self.redis.write(b'rendesvous', pubkey)
        signedRendesvous = b'TBD'
        ttl = b'TBD'
        return signedRendesvous, ttl
    @CourierRequest.responder
    def CourierGrant(self, pubkey, ttl):
        """Grant courier relay permission when trust distance <= 1."""
        if self.neo4j.TrustWithin(1):
            self.redis.write(b'courier', pubkey)
        signedCourier = b'TBD'
        ttl = b'TBD'
        return signedCourier, ttl
    @SanctuaryRequest.responder
    def SanctuaryGrant(self, pubkey):
        """Grant sanctuary permission when trust distance <= 1."""
        if self.neo4j.TrustWithin(1):
            self.redis.write(b'permissions:sanctuary', pubkey)
        signedSanctuary = b'TBD'
        ttl = b'TBD'
        return signedSanctuary, ttl
    @SafePassageRequest.responder
    def SafePassageGrant(self, pubkey):
        """Grant safe-passage (relay) permission when trust distance <= 1."""
        if self.neo4j.TrustWithin(1):
            self.redis.write(b'permissions:onion', pubkey)
        signedSafePassage = b'TBD'
        ttl = b'TBD'
        return signedSafePassage, ttl
|
"""
Copyright (c) 2012-2013 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.db import models
class DiskStat(models.Model):
    """Point-in-time I/O statistics sample for a single block device.

    The field names mirror the columns of the kernel's /proc/diskstats
    (reads/writes completed and merged, sectors transferred, time spent)
    -- presumably sampled from there; confirm against the collector that
    populates this model.
    """
    # Block device name.
    name = models.CharField(max_length=128)
    reads_completed = models.FloatField()
    reads_merged = models.FloatField()
    sectors_read = models.FloatField()
    ms_reading = models.FloatField()
    writes_completed = models.FloatField()
    writes_merged = models.FloatField()
    sectors_written = models.FloatField()
    ms_writing = models.FloatField()
    ios_progress = models.FloatField()
    ms_ios = models.FloatField()
    weighted_ios = models.FloatField()
    # Sample timestamp; indexed to speed up time-range queries.
    ts = models.DateTimeField(db_index=True)
    class Meta:
        app_label = 'smart_manager'
|
from django.db.models import ImageField
from weblate.utils.validators import validate_bitmap
class ScreenshotField(ImageField):
    """File field which forces certain image types."""
    # validate_bitmap (from weblate.utils.validators) runs on every upload;
    # presumably it rejects non-bitmap image formats -- see its definition.
    default_validators = [validate_bitmap]
import _surface
import chimera
try:
    # Optional: presumably only available in some Chimera installs.
    import chimera.runCommand
except:  # NOTE(review): bare except — narrow to ImportError if safe to do so
    pass
from VolumePath import markerset as ms
try:
    # Newer VolumePath exposes Marker_Set directly; use it as the factory.
    from VolumePath import Marker_Set, Link
    new_marker_set = Marker_Set
except:
    # Older VolumePath: fall back to creating marker sets through the
    # volume path dialog instance.
    from VolumePath import volume_path_dialog
    d = volume_path_dialog(True)
    new_marker_set = d.new_marker_set
# Generated marker-placement script rewritten data-driven: one spec per
# particle as (centre xyz, RGB colour, radius), placed in the original
# order. Most particles use the default grey; a few are highlighted
# orange.
_GREY = (0.7, 0.7, 0.7)
_ORANGE = (1, 0.7, 0)
_PARTICLE_SPECS = [
    ((1763.63, 9213.27, -518.799), _GREY, 890.203),
    ((3309.97, 8662.91, 136), _GREY, 792.956),
    ((3317.57, 9132.18, 1977.62), _GREY, 856.786),
    ((2440.59, 10920.5, 732.058), _GREY, 963.679),
    ((2068.99, 11968.1, 2129.88), _GREY, 761.442),
    ((2451.24, 10719.2, 4253.39), _GREY, 961.183),
    ((2032.67, 9637.23, 5541.44), _GREY, 753.151),
    ((1648.03, 10308.5, 5091.72), _ORANGE, 1098.07),
    ((2028.47, 8076.3, 6487.58), _GREY, 1010.42),
    ((596.613, 7300.97, 6873.22), _ORANGE, 821.043),
    ((1622.77, 5797.16, 7428.69), _GREY, 873.876),
    ((2072.23, 5412.05, 6358.62), _GREY, 625.532),
    ((2644.13, 4344.42, 5338.84), _GREY, 880.474),
    ((2064.54, 4414.58, 5078.98), _GREY, 659.161),
    ((1993.17, 2272.91, 4278.07), _GREY, 831.745),
    ((4190.18, 600.094, 2907.28), _GREY, 803.065),
    ((5668.41, 1893.96, 2947.28), _GREY, 610.262),
    ((4997.87, 2002.87, 2645.48), _GREY, 741.265),
    ((4117.48, 2724.13, 3838.84), _GREY, 748.625),
    ((3205.57, 2135.47, 4808.76), _GREY, 677.181),
    ((3562.53, 4303.46, 5826), _GREY, 616.015),
    ((3938.5, 2629.87, 4687.97), _GREY, 653.154),
    ((4452.42, 2679.14, 4643.36), _GREY, 595.33),
    ((5484.43, 2353.51, 3888.16), _GREY, 627.901),
    ((5252.8, 2648.38, 2545.56), _GREY, 663.941),
    ((5080.03, 2001.81, 1149.15), _GREY, 663.899),
    ((4684.71, 2449.1, 2586.64), _GREY, 644.694),
    ((3706.73, 4136.91, 3577.34), _GREY, 896.802),
    ((4253.49, 3733.45, 4423), _GREY, 576.38),
    ((3982.59, 4016.25, 5671.6), _GREY, 635.092),
    ((4813.15, 4177.07, 5458.13), _GREY, 651.505),
    ((3275.81, 4802.84, 5990.7), _GREY, 718.042),
    ((4129.49, 3220.85, 6336.97), _GREY, 726.714),
    ((5222.49, 3192.06, 5301.22), _GREY, 673.585),
    ((4880.64, 2658.56, 4119.05), _GREY, 598.418),
    ((4572.17, 1465.65, 3587.54), _GREY, 693.382),
    ((4232.44, 3479.79, 4966.03), _GREY, 804.038),
    ((5418.46, 2110.26, 4452.81), _GREY, 816.178),
    ((5443.94, 3115.55, 3971.47), _GREY, 776.628),
    ((5336.96, 1974.3, 5064.95), _GREY, 750.656),
    ((5405.45, 3262.03, 6115.96), _GREY, 709.625),
    ((4549.66, 3189.02, 7704.26), _GREY, 927.681),
    ((6312.17, 1237.97, 7929.2), _GREY, 1088.21),
    ((5228.03, 2666.29, 8525.72), _GREY, 736.147),
    ((5369.67, 2736, 7105.29), _GREY, 861.101),
    ((5039.66, 4690.98, 6899.14), _GREY, 924.213),
    ((7009.86, 4859.37, 7068.66), _GREY, 881.828),
    ((7494.1, 3201.82, 8207.77), _GREY, 927.681),
    ((7356.34, 5044.2, 7914.2), _GREY, 831.576),
    ((6222.52, 6503.29, 7467.99), _GREY, 859.494),
    ((6463.3, 5961.1, 8425.62), _GREY, 704.845),
    ((5052.38, 6497.54, 7726.89), _GREY, 804.461),
    ((3926.31, 7028.86, 6514.25), _GREY, 934.111),
    ((3129.11, 7937.58, 7469.18), _GREY, 988.339),
    ((3645.56, 8006.32, 8128.35), _ORANGE, 803.7),
    ((5357.71, 7204.34, 7232), _GREY, 812.118),
    ((6129.5, 8721.26, 5862.1), _GREY, 1177.93),
    ((8245.1, 9253.27, 4574.85), _GREY, 1038.21),
    ((8732.54, 9307.22, 4240.71), _ORANGE, 758.016),
    ((8531.85, 10075, 4496.44), _GREY, 824.046),
    ((8308.25, 9312.51, 4968.89), _GREY, 793.379),
    ((8729.72, 9933.28, 5405.63), _GREY, 1011.56),
    ((7124.83, 8947.42, 5297.39), _GREY, 1097.01),
    ((8016.58, 7917.08, 4000.83), _GREY, 851.626),
    ((9010.09, 7190.81, 2494.69), _GREY, 869.434),
    ((8949.76, 6440.63, 4158.49), _GREY, 818.463),
    ((10630.8, 6906.33, 4214.24), _GREY, 759.539),
    ((8716.11, 7864.75, 5311.51), _GREY, 1088.59),
    ((10512.1, 9101.56, 5073.39), _GREY, 822.312),
    ((11728.1, 8472.49, 4459.71), _GREY, 749.81),
    ((10626, 8078.92, 4883.95), _GREY, 764.488),
]

marker_sets = {}
surf_sets = {}
for _index, (_center, _color, _radius) in enumerate(_PARTICLE_SPECS):
    _name = "particle_%d geometry" % _index
    # Create each named marker set on first use, exactly as the
    # generated per-particle stanzas did.
    if _name not in marker_sets:
        s = new_marker_set(_name)
        marker_sets[_name] = s
    s = marker_sets[_name]
    mark = s.place_marker(_center, _color, _radius)

# Register any surface sets with Chimera's open models (none are created
# above, so this is a no-op unless surf_sets is populated elsewhere).
for k in surf_sets.keys():
    chimera.openModels.add([surf_sets[k]])
|
import BoostBuild
# Functional test for Boost.Build's `testing` module: exercises the
# compile, compile-fail, run and run-fail rules, plus `time` targets,
# and finally verifies that flipping the expectations removes the
# success markers again. Statement order matters: each run_build_system
# call is checked against the tree state left by the previous one.
t = BoostBuild.Tester()

# Fixtures: c.cpp must compile; r.cpp must build, link against the
# helper lib and echo its command-line arguments; c-f.cpp must fail to
# compile; r-f.cpp must run but exit non-zero.
t.write("c.cpp", "\n")
t.write("r.cpp", """
void helper();
int main( int ac, char * av[] )
{
helper();
for ( int i = 1; i < ac; ++i )
std::cout << av[ i ] << '\\n';
}
""")
t.write("c-f.cpp", """
int
""")
t.write("r-f.cpp", """
int main() { return 1; }
""")
t.write("jamfile.jam", """
import testing ;
compile c.cpp ;
compile-fail c-f.cpp ;
run r.cpp libs//helper : foo bar ;
run-fail r-f.cpp ;
""")
t.write("libs/jamfile.jam", """
lib helper : helper.cpp ;
""")
t.write("libs/helper.cpp", """
void
__declspec(dllexport)
helper() {}
""")
t.write("jamroot.jam", "")

# First build: all four test targets should pass and leave their
# success markers; r's captured output must contain the arguments.
t.run_build_system("hardcode-dll-paths=false", stderr=None, status=None)
t.expect_addition("bin/c.test/$toolset/debug/c.test")
t.expect_addition("bin/c-f.test/$toolset/debug/c-f.test")
t.expect_addition("bin/r.test/$toolset/debug/r.test")
t.expect_addition("bin/r-f.test/$toolset/debug/r-f.test")
t.expect_content("bin/r.test/$toolset/debug/r.output",
"foo\nbar\n*\nEXIT STATUS: 0*\n", True)

# Second phase: r.cpp now cats an input file passed via the run rule's
# input-files argument, and `time` targets record execution/compilation
# timings.
t.write("r.cpp", """
int main( int ac, char * av[] )
{
for ( int i = 1; i < ac; ++i )
{
std::ifstream ifs( av[ i ] );
std::cout << ifs.rdbuf();
}
}
""")
t.write("dir/input.txt", "test input")
t.write("jamfile.jam", """
import testing ;
compile c.cpp ;
obj c-obj : c.cpp ;
compile-fail c-f.cpp ;
run r.cpp : : dir/input.txt ;
run-fail r-f.cpp ;
time execution : r ;
time compilation : c-obj ;
""")
t.run_build_system('hardcode-dll-paths=false')
t.expect_content("bin/r.test/$toolset/debug/r.output",
"test input\nEXIT STATUS: 0\n")
t.expect_addition('bin/$toolset/debug/execution.time')
t.expect_addition('bin/$toolset/debug/compilation.time')

# Third phase: invert every expectation. All targets now fail, so the
# build exits with status 1 and the previous success markers must be
# removed. Sources are touched so everything rebuilds.
t.write("jamfile.jam", """
import testing ;
compile-fail c.cpp ;
compile c-f.cpp ;
run-fail r.cpp : : dir/input.txt ;
run r-f.cpp ;
""")
t.touch(BoostBuild.List("c.cpp c-f.cpp r.cpp r-f.cpp"))
t.run_build_system("hardcode-dll-paths=false", stderr=None, status=1)
t.expect_removal("bin/c.test/$toolset/debug/c.test")
t.expect_removal("bin/c-f.test/$toolset/debug/c-f.test")
t.expect_removal("bin/r.test/$toolset/debug/r.test")
t.expect_removal("bin/r-f.test/$toolset/debug/r-f.test")
t.cleanup()
|
import json
from os import path
import pytest
from ethereum.tools.tester import Chain, ABIContract
from ethereum.tools._solidity import (
get_solidity,
compile_file,
solidity_get_contract_data,
)
# True when a solc compiler backend is available (tests are skipped otherwise).
SOLIDITY_AVAILABLE = get_solidity() is not None
# Directory of fixture contracts, located next to this test file.
CONTRACTS_DIR = path.join(path.dirname(__file__), 'contracts')
@pytest.mark.skipif(not SOLIDITY_AVAILABLE, reason='solc compiler not available')
def test_abicontract_interface():
    """Regression test for issue #370: ABIContract accepts a JSON-encoded ABI."""
    chain = Chain()
    source_path = path.join(CONTRACTS_DIR, 'simple_contract.sol')
    compiled = compile_file(source_path)
    contract_data = solidity_get_contract_data(compiled, source_path, 'Simple')
    deployed_at = chain.contract(contract_data['bin'])
    # The ABI may be handed over as serialized JSON bytes, not only as a
    # decoded structure.
    serialized_abi = json.dumps(contract_data['abi']).encode('utf-8')
    proxy = ABIContract(
        _chain=chain,
        _abi=serialized_abi,
        address=deployed_at,
    )
    assert proxy.test() == 1  # pylint: disable=no-member
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class AttachmentsConfig(AppConfig):
    """
    Django AppConfig for the attachments app: holds app metadata and
    connects signal handlers at startup.
    """

    name = 'kuma.attachments'
    verbose_name = _('Attachments')

    def ready(self):
        # Importing the module registers its signal handlers as a side
        # effect; the name itself is unused, hence the noqa.
        from . import signal_handlers  # noqa
|
"""Module providing a factory for instantiating a temporal memory instance."""
import inspect
from nupic.research.temporal_memory import TemporalMemory
from nupic.bindings.algorithms import TemporalMemory as TemporalMemoryCPP
from htmresearch.algorithms.extended_temporal_memory import (
ExtendedTemporalMemory)
from nupic.research.monitor_mixin.temporal_memory_monitor_mixin import (
TemporalMemoryMonitorMixin)
class MonitoredTemporalMemory(TemporalMemoryMonitorMixin, TemporalMemory):
    """TemporalMemory with the monitoring mixin applied."""
    pass
class TemporalMemoryTypes(object):
    """ Enumeration of supported classification model types, mapping userland
    identifier to constructor. See createModel() for actual factory method
    implementation.
    """
    extended = ExtendedTemporalMemory
    tm = TemporalMemory
    tmMixin = MonitoredTemporalMemory
    tmCPP = TemporalMemoryCPP

    @classmethod
    def getTypes(cls):
        """ Get sequence of acceptable model types. Iterates through class
        attributes and separates the user-defined enumerations from the default
        attributes implicit to Python classes. i.e. this function returns the names
        of the attributes explicitly defined above.
        """
        for attrName in dir(cls):
            attrValue = getattr(cls, attrName)
            if (isinstance(attrValue, type)):
                # attrName is an acceptable model name: only the class-valued
                # attributes defined above pass the isinstance(..., type) test.
                yield attrName
def createModel(modelName, **kwargs):
    """
    Factory returning a temporal memory instance of the requested type.

    @param modelName (str)  A supported temporal memory type name, i.e. one of
                            the identifiers yielded by
                            TemporalMemoryTypes.getTypes().
    @param kwargs    (dict) Constructor arguments forwarded verbatim to the
                            class being instantiated; keyword parameters
                            specific to each model type should be passed here.

    @raises RuntimeError when modelName is not a supported type.
    """
    if modelName not in TemporalMemoryTypes.getTypes():
        raise RuntimeError("Unknown model type: " + modelName)
    modelClass = getattr(TemporalMemoryTypes, modelName)
    return modelClass(**kwargs)
def getConstructorArguments(modelName):
    """
    Return constructor arguments and associated default values for the
    given model type.
    @param modelName (str) A supported temporal memory type
    @return argNames (list of str) a list of strings corresponding to constructor
                                   arguments for the given model type, excluding
                                   'self'.
    @return defaults (list) a list of default values for each argument
    @raises RuntimeError when modelName is not a supported type.
    """
    if modelName not in TemporalMemoryTypes.getTypes():
        raise RuntimeError("Unknown model type: " + modelName)
    # NOTE(review): inspect.getargspec is deprecated (removed in Python 3.11);
    # fine on the Python 2 runtime this codebase presumably targets, but
    # getfullargspec would be needed for a Python 3 port.
    argspec = inspect.getargspec(
        getattr(TemporalMemoryTypes, modelName).__init__)
    # args[1:] drops 'self'.
    return (argspec.args[1:], argspec.defaults)
|
""" symenc.py
"""
import six
from cryptography.exceptions import UnsupportedAlgorithm
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher
from cryptography.hazmat.primitives.ciphers import modes
from .errors import PGPDecryptionError
from .errors import PGPEncryptionError
from .errors import PGPInsecureCipher
__all__ = ['_encrypt',
'_decrypt']
def _encrypt(pt, key, alg, iv=None):
    """Encrypt ``pt`` with ``alg`` (CFB mode) and return the ciphertext.

    :param pt:  plaintext bytes
    :param key: symmetric key bytes
    :param alg: a symmetric cipher algorithm descriptor (provides
                block_size, is_insecure, cipher and name)
    :param iv:  initialization vector; an all-zero IV of one block is
                used when omitted
    :raises PGPInsecureCipher: if the algorithm is known-insecure
    :raises PGPEncryptionError: if the cipher is unsupported
    """
    if iv is None:
        iv = b'\x00' * (alg.block_size // 8)

    # Refuse broken algorithms outright rather than producing weak output.
    if alg.is_insecure:
        raise PGPInsecureCipher("{:s} is not secure. Do not use it for encryption!".format(alg.name))

    if not callable(alg.cipher):
        raise PGPEncryptionError("Cipher {:s} not supported".format(alg.name))

    try:
        enc = Cipher(alg.cipher(key), modes.CFB(iv), default_backend()).encryptor()
    except UnsupportedAlgorithm as exc:  # pragma: no cover
        # Translate backend failures into the package's own error type.
        six.raise_from(PGPEncryptionError, exc)
    else:
        return bytearray(enc.update(pt) + enc.finalize())
def _decrypt(ct, key, alg, iv=None):
    """Decrypt ``ct`` with ``alg`` (CFB mode) and return the plaintext.

    :param ct:  ciphertext bytes
    :param key: symmetric key bytes
    :param alg: a symmetric cipher algorithm descriptor (provides
                block_size and cipher)
    :param iv:  initialization vector; an all-zero IV of one block is
                used when omitted
    :raises PGPDecryptionError: if the cipher is unsupported by the backend
    """
    if iv is None:
        # Instead of using an IV, OpenPGP prefixes a string of length
        # equal to the block size of the cipher plus two to the data before it
        # is encrypted. The first block-size octets (for example, 8 octets for
        # a 64-bit block length) are random, and the following two octets are
        # copies of the last two octets of the IV. Hence the all-zero IV here.
        iv = b'\x00' * (alg.block_size // 8)

    try:
        dec = Cipher(alg.cipher(key), modes.CFB(iv), default_backend()).decryptor()
    except UnsupportedAlgorithm as exc:  # pragma: no cover
        # Translate backend failures into the package's own error type.
        six.raise_from(PGPDecryptionError, exc)
    else:
        return bytearray(dec.update(ct) + dec.finalize())
|
# Odoo addon manifest: adds stock-move information to invoice lines.
{
    'name': 'Account Invoice Line Stock Move Info',
    'version': "8.0.1.0.0",
    'license': "AGPL-3",
    # Adjacent string literals concatenate into one author string.
    'author': 'OdooMRP team,'
              'AvanzOSC,'
              'Serv. Tecnol. Avanzados - Pedro M. Baeza',
    'website': "http://www.odoomrp.com",
    "contributors": [
        # Fixed: the closing '>' was missing from this e-mail address.
        "Pedro M. Baeza <pedro.baeza@serviciosbaeza.com>",
        "Ana Juaristi <anajuaristi@avanzosc.es>",
        "Alfredo de la Fuente <alfredodelafuente@avanzosc.es>",
    ],
    'category': 'Accounting & Finance',
    'depends': [
        'product',
        'sale_stock',
        'stock_account',
    ],
    'data': [
        "views/account_invoice_line_view.xml",
    ],
    'installable': True,
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.