prefix
stringlengths
0
918k
middle
stringlengths
0
812k
suffix
stringlengths
0
962k
#!/usr/bin/env python
"""Driver for Linux-based firmware analysis.

Extracts a firmware image, imports it into the FIRMADYNE database, builds
and emulates a QEMU image, then runs the analyses selected on the command
line (source audit, Angr, AFL, network AFL, Metasploit), each in its own
process.
"""
import argparse
import sys
import re
import os
import locale
import subprocess
import traceback
from multiprocessing import Process


def dbquery(query, params=None):
    """Run *query* against the firmware database and return all rows.

    ``params`` is an optional parameter tuple (use ``%s`` placeholders in
    the query, never string concatenation, to avoid SQL injection).
    Returns ``None`` when the query fails; the failure is printed.
    """
    import psycopg2
    db = psycopg2.connect(dbname="firmware", user="firmadyne",
                          password="firmadyne", host="127.0.0.1")
    ret = None
    try:
        cur = db.cursor()
        try:
            cur.execute(query, params)
            ret = cur.fetchall()
        except BaseException:
            # The old version crashed here twice over: fetchall() ran in the
            # ``finally`` clause even after a failed execute, and
            # ``traceback`` was never imported.
            traceback.print_exc()
        finally:
            cur.close()
    finally:
        db.close()
    return ret


def source(iid):
    """Run static source-code analysis for image *iid* and score the result."""
    script = os.getcwd() + '/analysis/source.sh'
    p = subprocess.run([script, str(iid)],
                       stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    print(p.stdout.decode())
    print(p.stderr.decode())

    # Calculate the security score.  The analyzer reports end with a
    # "Hits/KSLOC@level+" line whose fields 3, 5, 7, 9, 11 and 13 are the
    # per-severity-level hit rates.
    resultdir = os.getcwd() + '/results/' + str(iid) + '/source'
    firmware_score = 0
    for (rootdir, dirs, files) in os.walk(resultdir):
        for outfile in files:
            if not outfile.endswith('.dec.c.out'):
                continue
            file_score = 0
            # ``with`` closes the report file (the old code leaked handles).
            with open(rootdir + '/' + outfile, "r") as report:
                for line in report:
                    line = line.strip()
                    if re.search(r'Hits/KSLOC@level\+', line):
                        sp = line.split()
                        file_score += sum(float(sp[i])
                                          for i in (3, 5, 7, 9, 11, 13))
            # Squash the unbounded raw score into [0, 10).
            file_score = 10 - 600 / (file_score + 60)
            # TODO: store file_score in the database.
            firmware_score += file_score
    # Squash the aggregate score into [0, 10).
    firmware_score = 10 - 500 / (firmware_score + 50)
    # TODO: store firmware_score in the database.


def angr(iid):
    """Static analysis with Angr (not yet implemented)."""
    print('warning: the Angr function is under development')
    # TODO


def afl(iid):
    """Fuzz the firmware binaries of image *iid* with AFL."""
    sys.path.append('./analysis')
    import afl
    # str(iid): the old code concatenated iid directly and crashed for ints.
    resultdir = os.getcwd() + '/results/' + str(iid) + '/afl'
    afl.process(iid, resultdir)


def netafl(iid, ip):
    """Fuzz the network services of image *iid* with AFL (not yet implemented)."""
    resultdir = os.getcwd() + '/results/' + str(iid) + '/netafl'
    script = os.getcwd() + '/analysis/netafl.py'
    print('warning: the network AFL function is under development')
    # TODO


def metasploit(iid, ip):
    """Run the known Metasploit/shell exploits against the emulated image at *ip*."""
    sys.path.append('./analysis/metasploit')
    import runExploits
    exploits = (list(runExploits.METASPLOIT_EXPLOITS.keys()) +
                list(runExploits.SHELL_EXPLOITS.keys()))
    resultdir = os.getcwd() + '/results/' + str(iid) + '/metasploit'
    if not os.path.isdir(resultdir):
        if os.path.exists(resultdir):
            # A stale plain file is squatting on the directory name.
            os.remove(resultdir)
        os.makedirs(resultdir, 0o755)
    # %(exploit)s is substituted per exploit by runExploits.
    outfile = resultdir + "/%(exploit)s.log"
    runExploits.process(ip, exploits, outfile)


def extract(input_file):
    """Extract the firmware rootfs; return (image id, already-seen flag)."""
    sys.path.append('./scripts')
    import extractor
    e = extractor.Extractor(input_file, 'images', True, False, False,
                            '127.0.0.1', None)
    ocwd = os.getcwd()
    (iid, repeated) = e.extract()
    os.chdir(ocwd)  # the extractor may chdir; restore our working directory
    return (iid, repeated)


def importdb(iid):
    """Import the extracted image tarball into the firmware database."""
    sys.path.append('./db')
    import importdb
    image = './images/' + str(iid) + '.tar.gz'
    importdb.getarch(image)
    importdb.process(iid, image)


def makeimage(iid):
    """Build the QEMU disk image for *iid* (requires sudo)."""
    p = subprocess.run(['sudo', './qemu/scripts/makeImage.sh', str(iid)],
                       stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    print(p.stdout.decode())
    print(p.stderr.decode())


def infernetwork(iid):
    """Infer the network configuration of the emulated image."""
    p = subprocess.run(['./qemu/scripts/inferNetwork.sh', str(iid)],
                       stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    print(p.stdout.decode())
    print(p.stderr.decode())


def getIP(iid):
    """Return the inferred IP address for image *iid*, or None.

    Uses a parameterized query via dbquery (the old version duplicated the
    connection logic and concatenated iid into the SQL string).
    """
    rows = dbquery("SELECT ip FROM image WHERE id=%s", (iid,))
    return rows[0][0] if rows else None


def rootfs_extracted(iid):
    """True when the database records an extracted rootfs for *iid*."""
    rows = dbquery("SELECT rootfs_extracted FROM image WHERE id=%s", (iid,))
    return rows[0][0] if rows else False


def main():
    os.chdir(os.path.dirname(os.path.realpath(__file__)))
    parser = argparse.ArgumentParser(description="Linux-based firmware analysis")
    parser.add_argument("input_file", action="store",
                        help="Input firmware image")
    parser.add_argument("-i", dest="id", action="store", default=None,
                        help="firmware ID")
    parser.add_argument("-s", dest="source", action="store_true", default=False,
                        help="Enable source code analysis")
    parser.add_argument("-a", dest="angr", action="store_true", default=False,
                        help="Enable static analysis with Angr")
    parser.add_argument("-f", dest="afl", action="store_true", default=False,
                        help="Fuzzing the firmware binaries with AFL")
    parser.add_argument("-n", dest="netafl", action="store_true", default=False,
                        help="Fuzzing the network services with AFL")
    parser.add_argument("-m", dest="metasploit", action="store_true", default=False,
                        help="Penetration test with metasploit exploits")
    arg = parser.parse_args()

    (iid, repeated) = extract(arg.input_file)
    if arg.id is not None and iid != arg.id:
        print('error: frontend firmware ID and backend image ID conflict')
        sys.exit(1)
    if not rootfs_extracted(iid):
        print('error: cannot find rootfs')
        sys.exit(1)

    # Import into the database only the first time we see this image.
    if not repeated:
        importdb(iid)

    if arg.source:
        # Bug fix: the old code replaced iid with arg.id here, which is None
        # unless -i was given; the extracted iid is already validated above.
        s = Process(target=source, args=(iid,))
        s.start()

    # makeImage, inferNetwork
    if not repeated:
        makeimage(iid)
        infernetwork(iid)
    ip = getIP(iid)
    if not ip:
        print('warning: no interface detected')

    if arg.angr:
        a = Process(target=angr, args=(iid,))
        a.start()
    if arg.afl:
        f = Process(target=afl, args=(iid,))
        f.start()
    if arg.netafl and ip:
        n = Process(target=netafl, args=(iid, ip))
        n.start()
    if arg.metasploit and ip:
        m = Process(target=metasploit, args=(iid, ip))
        m.start()

    # Wait for every analysis we launched.
    if arg.source:
        s.join()
    if arg.angr:
        a.join()
    if arg.afl:
        f.join()
    if arg.netafl and ip:
        n.join()
    if arg.metasploit and ip:
        m.join()


if __name__ == '__main__':
    main()
// image_modulus - 1] = img RT_blender[ii // image_modulus - 1, :] = RT.reshape(12) R_i2bcam[ii // image_modulus - 1, :] = R.reshape(9) # do some image processing and visual odometry ii += 1 image_data.create_dataset('K', data=K) image_data.create_dataset('i_state', data=i_state) image_data.create_dataset('time', data=time) def blender_inertial_lissajous(gen_images=False): """Move around the asteroid in the inertial frame, but assume no rotation of the asteroid """ # simulation parameters output_path = './visualization/blender' asteroid_name = 'itokawa_high' # create a HDF5 dataset hdf5_path = './data/asteroid_circumnavigate/{}_inertial_no_ast_rotation_lissajous.hdf5'.format( datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S")) dataset_name = 'landing' render = 'BLENDER' image_modulus = 1 RelTol = 1e-6 AbsTol = 1e-6 ast_name = 'itokawa' num_faces = 64 t0 = 0 dt = 1 tf = 3600 * 2 num_steps = 3600 * 2 loops = 2 periodic_pos = np.array([1.495746722510590,0.000001002669660,0.006129720493607]) periodic_vel = np.array([0.000000302161724,-0.000899607989820,-0.000000013286327]) ast = asteroid.Asteroid(ast_name,num_faces) dum = dumbbell.Dumbbell(m1=500, m2=500, l=0.003) # instantiate the blender scene once camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene = blender.blender_init(render_engine=render, asteroid_name=asteroid_name) # get some of the camera parameters K = blender_camera.get_calibration_matrix_K_from_blender(camera) # set initial state for inertial EOMs initial_pos = np.array([3, 3, 0]) # km for center of mass in body frame initial_vel = periodic_vel + attitude.hat_map(ast.omega*np.array([0,0,1])).dot(initial_pos) initial_R = attitude.rot3(np.pi).reshape(9) # transforms from dumbbell body frame to the inertial frame initial_w = np.array([0.01, 0.01, 0.01]) initial_state = np.hstack((initial_pos, initial_vel, initial_R, initial_w)) # instantiate ode object system = integrate.ode(eoms.eoms_controlled_inertial_lissajous) system.set_integrator('lsoda', 
atol=AbsTol, rtol=RelTol, nsteps=1000) system.set_initial_value(initial_state, t0) system.set_f_params(dum, ast, tf, loops) i_state = np.zeros((num_steps+1, 18)) time = np.zeros(num_steps+1) i_state[0, :] = initial_state with h5py.File(hdf5_path) as image_data: # create a dataset if gen_images: images = image_data.create_dataset(dataset_name, (244, 537, 3, num_steps/image_modulus), dtype='uint8') RT_blender = image_data.create_dataset('RT', (num_steps/image_modulus, 12)) R_i2bcam = image_data.create_dataset('R_i2bcam', (num_steps/image_modulus, 9)) ii = 1 while system.successful() and system.t < tf: # integrate the system and save state to an array time[ii] = (system.t + dt) i_state[ii, :] = (system.integrate(system.t + dt)) # generate the view of the asteroid at this state if int(time[ii]) % image_modulus == 0 and gen_images: # img, RT, R = blender.gen_image(i_state[ii,0:3], i_state[ii,6:15].reshape((3, 3)), # ast.omega * time[ii], # camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene, # [5, 0, 1], 'test') img, RT, R = blender.gen_image_fixed_ast(i_state[ii,0:3], i_state[ii,6:15].reshape((3,3)), camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene, [5, 0, 1], 'test') images[:, :, :, ii // image_modulus - 1] = img RT_blender[ii // image_modulus - 1, :] = RT.reshape(12) R_i2bcam[ii // image_modulus - 1, :] = R.reshape(9) # do some image processing and visual odometry ii += 1 image_data.create_dataset('K', data=K) image_data.create_dataset('i_state', data=i_state) image_data.create_dataset('time', data=time) def blender_inertial_quarter_equatorial(gen_images=False): """Move around the asteroid in the inertial frame, but assume no rotation of the asteroid Moves in the xy positive quadrant in the equatorial plane """ # simulation parameters output_path = './visualization/blender' asteroid_name = 'itokawa_high' # create a HDF5 dataset hdf5_path = './data/asteroid_circumnavigate/{}_inertial_no_ast_rotation_quarter_xy.hdf5'.format( 
datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S")) dataset_name = 'landing' render = 'BLENDER' image_modulus = 1 RelTol = 1e-6 AbsTol = 1e-6 ast_name = 'itokawa' num_faces = 64 t0 = 0 dt = 1 tf = 3600 * 4 num_steps = 3600 * 4 loops = 4 periodic_pos = np.array([1.495746722510590,0.000001002669660,0.006129720493607]) periodic_vel = np.array([0.000000302161724,-0.000899607989820,-0.000000013286327]) ast = asteroid.Asteroid(ast_name,num_faces) dum = dumbbell.Dumbbell(m1=500, m2=500, l=0.003) # instantiate the blender scene once camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene = blender.blender_init(render_engine=render, asteroid_name=asteroid_name) # get some of the camera parameters K = blender_camera.get_calibration_matrix_K_from_blender(camera) # set initial state for inertial EOMs initial_pos = np.array([3, 0, 0]) # km for center of mass in body frame initial_vel = periodic_vel + attitude.hat_map(ast.omega*np.array([0,0,1])).dot(initial_pos) initial_R = attitude.rot3(np.pi).reshape(9) # transforms from dumbbell body frame to the inertial frame initial_w = np.array([0.01, 0.01, 0.01]) initial_state = np.hstack((initial_pos, initial_vel, initial_R, initial_w)) # instantiate ode object system = integrate.ode(eoms.eoms_controlled_inertial_quarter_equatorial) system.set_integrator('lsoda', atol=AbsTol, rtol=RelTol, nsteps=1000) system.set_initial_value(initial_state, t0) system.set_f_params(dum, ast, tf, loops) i_state = np.zeros((num_steps+1, 18)) time = np.zeros(num_steps+1) i_state[0, :] = initial_state with h5py.File(hdf5_path) as image_data: # create a dataset if gen_images: images = image_data.create_dataset(dataset_name, (244, 537, 3, num_steps/image_modulus), dtype='uint8') RT_blender = image_data.create_dataset('RT', (num_steps/image_modulus, 12)) R_i2bcam = image_data.create_dataset('R_i2bcam', (num_steps/image_modulus, 9))
ii = 1 while system.successful() and system.t < tf: # integrate the system and save state to an array time[ii] = (system.t + dt)
i_state[ii, :] = (system.integrate(system.t + dt)) # generate the view of the asteroid at this state if int(time[ii]) % image_modulus == 0 and gen_images: # img, RT, R = blender.gen_image(i_state[ii,0:3], i_state[ii,6:15].reshape((3, 3)), # ast.omega * time[ii], # camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene, # [5, 0, 1], 'test') img, RT, R = blender.gen_image_fixed_ast(i_state[ii,0:3], i_state[ii,6:15].reshape((3,3)), camera_obj, camera, lamp_obj, lamp, itokawa_obj, scene, [5, 0, 1], 'test') images[:, :,
"""Functions for the backend of LetterBoy""" def lb_standardcase(): """Capitalise the first letter of each sentence, and set all others to lowercase.""" pass def lb_uppercase(): """Capitalise each letter.""" pass def lb_lowercase(): """Set all letters to lowercase.""" pass def lb_camelcase(): """Capitalise the first letter of each word, and set all others to lowercase.""" pass def lb_staggercase():
"""Alternate eac
h character between upper- and lower-case.""" pass def lb_jumbles_nontrobo(): """Jumble up text between the first and last letters in each word.""" pass def lb_zcorrupt(): """Add glitch text to the plaintext.""" pass def lb_zstrip(): """Remove glitch text.""" pass
import uuid
import datetime as dt
import json
import urllib.request
import urllib.parse


def get_title(title=""):
    """Build the page title: the site name, prefixed by *title* when given."""
    if title == "":
        return "GetCompany info"
    return title + " - GetCompany info"


def get_new_token():
    """Return a random 32-character token (lowercase hex, no dashes)."""
    # Two UUID4s give 64 hex characters after dash removal; keep the first 32.
    return str(str(uuid.uuid4()) + str(uuid.uuid4())).replace("-", "")[:32]


def get_timestamp(datetime):
    """Parse a 'YYYY-MM-DD HH:MM:SS.ffffff' string into a UNIX timestamp (int)."""
    return int(dt.datetime.strptime(datetime, "%Y-%m-%d %H:%M:%S.%f").timestamp())


def remove_microseconds(datetime):
    """Parse a 'YYYY-MM-DD HH:MM:SS.ffffff' string and return it as a
    datetime with the microsecond component dropped.

    Bug fix: the previous version parsed the string but kept the
    microseconds, contradicting the function name.
    """
    parsed = dt.datetime.strptime(datetime, "%Y-%m-%d %H:%M:%S.%f")
    return parsed.replace(microsecond=0)


def get_remote_IP(request):
    """Return the client IP, preferring Cloudflare's header over REMOTE_ADDR."""
    ip = request.META.get('HTTP_CF_CONNECTING_IP')
    if ip is None:
        ip = request.META.get('REMOTE_ADDR')
    return ip


def check_recaptcha(response, ip):
    """Verify a reCAPTCHA *response* for client *ip* against Google's API.

    Returns True only when Google reports success; an empty response fails
    without a network round-trip.
    """
    if response == "":
        return False
    # Imported lazily so the pure helpers above remain usable without the
    # Django settings module being importable/configured.
    from Main.handlers.settings import RECAPTCHA_SECRET_KEY
    data = urllib.parse.urlencode({"secret": RECAPTCHA_SECRET_KEY,
                                   "response": response,
                                   "remoteip": ip})
    binary_data = data.encode('utf-8')
    # ``with`` closes the connection deterministically (the old code leaked it).
    with urllib.request.urlopen("https://www.google.com/recaptcha/api/siteverify",
                                binary_data) as u:
        result = u.read()
    recaptcha_result = json.loads(result.decode('utf-8'))
    # .get() so a malformed reply counts as failure instead of raising KeyError.
    return recaptcha_result.get("success", False)
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------

from msrest.serialization import Model


class EffectiveNetworkSecurityRule(Model):
    """Effective network security rules.

    :param name: The name of the security rule specified by the user (if
     created by the user).
    :type name: str
    :param protocol: The network protocol this rule applies to. Possible
     values are: 'Tcp', 'Udp', and 'All'. Possible values include: 'Tcp',
     'Udp', 'All'
    :type protocol: str or
     ~azure.mgmt.network.v2017_09_01.models.EffectiveSecurityRuleProtocol
    :param source_port_range: The source port or range.
    :type source_port_range: str
    :param destination_port_range: The destination port or range.
    :type destination_port_range: str
    :param source_port_ranges: The source port ranges. Expected values
     include a single integer between 0 and 65535, a range using '-' as
     separator (e.g. 100-400), or an asterisk (*)
    :type source_port_ranges: list[str]
    :param destination_port_ranges: The destination port ranges. Expected
     values include a single integer between 0 and 65535, a range using '-'
     as separator (e.g. 100-400), or an asterisk (*)
    :type destination_port_ranges: list[str]
    :param source_address_prefix: The source address prefix.
    :type source_address_prefix: str
    :param destination_address_prefix: The destination address prefix.
    :type destination_address_prefix: str
    :param source_address_prefixes: The source address prefixes. Expected
     values include CIDR IP ranges, Default Tags (VirtualNetwork,
     AzureLoadBalancer, Internet), System Tags, and the asterisk (*).
    :type source_address_prefixes: list[str]
    :param destination_address_prefixes: The destination address prefixes.
     Expected values include CIDR IP ranges, Default Tags (VirtualNetwork,
     AzureLoadBalancer, Internet), System Tags, and the asterisk (*).
    :type destination_address_prefixes: list[str]
    :param expanded_source_address_prefix: The expanded source address
     prefix.
    :type expanded_source_address_prefix: list[str]
    :param expanded_destination_address_prefix: Expanded destination address
     prefix.
    :type expanded_destination_address_prefix: list[str]
    :param access: Whether network traffic is allowed or denied. Possible
     values are: 'Allow' and 'Deny'. Possible values include: 'Allow', 'Deny'
    :type access: str or
     ~azure.mgmt.network.v2017_09_01.models.SecurityRuleAccess
    :param priority: The priority of the rule.
    :type priority: int
    :param direction: The direction of the rule. Possible values are:
     'Inbound and Outbound'. Possible values include: 'Inbound', 'Outbound'
    :type direction: str or
     ~azure.mgmt.network.v2017_09_01.models.SecurityRuleDirection
    """

    # Maps each model attribute to its wire (JSON) key and serialized type;
    # consumed by msrest's Model (de)serialization machinery.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'protocol': {'key': 'protocol', 'type': 'str'},
        'source_port_range': {'key': 'sourcePortRange', 'type': 'str'},
        'destination_port_range': {'key': 'destinationPortRange', 'type': 'str'},
        'source_port_ranges': {'key': 'sourcePortRanges', 'type': '[str]'},
        'destination_port_ranges': {'key': 'destinationPortRanges', 'type': '[str]'},
        'source_address_prefix': {'key': 'sourceAddressPrefix', 'type': 'str'},
        'destination_address_prefix': {'key': 'destinationAddressPrefix', 'type': 'str'},
        'source_address_prefixes': {'key': 'sourceAddressPrefixes', 'type': '[str]'},
        'destination_address_prefixes': {'key': 'destinationAddressPrefixes', 'type': '[str]'},
        'expanded_source_address_prefix': {'key': 'expandedSourceAddressPrefix', 'type': '[str]'},
        'expanded_destination_address_prefix': {'key': 'expandedDestinationAddressPrefix', 'type': '[str]'},
        'access': {'key': 'access', 'type': 'str'},
        'priority': {'key': 'priority', 'type': 'int'},
        'direction': {'key': 'direction', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        # All attributes are optional keyword arguments; anything not
        # supplied defaults to None.
        super(EffectiveNetworkSecurityRule, self).__init__(**kwargs)
        self.name = kwargs.get('name', None)
        self.protocol = kwargs.get('protocol', None)
        self.source_port_range = kwargs.get('source_port_range', None)
        self.destination_port_range = kwargs.get('destination_port_range', None)
        self.source_port_ranges = kwargs.get('source_port_ranges', None)
        self.destination_port_ranges = kwargs.get('destination_port_ranges', None)
        self.source_address_prefix = kwargs.get('source_address_prefix', None)
        self.destination_address_prefix = kwargs.get('destination_address_prefix', None)
        self.source_address_prefixes = kwargs.get('source_address_prefixes', None)
        self.destination_address_prefixes = kwargs.get('destination_address_prefixes', None)
        self.expanded_source_address_prefix = kwargs.get('expanded_source_address_prefix', None)
        self.expanded_destination_address_prefix = kwargs.get('expanded_destination_address_prefix', None)
        self.access = kwargs.get('access', None)
        self.priority = kwargs.get('priority', None)
        self.direction = kwargs.get('direction', None)
der the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import json import logging import jsonpickle from azure.cosmosdb.table import TableService from azure.mgmt.storage.models import IPRule, \ NetworkRuleSet, StorageAccountUpdateParameters, VirtualNetworkRule from azure.storage.blob import BlockBlobService from azure.storage.common.models import RetentionPolicy, Logging from azure.storage.file import FileService from azure.storage.queue import QueueService from c7n_azure.actions.base import AzureBaseAction from c7n_azure.constants import BLOB_TYPE, FILE_TYPE, QUEUE_TYPE, TABLE_TYPE from c7n_azure.filters import FirewallRulesFilter, ValueFilter from c7n_azure.provider import resources from c7n_azure.resources.arm import ArmResourceManager from c7n_azure.storage_utils import StorageUtilities from c7n_azure.utils import ThreadHelper from netaddr import IPSet from c7n.exceptions import PolicyValidationError from c7n.filters.core import type_schema from c7n.utils import local_session, get_annotation_prefix @resources.register('storage') class Storage(ArmResourceManager): """Storage Account Resource :example: Finds all Storage Accounts in the subscription. .. code-block:: yaml policies: - name: find-all-storage-accounts resource: azure.storage """ class resource_type(ArmResourceManager.resource_type): doc_groups = ['Storage'] service = 'azure.mgmt.storage' client = 'StorageManagementClient' enum_spec = ('storage_accounts', 'list', None) diagnostic_settings_enabled = False resource_type = 'Microsoft.Storage/storageAccounts' @Storage.action_registry.register('set-network-rules') class StorageSetNetworkRulesAction(AzureBaseAction): """ Set Network Rules Action Updates Azure Storage Firewalls and Virtual Networks settings. :example: Find storage accounts without any firewall rules. 
Configure default-action to ``Deny`` and then allow: - Azure Logging and Metrics services - Two specific IPs - Two subnets .. code-block:: yaml policies: - name: add-storage-firewall resource: azure.storage filters: - type: value key: properties.networkAcls.ipRules value_type: size op: eq value: 0 actions: - type: set-network-rules default-action: Deny bypass: [Logging, Metrics] ip-rules: - ip-address-or-range: 11.12.13.14 - ip-address-or-range: 21.22.23.24 virtual-network-rules: - virtual-network-resource-id: <subnet_resource_id> - virtual-network-resource-id: <subnet_resource_id> """ schema = type_schema( 'set-network-rules', required=['default-action'], **{ 'default-action': {'enum': ['Allow', 'Deny']}, 'bypass': {'type': 'array', 'items': {'enum': ['AzureServices', 'Logging', 'Metrics']}}, 'ip-rules': { 'type': 'array', 'items': {'ip-address-or-range': {'type': 'string'}} }, 'virtual-network-rules': { 'type': 'array', 'items': {'virtual-network-resource-id': {'type': 'string'}} } } ) def _prepare_processing(self,): self.client = self.manager.get_client() def _process_resource(self, resource): rule_set = NetworkRuleSet(default_action=self.data['default-action']) if 'ip-rules' in self.data: rule_set.ip_rules = [ IPRule( ip_address_or_range=r['ip-address-or-range'], action='Allow') # 'Allow' is the only allowed action for r in self.data['ip-rules']] if 'virtual-network-rules' in self.data: rule_set.virtual_network_rules = [ VirtualNetworkRule( virtual_network_resource_id=r['virtual-network-resource-id'], action='Allow') # 'Allow' is the only allowed action for r in self.data['virtual-network-rules']] if len(self.data.get('bypass', [])) > 0: rule_set.bypass = ','.join(self.data['bypass']) else: rule_set.bypass = 'None' self.client.storage_accounts.update( resource['resourceGroup'], resource['name'], StorageAccountUpdateParameters(network_rule_set=rule_set)) @Storage.filter_registry.register('firewall-rules') class StorageFirewallRulesFilter(FirewallRulesFilter): 
def __init__(self, data, manager=None): super(StorageFirewallRulesFilter, self).__init__(data, manager) self._log = logging.getLogger('custodian.azure.storage') @property def log(self): return self._log def _query_rules(self, resource): ip_rules = resource['properties']['networkAcls']['ipRules'] resource_rules = IPSet([r['value'] for r in ip_rules]) return resource_rules @Storage.filter_registry.register('storage-diagnostic-settings') class StorageDiagnosticSettingsFilter(ValueFilter): """Filters storage accounts based on its diagnostic settings. The filter requires specifying the storage type (blob, queue, table, file) and will filter based on the settings for that specific type. :example: Find all storage accounts that have a 'delete' logging setting disabled. .. code-block:: yaml policies: - name: find-accounts-with-delete-logging-disabled resource: azure.storage filters: - or: - type: storage-diagnostic-settings storage-type: blob key: logging.delete op: eq value: False - type: storage-diagnostic-settings storage-type: queue key: logging.delete op: eq value: False - type: storage-diagnostic-settings storage-type: table key: logging.delete op: eq value: False """ schema = type_schema('storage-diagnostic-settings', rinherit=ValueFilter.schema, required=['storage-type'], **{'storage-type': { 'type': 'string', 'enum': [BLOB_TYPE, QUEUE_TYPE, TABLE_TYPE, FILE_
TYPE]}} ) def __init__(self, data, manager=None): super(StorageDiagnosticSettingsFilter, self).__init__(data, manager) self.storage_type = data.get('storage-type') s
elf.log = logging.getLogger('custodian.azure.storage') def process(self, resources, event=None): session = local_session(self.manager.session_factory) token = StorageUtilities.get_storage_token(session) result, errors = ThreadHelper.execute_in_parallel( resources=resources, event=event, execution_method=self.process_resource_set, executor_factory=self.executor_factory, log=self.log, session=session, token=token ) return result def process_resource_set(self, resources, event=None, session=None, token=None): matched = [] for resource in resources: settings = self._get_settings(resource, session, token) filtered_settings = super(StorageDiagnosticSettingsFilter, self).process([settings], event) if filtered_settings:
from django import template
from bookmarks.models import BookmarkInstance
from tagging.models import Tag

register = template.Library()


@register.inclusion_tag('bookmarks/tags.html')
def show_bookmarks_tags():
    """Render a box of tags used by the bookmarks that belong to the
    current site."""
    site_bookmarks = BookmarkInstance.on_site.all()
    used_tags = Tag.objects.usage_for_queryset(queryset=site_bookmarks,
                                               counts=True,
                                               min_count=1)
    return {'bookmark_tags': used_tags}
""" Helper file to manage translations for the Meerkat Authentication module. We have two types of translations, general and implementation specific The general translations are extracted from the python, jijna2 and js files. """ from csv import DictReader import argparse import os import shutil import datetime from babel.messages.pofile import read_po, write_po from babel.messages.catalog import Catalog, Message from babel._compat import BytesIO parser = argparse.ArgumentParser() parser.add_argument("action", choices=["update-po", "initialise", "compile" ], help="Choose action" ) parser.add_argument("-l", type=str, help
="Two letter
langauge code") if __name__ == "__main__": args = parser.parse_args() lang_dir = "meerkat_auth" if args.action == "update-po": os.system("pybabel extract -F babel.cfg -o {}/messages.pot .".format(lang_dir) ) os.system("pybabel update -i {}/messages.pot -d {}/translations".format(lang_dir, lang_dir) ) os.system("rm {}/messages.pot".format(lang_dir)) elif args.action == "initialise": if args.l and len(args.l) == 2: os.system("pybabel extract -F babel.cfg -o {}/messages.pot .".format(lang_dir) ) os.system("pybabel init -i {}/messages.pot -d {}/translations -l {}".format( lang_dir, lang_dir,args.l )) os.system("pybabel update -i {}/messages.pot -d {}/translations".format(lang_dir, lang_dir) ) os.system("rm {}/messages.pot".format(lang_dir)) else: print("Need to specify a two letter language code") elif args.action == "compile": os.system("pybabel compile -d {}/translations".format(lang_dir))
#/usr/bin/python #!*-* coding:utf-8 *-* # Este script es sofware libre. Puede redistribuirlo y/o modificarlo bajo # los terminos de la licencia pública general de GNU, según es publicada # por la free software fundation bien la versión 3 de la misma licencia # o de cualquier versión posterior. (según su elección ). # Si usted hace alguna modificación en esta aplicación, deberá siempre # mencionar el autor original de la misma. # Autor: # Universidad Distrital Francisco Jose # Grupo de fisica e informatica # Diego Alberto Parra Garzón # Dr Julian Andres Salamanca Bernal # Colombia, Bogota D.C. import serial import os import subprocess import math import time import Gnuplot from Tkinter import * import tkMessageBox import Tkinter import shutil class Gramo(): def Atenua(self): bicho = Tk() bicho.geometry("280x170+200+90") bicho.config(bg="white") bicho.title("Infrarossi") bicho.resizable(width=0, height=0) def Verifica(): print "ola" def Salir(): tkMessageBox.showinfo("Infrarossi", message= "Saliendo .... ") arduino = serial.Serial("/dev/rfcomm0", 9600) arduino.write('aa') exit() exit() def Grafica(): os.system("python g_p_Ate.py &") def Comenzar1(): tkMessageBox.showinfo("Infrarossi", message= "Se procede a capturar datos, para detener el proceso cierre la ventana de captura de datos 'de color azul'") os.system("xterm -T Infrarossi
-geom 50x8+185+1
00 +cm -bg blue -e python bin/c_p_Ate.py &") # os.system("python bin/c_p_Ate.py") # --------------------------------CONFIGURACION DE VENTANA ------------------------------------------------------------------------------ X=8 Y=10 lblTitulo = Label(bicho, text="ATENUACION", fg = ("blue"), bg = ("white"), font = ("Century Schoolbook L",23)).place(x=30, y=20) btnConectar1 = Button(bicho, text= " INICIAR ", width=5, height=1, command= Comenzar1).place(x=20+X, y=100+Y) btnSalir = Button(bicho, text= " SALIR ", width=5, height=1, command= Salir).place(x=170+X, y=100+Y) btnGrafica = Button(bicho, text= " GRAFICA ", width=5, height=1, command= Grafica).place(x=95+X, y=100+Y) Verifica() bicho.mainloop() def __init__(self): self.Atenua() self.__del__() def __del__(self): print ("PROGRAMA TERMINADO") modulo = Gramo()
import os
from conans.tools import unzip
import shutil
from conans.util.files import rmdir, mkdir
from conans.client.remote_registry import RemoteRegistry
from conans import tools
from conans.errors import ConanException


def _handle_remotes(registry_path, remote_file, output):
    # Replace the local remote registry with the remotes listed in the
    # installed configuration's remotes file.
    registry = RemoteRegistry(registry_path, output)
    new_registry = RemoteRegistry(remote_file, output)
    registry.define_remotes(new_registry.remotes)


def _handle_profiles(source_folder, target_folder, output):
    # Copy every profile file (preserving relative sub-paths) into the
    # local profiles folder, creating it if needed.
    mkdir(target_folder)
    for root, _, files in os.walk(source_folder):
        relative_path = os.path.relpath(root, source_folder)
        if relative_path == ".":
            relative_path = ""
        for f in files:
            profile = os.path.join(relative_path, f)
            output.info(" Installing profile %s" % profile)
            shutil.copy(os.path.join(root, f), os.path.join(target_folder, profile))


def _process_git_repo(repo_url, client_cache, output, runner, tmp_folder):
    # Clone the configuration repo into tmp_folder/config and install from it.
    output.info("Trying to clone repo %s" % repo_url)
    with tools.chdir(tmp_folder):
        runner('git clone "%s" config' % repo_url, output=output)
    tmp_folder = os.path.join(tmp_folder, "config")
    _process_folder(tmp_folder, client_cache, output)


def _process_zip_file(zippath, client_cache, output, tmp_folder, remove=False):
    # Extract the zip into tmp_folder and install from it; optionally delete
    # the archive afterwards (used for downloaded zips).
    unzip(zippath, tmp_folder)
    if remove:
        os.unlink(zippath)
    _process_folder(tmp_folder, client_cache, output)


def _handle_conan_conf(current_conan_conf, new_conan_conf_path):
    # Merge the installed conan.conf into the current one and rewrite it.
    current_conan_conf.read(new_conan_conf_path)
    with open(current_conan_conf.filename, "w") as f:
        current_conan_conf.write(f)


def _process_folder(folder, client_cache, output):
    # Walk the extracted/cloned configuration and dispatch each known file
    # (settings.yml, conan.conf, remotes.txt) to its handler; anything else
    # is copied verbatim into the conan folder.
    for root, dirs, files in os.walk(folder):
        for f in files:
            if f == "settings.yml":
                output.info("Installing settings.yml")
                settings_path = client_cache.settings_path
                shutil.copy(os.path.join(root, f), settings_path)
            elif f == "conan.conf":
                output.info("Processing conan.conf")
                conan_conf = client_cache.conan_config
                _handle_conan_conf(conan_conf, os.path.join(root, f))
            elif f == "remotes.txt":
                output.info("Defining remotes")
                registry_path = client_cache.registry
                _handle_remotes(registry_path, os.path.join(root, f), output)
            else:
                output.info("Copying file %s to %s" % (f, client_cache.conan_folder))
                shutil.copy(os.path.join(root, f), client_cache.conan_folder)
        for d in dirs:
            if d == "profiles":
                output.info("Installing profiles")
                profiles_path = client_cache.profiles_path
                _handle_profiles(os.path.join(root, d), profiles_path, output)
                break
        # Prune handled/VCS directories so os.walk does not descend into them.
        dirs[:] = [d for d in dirs if d not in ("profiles", ".git")]


def _process_download(item, client_cache, output, tmp_folder):
    # Download a remote zip to tmp_folder and install from it.
    output.info("Trying to download %s" % item)
    zippath = os.path.join(tmp_folder, "config.zip")
    tools.download(item, zippath, out=output)
    _process_zip_file(zippath, client_cache, output, tmp_folder, remove=True)


def configuration_install(item, client_cache, output, runner):
    """Install a shared configuration from a git repo, local zip or URL.

    If *item* is None, the last installed source stored in conan.conf
    ('general.config_install') is reused; the source is persisted back to
    conan.conf after processing and the temp folder is always removed.
    """
    tmp_folder = os.path.join(client_cache.conan_folder, "tmp_config_install")
    # necessary for Mac OSX, where the temp folders in /var/ are symlinks to /private/var/
    tmp_folder = os.path.realpath(tmp_folder)
    mkdir(tmp_folder)
    try:
        if item is None:
            try:
                item = client_cache.conan_config.get_item("general.config_install")
            except ConanException:
                raise ConanException("Called config install without arguments and "
                                     "'general.config_install' not defined in conan.conf")

        if item.endswith(".git"):
            _process_git_repo(item, client_cache, output, runner, tmp_folder)
        elif os.path.exists(item):
            # is a local file
            _process_zip_file(item, client_cache, output, tmp_folder)
        elif item.startswith("http"):
            _process_download(item, client_cache, output, tmp_folder)
        else:
            raise ConanException("I don't know how to process %s" % item)
    finally:
        if item:
            client_cache.conan_config.set_item("general.config_install", item)
        rmdir(tmp_folder)
"""
RUN FROM THIS FILE
Alexandre Yang
ITP 115
Final Project
05/08/2014
Description: Refer to readme.txt
"""

import pygame
from Oto import Oto
from Button import Button
from Label import Label


# Input: pygame.Surface, tuple, int, int, int, int
# Output: none
# Side-effect: Draws the grid on the screen
def drawBoard(surface, color, w, h, tileWidth, tileHeight):
    # Draw lines
    for x in range(tileWidth, w+1, tileWidth):
        pygame.draw.line(surface, color, (x, 0), (x, h))
    for y in range(tileHeight, h+1, tileHeight):
        pygame.draw.line(surface, color, (0, y), (w, y))


# Input: int, int
# Output: pygame.sprite.Sprite
# Side-effect: none
# Description: Creates a sprite to represent the position of the mouse-click
def createMouseClick(mouseX, mouseY):
    # A 1x1 sprite placed at the click point so spritecollide can be used
    # for hit-testing against cells and menu buttons.
    mouseClick = pygame.sprite.Sprite()
    mouseClick.image = pygame.Surface((1, 1))
    mouseClick.rect = mouseClick.image.get_rect()
    mouseClick.rect.x = mouseX
    mouseClick.rect.y = mouseY
    return mouseClick


def main():
    """Event loop for the grid sequencer: build the UI, then handle clicks
    (place/remove/rotate cells, menu actions) and advance cells at BPM."""
    # Set general variables
    screenW = 850
    screenH = 775
    boardW = 675
    boardH = 675
    tileWidth = 75
    tileHeight = 75
    running = True
    screen = pygame.display.set_mode((screenW, screenH))  # Create pygame Surface
    clock = pygame.time.Clock()  # Create pygame Clock
    BPM = 4
    active = False
    bgColor = 0, 0, 0
    lineColor = 255, 255, 255

    # Create sprite groups (necessary to call draw() method)
    otoList = pygame.sprite.Group()
    buttonList = pygame.sprite.Group()
    labelList = pygame.sprite.Group()

    # Create Menu Buttons and add them to buttonList sprite group
    playButton = Button(screen, 100, boardH+40, 50, 50, "Play")
    buttonList.add(playButton)
    pauseButton = Button(screen, 200, boardH+40, 75, 50, "Pause")
    buttonList.add(pauseButton)
    clearButton = Button(screen, 320, boardH+40, 70, 50, "Clear")
    buttonList.add(clearButton)
    plusBPMButton = Button(screen, 430, boardH+40, 65, 50, "BPM+")
    buttonList.add(plusBPMButton)
    minusBPMButton = Button(screen, 530, boardH+40, 65, 50, "BPM-")
    buttonList.add(minusBPMButton)
    originalButton = Button(screen, 700, 30, 140, 50, "Original")
    buttonList.add(originalButton)
    clarinetButton = Button(screen, 700, 130, 140, 50, "Clarinet")
    buttonList.add(clarinetButton)
    guitarButton = Button(screen, 700, 220, 140, 50, "Guitar")
    buttonList.add(guitarButton)
    synthButton = Button(screen, 700, 320, 140, 50, "Synth")
    buttonList.add(synthButton)
    pianoButton = Button(screen, 700, 420, 140, 50, "Piano")
    buttonList.add(pianoButton)
    piano2Button = Button(screen, 700, 520, 140, 50, "Piano2")
    buttonList.add(piano2Button)
    trumpetButton = Button(screen, 700, 620, 140, 50, "Trumpet")
    buttonList.add(trumpetButton)

    # main Pygame loop
    while running:
        # Resets the screen
        screen.fill(bgColor)
        # Draws the grid
        drawBoard(screen, lineColor, boardW, boardH, tileWidth, tileHeight)
        # Draw menu
        buttonList.draw(screen)

        # Listen for events
        for event in pygame.event.get():
            # If user closes window
            if event.type == pygame.QUIT:
                running = False
            # If user clicks mouse
            elif event.type == pygame.MOUSEBUTTONDOWN:
                mouseX, mouseY = pygame.mouse.get_pos()
                # Rounds mouse positions down to nearest hundred (Used to position the cells and for simplicity)
                otoPosX = (mouseX // tileWidth) * tileWidth
                otoPosY = (mouseY//tileHeight) * tileHeight
                # Create a tiny sprite where the mouse was clicked to use in collision detection
                mouseClick = createMouseClick(mouseX, mouseY)
                # If left button was clicked
                if event.button == 1:
                    # Check to see if mouseClick collided with any sprite in the otoList
                    clickedBlock = pygame.sprite.spritecollide(mouseClick, otoList, False)
                    # Check to see if mouseClick collided with any menu button
                    clickedMenu = pygame.sprite.spritecollide(mouseClick, buttonList, False)
                    # If a cell was clicked, then delete it
                    if clickedBlock:
                        otoList.remove(clickedBlock[0])
                    # Handle the menu button click events
                    elif clickedMenu:
                        if clickedMenu[0] == playButton:
                            active = True
                        elif clickedMenu[0] == pauseButton:
                            active = False
                        elif clickedMenu[0] == clearButton:
                            otoList.empty()
                        elif clickedMenu[0] == plusBPMButton:
                            BPM += 1
                        elif clickedMenu[0] == minusBPMButton and BPM != 1:
                            BPM -= 1
                        elif clickedMenu[0] == originalButton:
                            Oto.changeInstrument("")
                        elif clickedMenu[0] == clarinetButton:
                            Oto.changeInstrument("clarinet")
                        elif clickedMenu[0] == guitarButton:
                            Oto.changeInstrument("Guitar")
                        elif clickedMenu[0] == synthButton:
                            Oto.changeInstrument("Synth")
                        elif clickedMenu[0] == pianoButton:
                            Oto.changeInstrument("Piano")
                        elif clickedMenu[0] == piano2Button:
                            Oto.changeInstrument("Piano2")
                        elif clickedMenu[0] == trumpetButton:
                            Oto.changeInstrument("trumpet")
                    # If the grid was clicked then create a new cell at the position (an 'Oto' object)
                    else:
                        if mouseY < boardH and mouseX < boardW:
                            oto = Oto(screen, tileWidth, tileHeight, boardW, boardH)
                            oto.rect.x = otoPosX
                            oto.rect.y = otoPosY
                            otoList.add(oto)
                # if right button was clicked
                elif event.button == 3:
                    clickedBlock = pygame.sprite.spritecollide(mouseClick, otoList, False)
                    # Rotate cell clockwise
                    if clickedBlock:
                        clickedBlock[0].changeState()

        # Draw every cell to the screen
        otoList.draw(screen)
        # Move the cells
        if active:
            otoList.update()
            # Check to see if any cells collided
            for oto in otoList:
                oto.checkCollision(otoList)

        # Draw and update BPM label
        BPMLabel = Label(screen, 620, boardH+40, 50, 50, str(BPM))
        labelList.empty()
        labelList.add(BPMLabel)
        labelList.draw(screen)

        # Update the screen
        pygame.display.flip()
        # Set the Frames Per Second
        # NOTE: frame rate is tied directly to BPM, so the whole UI redraws
        # only BPM times per second.
        clock.tick(BPM)


main()
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):
    # Adds the 'comment_author' CharField to the 'comments' model.
    # preserve_default=False: the default 'aaa' is only used to backfill
    # existing rows during this migration and is not kept on the field.

    dependencies = [
        ('SocialNetworkModels', '0006_remove_comments_post_author'),
    ]

    operations = [
        migrations.AddField(
            model_name='comments',
            name='comment_author',
            field=models.CharField(default='aaa', max_length=200),
            preserve_default=False,
        ),
    ]
#!/usr/bin/env python
"""Send a single RGB colour command to a LED controller over UDP.

Usage: script.py HOST CHANNEL R G B
The controller listens on fixed UDP port 1337; the wire format observed
here is [0x20+channel, r, g, b, 0x1F, 0x20+channel].
"""
import sys
import socket
import colorsys
import time

port = 1337

# Validate usage up front instead of dying with an IndexError traceback.
if len(sys.argv) != 6:
    print('usage: %s HOST CHANNEL R G B' % sys.argv[0])
    sys.exit(1)

try:
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
except OSError as exc:
    # Catch only socket errors; a bare except would also swallow
    # KeyboardInterrupt/SystemExit.
    print('Failed to create socket: %s' % exc)
    sys.exit(1)

host = sys.argv[1]
channel = 0x20 + int(sys.argv[2])
r = int(sys.argv[3])
g = int(sys.argv[4])
b = int(sys.argv[5])

# Frame layout: channel byte, RGB payload, 0x1F terminator, channel byte again.
msg = bytes([channel, r, g, b, 0x1F, channel])
s.sendto(msg, (host, port))
from __future__ import print_function
import pandas
from sklearn.naive_bayes import MultinomialNB
# NOTE(review): sklearn.cross_validation was removed in scikit-learn 0.20
# (train_test_split moved to sklearn.model_selection) — confirm pinned version.
from sklearn.cross_validation import train_test_split
from sklearn.preprocessing import LabelEncoder


def main():
    """Train a MultinomialNB on the first 200 Titanic rows using Sex and
    Fare as features, then print hold-out accuracy."""
    # NOTE(review): DataFrame.from_csv is deprecated/removed in modern pandas
    # (pandas.read_csv with index_col=0 is the replacement) — confirm version.
    train_all = pandas.DataFrame.from_csv('train.csv')
    train = train_all[['Survived', 'Sex', 'Fare']][:200]
    # Encode 'Sex' strings as integer labels for the classifier.
    gender_label = LabelEncoder()
    train.Sex = gender_label.fit_transform(train.Sex)
    X = train[['Sex', 'Fare']]
    y = train['Survived']
    # Fixed random_state keeps the 67/33 split reproducible.
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.33, random_state=42)
    clf = MultinomialNB()
    clf.fit(X_train, y_train)
    print('Accuracy: ', end='')
    # Fraction of correct predictions on the held-out third.
    print(sum(clf.predict(X_test) == y_test) / float(len(y_test)))


if __name__ == '__main__':
    main()
import datetime
import decimal
import hashlib
import logging
from time import time

from django.conf import settings
from django.utils.encoding import force_bytes
from django.utils.timezone import utc

logger = logging.getLogger('django.db.backends')


class CursorWrapper:
    """Wraps a DB-API cursor so database errors are translated via
    db.wrap_database_errors and broken transactions are detected."""

    def __init__(self, cursor, db):
        self.cursor = cursor
        self.db = db

    # Cursor attributes whose calls are wrapped for error translation.
    WRAP_ERROR_ATTRS = frozenset(['fetchone', 'fetchmany', 'fetchall', 'nextset'])

    def __getattr__(self, attr):
        cursor_attr = getattr(self.cursor, attr)
        if attr in CursorWrapper.WRAP_ERROR_ATTRS:
            return self.db.wrap_database_errors(cursor_attr)
        else:
            return cursor_attr

    def __iter__(self):
        with self.db.wrap_database_errors:
            for item in self.cursor:
                yield item

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        # Close instead of passing through to avoid backend-specific behavior
        # (#17671). Catch errors liberally because errors in cleanup code
        # aren't useful.
        try:
            self.close()
        except self.db.Database.Error:
            pass

    # The following methods cannot be implemented in __getattr__, because the
    # code must run when the method is invoked, not just when it is accessed.

    def callproc(self, procname, params=None):
        self.db.validate_no_broken_transaction()
        with self.db.wrap_database_errors:
            if params is None:
                return self.cursor.callproc(procname)
            else:
                return self.cursor.callproc(procname, params)

    def execute(self, sql, params=None):
        self.db.validate_no_broken_transaction()
        with self.db.wrap_database_errors:
            if params is None:
                return self.cursor.execute(sql)
            else:
                return self.cursor.execute(sql, params)

    def executemany(self, sql, param_list):
        self.db.validate_no_broken_transaction()
        with self.db.wrap_database_errors:
            return self.cursor.executemany(sql, param_list)


class CursorDebugWrapper(CursorWrapper):
    """CursorWrapper that additionally records executed queries and their
    duration in db.queries_log and the 'django.db.backends' logger."""

    # XXX callproc isn't instrumented at this time.

    def execute(self, sql, params=None):
        start = time()
        try:
            return super(CursorDebugWrapper, self).execute(sql, params)
        finally:
            stop = time()
            duration = stop - start
            sql = self.db.ops.last_executed_query(self.cursor, sql, params)
            self.db.queries_log.append({
                'sql': sql,
                'time': "%.3f" % duration,
            })
            logger.debug(
                '(%.3f) %s; args=%s', duration, sql, params,
                extra={'duration': duration, 'sql': sql, 'params': params}
            )

    def executemany(self, sql, param_list):
        start = time()
        try:
            return super(CursorDebugWrapper, self).executemany(sql, param_list)
        finally:
            stop = time()
            duration = stop - start
            try:
                times = len(param_list)
            except TypeError:
                # param_list could be an iterator
                times = '?'
            self.db.queries_log.append({
                'sql': '%s times: %s' % (times, sql),
                'time': "%.3f" % duration,
            })
            logger.debug(
                '(%.3f) %s; args=%s', duration, sql, param_list,
                extra={'duration': duration, 'sql': sql, 'params': param_list}
            )


###############################################
# Converters from database (string) to Python #
###############################################

def typecast_date(s):
    return datetime.date(*map(int, s.split('-'))) if s else None  # returns None if s is null


def typecast_time(s):  # does NOT store time zone information
    if not s:
        return None
    hour, minutes, seconds = s.split(':')
    if '.' in seconds:  # check whether seconds have a fractional part
        seconds, microseconds = seconds.split('.')
    else:
        microseconds = '0'
    # Pad/truncate the fractional part to exactly six digits (microseconds).
    return datetime.time(int(hour), int(minutes), int(seconds), int((microseconds + '000000')[:6]))


def typecast_timestamp(s):  # does NOT store time zone information
    # "2005-07-29 15:48:00.590358-05"
    # "2005-07-29 09:56:00-05"
    if not s:
        return None
    if ' ' not in s:
        return typecast_date(s)
    d, t = s.split()
    # Extract timezone information, if it exists. Currently we just throw
    # it away, but in the future we may make use of it.
    if '-' in t:
        t, tz = t.split('-', 1)
        tz = '-' + tz
    elif '+' in t:
        t, tz = t.split('+', 1)
        tz = '+' + tz
    else:
        tz = ''
    dates = d.split('-')
    times = t.split(':')
    seconds = times[2]
    if '.' in seconds:  # check whether seconds have a fractional part
        seconds, microseconds = seconds.split('.')
    else:
        microseconds = '0'
    tzinfo = utc if settings.USE_TZ else None
    return datetime.datetime(
        int(dates[0]), int(dates[1]), int(dates[2]),
        int(times[0]), int(times[1]), int(seconds),
        int((microseconds + '000000')[:6]), tzinfo
    )


def typecast_decimal(s):
    if s is None or s == '':
        return None
    return decimal.Decimal(s)


###############################################
# Converters from Python to database (string) #
###############################################

def rev_typecast_decimal(d):
    if d is None:
        return None
    return str(d)


def truncate_name(name, length=None, hash_len=4):
    """Shortens a string to a repeatable mangled version with the given length.
    """
    if length is None or len(name) <= length:
        return name

    # Replace the tail with an MD5 digest prefix so equal inputs always
    # truncate to the same unique-ish name.
    hsh = hashlib.md5(force_bytes(name)).hexdigest()[:hash_len]
    return '%s%s' % (name[:length - hash_len], hsh)


def format_number(value, max_digits, decimal_places):
    """
    Formats a number into a string with the requisite number of digits and
    decimal places.
    """
    if value is None:
        return None
    if isinstance(value, decimal.Decimal):
        context = decimal.getcontext().copy()
        if max_digits is not None:
            context.prec = max_digits
        if decimal_places is not None:
            value = value.quantize(decimal.Decimal(".1") ** decimal_places, context=context)
        else:
            # No fixed decimal places: trap rounding so precision loss raises.
            context.traps[decimal.Rounded] = 1
            value = context.create_decimal(value)
        return "{:f}".format(value)
    if decimal_places is not None:
        return "%.*f" % (decimal_places, value)
    return "{:f}".format(value)


def strip_quotes(table_name):
    """
    Strip quotes off of quoted table names to make them safe for use in index
    names, sequence names, etc. For example '"USER"."TABLE"' (an Oracle naming
    scheme) becomes 'USER"."TABLE'.
    """
    has_quotes = table_name.startswith('"') and table_name.endswith('"')
    return table_name[1:-1] if has_quotes else table_name
def pig_it(text):
    """Translate *text* into Pig Latin.

    Every purely alphabetic word has its first letter moved to the end with
    'ay' appended; any non-alphabetic token (e.g. '!' or '?') is kept as-is.
    Tokens are split on whitespace and rejoined with single spaces.
    """
    # Conditional expression filters per token: rotate alphabetic words,
    # pass punctuation tokens through unchanged.
    # (The syntactically broken duplicate of this expression that followed
    # the original function has been removed.)
    return ' '.join(
        word[1:] + word[0] + 'ay' if word.isalpha() else word
        for word in text.split()
    )
      # NOTE(review): the five lines below are the tail of a test method that
      # begins before this chunk; left untouched.
      expected_num_transposes, num_transposes)
      self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
      self._assert_trans_nchw_to_nhwc('add_2-0-0', nodes)
      self._assert_map_nhwc_to_nchw('split-0', nodes)
      self.assertAllClose(output_val_ref, output_val, atol=1e-3)

  @test_util.deprecated_graph_mode_only
  def testSplitVWithNonConstAxis(self):
    # Verifies LayoutOptimizer handles SplitV whose axis is fed at runtime:
    # run once without and once with the optimizer, compare outputs, and
    # count the Transpose nodes that survive.
    if test.is_gpu_available(cuda_only=True):
      random_seed.set_random_seed(0)
      x = random_ops.truncated_normal([1, 784], seed=0)
      conv = _two_layer_model(x)
      dim = array_ops.placeholder(dtype='int32')
      sizes = constant_op.constant([50, 10, 4], shape=[3])
      split = gen_array_ops.split_v(
          value=conv, size_splits=sizes, axis=dim, num_split=3)
      output = math_ops.reduce_sum(split[0])

      with session.Session(config=_get_config(False)) as sess:
        output_val_ref = sess.run(output, feed_dict={dim: 3})

      with session.Session(config=_get_config()) as sess:
        metadata = config_pb2.RunMetadata()
        output_val = sess.run(output, run_metadata=metadata, feed_dict={dim: 3})

      nodes = []
      num_transposes = 0
      for node in metadata.cost_graph.node:
        if _is_transpose(node.name):
          num_transposes += 1
        nodes.append(node.name)

      # Four transposes were initially added in the Expand phase of
      # LayoutOptimizer; two of them are cancelled out in the Collapse phase.
      expected_num_transposes = 2
      self.assertEqual(expected_num_transposes, num_transposes)
      self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
      self._assert_trans_nchw_to_nhwc('SplitV-0-0', nodes)
      self._assert_map_nhwc_to_nchw('SplitV-2', nodes)
      self.assertAllClose(output_val_ref, output_val, atol=1e-3)

  @test_util.deprecated_graph_mode_only
  def testPadWithConstPaddings(self):
    # Pad with constant paddings: the paddings const should be rewritten by
    # the optimizer (Pad-1-LayoutOptimizer node) rather than transposed.
    if test.is_gpu_available(cuda_only=True):
      random_seed.set_random_seed(0)
      x = random_ops.truncated_normal([1, 784], seed=0)
      conv = _two_layer_model(x)
      paddings_val = [[1, 2], [3, 4], [5, 6], [7, 8]]
      paddings = constant_op.constant(
          paddings_val, dtype='int32', name='PaddingsConst')
      pad = array_ops.pad(conv, paddings)
      output = array_ops.identity(pad)

      with session.Session(config=_get_config(False)) as sess:
        output_val_ref = self.evaluate(output)

      with session.Session(config=_get_config()) as sess:
        metadata = config_pb2.RunMetadata()
        output_val = sess.run(output, run_metadata=metadata)

      nodes = []
      num_transposes = 0
      for node in metadata.cost_graph.node:
        if _is_transpose(node.name):
          num_transposes += 1
        nodes.append(node.name)

      # Four transposes were initially added in the Expand phase of
      # LayoutOptimizer; two of them are cancelled out in the Collapse phase.
      expected_num_transposes = 2
      self.assertEqual(expected_num_transposes, num_transposes)
      self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
      self._assert_trans_nchw_to_nhwc('Pad-0-0', nodes)
      self.assertIn('Pad-1-LayoutOptimizer', nodes)
      self.assertAllClose(output_val_ref, output_val, atol=1e-3)

  @test_util.deprecated_graph_mode_only
  def testReduceSum(self):
    # Full reduction after the conv leaves only the input-side transpose.
    if test.is_gpu_available(cuda_only=True):
      random_seed.set_random_seed(0)
      x = random_ops.truncated_normal([1, 784], seed=0)
      conv = _two_layer_model(x)
      reduce_sum = math_ops.reduce_sum(conv)
      output = array_ops.identity(reduce_sum)

      with session.Session(config=_get_config(False)) as sess:
        output_val_ref = self.evaluate(output)

      with session.Session(config=_get_config()) as sess:
        metadata = config_pb2.RunMetadata()
        output_val = sess.run(output, run_metadata=metadata)

      nodes = []
      num_transposes = 0
      for node in metadata.cost_graph.node:
        if _is_transpose(node.name):
          num_transposes += 1
        nodes.append(node.name)

      # Three transposes were initially added in the Expand phase of
      # LayoutOptimizer; two of them are cancelled out in the Collapse phase.
      expected_num_transposes = 1
      self.assertEqual(expected_num_transposes, num_transposes)
      self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
      self.assertAllClose(output_val_ref, output_val, atol=1e-3)

  @test_util.deprecated_graph_mode_only
  def testCast(self):
    # Cast to bool after the conv keeps a transpose on each side of the cast.
    if test.is_gpu_available(cuda_only=True):
      random_seed.set_random_seed(0)
      x = random_ops.truncated_normal([1, 784], seed=0)
      conv = _two_layer_model(x)
      cast = math_ops.cast(conv, dtype='bool')
      output = array_ops.identity(cast)

      with session.Session(config=_get_config(False)) as sess:
        output_val_ref = self.evaluate(output)

      with session.Session(config=_get_config()) as sess:
        metadata = config_pb2.RunMetadata()
        output_val = sess.run(output, run_metadata=metadata)

      nodes = []
      num_transposes = 0
      for node in metadata.cost_graph.node:
        if _is_transpose(node.name):
          num_transposes += 1
        nodes.append(node.name)

      # Four transposes were initially added in the Expand phase of
      # LayoutOptimizer; two of them are cancelled out in the Collapse phase.
      expected_num_transposes = 2
      self.assertEqual(expected_num_transposes, num_transposes)
      self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
      self._assert_trans_nchw_to_nhwc('Cast-0-0', nodes)
      self.assertAllClose(output_val_ref, output_val, atol=1e-3)

  @test_util.deprecated_graph_mode_only
  def testSqueeze(self):
    # Squeeze of a reduced tensor: only the input-side transpose remains.
    if test.is_gpu_available(cuda_only=True):
      random_seed.set_random_seed(0)
      x = random_ops.truncated_normal([1, 784], seed=0)
      conv = _two_layer_model(x)
      reduce_sum = math_ops.reduce_sum(conv, axis=[1, 2])
      squeeze = array_ops.squeeze(reduce_sum)
      output = array_ops.identity(squeeze)

      with session.Session(config=_get_config(False)) as sess:
        output_val_ref = self.evaluate(output)

      with session.Session(config=_get_config()) as sess:
        metadata = config_pb2.RunMetadata()
        output_val = sess.run(output, run_metadata=metadata)

      nodes = []
      num_transposes = 0
      for node in metadata.cost_graph.node:
        if _is_transpose(node.name):
          num_transposes += 1
        nodes.append(node.name)

      # Three transposes were initially added in the Expand phase of
      # LayoutOptimizer; two of them are cancelled out in the Collapse phase.
      expected_num_transposes = 1
      self.assertEqual(expected_num_transposes, num_transposes)
      self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
      self.assertAllClose(output_val_ref, output_val, atol=1e-3)

  @test_util.deprecated_graph_mode_only
  def testSqueezeAlongHW(self):
    # Squeeze explicitly along the spatial axes (H, W) after a keepdims
    # reduction over those axes.
    if test.is_gpu_available(cuda_only=True):
      random_seed.set_random_seed(0)
      x = random_ops.truncated_normal([1, 784], seed=0)
      conv = _two_layer_model(x)
      reduce_sum = math_ops.reduce_sum(conv, axis=[1, 2], keepdims=True)
      squeeze = array_ops.squeeze(reduce_sum, axis=[1, 2])
      output = array_ops.identity(squeeze)

      with session.Session(config=_get_config(False)) as sess:
        output_val_ref = self.evaluate(output)

      with session.Session(config=_get_config()) as sess:
        metadata = config_pb2.RunMetadata()
        output_val = sess.run(output, run_metadata=metadata)

      nodes = []
      num_transposes = 0
      for node in metadata.cost_graph.node:
        if _is_transpose(node.name):
          num_transposes += 1
        nodes.append(node.name)

      # Three transposes were initially added in the Expand phase of
      # LayoutOptimizer; two of them are cancelled out in the Collapse phase.
      expected_num_transposes = 1
      self.assertEqual(expected_num_transposes, num_transposes)
      self._assert_trans_nhwc_to_nchw('Conv2D-0', nodes)
      self.assertAllClose(output_val_ref, output_val, atol=1e-3)

  @test_util.deprecated_graph_mode_only
  def testSqueezeAlongNHW(self):
    # NOTE(review): this method is truncated by the chunk boundary below.
    if test.is_gpu_available(cuda_only=True):
      random_seed.set_random_seed(0)
      x = ra
from configparser import ConfigParser
import v20

# NOTE(review): this file mixes a Python-3 import (configparser) with a
# Python-2 print statement below ('print account') — it cannot run unmodified
# on either interpreter; confirm the intended target.

# Create an object config
config = ConfigParser()
# Read the config
config.read("../API_Connection_Oanda/pyalgo.cfg")

# Authenticated v20 context against the Oanda practice endpoint; the token
# is read from the [oanda_v20] section of pyalgo.cfg.
ctx = v20.Context(
    'api-fxpractice.oanda.com',
    443,
    True,
    application = 'sample_code',
    token = config['oanda_v20']['access_token'],
    datetime_format = 'RFC3339')


# class oanda_info():
# Lists every account visible to the token and prints each one.
def get_Id_Account():
    response = ctx.account.list()
    # Ask for the Oanda ID Account
    accounts = response.get('accounts')
    # Show the ID
    for account in accounts:
        # account('Account: %s' %account)
        print account


# Prints display name and internal name of every instrument tradable by the
# configured account.
def get_instruments():
    response = ctx.account.instruments(config['oanda_v20']['account_id'])
    instruments = response.get('instruments')
    # instruments[0].dict()
    for instrument in instruments:
        ins = instrument.dict()
        print('%20s | %10s' % (ins['displayName'], ins['name']))
# By starting at the top of the triangle below and moving to adjacent numbers on the
# row below, the maximum total from top to bottom is 23.
# 3
# 7 4
# 2 4 6
# 8 5 9 3
# That is, 3 + 7 + 4 + 9 = 23.
# Find the maximum total from top to bottom of the triangle below:
# 75
# 95 64
# 17 47 82
# 18 35 87 10
# 20 04 82 47 65
# 19 01 23 75 03 34
# 88 02 77 73 07 63 67
# 99 65 04 28 06 16 70 92
# 41 41 26 56 83 40 80 70 33
# 41 48 72 33 47 32 37 16 94 29
# 53 71 44 65 25 43 91 52 97 51 14
# 70 11 33 28 77 73 17 78 39 68 17 57
# 91 71 52 38 17 14 91 43 58 50 27 29 48
# 63 66 04 68 89 53 67 30 73 16 69 87 40 31
# 04 62 98 27 23 09 70 98 73 93 38 53 60 04 23
# NOTE: As there are only 16384 routes, it is possible to solve this problem by trying
# every route. However, Problem 67, is the same challenge with a triangle containing
# one-hundred rows; it cannot be solved by brute force, and requires a clever method! ;o)
# NOTE(review): Python 2 script (bare 'print' statement at the bottom).

text = '75\n\
95 64\n\
17 47 82\n\
18 35 87 10\n\
20 04 82 47 65\n\
19 01 23 75 03 34\n\
88 02 77 73 07 63 67\n\
99 65 04 28 06 16 70 92\n\
41 41 26 56 83 40 80 70 33\n\
41 48 72 33 47 32 37 16 94 29\n\
53 71 44 65 25 43 91 52 97 51 14\n\
70 11 33 28 77 73 17 78 39 68 17 57\n\
91 71 52 38 17 14 91 43 58 50 27 29 48\n\
63 66 04 68 89 53 67 30 73 16 69 87 40 31\n\
04 62 98 27 23 09 70 98 73 93 38 53 60 04 23'

# Parse the triangle into a list of integer rows.
digits = [[int (y) for y in x.split(' ')] for x in text.split('\n')]

# Dynamic programming, top-down: after processing row i, digits[i][j] holds
# the best path sum from the apex down to that cell.  Edge cells have only
# one parent; interior cells take the better of their two parents.
for i in range(1, len(digits)):
    digits[i][0] += digits[i - 1][0]
    digits[i][len(digits[i]) - 1] += digits[i - 1][len(digits[i - 1]) - 1]
    for j in range(1, len(digits[i]) - 1):
        digits[i][j] += max(digits[i - 1][j - 1], digits[i - 1][j])

# The answer is the best accumulated sum in the bottom row.
print max(digits[len(digits) - 1])
# NOTE(review): these two handlers and the trailing _map() registrations
# reference 'self' at their own level, so they appear to be defined inside an
# enclosing method/scope that starts before this chunk — confirm indentation
# against the original file.

def get_perm_argparser(self, args):
    # Handle ".getperm <nick|cmd|msg> <target>": report the stored
    # permission for a nick, a command (leading '.' stripped), or a message.
    args = args.split(" ")
    if args[0] == "nick":
        self.conman.gen_send("Permission level for %s: %s" %
                             (args[1], self.permsman.get_nick_perms(args[1])))
    elif args[0] == "cmd":
        if args[1].startswith("."):
            args[1] = args[1][1:]
        self.conman.gen_send("Permission level for %s: %s" %
                             (args[1], self.permsman.get_cmd_perms(args[1])))
    elif args[0] == "msg":
        self.conman.gen_send("Message permissions for %s: %s" %
                             (args[1], self.permsman.get_msg_perms(args[1])))

def set_perm_argparser(self, args):
    # Handle ".setperm <nick|cmd|msg> <target> <value>": announce and store
    # the new permission.  For "msg" the value is coerced to a boolean
    # ("true"/"1" -> True, anything else -> False).
    args = args.split(" ")
    if args[0] == "nick":
        self.conman.gen_send("Setting permission level for %s: %s" % (args[1], args[2]))
        self.permsman.set_nick_perms(args[1], args[2])
    elif args[0] == "cmd":
        if args[1].startswith("."):
            args[1] = args[1][1:]
        self.conman.gen_send("Setting permission level for %s: %s" % (args[1], args[2]))
        self.permsman.set_cmd_perms(args[1], args[2])
    elif args[0] == "msg":
        args[2] = args[2].lower() == "true" or args[2] == "1"
        self.conman.gen_send("Setting message permissions for %s: %s" % (args[1], args[2]))
        self.permsman.set_msg_perms(args[1], args[2])

# Register both handlers as chat commands.
self._map("command", "getperm", get_perm_argparser)
self._map("command", "setperm", set_perm_argparser)
#!/usr/bin/env python
# Entry point for running Django management commands against the test
# project.  Extends sys.path so the project, its parent, and the
# 'realestate' app package are importable.
import os
import sys

PROJECT_DIR = os.path.abspath(os.path.dirname(__file__))

sys.path.append(PROJECT_DIR)
sys.path.append(os.path.abspath(PROJECT_DIR + '/../'))
sys.path.append(os.path.abspath(PROJECT_DIR + '/../realestate/'))

if __name__ == "__main__":
    # Fall back to the test project's settings unless the caller already set
    # DJANGO_SETTINGS_MODULE in the environment.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testproject.settings")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)
# -*- coding: utf-8 -*-
#
#  gedit CodeCompletion plugin
#  Copyright (C) 2011 Fabio Zendhi Nagao
#
#  This program is free software: you can redistribute it and/or modify
#  it under the terms of the GNU General Public License as published by
#  the Free Software Foundation, either version 3 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with this program.  If not, see <http://www.gnu.org/licenses/>.


def get_word(piter):
    """Return (start_iter, word): the word-like token that ends at *piter*.

    Walks backwards from *piter* until the start of the line or the first
    character outside the word set (alphanumerics plus "_:.->", covering
    identifiers and C/C++ member accesses), then extracts the visible text
    between that position and *piter*.
    """
    a = piter.copy()
    b = piter.copy()
    while True:
        if a.starts_line():
            break
        a.backward_char()
        ch = a.get_char()
        #if not (ch.isalnum() or ch in ['_', ':', '.', '-', '>']):
        if not (ch.isalnum() or ch in "_:.->"):
            # Step back over the non-word character we just inspected.
            a.forward_char()
            break
    word = a.get_visible_text(b)
    return a, word


def get_document(piter):
    """Return the visible text of the whole buffer containing *piter*,
    by walking one iterator to the start and one to the end."""
    a = piter.copy()
    b = piter.copy()
    while True:
        if not a.backward_char():
            break
    while True:
        if not b.forward_char():
            break
    return a.get_visible_text(b)

# ex:ts=4:et:
"""Admin configuration for the Improved User model."""
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.utils.translation import gettext_lazy as _

from .forms import UserChangeForm, UserCreationForm


class UserAdmin(BaseUserAdmin):
    """Mirror Django's stock user admin for the email-based Improved User."""

    # Forms backing the change view and the add view, respectively.
    form = UserChangeForm
    add_form = UserCreationForm

    # Changelist configuration: columns, search and default ordering.
    list_display = ("email", "full_name", "short_name", "is_staff")
    search_fields = ("email", "full_name", "short_name")
    ordering = ("email",)

    # Field groups shown when editing an existing user.
    fieldsets = (
        (None, {"fields": ("email", "password")}),
        (_("Personal info"), {"fields": ("full_name", "short_name")}),
        (
            _("Permissions"),
            {
                "fields": (
                    "is_active",
                    "is_staff",
                    "is_superuser",
                    "groups",
                    "user_permissions",
                ),
            },
        ),
        (_("Important dates"), {"fields": ("last_login", "date_joined")}),
    )

    # Single wide field group shown on the "add user" form.
    add_fieldsets = (
        (
            None,
            {
                "classes": ("wide",),
                "fields": ("email", "short_name", "password1", "password2"),
            },
        ),
    )
# NOTE(review): the identifier below is truncated by the chunk boundary;
# the rest of the file refers to it as 'raidUtil'.
aidUtil = '/opt/MegaRAID/storcli/storcli64'


class RaidControllerLSI(TextAttributeParser, RaidController):
    """LSI MegaRAID controller queried through the storcli64 utility."""

    # (regex, attribute name, ?, multi-flag, transform) tuples consumed by
    # TextAttributeParser._process_attributes_line.
    _attributes = [
        (r'(?i)^Model\s=\s(.*)$', 'Model', None, False, None),
        (r'(?i)^Serial\sNumber\s=\s(.*)$', 'Serial', None, False, None),
        (r'(?i)^Controller\sStatus\s=\s(.*)$', 'Status', None, False, None),
        (r'(?i)^Bios\sVersion\s=\s(.*)$', 'BIOS', None, False, None),
        (r'(?i)^Firmware\sVersion\s=\s(.*)$', 'Firmware', None, False, None),
        (r'(?i)^On\sBoard\sMemory\sSize\s=\s(.*)$', 'CacheSize', None, False, None),
        (r'(?i)^BBU\s=\s(.*)$', 'Battery', None, False,
         lambda match: {'Absent': False}.get(match.group(1), True)),
        (r'(?i)^BBU\sStatus\s=\s(.*)$', 'BatteryStatus', None, False,
         lambda match: {'32': 'Degraded'}.get(match.group(1), match.group(1)))
    ]

    def __init__(self, name):
        super(self.__class__, self).__init__(name)
        self.Type = 'LSIMegaRAID'
        self.Serial = '-'
        self.__fill_data()
        self.__enumerate_ld()

    @staticmethod
    def probe():
        # Return the list of controller indices reported by 'storcli show',
        # or [] when the utility is not installed.
        if not os.path.isfile(raidUtil):
            return []
        output = helpers.getOutput('{} show nolog'.format(raidUtil))
        controllers = []
        for line in output:
            match = re.search(r'^(\d+)\s\S+\s+\d+', line)
            if match:
                controllers.append(match.group(1))
        return controllers

    def __enumerate_ld(self):
        # Parse the 'VD LIST' section of 'show all' and create one logical
        # drive object per DG/VD pair.
        ld_section = False
        for line in helpers.getOutput('{} /c{} show all nolog'.format(raidUtil, self.Name)):
            if re.match(r'(?i)^VD\sLIST\s:', line):
                ld_section = True
                continue
            if not ld_section:
                continue
            if re.match(r'(?i)Physical\sDrives.*', line):
                break
            match = re.search(r'(?i)(\d+/\d+)\s+', line)
            if match:
                self.LDs.append(RaidLDvendorLSI(match.group(1), self))

    def printSpecificInfo(self):
        # Human-readable controller summary.
        print('Model: {}, s/n {}, {}'.format(self.Model, self.Serial, self.Status))
        print('Cache: {}'.format(self.CacheSize))
        if self.Battery:
            print('BBU status: {}'.format(self.BatteryStatus))
        print('BIOS version: {}'.format(self.BIOS))
        print('FW version : {}'.format(self.Firmware))

    def __fill_data(self):
        # Feed 'show all' output through the _attributes table, stopping at
        # the TOPOLOGY section.
        for line in helpers.getOutput('{} /c{} show all nolog'.format(raidUtil, self.Name)):
            if re.match(r'(?i)^TOPOLOGY\s:', line):
                break
            if self._process_attributes_line(line):
                continue


class RaidLDvendorLSI(RaidLD):
    """Logical drive (virtual disk) on an LSI controller, named 'DG/VD'."""

    def __init__(self, name, controller):
        (self.DG, self.VD) = name.split('/')
        super(self.__class__, self).__init__(name, controller)
        self.Device = self.Name
        self.Level = ''
        self.State = ''
        self.Size = ''
        self.__fill_data()
        self.__find_devicename()
        self.__enumerate_pd()
        self.DriveCount = len(self.PDs)
        self.DriveActiveCount = self.DriveCount

    def __enumerate_pd(self):
        # Collect the physical drives listed under 'PDs for VD'.
        pd_section = False
        for line in helpers.getOutput('{} /c{}/v{} show all nolog'.format(raidUtil, self.Controller.Name, self.VD)):
            if re.match(r'(?i)PDs\sfor\sVD', line):
                pd_section = True
                continue
            if not pd_section:
                continue
            match = re.search(r'(?i)^(\d+):(\d+)\s+(\d+)\s+\S+', line)
            if match:
                self.PDs.append(RaidPDvendorLSI(match.group(1), match.group(2), match.group(3), self))

    def __fill_data(self):
        # Extract the SCSI NAA id plus RAID level, state and size from the
        # virtual-drive summary row.
        for line in helpers.getOutput('{} /c{}/v{} show all nolog'.format(raidUtil, self.Controller.Name, self.VD)):
            match = re.search(r'(?i)SCSI\sNAA\sId\s=\s(.*)$', line)
            if match:
                self.NAA = match.group(1)
            match = re.search(r'(?i)^(\d+)\/(\d+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)', line)
            if match:
                self.Level = match.group(3)
                self.State = {'Optl': 'Optimal', 'Rec': 'Recovery', 'OfLn': 'OffLine', 'Pdgd': 'Partially Degraded', 'Dgrd': 'Degraded'}.get(match.group(4), match.group(4))
                self.Size = DeviceCapacity(int(float(match.group(10)) * 1024), {'TB': 'GiB', 'GB': 'MiB', 'MB': 'KiB'}.get(match.group(11), None))

    def __find_devicename(self):
        # Resolve the /dev/disk/by-id entry matching this LD's NAA id;
        # best-effort: any failure leaves self.Device at its default.
        try:
            for filename in [f for f in os.listdir('/dev/disk/by-id')]:
                match = re.search(r'^scsi-\d+' + self.NAA, filename)
                if match:
                    self.Device = '/dev/disk/by-id/' + filename
        except:
            pass


class RaidPDvendorLSI(TextAttributeParser, RaidPD):
    """Physical drive on an LSI controller, addressed as 'enclosure:slot'."""

    _attributes = [
        (r'(?i)^SN\s+=\s+(.*)$', 'Serial', None, False, None),
        (r'(?i)^Manufacturer\sId\s=\s+(.*)$', 'Vendor', None, False, None),
        (r'(?i)^Drive\sTemperature\s=\s+(\d+)C', 'Temperature', None, False, None),
        (r'(?i)^Model\sNumber\s=\s+(.*)$', 'Model', None, False, None),
        (r'(?i)^Media\sError\sCount\s=\s+(\d+)', 'ErrorCount', None, True, lambda match: int(match.group(1))),
        (r'(?i)^Predictive\sFailure\sCount\s=\s+(\d+)', 'ErrorCount', None, True, lambda match: int(match.group(1)))
    ]

    def __init__(self, enclosure, slot, did, ld):
        super(self.__class__, self).__init__('{}:{}'.format(enclosure, slot), ld)
        self.Enclosure = enclosure
        self.Slot = slot
        self.Device = did
        self.PHYCount = 0
        self.__fill_basic_info()
        if hasattr(self, 'Vendor'):
            self.Model = self.Vendor + ' ' + self.Model
        # On ESXi (uname contains 'VMkernel') smartctl is unavailable, so SMART
        # data is read through storcli instead.
        if 'VMkernel' in os.uname():
            self.__fill_LSI_smart_info()
        else:
            self.__fill_smart_info()

    def __fill_basic_info(self):
        # Parse the drive summary row for capacity, media technology and
        # state, then run the generic attribute table over each line.
        for line in helpers.getOutput('{} /c{}/e{}/s{} show all nolog'.format(raidUtil, self.LD.Controller.Name, self.Enclosure, self.Slot)):
            match = re.search(r'^(\d+):(\d+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s+(\S+)', line)
            if match:
                self.Capacity = DeviceCapacity(int(float(match.group(6)) * 1024), {'TB': 'GiB', 'GB': 'MiB', 'MB': 'KiB'}.get(match.group(7), None))
                self.Technology = match.group(8)
                self.State = {
                    'DHS': 'Dedicated Hot Spare',
                    'UGood': 'Unconfigured Good',
                    'GHS': 'Global Hotspare',
                    'UBad': 'Unconfigured Bad',
                    'Onln': 'Optimal',
                    'Rbld': 'Rebuild',
                    'Offln': 'Offline'
                }.get(match.group(4), match.group(4))
            if self._process_attributes_line(line):
                continue

    def __fill_smart_info(self):
        # Copy the SMART properties smartctl exposes (via '-d megaraid,N')
        # onto this object.
        smart = SMARTinfo('-d megaraid,{}'.format(int(self.Device)), self.LD.Device)
        if not smart.SMART:
            return
        for prop in ['Model', 'Serial', 'Firmware', 'Capacity', 'SectorSizes', 'FormFactor', 'PHYCount', 'PHYSpeed', 'RPM', 'PowerOnHours', 'ErrorCount', 'Temperature', 'SCT']:
            if hasattr(smart, prop):
                setattr(self, prop, getattr(smart, prop))

    def __fill_LSI_smart_info(self):
        # NOTE(review): this method is cut off by the chunk boundary below.
        data_dump = []
        for line in helpers.getOutput('{} /c{}/e{}/s{} show smart nolog'.format(raidUtil, self.LD.Controller.Name, self.Enclosure, self.Slot)):
            match = re.search(r'^(\S\S\s){15}\S\S$',
line) if match: for c in line.split(' '): data_dump.append(int(c, 16)) data_dump = data_dump[2:] smart = {} for attr_index in range(0, len(data_dump) // 12): attr, value = struct.unpack('<BxxxxHxxxxx', bytearray(data_dump[attr_index * 12:(attr_index + 1) * 12])) if attr != 0: smart[attr] = value setattr(self, 'PowerOnHours', smart.get(9, None)) setattr(self, 'ErrorCount', smart.get(5, 0) + smart.get(187, 0) + smar
# -*- encoding: utf-8 -*-
import os
from abjad import abjad_configuration
from abjad.demos import desordre


def test_demos_desordre_01():
    """Smoke test: building the 'Désordre' demo LilyPond file must not raise."""
    # NOTE(review): the result is not asserted on here; this only exercises
    # construction of the LilyPond file object.
    lilypond_file = desordre.make_desordre_lilypond_file()
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES

from swgpy.object import *


def create(kernel):
    """Build and return the lit free-standing lamp furniture tangible."""
    tangible = Tangible()
    tangible.template = "object/tangible/furniture/all/shared_frn_all_lamp_free_s01_lit.iff"
    tangible.attribute_template_id = 6
    tangible.stfName("frn_n","frn_lamp_free")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return tangible
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2020, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------

import math
from collections import defaultdict

from nupic.research.frameworks.vernon import interfaces

__all__ = [
    "StepBasedLogging",
]


class StepBasedLogging(
    interfaces.Experiment,  # Requires
    interfaces.StepBasedLogging,  # Implements
):
    """Experiment mixin that maintains a global timestep counter and maps
    per-epoch results onto the timesteps at which they were recorded."""

    @staticmethod
    def step_based_logging_interface_implemented():
        # Capability marker checked by the interfaces machinery.
        return True

    def setup_experiment(self, config):
        """
        :param config: Dictionary containing the configuration parameters

            - log_timestep_freq: Configures mixins and subclasses that log every
              timestep to only log every nth timestep (in addition to the final
              timestep of each epoch). Set to 0 to log only at the end of each
              epoch.
        """
        super().setup_experiment(config)
        self._current_timestep = 0
        self.log_timestep_freq = config.get("log_timestep_freq", 1)

    @property
    def current_timestep(self):
        # Number of batches processed so far (incremented in post_batch).
        return self._current_timestep

    @current_timestep.setter
    def current_timestep(self, value):
        self._current_timestep = value

    def run_iteration(self):
        """Run one iteration and annotate its result with the timestep range."""
        timestep_begin = self.current_timestep
        ret = super().run_iteration()
        ret.update(
            timestep_begin=timestep_begin,
            timestep_end=self.current_timestep,
        )
        return ret

    def post_batch(self, **kwargs):
        super().post_batch(**kwargs)
        # FIXME: move to post_optimizer_step
        self.current_timestep += 1

    def should_log_batch(self, train_batch_idx):
        """True on the epoch's final batch, and on every log_timestep_freq-th
        timestep when that frequency is non-zero."""
        return (train_batch_idx == self.total_batches - 1) or (
            self.log_timestep_freq > 0
            and (self.current_timestep % self.log_timestep_freq) == 0)

    def get_state(self):
        # Persist the timestep counter alongside the parent state.
        state = super().get_state()
        state["current_timestep"] = self.current_timestep
        return state

    def set_state(self, state):
        super().set_state(state)
        # Older checkpoints may predate the counter; leave the default 0 then.
        if "current_timestep" in state:
            self.current_timestep = state["current_timestep"]

    @classmethod
    def get_recorded_timesteps(cls, result, config):
        """Return the list of timesteps actually logged within `result`'s
        [timestep_begin, timestep_end) range, honoring log_timestep_freq."""
        log_timestep_freq = config.get("log_timestep_freq", 1)
        timestep_end = result["timestep_end"]
        if log_timestep_freq == 0:
            # Epoch-only logging: just the final timestep of the range.
            ret = [timestep_end - 1]
        else:
            # Find first logged timestep in range
            logged_begin = int(math.ceil(result["timestep_begin"]
                                         / log_timestep_freq) * log_timestep_freq)
            ret = list(range(logged_begin, timestep_end, log_timestep_freq))
            # The epoch's final timestep is always logged, even off-frequency.
            last_batch_timestep = timestep_end - 1
            if last_batch_timestep % log_timestep_freq != 0:
                ret.append(last_batch_timestep)
        return ret

    @classmethod
    def expand_result_to_time_series(cls, result, config):
        """Map an epoch result dict onto a {timestep: result_dict} time series."""
        result_by_timestep = defaultdict(dict)

        # Assign the epoch result to the appropriate timestep.
        result_by_timestep[result["timestep_end"]].update(
            cls.get_readable_result(result)
        )

        return result_by_timestep

    @classmethod
    def get_execution_order(cls):
        eo = super().get_execution_order()
        exp = "StepBasedLoggingCore"
        eo["run_iteration"].append(exp + ": Add timestep info")
        eo["post_batch"].append(exp + ": Increment timestep")
        eo["get_state"].append(exp + ": Get current timestep")
        eo["set_state"].append(exp + ": Set current timestep")
        eo.update(
            # StepBasedLogging
            expand_result_to_time_series=[exp + ": common result dict keys"],
        )
        return eo
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):
    # South schema migration: drops the Trial.max_participants column.
    # The `models` dict below is South's autogenerated frozen ORM snapshot.

    def forwards(self, orm):
        # Deleting field 'Trial.max_participants'
        db.delete_column(u'trials_trial', 'max_participants')

    def backwards(self, orm):
        # User chose to not deal with backwards NULL issues for 'Trial.max_participants'
        raise RuntimeError("Cannot reverse this migration. 'Trial.max_participants' and its values cannot be restored.")

    models = {
        u'trials.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
            'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"})
        },
        u'trials.invitation': {
            'Meta': {'object_name': 'Invitation'},
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '254'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'sent': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"})
        },
        u'trials.participant': {
            'Meta': {'object_name': 'Participant'},
            'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Group']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['userprofiles.RMUser']", 'null': 'True', 'blank': 'True'})
        },
        u'trials.report': {
            'Meta': {'object_name': 'Report'},
            'binary': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
            'count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'date': ('django.db.models.fields.DateField', [], {}),
            'group': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Group']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'participant': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Participant']", 'null': 'True', 'blank': 'True'}),
            'score': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"}),
            'variable': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Variable']"})
        },
        u'trials.trial': {
            'Meta': {'object_name': 'Trial'},
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'finish_date': ('django.db.models.fields.DateField', [], {}),
            'group_a': ('django.db.models.fields.TextField', [], {}),
            'group_a_desc': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'group_a_expected': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'group_b': ('django.db.models.fields.TextField', [], {}),
            'group_b_desc': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'group_b_impressed': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instruction_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'instruction_delivery': ('django.db.models.fields.TextField', [], {'default': "'im'", 'max_length': '2'}),
            'instruction_hours_after': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
            'is_edited': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'min_participants': ('django.db.models.fields.IntegerField', [], {}),
            'owner': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['userprofiles.RMUser']"}),
            'participants': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'private': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'recruiting': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'recruitment': ('django.db.models.fields.CharField', [], {'default': "'an'", 'max_length': '2'}),
            'reporting_freq': ('django.db.models.fields.CharField', [], {'default': "'da'", 'max_length': '200'}),
            'start_date': ('django.db.models.fields.DateField', [], {}),
            'stopped': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
        },
        u'trials.variable': {
            'Meta': {'object_name': 'Variable'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'question': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'style': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
            'trial': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['trials.Trial']"})
        },
        u'userprofiles.rmuser': {
            'Meta': {'object_name': 'RMUser'},
            'account': ('django.db.models.fields.CharField', [], {'default': "'st'", 'max_length': '2'}),
            'dob': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '254'}),
            'gender': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'postcode': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
            'receive_questions': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40', 'db_index': 'True'})
        }
    }

    complete_apps = ['trials']
# Ansible module to manage CheckPoint Firewall (c) 2019
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
#

from __future__ import absolute_import, division, print_function

__metaclass__ = type

import pytest
from units.modules.utils import set_module_args, exit_json, fail_json, AnsibleExitJson

from ansible.module_utils import basic
from ansible.modules.network.check_point import cp_mgmt_host

# Canned fixtures: the host object the mocked API "returns" and the module
# arguments for each scenario.
OBJECT = {
    "name": "New Host 1",
    "ip_address": "192.0.2.1"
}

CREATE_PAYLOAD = {
    "name": "New Host 1",
    "ip_address": "192.0.2.1"
}

UPDATE_PAYLOAD = {
    "name": "New Host 1",
    "color": "blue",
    "ipv4_address": "192.0.2.2"
}

OBJECT_AFTER_UPDATE = UPDATE_PAYLOAD

DELETE_PAYLOAD = {
    "name": "New Host 1",
    "state": "absent"
}

# Patch target for the module's api_call helper, and the result key it fills.
function_path = 'ansible.modules.network.check_point.cp_mgmt_host.api_call'
api_call_object = 'host'


class TestCheckpointHost(object):
    """Unit tests for the cp_mgmt_host module: create/update/delete plus
    their idempotent (no-change) variants, with the Check Point API mocked."""

    module = cp_mgmt_host

    @pytest.fixture(autouse=True)
    def module_mock(self, mocker):
        # Replace AnsibleModule's exit/fail handlers so results raise
        # AnsibleExitJson instead of terminating the process.
        return mocker.patch.multiple(basic.AnsibleModule, exit_json=exit_json, fail_json=fail_json)

    @pytest.fixture
    def connection_mock(self, mocker):
        # Mock the HTTP-API connection so no real device is contacted.
        connection_class_mock = mocker.patch('ansible.module_utils.network.checkpoint.checkpoint.Connection')
        return connection_class_mock.return_value

    def test_create(self, mocker, connection_mock):
        mock_function = mocker.patch(function_path)
        mock_function.return_value = {'changed': True, api_call_object: OBJECT}
        result = self._run_module(CREATE_PAYLOAD)

        assert result['changed']
        assert OBJECT.items() == result[api_call_object].items()

    def test_create_idempotent(self, mocker, connection_mock):
        mock_function = mocker.patch(function_path)
        mock_function.return_value = {'changed': False, api_call_object: OBJECT}
        result = self._run_module(CREATE_PAYLOAD)

        assert not result['changed']

    def test_update(self, mocker, connection_mock):
        mock_function = mocker.patch(function_path)
        mock_function.return_value = {'changed': True, api_call_object: OBJECT_AFTER_UPDATE}
        result = self._run_module(UPDATE_PAYLOAD)

        assert result['changed']
        assert OBJECT_AFTER_UPDATE.items() == result[api_call_object].items()

    def test_update_idempotent(self, mocker, connection_mock):
        mock_function = mocker.patch(function_path)
        mock_function.return_value = {'changed': False, api_call_object: OBJECT_AFTER_UPDATE}
        result = self._run_module(UPDATE_PAYLOAD)

        assert not result['changed']

    def test_delete(self, mocker, connection_mock):
        mock_function = mocker.patch(function_path)
        mock_function.return_value = {'changed': True}
        result = self._run_module(DELETE_PAYLOAD)

        assert result['changed']

    def test_delete_idempotent(self, mocker, connection_mock):
        mock_function = mocker.patch(function_path)
        mock_function.return_value = {'changed': False}
        result = self._run_module(DELETE_PAYLOAD)

        assert not result['changed']

    def _run_module(self, module_args):
        # Execute the module with the given args and return its exit payload.
        set_module_args(module_args)
        with pytest.raises(AnsibleExitJson) as ex:
            self.module.main()
        return ex.value.args[0]
        # NOTE(review): the opening of this method (RemoteField.as_dict,
        # presumably) is cut off in this view; the fragment below is kept verbatim.
        field_dict['help_text'] = self.field.help_text
        field_dict['error_messages'] = self.field.error_messages

        # Instantiate the Remote Forms equivalent of the widget if possible
        # in order to retrieve the widget contents as a dictionary.
        remote_widget_class_name = 'Remote%s' % self.field.widget.__class__.__name__
        try:
            remote_widget_class = getattr(widgets, remote_widget_class_name)
            remote_widget = remote_widget_class(self.field.widget, field_name=self.field_name)
        # NOTE(review): Python-2-only except syntax; this module is Python 2.
        except Exception, e:
            logger.warning('Error serializing %s: %s', remote_widget_class_name, str(e))
            widget_dict = {}
        else:
            widget_dict = remote_widget.as_dict()

        field_dict['widget'] = widget_dict
        return field_dict


class RemoteCharField(RemoteField):
    def as_dict(self):
        """Add char-field length constraints to the serialized dict."""
        field_dict = super(RemoteCharField, self).as_dict()
        field_dict.update({
            'max_length': self.field.max_length,
            'min_length': self.field.min_length
        })
        return field_dict


class RemoteIntegerField(RemoteField):
    def as_dict(self):
        """Add integer value bounds to the serialized dict."""
        field_dict = super(RemoteIntegerField, self).as_dict()
        field_dict.update({
            'max_value': self.field.max_value,
            'min_value': self.field.min_value
        })
        return field_dict


class RemoteFloatField(RemoteIntegerField):
    def as_dict(self):
        return super(RemoteFloatField, self).as_dict()


class RemoteDecimalField(RemoteIntegerField):
    def as_dict(self):
        """Add decimal precision settings to the serialized dict."""
        field_dict = super(RemoteDecimalField, self).as_dict()
        field_dict.update({
            'max_digits': self.field.max_digits,
            'decimal_places': self.field.decimal_places
        })
        return field_dict


class RemoteTimeField(RemoteField):
    def as_dict(self):
        """Serialize input formats and render a date/time initial as a string."""
        field_dict = super(RemoteTimeField, self).as_dict()
        field_dict['input_formats'] = self.field.input_formats

        if (field_dict['initial']):
            if callable(field_dict['initial']):
                field_dict['initial'] = field_dict['initial']()

            # If initial value is datetime then convert it using first available input format
            if (isinstance(field_dict['initial'], (datetime.datetime, datetime.time, datetime.date))):
                # Fall back to the project-wide format settings when the field
                # declares no input formats of its own.
                if not len(field_dict['input_formats']):
                    if isinstance(field_dict['initial'], datetime.date):
                        field_dict['input_formats'] = settings.DATE_INPUT_FORMATS
                    elif isinstance(field_dict['initial'], datetime.time):
                        field_dict['input_formats'] = settings.TIME_INPUT_FORMATS
                    elif isinstance(field_dict['initial'], datetime.datetime):
                        field_dict['input_formats'] = settings.DATETIME_INPUT_FORMATS

                input_format = field_dict['input_formats'][0]
                field_dict['initial'] = field_dict['initial'].strftime(input_format)

        return field_dict


class RemoteDateField(RemoteTimeField):
    def as_dict(self):
        return super(RemoteDateField, self).as_dict()


class RemoteDateTimeField(RemoteTimeField):
    def as_dict(self):
        return super(RemoteDateTimeField, self).as_dict()


class RemoteRegexField(RemoteCharField):
    def as_dict(self):
        field_dict = super(RemoteRegexField, self).as_dict()
        # We don't need the pattern object in the frontend
        #
        field_dict['regex'] = self.field.regex
        return field_dict


class RemoteEmailField(RemoteCharField):
    def as_dict(self):
        return super(RemoteEmailField, self).as_dict()


class RemoteFileField(RemoteField):
    def as_dict(self):
        field_dict = super(RemoteFileField, self).as_dict()
        field_dict['max_length'] = self.field.max_length
        return field_dict


class RemoteImageField(RemoteFileField):
    def as_dict(self):
        return super(RemoteImageField, self).as_dict()


class RemoteURLField(RemoteCharField):
    def as_dict(self):
        return super(RemoteURLField, self).as_dict()


class RemoteBooleanField(RemoteField):
    def as_dict(self):
        return super(RemoteBooleanField, self).as_dict()


class RemoteNullBooleanField(RemoteBooleanField):
    def as_dict(self):
        return super(RemoteNullBooleanField, self).as_dict()


class RemoteBCTChoiceFieldWithTitles(RemoteField):
    def as_dict(self):
        return super(RemoteBCTChoiceFieldWithTitles, self).as_dict()

    def get_dict(self):
        """Serialize the field's result set (and optional image URL)."""
        #field_dict = {'widget': {'attrs' : self.field.widget.attrs}}
        #field_dict = {'results': self.field.widget.attrs['results']}
        field_dict = {'results': self.field.results}
        if hasattr(self.field, 'img_url'):
            field_dict['img_url'] = self.field.img_url
        return field_dict


class RemoteInlineForeignKeyField(RemoteField):
    def as_dict(self):
        return super(RemoteInlineForeignKeyField, self).as_dict()


class RemoteChoiceField(RemoteField):
    def as_dict(self):
        """Serialize without choices (choice expansion is switched off)."""
        field_dict = super(RemoteChoiceField, self).as_dict()
        #temporary switch off
        '''
        field_dict['choices'] = []
        for key, value in self.field.choices:
            field_dict['choices'].append({
                'value': key,
                'display': value
            })
        '''
        #field_dict['choices'] = []
        field_dict['widget']['choices'] = []
        return field_dict

    def get_dict(self):
        field_dict = {'choices': []}
        '''
        for key, value in self.field.choices:
            field_dict['choices'].append({
                'value': key,
            })
        '''
        #'display': value
        #return field_dict
        return {}


class RemoteTypedChoiceField(RemoteChoiceField):
    def as_dict(self):
        field_dict = super(RemoteTypedChoiceField, self).as_dict()
        field_dict.update({
            'coerce': self.field.coerce,
            'empty_value': self.field.empty_value
        })
        return field_dict


class RemoteToolChoiceField(RemoteTypedChoiceField):
    def get_dict(self):
        """Serialize choices plus the widget's Angular ng-options expression."""
        field_dict = {'choices': self.field.choices,
                      'ng-options': self.field.widget.attrs['ng-options'],
                      }
        #print dir(self.field.widget)
        #print self.field.to_python()
        '''
        for key, value in self.field.choices:
            field_dict['choices'].append({
                'value': key,
            })
        '''
        #'display': value
        #return field_dict
        return field_dict


class RemoteModelChoiceField(RemoteChoiceField):
    def as_dict(self):
        return super(RemoteModelChoiceField, self).as_dict()

    '''
    def get_dict(self):
        #field_dict = {'widget': {'attrs' : self.field.widget.attrs}}
        #field_dict = {'results': self.field.widget.attrs['results']}
        field_dict = {'results': self.field.results}
        if hasattr(self.field, 'img_url'):
            field_dict['img_url'] = self.field.img_url
        return field_dict
    '''


class RemoteMultipleChoiceField(RemoteChoiceField):
    def as_dict(self):
        return super(RemoteMultipleChoiceField, self).as_dict()


class RemoteModelMultipleChoiceField(RemoteMultipleChoiceField):
    def as_dict(self):
        return super(RemoteModelMultipleChoiceField, self).as_dict()


class RemoteTypedMultipleChoiceField(RemoteMultipleChoiceField):
    def as_dict(self):
        field_dict = super(RemoteTypedMultipleChoiceField, self).as_dict()
        field_dict.update({
            'coerce': self.field.coerce,
            'empty_value': self.field.empty_value
        })
        return field_dict


class RemoteComboField(RemoteField):
    def as_dict(self):
        field_dict = super(RemoteComboField, self).as_dict()
        field_dict.update(fields=self.field.fields)
        return field_dict


class RemoteMultiValueField(RemoteField):
    def as_dict(self):
        field_dict = super(RemoteMultiValueField, self).as_dict()
        field_dict['fields'] = self.field.fields
        return field_dict


# NOTE(review): the next class is cut off in this view; kept verbatim.
class RemoteFilePathFi
#!/usr/bin/python
"""Tiny demo: guard math.log against non-positive input.

Uses print() call syntax so the output is identical under Python 2 and
Python 3 (the original used Python-2-only print statements).
"""

import math


# return statement
def printLog(x):
    """Print the natural logarithm of x.

    Prints an error message and returns None when x <= 0 (where math.log
    would raise ValueError); otherwise prints and returns math.log(x).
    The return value is new but backward compatible (callers previously
    received the implicit None).
    """
    if x <= 0:
        print("Positive number only, please.")
        return None
    result = math.log(x)
    # Single concatenated string so Python 2's statement-print and
    # Python 3's function-print produce the same output.
    print("The log of x is " + str(result))
    return result


x, y = -2, 3
printLog(y)
# NOTE(review): the opening of this class definition (presumably
# `class Project(Document):`) is cut off in this view; the fragment
# below is kept verbatim so the surrounding file is unchanged.
ument):
    def get_feed(self):
        """One-line feed summary: '<status>: <project name>'."""
        return '{0}: {1}'.format(_(self.status), self.project_name)

    def onload(self):
        """Load project tasks for quick view"""
        if not self.get('__unsaved') and not self.get("tasks"):
            self.load_tasks()

        # Hours per activity type, for the dashboard summary.
        self.set_onload('activity_summary', frappe.db.sql('''select activity_type, sum(hours) as total_hours from `tabTimesheet Detail` where project=%s and docstatus < 2 group by activity_type order by total_hours desc''', self.name, as_dict=True))

    def __setup__(self):
        self.onload()

    def load_tasks(self):
        """Load `tasks` from the database"""
        self.tasks = []
        for task in self.get_tasks():
            task_map = {
                "title": task.subject,
                "status": task.status,
                "start_date": task.exp_start_date,
                "end_date": task.exp_end_date,
                "description": task.description,
                "task_id": task.name,
                "task_weight": task.task_weight
            }

            self.map_custom_fields(task, task_map)

            self.append("tasks", task_map)

    def get_tasks(self):
        # All tasks of this project, earliest expected start first.
        return frappe.get_all("Task", "*", {"project": self.name}, order_by="exp_start_date asc")

    def validate(self):
        self.validate_dates()
        self.validate_weights()
        self.sync_tasks()
        # Rows were synced into real Task docs above; clear the child table.
        self.tasks = []
        self.send_welcome_email()

    def validate_dates(self):
        """Expected end date must not precede expected start date."""
        if self.expected_start_date and self.expected_end_date:
            if getdate(self.expected_end_date) < getdate(self.expected_start_date):
                frappe.throw(_("Expected End Date can not be less than Expected Start Date"))

    def validate_weights(self):
        """When task weights are used, positive weights must sum to exactly 1."""
        sum = 0
        for task in self.tasks:
            if task.task_weight > 0:
                sum = sum + task.task_weight
        if sum > 0 and sum != 1:
            frappe.throw(_("Total of all task weights should be 1. Please adjust weights of all Project tasks accordingly"))

    def sync_tasks(self):
        """sync tasks and remove table"""
        if self.flags.dont_sync_tasks: return
        task_names = []
        for t in self.tasks:
            if t.task_id:
                task = frappe.get_doc("Task", t.task_id)
            else:
                task = frappe.new_doc("Task")
                task.project = self.name

            task.update({
                "subject": t.title,
                "status": t.status,
                "exp_start_date": t.start_date,
                "exp_end_date": t.end_date,
                "description": t.description,
                "task_weight": t.task_weight
            })

            self.map_custom_fields(t, task)

            task.flags.ignore_links = True
            task.flags.from_project = True
            task.flags.ignore_feed = True
            task.save(ignore_permissions = True)
            task_names.append(task.name)

        # delete any Task of this project that is no longer in the child table
        for t in frappe.get_all("Task", ["name"], {"project": self.name, "name": ("not in", task_names)}):
            frappe.delete_doc("Task", t.name)

        self.update_percent_complete()
        self.update_costing()

    def map_custom_fields(self, source, target):
        # Copy any "Project Task" custom-field values from source to target.
        project_task_custom_fields = frappe.get_all("Custom Field", {"dt": "Project Task"}, "fieldname")
        for field in project_task_custom_fields:
            target.update({
                field.fieldname: source.get(field.fieldname)
            })

    def update_project(self):
        self.update_percent_complete()
        self.update_costing()
        self.flags.dont_sync_tasks = True
        self.save(ignore_permissions = True)

    def update_percent_complete(self):
        """Recompute percent_complete per the configured method
        (Task Completion / Task Progress / Task Weight)."""
        total = frappe.db.sql("""select count(name) from tabTask where project=%s""", self.name)[0][0]
        if not total and self.percent_complete:
            self.percent_complete = 0
        if (self.percent_complete_method == "Task Completion" and total > 0) or (not self.percent_complete_method and total > 0):
            completed = frappe.db.sql("""select count(name) from tabTask where project=%s and status in ('Closed', 'Cancelled')""", self.name)[0][0]
            self.percent_complete = flt(flt(completed) / total * 100, 2)
        if (self.percent_complete_method == "Task Progress" and total > 0):
            progress = frappe.db.sql("""select sum(progress) from tabTask where project=%s""", self.name)[0][0]
            self.percent_complete = flt(flt(progress) / total, 2)
        if (self.percent_complete_method == "Task Weight" and total > 0):
            weight_sum = frappe.db.sql("""select sum(task_weight) from tabTask where project=%s""", self.name)[0][0]
            # Weighted progress is only meaningful when weights sum to 1
            # (enforced by validate_weights).
            if weight_sum == 1:
                weighted_progress = frappe.db.sql("""select progress,task_weight from tabTask where project=%s""", self.name,as_dict=1)
                pct_complete=0
                for row in weighted_progress:
                    pct_complete += row["progress"] * row["task_weight"]
                self.percent_complete = flt(flt(pct_complete), 2)

    def update_costing(self):
        """Aggregate cost/billing/time from Timesheets and Expense Claims."""
        from_time_sheet = frappe.db.sql("""select sum(costing_amount) as costing_amount, sum(billing_amount) as billing_amount, min(from_time) as start_date, max(to_time) as end_date, sum(hours) as time from `tabTimesheet Detail` where project = %s and docstatus = 1""", self.name, as_dict=1)[0]

        from_expense_claim = frappe.db.sql("""select sum(total_sanctioned_amount) as total_sanctioned_amount from `tabExpense Claim` where project = %s and approval_status='Approved' and docstatus = 1""", self.name, as_dict=1)[0]

        self.actual_start_date = from_time_sheet.start_date
        self.actual_end_date = from_time_sheet.end_date

        self.total_costing_amount = from_time_sheet.costing_amount
        self.total_billing_amount = from_time_sheet.billing_amount
        self.actual_time = from_time_sheet.time

        self.total_expense_claim = from_expense_claim.total_sanctioned_amount

        self.gross_margin = flt(self.total_billing_amount) - flt(self.total_costing_amount)

        if self.total_billing_amount:
            self.per_gross_margin = (self.gross_margin / flt(self.total_billing_amount)) *100

    def update_purchase_costing(self):
        total_purchase_cost = frappe.db.sql("""select sum(base_net_amount) from `tabPurchase Invoice Item` where project = %s and docstatus=1""", self.name)

        # sql() may return no row / NULL sum; default to 0.
        self.total_purchase_cost = total_purchase_cost and total_purchase_cost[0][0] or 0

    def update_sales_costing(self):
        total_sales_cost = frappe.db.sql("""select sum(grand_total) from `tabSales Order` where project = %s and docstatus=1""", self.name)

        # sql() may return no row / NULL sum; default to 0.
        self.total_sales_cost = total_sales_cost and total_sales_cost[0][0] or 0

    def send_welcome_email(self):
        """Email a collaboration invite to project users not yet notified."""
        url = get_url("/project/?name={0}".format(self.name))
        messages = (
            _("You have been invited to collaborate on the project: {0}".format(self.name)),
            url,
            _("Join")
        )

        content = """ <p>{0}.</p> <p><a href="{1}">{2}</a></p> """

        for user in self.users:
            if user.welcome_email_sent==0:
                frappe.sendmail(user.user, subject=_("Project Collaboration Invitation"), content=content.format(*messages))
                user.welcome_email_sent=1

    def on_update(self):
        self.load_tasks()
        self.sync_tasks()


def get_timeline_data(doctype, name):
    '''Return timeline for attendance'''
    # Timesheet entries per day over the last year, keyed by unix timestamp.
    return dict(frappe.db.sql('''select unix_timestamp(from_time), count(*) from `tabTimesheet Detail` where project=%s and from_time > date_sub(curdate(), interval 1 year) and docstatus < 2 group by date(from_time)''', name))


def get_project_list(doctype, txt, filters, limit_start, limit_page_length=20):
    # Projects visible to the current user: owned, or shared via Project User.
    return frappe.db.sql('''select distinct project.* from tabProject project, `tabProject User` project_user where (project_user.user = %(user)s and project_user.parent = project.name) or project.owner = %(user)s order by project.modified desc limit {0}, {1} '''.format(limit_start, limit_page_length), {'user':frappe.session.user}, as_dict=True, update={'doctype':'Project'})


def get_list_context(context=None):
    # Portal list-view configuration for the /projects page.
    return {
        "show_sidebar": True,
        "show_search": True,
        'no_breadcrumbs': True,
        "title": _("Projects"),
        "get_list": get_project_list,
        "row_template": "templates/includes/projects/project_row.html"
    }


def get_users_for_project(doctype, txt, searchfield, start, page_len, filters):
    conditions = []
    # NOTE(review): this function is cut off in this view; kept verbatim below.
    return frappe.db.sql("""select name, concat_ws(' ', first_name, middle_name, last_name) from `tabUser` where enabled=1 and name not in ("Guest", "Administrator") and ({key} like %(txt)s or full_name like %(txt)s) {fcond} {mcond} order by if(locate(%(_txt)s, name), locate(%(_txt)s, name), 99999), if(locate(%(_txt)s, full_name), locate(%(_txt)s, full_name), 99999), idx desc, name, full_name limit %(start)s, %(page_len)s""".format(**{ 'key': searchfield, 'fcond': get_filte
from base import MediaFile from fields import MediaFileField
from widgets import AdminMediaFi
leWidget
# -*- encoding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

# Addon manifest for the Password Encryption module.
{
    'name': 'Password Encryption',
    'version': '1.1',
    'author': ['OpenERP SA', 'FS3'],
    'maintainer': 'OpenERP SA',
    'website': 'http://www.openerp.com',
    'category': 'Tools',
    # NOTE: fixed typo in the user-visible description ("Ecrypted" -> "Encrypted")
    # and lengthened the RST title underline to match.
    'description': """
Encrypted passwords
===================

Interaction with LDAP authentication:
-------------------------------------
This module is currently not compatible with the ``user_ldap`` module and
will disable LDAP authentication completely if installed at the same time.
""",
    'depends': ['base'],
    'data': [],
    'auto_install': False,
    'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
# encoding: utf-8
# module pyexpat
# from /usr/lib/python2.7/lib-dynload/pyexpat.x86_64-linux-gnu.so
# by generator 1.135
# NOTE(review): this file is an IDE-generated *stub* ("skeleton") of the C
# extension module `pyexpat` — function bodies are placeholders, not logic.
"""
Python wrapper for Expat parser.
"""
# imports
import pyexpat.errors as errors # <module 'pyexpat.errors' (built-in)>
import pyexpat.model as model # <module 'pyexpat.model' (built-in)>

# Variables with simple values
EXPAT_VERSION = 'expat_2.1.0'

native_encoding = 'UTF-8'

# Constants controlling how the parser handles parameter entities.
XML_PARAM_ENTITY_PARSING_ALWAYS = 2
XML_PARAM_ENTITY_PARSING_NEVER = 0
XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE = 1

__version__ = '2.7.8'

# functions

def ErrorString(errno): # real signature unknown; restored from __doc__
    """
    ErrorString(errno) -> string
    Returns string error for given number.
    """
    # Stub body only; the real implementation lives in the C extension.
    return ""

def ParserCreate(encoding=None, namespace_separator=None): # real signature unknown; restored from __doc__
    """
    ParserCreate([encoding[, namespace_separator]]) -> parser
    Return a new XML parser object.
    """
    pass

# classes

# NOTE(review): `from Exception import Exception` / `from object import object`
# below are artifacts of the stub generator — they are not importable modules.
from Exception import Exception

class ExpatError(Exception):
    # no doc
    def __init__(self, *args, **kwargs): # real signature unknown
        pass

    __weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
    """list of weak references to the object (if defined)"""

# `error` is the historical alias for ExpatError.
error = ExpatError

from object import object

class XMLParserType(object):
    """ XML parser """
    def __init__(self, *args, **kwargs): # real signature unknown
        pass

# variables with complex values

expat_CAPI = None # (!) real value is ''

# Compile-time feature flags of the underlying Expat build.
features = [
    (
        'sizeof(XML_Char)',
        1,
    ),
    (
        'sizeof(XML_LChar)',
        1,
    ),
    (
        'XML_DTD',
        0,
    ),
    (
        'XML_CONTEXT_BYTES',
        1024,
    ),
    (
        'XML_NS',
        0,
    ),
]

version_info = (
    2,
    1,
    0,
)
"""Support for the AccuWeather service."""
from __future__ import annotations

from statistics import mean
from typing import Any, cast

from homeassistant.components.weather import (
    ATTR_FORECAST_CONDITION,
    ATTR_FORECAST_PRECIPITATION,
    ATTR_FORECAST_PRECIPITATION_PROBABILITY,
    ATTR_FORECAST_TEMP,
    ATTR_FORECAST_TEMP_LOW,
    ATTR_FORECAST_TIME,
    ATTR_FORECAST_WIND_BEARING,
    ATTR_FORECAST_WIND_SPEED,
    Forecast,
    WeatherEntity,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
    CONF_NAME,
    SPEED_MILES_PER_HOUR,
    TEMP_CELSIUS,
    TEMP_FAHRENHEIT,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType
from homeassistant.helpers.entity import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.util.dt import utc_from_timestamp

from . import AccuWeatherDataUpdateCoordinator
from .const import (
    API_IMPERIAL,
    API_METRIC,
    ATTR_FORECAST,
    ATTRIBUTION,
    CONDITION_CLASSES,
    DOMAIN,
    MANUFACTURER,
    NAME,
)

PARALLEL_UPDATES = 1


async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
    """Add a AccuWeather weather entity from a config_entry."""
    name: str = entry.data[CONF_NAME]
    coordinator: AccuWeatherDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id]

    async_add_entities([AccuWeatherEntity(name, coordinator)])


class AccuWeatherEntity(CoordinatorEntity, WeatherEntity):
    """Define an AccuWeather entity."""

    coordinator: AccuWeatherDataUpdateCoordinator

    def __init__(
        self, name: str, coordinator: AccuWeatherDataUpdateCoordinator
    ) -> None:
        """Initialize."""
        super().__init__(coordinator)
        # Select which branch of the API payload to read ("Metric"/"Imperial").
        self._unit_system = API_METRIC if coordinator.is_metric else API_IMPERIAL
        wind_speed_unit = self.coordinator.data["Wind"]["Speed"][self._unit_system][
            "Unit"
        ]
        # AccuWeather reports imperial wind speed as "mi/h"; remap it to the
        # Home Assistant constant so unit conversion works.
        if wind_speed_unit == "mi/h":
            self._attr_wind_speed_unit = SPEED_MILES_PER_HOUR
        else:
            self._attr_wind_speed_unit = wind_speed_unit
        self._attr_name = name
        # The AccuWeather location key uniquely identifies this entity.
        self._attr_unique_id = coordinator.location_key
        self._attr_temperature_unit = (
            TEMP_CELSIUS if coordinator.is_metric else TEMP_FAHRENHEIT
        )
        self._attr_attribution = ATTRIBUTION
        self._attr_device_info = DeviceInfo(
            entry_type=DeviceEntryType.SERVICE,
            identifiers={(DOMAIN, coordinator.location_key)},
            manufacturer=MANUFACTURER,
            name=NAME,
            # You don't need to provide specific details for the URL,
            # so passing in _ characters is fine if the location key
            # is correct
            configuration_url="http://accuweather.com/en/"
            f"_/_/{coordinator.location_key}/"
            f"weather-forecast/{coordinator.location_key}/",
        )

    @property
    def condition(self) -> str | None:
        """Return the current condition."""
        # Reverse lookup: find the HA condition whose icon set contains the
        # AccuWeather "WeatherIcon" code; None when the code is unmapped.
        try:
            return [
                k
                for k, v in CONDITION_CLASSES.items()
                if self.coordinator.data["WeatherIcon"] in v
            ][0]
        except IndexError:
            return None

    @property
    def temperature(self) -> float:
        """Return the temperature."""
        return cast(
            float, self.coordinator.data["Temperature"][self._unit_system]["Value"]
        )

    @property
    def pressure(self) -> float:
        """Return the pressure."""
        return cast(
            float, self.coordinator.data["Pressure"][self._unit_system]["Value"]
        )

    @property
    def humidity(self) -> int:
        """Return the humidity."""
        return cast(int, self.coordinator.data["RelativeHumidity"])

    @property
    def wind_speed(self) -> float:
        """Return the wind speed."""
        return cast(
            float, self.coordinator.data["Wind"]["Speed"][self._unit_system]["Value"]
        )

    @property
    def wind_bearing(self) -> int:
        """Return the wind bearing."""
        return cast(int, self.coordinator.data["Wind"]["Direction"]["Degrees"])

    @property
    def visibility(self) -> float:
        """Return the visibility."""
        return cast(
            float, self.coordinator.data["Visibility"][self._unit_system]["Value"]
        )

    @property
    def ozone(self) -> int | None:
        """Return the ozone level."""
        # We only have ozone data for certain locations and only in the forecast data.
        if self.coordinator.forecast and self.coordinator.data[ATTR_FORECAST][0].get(
            "Ozone"
        ):
            return cast(int, self.coordinator.data[ATTR_FORECAST][0]["Ozone"]["Value"])
        return None

    @property
    def forecast(self) -> list[Forecast] | None:
        """Return the forecast array."""
        if not self.coordinator.forecast:
            return None
        # remap keys from library to keys understood by the weather component
        return [
            {
                ATTR_FORECAST_TIME: utc_from_timestamp(item["EpochDate"]).isoformat(),
                ATTR_FORECAST_TEMP: item["TemperatureMax"]["Value"],
                ATTR_FORECAST_TEMP_LOW: item["TemperatureMin"]["Value"],
                ATTR_FORECAST_PRECIPITATION: self._calc_precipitation(item),
                # Probability is the mean of the day and night probabilities.
                ATTR_FORECAST_PRECIPITATION_PROBABILITY: round(
                    mean(
                        [
                            item["PrecipitationProbabilityDay"],
                            item["PrecipitationProbabilityNight"],
                        ]
                    )
                ),
                ATTR_FORECAST_WIND_SPEED: item["WindDay"]["Speed"]["Value"],
                ATTR_FORECAST_WIND_BEARING: item["WindDay"]["Direction"]["Degrees"],
                ATTR_FORECAST_CONDITION: [
                    k for k, v in CONDITION_CLASSES.items() if item["IconDay"] in v
                ][0],
            }
            for item in self.coordinator.data[ATTR_FORECAST]
        ]

    @staticmethod
    def _calc_precipitation(day: dict[str, Any]) -> float:
        """Return sum of the precipitation."""
        # Total precipitation = rain + snow + ice, day and night combined.
        precip_sum = 0
        precip_types = ["Rain", "Snow", "Ice"]
        for precip in precip_types:
            precip_sum = sum(
                [
                    precip_sum,
                    day[f"{precip}Day"]["Value"],
                    day[f"{precip}Night"]["Value"],
                ]
            )
        return round(precip_sum, 1)
#!/usr/bin/python -tt # Copyright 2010 Google Inc. # Licensed under the Apache License, Version 2.0 # http://www.apache.org/licenses/LICENSE-2.0 # Google's Python Class # http://code.google.com/edu/languages/google-python-class/ # Basic string exercises # Fill in the code for the functions below. main() is already set up # to call the functions with a few different inputs, # printing 'OK' when each function is correct. # The starter code for each function includes a 'return' # which is just a placeholder for your code. # It's ok if you do not complete all the functions, and there # are some a
dditional functions to try in string2.py. # A. donuts # Given an int count of a number of donuts, return a string # of the form 'Number of donuts: <count>', where <count> is the number # passed in. However, if the count is 10 or more, then use the word 'many' # instead of the actual count. # So donuts(5) returns 'Number of donuts: 5' # and donuts(23) returns 'Number of donuts: many' def donuts(count): if count < 10: return 'Number of donuts: ' + str(count) els
e: return 'Number of donuts: many' # B. both_ends # Given a string s, return a string made of the first 2 # and the last 2 chars of the original string, # so 'spring' yields 'spng'. However, if the string length # is less than 2, return instead the empty string. def both_ends(s): if len(s) >= 2: return s[0] + s[1] + s[-2] + s[-1] else: return '' # C. fix_start # Given a string s, return a string # where all occurences of its first char have # been changed to '*', except do not change # the first char itself. # e.g. 'babble' yields 'ba**le' # Assume that the string is length 1 or more. # Hint: s.replace(stra, strb) returns a version of string s # where all instances of stra have been replaced by strb. def fix_start(s): first_char = s[0] rest = s[1:] return first_char + rest.replace(first_char,'*') # D. MixUp # Given strings a and b, return a single string with a and b separated # by a space '<a> <b>', except swap the first 2 chars of each string. # e.g. # 'mix', pod' -> 'pox mid' # 'dog', 'dinner' -> 'dig donner' # Assume a and b are length 2 or more. def mix_up(a, b): first_a = a[:2] rest_a = a[2:] first_b = b[:2] rest_b = b[2:] return first_b + rest_a + ' ' + first_a + rest_b # Provided simple test() function used in main() to print # what each function returns vs. what it's supposed to return. def test(got, expected): if got == expected: prefix = ' OK ' else: prefix = ' X ' print '%s got: %s expected: %s' % (prefix, repr(got), repr(expected)) # Provided main() calls the above functions with interesting inputs, # using test() to check if each result is correct or not. def main(): print 'donuts' # Each line calls donuts, compares its result to the expected for that call. 
test(donuts(4), 'Number of donuts: 4') test(donuts(9), 'Number of donuts: 9') test(donuts(10), 'Number of donuts: many') test(donuts(99), 'Number of donuts: many') print print 'both_ends' test(both_ends('spring'), 'spng') test(both_ends('Hello'), 'Helo') test(both_ends('a'), '') test(both_ends('xyz'), 'xyyz') print print 'fix_start' test(fix_start('babble'), 'ba**le') test(fix_start('aardvark'), 'a*rdv*rk') test(fix_start('google'), 'goo*le') test(fix_start('donut'), 'donut') print print 'mix_up' test(mix_up('mix', 'pod'), 'pox mid') test(mix_up('dog', 'dinner'), 'dig donner') test(mix_up('gnash', 'sport'), 'spash gnort') test(mix_up('pezzy', 'firm'), 'fizzy perm') # Standard boilerplate to call the main() function. if __name__ == '__main__': main()
def main():
    """Demonstrate basic Python list operations, printing each step.

    Fixes over the original: all output normalized to the Python 3
    ``print()`` function (the original mixed Py2 and Py3 forms), and the
    final line — ``print 'After remove(a0), a = ' a`` — was a syntax
    error (missing comma) and is now a valid call.
    """
    # init an (empty) list named a — both forms are equivalent.
    a = list()
    a = []
    # Lists are heterogeneous: ints, strings, nested lists all allowed.
    b = [1, '1', [1, 2]]
    # Get the size of a list.
    a_size = len(a)
    # How to check if a list is empty: an empty list is falsy.
    if a:
        print("not empty")
    else:
        print("empty")
    index = 0
    a = ['a', 'b', 'c']
    print(a[index])
    a.append('d')
    a.extend(['e'])
    print('After append a, extend [e]')
    print(a)
    a.insert(2, 'bb')
    print('After insert bb at 2')
    print(a)
    a.insert(0, 'a0')
    print('After insert a0 at 0')
    print(a)
    # Find the index of an item in a list.
    answer_1 = a.index('a')
    answer_0 = a.index('a0')
    print('use a.index(item) to find the index only for the first item')
    # list.pop() returns the last item in the list and removes it.
    print('Before a.pop(), a = ', a)
    print('a.pop() = ', a.pop())
    print('After a.pop(), a = ', a)
    # Remove an item by value.
    a.remove('a0')
    print('After remove(a0), a = ', a)


main()
# #  product
# Staff-only CRUD views for Tool_Type configuration records.
import logging
from django.contrib import messages
from django.contrib.auth.decorators import user_passes_test
from django.urls import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render
from dojo.utils import add_breadcrumb
from dojo.forms import ToolTypeForm
from dojo.models import Tool_Type

logger = logging.getLogger(__name__)


@user_passes_test(lambda u: u.is_staff)
def new_tool_type(request):
    # Create a new Tool_Type: on a valid POST, save and redirect back to the
    # list view with a success flash message; otherwise render the form
    # (re-showing a bound, invalid form after a failed POST).
    if request.method == 'POST':
        tform = ToolTypeForm(request.POST, instance=Tool_Type())
        if tform.is_valid():
            tform.save()
            messages.add_message(request,
                                 messages.SUCCESS,
                                 'Tool Type Configuration Successfully Created.',
                                 extra_tags='alert-success')
            return HttpResponseRedirect(reverse('tool_type', ))
    else:
        tform = ToolTypeForm()
        # NOTE(review): breadcrumb is only added on the initial GET, so an
        # invalid POST re-renders without it. Indentation reconstructed from
        # collapsed source — confirm against the project history.
        add_breadcrumb(title="New Tool Type Configuration", top_level=False, request=request)
    return render(request, 'dojo/new_tool_type.html', {'tform': tform})


@user_passes_test(lambda u: u.is_staff)
def edit_tool_type(request, ttid):
    # Edit an existing Tool_Type identified by primary key `ttid`.
    tool_type = Tool_Type.objects.get(pk=ttid)
    if request.method == 'POST':
        tform = ToolTypeForm(request.POST, instance=tool_type)
        if tform.is_valid():
            tform.save()
            messages.add_message(request,
                                 messages.SUCCESS,
                                 'Tool Type Configuration Successfully Updated.',
                                 extra_tags='alert-success')
            return HttpResponseRedirect(reverse('tool_type', ))
    else:
        tform = ToolTypeForm(instance=tool_type)
        # NOTE(review): same GET-only breadcrumb placement as new_tool_type.
        add_breadcrumb(title="Edit Tool Type Configuration", top_level=False, request=request)
    return render(request, 'dojo/edit_tool_type.html', {
        'tform': tform,
    })


@user_passes_test(lambda u: u.is_staff)
def tool_type(request):
    # List all Tool_Type records ordered by name; the breadcrumb is top-level
    # only when no query-string filters are present.
    confs = Tool_Type.objects.all().order_by('name')
    add_breadcrumb(title="Tool Type List", top_level=not len(request.GET), request=request)
    return render(request, 'dojo/tool_type.html', {'confs': confs, })
# ...r more details.  (truncated tail of the GNU GPL license header)
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from pygraph.algorithms.accessibility import accessibility, mutual_accessibility
from pygraph.classes.digraph import digraph
from pygraph.algorithms.minmax import maximum_flow
from pyvotecore.condorcet import CondorcetHelper
from pyvotecore.common_functions import matching_keys, unique_permutations

# Pairwise preference codes used in ballot "patterns": how a candidate ranks
# against one other candidate on a single ballot.
PREFERRED_LESS = 1
PREFERRED_SAME = 2
PREFERRED_MORE = 3

# Convergence tolerance / threshold for the iterative strength computation.
STRENGTH_TOLERANCE = 0.0000000001
STRENGTH_THRESHOLD = 0.1

# Sentinel node ids for the source/sink of the vote-management flow graph.
NODE_SINK = -1
NODE_SOURCE = -2


# This class implements the Schulze Method (aka the beatpath method)
class SchulzeHelper(CondorcetHelper):

    def condorcet_completion_method(self):
        # When no Condorcet winner exists, fall back to the Schwartz set heuristic.
        self.schwartz_set_heuristic()

    def schwartz_set_heuristic(self):

        # Iterate through using the Schwartz set heuristic
        # Records each pruning step in self.actions for later inspection.
        self.actions = []
        while len(self.graph.edges()) > 0:
            access = accessibility(self.graph)
            mutual_access = mutual_accessibility(self.graph)
            candidates_to_remove = set()
            # A candidate reachable from X but not mutually reachable lies
            # outside every cycle through X.
            for candidate in self.graph.nodes():
                candidates_to_remove |= (set(access[candidate]) - set(mutual_access[candidate]))

            # Remove nodes at the end of non-cycle paths
            if len(candidates_to_remove) > 0:
                self.actions.append({'nodes': candidates_to_remove})
                for candidate in candidates_to_remove:
                    self.graph.del_node(candidate)

            # If none exist, remove the weakest edges
            else:
                edge_weights = self.edge_weights(self.graph)
                self.actions.append({'edges': matching_keys(edge_weights, min(edge_weights.values()))})
                for edge in self.actions[-1]["edges"]:
                    self.graph.del_edge(edge)

        self.graph_winner()

    def generate_vote_management_graph(self):
        # Build the bipartite-ish capacity graph:
        #   SOURCE -> each completed pattern -> each "seat" index it supports -> SINK.
        self.vote_management_graph = digraph()
        self.vote_management_graph.add_nodes(self.completed_patterns)
        # The all-PREFERRED_MORE pattern contributes nothing to vote management.
        self.vote_management_graph.del_node(tuple([PREFERRED_MORE] * self.required_winners))
        self.pattern_nodes = self.vote_management_graph.nodes()
        self.vote_management_graph.add_nodes([NODE_SOURCE, NODE_SINK])
        for pattern_node in self.pattern_nodes:
            self.vote_management_graph.add_edge((NODE_SOURCE, pattern_node))
        for i in range(self.required_winners):
            self.vote_management_graph.add_node(i)
        for pattern_node in self.pattern_nodes:
            for i in range(self.required_winners):
                # NOTE(review): compares against literal 1 (== PREFERRED_LESS);
                # presumably intentional, but confirm it should not be a named constant.
                if pattern_node[i] == 1:
                    self.vote_management_graph.add_edge((pattern_node, i))
        for i in range(self.required_winners):
            self.vote_management_graph.add_edge((i, NODE_SINK))

    # Generates a list of all patterns that do not contain indifference
    def generate_completed_patterns(self):
        self.completed_patterns = []
        for i in range(0, self.required_winners + 1):
            for pattern in unique_permutations(
                [PREFERRED_LESS] * (self.required_winners - i)
                + [PREFERRED_MORE] * (i)
            ):
                self.completed_patterns.append(tuple(pattern))

    def proportional_completion(self, candidate, other_candidates):
        # Tally each ballot's preference pattern for `candidate` versus every
        # other candidate, then redistribute indifferent (PREFERRED_SAME)
        # patterns proportionally over the completed (indifference-free) ones.
        profile = dict(zip(self.completed_patterns, [0] * len(self.completed_patterns)))

        # Obtain an initial tally from the ballots
        for ballot in self.ballots:
            pattern = []
            for other_candidate in other_candidates:
                if ballot["ballot"][candidate] < ballot["ballot"][other_candidate]:
                    pattern.append(PREFERRED_LESS)
                elif ballot["ballot"][candidate] == ballot["ballot"][other_candidate]:
                    pattern.append(PREFERRED_SAME)
                else:
                    pattern.append(PREFERRED_MORE)
            pattern = tuple(pattern)
            if pattern not in profile:
                profile[pattern] = 0.0
            profile[pattern] += ballot["count"]

        # Total ballot weight must be conserved by the completion below.
        weight_sum = sum(profile.values())

        # Peel off patterns with indifference (from the most to the least) and apply proportional completion to them
        for pattern in sorted(profile.keys(), key=lambda pattern: pattern.count(PREFERRED_SAME), reverse=True):
            if pattern.count(PREFERRED_SAME) == 0:
                break
            self.proportional_completion_round(pattern, profile)

        # NOTE(review): bare `except:` around a sanity assert — it only prints
        # a diagnostic and continues; consider `except AssertionError:`.
        try:
            assert round(weight_sum, 5) == round(sum(profile.values()), 5)
        except:
            print ("Proportional completion broke (went from %s to %s)" % (weight_sum, sum(profile.values())))

        return profile

    def proportional_completion_round(self, completion_pattern, profile):
        # Redistribute the weight of one indifference-containing pattern onto
        # the existing patterns that complete it, proportionally to their weight.

        # Remove pattern that contains indifference
        weight_sum = sum(profile.values())
        completion_pattern_weight = profile[completion_pattern]
        del profile[completion_pattern]

        patterns_to_consider = {}
        for pattern in profile.keys():
            append = False
            append_target = []
            # Build the candidate completion: indifferent slots take the other
            # pattern's value; decided slots keep the completion pattern's value.
            for i in range(len(completion_pattern)):
                if completion_pattern[i] == PREFERRED_SAME:
                    append_target.append(pattern[i])
                    if pattern[i] != PREFERRED_SAME:
                        append = True
                else:
                    append_target.append(completion_pattern[i])
            append_target = tuple(append_target)
            if append is True and append_target in profile:
                append_target = tuple(append_target)
                if append_target not in patterns_to_consider:
                    patterns_to_consider[append_target] = set()
                patterns_to_consider[append_target].add(pattern)

        denominator = 0
        for (append_target, patterns) in patterns_to_consider.items():
            for pattern in patterns:
                denominator += profile[pattern]

        # Reweight the remaining items
        for pattern in patterns_to_consider.keys():
            if denominator == 0:
                # No support anywhere: split the weight evenly instead.
                profile[pattern] += completion_pattern_weight / len(patterns_to_consider)
            else:
                if pattern not in profile:
                    profile[pattern] = 0
                profile[pattern] += sum(profile[considered_pattern] for considered_pattern in patterns_to_consider[pattern]) * completion_pattern_weight / denominator

        # NOTE(review): same bare-except sanity check as proportional_completion.
        try:
            assert round(weight_sum, 5) == round(sum(profile.values()), 5)
        except:
            print ("Proportional completion round broke (went from %s to %s)" % (weight_sum, sum(profile.values())))

        return profile

    # This method converts the voter profile into a capacity graph and iterates
    # on the maximum flow using the Edmonds Karp algorithm. The end result is
    # the limit of the strength of the voter management as per Markus Schulze's
    # Calcul02.pdf (draft, 28 March 2008, abstract: "In this paper we illustrate
    # the calculation of the strengths of the vote managements.").
def strength_of_vote_management(self, voter_profile): # Initialize the graph weights for pattern in self.pattern_nodes: self.vote_management_graph.set_edge_weight((NODE_SOURCE, pattern), voter_profile[pattern]) for i in range(self.required_winners): if pattern[i] == 1: self.vote_management_graph.set_edge_weight((pattern, i), voter_profile[pattern]) # Iterate towards the limit r = [(float(sum(voter_profile.values())) - voter_profile[tuple([PREFERRED_MORE] * self.required_winners)]) / self.required_winners] while len(r) < 2 or r[-2] - r[-1] > STRENGTH_TOLERANCE: for i in range(self.required_winners): self.vote_management_graph.set_edge_weight((i, NODE_SINK), r[-1]) max_flow = maximum_flow(self.vote_management_graph, NODE_SOURCE, NODE_SINK) sink_sum = sum(v for k, v in max_flow[0].iteritems() if k[1] == NODE_SINK) r.app
# ===============================================================================
# Copyright 2015 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================

# ============= enthought library imports =======================
from traits.api import HasTraits, Str, List, Instance
from traitsui.api import View, UItem, Item, TableEditor
from traitsui.table_column import ObjectColumn

from pychron.core.helpers.traitsui_shortcuts import okcancel_view
from pychron.core.ui.enum_editor import myEnumEditor


class Conflict(HasTraits):
    # One row of the conflict table: an analysis whose assigned repository is
    # not among the repositories that already contain its identifier (L#).
    queue_name = Str
    runspec = Instance('pychron.experiment.automated_run.spec.AutomatedRunSpec')
    identifier = Str
    position = Str
    repository_identifier = Str   # repository currently assigned in the queue
    repository_ids = Str          # comma-joined repositories that already hold this L#
    available_ids = List          # choices offered in the enum editor


class ConflictResolver(HasTraits):
    """Modal helper that lets the user re-assign repository identifiers for
    analyses whose queue assignment conflicts with the database."""

    conflicts = List
    available_ids = List

    def apply(self):
        # Push each (possibly user-edited) repository choice back onto its runspec.
        for c in self.conflicts:
            c.runspec.repository_identifier = c.repository_identifier

    def add_conflicts(self, qname, cs):
        # cs: iterable of (runspec, existing_repositories) pairs for queue `qname`.
        for ai, exps in cs:
            self.conflicts.append(Conflict(queue_name=qname,
                                           runspec=ai,
                                           position=ai.position,
                                           repository_identifier=ai.repository_identifier,
                                           identifier=ai.identifier,
                                           repository_ids=','.join(exps),
                                           available_ids=self.available_ids))

    def traits_view(self):
        # Editable column: only the assigned repository; all context columns read-only.
        cols = [ObjectColumn(name='queue_name', editable=False),
                ObjectColumn(name='identifier', editable=False),
                ObjectColumn(name='position', editable=False),
                ObjectColumn(name='repository_identifier',
                             label='Assigned Repository',
                             tooltip='Repository assigned to this analysis in the Experiment Queue',
                             editor=myEnumEditor(name='available_ids')),
                ObjectColumn(name='repository_ids',
                             label='Existing Repositories',
                             tooltip='Set of repositories that already contain this L#',
                             editable=False)]
        v = okcancel_view(UItem('conflicts', editor=TableEditor(columns=cols)),
                          title='Resolve Repository Conflicts')
        return v


if __name__ == '__main__':
    # Manual smoke test: loads real data from a local MySQL pychron database
    # and pops the resolver dialog from a one-button Traits window.
    def main():
        from pychron.paths import paths
        paths.build('_dev')
        from pychron.core.helpers.logger_setup import logging_setup
        from pychron.experiment.automated_run.spec import AutomatedRunSpec
        logging_setup('dvcdb')
        from pychron.dvc.dvc_database import DVCDatabase
        from itertools import groupby
        db = DVCDatabase(kind='mysql', host='localhost', username='root',
                         name='pychronmeta', password='Argon')
        db.connect()
        identifiers = ['63290', '63291']
        runs = [AutomatedRunSpec(identifier='63290',
                                 repository_identifier='Cather_McIntoshd')]
        cr = ConflictResolver()
        experiments = {}
        cr.available_ids = db.get_repository_identifiers()
        eas = db.get_associated_repositories(identifiers)
        # Group (repository, identifier) rows by identifier.
        for idn, exps in groupby(eas, key=lambda x: x[1]):
            experiments[idn] = [e[0] for e in exps]
        conflicts = []
        for ai in runs:
            identifier = ai.identifier
            es = experiments[identifier]
            if ai.repository_identifier not in es:
                conflicts.append((ai, es))
        if conflicts:
            cr.add_conflicts('Foo', conflicts)
        if cr.conflicts:
            info = cr.edit_traits(kind='livemodal')
            if info.result:
                cr.apply()
        # for ci in runs:
        #     print ci.identifier, ci.experiment_identifier

    from traits.api import Button

    class Demo(HasTraits):
        test = Button

        def traits_view(self):
            return View(Item('test'))

        def _test_fired(self):
            main()

    d = Demo()
    d.configure_traits()

# ============= EOF =============================================
oding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models from ..compat import USER_MODEL class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'Review' db.create_table(u'review_review', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])), ('object_id', self.gf('django.db.models.fields.PositiveIntegerField')()), ('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm[USER_MODEL['orm_label']], null=True, blank=True)), ('content', self.gf('django.db.models.fields.TextField')(max_length=1024, blank=True)), ('language', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)), ('creation_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
)) db.send_create_signal(u'review', ['Review']) # Adding model 'ReviewExtraInfo'
db.create_table(u'review_reviewextrainfo', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('type', self.gf('django.db.models.fields.CharField')(max_length=256)), ('review', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['review.Review'])), ('content_type', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['contenttypes.ContentType'])), ('object_id', self.gf('django.db.models.fields.PositiveIntegerField')()), )) db.send_create_signal(u'review', ['ReviewExtraInfo']) # Adding model 'RatingCategory' db.create_table(u'review_ratingcategory', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), )) db.send_create_signal(u'review', ['RatingCategory']) # Adding model 'RatingCategoryTranslation' db.create_table(u'review_ratingcategorytranslation', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('name', self.gf('django.db.models.fields.CharField')(max_length=256)), ('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['review.RatingCategory'])), ('language', self.gf('django.db.models.fields.CharField')(max_length=2)), )) db.send_create_signal(u'review', ['RatingCategoryTranslation']) # Adding model 'Rating' db.create_table(u'review_rating', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('value', self.gf('django.db.models.fields.CharField')(max_length=20)), ('review', self.gf('django.db.models.fields.related.ForeignKey')(related_name='ratings', to=orm['review.Review'])), ('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['review.RatingCategory'])), )) db.send_create_signal(u'review', ['Rating']) def backwards(self, orm): # Deleting model 'Review' db.delete_table(u'review_review') # Deleting model 'ReviewExtraInfo' db.delete_table(u'review_reviewextrainfo') # Deleting model 'RatingCategory' db.delete_table(u'review_ratingcategory') # Deleting model 'RatingCategoryTranslation' 
db.delete_table(u'review_ratingcategorytranslation') # Deleting model 'Rating' db.delete_table(u'review_rating') models = { u'auth.group': { 'Meta': {'object_name': 'Group'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, u'auth.permission': { 'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, USER_MODEL['model_label']: { 'Meta': {'object_name': USER_MODEL['object_name']}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': 
('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, u'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, u'review.rating': { 'Meta': {'ordering': "['category', 'review']", 'object_name': 'Rating'}, 'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['review.RatingCategory']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'review': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'ratings'", 'to': u"orm['review.Review']"}), 'value': ('django.db.models.fields.CharField', [], {'max_length': '20'}) }, u'review.ratingcategory': { 'Meta': {'object_name': 'RatingCategory'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) }, u'review.ratingcategorytranslation': { 'Meta': {'object_name': 'RatingCategoryTranslation'}, 'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['review.RatingCategory']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'language': ('django.db.models.fields.CharField', [], {'max_length': '2'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}) }, u'review.review': { 'Meta': {'ordering': 
"['-creation_date']", 'object_name': 'Review'}, 'content': ('django.db.models.fields.TextField', [], {'max_leng
from .plot_widget import PlotWidget from .filter_popup import FilterPopup from .filterable_kw_list_model import Filtera
bleKwListModel from .data_type_keys_list_model import DataTypeKeysListModel from .data_type_proxy_model import DataTypeProxyModel from .data_type_keys_widget import DataTypeKeysWidget from .plot_case_model import PlotCaseModel from .plot_case_selection_widget import CaseSelectionWidget from .color_chooser import ColorBox from .style_chooser import StyleChooser from .plot_window import PlotWindow from .plot
_tool import PlotTool
# 2048 bot: drives http://gabrielecirulli.github.io/2048/ in Firefox via
# Selenium, scrapes the tile DOM into a 4x4 grid, and asks the `brain`
# module to pick each move until the "retry" (game over) button appears.
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from random import randint
from time import sleep

import brain
import game

# Browser session and the two DOM elements used throughout:
# `container` receives the arrow-key presses, `retry` becoming visible
# signals game over.
drv = webdriver.Firefox()
drv.get('http://gabrielecirulli.github.io/2048/')
container = drv.find_element_by_class_name('tile-container')
retry = drv.find_element_by_class_name('retry-button')

# 4x4 grid of tile values; None marks an empty cell.
board = [[None, None, None, None], [None, None, None, None],
         [None, None, None, None], [None, None, None, None]]


def move_up():
    # Send an UP arrow key to the game board.
    container.send_keys(Keys.UP)


def move_down():
    # Send a DOWN arrow key to the game board.
    container.send_keys(Keys.DOWN)


def move_left():
    # Send a LEFT arrow key to the game board.
    container.send_keys(Keys.LEFT)


def move_right():
    # Send a RIGHT arrow key to the game board.
    container.send_keys(Keys.RIGHT)


def zero_board():
    # Reset the module-level board to all-empty before a rescan.
    global board
    board = [[None, None, None, None], [None, None, None, None],
             [None, None, None, None], [None, None, None, None]]


def update_board():
    # Re-read the tile DOM into `board`.
    # Assumes each tile's class list looks like
    # ['tile', 'tile-<value>', 'tile-position-<x>-<y>', ...] so that
    # tile[1] carries the value and tile[2] the 1-based (x, y) position
    # — TODO confirm against the page's markup (merged tiles may produce
    # several entries for the same cell; the last one scanned wins).
    global board
    sleep(0.1)  # small settle delay so the move animation can finish
    tiles = container.find_elements_by_class_name('tile')
    tiledata = list(map(lambda x: x.get_attribute('class').split(), tiles))
    zero_board()
    for tile in tiledata:
        value = tile[1].split('-')[1]
        pos = tile[2].split('-')[-2:]
        # pos is [x, y]; board is indexed row-major as board[y-1][x-1].
        board[int(pos[1]) - 1][int(pos[0]) - 1] = int(value)


def pick_move():
    # Ask the brain for the best move. `predictions[0]` appears to be the
    # reference board and the remaining entries candidate boards, scored
    # against it — presumably one per direction; `brain.choose` is expected
    # to return one of the move_* callables (the caller invokes the result).
    # TODO confirm against the brain module.
    global board
    g = game.Game(board)
    predictions = brain.predict_next_board(g)
    scores = []
    for p in predictions[1:]:
        print(p, len(p))  # debug output of each candidate board
        score = brain.weight_boards(predictions[0], p)
        scores.append(score)
    return brain.choose(scores)


# Main loop: play until the retry button (game over overlay) is shown.
while not retry.is_displayed():
    update_board()
    pick_move()()  # pick_move returns a move_* function; call it
    sleep(2)
    update_board()
    for b in board:
        print(b)
    sleep(2)

# First line of the score container is the current score (the rest is the
# "+N" increment animation text).
print("Score: ", drv.find_element_by_class_name('score-container').text.splitlines()[0])
print("Game Over")
# -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2004-2011 Pexego Sistemas Informáticos. All Rights Reserved # $Omar Castiñeira Saavedra$ # # This program is free software: you can redistribute it and/or modify # it under the terms of
the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public L
icense for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ##############################################################################
#-*- coding: utf-8 -*- # collections.py # Define various kind of collections # # Copyright (C) 2016 Jakub Kadlcik # # This copyrighted material is made available to anyone wishing to use, # modify, copy, or redistribute it subject to the terms and conditions of # the GNU General Public License v.2, or (at your option) any later version. # This program is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY expressed or implied, including the implied warranties of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General # Public License for more details. You should have received a copy of the # GNU General Public License along with this program; if not, write to the # Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA # 02110-1301, USA. # from __future__ import absolute_import from operator import attrgetter, methodcaller from psutil import NoSuchProcess class Collection(list): def replace_values(self, attribute, source_value, required_value): for app in self: if getattr(app, attribute) == source_value: app.update(attribute, required_value) def sorted(self, attribute): self.replace_values(attribute, None, "") try: return sorted(self, key=methodcaller(attribute)) except TypeError: return sorted(self, key=attrgetter(attribute)) class ApplicationsCollection(Collection): def with_helpers(self): applications = filter(lambda app: app.helper, self) return ApplicationsCollection(applications) def without_helpers(self): applications = filter(lambda app: not app.helper, self) return ApplicationsCollection(applications) def exclude_types(self, app_types): """app_types -- see Applications.TYPES""" applications = filter(lambda app: app.type not in app_types, self) return ApplicationsCollection(applications) def filter_types(self, app_types): """app_types -- see Applications.TYPES""" applications = filter(lambda app: app.type in app_types, self) return ApplicationsCollection(applications) def count_type(self, 
app_type): count = 0 for application in self: if application.type == app_type: count += 1 return count class ProcessesCollection(Collection): def owned_by(self, user): if not user: return self return self.filtered(lambda process: process.username() == user) def newer_than(self, timestamp): return self.filtered(lambda process: process.create_time() >= timestamp) def unique(self): unique = set() for process in self: try: unique.add(process) except NoSuchProcess: pass return Pro
cessesCollection(unique) def filtered(self, function): processes = ProcessesCollection() for process in self: try: if function(process): processes.append(process) except NoSuchProcess: pass return processes class AffectedProcessesCollection(ProcessesCollection): def update(self, iterable): for x in iterable: if x in self: self[self.index(x)].update(x) else: self.append(x) class PackagesCollection(Collection): _
package_manager = None def __init__(self, *args): list.__init__(self, *args) def intersection(self, packages): if packages is not None: return PackagesCollection(set(packages).intersection(self)) return self @property def files(self): files = [] for package in self: files.extend(self._package_manager.package_files(package.name)) return set(files) def unique_newest(self): packages = {} for p in self: if p.name in packages: if packages[p.name].modified > p.modified: continue packages[p.name] = p return PackagesCollection(packages.values())
"""
Analytical template tags and filters.
"""

# NOTE: this module uses Python 2-only syntax (`except X, e`) and the
# long-removed `django.utils.importlib`; it targets a legacy Django/py2 stack.
from __future__ import absolute_import

import logging

from django import template
from django.template import Node, TemplateSyntaxError
from django.utils.importlib import import_module

from templatetags.utils import AnalyticalException

# Template hook points a service module may contribute tags to.
TAG_LOCATIONS = ['head_top', 'head_bottom', 'body_top', 'body_bottom']
# Rendering order buckets inside each location.
TAG_POSITIONS = ['first', None, 'last']
# Service modules that are asked to contribute nodes at import time.
TAG_MODULES = [
    'storefront.clicky',
    'storefront.mixpanel',
    'storefront.google_analytics',
]
# Disabled service modules kept for reference (a no-op string literal):
'''
'storefront.olark',
'analytical.chartbeat',
'analytical.crazy_egg',
'analytical.gosquared',
'analytical.hubspot',
'analytical.kiss_insights',
'analytical.kiss_metrics',
'analytical.optimizely',
'analytical.performable',
'analytical.reinvigorate',
'analytical.woopra',
'''

logger = logging.getLogger(__name__)
register = template.Library()


def _location_tag(location):
    """Build a no-argument template tag compiler bound to `location`."""
    def analytical_tag(parser, token):
        bits = token.split_contents()
        if len(bits) > 1:
            raise TemplateSyntaxError("'%s' tag takes no arguments" % bits[0])
        return AnalyticalNode(location)
    return analytical_tag


# Register one tag per location: analytical_head_top, analytical_body_top, ...
for loc in TAG_LOCATIONS:
    register.tag('analytical_%s' % loc, _location_tag(loc))


class AnalyticalNode(Node):
    """Template node that renders every service node registered for one
    location, concatenated in position order."""

    def __init__(self, location):
        # Instantiate the node classes collected in the module-level
        # `template_nodes` mapping (built below at import time).
        self.nodes = [node_cls() for node_cls in template_nodes[location]]

    def render(self, context):
        return "".join([node.render(context) for node in self.nodes])


def _load_template_nodes():
    """Collect node classes from every TAG_MODULES entry.

    Returns a dict mapping location -> flat, position-ordered list of node
    classes. Modules that raise AnalyticalException (e.g. not configured)
    are skipped with a debug log entry.
    """
    # Local name intentionally shadows the module-level `template_nodes`
    # that is assigned from this function's return value below.
    template_nodes = dict((l, dict((p, []) for p in TAG_POSITIONS))
                          for l in TAG_LOCATIONS)

    def add_node_cls(location, node, position=None):
        template_nodes[location][position].append(node)

    for path in TAG_MODULES:
        module = _import_tag_module(path)
        try:
            module.contribute_to_analytical(add_node_cls)
        except AnalyticalException, e:  # py2-only exception syntax
            logger.debug("not loading tags from '%s': %s", path, e)
    # Flatten the per-position buckets into one ordered list per location.
    for location in TAG_LOCATIONS:
        template_nodes[location] = sum(
            (template_nodes[location][p] for p in TAG_POSITIONS), [])
    return template_nodes


def _import_tag_module(path):
    """Import `<app>.templatetags.<lib>` for a TAG_MODULES entry."""
    app_name, lib_name = path.rsplit('.', 1)
    return import_module("%s.templatetags.%s" % (app_name, lib_name))


# Built once at import time; used by AnalyticalNode.__init__.
template_nodes = _load_template_nodes()
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Set User.last_login automatically in the DB

Revision ID: a65114e48d6f
Revises: 104b4c56862b
Create Date: 2016-06-11 00:28:39.176496
"""

from alembic import op
import sqlalchemy as sa

# Alembic revision identifiers — read by the migration runner; do not edit.
revision = 'a65114e48d6f'
down_revision = '104b4c56862b'

# The single table/column this migration touches.
_TABLE = "accounts_user"
_COLUMN = "last_login"


def upgrade():
    """Give accounts_user.last_login a database-side default of NOW()."""
    op.alter_column(_TABLE, _COLUMN, server_default=sa.func.now())


def downgrade():
    """Remove the database-side default from accounts_user.last_login."""
    op.alter_column(_TABLE, _COLUMN, server_default=None)
from man import comm
from . import NogginConstants as Constants
from . import GameStates
from .util import FSA
from . import Leds

# GameController team-color codes (mirrors the referee protocol).
TEAM_BLUE = 0
TEAM_RED = 1


class GameController(FSA.FSA):
    """FSA that mirrors the RoboCup GameController referee state.

    Each run() polls the comm game-controller data (`self.gc`), switches
    this FSA to the matching game state, and updates LEDs, team color and
    kickoff bookkeeping.
    """

    def __init__(self, brain):
        FSA.FSA.__init__(self,brain)
        self.brain = brain
        self.gc = brain.comm.gc  # live referee/game-controller data
        #jf- self.setTimeFunction(self.brain.nao.getSimulatedTime)
        self.addStates(GameStates)
        self.currentState = 'gameInitial'
        self.setName('GameController')
        self.setPrintStateChanges(True)
        self.stateChangeColor = 'cyan'
        self.setPrintFunction(self.brain.out.printf)
        # NOTE(review): this instance attribute shadows the timeRemaining()
        # method defined below — the method is unreachable on instances.
        self.timeRemaining = self.gc.timeRemaining()
        self.kickOff = self.gc.kickOff
        self.penaltyShots = False
        self.ownKickOff = False

    def run(self):
        """Poll the GC data once: update LEDs, switch to the matching FSA
        state, and track time / team color / kickoff changes."""
        self.setGCLEDS()
        self.ownKickOff = (self.gc.kickOff == self.brain.my.teamColor)
        # Penalty-shootout states and normal-game states are parallel
        # hierarchies selected by the GC secondary state.
        if self.gc.secondaryState == comm.STATE2_PENALTYSHOOT:
            if self.gc.state == comm.STATE_INITIAL:
                self.switchTo('penaltyShotsGameInitial')
            elif self.gc.state == comm.STATE_SET:
                self.switchTo('penaltyShotsGameSet')
            elif self.gc.state == comm.STATE_READY:
                self.switchTo('penaltyShotsGameReady')
            elif self.gc.state == comm.STATE_PLAYING:
                if self.gc.penalty != comm.PENALTY_NONE:
                    self.switchTo('penaltyShotsGamePenalized')
                else:
                    self.switchTo("penaltyShotsGamePlaying")
            elif self.gc.state == comm.STATE_FINISHED:
                self.switchTo('penaltyShotsGameFinished')
        elif self.gc.secondaryState == comm.STATE2_NORMAL:
            if self.gc.state == comm.STATE_INITIAL:
                self.switchTo('gameInitial')
            elif self.gc.state == comm.STATE_SET:
                self.switchTo('gameSet')
            elif self.gc.state == comm.STATE_READY:
                self.switchTo('gameReady')
            elif self.gc.state == comm.STATE_PLAYING:
                if self.gc.penalty != comm.PENALTY_NONE:
                    self.switchTo("gamePenalized")
                else:
                    self.switchTo("gamePlaying")
            elif self.gc.state == comm.STATE_FINISHED:
                self.switchTo('gameFinished')
        self.timeRemaining = self.gc.timeRemaining()
        #Set team color
        if self.gc.color != self.brain.my.teamColor:
            self.brain.my.teamColor = self.gc.color
            self.brain.makeFieldObjectsRelative()
            self.printf("Switching team color to " +
                        Constants.teamColorDict[self.brain.my.teamColor])
        if self.gc.kickOff != self.kickOff:
            self.printf("Switching kickoff to team #%g"%self.gc.kickOff +
                        " from team #%g"% self.kickOff)
            self.kickOff = self.gc.kickOff
        FSA.FSA.run(self)

    def timeRemaining(self):
        # NOTE(review): dead/broken code — the instance attribute
        # `self.timeRemaining` assigned in __init__ shadows this method,
        # and even if reached, `self.timeRemaining()` would not do what the
        # name suggests. Callers use the attribute directly instead.
        return self.timeRemaining()

    def timeSincePlay(self):
        # Elapsed time in the current half, derived from the cached
        # timeRemaining attribute (updated each run()).
        return Constants.LENGTH_OF_HALF - self.timeRemaining

    def getScoreDifferential(self):
        '''
        negative when we're losing
        '''
        # teams(color)[1] is presumably the score field — TODO confirm
        # against the comm GC interface.
        return self.brain.gameController.gc.teams(self.brain.my.teamColor)[1] -\
            self.brain.gameController.gc.teams((self.brain.my.teamColor+1)%2)[1]

    def setGCLEDS(self):
        '''
        Method to set the chest and feet according to the current
        GC states and infos
        '''
        ####### KICKOFF ######
        if (self.gc.kickOff == self.gc.team and
                (self.gc.state == comm.STATE_INITIAL or
                 self.gc.state == comm.STATE_READY or
                 self.gc.state == comm.STATE_PLAYING)):
            self.brain.leds.executeLeds(Leds.HAVE_KICKOFF_LEDS)
        else:
            self.brain.leds.executeLeds(Leds.NO_KICKOFF_LEDS)

        ###### TEAM COLOR ######
        if self.gc.color == TEAM_BLUE:
            self.brain.leds.executeLeds(Leds.TEAM_BLUE_LEDS)
        else:
            self.brain.leds.executeLeds(Leds.TEAM_RED_LEDS)

        ###### GAME STATE ######
        if self.gc.state == comm.STATE_INITIAL:
            self.brain.leds.executeLeds(Leds.STATE_INITIAL_LEDS)
        elif self.gc.state == comm.STATE_SET:
            self.brain.leds.executeLeds(Leds.STATE_SET_LEDS)
        elif self.gc.state == comm.STATE_READY:
            self.brain.leds.executeLeds(Leds.STATE_READY_LEDS)
        elif self.gc.state == comm.STATE_PLAYING:
            if self.gc.penalty != comm.PENALTY_NONE:
                self.brain.leds.executeLeds(Leds.STATE_PENALIZED_LEDS)
            else:
                self.brain.leds.executeLeds(Leds.STATE_PLAYING_LEDS)
        elif self.gc.state == comm.STATE_FINISHED:
            self.brain.leds.executeLeds(Leds.STATE_FINISHED_LEDS)
pe) assert(args is None or type(args) in [list, tuple]) assert(kwargs is None or type(kwargs) == dict) assert(timeout is None or type(timeout) in six.integer_types) assert(receive_progress is None or type(receive_progress) == bool) assert(discloseMe is None or type(discloseMe) == bool) Message.__init__(self) self.request = request self.procedure = procedure self.args = args self.kwargs = kwargs self.timeout = timeout self.receive_progress = receive_progress self.discloseMe = discloseMe @staticmethod def parse(wmsg): """ Verifies and parses an unserialized raw message into an actual WAMP message instance. :param wmsg: The unserialized raw message. :type wmsg: list :returns obj -- An instance of this class. """ ## this should already be verified by WampSerializer.unserialize ## assert(len(wmsg) > 0 and wmsg[0] == Call.MESSAGE_TYPE) if len(wmsg) not in (4, 5, 6): raise ProtocolError("invalid message length {} for CALL".format(len(wmsg))) request = check_or_raise_id(wmsg[1], "'request' in CALL") options = check_or_raise_extra(wmsg[2], "'options' in CALL") procedure = check_or_raise_uri(wmsg[3], "'procedure' in CALL") args = None if len(wmsg) > 4: args = wmsg[4] if type(args) != list: raise ProtocolError("invalid type {} for 'args' in CALL".format(type(args))) kwargs = None if len(wmsg) > 5: kwargs = wmsg[5] if type(kwargs) != dict: raise ProtocolError("invalid type {} for 'kwargs' in CALL".format(type(kwargs))) timeout = None if u'timeout' in options: option_timeout = options[u'timeout'] if type(option_timeout) not in six.integer_types: raise ProtocolError("invalid type {} for 'timeout' option in CALL".format(type(option_timeout))) if option_timeout < 0: raise ProtocolError("invalid value {} for 'timeout' option in CALL".format(option_timeout)) timeout = option_timeout receive_progress = None if u'receive_progress' in options: option_receive_progress = options[u'receive_progress'] if type(option_receive_progress) != bool: raise ProtocolError("invalid type {} for 
'receive_progress' option in CALL".format(type(option_receive_progress))) receive_progress = option_receive_progress discloseMe = None if u'disclose_me' in options: option_discloseMe = options[u'disclose_me'] if type(option_discloseMe) != bool: raise ProtocolError("invalid type {} for 'disclose_me' option in CALL".format(type(option_discloseMe))) discloseMe = option_discloseMe obj = Call(request, procedure, args = args, kwargs = kwargs, timeout = timeout, receive_progress = receive_progress, discloseMe = discloseMe) return obj def marshal(self): """ Implements :func:`autobahn.wamp.interfaces.IMessage.marshal` """ options = {} if self.timeout is not None: options[u'timeout'] = self.timeout if self.receive_progress is not None: options[u'receive_progress'] = self.receive_progress if self.discloseMe is not None: options[u'disclose_me'] = self.discloseMe if self.kwargs: return [Call.MESSAGE_TYPE, self.request, options, self.procedure, self.args, self.kwargs] elif self.args: return [Call.MESSAGE_TYPE, self.request, options, self.procedure, self.args] else: return [Call.MESSAGE_TYPE, self.request, options, self.procedure] def __str__(self): """ Implements :func:`autobahn.wamp.interfaces.IMessage.__str__` """ return "WAMP CALL Message (request = {}, procedure = {}, args = {}, kwargs = {}, timeout = {}, receive_progress = {}, discloseMe = {})".format(self.request, self.procedure, self.args, self.kwargs, self.timeout, self.receive_progress, self.discloseMe) class Cancel(Message): """ A WAMP `CANCEL` message. Format: `[CANCEL, CALL.Request|id, Options|dict]` """ MESSAGE_TYPE = 49 """ The WAMP message code for this type of message. """ SKIP = u'skip' ABORT = u'abort' KILL = u'kill' def __init__(self, request, mode = None): """ Message constructor. :param request: The WAMP request ID of the original `CALL` to cancel. :type request: int :param mode: Specifies how to cancel the call (`"skip"`, `"abort"` or `"kill"`). 
:type mode: str """ assert(type(request) in six.integer_types) assert(mode is None or type(mode) == six.text_type) assert(mode is None or mode in [self.SKIP, self.ABORT, self.KILL]) Message.__init__(self) self.request = request self.mode = mode @staticmethod def parse(wmsg): """ Verifies and parses an unserialized raw message into an actual WAMP message instance. :param wmsg: The unserialized raw message. :type wmsg: list :returns obj -- An instance of this class. """ ## this should already be verified by WampSerializer.unserialize ## assert(len(wmsg) > 0 and wmsg[0] == Cancel.MESSAGE_TYPE) if len(wmsg) != 3: raise ProtocolError("invalid message length {} for CANCEL".format(len(wmsg))) request = check_or_raise
_id(wmsg[1], "'request' in CANCEL") options = check_or_raise_extra(wmsg[2], "'options' in CANCEL") ## options ## mode = None if u'mode' in options:
option_mode = options[u'mode'] if type(option_mode) != six.text_type: raise ProtocolError("invalid type {} for 'mode' option in CANCEL".format(type(option_mode))) if option_mode not in [Cancel.SKIP, Cancel.ABORT, Cancel.KILL]: raise ProtocolError("invalid value '{}' for 'mode' option in CANCEL".format(option_mode)) mode = option_mode obj = Cancel(request, mode = mode) return obj def marshal(self): """ Implements :func:`autobahn.wamp.interfaces.IMessage.marshal` """ options = {} if self.mode is not None: options[u'mode'] = self.mode return [Cancel.MESSAGE_TYPE, self.request, options] def __str__(self): """ Implements :func:`autobahn.wamp.interfaces.IMessage.__str__` """ return "WAMP CANCEL Message (request = {}, mode = '{}'')".format(self.request, self.mode) class Result(Message): """ A WAMP `RESULT` message. Formats: * `[RESULT, CALL.Request|id, Details|dict]` * `[RESULT, CALL.Request|id, Details|dict, YIELD.Arguments|list]` * `[RESULT, CALL.Request|id, Details|dict, YIELD.Arguments|list, YIELD.ArgumentsKw|dict]` """ MESSAGE_TYPE = 50 """ The WAMP message code for this type of message. """ def __init__(self, request, args = None, kwargs = None, progress = None): """ Message constructor. :param request: The request ID of the original `CALL` request. :type request: int :param args: Positional values for application-defined event payload. Must be serializable using any serializers in use. :type args: list :param kwargs: Keyword values for application-defined event payload. Must be serializable using any serializers in use. :type kwargs: dict :param progress: If `True`, this result is a progressive call result, and subsequent results (or a final error) will follow. 
:type progress: bool """ assert(type(request) in six.integer_types) assert(args is None or type(args) in [list, tuple]) assert(kwargs is None or type(kwargs) == dict) assert(progress is None or type(progress) == bool) Message.__init__(self) self.request = request self.args = args self.kwargs = kwargs self.progress = progress @staticmeth
ublished by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU Affero Public License for more details. # # You should have received a copy of the GNU Affero Public License # along with this program. If not, see http://www.gnu.org/licenses. # # http://numenta.org/licenses/ import numpy as np import torch from PIL import Image from torch.nn.utils.rnn import pad_sequence from torch.utils.data import Sampler from torchvision import datasets class MNISTBufferedDataset(datasets.MNIST): def __init__( self, root, train=True, transform=None, target_transform=None, download=False ): super(MNISTBufferedDataset, self).__init__( root, train=train, transform=transform, target_transform=target_transform, download=download, ) def __getitem__(self, index): """ Override to allow generation of white noise for index -1 Args: index (int): Index Returns: tuple: (image, target) where target is index of the target class. 
""" if index == -1: # Noise target = np.random.randint(10) # -1 img = np.random.rand(28, 28) else: img, target = self.data[index].numpy(), int(self.targets[index]) img = Image.fromarray(img, mode="L") if self.transform is not None: img = self.transform(img) if self.target_transform is not None: target = self.target_transform(target) return img, target class MNISTSequenceSampler(Sampler): """ Loop through one or more sequences of digits Draw each digit image (based on label specified by sequence) randomly TODO: Having this work with a custom DataSet that draws random MNIST digits may be more appropriate """ def __init__( self, data_source, sequences=None, batch_size=64, random_mnist_images=True, randomize_sequence_cursors=True, max_batches=100, use_mnist_pct=1.0, noise_buffer=False, ): super(MNISTSequenceSampler, self).__init__(data_source) self.data_source = data_source self.random_mnist_images = random_mnist_images self.randomize_sequence_cursors = randomize_sequence_cursors self.use_mnist_pct = use_mnist_pct self.noise_buffer = noise_buffer self.max_batches = max_batches self.bsz = batch_size se
lf.label_indices = {} # Digit -> Indices in dataset self.label_cursors = {} # Digit -> Cursor across images for each digit sequences = list(sequences) # Avoid c
hanging underlying sequence list if self.noise_buffer: for seq in sequences: if seq[-1] != -1: seq.append(-1) self.sequences = sequences self.n_sequences = len(self.sequences) self.seq_lengths = torch.tensor([len(subseq) for subseq in self.sequences]) # Each of these stores both current and next batch state (2 x batch_size) self.sequence_id = torch.stack( (self._init_sequence_ids(), self._init_sequence_ids()) ) # Iterate over subsequences first_batch_cursors = self._init_sequence_cursors() self.sequence_cursor = torch.stack( (first_batch_cursors, first_batch_cursors) ) # Iterates over sequence items self._increment_next() self.sequences_mat = pad_sequence( torch.tensor(self.sequences), batch_first=True, padding_value=-99 ) # Get index for each digit (that appears in a passed sequence) for seq in sequences: for digit in seq: if digit != -1 and digit not in self.label_indices: mask = (data_source.targets == digit).nonzero().flatten() idx = torch.randperm(mask.size(0)) if self.use_mnist_pct < 1.0: idx = idx[: int(self.use_mnist_pct * len(idx))] self.label_indices[digit] = mask[idx] self.label_cursors[digit] = 0 def _init_sequence_ids(self): return torch.LongTensor(self.bsz).random_(0, self.n_sequences) def _init_sequence_cursors(self): if self.randomize_sequence_cursors: lengths = self.seq_lengths[self.sequence_id[0]] cursors = ( torch.FloatTensor(self.bsz).uniform_(0, 1) * lengths.float() ).long() else: cursors = torch.zeros(self.bsz).long() return cursors def _increment_next(self): # Increment cursors and select new random subsequences for those that # have terminated self.sequence_cursor[1] += 1 roll_mask = self.sequence_cursor[1] >= self.seq_lengths[self.sequence_id[1]] if roll_mask.sum() > 0: # Roll items to 0 of randomly chosen next subsequence self.sequence_id[1, roll_mask] = torch.LongTensor( 1, roll_mask.sum() ).random_(0, self.n_sequences) self.sequence_cursor[1, roll_mask] = 0 def _get_next_batch(self): """ """ # First row is current inputs inp_labels_batch 
= self.sequences_mat[ self.sequence_id[0], self.sequence_cursor[0] ] inp_idxs = [self._get_sample_image(digit.item()) for digit in inp_labels_batch] # Second row is next (predicted) inputs tgt_labels_batch = self.sequences_mat[ self.sequence_id[1], self.sequence_cursor[1] ] tgt_idxs = [self._get_sample_image(digit.item()) for digit in tgt_labels_batch] # Roll next to current self.sequence_id[0] = self.sequence_id[1] self.sequence_cursor[0] = self.sequence_cursor[1] self._increment_next() return inp_idxs + tgt_idxs def _get_sample_image(self, digit): """ Return a sample image id for digit from MNIST """ if digit == -1: # Generate white noise return -1 else: cursor = self.label_cursors[digit] if self.random_mnist_images: # If not random, always take first digit self.label_cursors[digit] += 1 indices = self.label_indices[digit] if cursor >= len(indices) - 1: # Begin sequence from beginning & shuffle self.label_cursors[digit] = cursor = 0 idx = torch.randperm(len(self.label_indices[digit])) self.label_indices[digit] = indices = self.label_indices[digit][idx] return indices[cursor].item() def __iter__(self): for _i in range(len(self)): yield self._get_next_batch() return def __len__(self): return self.max_batches if self.max_batches else len(self.data_source) def pred_sequence_collate(batch): """ """ bsz = len(batch) // 2 inp_tuples = batch[:bsz] tgt_tuples = batch[bsz:] inp_images_batch = torch.stack([item[0] for item in inp_tuples]).view(bsz, -1) tgt_images_batch = torch.stack([item[0] for item in tgt_tuples]).view(bsz, -1) inp_labels_batch = torch.tensor([item[1] for item in inp_tuples]) tgt_labels_batch = torch.tensor([item[1] for item in tgt_tuples]) return (inp_images_batch, tgt_images_batch, tgt_labels_batch, inp_labels_batch) class PTBSequenceSampler(Sampler): """ """ def __init__( self, data_source, batch_size=64, max_batches=1000000, uniform_offsets=False ): super(PTBSequenceSampler, self).__init__(None) self.batch_size = batch_size self.max_batches = 
max_batches self.data_source = data_source self.data_len = len(self.data_source) # Choose initial random offsets into PTB, one per item in batch if uniform_offsets: # Useful for evaluati
"""
    SALTS XBMC Addon
    Copyright (C) 2014 tknorris

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program.  If not, see <http://www.gnu.org/licenses/>.
"""
# Background service: watches playback started by the SALTS plugin (signalled
# through window 10000 properties), tracks progress, saves bookmarks and
# triggers a trakt sync when enough of the item was watched.
import xbmc
import xbmcaddon
import xbmcgui
from salts_lib import log_utils
from salts_lib import utils
from salts_lib.constants import MODES
from salts_lib.db_utils import DB_Connection

# Consecutive main-loop errors tolerated before the service shuts down.
MAX_ERRORS = 10

kodi = xbmcaddon.Addon(id='plugin.video.salts')
log_utils.log('Service: Installed Version: %s' % (kodi.getAddonInfo('version')))
db_connection = DB_Connection()
if kodi.getSetting('use_remote_db') == 'false' or kodi.getSetting('enable_upgrade') == 'true':
    db_connection.init_database()


class Service(xbmc.Player):
    """xbmc.Player subclass receiving playback callbacks from Kodi."""

    def __init__(self, *args, **kwargs):
        log_utils.log('Service: starting...')
        xbmc.Player.__init__(self, *args, **kwargs)
        self.win = xbmcgui.Window(10000)  # home window: cross-process props
        self.reset()

    def reset(self):
        """Clear all playback-tracking state and window properties."""
        log_utils.log('Service: Resetting...')
        self.win.clearProperty('salts.playing')
        self.win.clearProperty('salts.playing.trakt_id')
        self.win.clearProperty('salts.playing.season')
        self.win.clearProperty('salts.playing.episode')
        self.win.clearProperty('salts.playing.srt')
        # NOTE(review): this clears 'salts.playing.resume' while
        # onPlayBackStarted reads 'salts.playing.trakt_resume' — one of the
        # two keys looks wrong; confirm against the plugin side that sets it.
        self.win.clearProperty('salts.playing.resume')
        self.tracked = False
        self._totalTime = 999999  # sentinel meaning "never measured"
        self.trakt_id = None
        self.season = None
        self.episode = None
        self._lastPos = 0

    def onPlayBackStarted(self):
        """Pick up tracking info from window properties when playback that
        SALTS initiated begins; optionally enable subtitles and resume."""
        log_utils.log('Service: Playback started')
        playing = self.win.getProperty('salts.playing') == 'True'
        self.trakt_id = self.win.getProperty('salts.playing.trakt_id')
        self.season = self.win.getProperty('salts.playing.season')
        self.episode = self.win.getProperty('salts.playing.episode')
        srt_path = self.win.getProperty('salts.playing.srt')
        resume_point = self.win.getProperty('salts.playing.trakt_resume')
        if playing:  # Playback is ours
            log_utils.log('Service: tracking progress...')
            self.tracked = True
            if srt_path:
                log_utils.log('Service: Enabling subtitles: %s' % (srt_path))
                self.setSubtitles(srt_path)
            else:
                self.showSubtitles(False)

        # Poll until Kodi reports a real duration; getTotalTime raises
        # RuntimeError when nothing is playing any more.
        self._totalTime = 0
        while self._totalTime == 0:
            try:
                self._totalTime = self.getTotalTime()
            except RuntimeError:
                self._totalTime = 0
                break
            xbmc.sleep(1000)

        if resume_point:
            # resume_point is a percentage; convert to seconds.
            resume_time = float(resume_point) * self._totalTime / 100
            log_utils.log("Resume Percent: %s, Resume Time: %s Total Time: %s" % (resume_point, resume_time, self._totalTime), log_utils.LOGDEBUG)
            self.seekTime(resume_time)

    def onPlayBackStopped(self):
        """Persist a bookmark and, past 75%% watched, kick off a trakt sync."""
        log_utils.log('Service: Playback Stopped')
        if self.tracked:
            playedTime = float(self._lastPos)
            try:
                percent_played = int((playedTime / self._totalTime) * 100)
            except ZeroDivisionError:  # was a bare except; only div-by-0 can occur here
                percent_played = 0
            pTime = utils.format_time(playedTime)
            tTime = utils.format_time(self._totalTime)
            log_utils.log('Service: Played %s of %s total = %s%%' % (pTime, tTime, percent_played), log_utils.LOGDEBUG)
            if playedTime == 0 and self._totalTime == 999999:
                # Duration never measured and no progress: playback never ran.
                log_utils.log('XBMC silently failed to start playback', log_utils.LOGWARNING)
            elif playedTime >= 5:
                log_utils.log('Service: Setting bookmark on |%s|%s|%s| to %s seconds' % (self.trakt_id, self.season, self.episode, playedTime), log_utils.LOGDEBUG)
                db_connection.set_bookmark(self.trakt_id, playedTime, self.season, self.episode)
            if percent_played >= 75:
                if xbmc.getCondVisibility('System.HasAddon(script.trakt)'):
                    run = 'RunScript(script.trakt, action=sync, silent=True)'
                    xbmc.executebuiltin(run)
            self.reset()

    def onPlayBackEnded(self):
        # Natural end of playback is handled identically to a manual stop.
        log_utils.log('Service: Playback completed')
        self.onPlayBackStopped()


monitor = Service()
utils.do_startup_task(MODES.UPDATE_SUBS)

# Main service loop: once per second, run scheduled tasks and sample the
# current playback position; MAX_ERRORS consecutive failures end the service.
errors = 0
while not xbmc.abortRequested:
    try:
        isPlaying = monitor.isPlaying()
        utils.do_scheduled_task(MODES.UPDATE_SUBS, isPlaying)
        if monitor.tracked and monitor.isPlayingVideo():
            monitor._lastPos = monitor.getTime()
    except Exception as e:
        errors += 1
        if errors >= MAX_ERRORS:
            log_utils.log('Service: Error (%s) received..(%s/%s)...Ending Service...' % (e, errors, MAX_ERRORS), log_utils.LOGERROR)
            break
        else:
            log_utils.log('Service: Error (%s) received..(%s/%s)...Continuing Service...' % (e, errors, MAX_ERRORS), log_utils.LOGERROR)
    else:
        errors = 0  # any clean pass resets the error streak
    xbmc.sleep(1000)

log_utils.log('Service: shutting down...')
'''
New Integration Test for hybrid.

@author: Quarkonics
'''
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.operations.hybrid_operations as hyb_ops
import zstackwoodpecker.operations.resource_operations as res_ops
import time
import os

# Timestamp suffix keeps resource names unique across test runs.
postfix = time.strftime('%m%d-%H%M%S', time.localtime())
test_obj_dict = test_state.TestStateDict()
remote_bucket_name = 'test-bucket-%s' % postfix
test_stub = test_lib.lib_get_test_stub()
# Shared hybrid-cloud helper object driving all operations in this test.
hybrid = test_stub.HybridObject()


# Entry point invoked by the woodpecker test runner.
def test():
    # Create an OSS bucket, then exercise detach/attach round-trip.
    hybrid.add_datacenter_iz(add_datacenter_only=True)
    hybrid.add_bucket()
    hybrid.detach_bucket()
    hybrid.attach_bucket()
    test_util.test_pass('Create Attach Detach OSS Bucket Test Success')


# Environment cleanup run after the test, pass or fail.
def env_recover():
    # Only delete the bucket if this run actually created one.
    if hybrid.oss_bucket_create:
        hybrid.del_bucket()


#Will be called only if exception happens in test().
def error_cleanup():
    global test_obj_dict
    test_lib.lib_error_cleanup(test_obj_dict)
import os
import uuid

from django.db import models
from django.core.files.uploadedfile import UploadedFile
from django.forms.forms import pretty_name

from . import get_image_cropper
from . import tasks
from . import settings
from . import utils
from . import signals
from .managers import AssetManager
from .fields import AssetRealFileField

# Support both package-relative and top-level layouts for these apps.
try:
    from ..versioning import manager
except ValueError:
    from versioning import manager
try:
    from ..cms.internal_tags.models import AutoTagModel
except ValueError:
    from cms.internal_tags.models import AutoTagModel


class AssetBase(AutoTagModel):
    """
    Abstract base model for an uploaded media asset (image, document, etc.).

    Tracks the original file across a save so that replacing the file deletes
    the old one from storage, bumps the cache-bust version, re-creates crops
    (for images), and updates denormalized filename copies on related models.
    """
    # Asset type constants; also usable as ``self.IMAGE`` etc. in queries.
    UNKNOWN = 'unknown'
    IMAGE = 'image'
    DOCUMENT = 'document'
    AUDIO = 'audio'
    VIDEO = 'video'
    # Allow the project to override the choice list via settings.ASSET_TYPES.
    TYPES = settings.ASSET_TYPES and settings.ASSET_TYPES or \
        ((UNKNOWN, 'Unknown'),
         (IMAGE, 'Image'),
         (DOCUMENT, 'Document'),
         (AUDIO, 'Audio'),
         (VIDEO, 'Video'),)

    # Snapshot of ``file`` taken at __init__, used in save() to detect changes.
    __original_file = None

    title = models.CharField(max_length=255)
    file = AssetRealFileField(upload_to=utils.assets_dir)
    type = models.CharField(max_length=255, choices=TYPES, db_index=True)
    slug = models.SlugField(unique=True, max_length=255)
    # Basename of the uploaded file, kept for display/denormalization.
    user_filename = models.CharField(max_length=255)
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)
    # Cache-bust version counter, incremented on every file change.
    cbversion = models.PositiveIntegerField(editable=False)
    objects = AssetManager()

    class Meta:
        abstract = True

    def __init__(self, *args, **kwargs):
        super(AssetBase, self).__init__(*args, **kwargs)
        # Remember the file as loaded so save() can tell whether it changed.
        self.__original_file = self.file

    def rename_file(self):
        """Return whether the stored file should be renamed (hashed).

        Documents keep their original name; other types follow
        settings.HASH_FILENAME.
        """
        if self.type == self.DOCUMENT:
            return False
        return settings.HASH_FILENAME

    def url(self):
        """
        This is a wrapper of file.url
        """
        return self.file.url

    def generate_slug(self):
        """Return a new unique slug (UUID1 string)."""
        return str(uuid.uuid1())

    def assign_tag(self):
        # Hook for subclasses; intentionally a no-op here.
        pass

    def delete_real_file(self, file_obj):
        """Delete the underlying file from storage and notify listeners."""
        file_obj.storage.delete(file_obj.name)
        signals.file_removed.send(file_obj.name)

    def _can_crop(self):
        # Only images can be cropped.
        return self.type == self.IMAGE

    def reset_crops(self):
        """
        Reset all known crops to the default crop.

        If celery is configured (settings.CELERY or
        settings.USE_CELERY_DECORATOR) the task will be run async.
        """
        if self._can_crop():
            if settings.CELERY or settings.USE_CELERY_DECORATOR:
                # this means that we are using celery
                tasks.reset_crops.apply_async(args=[self.pk], countdown=5)
            else:
                tasks.reset_crops(None, asset=self)

    def ensure_crops(self, *required_crops):
        """
        Make sure a crop exists for each crop in required_crops.

        Existing crops will not be changed. If celery is configured
        (settings.CELERY or settings.USE_CELERY_DECORATOR) the task will be
        run async.
        """
        if self._can_crop():
            if settings.CELERY or settings.USE_CELERY_DECORATOR:
                # this means that we are using celery
                args = [self.pk]+list(required_crops)
                tasks.ensure_crops.apply_async(args=args, countdown=5)
            else:
                tasks.ensure_crops(None, *required_crops, asset=self)

    def create_crop(self, name, x, x2, y, y2):
        """
        Create a crop for this asset.
        """
        if self._can_crop():
            spec = get_image_cropper().create_crop(name, self.file,
                                                   x=x, x2=x2, y=y, y2=y2)
            ImageDetail.save_crop_spec(self, spec)

    def save(self, *args, **kwargs):
        """
        For new assets, creates a new slug. For updates, deletes the old
        file from storage. Calls super to actually save the object.
        """
        if not self.pk and not self.slug:
            self.slug = self.generate_slug()
        # File was replaced: remove the previous physical file.
        if self.__original_file and self.file != self.__original_file:
            self.delete_real_file(self.__original_file)
        file_changed = True
        if self.pk:
            # On update, the file only "changed" if a fresh upload is attached.
            new_value = getattr(self, 'file')
            if hasattr(new_value, "file"):
                file_changed = isinstance(new_value.file, UploadedFile)
        else:
            self.cbversion = 0
        if file_changed:
            self.user_filename = os.path.basename(self.file.name)
            self.cbversion = self.cbversion + 1
            if not self.title:
                self.title = self.user_filename
        super(AssetBase, self).save(*args, **kwargs)
        if file_changed:
            signals.file_saved.send(self.file.name)
            utils.update_cache_bust_version(self.file.url, self.cbversion)
            self.reset_crops()
        # Propagate the new file name into denormalized copies on related
        # models. NOTE(review): SwitchSchemaManager(None) presumably switches
        # to the default schema — confirm in versioning.manager.
        if self.__original_file and self.file.name != self.__original_file.name:
            with manager.SwitchSchemaManager(None):
                for related in self.__class__._meta.get_all_related_objects(
                        include_hidden=True):
                    field = related.field
                    if getattr(field, 'denormalize', None):
                        cname = field.get_denormalized_field_name(field.name)
                        if getattr(field, 'denormalize'):
                            related.model.objects.filter(**{
                                field.name: self.pk
                            }).update(**{
                                cname: self.file.name
                            })

    def delete(self, *args, **kwargs):
        """
        Deletes the actual file from storage after the object is deleted.
        Calls super to actually delete the object.
        """
        file_obj = self.file
        super(AssetBase, self).delete(*args, **kwargs)
        self.delete_real_file(file_obj)

    def __unicode__(self):
        return '%s' % (self.user_filename)


class ImageDetailBase(models.Model):
    """
    Abstract model recording one named crop of an image asset, including the
    crop rectangle (x, x2, y, y2) when the crop was made by hand.
    """
    image = models.ForeignKey(settings.ASSET_MODEL)
    width = models.PositiveIntegerField()
    height = models.PositiveIntegerField()
    name = models.CharField(max_length=255)
    # True when the crop rectangle was set manually (editable in admin).
    editable = models.BooleanField(editable=False, default=False)
    x = models.PositiveIntegerField(null=True)
    x2 = models.PositiveIntegerField(null=True)
    y = models.PositiveIntegerField(null=True)
    y2 = models.PositiveIntegerField(null=True)

    class Meta:
        abstract = True

    def __unicode__(self):
        return pretty_name(self.name)

    def get_crop_config(self):
        """Return the cropper configuration registered under this crop name."""
        return get_image_cropper().get_crop_config(self.name)

    @classmethod
    def save_crop_spec(cls, asset, spec, update_version=True):
        """Upsert the crop row described by ``spec`` for ``asset``.

        Optionally bumps the asset's cache-bust version so stale crops are
        not served from cache.
        """
        if spec:
            cdict = spec.to_dict()
            updated = cls.objects.filter(image=asset,
                                         name=cdict['name']).update(**cdict)
            if not updated:
                cls(image=asset, **cdict).save()
            if update_version:
                asset.__class__.objects.filter(pk=asset.pk
                    ).update(cbversion=models.F('cbversion')+1)


class Asset(AssetBase):
    # Concrete asset model.
    class Meta:
        abstract = False


class ImageDetail(ImageDetailBase):
    # Concrete crop-detail model.
    class Meta:
        abstract = False
#!/usr/bin/env python

#######################################################
# Copyright (c) 2015, ArrayFire
# All rights reserved.
#
# This file is distributed under 3-clause BSD license.
# The complete license agreement can be obtained at:
# http://arrayfire.com/licenses/BSD-3-Clause
########################################################

import arrayfire as af

from . import _util


def simple_algorithm(verbose=False):
    """Smoke-test ArrayFire reductions, scans, sorts and set operations."""
    display_func = _util.display_func(verbose)
    print_func = _util.print_func(verbose)

    a = af.randu(3, 3)
    k = af.constant(1, 3, 3, dtype=af.Dtype.u32)
    af.eval(k)

    # Whole-array reductions down to scalars.
    print_func(af.sum(a), af.product(a), af.min(a), af.max(a),
               af.count(a), af.any_true(a), af.all_true(a))

    for dim in (0, 1):
        display_func(af.sum(a, dim))

    # Keyed reductions: a 0 in the key vector starts a new segment.
    rk = af.constant(1, 3, dtype=af.Dtype.u32)
    rk[2] = 0
    af.eval(rk)
    for keyed_reduce in (af.sumByKey, af.productByKey, af.minByKey,
                         af.maxByKey, af.anyTrueByKey, af.allTrueByKey,
                         af.countByKey):
        for dim in (0, 1):
            display_func(keyed_reduce(rk, a, dim=dim))

    # Dimension-wise reductions plus the inclusive prefix sum (accum).
    for reduce_fn in (af.product, af.min, af.max, af.count,
                      af.any_true, af.all_true, af.accum):
        for dim in (0, 1):
            display_func(reduce_fn(a, dim))

    display_func(af.scan(a, 0, af.BINARYOP.ADD))
    display_func(af.scan(a, 1, af.BINARYOP.MAX))
    display_func(af.scan_by_key(k, a, 0, af.BINARYOP.ADD))
    display_func(af.scan_by_key(k, a, 1, af.BINARYOP.MAX))

    for ascending in (True, False):
        display_func(af.sort(a, is_ascending=ascending))

    # d holds NaNs where the two thresholded masks disagree (0 / 0).
    b = (a > 0.1) * a
    c = (a > 0.4) * a
    d = b / c
    print_func(af.sum(d))
    print_func(af.sum(d, nan_val=0.0))
    display_func(af.sum(d, dim=0, nan_val=0.0))

    for ascending in (True, False):
        val, idx = af.sort_index(a, is_ascending=ascending)
        display_func(val)
        display_func(idx)

    b = af.randu(3, 3)
    for ascending in (True, False):
        keys, vals = af.sort_by_key(a, b, is_ascending=ascending)
        display_func(keys)
        display_func(vals)

    # Set operations over unique vectors.
    c = af.randu(5, 1)
    d = af.randu(5, 1)
    cc = af.set_unique(c, is_sorted=False)
    dd = af.set_unique(af.sort(d), is_sorted=True)
    display_func(cc)
    display_func(dd)
    for unique in (True, False):
        display_func(af.set_union(cc, dd, is_unique=unique))
    for unique in (True, False):
        display_func(af.set_intersect(cc, cc, is_unique=unique))


_util.tests["algorithm"] = simple_algorithm
#!/usr/bin/env python
"""Import MBTA NextBus vehicle-location XML dumps into an HDF5 table."""

import datetime
import logging
import math
import socket
import tables
import xml.etree.ElementTree as ET

logging.basicConfig(filename = 'mbta_daemon.log', level=logging.INFO)
logger = logging.getLogger('xml2hdf5')

# Python 2/3 compatibility: 'long' does not exist on Python 3; plain int is
# unbounded there (and auto-promotes on Python 2), so int is a safe substitute.
try:
    long
except NameError:
    long = int


class VehicleLocation(tables.IsDescription):
    """PyTables row schema for one vehicle position report."""
    vehicleID = tables.StringCol(4)
    route = tables.StringCol(8)
    direction = tables.StringCol(16)
    latitude = tables.Float64Col()   #Reported latitude
    longitude = tables.Float64Col()  #Reported longitude
    time = tables.Float64Col()       #Time stamp in seconds since epoch time
    heading = tables.UInt16Col()     #Heading in degrees


def parse_mbta_xml(database, thefile, presentData = None):
    """
    Parses MBTA XML data and adds it to a HDF5 database.

    Inputs:
    database: Handle to the PyTables VehicleLocations table
    thefile: Name of XML file to parse
    presentData: A dictionary hash of present data keyed on
        (vehicleID, time), to save time on the duplicate check.
        If absent, will use database queries (much slower).

    Returns the updated presentData hash (or None when it was not supplied /
    the file could not be parsed).
    """
    try:
        tree = ET.parse(thefile)
        root = tree.getroot()
    except ET.ParseError:
        #Error parsing XML content of the file
        logger.error('Could not find root of XML file: %s', thefile)
        return

    #Part 1. Get epoch time to nearest second
    #        MBTA reports in whole units of milliseconds
    timeData = root.find('lastTime')
    if timeData is None:
        #Maybe XML returned an error of some sort
        logger.warning('XML file %s does not have time data', thefile)
        return
    report_time = long(timeData.attrib['time'][:-3])

    #Part 2. Parse vehicle location data.
    for thevehicle in root.iter('vehicle'):
        rawdata = thevehicle.attrib  #Raw MBTA data
        data = {}
        try:
            #Bus was here at this epoch time
            data['time'] = report_time - long(rawdata['secsSinceReport'])
            data['vehicleID'] = rawdata['id']
            data['route'] = rawdata['routeTag']
            data['direction'] = rawdata['dirTag']
            data['latitude'] = rawdata['lat']
            data['longitude'] = rawdata['lon']
            data['heading'] = rawdata['heading']
        except KeyError:
            # BUG FIX: the original fell through with a partially-filled
            # record, which would either KeyError below or store junk.
            # Skip incomplete reports instead.
            logger.warning('Vehicle report in %s missing fields; skipped',
                           thefile)
            continue

        #Part 3. Make sure record is not a duplicate
        if presentData is None:
            #No hashes. Query database to check that this record wasn't
            #already reported.
            queryString = ('((vehicleID == "%(vehicleID)s") & '
                           '(time == %(time)s))') % data
            try:
                query = database.getWhereList(queryString)
            except tables.exceptions.HDF5ExtError:
                #Gets thrown when HDF5 file is open and being written to
                logger.critical("Could not get file lock on HDF5 file. Abort.")
                import sys
                sys.exit()
            if len(query) == 0:
                vehiclePosition = database.row
                for key, value in data.items():
                    vehiclePosition[key] = value
                vehiclePosition.append()
            else:
                assert len(query) == 1, "OMG COLLISION"
        else:
            #Use hashes to check if record is already reported
            if (data['vehicleID'], data['time']) not in presentData:
                vehiclePosition = database.row
                for key, value in data.items():
                    vehiclePosition[key] = value
                vehiclePosition.append()
                presentData[data['vehicleID'], data['time']] = True

    database.flush()
    logger.info('Parsed data from XML file: %s', thefile)
    return presentData


def ParseAll(theHDF5FileName = 'mbta_trajectories.h5', Cleanup = True):
    """Parse every *.xml file in the working directory into the HDF5 table.

    An abstract UNIX socket serves as an inter-process lock so two writers
    never open the HDF5 file simultaneously. When Cleanup is True each XML
    file is deleted after it has been processed.
    """
    import glob, os
    try:
        s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        ## Create an abstract socket, by prefixing it with null.
        s.bind('\0mbta_hdf5_writer_'+theHDF5FileName)
        compressionOptions = tables.Filters(complevel=9, complib='blosc')
        f = tables.openFile(theHDF5FileName, mode = 'a',
                            filters = compressionOptions,
                            title = 'Historical MBTA bus data')
        # Consistency fix: use the module logger rather than the root logger.
        logger.debug('Lock acquired on %s', theHDF5FileName)
    except socket.error:
        logger.error('Lock could not be acquired on %s', theHDF5FileName)
        return

    try:
        try:
            thetable = f.root.VehicleLocations
        except tables.exceptions.NoSuchNodeError:
            thetable = f.createTable('/', 'VehicleLocations', VehicleLocation,
                                     'MBTA vehicle positions',
                                     filters = compressionOptions)
            #Create table indexers
            thetable.cols.time.createIndex()
            thetable.cols.vehicleID.createIndex()

        #Hash current data
        presentData = {}
        for row in thetable:
            presentData[row['vehicleID'], row['time']] = True

        for filename in sorted(glob.glob('*.xml')):
            # BUG FIX: a parse failure used to reset presentData to None,
            # silently degrading every later file to slow database queries.
            result = parse_mbta_xml(thetable, filename, presentData)
            if result is not None:
                presentData = result
            if Cleanup:
                # NOTE(review): files are deleted even when parsing failed —
                # confirm this is intended (the raw data is lost).
                os.unlink(filename)
    finally:
        # BUG FIX: always close the HDF5 file, even if parsing raises.
        f.close()


if __name__ == '__main__':
    ParseAll()
json_errors @cache.cached(timeout=60, key_prefix=cache_key) #@log_request("aggregate", "aggregates") def cubes_model(star_name): cubes_arg = request.args.get("cubes", None) try: cubes = cubes_arg.split("|") except: raise RequestError("Parameter cubes with value '%s'should be a valid cube names separated by a '|'" % (cubes_arg) ) if len (cubes) > 5: raise RequestError("You can only join 5 cubes together at one time") g.cube = get_complex_cube(star_name, cubes) hier_limits = None # Copy from the application context #g.json_record_limit = current_app.slicer.json_record_limit g.json_record_limit = 10000 if "prettyprint" in request.args: g.prettyprint = str_to_bool(request.args.get("prettyprint")) else: g.prettyprint = current_app.slicer.prettyprint response = g.cube.to_dict(expand_dimensions=True, with_mappings=False, full_attribute_names=True, create_label=True, hierarchy_limits=hier_limits) response["features"] = workspace.cube_features(g.cube) return jsonify(response) def xlschecker(*args, **kwargs): if "format" in request.args: if request.args.get("format") in ['excel', 'csv']: return True return False @blueprint.route("/api/slicer/cube/<star_name>/cubes_aggregate", methods=["JSON", "GET"]) @requires_complex_browser @api_json_errors @cache.cached(timeout=60, key_prefix=cache_key, unless=xlschecker) def aggregate_cubes(star_name): cubes_arg = request.args.get("cubes", None) try: cubes = cubes_arg.split("|") except: raise RequestError("Parameter cubes with value '%s'should be a valid cube names separated by a '|'" % (cubes_arg) ) if len (cubes) > 5: raise RequestError("You can only join 5 cubes together at one time") g.cube = get_complex_cube(star_name, cubes) g.browser = current_app.cubes_workspace.browser(g.cube) cube = g.cube output_format = validated_parameter(request.args, "format", values=["json", "csv", "excel"], default="json") header_type = validated_parameter(request.args, "header", values=["names", "labels", "none"], default="labels") fields_str = 
request.args.get("fields") if fields_str: fields = fields_str.lower().split(',') else: fields = None # Aggregates # ---------- aggregates = [] for agg in request.args.getlist("aggregates") or []: aggregates += agg.split("|") drilldown = [] ddlist = request.args.getlist("drilldown") if ddlist: for ddstring in ddlist: drilldown += ddstring.split("|") #this handles cuts with geometry__time prepare_cell_cubes_ext(restrict=False) prepare_cell("split", "split") result = g.browser.aggregate(g.cell, aggregates=aggregates, drilldown=drilldown, split=g.split, page=g.page, page_size=g.page_size, order=g.order) # Hide cuts that were generated internally (default: don't) if current_app.slicer.hide_private_cuts: result.cell = result.cell.public_cell() # Copy from the application context #g.json_record_limit = current_app.slicer.json_record_limit g.json_record_limit = 10000 if "prettyprint" in request.args: g.prettyprint = str_to_bool(request.args.get("prettyprint")) else: g.prettyprint = current_app.slicer.prettyprint if output_format == "json": resultdict= result.to_dict() tempcells = list(result._cells) resultdict['cells'] = tempcells resultdict['cell'] = list(resultdict['cell']) if "cluster" in request.args: clusteragg = request.args.get('clusteragg', 'avg') if len(cubes) > 1 or len(cubes) < 1: log.warn("cluster must have one and only one cube. This call had %s"%str(cubes)) if clusteragg in ['avg', 'min', 'max', 'sum']: clusterfield = "%s__amount_%s"%(cubes[0], clusteragg,) numclusters = request.args.get('numclusters',5) tempresult = get_cubes_breaks(resultdict['cells'], clusterfield, method=request.args.get('cluster'), k=numclusters) tempresult['data'] = list(tempresult['data']) resultdict.set('cluster', tempresult) resp = Response(response=json.dumps(resultdict), status=200, \ mimetype="application/json") return(resp) elif output_format not in ["csv","excel"]: raise RequestError("unknown respon
se format '%s'" % output_format) # csv
if header_type == "names": header = result.labels elif header_type == "labels": header = [] for l in result.labels: # TODO: add a little bit of polish to this if l == SPLIT_DIMENSION_NAME: header.append('Matches Filters') else: header += [ attr.label or attr.name for attr in cube.get_attributes([l], aggregated=True) ] else: header = None fields = result.labels try: filename_output = cubes[0] + "_" + datetime.now().strftime("%Y-%m-%d") except: filename_output = "aggregate_" + datetime if output_format == "excel": output_string = xls_generator(result, fields, include_header=bool(header), header=header) headers = {"Content-Disposition": 'attachment; filename="' + filename_output + '.xlsx"'} return Response(output_string, mimetype="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", headers=headers) else: generator = csv_generator(result, fields, include_header=bool(header), header=header) headers = {"Content-Disposition": 'attachment; filename="' + filename_output + '.csv"'} return Response(generator, mimetype='text/csv', headers=headers) @blueprint.route("/api/slicer/cube/<star_name>/cubes_facts", methods=["JSON", "GET"]) @requires_complex_browser @api_json_errors @cache.cached(timeout=60, key_prefix=cache_key) #@log_request("facts", "fields") def cubes_facts(star_name): cubes_arg = request.args.get("cubes", None) try: cubes = cubes_arg.split("|") except: raise RequestError("Parameter cubes with value '%s'should be a valid cube names separated by a '|'" % (cubes_arg) ) if len (cubes) > 5: raise RequestError("You can only join 5 cubes together at one time") g.cube = get_complex_cube(star_name, cubes) g.browser = current_app.cubes_workspace.browser(g.cube) # Copy from the application context g.json_record_limit = current_app.slicer.json_record_limit if "prettyprint" in request.args: g.prettyprint = str_to_bool(request.args.get("prettyprint")) else: g.prettyprint = current_app.slicer.prettyprint # Request parameters fields_str = 
request.args.get("fields") if fields_str: fields = fields_str.split(',') else: fields = None # fields contain attribute names if fields: attributes = g.cube.get_attributes(fields) else: attributes = g.cube.all_attributes # Construct the field list fields = [attr.ref() for attr in attributes] # Get the result facts = g.browser.facts(g.cell, fields=fields, order=g.order,
# Transformer/Utilities/__init__.py
import bcrypt
from hashlib import sha512
from helptux import db, login_manager


class Role(db.Model):
    """A named permission group that can be attached to users."""
    __tablename__ = 'roles'
    id = db.Column(db.Integer, primary_key=True)
    role = db.Column(db.String(255), index=True, unique=True)

    def __repr__(self):
        return '<Role {0}>'.format(self.role)

    def __init__(self, role):
        self.role = role


# Association table for the many-to-many User <-> Role relationship.
users_roles = db.Table('users_roles',
                       db.Column('user_id', db.Integer, db.ForeignKey('users.id')),
                       db.Column('role_id', db.Integer, db.ForeignKey('roles.id'))
                       )


class User(db.Model):
    """Application user with a bcrypt-hashed password.

    Implements the Flask-Login member protocol (is_active, get_id,
    is_authenticated, ...).
    """
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(255), index=True, unique=True, nullable=False)
    email = db.Column(db.String(255), index=True, unique=True, nullable=False)
    password_hash = db.Column(db.String(), nullable=False)
    posts = db.relationship('Post', backref='author', lazy='dynamic')
    authenticated = db.Column(db.Boolean, default=False)
    roles = db.relationship('Role', secondary=users_roles,
                            primaryjoin=(users_roles.c.user_id == id),
                            secondaryjoin=(users_roles.c.role_id == Role.id),
                            backref=db.backref('users', lazy='dynamic'),
                            lazy='dynamic')

    def __init__(self, email, password):
        self.email = email
        # Username defaults to the e-mail address.
        self.username = self.email
        self.set_password(password)

    def __repr__(self):
        return '<User {0}>'.format(self.username)

    def output_obj(self):
        """Return a JSON-serializable summary of this user."""
        return {
            'id': self.id,
            'username': self.username,
            'posts': [p.id for p in self.posts],
            'roles': [r.id for r in self.roles]
        }

    def set_password(self, input_password):
        """Hash the plaintext password with bcrypt and store it.

        BUG FIX: bcrypt.hashpw returns bytes; decode before assigning to the
        String column so the value round-trips through the database as text.
        """
        bit_input = input_password.encode('utf-8')
        self.password_hash = bcrypt.hashpw(bit_input, bcrypt.gensalt()).decode('utf-8')

    def verify_password(self, input_password):
        """Return True when the plaintext password matches the stored hash.

        BUG FIX: the original compared
        ``bcrypt.hashpw(bytes, self.password_hash) == self.password_hash``,
        which mixes bytes with the str column value (TypeError on Python 3)
        and is not a constant-time comparison. bcrypt.checkpw handles both.
        Accepts bytes hashes too, for rows written before set_password
        decoded its result.
        """
        stored = self.password_hash
        if isinstance(stored, str):
            stored = stored.encode('utf-8')
        return bcrypt.checkpw(input_password.encode('utf-8'), stored)

    def is_active(self):
        # All accounts are active; no suspension flag exists in this model.
        return True

    def is_anonymous(self):
        return False

    def get_id(self):
        # Flask-Login requires a unicode id.
        return str(self.id)

    def is_authenticated(self):
        return self.authenticated

    def has_role(self, role_name):
        """Return True when the user holds the role named ``role_name``."""
        for role in self.roles:
            if role.role == role_name:
                return True
        return False
import csv
import io

from openpyxl import load_workbook

from dwarfsquad.lib.build.from_export import build_compound_methods, build_lots_and_levels
from dwarfsquad.lib.build.from_export.build_assay_configuration import build_assay_configuration
from dwarfsquad.lib.build.from_export.build_rulesettings import add_rules_to_methods
from dwarfsquad.lib.export.export_rulesettings import generate_rule_schemas
from dwarfsquad.lib.macros.generate_macros import generate_macros


def build_full_ac(path_to_xlsx):
    """Build a complete assay configuration from an .xlsx workbook.

    The workbook must contain Assay, Compound, Lots and Rule sheets
    (enforced by validate_workbook).
    """
    wb = load_workbook(path_to_xlsx)
    validate_workbook(wb)
    # wb[name] is the modern openpyxl accessor; get_sheet_by_name is deprecated.
    ac = build_assay_configuration(read_csv_from_sheet(wb['Assay']))
    ac.compound_methods = build_compound_methods(read_csv_from_sheet(wb['Compound']))
    ac.lots = build_lots_and_levels(read_csv_from_sheet(wb['Lots']))
    ac.compound_methods = add_rules_to_methods(read_csv_from_sheet(wb['Rule']),
                                               ac.compound_methods)
    ac.qa_rule_schemas = generate_rule_schemas(ac)
    if not ac.macros:
        ac.macros = generate_macros(ac)
    return ac


def get_column_value(c):
    """Render a cell's value as a string; numeric values round to 8 places.

    BUG FIX: the original tested ``if c.value:``, so cells holding 0, 0.0 or
    False were rendered as '' and silently lost. Only empty cells (None)
    map to ''.
    """
    if c.value is None:
        return ''
    try:
        return str(round(c.value, 8))
    except TypeError:
        # Non-numeric values (str, datetime, ...) pass through unchanged.
        return str(c.value)


def read_csv_from_sheet(worksheet):
    """Serialize a worksheet to CSV text, then parse it into a list of dicts
    keyed by the header row.

    BUG FIX: rows are written with csv.writer so cell values containing
    commas, quotes or newlines are properly quoted instead of corrupting
    the row (the original joined values with a bare ',').
    """
    stream = io.StringIO()
    writer = csv.writer(stream)
    for row in worksheet.rows:
        writer.writerow([get_column_value(c) for c in row])
    reader = csv.DictReader(stream.getvalue().splitlines())
    return [r for r in reader]


def validate_workbook(wb):
    """Assert that all four required sheets are present in the workbook."""
    assert 'Assay' in wb
    assert 'Compound' in wb
    assert 'Lots' in wb
    assert 'Rule' in wb
ute_import, unicode_literals import flask import os import logging from flask_heroku import Heroku from flask_redis import Redis from flask_sslify import SSLify from flask_sqlalchemy import SQLAlchemy from raven.contrib.flask import Sentry from werkzeug.contrib.fixers import ProxyFix from freight.api.controller import ApiController from freight.constants import PROJECT_ROOT from freight.utils.celery import ContextualCelery api = ApiController(prefix='/api/0') db = SQLAlchemy(session_options={}) celery = ContextualCelery() heroku = Heroku() redis = Redis() sentry = Sentry(logging=True, level=logging.WARN) def configure_logging(app): logging.getLogger().setLevel(getattr(logging, app.config['LOG_LEVEL'])) def create_app(_read_config=True, **config): from kombu import Queue app = flask.Flask( __name__, static_folder=None, template_folder=os.path.join(PROJECT_ROOT, 'templates')) # Utilized for sessions and other secrets # NOTE: This key is insecure and you should override it on the server app.config['SECRET_KEY'] = 't\xad\xe7\xff%\xd2.\xfe\x03\x02=\xec\xaf\\2+\xb8=\xf7\x8a\x9aLD\xb1' if 'SECRET_KEY' in os.environ: app.config['SECRET_KEY'] = os.environ['SECRET_KEY'] # The api key to authorize end users against this system. 
# NOTE: This key is insecure and you should override it on the server app.config['API_KEY'] = '3e84744ab2714151b1db789df82b41c0021958fe4d77406e9c0947c34f5c5a70' if 'API_KEY' in os.environ: app.config['API_KEY'] = os.environ['API_KEY'] # The private key to use when cloning repositories # TODO(dcramer): this should support an on-disk option, as well as be # possible to override per repo app.config['SSH_PRIVATE_KEY'] = os.environ.get('SSH_PRIVATE_KEY', '').replace("\\n", "\n") app.config['FREIGHT_URL'] = os.environ.get('FREIGHT_URL', '').rstrip('/') if 'REDISCLOUD_URL' in os.environ: app.config['REDIS_URL'] = os.environ['REDISCLOUD_URL'] app.config['WORKSPACE_ROOT'] = os.environ.get('WORKSPACE_ROOT', '/tmp') app.config['DEFAULT_TIMEOUT'] = int(os.environ.get('DEFAULT_TIMEOUT', 300)) app.config['LOG_LEVEL'] = os.environ.get('LOG_LEVEL', 'INFO' if config.get('DEBUG') else 'ERROR') # Currently authentication requires Google app.config['GOOGLE_CLIENT_ID'] = os.environ.get('GOOGLE_CLIENT_ID') app.config['GOOGLE_CLIENT_SECRET'] = os.environ.get('GOOGLE_CLIENT_SECRET') app.config['GOOGLE_DOMAIN'] = os.environ.get('GOOGLE_DOMAIN') # Generate a GitHub token via Curl: # curlish https://api.github.com/authorizations \ # -u your-username \ # -X POST \ # -J scopes='repo' \ # -J note='freight' app.config['GITHUB_TOKEN'] = os.environ.get('GITHUB_TOKEN') app.config['GITHUB_API_ROOT'] = 'https://api.github.com' app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True app.config['SQLALCHEMY_POOL_SIZE'] = 60 app.config['SQLALCHEMY_MAX_OVERFLOW'] = 20 if 'SQLALCHEMY_DATABASE_URI' in os.environ: app.config['SQLALCHEMY_DATABASE_URI'] = os.environ['SQLALCHEMY_DATABASE_URI'] app.config['BROKER_TRANSPORT'] = None if 'BROKER_URL' in os.environ: app.config['BROKER_URL'] = os.environ['BROKER_URL'] app.config['CELERY_ACCEPT_CONTENT'] = ['json'] app.config['CELERY_ACKS_LATE'] = True app.config['CELERY_DEFAULT_QUEUE'] = "default" app.config['CELERY_DEFAULT_EXCHANGE'] = "default" 
app.config['CELERY_DEFAULT_EXCHANGE_TYPE'] = "direct" app.config['CELERY_DEFAULT_ROUTING_KEY'] = "default" app.config['CELERY_DISABLE_RATE_LIMITS'] = True app.config['CELERY_IGNORE_RESULT'] = True app.config['CELERY_RESULT_BACKEND'] = None app.config['CELERY_RESULT_SERIALIZER'] = 'json' app.config['CELERY_SEND_EVENTS'] = False app.config['CELERY_TASK_RESULT_EXPIRES'] = 1 app.config['CELERY_TASK_SERIALIZER'] = 'json' app.config['CELERY_TIMEZONE'] = 'UTC' app.config['CELERYD_PREFETCH_MULTIPLIER'] = 1 app.config['CELERYD_MAX_TASKS_PER_CHILD'] = 10000 app.config['CELERY_QUEUES'] = ( Queue('default', routing_key='default'), Queue('freight.tasks', routing_key='freight.tasks'), ) app.config['CELERY_IMPORTS'] = ( 'freight.tasks', ) app.config['CELERY_ROUTES'] = { 'freight.execute_task': { 'queue': 'freight.tasks', 'routing_key': 'freight.tasks', }, } app.config['SENTRY_INCLUDE_PATHS'] = [ 'ds', ] # We don't support non-proxied installs app.wsgi_app = ProxyFix(app.wsgi_app) # Pull in Heroku configuration heroku.init_app(app) if 'DYNO' in os.environ: # XXX: the released version of flask-sslify does not support init_app SSLify(app) # Set any remaining defaults that might not be present yet if not app.config.get('SQLALCHEMY_DATABASE_URI'): app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql:///freight' if not app.config.get('BROKER_URL'): app.config['BROKER_URL'] = 'redis://localhost/0' app.config.update(config) if _read_config: if os.environ.get('FREIGHT_CONF'): # FREIGHT_CONF=/etc/freight.conf.py app.config.from_envvar('FREIGHT_CONF') else: # Look for ~/.freight/freight.conf.py path = os.path.normpath(os.path.expanduser('~/.freight/freight.conf.py')) app.config.from_pyfile(path, silent=True) configure_logging(app) configure_sentry(app) configure_api(app) configure_celery(app) configure_redis(app) configure_sqlalchemy(app) configure_web_routes(app) return app def configure_api(app): from freight.api.controller import ApiCatchall from freight.api.app_details import 
AppDetailsApiView from freight.api.app_index import AppIndexApiView from freight.api.stats import StatsApiView from freight.api.task_details import TaskDetailsApiView from freight.api.task_index import TaskIndexApiView from freight.api.task_log import TaskLogApiView api.add_resource(AppIndexApiView, '/apps/') api.add_resource(AppDetailsApiView, '/apps/<app_id>/') api.add_resource(StatsApiView, '/stats/') api.add_resource(TaskIndexApiView, '/tasks/') api.add_resource(TaskDetailsApiView, '/tasks/<task_id>/') api.add_resource(TaskLogApiView, '/tasks/<task_id>/log/') # catchall should be the last resource api.add_resource(ApiCatchall, '/<path:path>') # init must be called after routes are registered api.init_app(app) def configure_celery(app): celery.init_app(app) def configure_redis(app): redis.init_app(app) def configure_sentry(app): from flask import session from raven.contrib.celery import register_signal, register_logger_signal sentry.init_app(app) @app.before_request def capture_user(*args, **kwargs): if 'uid' in session: sentry.client
.user_context({ 'id': session['uid'], 'email': session['email'], }) register_signal(sentry.client)
register_logger_signal(sentry.client) def configure_sqlalchemy(app): db.init_app(app) def configure_web_routes(app): from freight.web.auth import AuthorizedView, LoginView, LogoutView from freight.web.index import IndexView from freight.web.static import StaticView static_root = os.path.join(PROJECT_ROOT, 'dist') app.add_url_rule( '/static/<path:filename>', view_func=StaticView.as_view(b'static', root=static_root)) app.add_url_rule( '/auth/login/', view_func=LoginView.as_view(b'login', authorized_url='authorized')) app.add_url_rule( '/auth/logout/', view_func=LogoutView.as_view(b'logout', complete_url='index')) app.add_url_rule( '/auth/complete/', view_func=AuthorizedView.as_view(b'authorized', authorized_url='authorized', complete_url='index')) index_view = IndexView.as_view(b'index', login_url='login') app.add_url_rule('/', view_func=index_view) app.add_url_rule('/<path:path>', view_func=index_view
#!/usr/bin/env python

# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Convert a NIC config template from OS::Heat::SoftwareConfig to an
OS::Heat::Value resource, preserving YAML comments where possible."""

import argparse
import collections
import datetime
import os
import re
import shutil
import sys
import yaml


def parse_opts(argv):
    """Parse command-line options; -t/--template is required."""
    parser = argparse.ArgumentParser(
        description='Convert to new NIC config templates with '
                    'OS::Heat::Value resources.')
    parser.add_argument('-t', '--template', metavar='TEMPLATE_FILE',
                        # Typo fix: "conver." -> "convert."
                        help=("Existing NIC config template to convert."),
                        required=True)
    parser.add_argument('--discard-comments', metavar='DISCARD_COMMENTS',
                        help="Discard comments from the template. (The "
                             "scripts functions to keep YAML file comments in "
                             "place, does not work in all scenarios.)",
                        default=False)
    opts = parser.parse_args(argv[1:])
    return opts


def to_commented_yaml(filename):
    """Convert '# ...' comments into 'comment<num>_<indent>: ...' YAML
    entries so they survive a load/dump round trip."""
    out_str = ''
    last_non_comment_spaces = ''
    with open(filename, 'r') as f:
        comment_count = 0
        for line in f:
            # skip blank line
            if line.isspace():
                continue
            char_count = 0
            spaces = ''
            for char in line:
                char_count += 1
                if char == ' ':
                    # Keep counting leading spaces. (The original had a bare
                    # 'next' here — a no-op expression in Python; removed.)
                    spaces += ' '
                elif char == '#':
                    # Whole-line comment: encode it as a YAML mapping entry at
                    # the indent of the last non-comment line.
                    comment_count += 1
                    comment = line[char_count:-1]
                    last_non_comment_spaces = spaces
                    out_str += "%scomment%i_%i: '%s'\n" % (
                        last_non_comment_spaces, comment_count, len(spaces),
                        comment)
                    break
                else:
                    # Regular content line; check for a trailing inline comment.
                    last_non_comment_spaces = spaces
                    out_str += line

                    # inline comments check
                    m = re.match(".*:.*#(.*)", line)
                    if m:
                        comment_count += 1
                        out_str += "%s inline_comment%i: '%s'\n" % (
                            last_non_comment_spaces, comment_count, m.group(1))
                    break

    with open(filename, 'w') as f:
        f.write(out_str)
    return out_str


def to_normal_yaml(filename):
    """Convert 'comment<num>: ...' entries back to normal '#' comments."""
    with open(filename, 'r') as f:
        data = f.read()

    out_str = ''
    next_line_break = False
    for line in data.split('\n'):
        # get_input not supported by run-os-net-config.sh script
        line = line.replace('get_input: ', '')
        # Normal comments
        m = re.match(" +comment[0-9]+_([0-9]+): '(.*)'.*", line)
        # Inline comments
        i = re.match(" +inline_comment[0-9]+: '(.*)'.*", line)
        if m:
            if next_line_break:
                out_str += '\n'
                next_line_break = False
            # Re-indent using the count captured when the comment was encoded.
            for x in range(0, int(m.group(1))):
                out_str += " "
            out_str += "#%s\n" % m.group(2)
        elif i:
            out_str += " #%s\n" % i.group(1)
            next_line_break = False
        else:
            if next_line_break:
                out_str += '\n'
            out_str += line
            next_line_break = True
    if next_line_break:
        out_str += '\n'

    with open(filename, 'w') as f:
        f.write(out_str)
    return out_str


class TemplateDumper(yaml.SafeDumper):
    """Dumper that keeps mapping order and folds long description strings."""

    def represent_ordered_dict(self, data):
        return self.represent_dict(data.items())

    def description_presenter(self, data):
        # Use folded style ('>') only for long strings.
        if len(data) <= 80:
            return self.represent_scalar('tag:yaml.org,2002:str', data)
        return self.represent_scalar('tag:yaml.org,2002:str', data, style='>')


class TemplateLoader(yaml.SafeLoader):
    """Loader that preserves mapping order via OrderedDict."""

    def construct_mapping(self, node):
        self.flatten_mapping(node)
        return collections.OrderedDict(self.construct_pairs(node))


TemplateDumper.add_representer(str, TemplateDumper.description_presenter)
TemplateDumper.add_representer(bytes, TemplateDumper.description_presenter)
TemplateDumper.add_representer(collections.OrderedDict,
                               TemplateDumper.represent_ordered_dict)
TemplateLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
                               TemplateLoader.construct_mapping)


def write_template(template, filename=None):
    """Dump ``template`` to ``filename`` using the ordered dumper."""
    with open(filename, 'w') as f:
        yaml.dump(template, f, TemplateDumper, width=120,
                  default_flow_style=False)


def validate_template(template):
    """Raise RuntimeError unless ``template`` is an existing regular file."""
    if not os.path.exists(template):
        raise RuntimeError('Template not provided.')
    if not os.path.isfile(template):
        # BUG FIX: the original omitted the format argument, so the error
        # message printed a literal '%s'.
        raise RuntimeError('Template %s is not a file.' % template)


def backup_template(template):
    """Copy the template aside with a timestamped suffix before modifying."""
    extension = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
    backup_filename = os.path.realpath(template) + '.' + extension
    if os.path.exists(backup_filename):
        # Typo fix: "Backupe" -> "Backup"
        raise RuntimeError('Backup file: %s already exists. Aborting!'
                           % backup_filename)
    shutil.copyfile(template, backup_filename)
    print('The original template was saved as: %s' % backup_filename)


def needs_conversion():
    """Return True (after backing up and comment-encoding the template) when
    it still uses the old OS::Heat::SoftwareConfig resource."""
    with open(OPTS.template, 'r') as f:
        template = yaml.load(f.read(), Loader=TemplateLoader)
    net_config_res = template['resources'].get('OsNetConfigImpl')
    if (net_config_res and net_config_res[
            'type'] == 'OS::Heat::SoftwareConfig'):
        backup_template(OPTS.template)
        if not OPTS.discard_comments:
            # Convert comments '# ...' into 'comments<num>: ...' so they are
            # not lost when loading the data.
            to_commented_yaml(OPTS.template)
        return True
    return False


def convert_to_heat_value_resource():
    """Rewrite the OsNetConfigImpl resource as an OS::Heat::Value."""
    if needs_conversion():
        with open(OPTS.template, 'r') as f:
            template = yaml.load(f.read(), Loader=TemplateLoader)
        net_config_res = template['resources']['OsNetConfigImpl']
        net_config_res_props = net_config_res['properties']
        # set the type to OS::Heat::Value
        net_config_res['type'] = 'OS::Heat::Value'
        del net_config_res_props['group']
        old_config = net_config_res_props['config']
        new_config = old_config['str_replace']['params']['$network_config']
        del net_config_res_props['config']
        net_config_res_props['value'] = new_config
        outputs = template['outputs']
        del outputs['OS::stack_id']
        outputs['config'] = {}
        # NOTE(review): this emits the literal string
        # 'get_attr[OsNetConfigImpl, value]'; a Heat intrinsic would normally
        # be the mapping {'get_attr': ['OsNetConfigImpl', 'value']} — confirm
        # against the current tripleo templates before changing.
        outputs['config']['value'] = 'get_attr[OsNetConfigImpl, value]'
        write_template(template, filename=OPTS.template)
        if not OPTS.discard_comments:
            # Convert previously converted comments, 'comments<num>: ...'
            # YAML back to normal #commented YAML
            to_normal_yaml(OPTS.template)
        # Typo fix: "update" -> "updated"
        print('The updated template was saved as: %s' % OPTS.template)
    else:
        print('Template does not need conversion: %s' % OPTS.template)


if __name__ == '__main__':
    # BUG FIX: argument parsing and conversion used to run unconditionally at
    # import time, which made the module impossible to import (argparse would
    # exit). OPTS stays a module global because the functions above read it.
    OPTS = parse_opts(sys.argv)
    convert_to_heat_value_resource()
from Monument import Monument, Dataset
import importer_utils as utils
import importer as importer


class DkBygningDa(Monument):
    """Monument subclass for rows of the Danish buildings (dk-bygninger) dataset."""

    def set_adm_location(self):
        # Only add the administrative location (P131) when 'kommune' holds
        # exactly one wikilink, so the municipality match is unambiguous.
        if self.has_non_empty_attribute("kommune"):
            if utils.count_wikilinks(self.kommune) == 1:
                adm_location = utils.q_from_first_wikilink("da", self.kommune)
                self.add_statement("located_adm", adm_location)

    def set_location(self):
        """
        Set location based on 'by' column.

        If there's one wikilinked item, confirm that the
        corresponding WD item is of a type that's a subclass of
        'human settlement', using query results downloaded by importer.
        If not wikilinked, check if there's a dawp article with the
        same name and do the same check.
        """
        place_item = None
        if self.has_non_empty_attribute("by"):
            place = self.by
            if utils.count_wikilinks(place) == 1:
                # Use the link target's title rather than the raw cell text.
                place = utils.get_wikilinks(place)[0].title
            if utils.wp_page_exists("da", place):
                place_item = utils.q_from_wikipedia("da", place)
        if place_item:
            place_item_ids = utils.get_P31(place_item, self.repo)
            for p31_value in place_item_ids:
                if p31_value in self.data_files["settlement"]:
                    self.add_statement("location", place_item)
                    # there can be more than one P31, but after first positive
                    # we can leave
                    return

    def set_sagsnr(self):
        """Danish listed buildings case ID (P2783)."""
        self.add_statement("listed_building_dk", str(self.sagsnr))

    def update_labels(self):
        # The label is the case name ('sagsnavn') with wiki markup stripped.
        self.add_label("da", utils.remove_markup(self.sagsnavn))

    def set_address(self):
        """
        Set address of object.

        self.adresse is always streetname + number.
        self.postnr is always zipcode.
        self.by is always placename.
        """
        if self.has_non_empty_attribute("adresse"):
            address = self.adresse + " " + self.postnr + " " + self.by
            self.add_statement("located_street", address)

    def set_inception(self):
        # 'opforelsesar' is the construction year; add it only when it
        # parses to a plain int (utils.parse_year may return other forms).
        if self.has_non_empty_attribute("opforelsesar"):
            inception = utils.parse_year(self.opforelsesar)
            if isinstance(inception, int):
                self.add_statement(
                    "inception", utils.package_time({"year": inception}))

    def set_monuments_all_id(self):
        """Map monuments_all ID to fields in this table."""
        # Composite key: municipality, property and building numbers.
        self.monuments_all_id = "{!s}-{!s}-{!s}".format(
            self.kommunenr, self.ejendomsnr, self.bygningsnr)

    def __init__(self, db_row_dict, mapping, data_files, existing):
        Monument.__init__(self, db_row_dict, mapping, data_files, existing)
        self.set_monuments_all_id()
        self.update_labels()
        self.exists("da")
        self.set_commonscat()
        self.set_image("billede")
        self.set_coords(("lat", "lon"))
        self.set_adm_location()
        self.set_location()
        self.set_sagsnr()
        self.set_address()
        self.set_inception()
        self.exists_with_prop(mapping)
        self.print_wd()


if __name__ == "__main__":
    """Point of entrance for importer."""
    args = importer.handle_args()
    dataset = Dataset("dk-bygninger", "da", DkBygningDa)
    # Q486972 = 'human settlement'; used by set_location's P31 check.
    dataset.subclass_downloads = {"settlement": "Q486972"}
    importer.main(args, dataset)
"""helpers.py -- supporting routines for PyBlaster project

@Author Ulrich Jansen <ulrich.jansen@rwth-aachen.de>
"""

# Unit labels for successive powers of 1024.
suffixes = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']


def humansize(nbytes):
    """Format a byte count as a human-readable string, e.g. 1536 -> '1.5 KB'."""
    if nbytes == 0:
        return '0 B'
    value = nbytes
    unit_index = 0
    last_unit = len(suffixes) - 1
    while value >= 1024 and unit_index < last_unit:
        value /= 1024.
        unit_index += 1
    # Round to two decimals, then drop trailing zeros and a dangling dot.
    rounded = ('%.2f' % value).rstrip('0').rstrip('.')
    return '%s %s' % (rounded, suffixes[unit_index])


def seconds_to_minutes(nsecs):
    """Render a duration in seconds as 'M:SS'; 0 yields an empty string."""
    if nsecs == 0:
        return ""
    minutes = int(nsecs / 60)
    seconds = nsecs % 60
    return "%d:%02d" % (minutes, seconds)
import unittest

from cumulusci.core import template_utils


class TemplateUtils(unittest.TestCase):
    """Unit tests for the template_utils string/faker helpers."""

    def test_string_generator(self):
        # StringGenerator re-evaluates its callable on every str() call,
        # so it observes the closed-over variable's current value.
        counter = 100
        gen = template_utils.StringGenerator(lambda: str(counter))
        self.assertEqual(str(gen), "100")
        counter = 200
        self.assertEqual(str(gen), "200")

    def test_faker_library(self):
        fake = template_utils.FakerTemplateLibrary()
        self.assertTrue(fake.first_name)
        self.assertIn("example.com", fake.email(domain="example.com"))

    def test_faker_languages(self):
        # Same contract with a localized (Norwegian) faker.
        fake = template_utils.FakerTemplateLibrary("no_NO")
        self.assertTrue(fake.first_name)
        self.assertIn("example.com", fake.email(domain="example.com"))

    def test_format_str(self):
        self.assertEqual(template_utils.format_str("abc"), "abc")
        self.assertEqual(template_utils.format_str("{{abc}}", {"abc": 5}), "5")
        self.assertTrue(len(template_utils.format_str("{{fake.first_name}}")))
        # Both int and str context values render into the template.
        for count in (15, "15"):
            self.assertIn(
                "15",
                template_utils.format_str(
                    "{{fake.first_name}} {{count}}", {"count": count}
                ),
            )
        self.assertEqual(
            template_utils.format_str("{% raw %}{}{% endraw %}", {"count": "15"}),
            "{}",
        )

    def test_format_str_languages(self):
        norwegian_faker = template_utils.FakerTemplateLibrary("no_NO")
        rendered = template_utils.format_str(
            "{{vikingfake.first_name}} {{abc}}",
            {"abc": 5, "vikingfake": norwegian_faker},
        )
        self.assertIn("5", rendered)

        def cosmopolitan_faker(language):
            return template_utils.FakerTemplateLibrary(language)

        rendered = template_utils.format_str(
            "{{fakei18n('ne_NP').first_name}} {{abc}}",
            {"abc": 5, "fakei18n": cosmopolitan_faker, "type": type},
        )
        self.assertIn("5", rendered)
"""General tests for Buoyant library."""
import datetime
import unittest
from io import BytesIO

import buoyant
from buoyant import buoy

# One captured SOS result row (station 46014, 2015-07-31) used for offline
# parsing tests. Spectral fields are semicolon-delimited strings; empty
# directional fields are runs of separators with no values.
sampledata = [
    {
        "latitude (degree)": "39.235",
        "sea_surface_wave_peak_period (s)": "13.79",
        "polar_coordinate_r1 (1)": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;",
        "station_id": "urn:ioos:station:wmo:46014",
        "sea_surface_wind_wave_period (s)": "3.80",
        "spectral_energy (m**2/Hz)": "0;0;0;0;0.117495;0.347233;0.340078;1.07545;1.31407;0.644604;0.319928;0.20951;0.203445;0.407703;0.501098;1.05528;0.552653;0.982512;0.40238;0.259344;0.176087;0.156276;0.10127;0.0713481;0.1257;0.0469963;0.0294347;0.0344079;0.0196117;0.0208386;0.0207157;0.0185725;0.0112313;0.0140935;0.00829521;0.0135329;0.0103501;0.00823833;0.00611987;0.00516951;0.00295949;0.00274196;0.00162249;0.00153895;0.000701703;0.000452887",
        "sea_surface_wave_mean_period (s)": "7.61",
        "sea_water_temperature (c)": "",
        "bandwidths (Hz)": "0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0050;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0100;0.0200;0.0200;0.0200;0.0200;0.0200;0.0200;0.0200",
        "sea_surface_wind_wave_to_direction (degree)": "",
        "polar_coordinate_r2 (1)": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;",
        "sampling_rate (Hz)": "",
        "sea_surface_wave_to_direction (degree)": "",
        "sea_surface_swell_wave_significant_height (m)": "1.07",
        "number_of_frequencies (count)": "46",
        "center_frequencies (Hz)": "0.0325;0.0375;0.0425;0.0475;0.0525;0.0575;0.0625;0.0675;0.0725;0.0775;0.0825;0.0875;0.0925;0.1000;0.1100;0.1200;0.1300;0.1400;0.1500;0.1600;0.1700;0.1800;0.1900;0.2000;0.2100;0.2200;0.2300;0.2400;0.2500;0.2600;0.2700;0.2800;0.2900;0.3000;0.3100;0.3200;0.3300;0.3400;0.3500;0.3650;0.3850;0.4050;0.4250;0.4450;0.4650;0.4850",
        "date_time": "2015-07-31T19:50:00Z",
        "sea_surface_wind_wave_significant_height (m)": "0.17",
        "sea_surface_wave_significant_height (m)": "1.09",
        "sea_surface_swell_wave_to_direction (degree)": "",
        "sea_surface_swell_wave_period (s)": "",
        "calculation_method": "UNKNOWN",
        "mean_wave_direction (degree)": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;",
        "longitude (degree)": "-123.974",
        "principal_wave_direction (degree)": ";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;",
        "sensor_id": "urn:ioos:sensor:wmo:46014::wpm1",
    }
]


class BuoyTestCase(unittest.TestCase):
    # NOTE(review): most tests below construct buoyant.Buoy objects, which
    # appear to query live buoy-data services — presumably network access is
    # required for this suite to pass; confirm before adding to offline CI.

    def setUp(self):
        # Station 41012 serves as the generic live test station.
        self.b = buoyant.Buoy("41012")

    def test_observation(self):
        """Test the Observation class."""
        # Observation subclasses float and carries a 'unit' attribute.
        self.assertTrue(issubclass(buoyant.Observation, float))
        subint = float.__new__(buoyant.Observation, 11)
        assert subint == 11
        assert isinstance(subint, buoyant.Observation)
        obs = buoyant.Observation(1, "m")
        assert isinstance(obs, buoyant.Observation)
        assert obs.unit == "m"
        self.assertEqual(str(obs), "1.0 m")
        assert repr(obs) == "Observation(1.0, 'm')"
        # Arithmetic behaves like plain float.
        assert obs + 2 == 3

    def test_buoy_instant(self):
        assert self.b
        assert isinstance(self.b, buoyant.Buoy)

    def test_data_exists(self):
        # Spot-check a few live observation attributes and their types.
        x = self.b.sea_water_electrical_conductivity
        assert x.unit == "mS/cm"
        currents = self.b.currents
        self.assertIsInstance(currents, list)
        assert isinstance(x.datetime, datetime.datetime)
        assert isinstance(self.b.image, BytesIO)
        assert isinstance(self.b.__dict__["lat"], float)
        assert isinstance(self.b.coords, tuple)
        # coords mirrors the cached lat/lon pair.
        assert (self.b.__dict__["lat"], self.b.__dict__["lon"]) == self.b.coords

    def test_keys(self):
        """Test that observation attributes exist in Buoy class."""
        self.assertIsNotNone(self.b.sea_water_salinity)
        self.assertIsNotNone(self.b.air_pressure_at_sea_level)
        self.assertIsNotNone(self.b.air_temperature)
        self.assertIsNotNone(self.b.currents)
        self.assertIsNotNone(self.b.sea_water_electrical_conductivity)
        self.assertIsNotNone(self.b.sea_water_salinity)
        self.assertIsNotNone(self.b.sea_water_temperature)

    def test_parse_unit(self):
        # parse_unit extracts the parenthesized unit from the key name.
        dictionary = {"magic (pixie dust)": "42"}
        x = buoy.parse_unit("magic", dictionary)
        assert isinstance(x, buoyant.Observation)
        # Missing keys yield None rather than raising.
        nope = buoy.parse_unit("widget", dictionary)
        self.assertIsNone(nope)
        # Semicolon-delimited values become a list of Observations.
        spectral_energy = buoy.parse_unit("spectral_energy", sampledata[0])
        self.assertEqual(spectral_energy[4], buoy.Observation(0.117495, "m**2/Hz"))

    def test_error(self):
        with self.assertRaises(AttributeError):
            self.b._get("foo bar")
        self.assertIsNone(self.b.depth)

    def test_image(self):
        # Station 51001 is used here because it carries a camera.
        station = buoyant.Buoy(51001)
        assert buoy.CAM_ENDPOINT in station.image_url
        self.assertIsNotNone(station.image)

    def test_degroup(self):
        # _degroup expands grouped spectral columns into per-field values.
        waves = buoyant.buoy._degroup(sampledata, buoyant.properties.waves)
        self.assertEqual(
            waves[0]["sea_surface_wind_wave_significant_height"],
            buoy.Observation(0.17, "m"),
        )


if __name__ == "__main__":
    unittest.main()
"Port": config['Incoming/MMTP'].get('Port', 0), "Nickname": nickname, "Identity": formatBase64(mixminion.Crypto.pk_encode_public_key(identityKey)), "Published": formatTime(now), "ValidAfter": formatDate(validAt), "ValidUntil": formatDate(validUntil), "Pa
cketKey": formatBase64(mixminion.Crypto.pk_encode_public_key(packetKey)), "KeyID": identityKeyID, "MMTPProtocolsIn" : mmtpProtocolsIn, "MMTPProtocolsOut" : mmtpProtocolsOut, "PacketVersion" : mixminion.Packet.PACKET_VERSION, "mm_version" : mixminion.__version__, "Secure" : secure, "Contact" : contact, } # If we don't know our IP address, try to guess i
f fields['IP'] == '0.0.0.0': #XXXX008 remove; not needed since 005. try: fields['IP'] = _guessLocalIP() LOG.warn("No IP configured; guessing %s",fields['IP']) except IPGuessError, e: LOG.error("Can't guess IP: %s", str(e)) raise UIError("Can't guess IP: %s" % str(e)) # If we don't know our Hostname, try to guess if fields['Hostname'] is None: fields['Hostname'] = socket.getfqdn() LOG.warn("No Hostname configured; guessing %s",fields['Hostname']) try: _checkHostnameIsLocal(fields['Hostname']) dnsResults = mixminion.NetUtils.getIPs(fields['Hostname']) except socket.error, e: LOG.warn("Can't resolve configured hostname %r: %s", fields['Hostname'],str(e)) else: found = [ ip for _,ip,_ in dnsResults ] if fields['IP'] not in found: LOG.warn("Configured hostname %r resolves to %s, but we're publishing the IP %s", fields['Hostname'], englishSequence(found), fields['IP']) # Fill in a stock server descriptor. Note the empty Digest: and # Signature: lines. info = """\ [Server] Descriptor-Version: 0.2 Nickname: %(Nickname)s Identity: %(Identity)s Digest: Signature: Published: %(Published)s Valid-After: %(ValidAfter)s Valid-Until: %(ValidUntil)s Packet-Key: %(PacketKey)s Packet-Versions: %(PacketVersion)s Software: Mixminion %(mm_version)s Secure-Configuration: %(Secure)s Contact: %(Contact)s """ % fields if insecurities: info += "Why-Insecure: %s\n"%(", ".join(insecurities)) if fingerprint: info += "Contact-Fingerprint: %s\n"%fingerprint if comments: info += "Comments: %s\n"%comments # Only advertise incoming MMTP if we support it. if config["Incoming/MMTP"].get("Enabled", 0): info += """\ [Incoming/MMTP] Version: 0.1 IP: %(IP)s Hostname: %(Hostname)s Port: %(Port)s Key-Digest: %(KeyID)s Protocols: %(MMTPProtocolsIn)s """ % fields for k,v in config.getSectionItems("Incoming/MMTP"): if k not in ("Allow", "Deny"): continue info += "%s: %s" % (k, _rule(k=='Allow',v)) # Only advertise outgoing MMTP if we support it. 
if config["Outgoing/MMTP"].get("Enabled", 0): info += """\ [Outgoing/MMTP] Version: 0.1 Protocols: %(MMTPProtocolsOut)s """ % fields for k,v in config.getSectionItems("Outgoing/MMTP"): if k not in ("Allow", "Deny"): continue info += "%s: %s" % (k, _rule(k=='Allow',v)) if not config.moduleManager.isConfigured(): config.moduleManager.configure(config) # Ask our modules for their configuration information. info += "".join(config.moduleManager.getServerInfoBlocks()) info += """\ [Testing] Platform: %s Configuration: %s """ %(getPlatformSummary(), config.getConfigurationSummary()) # Remove extra (leading or trailing) whitespace from the lines. lines = [ line.strip() for line in info.split("\n") ] # Remove empty lines lines = filter(None, lines) # Force a newline at the end of the file, rejoin, and sign. lines.append("") info = "\n".join(lines) info = signServerInfo(info, identityKey) # Write the desciptor writeFile(serverKeys.getDescriptorFileName(), info, mode=0644) # This is for debugging: we try to parse and validate the descriptor # we just made. # FFFF Remove this once we're more confident. inf = ServerInfo(string=info) ok = checkDescriptorConsistency(inf, config, log=0, isPublished=0) if ok not in ('good', 'so-so'): print "========" print info print "======" checkDescriptorConsistency(inf, config, log=1, isPublished=0) assert ok in ('good', 'so-so') return info def _rule(allow, (ip, mask, portmin, portmax)): """Return an external representation of an IP allow/deny rule.""" if mask == '0.0.0.0': ip="*" mask="" elif mask == "255.255.255.255": mask = "" else: mask = "/%s" % mask if portmin==portmax==48099 and allow: ports = "" elif portmin == 0 and portmax == 65535 and not allow: ports = "" elif portmin == portmax: ports = " %s" % portmin else: ports = " %s-%s" % (portmin, portmax) return "%s%s%s\n" % (ip,mask,ports) #---------------------------------------------------------------------- # Helpers to guess a reasonable local IP when none is provided. 
class IPGuessError(MixError):
    """Exception: raised when we can't guess a single best IP."""
    pass

# Cached guessed IP address
# NOTE(review): _GUESSED_IP is read below but never assigned a computed
# value in this block, so the cache appears never to be populated — confirm
# whether an assignment was lost or lives elsewhere.
_GUESSED_IP = None

def _guessLocalIP():
    "Try to find a reasonable IP for this host."
    global _GUESSED_IP
    if _GUESSED_IP is not None:
        return _GUESSED_IP

    # First, let's see what our name resolving subsystem says our
    # name is.  (ip_set is used as a Python 2 set: dict keyed by IP.)
    ip_set = {}
    try:
        ip_set[ socket.gethostbyname(socket.gethostname()) ] = 1
    except socket.error:
        try:
            ip_set[ socket.gethostbyname(socket.getfqdn()) ] = 1
        except socket.error:
            pass

    # And in case that doesn't work, let's see what other addresses we might
    # think we have by using 'getsockname'.
    for target_addr in ('18.0.0.1', '10.0.0.1', '192.168.0.1',
                        '172.16.0.1')+tuple(ip_set.keys()):
        # open a datagram socket so that we don't actually send any packets
        # by connecting.
        try:
            s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            s.connect((target_addr, 9)) #discard port
            ip_set[ s.getsockname()[0] ] = 1
        except socket.error:
            pass

    # Discard loopback and unspecified addresses.  (Python 2: keys() returns
    # a list, so deleting while iterating it is safe here.)
    for ip in ip_set.keys():
        if ip.startswith("127.") or ip.startswith("0."):
            del ip_set[ip]

    # FFFF reject 192.168, 10., 176.16.x

    if len(ip_set) == 0:
        raise IPGuessError("No address found")
    if len(ip_set) > 1:
        raise IPGuessError("Multiple addresses found: %s" % (
            ", ".join(ip_set.keys())))

    IP = ip_set.keys()[0]
    if IP.startswith("192.168.") or IP.startswith("10.") or \
       IP.startswith("176.16."):
        raise IPGuessError("Only address found is in a private IP block")

    return IP

# Hostnames already verified by _checkHostnameIsLocal (memo cache).
_KNOWN_LOCAL_HOSTNAMES = {}

def _checkHostnameIsLocal(name):
    # Warn if 'name' resolves to a loopback/unspecified address; results
    # are cached so each hostname is checked at most once.
    if _KNOWN_LOCAL_HOSTNAMES.has_key(name):
        return
    r = mixminion.NetUtils.getIPs(name)
    for family, addr, _ in r:
        if family == mixminion.NetUtils.AF_INET:
            if addr.startswith("127.") or addr.startswith("0."):
                LOG.warn("Hostname %r resolves to reserved address %s",
                         name, addr)
        else:
            if addr in ("::", "::1"):
                LOG.warn("Hostname %r resolves to reserved address %s",
                         name,addr)
    _KNOWN_LOCAL_HOSTNAMES[name] = 1

def generateCertChain(filename, mmtpKey, identityKey, nickname, certStarts,
certEnds): """Create a two-certificate chain for use in MMTP. filename -- location to
import datetime

from django.db import models
from django.utils import timezone


class Question(models.Model):
    # A poll question and the moment it was published.
    question_text = models.CharField(max_length=200)
    pub_date = models.DateTimeField('date published')

    def __unicode__(self):  # __unicode__ on Python 2
        return self.question_text

    def was_published_recently(self):
        """Return True if pub_date falls within the past day (not future)."""
        now = timezone.now()
        return now - datetime.timedelta(days=1) <= self.pub_date <= now
    # Admin change-list metadata for the method above.
    was_published_recently.admin_order_field = 'pub_date'
    was_published_recently.boolean = True
    was_published_recently.short_description = 'Published recently?'


class Choice(models.Model):
    # NOTE(review): ForeignKey without on_delete is only valid on
    # Django < 2.0 (consistent with the Python 2 __unicode__ methods
    # here) — confirm the project's Django version before upgrading.
    question = models.ForeignKey(Question)
    choice_text = models.CharField(max_length=200)
    votes = models.IntegerField(default=0)

    def __unicode__(self):  # __unicode__ on Python 2
        return self.choice_text
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

from selenium.common.exceptions import WebDriverException

try:
    import http.client as http_client
except ImportError:
    import httplib as http_client  # Python 2 fallback

from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.remote.webdriver import WebDriver as RemoteWebDriver
from .service import Service
from .remote_connection import SafariRemoteConnection


class WebDriver(RemoteWebDriver):
    """
    Controls the SafariDriver and allows you to drive the browser.
    """

    def __init__(self, port=0, executable_path="/usr/bin/safaridriver", reuse_service=False,
                 desired_capabilities=DesiredCapabilities.SAFARI, quiet=False,
                 keep_alive=True):
        """
        Creates a new Safari driver instance and launches or finds a running safaridriver service.

        :Args:
         - port - The port on which the safaridriver service should listen for new connections. If zero, a free port will be found.
         - executable_path - Path to a custom safaridriver executable to be used. If absent, /usr/bin/safaridriver is used.
         - reuse_service - If True, do not spawn a safaridriver instance; instead, connect to an already-running service that was launched externally.
         - desired_capabilities: Dictionary object with desired capabilities (Can be used to provide various Safari switches).
         - quiet - If True, the driver's stdout and stderr is suppressed.
         - keep_alive - Whether to configure SafariRemoteConnection to use HTTP keep-alive. Defaults to True.
        """
        self._reuse_service = reuse_service
        self.service = Service(executable_path, port=port, quiet=quiet)
        if not reuse_service:
            self.service.start()

        executor = SafariRemoteConnection(remote_server_addr=self.service.service_url,
                                          keep_alive=keep_alive)

        RemoteWebDriver.__init__(
            self,
            command_executor=executor,
            desired_capabilities=desired_capabilities)

        self._is_remote = False

    def quit(self):
        """
        Closes the browser and shuts down the SafariDriver executable
        that is started when starting the SafariDriver
        """
        try:
            RemoteWebDriver.quit(self)
        except http_client.BadStatusLine:
            # The driver may close the connection before responding to quit.
            pass
        finally:
            # Leave an externally-managed service running.
            if not self._reuse_service:
                self.service.stop()

    # safaridriver extension commands. The canonical command support matrix is here:
    # https://developer.apple.com/library/content/documentation/NetworkingInternetWeb/Conceptual/WebDriverEndpointDoc/Commands/Commands.html

    # First available in Safari 11.1 and Safari Technology Preview 41.
    def set_permission(self, permission, value):
        """Set a single session permission (e.g. 'getUserMedia') to a bool."""
        if not isinstance(value, bool):
            raise WebDriverException("Value of a session permission must be set to True or False.")

        payload = {}
        payload[permission] = value
        self.execute("SET_PERMISSIONS", {"permissions": payload})

    # First available in Safari 11.1 and Safari Technology Preview 41.
    def get_permission(self, permission):
        """Return the bool value of a session permission, or None if unknown."""
        payload = self.execute("GET_PERMISSIONS")["value"]
        permissions = payload["permissions"]
        if not permissions:
            return None

        if permission not in permissions:
            return None

        value = permissions[permission]
        if not isinstance(value, bool):
            return None

        return value

    # First available in Safari 11.1 and Safari Technology Preview 42.
    def debug(self):
        """Attach the Web Inspector and pause script execution."""
        self.execute("ATTACH_DEBUGGER")
        self.execute_script("debugger;")
asks that have no FAILED descendants self.update(worker_id, {'host': host}, get_work=True) if assistant: self.add_worker(worker_id, [('assistant', assistant)]) best_task = None if current_tasks is not None: ct_set = set(current_tasks) for task in sorted(self._state.get_running_tasks(), key=self._rank): if task.worker_running == worker_id and task.id not in ct_set: best_task = task locally_pending_tasks = 0 running_tasks = [] upstream_table = {} greedy_resources = collections.defaultdict(int) n_unique_pending = 0 worker = self._state.get_worker(worker_id) if worker.is_trivial_worker(self._state): relevant_tasks = worker.get_pending_tasks(self._state) used_resources = collections.defaultdict(int) greedy_workers = dict() # If there's no resources, then they can grab any task else: relevant_tasks = self._state.get_pending_tasks() used_resources = self._used_resources() activity_limit = time.time() - self._config.worker_disconnect_delay active_workers = self._state.get_active_workers(last_get_work_gt=activity_limit) greedy_workers = dict((worker.id, worker.info.get('workers', 1)) for worker in active_workers) tasks = list(relevant_tasks) tasks.sort(key=self._rank, reverse=True) for task in tasks: upstream_status = self._upstream_status(task.id, upstream_table) in_workers = (assistant and getattr(task, 'runnable', bool(task.workers))) or worker_id in task.workers if task.status == RUNNING and in_workers: # Return a list of currently running tasks to the client, # makes it easier to troubleshoot other_worker = self._state.get_worker(task.worker_running) more_info = {'task_id': task.id, 'worker': str(other_worker)} if other_worker is not None: more_info.update(other_worker.info) running_tasks.append(more_info) if task.status == PENDING and in_workers and upstream_status != UPSTREAM_DISABLED: locally_pending_tasks += 1 if len(task.workers) == 1 and not assistant: n_unique_pending += 1 if best_task: continue if task.status == RUNNING and (task.worker_running in 
greedy_workers): greedy_workers[task.worker_running] -= 1 for resource, amount in six.iteritems((task.resources or {})): greedy_resources[resource] += amount if self._schedulable(task) and self._has_resources(task.resources, greedy_resources): if in_workers and self._has_resources(task.resources, used_resources): best_task = task else: workers = itertools.chain(task.workers, [worker_id]) if assistant else task.workers for task_worker in workers: if greedy_workers.get(task_worker, 0) > 0: # use up a worker greedy_workers[task_worker] -= 1 # keep track of the resources used in greedy scheduling for resource, amount in six.iteritems((task.resources or {})): greedy_resources[resource] += amount break reply = {'n_pending_tasks': locally_pending_tasks, 'running_tasks': running_tasks, 'task_id': None, 'n_unique_pending': n_unique_pending} if best_task: self._state.set_status(best_task, RUNNING, self._config) best_task.worker_running = worker_id best_task.time_running = time.time() self._update_task_history(best_task, RUNNING, host=host) reply['task_id'] = best_task.id reply['task_family'] = best_task.family reply['task_module'] = getattr(best_task, 'module', None) reply['task_params'] = best_task.params return reply def ping(self, **kwargs): worker_id = kwargs['worker'] self.update(worker_id) def _upstream_status(self, task_id, upstream_status_table): if task_id in upstream_status_table: return upstream_status_table[task_id] elif self._state.has_task(task_id): task_stack = [task_id] while task_stack: dep_id = task_stack.pop() if self._state.has_task(dep_id): dep = self._state.get_task(dep_id) if dep.status == DONE: continue if dep_id not in upstream_status_table: if dep.status == PENDING and dep.deps: task_stack = task_stack + [dep_id] + list(dep.deps) upstream_status_table[dep_id] = '' # will be updated postorder else: dep_status = STATUS_TO_UPSTREAM_MAP.get(dep.status, '') upstream_status_table[dep_id] = dep_status elif upstream_status_table[dep_id] == '' and dep.deps: 
# This is the postorder update step when we set the # status based on the previously calculated child elements upstream_status = [upstream_status_table.get(a_task_id, '') for a_task_id in dep.deps] upstream_status.append('') # to handle empty list
status = max(upstream_status, key=UPSTREAM_SEVERITY_KEY) upstream_status_table[dep_id] = status return upstream_status_table[dep_id] def _serialize_task(self, task_id, include_deps=True, deps=None): task = self._state.get_task(task_id) ret = { 'display_
name': task.pretty_id, 'status': task.status, 'workers': list(task.workers), 'worker_running': task.worker_running, 'time_running': getattr(task, "time_running", None), 'start_time': task.time, 'last_updated': getattr(task, "updated", task.time), 'params': task.params, 'name': task.family, 'priority': task.priority, 'resources': task.resources, 'tracking_url': getattr(task, "tracking_url", None), } if task.status == DISABLED: ret['re_enable_able'] = task.scheduler_disable_time is not None if include_deps: ret['deps'] = list(task.deps if deps is None else deps) return ret def graph(self, **kwargs): self.prune() serialized = {} seen = set() for task in self._state.get_active_tasks(): serialized.update(self._traverse_graph(task.id, seen)) return serialized def _filter_done(self, task_ids): for task_id in task_ids: task = self._state.get_task(task_id) if task is None or task.status != DONE: yield task_id def _traverse_graph(self, root_task_id, seen=None, dep_func=None, include_done=True): """ Returns the dependency graph rooted at task_id This does a breadth-first traversal to find the nodes closest to the root before hitting the scheduler.max_graph_nodes limit. :param root_task_id: the id of the graph's root :return: A map of task id to serialized node """ if seen is None: seen = set() elif root_task_id in seen: return {} if dep_func is None: def dep_func(t): return t.deps seen.add(root_task_id) serialized = {} queue = collections.deque([root_task_id]) while queue: task_id = queue.popleft() task
import nose
from nose.plugins.attrib import attr

import logging
import colorguard
import os

# Directory containing the CGC test binaries, relative to this file.
bin_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../binaries'))


@attr(speed='slow')
def test_cromu_00070_caching():
    # Test exploitation of CROMU_00070 given an input which causes a leak. Then test that we can do it again restoring
    # from the cache.
    for _ in range(2):
        payload = bytes.fromhex("06000006020a00000000000000000000000c030c00000100e1f505000000000000eb")
        cg = colorguard.ColorGuard(os.path.join(bin_location, "tests/cgc/CROMU_00070"), payload)
        pov = cg.attempt_exploit()
        nose.tools.assert_not_equal(pov, None)
        nose.tools.assert_true(pov.test_binary())


def run_all():
    """Run every module-level function whose name starts with 'test_'."""
    functions = globals()
    all_functions = dict(filter((lambda kv: kv[0].startswith('test_')), functions.items()))
    for f in sorted(all_functions.keys()):
        # callable() is the idiomatic check (was: hasattr(..., '__call__')).
        if callable(all_functions[f]):
            all_functions[f]()


if __name__ == "__main__":
    logging.getLogger("colorguard").setLevel("DEBUG")
    logging.getLogger("povsim").setLevel("DEBUG")

    import sys
    if len(sys.argv) > 1:
        # Run a single named test, e.g. `python test.py cromu_00070_caching`.
        globals()['test_' + sys.argv[1]]()
    else:
        run_all()
# coding=utf-8 #https://developers.google.com/drive/v3/web/quickstart/python from __future__ import print_function import httplib2 import os import io from apiclient import discovery import oauth2client from oauth2client import client from oauth2client import tools from apiclient.http import MediaIoBaseDownload from apiclient.http import MediaFileUpload import sys import argparse from pyfcm import FCMNotification import h5py """ DESCRIPTION Script with class that manages operations with Google. Send file, uploads file and list files """ class GoogleManager: def __init__(self): self.SCOPES = 'https://www.googleapis.com/auth/drive' self.CLIENT_SECRET_FILE = 'GoogleDrive_Client_secret.json' self.APPLICATION_NAME = 'pythonscript' print("[GOOGLE MANAGER] Google Manager started") def init_for_upload(self,upload_file=None,upload_file_name=None): if upload_file and upload_file_name: self.upload_manager(upload_file,upload_file_name) print("[GOOGLE MANAGER] Will upload file") else: raise ValueError("[ERROR] Object initializer has to have file name to upload and name of uploaded file in upload mode. Initialize object with mode, upload filename and upload destination name") def init_for_download(self,download_file=None): if download_file: self.download_manager(download_file) print("[GOOGLE MANAGER] Will download file") else: raise ValueError("[ERROR] Object initializer has to have file name to download in download mode. Initialize object with mode and file name to download") def init_for_list(self,): self.download_manager(True) def download_file(self,file_id, mimeType, filename,drive_service): if "google-apps" in mimeType: return request = drive_service.files().get_media(fileId=file_id) fh = io.FileIO(filename, 'wb') downloader = MediaIoBaseDownload(fh, request) done = False while done is False: status, done = downloader.next_chunk() print("[PROGRESS] Download %d%%." 
% int(status.progress() * 100)) def get_credentials(self): SCOPES = 'https://www.googleapis.com/auth/drive' CLIENT_SECRET_FILE = 'GoogleDrive_Client_secret.json' APPLICATION_NAME = 'pythonscript' home_dir = os.path.expanduser('~') credential_dir = os.path.join(home_dir, '.credentials') if not os.path.exists(credential_dir): os.makedirs(credential_dir) credential_path = os.path.join(credential_dir,'drive-python-quickstart.json') store = oauth2client.file.Storage(credential_path) credentials = store.get() if not credentials or credentials.invalid: flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES) flow.user_agent = APPLICATION_NAME credentials = tools.run_flow(flow, store) return credentials def upload_manager(self,fileToUpload,nameToUpload): credentials = self.get_credentials() http = credentials.authorize(httplib2.Http()) drive_service = discovery.build('drive', 'v3', http=http) file_metadata = {'name' : nameToUpload} media = MediaFileUpload(fileToUpload,resumable=True) file = drive_service.files().create(body=file_metadata,media_body=media,fields='id').execute() print(file) print("[GOOGLE MANAGER] File with name {} uploaded to Google Drive".format(nameToUpload)) def download_manager(self,fileToDownload=None,list = False): credentials = self.get_credentials() http = credentials.authorize(httplib2.Http()) service = discovery.build('drive', 'v3', http=http) results = service.files().list(pageSize=10,fields="nextPageToken, files(id, name)").execute() items = results.get('files', []) if not items: print("[GOOGLE MANAGER] No files found.") else: for item in items: name = str(item["name"].encode('ascii', 'ignore')) print("[GOOGLE MANAGER] Found file ->
{}".format(name)) if name == fileToDownload and not list: credentials = self.get_credentials() http = credentials.authorize(httpli
b2.Http()) drive_service = discovery.build('drive', 'v3', http=http) self.download_file(item['id'],"text/plain",item['name'],drive_service) #drive = GoogleManager() #drive.init_for_download("weights.h5") #drive.init_for_download("model.json") #drive.init_for_upload("more_images.h5","weights.h5") #drive.init_for_upload("model_more_images.json","model.json") #drive.init_for_list()
# -*- coding: utf-8 -*- #Import libraries from sys import exit from math import sqrt #Print title (http://patorjk.com/software/taag/#p=display&f=Small%20Slant&t=Equation%20Solver%20V2.1) print " ____ __ _ ____ __ _ _____ ___" print " / __/__ ___ _____ _/ /_(_)__ ___ / __/__ / / _____ ____ | | / /_ | < /" print " / _// _ `/ // / _ `/ __/ / _ \/ _ \ _\ \/ _ \/ / |/ / -_) __/ | |/ / __/_ / / " print "/___/\_, /\_,_/\_,_/\__/_/\___/_//_/ /___/\___/_/|___/\__/_/ |___/____(_)_/ " print " /_/
" #Welcome phrase print "\nWelcome in the 'Equation Solver' 2.1 by Rafa
el Riber .\nPlease give the values for 'a', 'b' and 'c' as follows: f(x) = Ax^2+Bx+C.\n" #Define check function def check(x): if x != 0: pass else: exit("Invalid value. Please enter only numbers other than zero.") #Input and check a = float(input("Value of 'A': ")) check(a) b = float(input("Value of 'B': ")) check(b) c = float(input("Value of 'C': ")) check(c) #Formulas dis = (b * b) - 4 * (a * c) x1 = (-b - sqrt(dis) ) / (2 * a) x2 = (-b + sqrt(dis) ) / (2 * a) x3 = (-b) / (2 * a) sx = (-b) / (2 * a) sy = (- dis) / (4 * a) #Calculus conditions if dis >= 0: print "\nThe discriminant is equal to: %s.\n" % (dis) else: exit("The equation has no real roots: The discriminant is negative.") if dis == 0: print "Sole root of the equation: (%s). Summit: (%s; %s)\n" % (x3, sx, sy) else: print "Roots: (%s; %s)\nSummit: (%s; %s) \n\nThank you for using the Equation Solver by Rafael Riber !" % (x1, x2, sx, sy)
""" sentry_javascript_lite.plugin ~~~~~~~~~~~~~~~~~~~~~ """ import re from django.conf import settings from sentry.lang.javascript.plugin import JavascriptPlugin from sentry.lang.javascript.processor import SourceProcessor from sentry.interfaces.stacktrace import (Frame, Stacktrace) from sentry_javascript_lite import VERSION def javascript_lite_preprocess_event(data): if data.get('platform') != 'javascript': return processor = JavascriptLiteSourceProcessor() return processor.process(data) class JavascriptPlugin(JavascriptPlugin): author = 'Chad Killingsworth, Jack Henry and Associates' author_url = 'https://github.com/Banno/getsentry-javascript-lite' version = VERSION description = "Preprocess Raw Javascript Stacktraces" resource_links = [ ('Bug Tracker', 'https://github.com/Banno/getsentry-javascript-lite/issues'), ('Source', 'https://github.com/Banno/getsentry-javascript-lite'), ] slug = 'javascript-lite' title = 'Javascript-lite Event Preprocessor' conf_title = title conf_key = 'javascript-lite' def get_event_preprocessors(self, **kwargs): if not settings.SENTRY_SCRAPE_JAVASCRIPT_CONTEXT: return [] return [javascript_lite_preprocess_event] class JavascriptLiteSourceProcessor(SourceProcessor): chrome_ie_stacktrace_expr = re.compile(r'^\s*at (.*?) 
?\(?((?:file|https?|chrome-extension):.*?):(\d+)(?::(\d+))?\)?\s*$', re.IGNORECASE) firefox_safari_stacktrace_expr = re.compile(r'^\s*(.*?)(?:\((.*?)\))?@((?:file|https?|chrome).*?):(\d+)(?::(\d+))?\s*$', re.IGNORECASE) whitespace_expr = re.compile(r'^\s+') location_parts_expr = re.compile(r'[\(\)\s]') def get_stacktraces(self, data): stacktraces = super(JavascriptLiteSourceProcessor, self).get_stacktraces(data); if (not stacktraces and 'extra' in data and isinstance(data['extra'], dict) and 'rawstack' in data['extra']): stacktraces = self.format_raw_stacktrace(data['extra']['rawstack']) if stacktraces: data['extra'].pop('rawstack', None) return stacktraces def format_raw_stacktrace(self, value): kwargs = { 'frames': [], 'frames_omitted': [] } for frame in value.split('\n'): if JavascriptLiteSourceProcessor.chrome_ie_stacktrace_expr.search(frame): kwargs['frames'].append(self.format_chrome_ie_frame(frame)) elif JavascriptLiteSourceProcessor.firefox_safari_stacktrace_expr.search(frame): kwargs['frames'].append(self.format_firefox_safari_frame(frame)) if len(kwargs['fra
mes']) > 0:
return [Stacktrace(**kwargs)] return [] def format_chrome_ie_frame(self, frame): tokens = JavascriptLiteSourceProcessor.chrome_ie_stacktrace_expr.findall(frame)[0] frame = { 'filename': tokens[1], 'function': tokens[0] or '?', 'in_app': True, } try: frame['lineno'] = int(float(tokens[2])) except: pass try: frame['colno'] = int(float(tokens[3])) except: pass return Frame.to_python(frame) def format_firefox_safari_frame(self, frame): tokens = JavascriptLiteSourceProcessor.firefox_safari_stacktrace_expr.findall(frame)[0] frame = { 'filename': tokens[2], 'function': tokens[0] or '?', 'in_app': True, } if tokens[1]: frame['args'] = tokens[1].split(',') try: frame['lineno'] = int(float(tokens[3])) except: pass try: frame['colno'] = int(float(tokens[4])) except: pass return Frame.to_python(frame)
###############################################################################
# This file is part of openWNS (open Wireless Network Simulator)
# _____________________________________________________________________________
#
# Copyright (C) 2004-2007
# Chair of Communication Networks (ComNets)
# Kopernikusstr. 16, D-52074 Aachen, Germany
# phone: ++49-241-80-27910,
# fax: ++49-241-80-22242
# email: info@openwns.org
# www: http://www.openwns.org
# _____________________________________________________________________________
#
# openWNS is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License version 2 as published by the
# Free Software Foundation;
#
# openWNS is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################

import scenarios.interfaces
import openwns.geometry.position
import math

class PositionListPlacer(scenarios.interfaces.INodePlacer):
    """
    Place a number of nodes on the given positions.
    """

    def __init__(self, numberOfNodes = 1, positionsList = None, rotate = 0.0):
        """
        @type numberOfNodes: int
        @param numberOfNodes: The number of nodes to place
        @type positionsList: list of openwns.geometry.position.Position
        @param positionsList: position of every single node relative to the center
        @type rotate: float
        @param rotate: Rotate the final result by rotate in radiant [0..2pi]
        """
        # BUG FIX: the default positionsList used to be a mutable default
        # argument ([Position(1,1)]) shared by every instance; build it per
        # instance instead.
        if positionsList is None:
            positionsList = [openwns.geometry.position.Position(1,1)]
        self.center = openwns.geometry.position.Position(x = 0.0, y = 0.0, z = 0.0)
        self.numberOfNodes = numberOfNodes
        self.positionsList = positionsList
        self.rotate = rotate

    def setCenter(self, center):
        # Absolute position added to every node position in getPositions().
        self.center = center

    def getPositions(self):
        """Return the rotated node positions translated by the center."""
        positions = []
        for i in xrange(self.numberOfNodes):
            x = self.positionsList[i].x
            y = self.positionsList[i].y
            v = openwns.geometry.position.Vector(x = x, y = y, z = 0.0)
            p = v.turn2D(self.rotate).toPosition()
            positions.append(p)
        return [p + self.center for p in positions]

    def isInside(self, position):
        """True if `position` coincides with one of the placed node positions."""
        for i in xrange(self.numberOfNodes):
            x = self.positionsList[i].x
            y = self.positionsList[i].y
            v = openwns.geometry.position.Vector(x = x, y = y, z = 0.0)
            p = v.turn2D(self.rotate).toPosition()
            # BUG FIX: only the x coordinate was compared before, so any
            # position merely sharing an x value with a node was reported
            # as inside.
            if p.x + self.center.x == position.x and p.y + self.center.y == position.y:
                return True
        return False
# AsteriskLint -- an Asterisk PBX config syntax checker
# Copyright (C) 2015-2016 Walter Doekes, OSSO B.V.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
from .config import ConfigAggregator
from .dialplan import DialplanAggregator
from .file import FileReader
from .func_odbc import FuncOdbcAggregator


class FileConfigParser(ConfigAggregator, FileReader):
    """Generic Asterisk config parser: FileReader supplies the lines,
    ConfigAggregator interprets them."""
    pass


class FileDialplanParser(DialplanAggregator, FileReader):
    """Dialplan (extensions.conf) parser built from the same two mixins."""
    pass


class FileFuncOdbcParser(FuncOdbcAggregator, FileReader):
    """func_odbc.conf parser built from the same two mixins."""
    pass
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tests for classification of real numbers."""

__author__ = 'Sean Lip'

from core.tests import test_utils
from extensions.rules import real


class RealRuleUnitTests(test_utils.GenericTestBase):
    """Tests for rules operating on Real objects."""

    def test_equals_rule(self):
        """Equality is numeric: int and float forms of the same value match."""
        self.assertTrue(real.Equals(3).eval(3))
        self.assertTrue(real.Equals(3.0).eval(3))
        self.assertFalse(real.Equals(4).eval(3))

    def test_is_less_than_rule(self):
        """Strict less-than: equal values do not satisfy the rule."""
        self.assertTrue(real.IsLessThan(4).eval(3))
        self.assertTrue(real.IsLessThan(4).eval(3.0))
        self.assertTrue(real.IsLessThan(4.0).eval(3.0))
        self.assertFalse(real.IsLessThan(3).eval(3))
        self.assertFalse(real.IsLessThan(3.0).eval(3.0))
        self.assertFalse(real.IsLessThan(3.0).eval(4.0))
        self.assertFalse(real.IsLessThan(3).eval(4))

    def test_is_greater_than_rule(self):
        """Strict greater-than: equal values do not satisfy the rule."""
        self.assertTrue(real.IsGreaterThan(3).eval(4))
        self.assertTrue(real.IsGreaterThan(3.0).eval(4))
        self.assertTrue(real.IsGreaterThan(3.0).eval(4.0))
        self.assertFalse(real.IsGreaterThan(3).eval(3))
        self.assertFalse(real.IsGreaterThan(3.0).eval(3.0))
        self.assertFalse(real.IsGreaterThan(4.0).eval(3.0))
        self.assertFalse(real.IsGreaterThan(4).eval(3))

    def test_is_less_than_or_equal_to_rule(self):
        """Non-strict: the boundary value itself satisfies the rule."""
        rule = real.IsLessThanOrEqualTo(3)
        self.assertTrue(rule.eval(2))
        self.assertTrue(rule.eval(3))
        self.assertFalse(rule.eval(4))

    def test_is_greater_than_or_equal_to_rule(self):
        """Non-strict: the boundary value itself satisfies the rule."""
        rule = real.IsGreaterThanOrEqualTo(3)
        self.assertTrue(rule.eval(4))
        self.assertTrue(rule.eval(3))
        self.assertFalse(rule.eval(2))

    def test_is_inclusively_between_rule(self):
        # An inverted range (upper < lower) is rejected at construction time.
        with self.assertRaises(AssertionError):
            real.IsInclusivelyBetween(2, 1)

        rule = real.IsInclusivelyBetween(1, 3)
        self.assertTrue(rule.eval(2))
        self.assertTrue(rule.eval(1))
        self.assertTrue(rule.eval(3))
        self.assertTrue(rule.eval(1.0))
        self.assertFalse(rule.eval(3.001))

    def test_is_within_tolerance_rule(self):
        # IsWithinTolerance(tol, center): |x - center| <= tol passes.
        rule = real.IsWithinTolerance(0.5, 0)
        self.assertTrue(rule.eval(0))
        self.assertTrue(rule.eval(0.5))
        self.assertFalse(rule.eval(0.51))
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
from __future__ import unicode_literals

from django.db import migrations, models
import datetime
from django.conf import settings


# NOTE(review): this looks like an auto-generated Django migration
# (creates the Upload and UploadFile tables); edit only via new migrations.
class Migration(migrations.Migration):

    dependencies = [
        ('layers', '0002_initial_step2'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Upload',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('import_id', models.BigIntegerField(null=True)),
                ('state', models.CharField(max_length=16)),
                # default=datetime.datetime.now (callable) -> evaluated per row.
                ('date', models.DateTimeField(default=datetime.datetime.now, verbose_name=b'date')),
                ('upload_dir', models.CharField(max_length=100, null=True)),
                ('name', models.CharField(max_length=64, null=True)),
                ('complete', models.BooleanField(default=False)),
                ('session', models.TextField(null=True)),
                ('metadata', models.TextField(null=True)),
                ('mosaic_time_regex', models.CharField(max_length=128, null=True)),
                ('mosaic_time_value', models.CharField(max_length=128, null=True)),
                ('mosaic_elev_regex', models.CharField(max_length=128, null=True)),
                ('mosaic_elev_value', models.CharField(max_length=128, null=True)),
                ('layer', models.ForeignKey(to='layers.Layer', null=True)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, null=True)),
            ],
            options={
                'ordering': ['-date'],
            },
        ),
        migrations.CreateModel(
            name='UploadFile',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('file', models.FileField(upload_to=b'uploads')),
                ('slug', models.SlugField(blank=True)),
                ('upload', models.ForeignKey(blank=True, to='upload.Upload', null=True)),
            ],
        ),
    ]
# Sample 5
# Minimal TCP echo server: accepts clients on port 8888 and echoes one
# message back per connection; an empty message shuts the server down.
import socket
import sys

try:
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error as e:
    print('Failed to create socket!')
    # BUG FIX: socket.error is OSError on Python 3 and is not subscriptable;
    # msg[0]/msg[1] raised TypeError. Use .errno/.strerror instead.
    print('Error code: ' + str(e.errno) + ', error message: ' + str(e.strerror))
    sys.exit()

print('Socked created successfully.')

# Part 1
host = ''      # all available interfaces
port = 8888

try:
    s.bind((host, port))
except socket.error as e:
    # BUG FIX: the old handler stringified the exception *class*
    # (msg = str(socket.error)) and then indexed into that string.
    print('Bind failed! Error code: ' + str(e.errno) + ', message: ' + str(e.strerror))
    sys.exit()

print('Socket bind complete.')

s.listen(10)  # Limitation to number of connections that can be in the queue
print('Socket is now listening.')

# Part 3 - the while loop to keep the socket listening for clients
while True:
    conn, addr = s.accept()  # blocking call, to accept the first client that comes
    # can type in bash the following to talk to the socket: telnet localhost 8888

    # Part 2
    data = conn.recv(1024)
    if not data:
        # Empty message: close this client and stop the server (original
        # shutdown behavior preserved).
        conn.close()
        break
    reply = '<<<Hello ' + str(data) + '>>>'
    conn.sendall(reply.encode('UTF8'))
    # once you start the socket with python sample5.py
    # try telnet localhost 8888 in another terminal
    # type test, and it should echo back <<<Hello test>>>
    # BUG FIX: close each served connection; previously client sockets were
    # never closed inside the loop, leaking one descriptor per client.
    conn.close()

s.close()
import typing as t
import warnings

from .request import Request


class _FakeSubclassCheck(type):
    """Metaclass for the deprecated BaseRequest alias: makes
    issubclass()/isinstance() checks against BaseRequest delegate to the
    real Request class while emitting a DeprecationWarning."""

    def __subclasscheck__(cls, subclass: t.Type) -> bool:
        warnings.warn(
            "'BaseRequest' is deprecated and will be removed in"
            " Werkzeug 2.1. Use 'issubclass(cls, Request)' instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        return issubclass(subclass, Request)

    def __instancecheck__(cls, instance: t.Any) -> bool:
        warnings.warn(
            "'BaseRequest' is deprecated and will be removed in"
            " Werkzeug 2.1. Use 'isinstance(obj, Request)' instead.",
            DeprecationWarning,
            stacklevel=2,
        )
        return isinstance(instance, Request)


class BaseRequest(Request, metaclass=_FakeSubclassCheck):
    """Deprecated alias for Request: behaves identically but warns once on
    construction (and via the metaclass on type checks)."""

    def __init__(self, *args: t.Any, **kwargs: t.Any) -> None:
        warnings.warn(
            "'BaseRequest' is deprecated and will be removed in"
            " Werkzeug 2.1. 'Request' now includes the functionality"
            " directly.",
            DeprecationWarning,
            stacklevel=2,
        )
        super().__init__(*args, **kwargs)
t_to_32bytearray, big_endian_to_int, ecsign, ecrecover_to_pub, normalize_key # Reimplemented from ethereum.utils def sha3(seed): return sha3_256(to_string(seed)) big_endian_to_int = lambda x: big_endian_int.deserialize(str_to_bytes(x).lstrip(b'\x00')) is_numeric = lambda x: isinstance(x, int) def bytearray_to_bytestr(value): return bytes(value) def to_string(value): if isinstance(value, bytes): return value if isinstance(value, str): return bytes(value, 'utf-8') if isinstance(value, int): return bytes(str(value), 'utf-8') def normalize_address(x, allow_blank=False): if is_numeric(x): return int_to_addr(x) if allow_blank and x in {'', b''}: return b'' if len(x) in (42, 50) and x[:2] in {'0x', b'0x'}: x = x[2:] if len(x) in (40, 48): x = decode_hex(x) if len(x) == 24: assert len(x) == 24 and sha3(x[:20])[:4] == x[-4:] x = x[:20] if len(x) != 20: raise Exception("Invalid address format: %r" % x) return x def normalize_key(key): if is_numeric(key): o = encode_int32(key) elif len(key) == 32: o = key elif len(key) == 64: o = decode_hex(key) elif len(key) == 66 and key[:2] == '0x': o = decode_hex(key[2:]) else: raise Exception("Invalid key format: %r" % key) if o == b'\x00' * 32: raise Exception("Zero privkey invalid") return o def safe_ord(value): if isinstance(value, int): return value else: return ord(value) def ecsign(rawhash, key): if secp256k1 and hasattr(secp256k1, 'PrivateKey'): pk = secp256k1.PrivateKey(key, raw=True) signature = pk.ecdsa_recoverable_serialize( pk.ecdsa_sign_recoverable(rawhash, raw=True) ) signature = signature[0] + bytearray_to_bytestr([signature[1]]) v = safe_ord(signature[64]) + 27 r = big_endian_to_int(signature[0:32]) s = big_endian_to_int(signature[32:64]) else: v, r, s = ecdsa_raw_sign(rawhash, key) return v, r, s # end reimplementation #log = get_logger('eth.chain.tx') TT256 = 2 ** 256 TT256M1 = 2 ** 256 - 1 TT255 = 2 ** 255 SECP256K1P = 2**256 - 4294968273 # in the yellow paper it is specified that s should be smaller than secpk1n 
(eq.205) secpk1n = 115792089237316195423570985008687907852837564279074904382605163141518161494337 null_address = b'\xff' * 20 address_type = Binary.fixed_length(20, allow_empty=True) class Transaction(rlp.Serializable): """ A transaction is stored as: [nonce, gasprice, startgas, to, value, data, v, r, s] nonce is the number of transactions already sent by that account, encoded in binary form (eg. 0 -> '', 7 -> '\x07', 1000 -> '\x03\xd8'). (v,r,s) is the raw Electrum-style signature of the transaction without the signature made with the private key corresponding to the sending account, with 0 <= v <= 3. From an Electrum-style signature (65 bytes) it is possible to extract the public key, and thereby the address, directly. A valid transaction is one where: (i) the signature is well-formed (ie. 0 <= v <= 3, 0 <= r < P, 0 <= s < N, 0 <= r < P - N if v >= 2), and (ii) the sending account has enough funds to pay the fee and the value. """ fields = [ ('nonce', big_endian_int), ('gasprice', big_endian_int), ('startgas', big_endian_int), ('to', address_type), ('value', big_endian_int), ('data', binary), ('v', big_endian_int), ('r', big_endian_int), ('s', big_endian_int), ] _sender = None def __init__(self, nonce, gasprice, startgas, to, value, data, v=0, r=0, s=0): self.data = None to = normalize_address(to, allow_blank=True) super(Transaction, self).__init__(nonce, gasprice, startgas, to, value, data, v, r, s) if self.gasprice >= TT256 or self.startgas >= TT256 or \ self.value >= TT256 or self.nonce >= TT256: raise InvalidTransaction("Values way too high!") @property def sender(self): if not self._sender: # Determine sender if self.r == 0 and self.s == 0: self._sender = null_address else: if self.v in (27, 28): vee = self.v sighash = sha3(rlp.encode(self, UnsignedTransaction)) elif self.v >= 37: vee = self.v - self.network_id * 2 - 8 assert vee in (27, 28) rlpdata = rlp.encode(rlp.infer_sedes(self).serialize(self)[:-3] + [self.network_id, '', '']) sighash = sha3(rlpdata) 
else: raise InvalidTransaction("Invalid V value") if self.r >= secpk1n or self.s >= secpk1n or self.r == 0 or self.s ==
0: raise InvalidTransaction("Invalid signature values!") pub = ecrecover_to_pub(sighash, vee, self
.r, self.s) if pub == b"\x00" * 64: raise InvalidTransaction("Invalid signature (zero privkey cannot sign)") self._sender = sha3(pub)[-20:] return self._sender @property def network_id(self): if self.r == 0 and self.s == 0: return self.v elif self.v in (27, 28): return None else: return ((self.v - 1) // 2) - 17 @sender.setter def sender(self, value): self._sender = value def sign(self, key, network_id=None): """Sign this transaction with a private key. A potentially already existing signature would be overridden. """ if network_id is None: rawhash = sha3(rlp.encode(self, UnsignedTransaction)) else: assert 1 <= network_id < 2**63 - 18 rlpdata = rlp.encode(rlp.infer_sedes(self).serialize(self)[:-3] + [network_id, b'', b'']) rawhash = sha3(rlpdata) key = normalize_key(key) self.v, self.r, self.s = ecsign(rawhash, key) if network_id is not None: self.v += 8 + network_id * 2 self._sender = privtoaddr(key) return self @property def hash(self): return sha3(rlp.encode(self)) def to_dict(self): d = {} for name, _ in self.__class__.fields: d[name] = getattr(self, name) if name in ('to', 'data'): d[name] = '0x' + encode_hex(d[name]) d['sender'] = '0x' + encode_hex(self.sender) d['hash'] = '0x' + encode_hex(self.hash) return d @property def intrinsic_gas_used(self): num_zero_bytes = str_to_bytes(self.data).count(ascii_chr(0)) num_non_zero_bytes = len(self.data) - num_zero_bytes return (opcodes.GTXCOST # + (0 if self.to else opcodes.CREATE[3]) + opcodes.GTXDATAZERO * num_zero_bytes + opcodes.GTXDATANONZERO * num_non_zero_bytes) @property def creates(self): "returns the address of a contract created by this tx" if self.to in (b'', '\0' * 20): return mk_contract_address(self.sender, self.nonce) def __eq__(self, other): return isinstance(other, self.__class__) and self.hash == other.hash def __lt__(self, other): return isinstance(other, self.__class__) and self.hash < other.hash def __hash__(self): return big_endian_to_int(self.hash) def __ne__(self, other): return not 
self.__eq__(other) def __repr__(self): return '<Transaction(%s)>' % encode_hex(self.hash)[:4] def __structlog__(self): return encode_hex(self.hash) # This method should be called for block numbers >= HOMESTEAD_FORK_BLKNUM only. # The >= operator is replaced by > because the integer division N/2 always produces the value # which is by 0.5 less than the real N/2 def check_low_s_metropolis(self): if self.s > secpk1n // 2: raise InvalidTransaction("Invalid signature S value!") def check_low_s_ho
# This file is part of the Trezor project.
#
# Copyright (C) 2012-2018 SatoshiLabs and contributors
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License version 3
# as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the License along with this library.
# If not, see <https://www.gnu.org/licenses/lgpl-3.0.html>.

from . import messages
from .tools import expect

#
# Ontology functions
#
# Each wrapper sends one protobuf request via client.call(); the @expect
# decorator (from .tools) checks the response message type and, where a
# `field` is given, extracts that field from it.


@expect(messages.OntologyAddress, field="address")
def get_address(client, address_n, show_display=False):
    """Return the Ontology address for BIP-32 path `address_n`."""
    return client.call(
        messages.OntologyGetAddress(address_n=address_n, show_display=show_display)
    )


@expect(messages.OntologyPublicKey)
def get_public_key(client, address_n, show_display=False):
    """Return the OntologyPublicKey response for path `address_n`."""
    return client.call(
        messages.OntologyGetPublicKey(address_n=address_n, show_display=show_display)
    )


@expect(messages.OntologySignedTransfer)
def sign_transfer(client, address_n, t, tr):
    """Sign transfer `tr` within transaction `t` with the key at `address_n`."""
    return client.call(
        messages.OntologySignTransfer(address_n=address_n, transaction=t, transfer=tr)
    )


@expect(messages.OntologySignedWithdrawOng)
def sign_withdrawal(client, address_n, t, w):
    """Sign an ONG withdrawal `w` within transaction `t`."""
    return client.call(
        messages.OntologySignWithdrawOng(
            address_n=address_n, transaction=t, withdraw_ong=w
        )
    )


@expect(messages.OntologySignedOntIdRegister)
def sign_register(client, address_n, t, r):
    """Sign an ONT ID registration `r` within transaction `t`."""
    return client.call(
        messages.OntologySignOntIdRegister(
            address_n=address_n, transaction=t, ont_id_register=r
        )
    )


@expect(messages.OntologySignedOntIdAddAttributes)
def sign_add_attr(client, address_n, t, a):
    """Sign an ONT ID add-attributes operation `a` within transaction `t`."""
    return client.call(
        messages.OntologySignOntIdAddAttributes(
            address_n=address_n, transaction=t, ont_id_add_attributes=a
        )
    )
# Diagonal sudoku solver (constraint propagation + depth-first search).
assignments = []

rows = 'ABCDEFGHI'
cols = '123456789'


def assign_value(values, box, value):
    """
    Please use this function to update your values dictionary!
    Assigns a value to a given box. If it updates the board record it.
    """
    # Don't waste memory appending actions that don't actually change any values
    if values[box] == value:
        return values

    values[box] = value
    if len(value) == 1:
        assignments.append(values.copy())
    return values


def naked_twins(values):
    """Eliminate values using the naked twins strategy.
    Args:
        values(dict): a dictionary of the form {'box_name': '123456789', ...}

    Returns:
        the values dictionary with the naked twins eliminated from peers.
    """
    # Find all instances of naked twins.
    # BUG FIX: each unordered pair is now recorded once; the original scan
    # appended every pair twice ([a,b] and later [b,a]), doubling the
    # elimination work below for no change in result.
    twin_pairs = set()
    for box in boxes:
        if len(values[box]) == 2:
            for peer in peers[box]:
                if values[peer] == values[box]:
                    twin_pairs.add(frozenset((box, peer)))

    # Eliminate the naked twins as possibilities for their common peers.
    for pair in twin_pairs:
        a, b = tuple(pair)
        # intersect list of twins' peers for common units
        twins_peers = set(peers[a]).intersection(set(peers[b]))
        for peer in twins_peers:
            # values[a] == values[b], so either twin's digits can be used.
            for v in values[a]:
                values = assign_value(values, peer, values[peer].replace(v, ''))
    return values


def cross(A, B):
    "Cross product of elements in A and elements in B."
    return [s+t for s in A for t in B]


def diag(A, B):
    "Diagonals of A elements with elements in B."
    return [A[r]+B[c] for r in range(len(A)) for c in range(len(B)) if r == c]


def grid_values(grid):
    """
    Convert grid into a dict of {square: char} with '123456789' for empties.
    Args:
        grid(string) - A grid in string form.
    Returns:
        A grid in dictionary form
            Keys: The boxes, e.g., 'A1'
            Values: The value in each box, e.g., '8'. If the box has no value,
            then the value will be '123456789'.
    """
    return dict((boxes[i], grid[i] if (grid[i] != '.') else '123456789')
                for i in range(len(boxes)))


def display(values):
    """
    Display the values as a 2-D grid.
    Args:
        values(dict): The sudoku in dictionary form
    """
    width = 1+max(len(values[s]) for s in boxes)
    line = '+'.join(['-'*(width*3)]*3)
    for r in rows:
        print(''.join(values[r+c].center(width)+('|' if c in '36' else '')
                      for c in cols))
        if r in 'CF':
            print(line)
    return


def eliminate(values):
    """Remove each solved box's digit from all of its peers."""
    for box, value in values.items():
        if len(value) == 1:
            for peer in peers[box]:
                values = assign_value(values, peer, values[peer].replace(value, ''))
    return values


def only_choice(values):
    """Assign a digit to a box when it is the only box in a unit that allows it."""
    for box, v in values.items():
        if len(v) > 1:
            for unit in units[box]:
                # Digits still possible elsewhere in this unit.
                pval = str().join(values[key] for key in unit if key != box)
                d = [val for val in v if val not in pval]
                if len(d) == 1:
                    values = assign_value(values, box, d[0])
    return values


def reduce_puzzle(values):
    """Iterate the constraint-propagation strategies until no progress is made.

    Returns False when a box loses all candidates (contradiction)."""
    stalled = False
    while not stalled:
        # Check how many boxes have a determined value
        solved_values_before = len([box for box in values.keys() if len(values[box]) == 1])
        # Use the Eliminate Strategy
        values = eliminate(values)
        # Use the Only Choice Strategy
        values = only_choice(values)
        # Use the Naked Twins Strategy
        values = naked_twins(values)
        # Check how many boxes have a determined value, to compare
        solved_values_after = len([box for box in values.keys() if len(values[box]) == 1])
        # If no new values were added, stop the loop.
        stalled = solved_values_before == solved_values_after
        # Sanity check, return False if there is a box with zero available values:
        if len([box for box in values.keys() if len(values[box]) == 0]):
            return False
    return values


def search(values):
    """Depth-first search over candidate assignments, reducing at each node."""
    # First, reduce the puzzle using the previous function
    values = reduce_puzzle(values)
    if not values:
        return False
    # Return solution if all box have unique value
    if all(len(v) == 1 for v in values.values()):
        return values
    # Choose one of the unfilled squares with the fewest possibilities
    _, box = min((len(v), k) for k, v in values.items() if len(v) > 1)
    # Now use recursion to solve each one of the resulting sudokus, and if
    # one returns a value (not False), return that answer!
    for val in values[box]:
        new_values = values.copy()
        new_values[box] = val
        res = search(new_values)
        if res:
            return res


def solve(grid):
    """
    Find the solution to a Sudoku grid.
    Args:
        grid(string): a string representing a sudoku grid.
            Example: '2.............62....1....7...6..8...3...9...7...6..4...4....8....52.............3'
    Returns:
        The dictionary representation of the final sudoku grid. False if no solution exists.
    """
    return search(grid_values(grid))


# Module-level board structure: all boxes, the row/column/square units plus
# the two main diagonals (this is a *diagonal* sudoku), and each box's peers.
boxes = cross(rows, cols)
row_units = [cross(r, cols) for r in rows]
column_units = [cross(rows, c) for c in cols]
square_units = [cross(rs, cs) for rs in ('ABC', 'DEF', 'GHI') for cs in ('123', '456', '789')]
diag_units = [diag(rows, cols)] + [diag(rows, cols[::-1])]
unitlist = row_units + column_units + square_units + diag_units
units = dict((s, [u for u in unitlist if s in u]) for s in boxes)
peers = dict((s, set(sum(units[s], []))-set([s])) for s in boxes)

if __name__ == '__main__':
    diag_sudoku_grid = '2.............62....1....7...6..8...3...9...7...6..4...4....8....52.............3'
    display(solve(diag_sudoku_grid))

    try:
        from visualize import visualize_assignments
        visualize_assignments(assignments)
    except SystemExit:
        pass
    # BUG FIX: was a bare 'except:', which would also swallow
    # KeyboardInterrupt; only ordinary errors (e.g. pygame issues) belong here.
    except Exception:
        print('We could not visualize your board due to a pygame issue. Not a problem! It is not a requirement.')
import click
import pickle

# Imported for its side effect: unpickling the cache requires the Build
# class to be importable -- presumably the cached objects are Build
# instances; verify against the code that writes the cache.
from build import Build


@click.group()
def cli():
    """Command group for inspecting a pickled build cache."""
    pass


@cli.command()
@click.option('--cache-file', default='test-cache',
              help='Path to the pickled build-object cache.')
@click.option('--query',
              help="Filter of the form ATTR=SUBSTRING.")
def query(cache_file, query):
    """Print every cached build whose ATTR contains SUBSTRING."""
    # Parse the filter before opening the file so a malformed --query
    # fails fast instead of after the (potentially slow) unpickle.
    key, criteria = query.split('=')
    # SECURITY: pickle.load executes arbitrary code from the cache file;
    # only run this against caches you produced yourself.
    with open(cache_file, 'rb') as f:
        buildobjs = pickle.load(f)
    for name, build in buildobjs.items():
        # Missing attributes count as an empty string, i.e. never match.
        item = getattr(build, key, '')
        if criteria in item:
            print(build, item)


if __name__ == '__main__':
    # Guarded entry point: the original called cli() unconditionally, which
    # executed the CLI (and exited the interpreter) on mere import.
    cli()
from dnfpyUtils.stats.statistic import Statistic
import numpy as np


class Trajectory(Statistic):
    """
    Abstract class for trajectory

    Accumulates a time trace of values in ``self.trace`` (a plain list,
    possibly containing NaN entries) and exposes summary statistics over it.
    Subclasses are expected to append to ``self.trace``; this base class
    only stores and summarizes it.
    """
    def __init__(self,name,dt=0.1,dim=0,**kwargs):
        # size=0: this statistic carries no map data of its own.
        super().__init__(name=name,size=0,dim=dim,dt=dt,**kwargs)
        self.trace = [] #save the trace

    def getViewData(self):
        # Expose the current raw value (set by subclasses / reset below).
        return self._data#,self.getMean()

    def reset(self):
        # Clear the accumulated trace and mark the current value as unknown.
        super().reset()
        self.trace = []
        self._data = np.nan

    def getMean(self):
        # NaN-aware mean of the whole trace.
        return np.nanmean(self.trace)

    def getRMSE(self):
        # NOTE(review): sqrt(mean(trace)) is an RMSE only if the trace
        # already contains *squared* errors -- confirm in subclasses.
        return np.sqrt(np.nanmean(self.trace))

    def getCount(self):
        # Number of non-NaN samples recorded so far.
        return np.sum(~np.isnan(self.trace))

    def getMax(self):
        # NOTE(review): not NaN-aware (np.max, not np.nanmax) unlike
        # getMean/getPercentile; a single NaN propagates. Confirm intended.
        return np.max(self.trace)

    def getPercentile(self,percent):
        # NaN-aware percentile (percent in [0, 100]).
        return np.nanpercentile(self.trace,percent)

    def getMin(self):
        # NOTE(review): not NaN-aware, see getMax.
        return np.min(self.trace)

    def getStd(self):
        # NOTE(review): not NaN-aware, see getMax.
        return np.std(self.trace)

    def getTrace(self):
        """
        Return the time trace of the statistic
        """
        return self.trace
ctName("addAxialAction")  # NOTE(review): continuation of a statement whose start is outside this chunk
        # Context-menu actions for adding each scene orientation.
        # (PyQt4-era API: QApplication.translate with UnicodeUTF8, old-style signals.)
        self.addCoronalAction = QtGui.QAction(self)
        self.addCoronalAction.setText(QtGui.QApplication.translate("MWindow", "Coronal", None, QtGui.QApplication.UnicodeUTF8))
        self.addCoronalAction.setIconVisibleInMenu(True)
        self.addCoronalAction.setObjectName("addCoronalAction")
        self.addSagittalAction = QtGui.QAction(self)
        self.addSagittalAction.setText(QtGui.QApplication.translate("MWindow", "Sagittal", None, QtGui.QApplication.UnicodeUTF8))
        self.addSagittalAction.setIconVisibleInMenu(True)
        self.addSagittalAction.setObjectName("addSagittalAction")
        self.addVolumeAction = QtGui.QAction(self)
        self.addVolumeAction.setText(QtGui.QApplication.translate("MWindow", "Volume", None, QtGui.QApplication.UnicodeUTF8))
        self.addVolumeAction.setIconVisibleInMenu(True)
        self.addVolumeAction.setObjectName("addVolumeAction")
        # Assemble the right-click context menu: tab actions plus an
        # "Add Scene" submenu with the four orientations.
        self.contextMenu = QtGui.QMenu(self)
        self.contextMenu.addAction(self.renameAction)
        self.contextMenu.addAction(self.resetAction)
        self.contextMenu.addAction(self.duplicateAction)
        self.contextMenu.addAction(self.closeAction)
        self.contextMenu.setIcon(icon1)
        windowMenu = QtGui.QMenu(self.contextMenu)
        windowMenu.addAction(self.addAxialAction)
        windowMenu.addAction(self.addCoronalAction)
        windowMenu.addAction(self.addSagittalAction)
        windowMenu.addAction(self.addVolumeAction)
        windowMenu.setTitle(QtGui.QApplication.translate("MWindow", "Add Scene", None, QtGui.QApplication.UnicodeUTF8))
        self.contextMenu.addAction(windowMenu.menuAction())

    def createActions(self):
        """Wire tab-widget signals and the rename dialog's buttons."""
        logging.debug("In MWindow::createActions()")
        self.connect(self, QtCore.SIGNAL("tabCloseRequested(int)"), self.slotTabCloseRequested)
        self.connect(self, QtCore.SIGNAL("currentChanged(int)"), self.slotTabChanged)
        # Intercept mouse releases on the tab widget to show the context menu.
        self.mouseReleaseEvent = self.rightClickAction
        self.connect(self.rename.Ok, QtCore.SIGNAL("clicked()"), self.slotRenameOkButtonClicked)
        self.connect(self.rename.Cancel, QtCore.SIGNAL("clicked()"), self.slotRenameCancelButtonClicked)

    def rightClickAction(self, event):
        """Show the context menu on right-click and dispatch the chosen action."""
        # NOTE(review): 2 is the right mouse button -- Qt.RightButton would
        # be clearer than the magic number.
        if event.button() == 2:
            pos = QtGui.QCursor.pos()
            # exec_ blocks until the user picks an action (or dismisses).
            result = self.contextMenu.exec_(pos)
            if result == self.renameAction:
                # Pre-fill the rename dialog with the current tab title.
                self.rename.newName.setText(self.tabText(self.currentIndex()))
                self.rename.show()
            elif result == self.closeAction:
                self.close()
            elif result == self.resetAction:
                self.reset()
            elif result == self.duplicateAction:
                self.duplicate()
            elif result == self.addAxialAction:
                mscreen = self._mScreens[self.currentIndex()]
                mscreen.createScene(VtkImagePlane.PLANE_ORIENTATION_AXIAL)
            elif result == self.addCoronalAction:
                mscreen = self._mScreens[self.currentIndex()]
                mscreen.createScene(VtkImagePlane.PLANE_ORIENTATION_CORONAL)
            elif result == self.addSagittalAction:
                mscreen = self._mScreens[self.currentIndex()]
                mscreen.createScene(VtkImagePlane.PLANE_ORIENTATION_SAGITTAL)
            elif result == self.addVolumeAction:
                mscreen = self._mScreens[self.currentIndex()]
                mscreen.createScene(VtkImagePlane.PLANE_ORIENTATION_VOLUME)

    def updateWidgets(self):
        """Enable closable, reorderable tabs."""
        logging.debug("In MWindow::updateWidgets()")
        self.setTabsClosable(True)
        self.setMovable(True)

    def slotTabCloseRequested(self, index):
        """Close the tab at `index` unless a tool holds a lock on it."""
        logging.debug("In MWindow::slotTabCloseRequested()")
        mScreen = self.widget(index)
        # Refuse to close the main screen or any screen still referenced.
        if mScreen.main or mScreen.references > 0:
            QtGui.QMessageBox.critical(self, QtGui.QApplication.translate(
                                           "Implant", "Error", None,
                                           QtGui.QApplication.UnicodeUTF8),
                                       QtGui.QApplication.translate(
                                           "Implant",
                                           "Some tool is locking this tab and it cannot be closed.",
                                           None,
                                           QtGui.QApplication.UnicodeUTF8))
            return
        mScreen.close()
        self._mScreens.remove(mScreen)
        self.removeTab(index)
        # Closing the last tab closes the whole window.
        if self.currentIndex() == -1:
            self.close()

    def slotRenameOkButtonClicked(self):
        """Apply the rename dialog's new name to the current tab/screen."""
        logging.debug("In MWindow::slotRenameOkButtonClicked()")
        self.setTabText(self.currentIndex(), self.rename.newName.text())
        mScreen = self.widget(self.currentIndex())
        mScreen.name = self.rename.newName.text()
        self.rename.hide()

    def slotRenameCancelButtonClicked(self):
        """Dismiss the rename dialog without changes."""
        logging.debug("In MWindow::slotRenameCancelButtonClicked()")
        self.rename.hide()

    def slotTabChanged(self, index):
        # NOTE(review): debug string says slotTabCloseRequested -- copy/paste
        # slip; should read "In MWindow::slotTabChanged()".
        logging.debug("In MWindow::slotTabCloseRequested()")
        if self._mScreens:
            self.currentTab().updateWidgets()

    def allTabs(self):
        """Return the list of all MScreen tabs."""
        logging.debug("In MWindow::allTabs()")
        return self._mScreens

    def currentTab(self):
        """Return the MScreen of the currently selected tab."""
        logging.debug("In MWindow::currentTab()")
        return self._mScreens[self.currentIndex()]

    def createMScreensFromImagedata(self, imagedata, cubeCorners=None, name=None, generate3D=1):
        """Create a new MScreen tab for `imagedata` and return it.

        Always creates the axial scene; when `generate3D` is truthy also
        creates volume, coronal and sagittal scenes.
        """
        logging.debug("In MWindow::createMScreensFromImagedata()")
        i = self.count()
        # NOTE(review): the `name` parameter is accepted but unconditionally
        # overwritten here -- confirm whether callers expect it honored.
        name = QtGui.QApplication.translate("MWindow", "Region {0}", None,
                                            QtGui.QApplication.UnicodeUTF8).format(i)
        screen = MScreen(mWindow=self, vtkImageData=imagedata,
                         cubeCorners=cubeCorners, name=name)
        screen.createScene(VtkImagePlane.PLANE_ORIENTATION_AXIAL)
        if generate3D:
            screen.createScene(VtkImagePlane.PLANE_ORIENTATION_VOLUME)
        screen.createScene(VtkImagePlane.PLANE_ORIENTATION_CORONAL)
        screen.createScene(VtkImagePlane.PLANE_ORIENTATION_SAGITTAL)
        self.addTab(screen, name)
        return screen

    def reset(self):
        # Delegate to the active tab.
        self.currentTab().reset()

    def duplicate(self):
        # Delegate to the active tab.
        self.currentTab().duplicate()

    def save(self):
        """Serialize this window (vti path, camera bindings, all screens) to a dict."""
        logging.debug("In MWindow::save()")
        save = {"vti": self._vtiPath}
        mscreens = []
        save["mScreens"] = mscreens
        # Persist which camera action is bound to each mouse control.
        save["camera"] = [self.cameraController.getActiveAction(self.cameraController.BUTTON_LEFT),
                          self.cameraController.getActiveAction(self.cameraController.BUTTON_RIGHT),
                          self.cameraController.getActiveAction(self.cameraController.BUTTON_MIDDLE),
                          self.cameraController.getActiveAction(self.cameraController.BUTTON_SCROLL)]
        for i, screen in enumerate(self._mScreens):
            mscreens.append(screen.save(self._yamlPath, i, self.tabText(i)))
        return save

    @property
    def ilsa(self):
        # Read-only accessor for the ILSA plugin/service handle.
        logging.debug("In MWindow::ilsa()")
        return self._ilsa

    @property
    def yamlPath(self):
        # Read-only accessor for the project's yaml path.
        logging.debug("In MWindow::yamlPath.getter()")
        return self._yamlPath

    @property  # NOTE(review): decorator for a property defined beyond this chunk
        make_consistencygroup(root)  # NOTE(review): continuation of a construct() whose def is outside this chunk
        alias = Consistencygroups.alias
        namespace = Consistencygroups.namespace
        return xmlutil.MasterTemplate(root, 1, nsmap={alias: namespace})


class ConsistencyGroupsTemplate(xmlutil.TemplateBuilder):
    """XML serialization template for a list of consistency groups."""

    def construct(self):
        root = xmlutil.TemplateElement('consistencygroups')
        # One <consistencygroup> element per item in the list selector.
        elem = xmlutil.SubTemplateElement(root, 'consistencygroup',
                                          selector='consistencygroups')
        make_consistencygroup(elem)
        alias = Consistencygroups.alias
        namespace = Consistencygroups.namespace
        return xmlutil.MasterTemplate(root, 1, nsmap={alias: namespace})


class ConsistencyGroupFromSrcTemplate(xmlutil.TemplateBuilder):
    """XML serialization template for a consistency group created from a source."""

    def construct(self):
        root = xmlutil.TemplateElement('consistencygroup-from-src',
                                       selector='consistencygroup-from-src')
        make_consistencygroup_from_src(root)
        alias = Consistencygroups.alias
        namespace = Consistencygroups.namespace
        return xmlutil.MasterTemplate(root, 1, nsmap={alias: namespace})


class CreateDeserializer(wsgi.MetadataXMLDeserializer):
    """Deserialize an XML create-consistency-group request body."""

    def default(self, string):
        dom = utils.safe_minidom_parse_string(string)
        consistencygroup = self._extract_consistencygroup(dom)
        return {'body': {'consistencygroup': consistencygroup}}

    def _extract_consistencygroup(self, node):
        """Marshal the consistency group attributes of a parsed request."""
        consistencygroup = {}
        consistencygroup_node = self.find_first_child_named(
            node, 'consistencygroup')
        # Only these attributes are accepted; absent/empty ones are dropped.
        attributes = ['name', 'description']
        for attr in attributes:
            if consistencygroup_node.getAttribute(attr):
                consistencygroup[attr] = consistencygroup_node.\
                    getAttribute(attr)
        return consistencygroup


class CreateFromSrcDeserializer(wsgi.MetadataXMLDeserializer):
    """Deserialize an XML create-from-source request body."""

    def default(self, string):
        dom = utils.safe_minidom_parse_string(string)
        consistencygroup = self._extract_consistencygroup(dom)
        retval = {'body': {'consistencygroup-from-src': consistencygroup}}
        return retval

    def _extract_consistencygroup(self, node):
        """Marshal the consistency group attributes of a parsed request."""
        consistencygroup = {}
        consistencygroup_node = self.find_first_child_named(
            node, 'consistencygroup-from-src')
        # Same as CreateDeserializer, plus the source snapshot reference.
        attributes = ['cgsnapshot', 'name', 'description']
        for attr in attributes:
            if consistencygroup_node.getAttribute(attr):
                consistencygroup[attr] = (
                    consistencygroup_node.getAttribute(attr))
        return consistencygroup


class ConsistencyGroupsController(wsgi.Controller):
    """The ConsistencyGroups API controller for the OpenStack API."""

    _view_builder_class = consistencygroup_views.ViewBuilder

    def __init__(self):
        self.consistencygroup_api = consistencygroupAPI.API()
        super(ConsistencyGroupsController, self).__init__()

    @wsgi.serializers(xml=ConsistencyGroupTemplate)
    def show(self, req, id):
        """Return data about the given consistency group."""
        LOG.debug('show called for member %s', id)
        context = req.environ['cinder.context']

        try:
            consistencygroup = self.consistencygroup_api.get(
                context,
                group_id=id)
        except exception.ConsistencyGroupNotFound as error:
            # Map the domain error to a 404 for the API client.
            raise exc.HTTPNotFound(explanation=error.msg)

        return self._view_builder.detail(req, consistencygroup)

    def delete(self, req, id, body):
        """Delete a consistency group."""
        LOG.debug('delete called for member %s', id)
        context = req.environ['cinder.context']
        force = False
        if body:
            # NOTE(review): assumes the body, when present, contains a
            # 'consistencygroup' key -- a malformed body raises KeyError
            # (HTTP 500) rather than 400. Confirm upstream validation.
            cg_body = body['consistencygroup']
            force = cg_body.get('force', False)

        LOG.info(_LI('Delete consistency group with id: %s'), id,
                 context=context)

        try:
            group = self.consistencygroup_api.get(context, id)
            self.consistencygroup_api.delete(context, group, force)
        except exception.ConsistencyGroupNotFound:
            msg = _("Consistency group %s could not be found.") % id
            raise exc.HTTPNotFound(explanation=msg)
        except exception.InvalidConsistencyGroup as error:
            raise exc.HTTPBadRequest(explanation=error.msg)

        # 202: deletion proceeds asynchronously.
        return webob.Response(status_int=202)

    @wsgi.serializers(xml=ConsistencyGroupsTemplate)
    def index(self, req):
        """Returns a summary list of consistency groups."""
        return self._get_consistencygroups(req, is_detail=False)

    @wsgi.serializers(xml=ConsistencyGroupsTemplate)
    def detail(self, req):
        """Returns a detailed list of consistency groups."""
        return self._get_consistencygroups(req, is_detail=True)

    def _get_consistencygroups(self, req, is_detail):
        """Returns a list of consistency groups through view builder."""
        context = req.environ['cinder.context']
        consistencygroups = self.consistencygroup_api.get_all(context)
        # Apply the request's pagination limits before building the view.
        limited_list = common.limited(consistencygroups, req)

        if is_detail:
            consistencygroups = self._view_builder.detail_list(req,
                                                               limited_list)
        else:
            consistencygroups = self._view_builder.summary_list(req,
                                                                limited_list)
        return consistencygroups

    @wsgi.response(202)
    @wsgi.serializers(xml=ConsistencyGroupTemplate)
    @wsgi.deserializers(xml=CreateDeserializer)
    def create(self, req, body):
        """Create a new consistency group."""
        LOG.debug('Creating new consistency group %s', body)
        if not self.is_valid_body(body, 'consistencygroup'):
            raise exc.HTTPBadRequest()

        context = req.environ['cinder.context']

        try:
            consistencygroup = body['consistencygroup']
        except KeyError:
            msg = _("Incorrect request body format")
            raise exc.HTTPBadRequest(explanation=msg)
        name = consistencygroup.get('name', None)
        description = consistencygroup.get('description', None)
        # volume_types is mandatory for a plain create (unlike create_from_src).
        volume_types = consistencygroup.get('volume_types', None)
        if not volume_types:
            msg = _("volume_types must be provided to create "
                    "consistency group %(name)s.") % {'name': name}
            raise exc.HTTPBadRequest(explanation=msg)
        availability_zone = consistencygroup.get('availability_zone', None)

        LOG.info(_LI("Creating consistency group %(name)s."),
                 {'name': name},
                 context=context)

        try:
            new_consistencygroup = self.consistencygroup_api.create(
                context, name, description, volume_types,
                availability_zone=availability_zone)
        except exception.InvalidConsistencyGroup as error:
            raise exc.HTTPBadRequest(explanation=error.msg)
        except exception.InvalidVolumeType as error:
            raise exc.HTTPBadRequest(explanation=error.msg)
        except exception.ConsistencyGroupNotFound as error:
            raise exc.HTTPNotFound(explanation=error.msg)

        retval = self._view_builder.summary(
            req,
            dict(new_consistencygroup.iteritems()))
        return retval

    @wsgi.response(202)
    @wsgi.serializers(xml=ConsistencyGroupFromSrcTemplate)
    @wsgi.deserializers(xml=CreateFromSrcDeserializer)
    def create_from_src(self, req, body):
        """Create a new consistency group from a source.

        The source can be a snapshot. It could be extended
        in the future to support other sources. Note that
        this does not require volume_types as the "create"
        API above.
        """
        LOG.debug('Creating new consistency group %s.', body)
        if not self.is_valid_body(body, 'consistencygroup-from-src'):
            raise exc.HTTPBadRequest()

        context = req.environ['cinder.context']

        try:
            consistencygroup = body['consistencygroup-from-src']
        except K
import unittest

from restkiss.preparers import Preparer, FieldsPreparer


class InstaObj(object):
    """Test helper: turns keyword arguments into instance attributes."""
    def __init__(self, **kwargs):
        for k, v in kwargs.items():
            setattr(self, k, v)


class LookupDataTestCase(unittest.TestCase):
    """Tests for FieldsPreparer.lookup_data's dotted-path resolution
    over dicts, objects, and mixtures of the two."""

    def setUp(self):
        super(LookupDataTestCase, self).setUp()
        # fields=None: only lookup_data itself is exercised here.
        self.preparer = FieldsPreparer(fields=None)
        # Attribute-access fixture, with dicts and objects nested inside.
        self.obj_data = InstaObj(
            say='what',
            count=453,
            moof={
                'buried': {
                    'id': 7,
                    'data': InstaObj(yes='no')
                }
            },
            parent=None
        )
        # Key-access fixture, with an object nested inside.
        self.dict_data = {
            'hello': 'world',
            'abc': 123,
            'more': {
                'things': 'here',
                'nested': InstaObj(
                    awesome=True,
                    depth=3
                ),
            },
            'parent': None,
        }

    def test_dict_simple(self):
        # Top-level dict keys resolve directly.
        self.assertEqual(self.preparer.lookup_data('hello', self.dict_data), 'world')
        self.assertEqual(self.preparer.lookup_data('abc', self.dict_data), 123)

    def test_obj_simple(self):
        # Top-level object attributes resolve directly.
        self.assertEqual(self.preparer.lookup_data('say', self.obj_data), 'what')
        self.assertEqual(self.preparer.lookup_data('count', self.obj_data), 453)

    def test_dict_nested(self):
        # Dotted paths traverse nested dicts and cross into objects.
        self.assertEqual(self.preparer.lookup_data('more.things', self.dict_data), 'here')
        self.assertEqual(self.preparer.lookup_data('more.nested.depth', self.dict_data), 3)

    def test_obj_nested(self):
        # Dotted paths traverse objects and cross into nested dicts.
        self.assertEqual(self.preparer.lookup_data('moof.buried.id', self.obj_data), 7)
        self.assertEqual(self.preparer.lookup_data('moof.buried.data.yes', self.obj_data), 'no')

    def test_dict_miss(self):
        # Missing dict key propagates KeyError.
        with self.assertRaises(KeyError):
            self.preparer.lookup_data('another', self.dict_data)

    def test_obj_miss(self):
        # Missing attribute propagates AttributeError.
        with self.assertRaises(AttributeError):
            self.preparer.lookup_data('whee', self.obj_data)

    def test_dict_nullable_fk(self):
        # A None value short-circuits the remainder of the path to None.
        self.assertEqual(self.preparer.lookup_data('parent.id', self.dict_data), None)

    def test_obj_nullable_fk(self):
        # Same short-circuit behavior for attribute access.
        self.assertEqual(self.preparer.lookup_data('parent.id', self.obj_data), None)

    def test_empty_lookup(self):
        # We could possibly get here in the recursion.
        self.assertEqual(self.preparer.lookup_data('', 'Last value'), 'Last value')

    def test_complex_miss(self):
        # A miss deep in a mixed path surfaces as AttributeError.
        with self.assertRaises(AttributeError):
            self.preparer.lookup_data('more.nested.nope', self.dict_data)
from huzzer.function_generator import generate_expression, generate_unary_expr
from huzzer.expressions import VariableExpression, FunctionExpression, BRANCH_EXPRESSIONS
from huzzer.namers import DefaultNamer
from huzzer import INT, BOOL

# No variables in scope for either type.
empty_variables = {
    INT: [],
    BOOL: []
}


def test_generate_unary_expr():
    """generate_unary_expr yields literal unary expressions when no
    variables exist, and prefers an in-scope variable when one does."""
    # With no variables available, INT requests produce (INT, INT) unary
    # expressions wrapping a single int literal.
    # NOTE: `type_signiature` is the library's own (misspelled) attribute name.
    ints = [generate_unary_expr(INT, empty_variables, 0) for i in range(50)]
    assert all([
        x.type_signiature == (INT, INT) and len(x.args) == 1 and type(x.args[0]) == int
        for x in ints
    ])

    bools = [generate_unary_expr(BOOL, empty_variables, 0) for i in range(10)]
    assert all([
        x.type_signiature == (BOOL, BOOL) and len(x.args) == 1 and type(x.args[0]) == bool
        for x in bools
    ])

    # With variable_probability 1, a matching in-scope variable is returned
    # itself (same object), while a type mismatch falls back to a literal.
    bool_variable = VariableExpression(BOOL, 1)
    just_bools = {
        INT: [],
        BOOL: [bool_variable]
    }
    var_expr = generate_unary_expr(BOOL, just_bools, 1)
    assert var_expr is bool_variable

    int_expr = generate_unary_expr(INT, just_bools, 1)
    assert int_expr is not bool_variable


# Signature of generate_expression, for reference:
# haskell_type,
# variables,
# functions,
# branch_expressions,
# tree_depth,
# branching_probability=0.4,
# variable_probability=0.7,
# function_call_probability=0.5
def test_generate_expression():
    """generate_expression honors the probability knobs: forced function
    calls at depth, unary fallback at depth 1, and variable preference."""
    int_function = FunctionExpression([BOOL, INT, INT], 1)
    bool_function = FunctionExpression([BOOL, BOOL, BOOL, BOOL], 2)
    functions = {
        INT: [int_function],
        BOOL: [bool_function]
    }

    # this should definitely start with the bool func, as the probabilities are one
    bool_expr = generate_expression(
        BOOL,
        empty_variables,
        functions,
        BRANCH_EXPRESSIONS,
        2,
        branching_probability=1.0,
        function_call_probability=1.0
    )
    assert type(bool_expr) == type(bool_function) and bool_expr.function_id == 2

    # At tree_depth 1 there is no room to call a function, so a unary
    # (BOOL, BOOL) expression is produced instead.
    expr = generate_expression(
        BOOL,
        empty_variables,
        functions,
        BRANCH_EXPRESSIONS,
        1,
        branching_probability=1.0,
        function_call_probability=1.0
    )
    assert expr.type_signiature == (BOOL, BOOL)
    assert type(expr) != type(bool_function)

    # With variable_probability 1 and depth 1, the in-scope variable of the
    # requested type is chosen.
    bool_variable = VariableExpression(BOOL, 1)
    int_variable = VariableExpression(INT, 2)
    variables = {
        INT: [int_variable],
        BOOL: [bool_variable]
    }
    var_expr = generate_expression(
        BOOL,
        variables,
        functions,
        BRANCH_EXPRESSIONS,
        1,
        branching_probability=1.0,
        function_call_probability=1.0,
        variable_probability=1.0
    )
    assert type(var_expr) is type(bool_variable) and var_expr.var_id == bool_variable.var_id

    # At depth 2 a function call is forced, and every argument is filled
    # from the available variables.
    func_expr_with_only_vars = generate_expression(
        BOOL,
        variables,
        functions,
        BRANCH_EXPRESSIONS,
        2,
        branching_probability=1.0,
        function_call_probability=1.0,
        variable_probability=1.0
    )
    assert type(func_expr_with_only_vars) == type(bool_function) and \
        all([arg is bool_variable for arg in func_expr_with_only_vars.args])
import os

from celery import Celery

# Must be set before the Celery app reads Django settings, so configuration
# is resolved against the project's settings module.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'persephone.settings')

app = Celery('persephone')

# All Celery settings live in Django settings under the CELERY_ prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Discover tasks.py modules in every installed Django app.
app.autodiscover_tasks()
# # Gramps - a GTK+/GNOME based genealogy program # # Copyright (C) 2002-2006 Donald N. Allingham # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later
version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-13
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#

#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
import re
from ...const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext

#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from . import Rule

#-------------------------------------------------------------------------
#
# HasIdOf
#
#-------------------------------------------------------------------------
class RegExpIdBase(Rule):
    """
    Objects with a Gramps ID that contains a substring or matches a
    regular expression.

    Base rule: the actual substring/regex matching is implemented by
    Rule.match_substring; `allow_regex` switches it into regex mode.
    """

    labels = [ _('Text:') ]
    name = 'Objects with <Id>'
    description = "Matches objects whose Gramps ID contains a substring " \
                   "or matches a regular expression"
    category = _('General filters')
    allow_regex = True

    def apply(self, db, obj):
        # Match label 0 (the user-supplied text) against the object's Gramps ID.
        return self.match_substring(0, obj.gramps_id)
import numpy as np
from . import _marching_cubes_cy


def marching_cubes(volume, level, spacing=(1., 1., 1.)):
    """
    Marching cubes algorithm to find iso-valued surfaces in 3d volumetric data

    Parameters
    ----------
    volume : (M, N, P) array of doubles
        Input data volume to find isosurfaces. Will be cast to `np.float64`.
    level : float
        Contour value to search for isosurfaces in `volume`.
    spacing : length-3 tuple of floats
        Voxel spacing in spatial dimensions corresponding to numpy array
        indexing dimensions (M, N, P) as in `volume`.

    Returns
    -------
    verts : (V, 3) array
        Spatial coordinates for V unique mesh vertices. Coordinate order
        matches input `volume` (M, N, P).
    faces : (F, 3) array
        Define triangular faces via referencing vertex indices from ``verts``.
        This algorithm specifically outputs triangles, so each face has
        exactly three indices.

    Raises
    ------
    ValueError
        If `volume` is not 3-D, or `level` lies outside the volume's
        data range.

    Notes
    -----
    The marching cubes algorithm is implemented as described in [1]_.
    A simple explanation is available here::

      http://www.essi.fr/~lingrand/MarchingCubes/algo.html

    There are several known ambiguous cases in the marching cubes algorithm.
    Using point labeling as in [1]_, Figure 4, as shown::

           v8 ------ v7
          / |       / |        y
         /  |      /  |        ^  z
        v4 ------ v3  |        | /
        |   |     |   |        |/          (note: NOT right handed!)
        |  v5 ----|- v6         ----> x
        | /       | /
        v1 ------ v2

    Most notably, if v4, v8, v2, and v6 are all >= `level` (or any
    generalization of this case) two parallel planes are generated by this
    algorithm, separating v4 and v8 from v2 and v6. An equally valid
    interpretation would be a single connected thin surface enclosing all
    four points. This is the best known ambiguity, though there are others.

    This algorithm does not attempt to resolve such ambiguities; it is a
    naive implementation of marching cubes as in [1]_, but may be a good
    beginning for work with more recent techniques (Dual Marching Cubes,
    Extended Marching Cubes, Cubic Marching Squares, etc.).

    Because of interactions between neighboring cubes, the isosurface(s)
    generated by this algorithm are NOT guaranteed to be closed,
    particularly for complicated contours. Furthermore, this algorithm does
    not guarantee a single contour will be returned. Indeed, ALL isosurfaces
    which cross `level` will be found, regardless of connectivity.

    The output is a triangular mesh consisting of a set of unique vertices
    and connecting triangles. The order of these vertices and triangles in
    the output list is determined by the position of the smallest ``x,y,z``
    (in lexicographical order) coordinate in the contour. This is a
    side-effect of how the input array is traversed, but can be relied upon.

    To quantify the area of an isosurface generated by this algorithm, pass
    the outputs directly into `skimage.measure.mesh_surface_area`.

    Regarding visualization of algorithm output, the ``mayavi`` package
    is recommended. To contour a volume named `myvolume` about the level
    0.0::

      >>> from mayavi import mlab # doctest: +SKIP
      >>> verts, tris = marching_cubes(myvolume, 0.0, (1., 1., 2.)) # doctest: +SKIP
      >>> mlab.triangular_mesh([vert[0] for vert in verts],
      ...                      [vert[1] for vert in verts],
      ...                      [vert[2] for vert in verts],
      ...                      tris) # doctest: +SKIP
      >>> mlab.show() # doctest: +SKIP

    References
    ----------
    .. [1] Lorensen, William and Harvey E. Cline. Marching Cubes: A High
           Resolution 3D Surface Construction Algorithm. Computer Graphics
           (SIGGRAPH 87 Proceedings) 21(4) July 1987, p. 163-170).

    See Also
    --------
    skimage.measure.mesh_surface_area

    """
    # Check inputs and ensure `volume` is C-contiguous for memoryviews
    if volume.ndim != 3:
        raise ValueError("Input volume must have 3 dimensions.")
    if level < volume.min() or level > volume.max():
        raise ValueError("Contour level must be within volume data range.")
    volume = np.array(volume, dtype=np.float64, order="C")

    # Extract raw triangles using marching cubes in Cython
    #   Returns a list of length-3 lists, each sub-list containing three
    #   tuples. The tuples hold (x, y, z) coordinates for triangle vertices.
    # Note: this algorithm is fast, but returns degenerate "triangles" which
    #   have repeated vertices - and equivalent vertices are redundantly
    #   placed in every triangle they connect with.
    raw_tris = _marching_cubes_cy.iterate_and_store_3d(volume, float(level),
                                                       spacing)

    # Find and collect unique vertices, storing triangle verts as indices.
    # Returns a true mesh with no degenerate faces.
    verts, faces = _marching_cubes_cy.unpack_unique_verts(raw_tris)

    return np.asarray(verts), np.asarray(faces)


def mesh_surface_area(verts, tris):
    """
    Compute surface area, given vertices & triangular faces

    Parameters
    ----------
    verts : (V, 3) array of floats
        Array containing (x, y, z) coordinates for V unique mesh vertices.
    tris : (F, 3) array of ints
        List of length-3 lists of integers, referencing vertex coordinates
        as provided in `verts`

    Returns
    -------
    area : float
        Surface area of mesh. Units now [coordinate units] ** 2.

    Notes
    -----
    The arguments expected by this function are the exact outputs from
    `skimage.measure.marching_cubes`. For unit correct output, ensure correct
    `spacing` was passed to `skimage.measure.marching_cubes`.

    This algorithm works properly only if the ``faces`` provided are all
    triangles.

    See Also
    --------
    skimage.measure.marching_cubes

    """
    # Fancy indexing to define two vector arrays from triangle vertices
    actual_verts = verts[tris]
    a = actual_verts[:, 0, :] - actual_verts[:, 1, :]
    b = actual_verts[:, 0, :] - actual_verts[:, 2, :]
    del actual_verts

    # Area of triangle in 3D = 1/2 * Euclidean norm of cross product
    return ((np.cross(a, b) ** 2).sum(axis=1) ** 0.5).sum() / 2.
#!/bin/false
# -*- coding: utf-8 -*-
# NOTE(review): Python 2 module -- relies on unichr/unicode builtins and on
# __builtins__ being subscriptable; it will not run unmodified on Python 3.

from objects.orobject import OrObject
from objects.function import Function
from objects.number import Number
from objects.file import File
from objects.inheritdict import InheritDict
from objects.ordict import OrDict
from objects.orddict import ODict
import objects.console as console
import objects.exception as exception
import objects.orstring as orstring
import types

import libbuiltin

def expose(r, n=""):
    """Wrap a Python object as an OrObject, optionally naming it `n`."""
    v = OrObject.from_py(r)
    if n:
        v.name = n
    return v

# The interpreter's builtin namespace: native implementations first...
builtin = InheritDict()
builtin.update({
    "int": expose(libbuiltin.toint),
    "num": expose(Number),
    "dict": expose(OrDict),
    "odict": expose(ODict),
    "set": expose(set),

    "io": expose(console.io),
    "file": expose(File),
    "input": expose(console.input),
    "output": expose(console.output),
    "error": expose(console.error),
    "endl": expose("\n"),

    "repr": expose(repr),
    "join": expose(libbuiltin.join),
    "range": expose(range),

    "type": expose(libbuiltin.typeof, "type"),
    "dir": expose(libbuiltin.dirof, "dir"),
    "attrs": expose(libbuiltin.attrsof, "attrs"),

    "reverse": expose(reversed),
    "sort": expose(sorted),
    "chr": expose(unichr),

    "Exception": expose(Exception),

    "hasattr": expose(OrObject.has, "hasattr"),
    "getattr": expose(OrObject.get, "getattr"),
    "setattr": expose(OrObject.set, "setattr"),
})

# ...then Python builtins re-exported as-is. The commented names document
# deliberate omissions (handled above or intentionally unsupported).
stolen_builtins = [
    'abs', 'all', 'any', 'bool',
    'callable', #buffer
    'cmp', #chr (not as unichr)
    'dict', 'divmod',
    'enumerate', #delattr
    'exit', 'filter', # frozenset
    'hash', 'id', #get/hasattr
    'iter', 'len', 'list',
    'map', 'max', 'min',
    'ord', # object
    'range', 'repr', #property
    'round', 'set', 'slice', #setattr
    'str', 'sum', 'unicode', #super
    'zip'
]

for i in stolen_builtins:
    builtin[i] = expose(__builtins__[i])
from django.contrib.auth import update_session_auth_hash

from rest_framework import serializers

from authentication.models import Account


class AccountSerializer(serializers.ModelSerializer):
    """Full Account serializer with a write-only password/confirmation pair."""

    password = serializers.CharField(write_only=True, required=False)
    confirm_password = serializers.CharField(write_only=True, required=False)

    class Meta:
        model = Account
        fields = ('id', 'email', 'username', 'created_at', 'updated_at',
                  'first_name', 'last_name', 'tagline', 'password',
                  'confirm_password', 'userType')
        read_only_fields = ('created_at', 'updated_at',)

    def create(self, validated_data):
        """Create and return a new Account from the validated payload."""
        return Account.objects.create(**validated_data)

    def update(self, instance, validated_data):
        """Update mutable fields; change the password only when both
        password fields are present and equal (a mismatch is silently
        ignored — validation is expected to happen before this point)."""
        instance.username = validated_data.get('username', instance.username)
        instance.tagline = validated_data.get('tagline', instance.tagline)
        instance.save()

        new_password = validated_data.get('password', None)
        confirmation = validated_data.get('confirm_password', None)

        if new_password and confirmation and new_password == confirmation:
            instance.set_password(new_password)
            instance.save()
            # Keep the caller's session authenticated after the hash change.
            update_session_auth_hash(self.context.get('request'), instance)
        return instance


class SimpleAccountSerializer(serializers.ModelSerializer):
    """Minimal Account serializer: id, email and username only."""

    class Meta:
        model = Account
        fields = ('id', 'email', 'username',)
#!/usr/bin/python # coding=utf-8 # Simple Steam profile checker Telegram bot # Copyright (c) 2017 EasyCoding Team # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this progr
am. If not, see <http://
# www.gnu.org/licenses/>.

from html import unescape
from re import sub
from urllib.request import Request as request, urlopen
from xml.dom import minidom


class SteamChecker:
    """Query the TEAM-FORTRESS.SU profile-check API for a Steam user and
    expose the parsed results as properties."""

    @staticmethod
    def striptags(gtstr, gtrep=''):
        """
        Strip HTML tags from string.

        :param gtstr: String to strip tags (entities are unescaped first)
        :param gtrep: Replacement for tags
        :return: String without HTML tags
        """
        return sub('<[^<]+?>', gtrep, unescape(gtstr))

    def __fetchxml(self):
        """
        Format query to API, fetch results and return them as string.

        :return: API check results (raw XML text)
        """
        apiuri = 'https://check.team-fortress.su/api.php?action=check&token=%s&id=%s' % (self.__token, self.__id)
        # BUGFIX: the two adjacent string literals previously concatenated to
        # "...rv:52.0.0)Gecko/..." with no separating space, sending a
        # malformed User-Agent header; a space is required after ")".
        req = request(apiuri, data=None, headers={'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; rv:52.0.0) '
                                                                'Gecko/20100101 Firefox/52.0.0'})
        with urlopen(req) as xmlres:
            return xmlres.read().decode('utf-8')

    @property
    def sitestatus(self):
        """
        TEAM-FORTRESS.SU user friendly status of checked user profile.

        :return: TEAM-FORTRESS.SU check results
        """
        # Map API return codes to human-readable (Russian) labels...
        stv = {
            '1': 'гарант',
            '2': 'в белом списке',
            '3': 'в чёрном списке',
            '4': 'нет в базе',
            '5': 'в чёрном списке аукциона',
            '6': 'сотрудник сайта',
            '7': 'донатер',
            '8': 'ненадёжный'
        }
        # Return result using dictionary...
        return stv[self.__sitestatus]

    @property
    def vacstatus(self):
        """
        VAC status of checked user profile.

        :return: VAC status
        """
        stv = {
            '0': 'чист',
            '1': 'забанен'
        }
        return stv[self.__vacstatus]

    @property
    def f2pstatus(self):
        """
        Free-to-Play status (has no purchased games) of checked user profile.

        :return: Free-to-Play status
        """
        stv = {
            '0': 'нет',
            '1': 'да'
        }
        return stv[self.__f2pstatus]

    @property
    def tradestatus(self):
        """
        Current trade status of checked user profile.

        :return: Trade status
        """
        stv = {
            '0': 'нет ограничений',
            '1': 'заблокирована',
            '2': 'испытательный срок'
        }
        return stv[self.__tradestatus]

    @property
    def gamebanstatus(self):
        """
        Current game bans on checked user profile.

        :return: Game bans status and their count
        """
        return 'нет' if self.__gamebans == '0' else 'есть (%s)' % self.__gamebans

    @property
    def description(self):
        """
        Formatted custom description of checked user profile.

        :return: Custom description with markup
        """
        return '`%s`' % self.striptags(self.__description, ' ') if self.__description else '*отсутствует.*'

    def __init__(self, tid, token):
        """
        Main SteamChecker constructor.

        :param tid: Profile link, username or SteamID
        :param token: API token
        """
        # Setting token and unique identifier to pseudo-private properties...
        self.__id = tid
        self.__token = token

        # Fetching XML from API...
        rxml = self.__fetchxml()

        # Parsing received XML...
        xmlp = minidom.parseString(rxml)

        # Checking API result...
        if xmlp.getElementsByTagName('qstatus')[0].firstChild.data != 'OK':
            raise Exception('Incorrect API return code')

        # Setting public fields...
        self.steamid32 = xmlp.getElementsByTagName('steamID')[0].firstChild.data
        self.steamid64 = xmlp.getElementsByTagName('steamID64')[0].firstChild.data
        self.steamidv3 = xmlp.getElementsByTagName('steamIDv3')[0].firstChild.data
        self.nickname = xmlp.getElementsByTagName('nickname')[0].firstChild.data
        self.avatar = xmlp.getElementsByTagName('avatar')[0].firstChild.data
        self.permalink = xmlp.getElementsByTagName('permalink')[0].firstChild.data
        self.srstatus = self.striptags(xmlp.getElementsByTagName('steamrep')[0].firstChild.data)

        # Setting private fields...
        self.__sitestatus = xmlp.getElementsByTagName('sitestatus')[0].firstChild.data
        self.__vacstatus = xmlp.getElementsByTagName('isbanned')[0].firstChild.data
        self.__f2pstatus = xmlp.getElementsByTagName('isf2p')[0].firstChild.data
        self.__tradestatus = xmlp.getElementsByTagName('istrbanned')[0].firstChild.data
        self.__premium = xmlp.getElementsByTagName('ispremium')[0].firstChild.data
        self.__gamebans = xmlp.getElementsByTagName('gamebans')[0].firstChild.data

        # Fetching custom description (element may be empty)...
        dcs = xmlp.getElementsByTagName('customdescr')[0].firstChild
        self.__description = dcs.data if dcs else ''
# -*- coding: utf-8 -*-
"""Tablib - JSON Support."""

import tablib
import sys
from tablib.packages import omnijson as json

title = 'json'
# NOTE: 'extentions' is misspelled, but this is the attribute name the rest
# of tablib looks up — presumably must stay as-is; confirm before renaming.
extentions = ('json', 'jsn')


def export_set(dataset):
    """Returns JSON representation of Dataset."""
    return json.dumps(dataset.dict)


def export_book(databook):
    """Returns JSON representation of Databook."""
    return json.dumps(databook._package())


def import_set(dset, in_stream):
    """Returns dataset from JSON stream."""
    dset.wipe()
    dset.dict = json.loads(in_stream)


def import_book(dbook, in_stream):
    """Returns databook from JSON stream."""
    dbook.wipe()
    for sheet in json.loads(in_stream):
        new_sheet = tablib.Dataset()
        new_sheet.title = sheet['title']
        new_sheet.dict = sheet['data']
        dbook.add_sheet(new_sheet)


def detect(stream):
    """Returns True if given stream is valid JSON."""
    try:
        json.loads(stream)
    except ValueError:
        return False
    return True
# NOTE(review): the leading "d" of "def" below was lost in this chunk of the
# source — restore "def nest_update_event_broker" before running this file.
ef nest_update_event_broker(hass, nest):
    """
    Dispatch SIGNAL_NEST_UPDATE to devices when nest stream API received data.

    Runs in its own thread.
    """
    _LOGGER.debug("Listening for nest.update_event")
    while hass.is_running:
        # Block until the nest stream API signals new data.
        nest.update_event.wait()
        if not hass.is_running:
            break
        nest.update_event.clear()
        _LOGGER.debug("Dispatching nest data update")
        dispatcher_send(hass, SIGNAL_NEST_UPDATE)
    _LOGGER.debug("Stop listening for nest.update_event")


async def async_setup(hass, config):
    """Set up Nest components from YAML configuration.

    Initializes local auth and imports the YAML config into a config entry.
    """
    if DOMAIN not in config:
        return True
    conf = config[DOMAIN]
    local_auth.initialize(hass, conf[CONF_CLIENT_ID], conf[CONF_CLIENT_SECRET])
    filename = config.get(CONF_FILENAME, NEST_CONFIG_FILE)
    access_token_cache_file = hass.config.path(filename)
    # Hand the YAML config over to the config-entry flow (import source).
    hass.async_create_task(
        hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data={"nest_conf_path": access_token_cache_file},
        )
    )
    # Store config to be used during entry setup
    hass.data[DATA_NEST_CONFIG] = conf
    return True


async def async_setup_entry(hass, entry):
    """Set up Nest from a config entry.

    Creates the Nest API client, forwards platform setup, registers the
    away-mode/ETA services, and wires the update-event listener thread to
    Home Assistant start/stop events.
    """
    nest = Nest(access_token=entry.data["tokens"]["access_token"])
    _LOGGER.debug("proceeding with setup")
    conf = hass.data.get(DATA_NEST_CONFIG, {})
    hass.data[DATA_NEST] = NestDevice(hass, conf, nest)
    if not await hass.async_add_job(hass.data[DATA_NEST].initialize):
        return False
    for component in "climate", "camera", "sensor", "binary_sensor":
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, component)
        )

    def validate_structures(target_structures):
        # Only logs unknown structure names; does not raise or filter them.
        all_structures = [structure.name for structure in nest.structures]
        for target in target_structures:
            if target not in all_structures:
                _LOGGER.info("Invalid structure: %s", target)

    def set_away_mode(service):
        """Set the away mode for a Nest structure."""
        if ATTR_STRUCTURE in service.data:
            target_structures = service.data[ATTR_STRUCTURE]
            validate_structures(target_structures)
        else:
            # No explicit targets: apply to all locally-configured structures.
            target_structures = hass.data[DATA_NEST].local_structure
        for structure in nest.structures:
            if structure.name in target_structures:
                _LOGGER.info(
                    "Setting away mode for: %s to: %s",
                    structure.name,
                    service.data[ATTR_AWAY_MODE],
                )
                structure.away = service.data[ATTR_AWAY_MODE]

    def set_eta(service):
        """Set away mode to away and include ETA for a Nest structure."""
        if ATTR_STRUCTURE in service.data:
            target_structures = service.data[ATTR_STRUCTURE]
            validate_structures(target_structures)
        else:
            target_structures = hass.data[DATA_NEST].local_structure
        for structure in nest.structures:
            if structure.name in target_structures:
                # ETA only makes sense for structures with thermostats.
                if structure.thermostats:
                    _LOGGER.info(
                        "Setting away mode for: %s to: %s",
                        structure.name,
                        AWAY_MODE_AWAY,
                    )
                    structure.away = AWAY_MODE_AWAY
                    # NOTE(review): naive UTC timestamp — presumably the Nest
                    # API expects UTC; confirm.
                    now = datetime.utcnow()
                    trip_id = service.data.get(
                        ATTR_TRIP_ID, "trip_{}".format(int(now.timestamp()))
                    )
                    eta_begin = now + service.data[ATTR_ETA]
                    eta_window = service.data.get(ATTR_ETA_WINDOW, timedelta(minutes=1))
                    eta_end = eta_begin + eta_window
                    _LOGGER.info(
                        "Setting ETA for trip: %s, "
                        "ETA window starts at: %s and ends at: %s",
                        trip_id,
                        eta_begin,
                        eta_end,
                    )
                    structure.set_eta(trip_id, eta_begin, eta_end)
                else:
                    _LOGGER.info(
                        "No thermostats found in structure: %s, "
                        "unable to set ETA",
                        structure.name,
                    )

    def cancel_eta(service):
        """Cancel ETA for a Nest structure."""
        if ATTR_STRUCTURE in service.data:
            target_structures = service.data[ATTR_STRUCTURE]
            validate_structures(target_structures)
        else:
            target_structures = hass.data[DATA_NEST].local_structure
        for structure in nest.structures:
            if structure.name in target_structures:
                if structure.thermostats:
                    trip_id = service.data[ATTR_TRIP_ID]
                    _LOGGER.info("Cancelling ETA for trip: %s", trip_id)
                    structure.cancel_eta(trip_id)
                else:
                    _LOGGER.info(
                        "No thermostats found in structure: %s, "
                        "unable to cancel ETA",
                        structure.name,
                    )

    hass.services.async_register(
        DOMAIN, SERVICE_SET_AWAY_MODE, set_away_mode, schema=SET_AWAY_MODE_SCHEMA
    )
    hass.services.async_register(
        DOMAIN, SERVICE_SET_ETA, set_eta, schema=SET_ETA_SCHEMA
    )
    hass.services.async_register(
        DOMAIN, SERVICE_CANCEL_ETA, cancel_eta, schema=CANCEL_ETA_SCHEMA
    )

    @callback
    def start_up(event):
        """Start Nest update event listener."""
        threading.Thread(
            name="Nest update listener",
            target=nest_update_event_broker,
            args=(hass, nest),
        ).start()

    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_up)

    @callback
    def shut_down(event):
        """Stop Nest update event listener."""
        # Wake the broker thread so its is_running check can exit the loop.
        nest.update_event.set()

    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shut_down)

    _LOGGER.debug("async_setup_nest is done")
    return True


class NestDevice:
    """Structure Nest functions for hass."""

    def __init__(self, hass, conf, nest):
        """Init Nest Devices."""
        self.hass = hass
        self.nest = nest
        # None means "all structures" until initialize() fills the list.
        self.local_structure = conf.get(CONF_STRUCTURE)

    def initialize(self):
        """Initialize Nest. Returns False on connection/auth failure."""
        try:
            # Do not optimize next statement, it is here for initialize
            # persistence Nest API connection.
            structure_names = [s.name for s in self.nest.structures]
            if self.local_structure is None:
                self.local_structure = structure_names
        except (AuthorizationError, APIError, socket.error) as err:
            _LOGGER.error("Connection error while access Nest web service: %s", err)
            return False
        return True

    def structures(self):
        """Generate a list of structures."""
        try:
            for structure in self.nest.structures:
                if structure.name not in self.local_structure:
                    _LOGGER.debug(
                        "Ignoring structure %s, not in %s",
                        structure.name,
                        self.local_structure,
                    )
                    continue
                yield structure
        except (AuthorizationError, APIError, socket.error) as err:
            _LOGGER.error("Connection error while access Nest web service: %s", err)

    def thermostats(self):
        """Generate a list of thermostats."""
        return self._devices("thermostats")

    def smoke_co_alarms(self):
        """Generate a list of smoke co alarms."""
        return self._devices("smoke_co_alarms")

    def cameras(self):
        """Generate a list of cameras."""
        return self._devices("cameras")

    def _devices(self, device_type):
        """Generate a list of Nest devices."""
        try:
            for structure in self.nest.structures:
                if structure.name not in self.local_structure:
# NOTE(review): _devices is truncated at this point in this chunk of the file;
# the remainder of the method lies outside the visible source.
# -*- coding: utf-8 -*-
# pytest conftest: boots Django before the test session starts.
import os

import django

# Make the shared fixtures available to every test module in this package.
from .fixtures import *  # noqa

# import pytest

# Ensure a settings module is configured; django.setup() below relies on it.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")


def pytest_configure(config):
    """pytest hook: initialize Django (app registry, settings) before tests run."""
    django.setup()
#!/usr/bin/env python
#
# Copyright 2014 (c) Lei Xu <eddyxu@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fabric tasks for managing VSFS clusters on EC2."""

from fabric.api import task
import yaml
import logging
import vsfs_ec2 as ec2

# Single shared EC2 facade used by every task below.
vsfs = ec2.VsfsEC2()

logging.basicConfig(format='[%(asctime)s](%(levelname)s) %(message)s',
                    level=logging.INFO)


@task
def help(name=''):
    """Print full information of the function. (name='task')

    Note: intentionally shadows the builtin `help` — fab task names are
    part of the CLI interface.
    """
    if not name:
        print("Use: 'fab help:func_name' for detailed help for each task.")
    else:
        print(globals()[name].__doc__)


@task
def price_history(instance='m1.small', n=10):
    """Print out the recent price history (instance='m1.small',n=10)."""
    vsfs.get_spot_price_history(instance_type=instance, num_prices=n)


@task
def spot_requests():
    """Prints all active spot instance requests."""
    vsfs.get_all_spot_requests()


@task
def instances(state='running'):
    """Prints the information of instance."""
    vsfs.get_all_instances(state)


@task
def image_list():
    """Prints all images."""
    vsfs.print_all_images()


@task
def image_create(price=0.01, spot='yes', revision='HEAD', branch='master'):
    """Creates an VSFS image using Spot Instance (price=0.01,spot=yes/no).

    Options:
    @param spot set to 'yes' to use spot instance, set to 'no' to use
           on-demand instance. default: 'yes'
    @param price the bid price for spot instance. default: 0.01
    @param branch git branch of the vsfs source.
    @param revision the git revsion of the vsfs source.

    NOTE: `revision` and `branch` are currently accepted but unused —
    kept for CLI compatibility.
    """
    if spot == 'yes':
        vsfs.create_image_spot(price)
    else:
        vsfs.create_image()


@task
def image_delete(image_id):
    """Deleted a stored image with the given ID."""
    vsfs.delete_image(image_id)


@task
def security_group_list():
    """List out all security groups."""
    vsfs.print_security_groups()


@task
def cluster_start(ami, nmaster, nindexd, nclient, yaml='example.yaml'):
    """Starts a cluster (ami='', nmaster=0, nindexd=0, nclient=0, \
yaml='example.yaml')

    Configuration of cluster is defined in 'example.yaml'

    NOTE: the `yaml` parameter shadows the yaml module inside this
    function; the name is kept because fab passes it by keyword.
    """
    num_masters = int(nmaster)
    num_indexd = int(nindexd)
    num_client = int(nclient)
    vsfs.start_cluster(ami, num_masters, num_indexd, num_client,
                       conf_yaml=yaml)


@task
def vpc_list():
    """Prints all available VPC and its detailed information."""
    vsfs.print_vpcs()


@task
def vpc_create():
    """Creates a 10.0.0.0/22 virtual private cluster (VPC)."""
    vsfs.create_vpc()


@task
def vpc_clear():
    """Removes all virtual private clusters."""
    vsfs.remove_vpcs()


@task
def list_x509_certifications():
    """Prints the locations of the X.509 certifications."""
    # BUGFIX: was a Python 2 `print x` statement, inconsistent with the
    # print(...) calls used elsewhere in this file; the call form behaves
    # identically on Python 2 (single argument) and Python 3.
    print(vsfs.locate_x509_certifications())


@task
def s3_space():
    """Calculate s3 space consumption."""
    vsfs.print_s3_space()


@task
def volume_list():
    """List all volumes"""
    vsfs.print_volumes()


@task
def volume_create(ami, price, volsize):
    """Creates a new EBS volume and format it (param: ami, price, volsize)"""
    vsfs.create_volume_spot(ami, price, volsize)


@task
def elastic_ip_list():
    """List all elastic ips."""
    vsfs.print_elastic_ips()


@task
def test_run():
    """Start cluster on active instances."""
    confs = {}
    with open('test.yaml') as fobj:
        # SECURITY: yaml.load without an explicit Loader can construct
        # arbitrary Python objects from the file; use yaml.safe_load if
        # test.yaml never needs custom tags.
        confs = yaml.load(fobj.read())
    vsfs.start_test_cluster(confs)
""" This file tests the MNISTPlus class. majorly concerning the X and y member of the dataset and their corresponding sizes, data scales and topological views. """ from pylearn2.datasets.mnistplus import MNISTPlus from pylearn2.space import IndexSpace, VectorSpace import unittest from pylearn2.testing.skip import skip_if_no_data import numpy as np def test_MNISTPlus(): """ Test the MNISTPlus warper. Tests the scale of data, the splitting of train, valid, test sets. Tests that a topological batch has 4 dimensions. Tests that it work well with selected type of augmentation. """ skip_if_no_data() for subset in ['train', 'valid', 'test']: ids = MNISTPlus(which_set=subset) assert 0.01 >= ids.X.min() >= 0.0 assert 0.99 <= ids.X.max() <= 1.0 topo = ids.get_batch_topo(1) assert topo.ndim == 4 del ids train_y = MNISTPlus(which_set='train', label_type='
label') assert 0.99 <= train_y.X.max() <= 1.0 assert 0.0 <= train_y.X.min() <= 0.01 assert train_y.y.max() == 9 assert train_y.y.min() == 0 assert train_y.y.shape == (train_y.X.shape[0], 1) train_y = MNISTPlus(which_s
et='train', label_type='azimuth') assert 0.99 <= train_y.X.max() <= 1.0 assert 0.0 <= train_y.X.min() <= 0.01 assert 0.0 <= train_y.y.max() <= 1.0 assert 0.0 <= train_y.y.min() <= 1.0 assert train_y.y.shape == (train_y.X.shape[0], 1) train_y = MNISTPlus(which_set='train', label_type='rotation') assert 0.99 <= train_y.X.max() <= 1.0 assert 0.0 <= train_y.X.min() <= 0.01 assert train_y.y.max() == 9 assert train_y.y.min() == 0 assert train_y.y.shape == (train_y.X.shape[0], 1) train_y = MNISTPlus(which_set='train', label_type='texture_id') assert 0.99 <= train_y.X.max() <= 1.0 assert 0.0 <= train_y.X.min() <= 0.01 assert train_y.y.max() == 9 assert train_y.y.min() == 0 assert train_y.y.shape == (train_y.X.shape[0], 1)