prefix
stringlengths
0
918k
middle
stringlengths
0
812k
suffix
stringlengths
0
962k
import json
from django.db import models
from django.conf import settings
from django.utils.six import with_metaclass, text_type
from django.utils.translation import ugettext_lazy as _

from . import SirTrevorContent
from .forms import SirTrevorFormField


class SirTrevorField(with_metaclass(models.SubfieldBase, models.Field)):
    """Model field storing Sir Trevor block content in a TEXT column.

    Values are exposed to Python as ``SirTrevorContent`` and written to the
    database as their string form.
    """

    description = _("TODO")

    def get_internal_type(self):
        # Backed by a plain TEXT column.
        return 'TextField'

    def formfield(self, **kwargs):
        """Default to the Sir Trevor form field; caller kwargs win."""
        defaults = {'form_class': SirTrevorFormField}
        defaults.update(kwargs)
        return super(SirTrevorField, self).formfield(**defaults)

    def to_python(self, value):
        # SubfieldBase invokes this both on attribute assignment and on
        # load from the database.
        return SirTrevorContent(value)

    def get_db_prep_value(self, value, connection, prepared=False):
        # Persist the content's string representation.
        return text_type(value)


if 'south' in settings.INSTALLED_APPS:
    from south.modelsinspector import add_introspection_rules
    # Raw string: "\." inside a non-raw literal is an invalid escape
    # sequence (DeprecationWarning on 3.6+, SyntaxWarning on 3.12+).
    add_introspection_rules([], [r"^sirtrevor\.fields\.SirTrevorField"])
import sys
from resources.datatables import WeaponType


def setup(core, object):
    """Configure the static trader roadmap pistol (level 62 reward)."""
    # Name/description string-file entries.
    object.setStfFilename('static_item_n')
    object.setStfName('weapon_pistol_trader_roadmap_01_02')
    object.setDetailFilename('static_item_d')
    object.setDetailName('weapon_pistol_trader_roadmap_01_02')

    # Usage requirements shown as item attributes.
    object.setStringAttribute('class_required', 'Trader')
    object.setIntAttribute('required_combat_level', 62)

    # Combat statistics.
    object.setAttackSpeed(0.4)
    object.setMaxRange(35)
    object.setDamageType("energy")
    object.setMinDamage(250)
    object.setMaxDamage(500)
    object.setWeaponType(WeaponType.Pistol)
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import warnings

# Backwards-compatibility shim: the real module now lives under
# astroquery.ipac.irsa; warn callers, then re-export everything from there.
warnings.warn(
    "the ``irsa_dust`` module has been moved to "
    "astroquery.ipac.irsa.irsa_dust, please update your imports.",
    category=DeprecationWarning,
    stacklevel=2,
)

from astroquery.ipac.irsa.irsa_dust import *  # noqa: E402,F401,F403
.r = Report() self.r['ExecutablePath'] = '/usr/bin/napoleon-solod' self.r['Package'] = 'libnapoleon-solo1 1.2-1' self.r['Signal'] = '11' self.r['StacktraceTop'] = """foo_bar (x=2) at crash.c:28 d01 (x=3) at crash.c:29 raise () from /lib/libpthread.so.0 <signal handler called> __frob (x=4) at crash.c:30""" def tearDown(self): if os.path.exists(self.crash_path): os.unlink(self.crash_path) exe_crash_base = os.path.join(self.crash_base, '1_usr_bin_napoleon-solod') if os.path.exists(exe_crash_base): os.unlink(exe_crash_base) def test_create_db_default(self): try: CrashDatabase(None, {}) self.assertTrue(os.path.isfile(os.path.expanduser('~/crashdb.sqlite'))) finally: os.unlink(os.path.expanduser('~/crashdb.sqlite')) def test_crashes_base_url(self): cb = CrashDatabase(None, {'dbfile': ':memory:', 'crashes_base_url': self.crash_base_url}) self.assertEqual(cb.base_url, self.crash_base_url) def test_crashes_base_url_is_none(self): cb = CrashDatabase(None, {'dbfile': ':memory:'}) self.assertIsNone(cb.base_url) def test_upload_download(self): cb = CrashDatabase(None, {'dbfile': ':memory:', 'crashes_base_url': self.crash_base_url}) crash_id = cb.upload(self.r) self.assertEqual(crash_id, 1) report = cb.download(1) self.assertIsInstance(report, Report) self.assertIn('Signal', report) self.assertEqual(report['Signal'], '11') def test_failed_upload_no_URL(self): cb = CrashDatabase(None, {'dbfile': ':memory:'}) self.assertRaises(ValueError, cb.upload, self.r) def test_failed_upload_invalid_URL_scheme(self): cb = CrashDatabase(None, {'dbfile': ':memory:'}) self.r['_URL'] = 'invalid://scheme/path' self.assertRaises(ValueError, cb.upload, self.r) def test_failed_download(self): cb = CrashDatabase(None, {'dbfile': ':memory:'}) self.assertRaises(Exception, cb.download, 23232) def test_get_id_url(self): cb = CrashDatabase(None, {'dbfile': ':memory:'}) self.assertEqual("#1", cb.get_id_url(None, 1)) self.assertEqual("#1: napoleon-solod crashed with SIGSEGV in foo_bar()", 
cb.get_id_url(self.r, 1)) def test_update(self): """ Test complete update """ cb = CrashDatabase(None, {'dbfile': ':memory:', 'crashes_base_url': self.crash_base_url}) crash_id = cb.upload(self.r) self.r['SourcePackage'] = 'adios' self.r['Signal'] = u'9' cb.update(crash_id, self.r, 'a comment to add') report = cb.download(crash_id) self.assertIn('SourcePackage', report) self.assertEqual(report['Signal'], u'9') def test_update_with_key_filter(self): """ Test a partial update """ cb = CrashDatabase(None, {'dbfile': ':memory:', 'crashes_base_url': self.crash_base_url}) crash_id = cb.upload(self.r) self.r['SourcePackage'] = 'adios' self.r['Signal'] = u'9' cb.update(crash_id, self.r, 'a comment to add', key_filter=('Package', 'SourcePackage')) report = cb.download(crash_id) self.assertIn('SourcePackage', report) self.assertNotEqual(report['Signal'], u'9') def test_failed_update_no_URL(self): cb = CrashDatabase(None, {'dbfile': ':memory:'}) self.r['_URL'] = self.crash_base_url + 'test.crash' crash_id = cb.upload(self.r) del self.r['_URL'] self.assertRaises(ValueError, cb.update, *(crash_id, self.r, 'comment')) def test_get_distro_release(self): cb = CrashDatabase(None, {'dbfile': ':memory:', 'crashes_base_url': self.crash_base_url}) crash_id = cb.upload(self.r) self.assertIsNone(cb.get_distro_release(crash_id)) self.r['DistroRelease'] = 'Ubuntu 14.04' crash_id = cb.upload(self.r) self.assertEqual(cb.get_distro_release(crash_id), 'Ubuntu 14.04') def test_get_unretraced(self): cb = CrashDatabase(None, {'dbfile': ':memory:', 'crashes_base_url': self.crash_base_url}) self.assertEqual(cb.get_unretraced(), []) crash_id = cb.upload(self.r) self.assertEqual(cb.get_unretraced(), [crash_id]) self.r['Stacktrace'] = """ #0 0x00007f96dcfb9f77 in __GI_raise (sig=sig@entry=6) at ../nptl/sysdeps/unix/sysv/linux/raise.c:56 resultvar = 0 pid = 1427 selftid = 1427 #1 0x00007f96dcfbd5e8 in __GI_abort () at abort.c:90 save_stage = 2 act = {__sigaction_handler = {sa_handler = 0x0, 
sa_sigaction = 0x0}, sa_mask = {__val = {140286034336064, 140285996709792, 140285998988405, 5, 0, 752786625060479084, 140285929102568, 140285994568476, 140285996709792, 140285459489344, 140285999015717, 140285994520128, 140285996776629, 140285996776368, 140733249635424, 6}}, sa_flags = 56247888, sa_restorer = 0x18} sigs = {__val = {32, 0 <repeats 15 times>}} #2 0x00007f96e0deccbc in smb_panic_default (why=0x7f96e0df8b1c "internal error") at ../lib/util/fault.c:149 No locals. #3 smb_panic (why
=why@entry=0x7f96e0df8b1c "internal error") at .
./lib/util/fault.c:162 No locals. #4 0x00007f96e0dece76 in fault_report (sig=<optimized out>) at ../lib/util/fault.c:77 counter = 1 #5 sig_fault (sig=<optimized out>) at ../lib/util/fault.c:88 No locals. #6 <signal handler called> No locals. #7 0x00007f96b9bae711 in sarray_get_safe (indx=<optimized out>, array=<optimized out>) at /build/buildd/gcc-4.8-4.8.1/src/libobjc/objc-private/sarray.h:237 No locals. #8 objc_msg_lookup (receiver=0x7f96e3485278, op=0x7f96c0fae240 <_OBJC_SELECTOR_TABLE+128>) at /build/buildd/gcc-4.8-4.8.1/src/libobjc/sendmsg.c:448 No locals. #9 0x00007f96c0da737a in sogo_table_get_row (table_object=<optimized out>, mem_ctx=0x7f96e33e5940, query_type=MAPISTORE_PREFILTERED_QUERY, row_id=1, data=0x7fff035a4e00) at MAPIStoreSOGo.m:1464 e = <optimized out> ret = MAPISTORE_SUCCESS wrapper = <optimized out> pool = 0x7f96e3485278 table = <optimized out> rc = 0 __FUNCTION__ = "sogo_table_get_row" __PRETTY_FUNCTION__ = "sogo_table_get_row" """ cb.update(crash_id, self.r, "") self.assertEqual(cb.get_unretraced(), []) self.r['Stacktrace'] = """#8 0x00007ff5aae8e159 in ldb_msg_find_ldb_val (msg=<optimised out>, attr_name=<optimised out>) at ../common/ldb_msg.c:399 el = <optimised out> #9 0x00007ff5aae8e669 in ldb_msg_find_attr_as_string (msg=<optimised out>, attr_name=<optimised out>, default_value=0x0) at ../common/ldb_msg.c:584 v = <optimised out> #10 0x00007ff5905d0e5f in ?? () No symbol table info available. #11 0x0000000000000081 in ?? () No symbol table info available. #12 0x0000000000000000 in ?? 
() No symbol table info available.""" cb.update(crash_id, self.r, "") self.assertEqual(cb.get_unretraced(), [crash_id]) def test_get_unfixed(self): cb = CrashDatabase(None, {'dbfile': ':memory:', 'crashes_base_url': self.crash_base_url}) self.assertEqual(cb.get_unfixed(), set()) crash_id = cb.upload(self.r) self.assertEqual(cb.get_unfixed(), set([crash_id])) cb.close_duplicate(self.r, crash_id, crash_id) self.assertEqual(cb.get_unfixed(), set()) def test_close_duplicate(self): cb = CrashDatabase(None, {'dbfile': ':memory:', 'crashes_base_url': self.crash_base_url}) crash_id = cb.upload(self.r) self.assertIsNone(cb.duplicate_of(crash_id)) crash_id2 = cb.upload(self.r) self.assertIsNone(cb.duplicate_of(crash_id2)) cb.close_duplicate(self.r, crash_id2, crash_id) self.assertEqual(cb.duplicate_of(crash_id2), crash_id) # Remove current duplicate thing cb.close_duplicate(self.r, crash_id2, None) self.assertIsNone(cb.duplicate_of(crash_id2)) # Tests related with components def test_app_components_get_set(self): cb = CrashDatabase(N
self.assertEqual(self.lbm.ensure_vxlan(seg_id), "vxlan-" + seg_id) add_vxlan_fn.assert_called_with("vxlan-" + seg_id, seg_id, group="224.0.0.1", dev=self.lbm.local_int) cfg.CONF.set_override('l2_population', 'True', 'VXLAN') self.assertEqual(self.lbm.ensure_vxlan(seg_id), "vxlan-" + seg_id) add_vxlan_fn.assert_called_with("vxlan-" + seg_id, seg_id, group="224.0.0.1", dev=self.lbm.local_int, proxy=True) def test_update_interface_ip_details(self): gwdict = dict(gateway='1.1.1.1', metric=50) ipdict = dict(cidr='1.1.1.1/24', broadcast='1.1.1.255', scope='global', ip_version=4, dynamic=False) with contextlib.nested( mock.patch.object(ip_lib.IpAddrCommand, 'add'), mock.patch.object(ip_lib.IpAddrCommand, 'delete') ) as (add_fn, del_fn): self.lbm.update_interface_ip_details("br0", "eth0", [ipdict], None) self.assertTrue(add_fn.called) self.assertTrue(del_fn.called) with contextlib.nested( mock.patch.object(ip_lib.IpRouteCommand, 'add_gateway'), mock.patch.object(ip_lib.IpRouteCommand, 'delete_gateway') ) as (addgw_fn, delgw_fn): self.lbm.update_interface_ip_details("br0", "eth0", None, gwdict) self.assertTrue(addgw_fn.called) self.assertTrue(delgw_fn.called) def test_bridge_exists_and_ensure_up(self): ip_lib_mock = mock.Mock() with mock.patch.object(ip_lib, 'IPDevice', return_value=ip_lib_mock): # device exists self.assertTrue(self.lbm._bridge_exists_and_ensure_up("br0")) self.assertTrue(ip_lib_mock.link.set_up.called) # device doesn't exists ip_lib_mock.link.set_up.side_effect = RuntimeError self.assertFalse(self.lbm._bridge_exists_and_ensure_up("br0")) def test_ensure_bridge(self): with contextlib.nested( mock.patch.object(self.lbm, '_bridge_exists_and_ensure_up'), mock.patch.object(utils, 'execute'), mock.patch.object(self.lbm, 'update_interface_ip_details'), mock.patch.object(self.lbm, 'interface_exists_on_bridge'), mock.patch.object(self.lbm, 'is_device_on_bridge'), mock.patch.object(self.lbm, 'get_bridge_for_tap_device'), ) as (de_fn, exec_fn, upd_fn, ie_fn, 
if_br_fn, get_if_br_fn): de_fn.return_value = False exec_fn.return_value = False self.assertEqual(self.lbm.ensure_bridge("br0", None), "br0") ie_fn.return_Value = False self.lbm.ensure_bridge("br0", "eth0") upd_fn.assert_called_with("br0", "eth0", None, None) ie_fn.assert_called_with("br0", "eth0") self.lbm.ensure_bridge("br0", "eth0", "ips", "gateway") upd_fn.assert_called_with("br0", "eth0", "ips", "gateway") ie_fn.assert_called_with("br0", "eth0") exec_fn.side_effect = Exception() de_fn.return_value = True self.lbm.ensure_bridge("br0", "eth0") ie_fn.assert_called_with("br0", "eth0") exec_fn.reset_mock() exec_fn.side_effect = None de_fn.return_value = True ie_fn.return_value = False get_if_br_fn.return_value = "br1" self.lbm.ensure_bridge("br0", "eth0") expected = [ mock.call(['brctl', 'delif', 'br1', 'eth0'], root_helper=self.root_helper), mock.call(['brctl', 'addif', 'br0', 'eth0'], root_helper=self.root_helper), ] exec_fn.assert_has_calls(expected) def test_ensure_physical_in_bridge(self): self.assertFalse( self.lbm.ensure_physical_in_bridge("123", p_const.TYPE_VLAN, "phys", "1") ) with mock.patch.object(self.lbm, "ensure_flat_bridge") as flbr_fn: self.assertTrue( self.lbm.ensure_physical_in_bridge("123", p_const.TYPE_FLAT, "physnet1", None) ) self.assertTrue(flbr_fn.called) with mock.patch.object(self.lbm, "ensure_vlan_bridge") as vlbr_fn: self.assertTrue( self.lbm.ensure_physical_in_bridge("123", p_const.TYPE_VLAN, "physnet1", "1") ) self.assertTrue(vlbr_fn.called) with mock.patch.object(self.lbm, "ensure_vxlan_bridge") as vlbr_fn: self.lbm.vxlan_mode = lconst.VXLAN_MCAST self.assertTrue(
self.lbm.ensure_physical_in_bridge("123", p_const.TYPE_VXLAN, "physnet1", "1") ) self.assertTrue(vlbr_fn.called) def test_add_tap_interface(self): with m
ock.patch.object(ip_lib, "device_exists") as de_fn: de_fn.return_value = False self.assertFalse( self.lbm.add_tap_interface("123", p_const.TYPE_VLAN, "physnet1", "1", "tap1") ) de_fn.return_value = True with contextlib.nested( mock.patch.object(self.lbm, "ensure_local_bridge"), mock.patch.object(utils, "execute"), mock.patch.object(self.lbm, "get_bridge_for_tap_device") ) as (en_fn, exec_fn, get_br): exec_fn.return_value = False get_br.return_value = True self.assertTrue(self.lbm.add_tap_interface("123", p_const.TYPE_LOCAL, "physnet1", None, "tap1")) en_fn.assert_called_with("123") get_br.return_value = False exec_fn.return_value = True self.assertFalse(self.lbm.add_tap_interface("123", p_const.TYPE_LOCAL, "physnet1", None, "tap1")) with mock.patch.object(self.lbm, "ensure_physical_in_bridge") as ens_fn: ens_fn.return_value = False self.assertFalse(self.lbm.add_tap_interface("123", p_const.TYPE_VLAN, "physnet1", "1", "tap1")) def test_add_interface(self): with mock.patch.object(self.lbm, "add_tap_interface") as add_tap: self.lbm.add_interface("123", p_const.TYPE_VLAN, "physnet-1", "1", "234") add_tap.assert_called_with("123", p_const.TYPE_VLAN, "physnet-1", "1", "tap234") def test_delete_vlan_bridge(self): with contextlib.nested( mock.patch.object(ip_lib, "device_exists"), mock.patch.object(self.lbm, "get_interfaces_on_bridge"), mock.patch.object(self.lbm, "remove_interface"), mock.patch.object(self.lbm, "get_interface_details"), mock.patch.object(self.lbm, "update_interface_ip_details"), mock.patch.object(self.lbm, "delete_vxlan"),
import sys
import logging
import time
import requests

from biokbase.AbstractHandle.Client import AbstractHandle


def getStderrLogger(name, level=logging.INFO):
    """Return a logger named *name* that writes to the real stderr.

    Messages use a timestamped format with filename/lineno, timestamps in UTC.
    """
    logger = logging.getLogger(name)
    logger.setLevel(level)
    # send messages to sys.stderr (the original stream, even if redirected)
    streamHandler = logging.StreamHandler(sys.__stderr__)
    formatter = logging.Formatter(
        "%(asctime)s - %(filename)s - %(lineno)d - %(levelname)s - %(message)s")
    formatter.converter = time.gmtime
    streamHandler.setFormatter(formatter)
    logger.addHandler(streamHandler)
    return logger


def getHandles(logger,
               shock_url="https://kbase.us/services/shock-api/",
               handle_url="https://kbase.us/services/handle_service/",
               shock_ids=None,
               handle_ids=None,
               token=None):
    """Resolve Shock node ids or handle ids into handle records.

    Args:
        logger: a logging.Logger for progress/error reporting.
        shock_url: base URL of the Shock service.
        handle_url: base URL of the handle service.
        shock_ids: list of Shock node ids to register/resolve (mutually
            exclusive with handle_ids; shock_ids wins if both given).
        handle_ids: list of existing handle ids to resolve.
        token: KBase auth token (required).

    Returns:
        list of handle dicts.

    Raises:
        Exception: if token is None, or when a handle cannot be resolved.
    """
    if token is None:
        raise Exception("Authentication token required!")
    hs = AbstractHandle(url=handle_url, token=token)
    handles = list()
    if shock_ids is not None:
        header = dict()
        header["Authorization"] = "Oauth {0}".format(token)
        for sid in shock_ids:
            info = None
            # Best effort: node metadata is optional; failure only means the
            # handle is registered without file name/md5 enrichment.
            try:
                logger.info("Found shock id {0}, retrieving information about the data.".format(sid))
                response = requests.get("{0}/node/{1}".format(shock_url, sid),
                                        headers=header, verify=True)
                info = response.json()["data"]
            except Exception:
                logger.error("There was an error retrieving information about the shock node id {0} from url {1}".format(sid, shock_url))
            try:
                logger.info("Retrieving a handle id for the data.")
                handle_id = hs.persist_handle({"id": sid,
                                               "type": "shock",
                                               "url": shock_url,
                                               "file_name": info["file"]["name"],
                                               "remote_md5": info["file"]["md5"]})
                # NOTE(review): on a successful persist_handle the new handle
                # is NOT appended to `handles` (only the fallback path below
                # appends) — preserved as-is; confirm with callers.
            except Exception:
                # persist_handle fails when the node is already registered
                # (or when `info` is None); fall back to looking it up.
                try:
                    handle_id = hs.ids_to_handles([sid])[0]["hid"]
                    single_handle = hs.hids_to_handles([handle_id])
                    assert len(single_handle) != 0
                    if info is not None:
                        single_handle[0]["file_name"] = info["file"]["name"]
                        single_handle[0]["remote_md5"] = info["file"]["md5"]
                    sys.stderr.write("{0}\n".format(single_handle))
                    handles.append(single_handle[0])
                except Exception:
                    logger.error("The input shock node id {} is already registered or could not be registered".format(sid))
                    raise
    elif handle_ids is not None:
        for hid in handle_ids:
            try:
                single_handle = hs.hids_to_handles([hid])
                assert len(single_handle) != 0
                handles.append(single_handle[0])
            except Exception:
                logger.error("Invalid handle id {0}".format(hid))
                raise
    return handles
#!/usr/bin/env python
"""Expand AsciiDoc anchor cross-references.

Reads an AsciiDoc file, collects every ``[[anchor]]`` that is immediately
followed by a section title (``== Title``), then rewrites each bare
``<<anchor>>`` reference in the file as ``<<anchor, Title>>`` on stdout.
"""
import fileinput
import re
import sys

# An anchor line followed by a section-title line, e.g.
#   [[intro]]
#   == Introduction
_ANCHOR_RE = re.compile(r'\[\[(.+)\]\]\n=+ ([^\n]+)')


def build_refs(text):
    """Map '<<ref>>' -> '<<ref, Title>>' for every titled anchor in *text*."""
    refs = {}
    for ref, title in _ANCHOR_RE.findall(text):
        refs["<<" + ref + ">>"] = "<<" + ref + ", " + title + ">>"
    return refs


def main(path):
    """Rewrite cross-references of the file at *path* onto stdout."""
    # Read once and reuse; the original read the file twice from disk.
    with open(path, 'r') as fh:
        complete_file = fh.read()

    refs = build_refs(complete_file)

    def translate(match):
        # Unknown references are dropped (replaced with the empty string),
        # matching the original KeyError -> "" behavior.
        return refs.get(match.group(0), "")

    # Longest keys first so overlapping references prefer the longest match.
    rc = re.compile('|'.join(map(re.escape, sorted(refs, reverse=True))))
    for line in complete_file.splitlines(True):
        sys.stdout.write(rc.sub(translate, line))


if __name__ == "__main__":
    main(sys.argv[1])
'''
import csv
from collections import Counter

counts = Counter()
with open ('zoo.csv') as fin:
    cin = csv.reader(fin)
    for num, row in enumerate(cin):
        if num > 0:
            counts[row[0]] += int (row[-1])

for animal, hush in counts.items():
    print("%10s %10s" % (animal, hush))
'''

'''
import bubbles

p = bubbles.Pipeline()
p.source(bubbles.data_object('csv_source', 'zoo.csv', inter_field=True))
p.aggregate('animal', 'hush')
p.pretty_print()
'''


def display_shapefile(name, iwidth=500, iheight=500):
    """Render the polygons/polylines of shapefile *name* into an image and show it.

    Args:
        name: path to the shapefile (without requiring an extension).
        iwidth, iheight: output image size in pixels.
    """
    import shapefile
    from PIL import Image, ImageDraw
    r = shapefile.Reader(name)
    mleft, mbottom, mright, mtop = r.bbox
    # map units
    mwidth = mright - mleft
    mheight = mtop - mbottom
    # scale map units to image units
    hscale = iwidth / mwidth
    vscale = iheight / mheight
    img = Image.new("RGB", (iwidth, iheight), "white")
    # BUGFIX: ImageDraw is a module, not a callable — the drawing interface
    # is created via ImageDraw.Draw(image). The original raised TypeError.
    draw = ImageDraw.Draw(img)
    for shape in r.shapes():
        # Flip the y axis: map coordinates grow upward, image rows downward.
        pixels = [(int(iwidth - ((mright - x) * hscale)),
                   int((mtop - y) * vscale))
                  for x, y in shape.points]
        if shape.shapeType == shapefile.POLYGON:
            draw.polygon(pixels, outline='black')
        elif shape.shapeType == shapefile.POLYLINE:
            draw.line(pixels, fill='black')
    img.show()


if __name__ == "__main__":
    import sys
    display_shapefile(sys.argv[1], 700, 700)
#!/usr/bin/python
# -*- coding: utf-8 -*-

from holmes.validators.base import Validator
from holmes.utils import _


class ImageAltValidator(Validator):
    """Checks every page image for a missing or overly long alt attribute."""

    @classmethod
    def get_without_alt_parsed_value(cls, value):
        """Render (src, name) pairs as a comma-joined list of anchor tags."""
        links = [
            '<a href="%s" target="_blank">%s</a>' % (src, name)
            for src, name in value
        ]
        return {'images': ', '.join(links)}

    @classmethod
    def get_alt_too_big_parsed_value(cls, value):
        """Render oversized-alt images plus the configured size limit."""
        links = [
            u'<a href="{}" alt="{}" target="_blank">{}</a>'.format(
                src, alt, name
            )
            for src, name, alt in value['images']
        ]
        return {
            'max_size': value['max_size'],
            'images': ', '.join(links),
        }

    @classmethod
    def get_violation_definitions(cls):
        """Describe both violation keys this validator can emit."""
        without_alt = {
            'title': _('Image(s) without alt attribute'),
            'description': _(
                'Images without alt text are not good for '
                'Search Engines. Images without alt were '
                'found for: %(images)s.'),
            'value_parser': cls.get_without_alt_parsed_value,
            'category': _('SEO'),
            'generic_description': _(
                'Images without alt attribute are not good for '
                'search engines. They are searchable by the content '
                'of this attribute, so if it\'s empty, it cause bad '
                'indexing optimization.'
            )
        }
        alt_too_big = {
            'title': _('Image(s) with alt attribute too big'),
            'description': _(
                'Images with alt text bigger than %(max_size)d chars are '
                'not good for search engines. Images with a too big alt '
                'were found for: %(images)s.'),
            'value_parser': cls.get_alt_too_big_parsed_value,
            'category': _('SEO'),
            'generic_description': _(
                'Images with alt text too long are not good to SEO. '
                'This maximum value are configurable '
                'by Holmes configuration.'
            ),
            'unit': 'number'
        }
        return {
            'invalid.images.alt': without_alt,
            'invalid.images.alt_too_big': alt_too_big,
        }

    @classmethod
    def get_default_violations_values(cls, config):
        """Pull the configured maximum alt length for the size violation."""
        return {
            'invalid.images.alt_too_big': {
                'value': config.MAX_IMAGE_ALT_SIZE,
                'description': config.get_description('MAX_IMAGE_ALT_SIZE')
            }
        }

    def validate(self):
        """Collect missing/oversized alt attributes and record violations."""
        max_alt_size = self.get_violation_pref('invalid.images.alt_too_big')

        missing_alt = []
        oversized_alt = []
        for img in self.get_imgs():
            raw_src = img.get('src')
            if not raw_src:
                continue
            src = self.normalize_url(raw_src)
            alt_text = img.get('alt')
            if not src:
                continue
            name = src.rsplit('/', 1)[-1]
            if not alt_text:
                missing_alt.append((src, name))
            elif len(alt_text) > max_alt_size:
                oversized_alt.append((src, name, alt_text))

        if missing_alt:
            self.add_violation(
                key='invalid.images.alt',
                value=missing_alt,
                points=20 * len(missing_alt)
            )
        if oversized_alt:
            self.add_violation(
                key='invalid.images.alt_too_big',
                value={
                    'images': oversized_alt,
                    'max_size': max_alt_size
                },
                points=20 * len(oversized_alt)
            )

    def get_imgs(self):
        """Return the images collected for the page under review."""
        return self.review.data.get('page.all_images', None)
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings

from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat

from ... import models as _models

T = TypeVar('T')
# Optional per-call hook: receives (pipeline_response, deserialized, headers).
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]


class NetworkInterfaceLoadBalancersOperations:
    """NetworkInterfaceLoadBalancersOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2018_10_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def list(
        self,
        resource_group_name: str,
        network_interface_name: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.NetworkInterfaceLoadBalancerListResult"]:
        """List all load balancers in a network interface.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param network_interface_name: The name of the network interface.
        :type network_interface_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either NetworkInterfaceLoadBalancerListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2018_10_01.models.NetworkInterfaceLoadBalancerListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NetworkInterfaceLoadBalancerListResult"]
        # Map the ARM error status codes that have dedicated exception types.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2018-10-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # First page: build the URL from the operation metadata template.
            # Subsequent pages: the service supplies a complete next_link URL.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and hand back (continuation token, items).
            deserialized = self._deserialize('NetworkInterfaceLoadBalancerListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch one page; non-200 responses are mapped to typed errors.
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/loadBalancers'}  # type: ignore
# -*- coding: utf-8 -*-
# this default value is just for testing in a fake.
# pylint: disable=dangerous-default-value
"""Fake Module containing helper functions for the SQLite plugin"""
from plasoscaffolder.bll.services import base_sqlite_plugin_helper
from plasoscaffolder.bll.services import base_sqlite_plugin_path_helper
from plasoscaffolder.dal import base_sql_query_execution
from plasoscaffolder.dal import sql_query_data
from plasoscaffolder.model import sql_query_column_model
from plasoscaffolder.model import sql_query_column_model_data


class FakeSQLitePluginHelper(base_sqlite_plugin_helper.BaseSQLitePluginHelper):
    """Fake for the SQLite plugin helper"""

    def __init__(
        self,
        plugin_exists=False,
        folder_exists=False,
        file_exists=False,
        valid_name=True,
        change_bool_after_every_call_plugin_exists=False,
        change_bool_after_every_call_folder_exists=False,
        change_bool_after_every_call_file_exists=False,
        change_bool_after_every_call_valid_name=False,
        distinct_columns=None,
        valid_row_name=True,
        change_bool_after_every_call_valid_row_name=False,
        change_bool_after_every_call_valid_comma_separated_string=False,
        valid_comma_separated_string=True,
        columns_and_timestamp_column=([], []),
        assumed_timestamps=[]):
        """Initializes the fake plugin helper.

        Args:
            plugin_exists (bool): what the PluginExists function should return
            folder_exists (bool): what the FolderExists function should return
            file_exists (bool): what the FileExists function should return
            valid_name (bool): what the IsValidPluginName function should return
            change_bool_after_every_call_plugin_exists (bool): if the function
                boolean should change after every call.
            change_bool_after_every_call_folder_exists (bool): if the function
                boolean should change after every call.
            change_bool_after_every_call_file_exists (bool): if the function
                boolean should change after every call.
            change_bool_after_every_call_valid_name (bool): if the function
                boolean should change after every call.
            distinct_columns ([]): what the GetDistinctColumnsFromSQLQueryData
                function should return
            valid_row_name (bool): if the row name is valid, what the function
                isValidRowName will return
            change_bool_after_every_call_valid_row_name (bool): if the function
                boolean should change after every call.
            change_bool_after_every_call_valid_comma_separated_string (bool): if
                the function boolean should change after every call.
            valid_comma_separated_string (bool): what the
                IsValidCommaSeparatedString function should return
            columns_and_timestamp_column
                ([sql_query_column_model.SQLColumnModel],
                 [sql_query_column_model.SQLColumnModel]): what to return for
                the method GetColumnsAndTimestampColumn
            assumed_timestamps ([str]): what to return for the method
                GetAssumedTimestamps
        """
        self.change_valid_name = change_bool_after_every_call_valid_name
        self.change_file_exists = change_bool_after_every_call_file_exists
        self.change_folder_exists = change_bool_after_every_call_folder_exists
        self.change_plugin_exists = change_bool_after_every_call_plugin_exists
        self.change_valid_row_name = change_bool_after_every_call_valid_row_name
        self.change_valid_comma_separated_string = (
            change_bool_after_every_call_valid_comma_separated_string)
        self.plugin_exists = plugin_exists
        self.folder_exists = folder_exists
        self.file_exists = file_exists
        self.valid_name = valid_name
        self.distinct_columns = distinct_columns
        self.is_valid_row_name = valid_row_name
        self.is_valid_comma_separated_string = valid_comma_separated_string
        self.columns_and_timestamp_column = columns_and_timestamp_column
        self.assumed_timestamps = assumed_timestamps

    def PluginExists(
        self, path: str, plugin_name: str, database_suffix: str,
        path_helper:
            base_sqlite_plugin_path_helper.BaseSQLitePluginPathHelper) -> bool:
        """will return true false true ... starting with the initial
        (against loops while testing)"""
        if self.change_plugin_exists:
            self.plugin_exists = not self.plugin_exists
            return not self.plugin_exists
        else:
            return self.plugin_exists

    def FileExists(self, path: str) -> bool:
        """will return true false true ... starting with the initial
        (against loops while testing)"""
        if self.change_file_exists:
            self.file_exists = not self.file_exists
            return not self.file_exists
        else:
            return self.file_exists

    def FolderExists(self, path: str) -> bool:
        """will return true false true ... starting with the initial
        (against loops while testing)"""
        if self.change_folder_exists:
            self.folder_exists = not self.folder_exists
            return not self.folder_exists
        else:
            return self.folder_exists

    def IsValidPluginName(self, plugin_name: str) -> bool:
        """will return true false true ... starting with the initial
        (against loops while testing)"""
        if self.change_valid_name:
            self.valid_name = not self.valid_name
            return not self.valid_name
        else:
            return self.valid_name

    def IsValidRowName(self, row_name: str) -> bool:
        """will return true false true ... starting with the initial
        (against loops while testing)"""
        # FIX: an unreachable trailing `return` after the if/else was removed.
        if self.change_valid_row_name:
            self.is_valid_row_name = not self.is_valid_row_name
            return not self.is_valid_row_name
        else:
            return self.is_valid_row_name

    def IsValidCommaSeparatedString(self, text: str) -> bool:
        """will return true false true ... starting with the initial
        (against loops while testing)"""
        # FIX: an unreachable trailing `return` after the if/else was removed.
        if self.change_valid_comma_separated_string:
            self.is_valid_comma_separated_string = (
                not self.is_valid_comma_separated_string)
            return not self.is_valid_comma_separated_string
        else:
            return self.is_valid_comma_separated_string

    def RunSQLQuery(
        self, query: str,
        executor: base_sql_query_execution.BaseSQLQueryExecution):
        # FIX: the annotation previously *instantiated* the executor class
        # (`BaseSQLQueryExecution()`); annotations should name the type.
        """Validates the sql Query

        Args:
            executor (base_sql_query_execution.SQLQueryExection()): the sql
                executor
            query (str): the sql Query

        Returns:
            base_sql_query_execution.SQLQueryData: the data to the executed
                Query
        """
        return executor.ExecuteQuery(query)

    def GetDistinctColumnsFromSQLQueryData(
        self, queries: [sql_query_data.SQLQueryData]) -> [str]:
        """Get a distinct list of all attributes from multiple queries

        Args:
            queries ([base_sql_query_execution.SQLQueryData]): an array of
                multiple sql query data objects

        Returns:
            [str]: a distinct list of all attributes used in the query
        """
        return self.distinct_columns

    def GetAssumedTimestamps(self, columns: [sql_query_column_model]) -> [str]:
        """Gets all columns assumed that they are timestamps

        Args:
            columns ([sql_query_column_model]): the columns from the query

        Returns:
            [str]: the names from the columns assumed they could be a timestamp
        """
        return self.assumed_timestamps

    def GetColumnsAndTimestampColumn(
        self, columns: [sql_query_column_model.SQLColumnModel],
        timestamps: [str], data: [str]
    ) -> ([sql_query_column_model_data.SQLColumnModelData],
          [sql_query_column_model.SQLColumnModel]):
        """Splits the column list into a list of simple columns and a list for
        timestamp event columns and adds the data to the simple columns

        Args:
            columns ([sql_query_column_model_data.SQLColumnModelData]): the
                columns from the SQL query
            timestamps ([str]): the timestamp events
            data ([str]): the data from the cursor

        Returns:
            ([sql_query_column_model_data.SQLColumnModelData],
             [sql_query_column_model.SQLColumnModel): a tuple of columns, the
                first are the normal columns, the second are the timestamp
                events
        """
        return self.columns_and_timestamp_column
#!/usr/bin/env python3
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html

import unittest
import logging

from MooseDocs.test import MooseDocsTestCase
from MooseDocs.extensions import core
from MooseDocs import base

logging.basicConfig()


class TestCore(MooseDocsTestCase):
    """Tokenization and rendering tests for the MooseDocs 'core' extension."""
    EXTENSIONS = [core]

    def testCodeBlock(self):
        """Fenced code tokenizes to 'Code' and renders as <pre><code>/verbatim."""
        text = "```\nint x = 0;\n```"
        ast = self.tokenize(text)
        self.assertToken(ast(0), 'Code', language='text',
                         content='\nint x = 0;\n', escape=True)

        def helper(r):
            # Shared HTML checks for both the plain and Materialize renderers.
            self.assertHTMLTag(r(0), 'pre', class_='moose-pre')
            self.assertHTMLTag(r(0)(0), 'code', class_='language-text')
            self.assertHTMLString(r(0)(0)(0), '\nint x = 0;\n', escape=True)

        res = self.render(ast)
        self.assertHTMLTag(res, 'body')
        helper(res)

        res = self.render(ast, renderer=base.MaterializeRenderer())
        self.assertHTMLTag(res, 'div')
        helper(res)

        tex = self.render(ast, renderer=base.LatexRenderer())
        self.assertLatex(tex(0), 'Environment', 'verbatim',
                         after_begin='\n', before_end='\n', escape=False)
        self.assertLatexString(tex(0)(0), 'int x = 0;', escape=False)

    def testLineBreak(self):
        """A trailing '\\\\' produces a LineBreak token, inline or at EOL."""
        text = r'Break\\ this'
        ast = self.tokenize(text)
        self.assertToken(ast(0), 'Paragraph', size=3)
        self.assertToken(ast(0)(0), 'Word', content='Break')
        self.assertToken(ast(0)(1), 'LineBreak')
        self.assertToken(ast(0)(2), 'Word', content='this')

        text = r'''Break\\
this'''
        ast = self.tokenize(text)
        self.assertToken(ast(0), 'Paragraph', size=3)
        self.assertToken(ast(0)(0), 'Word', content='Break')
        self.assertToken(ast(0)(1), 'LineBreak')
        self.assertToken(ast(0)(2), 'Word', content='this')

    def testEscapeCharacter(self):
        """Backslash-escaped markup characters tokenize as plain punctuation."""
        # BUG FIX: this was a plain string literal, so '\[' and '\!' were
        # invalid escape sequences (SyntaxWarning on modern Python, a future
        # error). A raw string has the identical value without the warning.
        text = r"No \[link\] and no \!\! comment"
        ast = self.tokenize(text)
        self.assertToken(ast(0), 'Paragraph', size=14)
        self.assertToken(ast(0)(0), 'Word', content='No')
        self.assertToken(ast(0)(1), 'Space', count=1)
        self.assertToken(ast(0)(2), 'Punctuation', content='[')
        self.assertToken(ast(0)(3), 'Word', content='link')
        self.assertToken(ast(0)(4), 'Punctuation', content=']')
        self.assertToken(ast(0)(5), 'Space', count=1)
        self.assertToken(ast(0)(6), 'Word', content='and')
        self.assertToken(ast(0)(7), 'Space', count=1)
        self.assertToken(ast(0)(8), 'Word', content='no')
        self.assertToken(ast(0)(9), 'Space', count=1)
        self.assertToken(ast(0)(10), 'Punctuation', content='!')
        self.assertToken(ast(0)(11), 'Punctuation', content='!')
        self.assertToken(ast(0)(12), 'Space', count=1)
        self.assertToken(ast(0)(13), 'Word', content='comment')

        for c in ['!', '[', ']', '@', '^', '*', '+', '~', '-']:
            text = r'foo \{} bar'.format(c)
            ast = self.tokenize(text)
            self.assertToken(ast(0), 'Paragraph', size=5)
            self.assertToken(ast(0)(0), 'Word', content='foo')
            self.assertToken(ast(0)(1), 'Space', count=1)
            self.assertToken(ast(0)(2), 'Punctuation', content=c)
            self.assertToken(ast(0)(3), 'Space', count=1)
            self.assertToken(ast(0)(4), 'Word', content='bar')


if __name__ == '__main__':
    unittest.main(verbosity=2)
"""Fi
xer that changes raw_input(...) into input(...).""" # Author: Andre Roberge # Local imports from .. import fixer_base from ..fixer_util import Name class FixRawInput(fixer_base.BaseFix): BM_compatible = True PATTERN = """ power< name='raw_input' trailer< '(' [any] ')' > any* > """ def transform(self, node, results):
name = results["name"] name.replace(Name("input", prefix=name.prefix))
#!/usr/bin/env python3
""" 2018 AOC Day 09 """
import argparse
import typing
import unittest


class Node(object):
    ''' Class representing node in cyclic linked list '''

    def __init__(self, prev: 'Node', next: 'Node', value: int):
        ''' Create a node with explicit parameters '''
        self._prev = prev
        self._next = next
        self._value = value

    @staticmethod
    def default() -> 'Node':
        ''' Create a node linked to itself with value 0 '''
        ring = Node(None, None, 0)  # type: ignore
        ring._prev = ring
        ring._next = ring
        return ring

    def forward(self, n: int = 1) -> 'Node':
        ''' Go forward n nodes '''
        node = self
        while n > 0:
            node = node._next
            n -= 1
        return node

    def back(self, n: int = 1) -> 'Node':
        ''' Go backward n nodes '''
        node = self
        while n > 0:
            node = node._prev
            n -= 1
        return node

    def insert(self, value: int) -> 'Node':
        ''' Insert new node after current node with given value, and return
        newly inserted Node '''
        successor = self._next
        fresh = Node(self, successor, value)
        successor._prev = fresh
        self._next = fresh
        return fresh

    def remove(self) -> 'Node':
        ''' Remove current Node and return the following Node '''
        before, after = self._prev, self._next
        before._next = after
        after._prev = before
        return after

    def value(self) -> int:
        ''' Get value '''
        return self._value

    def chain_values(self):
        ''' Collect the values of the whole ring, starting at this node '''
        collected = [self._value]
        node = self._next
        while node is not self:
            collected.append(node._value)
            node = node._next
        return collected


def part1(nplayers: int, highest_marble: int) -> int:
    """ Solve part 1 """
    scores = dict.fromkeys(range(nplayers), 0)
    current = Node.default()
    player = 0
    for marble in range(1, highest_marble + 1):
        if marble % 23:
            # Normal move: skip one marble, insert after it.
            current = current.forward().insert(marble)
        else:
            # Scoring move: keep this marble plus the one 7 back, which is
            # removed; its successor becomes current.
            current = current.back(7)
            scores[player] += marble + current.value()
            current = current.remove()
        player = (player + 1) % nplayers
    return max(scores.values())


def part2(nplayers: int, highest_node: int) -> int:
    """ Solve part 2 """
    return part1(nplayers, highest_node)


def main():
    """ Run 2018 Day 09 """
    parser = argparse.ArgumentParser(description='Advent of Code 2018 Day 09')
    parser.add_argument('nplayers', type=int, help='# of players')
    parser.add_argument(
        'highest_marble',
        type=int,
        help='highest-valued marble',
    )
    opts = parser.parse_args()

    print('Part 1:', part1(opts.nplayers, opts.highest_marble))
    print('Part 2:', part2(opts.nplayers, opts.highest_marble * 100))


if __name__ == '__main__':
    main()


class ExampleTest(unittest.TestCase):
    def test_part1(self):
        examples = {
            (9, 25): 32,
            (10, 1618): 8317,
            (13, 7999): 146373,
            (17, 1104): 2764,
            (21, 6111): 54718,
            (30, 5807): 37305,
        }
        for example, expected in examples.items():
            self.assertEqual(part1(*example), expected)
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from os.path import join, basename
from os import environ

from qp_shogun.utils import (
    _format_params, make_read_pairs_per_sample, _run_commands,
    _per_sample_ainfo)

# Root directory that holds the SortMeRNA reference databases.
DIR = environ["QC_SORTMERNA_DB_DP"]

# Colon-separated "<fasta>,<index>" pairs for every rRNA reference database.
RNA_REF_DB = (
    '{0}silva-arc-23s-id98.fasta,'
    '{0}silva-arc-23s-id98.idx:'
    '{0}silva-bac-16s-id90.fasta,'
    '{0}silva-bac-16s-id90.idx:'
    '{0}silva-bac-23s-id98.fasta,'
    '{0}silva-bac-23s-id98.idx:'
    '{0}silva-arc-16s-id95.fasta,'
    '{0}silva-arc-16s-id95.idx:'
    '{0}silva-euk-18s-id95.fasta,'
    '{0}silva-euk-18s-id95.idx:'
    '{0}silva-euk-28s-id98.fasta,'
    '{0}silva-euk-28s-id98.idx:'
    '{0}rfam-5s-database-id98.fasta,'
    '{0}rfam-5s-database-id98.idx:'
    '{0}rfam-5.8s-database-id98.fasta,'
    '{0}rfam-5.8s-database-id98.idx'
    ).format(DIR)

# Mapping of SortMeRNA flag -> human-readable Qiita parameter name.
SORTMERNA_PARAMS = {
    'blast': 'Output blast format',
    'num_alignments': 'Number of alignments',
    'a': 'Number of threads',
    'm': 'Memory'}


def generate_sortmerna_commands(forward_seqs, reverse_seqs, map_file,
                                out_dir, parameters):
    """Generates the Sortmerna commands

    Parameters
    ----------
    forward_seqs : list of str
        The list of forward seqs filepaths
    reverse_seqs : list of str
        The list of reverse seqs filepaths
    map_file : str
        The path to the mapping file
    out_dir : str
        The job output directory
    parameters : dict
        The command's parameters, keyed by parameter name

    Returns
    -------
    cmds: list of str
        The Sortmerna commands
    samples: list of tup
        list of 4-tuples with run prefix, sample name, fwd read fp, rev read fp

    Notes
    -----
    Currently this is requiring matched pairs in the make_read_pairs_per_sample
    step but implicitly allowing empty reverse reads in the actual command
    generation. This behavior may allow support of situations with empty
    reverse reads in some samples, for example after trimming and QC.
    """
    # matching filenames, samples, and run prefixes
    samples = make_read_pairs_per_sample(forward_seqs, reverse_seqs, map_file)

    cmds = []
    param_string = _format_params(parameters, SORTMERNA_PARAMS)
    threads = parameters['Number of threads']

    # Sortmerna 2.1 does not support direct processing of
    # compressed files currently
    # note SMR auto-detects file type and adds .fastq extension
    # to the generated output files
    template = ("unpigz -p {thrds} -c {ip} > {ip_unpigz} && "
                "sortmerna --ref {ref_db} --reads {ip_unpigz} "
                "--aligned {smr_r_op} --other {smr_nr_op} "
                "--fastx {params} && "
                "pigz -p {thrds} -c {smr_r_op}.fastq > {smr_r_op_gz} && "
                "pigz -p {thrds} -c {smr_nr_op}.fastq > {smr_nr_op_gz};"
                )

    arguments = {'thrds': threads,
                 'ref_db': RNA_REF_DB,
                 'params': param_string}

    for run_prefix, sample, f_fp, r_fp in samples:
        prefix_path = join(out_dir, run_prefix)
        for index, fp in enumerate([f_fp, r_fp]):
            # if reverse filepath is not present ignore it
            if fp is None:
                continue
            arguments['ip'] = fp
            arguments['ip_unpigz'] = join(
                out_dir, basename(fp.replace('.fastq.gz', '.fastq')))
            # R1 is the forward read, R2 the reverse read
            arguments['smr_r_op'] = prefix_path + '.ribosomal.R%d'\
                % (index + 1)
            arguments['smr_nr_op'] = prefix_path + '.nonribosomal.R%d'\
                % (index + 1)
            arguments['smr_r_op_gz'] = arguments['smr_r_op'] + '.fastq.gz'
            arguments['smr_nr_op_gz'] = arguments['smr_nr_op'] + '.fastq.gz'
            cmds.append(template.format(**arguments))

    return cmds, samples


# In this version I have not added a summary file or sam file
def sortmerna(qclient, job_id, parameters, out_dir):
    """Run Sortmerna with the given parameters

    Parameters
    ----------
    qclient : tgp.qiita_client.QiitaClient
        The Qiita server client
    job_id : str
        The job id
    parameters : dict
        The parameter values
    out_dir : str
        The path to the job's output directory

    Returns
    -------
    bool, list, str
        The results of the job
    """
    # BUG FIX: the progress labels used to mix "of 4" and "of 5"; there are
    # five steps, so all labels now consistently say "of 5".
    # Step 1 get the rest of the information need to run Sortmerna
    qclient.update_job_step(job_id, "Step 1 of 5: Collecting information")
    artifact_id = parameters['input']
    del parameters['input']

    # Get the artifact filepath information
    artifact_info = qclient.get("/qiita_db/artifacts/%s/" % artifact_id)
    fps = artifact_info['files']

    # Get the artifact metadata
    prep_info = qclient.get('/qiita_db/prep_template/%s/'
                            % artifact_info['prep_information'][0])
    qiime_map = prep_info['qiime-map']

    # Step 2 generating command for Sortmerna
    qclient.update_job_step(job_id, "Step 2 of 5: Generating"
                                    " SortMeRNA commands")
    rs = fps['raw_reverse_seqs'] if 'raw_reverse_seqs' in fps else []
    commands, samples = generate_sortmerna_commands(
        fps['raw_forward_seqs'], rs, qiime_map, out_dir, parameters)

    # Step 3 executing Sortmerna
    len_cmd = len(commands)
    # "%d" is filled in per-command by _run_commands.
    msg = "Step 3 of 5: Executing ribosomal filtering (%d/{0})".format(len_cmd)
    success, msg = _run_commands(qclient, job_id, commands, msg,
                                 'QC_Sortmerna')
    if not success:
        return False, None, msg

    ainfo = []
    # Generates 2 artifacts: one for the ribosomal
    # reads and other for the non-ribosomal reads

    # Step 4 generating artifacts for Nonribosomal reads
    msg = ("Step 4 of 5: Generating artifacts "
           "for Nonribosomal reads (%d/{0})").format(len_cmd)
    suffixes = ['%s.nonribosomal.R1.fastq.gz', '%s.nonribosomal.R2.fastq.gz']
    prg_name = 'Sortmerna'
    file_type_name = 'Non-ribosomal reads'
    ainfo.extend(_per_sample_ainfo(
        out_dir, samples, suffixes, prg_name, file_type_name, bool(rs)))

    # Step 5 generating artifacts for Ribosomal reads
    msg = ("Step 5 of 5: Generating artifacts "
           "for Ribosomal reads (%d/{0})").format(len_cmd)
    suffixes = ['%s.ribosomal.R1.fastq.gz', '%s.ribosomal.R2.fastq.gz']
    prg_name = 'Sortmerna'
    file_type_name = 'Ribosomal reads'
    ainfo.extend(_per_sample_ainfo(
        out_dir, samples, suffixes, prg_name, file_type_name, bool(rs)))

    return True, ainfo, ""
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2018-2022 F4PGA Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0

import time
import os


class Timed:
    """Context manager that measures the wall-clock time of its body and
    reports it via ``t.add_runtime(name, seconds, unprinted_runtime=...)``."""

    def __init__(self, t, name, unprinted_runtime=False):
        self.t = t
        self.name = name
        self.start = None
        self.unprinted_runtime = unprinted_runtime

    def __enter__(self):
        self.start = time.time()

    def __exit__(self, type, value, traceback):
        end = time.time()
        self.t.add_runtime(
            self.name, end - self.start,
            unprinted_runtime=self.unprinted_runtime
        )


def get_vivado_max_freq(report_file):
    """Parse a Vivado timing report and return per-path-group frequencies.

    Returns a dict keyed by path group name with 'actual' and 'requested'
    frequencies (MHz), a 'met' flag and per-path-type slack violations (ns).
    """
    processing = False
    group = ""
    delay = ""
    freq = 0
    freqs = dict()
    path_type = None
    # BUG FIX: these were previously assigned only when a 'Requirement' line
    # was seen; a malformed report reaching a '----' separator first raised
    # NameError. With the empty default the group is simply skipped.
    requirement = ""
    requested_freq = 0.0
    with open(report_file, 'r') as fp:
        for l in fp:
            if l.startswith("Slack"):
                if '(MET)' in l:
                    violation = 0.0
                else:
                    violation = float(
                        l.split(':')[1].split()[0].strip().strip('ns')
                    )
                processing = True

            if processing is True:
                fields = l.split()
                if len(fields) > 1 and fields[1].startswith('----'):
                    processing = False
                    # check if this is a timing we want
                    if group not in requirement.split():
                        continue
                    if group not in freqs:
                        freqs[group] = dict()
                        freqs[group]['actual'] = freq
                        freqs[group]['requested'] = requested_freq
                        freqs[group]['met'] = freq >= requested_freq
                        freqs[group]['{}_violation'.format(path_type.lower())
                                     ] = violation
                        path_type = None
                    # Second block for the same group (e.g. the other
                    # setup/hold path type) only records its violation.
                    if path_type is not None:
                        freqs[group]['{}_violation'.format(path_type.lower())
                                     ] = violation

            data = l.split(':')
            if len(data) > 1:
                if data[0].strip() == 'Data Path Delay':
                    delay = data[1].split()[0].strip('ns')
                    freq = 1e9 / float(delay)
                if data[0].strip() == 'Path Group':
                    group = data[1].strip()
                if data[0].strip() == 'Requirement':
                    requirement = data[1].strip()
                    r = float(requirement.split()[0].strip('ns'))
                    if r != 0.0:
                        requested_freq = 1e9 / r
                if data[0].strip() == 'Path Type':
                    ptype = data[1].strip()
                    if path_type != ptype.split()[0]:
                        path_type = ptype.split()[0]

    # Convert Hz -> MHz, rounded to 3 decimal places.
    for cd in freqs:
        freqs[cd]['actual'] = float("{:.3f}".format(freqs[cd]['actual'] / 1e6))
        freqs[cd]['requested'] = float(
            "{:.3f}".format(freqs[cd]['requested'] / 1e6)
        )
    return freqs


def get_yosys_resources(yosys_log):
    """Extract the cell-usage table from a yosys log.

    Returns a dict mapping cell type -> count. NOTE(review): counts are kept
    as strings, matching the original behavior — confirm before changing to
    int, as downstream consumers may rely on it.
    """
    with open(yosys_log, "r") as f:
        data = f.readlines()

    resources = dict()
    print_stats = False
    proc_cells = False
    for line in data:
        # Only look at the "Printing statistics" section of the log.
        print_stats = "Printing statistics" in line or print_stats
        if not print_stats:
            continue

        if proc_cells and line.strip():
            cell, count = line.split()
            resources[cell] = count

        # Cell rows start after the "Number of cells" header and end at the
        # first blank line.
        proc_cells = ("Number of cells" in line or proc_cells) \
            and line.strip()

    return resources


def have_exec(mybin):
    """Return True when *mybin* resolves to an executable on PATH."""
    # BUG FIX: was `which(mybin) != None`; identity comparison with None
    # should use `is not None` (PEP 8).
    return which(mybin) is not None


# https://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
def which(program, get_dir=False):
    """Locate *program* on PATH (or verify an explicit path).

    Returns the executable path (or its directory when ``get_dir`` is True),
    or None when not found.
    """
    def is_exe(fpath):
        return os.path.isfile(fpath) and os.access(fpath, os.X_OK)

    fpath, fname = os.path.split(program)
    if fpath:
        # An explicit path was given; only check that one location.
        if is_exe(program):
            return program
    else:
        for path in os.environ["PATH"].split(os.pathsep):
            exe_file = os.path.join(path, program)
            if is_exe(exe_file):
                if get_dir:
                    return path
                else:
                    return exe_file

    return None


def safe_get_dict_value(dict, key, default):
    """Return ``dict[key]`` when present, otherwise *default*.

    NOTE(review): the parameter name shadows the builtin ``dict``; kept
    unchanged for backward compatibility with keyword callers.
    """
    if key in dict:
        return dict[key]
    else:
        return default


def get_file_dict(file_name, file_type):
    """Build the {name, file_type} record used in result manifests; the
    name is resolved to an absolute real path."""
    return dict(name=os.path.realpath(file_name), file_type=file_type)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2013 by Erwin Marsi and TST-Centrale
#
# This file is part of the DAESO Framework.
#
# The DAESO Framework is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# The DAESO Framework is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

"""
distutils setup script for distributing Timbl Tools
"""

# TODO:
# - docs, data and test are not installed when using bdist_wininst...

__authors__ = "Erwin Marsi <e.marsi@gmail.com>"

from distutils.core import setup
from glob import glob
from os import walk, path, remove
from os.path import basename, isdir, join, exists
from shutil import rmtree

# Start from a clean slate: a stale MANIFEST or build/ directory would leak
# files from a previous release into this distribution.
if exists('MANIFEST'):
    remove('MANIFEST')

if exists("build"):
    rmtree("build")

name = "timbl-tools"
version = "0.5.0"

description = """Timbl Tools is a collection of Python modules and scripts for working with TiMBL, the Tilburg Memory-based Learner."""

long_description = """
Timbl Tools is a collection of Python modules and scripts for working with
TiMBL, the Tilburg Memory-based Learner. It provides support for:

* creating Timbl servers and clients
* running (cross-validated) experiments
* lazy parsing of verbose Timbl ouput (e.g. NN distributions)
* down-sampling of instances
* writing ascii graphs of the feature weights
"""

# Package names are directory paths under lib/ with the "lib/" prefix (4
# chars) stripped; Subversion bookkeeping directories are excluded.
packages = [
    root[4:]
    for (root, dirs, files) in walk("lib")
    if not ".svn" in root
]


def get_data_files(data_dir_prefix, dir):
    # data_files specifies a sequence of (directory, files) pairs
    # Each (directory, files) pair in the sequence specifies the installation directory
    # and the files to install there.
    data_files = []

    for base, subdirs, files in walk(dir):
        install_dir = join(data_dir_prefix, base)
        # Skip compiled and editor-backup files.
        files = [
            join(base, f)
            for f in files
            if not f.endswith(".pyc") and not f.endswith("~")
        ]

        data_files.append((install_dir, files))

        if '.svn' in subdirs:
            subdirs.remove('.svn')  # ignore svn directories

    return data_files


# data files are installed under sys.prefix/share/timbl-tools-%(version)
data_dir = join("share", "%s-%s" % (name, version))
data_files = [(data_dir, ['CHANGES', 'COPYING', 'INSTALL', 'README'])]
data_files += get_data_files(data_dir, "doc")
data_files += get_data_files(data_dir, "data")

sdist_options = dict(
    formats=["zip", "gztar", "bztar"])

setup(
    name = name,
    version = version,
    description = description,
    long_description = long_description,
    license = "GNU Public License v3",
    author = "Erwin Marsi",
    author_email = "e.marsi@gmail.com",
    url = "https://github.com/emsrc/timbl-tools",
    requires = ["networkx"],
    provides = ["tt (%s)" % version],
    package_dir = {"": "lib"},
    packages = packages,
    scripts = glob(join("bin", "*.py")),
    data_files = data_files,
    platforms = "POSIX, Mac OS X, MS Windows",
    keywords = ["TiMBL"],
    classifiers = [
        "Development Status :: 4 - Beta",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: GNU Public License (GPL)",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Topic :: Scientific/Engineering :: Artificial Intelligence",
        "Natural Language :: English"
    ],
    options = dict(sdist=sdist_options)
)
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regener
ated. # -------------------------------------------------------------------------- from .operations import Operations from .components_operations import
ComponentsOperations from .web_tests_operations import WebTestsOperations from .export_configurations_operations import ExportConfigurationsOperations from .proactive_detection_configurations_operations import ProactiveDetectionConfigurationsOperations from .component_current_billing_features_operations import ComponentCurrentBillingFeaturesOperations from .component_quota_status_operations import ComponentQuotaStatusOperations from .api_keys_operations import APIKeysOperations __all__ = [ 'Operations', 'ComponentsOperations', 'WebTestsOperations', 'ExportConfigurationsOperations', 'ProactiveDetectionConfigurationsOperations', 'ComponentCurrentBillingFeaturesOperations', 'ComponentQuotaStatusOperations', 'APIKeysOperations', ]
from extract_feature_lib import *
from sys import argv
from dga_model_eval import *
from __init__ import *


def clear_cache(index, cache):
    # Flush the accumulated feature rows into the "<collection>_matrix"
    # MongoDB collection for the collection selected by `index`.
    print "clear cache", index
    for tmp in cache:
        client[db_name][coll_name_list[index]+"_matrix"].insert(cache[tmp])


def extract_domain_feature(index):
    # Build per-domain feature vectors (IP count, TTL stats, lifetime,
    # /16-prefix entropy, growth, relative domains, subdomain diversity)
    # for every document of the selected collection, then bulk-insert them.
    #cursor = client[db_name].domain.find({"_id":{"$in": ["emltrk.com", "weminemnc.com"]}})
    cursor = client[db_name][coll_name_list[index]].find(timeout=False)
    # Well-known benign domains that are skipped entirely.
    outlier = {"", "ntp.org", "isipp.com", "gccdn.net", "cdngc.net", "gstatic.com", "cloudfront.net"}
    cache = {}
    num = 1
    print index
    for row in cursor:
        if get_tail(row["_id"]) in outlier:
            continue
        # if index == 0 and np.random.randint(1, 100) != 7:
        #     continue
        if index == 0:
            # Down-sample collection 0: keep a ~1/49 random sample, plus any
            # domain already present in one of the five previous days'
            # matrix collections (presumably to keep existing time series
            # continuous — confirm with pipeline owner).
            if np.random.randint(1, 50) != 7:
                flag = False
                this_db_name = db_name
                for m in range(0, 5):
                    # db names look like "pYYMMDD"; step back one day at a time.
                    pre_db_name = "p" + (datetime.strptime(this_db_name[1:], "%y%m%d") - timedelta(days=1)).strftime("%y%m%d")
                    if client[pre_db_name][coll_name_list[index]+"_matrix"].find_one({"_id": row["_id"]}):
                        flag = True
                        break
                    this_db_name = pre_db_name
                if not flag:
                    continue
                num += 1
                print "one more", num, row["_id"]
        ip_count = len(row["ITEMS"])
        min_ttl = min(row["TTLS"])
        max_ttl = max(row["TTLS"])
        # Observation lifetime in whole days (timestamps are in seconds).
        lifetime = int(row["LAST_SEEN"] - row["FIRST_SEEN"])/(60*60*24)
        p16_entropy = ip_diversity(row["ITEMS"])
        # Collection 2: drop domains with too few IPs, long TTLs or low
        # prefix entropy — unlikely fast-flux / DGA candidates.
        if index == 2 and (ip_count < 2 or min_ttl > 20000 or p16_entropy < 0.08):
            continue
        gro = growth(row["_id"], row["ITEMS"], row["SUBDOMAINS"], db_name)
        relative = relative_domain(row["ITEMS"], db_name)
        ipinfo = ip_info(row["ITEMS"], db_name)
        if ipinfo[0] == -1:
            # No IP information available for this domain; skip it.
            print "no ip", row["_id"], index, db_name
            continue
        subdomain = subdomain_diversity(row["SUBDOMAINS"])
        cache[row["_id"]] = {"ip_count": ip_count, "p16_entropy": p16_entropy,
                             "relative": relative, "subdomain": subdomain,
                             "growth": gro, "ipinfo": ipinfo,
                             "ttl": [min_ttl, max_ttl, max_ttl - min_ttl],
                             "lifetime": lifetime, "_id": row["_id"]}
        # client[db_name][coll_name_list[index]+"_matrix"].insert(tmp)
    clear_cache(index, cache)


def extract_ip_feature(index):
    # Build per-IP feature vectors (hosted-domain count/diversity, IP pool
    # stability, fraction of DGA-classified domains) and bulk-insert them.
    model = init_dga()
    cursor = client[db_name][coll_name_list[index]].find(timeout=False)
    cache = {}
    print index
    for row in cursor:
        # Skip documents whose _id does not look like an IP address.
        if not ip_p.match(str(row["_id"])):
            continue
        number = len(row["ITEMS"])
        min_ttl = min(row["TTLS"])
        max_ttl = max(row["TTLS"])
        lifetime = int(row["LAST_SEEN"] - row["FIRST_SEEN"])/(60*60*24)
        dd = domain_diversity(row["SUBDOMAINS"], row["ITEMS"])
        ips = ip_pool_stability(row["ITEMS"], db_name)
        # Ratio of hosted domains classified as DGA-generated by the model.
        tmp_counter = collections.Counter(evaluate_url_list(model, row["ITEMS"]))
        dga = round(tmp_counter['dga']/float(number), 3)
        cache[row["_id"]] = {"number": number, "dd": dd, "ips": ips,
                             "dga": dga, "ttl": [min_ttl, max_ttl],
                             "lifetime": lifetime, "_id": row["_id"]}
        # client[db_name][coll_name_list[index]+"_matrix"].insert(tmp)
    clear_cache(index, cache)


def main(index):
    # Dispatch on collection index: 0-3 are domain collections, 4-7 are IP
    # collections; any other value processes all of them.
    #index = int(raw_input())
    if index < 4:
        extract_domain_feature(index)
    elif index < 8:
        extract_ip_feature(index)
    else:
        for i in range(4, 8):
            extract_ip_feature(i)
        for i in range(0, 4):
            extract_domain_feature(i)


if __name__ == '__main__':
    # NOTE(review): main(8) is hard-coded, so only the database name is taken
    # from the command line; the `index` parameter of main() is effectively
    # fixed to "process everything".
    script, db_name = argv
    main(8)
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2011 OpenERP Italian Community (<http://www.openerp-italia.org>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

import time

from report import report_sxw


class Parser(report_sxw.rml_parse):
    """RML report parser that exposes the stdlib ``time`` module to the
    report template's rendering context."""

    def __init__(self, cr, uid, name, context):
        super(Parser, self).__init__(cr, uid, name, context)
        # Make ``time`` available inside the RML template.
        self.localcontext['time'] = time
# Copyright (c) 2016 Lee Cannon
# Licensed under the MIT License, see included LICENSE File
import click
import os
import re
from datetime import datetime


def _is_file_modnet(file_name: str) -> bool:
    """Return True if the filename contains 'Modnet'.

    :param file_name: The filename to check.
    :return: If the filename contains Modnet
    """
    return 'Modnet' in file_name


def _is_file_merge(file_name: str) -> bool:
    """Return True if the filename contains 'Merge'.

    :param file_name: The filename to check.
    :return: If the filename contains Merge
    """
    return 'Merge' in file_name


def correct_html(file: str, output_folder: str, date: str) -> str:
    """Correct the HTML output of Jupyter Notebook.

    Inlines the require.js and jQuery CDN scripts (so the report works
    offline), drops the MathJax CDN, hides the code-input cells and writes
    the result to ``<output_folder>/<network-><period>-report-<date>.html``.

    :param file: The Notebook HTML output file
    :param output_folder: The folder to output the corrected HTML to.
    :param date: The date the report is generated for.
    :return: The output file path.
    :raises TypeError: if the filename matches no supported report period.
    """
    includes = os.path.dirname(os.path.abspath(__file__)) + os.sep + 'includes' + os.sep

    with open(file, 'r') as f:
        contents = f.read()

    is_modnet = _is_file_modnet(file)
    is_merge = _is_file_merge(file)

    # Replace require.js CDN with full code.
    with open(includes + 'require.min.js', 'r') as f:
        require = f.read()
    contents = contents.replace(
        '<script src="https://cdnjs.cloudflare.com/ajax/libs/require.js/2.1.10/require.min.js"></script>',
        '<script>' + require + '</script>')

    # Replace jQuery CDN with full code.
    with open(includes + 'jquery.min.js', 'r') as f:
        jquery = f.read()
    contents = contents.replace(
        '<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/2.0.3/jquery.min.js"></script>',
        '<script>' + jquery + '</script>')

    # Remove MathJax CDN as it is unnecessary
    contents = contents.replace(
        '<script src="https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS_HTML"></script>',
        '')

    # Inject jQuery code to hide the code input boxes
    contents = contents.replace(
        '</body>',
        "<script>$('div.input').hide();</script></body>")

    # Deal with how crap IE is.
    contents = contents.replace(
        '</title>',
        '</title>\n<meta http-equiv="x-ua-compatible" content="IE=9">')

    # Network prefix for the output filename.
    if is_modnet:
        prefix = 'modnet-'
    elif is_merge:
        prefix = 'merge-'
    else:
        prefix = ''

    # Report period, taken from the notebook filename.
    if str(file).find('Daily') != -1:
        period = 'daily'
    elif str(file).find('Weekly') != -1:
        period = 'weekly'
    elif str(file).find('Monthly') != -1:
        period = 'monthly'
    else:
        # BUG FIX: the original message had an unterminated quote and omitted
        # the supported "monthly" report type.
        raise TypeError(
            'The only supported report types are "daily", "weekly" and "monthly"')

    output_file = output_folder + prefix + period + '-report-' + date + '.html'

    with open(output_file, 'w') as f:
        f.write(contents)

    return output_file


@click.group()
@click.version_option(version=0.1)
def main():
    """A command line wrapper around the trending module."""
    pass


@main.command(short_help='correct notebook output')
@click.argument('file', type=click.Path(exists=True))
@click.argument('output_folder', type=click.Path(exists=True))
@click.argument('date')
def clean_html(file, output_folder, date):
    """FILE: The path to the output of the notebook

    DATE: The date the report is for. For weekly the date of the monday is
    recommended."""
    if not re.search('[0-9]{4}-[0-9]{2}-[0-9]{2}', date):
        click.echo('The date must be in "yyyy-mm-dd" format.')
        exit()
    output_file = correct_html(file, output_folder, date)
    if click.confirm('Do you want to view the corrected output?'):
        click.launch(output_file)


@main.command(short_help='generate the daily report')
@click.argument('file', type=click.Path(exists=True))
def daily(file):
    """FILE: The path to that days data"""
    raise NotImplementedError


@main.command(short_help='generate the weekly report')
@click.argument('monday_file', type=click.Path(exists=True))
def weekly(monday_file):
    """MONDAY_FILE: The path to the monday data"""
    raise NotImplementedError


@main.command(short_help='generate the monthly report')
@click.argument('month_number')
@click.argument('year', default=str(datetime.now().year))
def monthly(month_number, year):
    """MONTH_NUMBER: The number of the month

    YEAR: The year to grab the data from"""
    try:
        month_number = int(month_number)
    except ValueError:
        click.echo('The month entered is invalid')
        exit()
    if month_number < 1 or month_number > 12:
        click.echo('The month entered is invalid')
        exit()

    try:
        year = int(year)
    except ValueError:
        click.echo('The year entered is invalid')
        exit()
    if year < 2015:
        click.echo('No data from before 2015 is present')
        exit()

    raise NotImplementedError
from testmodule import *
import sys


class TestWrites(TestRunner):
    """Integration test: user/permission management plus an authorized
    write/scan round trip against a pysharkbite-backed instance."""

    def __init__(self):
        super().__init__()

    def mthd(self):
        import pysharkbite
        securityOps = super().getSecurityOperations()
        securityOps.create_user("testUser", "password")

        # validate that we DON'T see the permissions
        assert not securityOps.has_system_permission(
            "testUser", pysharkbite.SystemPermissions.CREATE_TABLE)
        securityOps.grant_system_permission(
            "testUser", pysharkbite.SystemPermissions.CREATE_TABLE)
        securityOps.grant_system_permission(
            "testUser", pysharkbite.SystemPermissions.DROP_TABLE)
        # validate that we DO see the permissions
        assert securityOps.has_system_permission(
            "testUser", pysharkbite.SystemPermissions.CREATE_TABLE)

        # Give testUser both scan labels used by the mutations below.
        auths = pysharkbite.Authorizations()
        auths.addAuthorization("blah1")
        auths.addAuthorization("blah2")
        securityOps.grantAuthorizations(auths, "testUser")

        tableOperations = super().getTableOperations()
        tableOperations.create(False)

        # Grant table-level permissions so testUser can read/write/drop.
        securityOps.grant_table_permission(
            "testUser", super().getTable(), pysharkbite.TablePermissions.READ)
        securityOps.grant_table_permission(
            "testUser", super().getTable(), pysharkbite.TablePermissions.WRITE)
        securityOps.grant_table_permission(
            "testUser", super().getTable(), pysharkbite.TablePermissions.DROP_TABLE)

        # Reconnect as the newly created user.
        super().setUser("testUser", "password")
        super().inity(replace=True)

        tableOperations = super().getTableOperations()
        securityOps = super().getConnector().securityOps()

        # Write one row with three differently-labelled cells.
        with tableOperations.createWriter(auths, 10) as writer:
            mutation = pysharkbite.Mutation("row2")
            mutation.put("cf", "cq", "blah1", 1569786960, "value")
            mutation.put("cf2", "cq2", "blah1", 1569786960, "value2")
            # empty value
            mutation.put("cf3", "cq3", "blah2", 1569786960, "")
            writer.addMutation(mutation)

        # Scan with only "blah1": the cf3 cell (labelled blah2) must be hidden.
        auths = pysharkbite.Authorizations()
        auths.addAuthorization("blah1")
        scanner = tableOperations.createScanner(auths, 2)
        startKey = pysharkbite.Key()
        endKey = pysharkbite.Key()
        startKey.setRow("row")
        endKey.setRow("row3")
        # Renamed from `range` so the builtin is not shadowed.
        scan_range = pysharkbite.Range(startKey, True, endKey, False)
        scanner.addRange(scan_range)
        resultset = scanner.getResultSet()

        for keyvalue in resultset:
            key = keyvalue.getKey()
            assert "row2" == key.getRow()
            value = keyvalue.getValue()
            if "cf" == key.getColumnFamily():
                assert "value" == value.get()
            if "cf2" == key.getColumnFamily():
                assert "value2" == value.get()
            if "cf3" == key.getColumnFamily():
                print("Unexpected column cf3")
                sys.exit(1)

        # delete your table if user did not create temp
        tableOperations.remove()

        # Clean up: reconnect as root and remove the temporary user.
        super().setUser("root", "secret")
        super().inity(replace=True)
        tableOperations = super().getTableOperations()
        securityOps = super().getSecurityOperations()
        securityOps.remove_user("testUser")

        # Connecting as the deleted user must now fail.
        try:
            super().setUser("testUser", "password")
            super().inity(replace=True)
            print("Expected failure when setting user")
            sys.exit(1)
        except pysharkbite.ClientException:
            print("caught expected exception")


runner = TestWrites()
runner.mthd()
#!/usr/bin/env python
# encoding: utf-8


def run(whatweb, pluginname):
    """Fingerprint the target by checking for an ``X-Cache`` response header."""
    header_name = "X-Cache"
    whatweb.recog_from_header(pluginname, header_name)
# -*- coding: UTF-8 -*-
#######################################################################
# ----------------------------------------------------------------------------
# "THE BEER-WARE LICENSE" (Revision 42):
# @tantrumdev wrote this file. As long as you retain this notice you
# can do whatever you want with this stuff. If we meet some day, and you think
# this stuff is worth it, you can buy me a beer in return. - Muad'Dib
# ----------------------------------------------------------------------------
#######################################################################

# Addon Name: Placenta
# Addon id: plugin.video.placenta
# Addon Provider: Mr.Blamo

import re
import urllib
import urlparse
import json
import base64

from resources.lib.modules import client, cleantitle, directstream, dom_parser2, source_utils, log_utils


class source:
    """Scraper for savaze.com; follows the provider interface expected by the
    addon (movie/tvshow/episode/sources/resolve)."""

    def __init__(self):
        self.priority = 1
        self.language = ['en']
        self.domains = ['savaze.com']
        self.base_link = 'http://www.savaze.com'
        self.movies_search_path = ('links/%s')

    def _collect_urls(self, imdb, suffix=''):
        """Probe the quality-specific page variants for an IMDB id; fall back
        to the bare links/<imdb> page when it is not a quality redirect.

        Shared by movie() and episode(), which previously duplicated this loop.
        """
        urls = []
        for quality in ['1080p', '720p', 'bluray-2', 'bluray']:
            url = urlparse.urljoin(
                self.base_link,
                self.movies_search_path % (imdb) + suffix + '-%s' % quality)
            r = client.request(url)
            if r:
                urls.append(url)
        url = urlparse.urljoin(self.base_link, self.movies_search_path % (imdb))
        url = client.request(url, output='geturl')
        if '-1080p' not in url and '-720p' not in url and '-bluray' not in url:
            r = client.request(url)
            if r:
                urls.append(url)
        return urls

    def movie(self, imdb, title, localtitle, aliases, year):
        # Returns a list of candidate page URLs, or None when nothing matched
        # (callers treat None as "no result").
        try:
            urls = self._collect_urls(imdb)
            if not urls:
                return
            return urls
        except Exception:
            return

    def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
        # Show-level lookups are not supported by this site; episodes are
        # resolved directly from the IMDB id.
        return ''

    def episode(self, url, imdb, tvdb, title, premiered, season, episode):
        try:
            # Zero-pad single-digit season/episode numbers ("1" -> "01").
            clean_season = season if len(season) >= 2 else '0' + season
            clean_episode = episode if len(episode) >= 2 else '0' + episode
            urls = self._collect_urls(
                imdb, '-s%se%s' % (clean_season, clean_episode))
            if not urls:
                return
            return urls
        except Exception:
            return

    def sources(self, url, hostDict, hostprDict):
        """Extract hoster links from each candidate page gathered above."""
        try:
            sources = []
            for u in url:
                hostDict += [('clicknupload.org')]
                # Infer quality from the URL suffix used when probing.
                quality = '1080p' if '-1080p' in u or 'bluray-2' in u else '720p' if '-720p' in u or 'bluray' in u else 'SD'
                r = client.request(u)
                r = dom_parser2.parse_dom(r, 'ul', {'class': 'download-links'})
                r = dom_parser2.parse_dom(r, 'a', req=['href'])
                r = [i.attrs['href'] for i in r if i]
                for i in r:
                    try:
                        valid, host = source_utils.is_host_valid(i, hostDict)
                        if not valid:
                            continue
                        sources.append({
                            'source': host,
                            'quality': quality,
                            'language': 'en',
                            'url': i,
                            'direct': False,
                            'debridonly': False
                        })
                    except Exception:
                        # Best effort: skip any link that fails validation.
                        pass
            return sources
        except Exception:
            return

    def resolve(self, url):
        # Links are already direct hoster URLs; nothing to resolve.
        return url
#
# Copyright (C) 2013-2014 Emerson Max de Medeiros Silva
#
# This file is part of ippl.
#
# ippl is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ippl is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ippl. If not, see <http://www.gnu.org/licenses/>.
#

import math

from ippl.shape import *
from ippl.render import *


if __name__ == "__main__":
    shape = Shape()

    # IV - Lines: two crossing pairs; the second pair is shifted right,
    # and every second segment is additionally shifted down.
    segments = [
        (Point(0, 0), Point(50, 25), None),
        (Point(50, 25), Point(0, 0), (0, 30)),
        (Point(0, 25), Point(50, 0), (55, 0)),
        (Point(50, 0), Point(0, 25), (55, 30)),
    ]
    for begin, end, offset in segments:
        segment = Line(begin, end)
        if offset is not None:
            segment.move(*offset)
        shape.outer_loop.append(segment)

    # Size the output image to the shape's bounding box (rounded up).
    aabb = shape.bounds()
    size = aabb.size()
    image_size = (int(size[0]) + 1, int(size[1]) + 1)

    renderer = Render()
    renderer.draw_bounds = True
    renderer.image_size = image_size
    renderer.initialize()
    renderer.shape(shape)
    renderer.save("render_test.png")
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
Linter to verify that all flags reported by GHC's --show-options mode
are documented in the user's guide.
"""

import sys
import subprocess
from typing import Set
from pathlib import Path

# A list of known-undocumented flags. This should be considered to be a to-do
# list of flags that need to be documented.
EXPECTED_UNDOCUMENTED_PATH = \
    Path(__file__).parent / 'expected-undocumented-flags.txt'

# Read via Path.read_text() so the handle is closed promptly; the original
# left the open() file object dangling at module level.
EXPECTED_UNDOCUMENTED = \
    set(EXPECTED_UNDOCUMENTED_PATH.read_text().split())


def expected_undocumented(flag: str) -> bool:
    """Return True if *flag* is knowingly undocumented and should be ignored.

    Covers the explicit to-do list plus whole families (-Werror*, negation
    forms, and warning-category modifiers) that are documented via their
    positive counterparts.
    """
    if flag in EXPECTED_UNDOCUMENTED:
        return True
    if flag.startswith('-Werror'):
        return True
    # Negation forms are documented under the positive flag.
    if flag.startswith(('-Wno-', '-dno', '-fno', '-XNo')):
        return True
    if flag.startswith(('-Wwarn=', '-Wno-warn=')):
        return True

    return False


def read_documented_flags(doc_flags) -> Set[str]:
    """Extract the set of flag names from the Sphinx-generated ghc-flags.txt.

    *doc_flags* is an open text file; each non-empty line starts with a flag.
    """
    # Map characters that mark the end of a flag
    # to whitespace.
    trans = str.maketrans({
        '=': ' ',
        '[': ' ',
        '⟨': ' ',
    })
    return {line.translate(trans).split()[0]
            for line in doc_flags.read().split('\n')
            if line != ''}


def read_ghc_flags(ghc_path: str) -> Set[str]:
    """Ask the GHC executable at *ghc_path* for its full flag list, dropping
    flags that are expected to be undocumented."""
    ghc_output = subprocess.check_output([ghc_path, '--show-options'],
                                         encoding='UTF-8')
    return {flag
            for flag in ghc_output.split('\n')
            if not expected_undocumented(flag)
            if flag != ''}


def main() -> None:
    """Compare GHC's reported flags against the documented set and exit
    non-zero when either direction of the comparison finds discrepancies."""
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('--ghc', type=argparse.FileType('r'),
                        help='path of GHC executable')
    parser.add_argument('--doc-flags', type=argparse.FileType('r'),
                        help='path of ghc-flags.txt output from Sphinx')
    args = parser.parse_args()

    doc_flags = read_documented_flags(args.doc_flags)
    ghc_flags = read_ghc_flags(args.ghc.name)

    failed = False
    undocumented = ghc_flags - doc_flags
    if len(undocumented) > 0:
        print(f'Found {len(undocumented)} flags not documented in the users guide:')
        print('\n'.join(f'  {flag}' for flag in sorted(undocumented)))
        print()
        failed = True

    # Flags still on the to-do list that have since been documented should be
    # removed from the expected-undocumented file.
    now_documented = EXPECTED_UNDOCUMENTED.intersection(doc_flags)
    if len(now_documented) > 0:
        print(f'Found flags that are documented yet listed in {EXPECTED_UNDOCUMENTED_PATH}:')
        print('\n'.join(f'  {flag}' for flag in sorted(now_documented)))
        print()
        failed = True

    if failed:
        sys.exit(1)


if __name__ == '__main__':
    main()
#!/usr/bin/env python
# -*- coding: utf-8 -*-

'''
=========================================================================

  Program:   Visualization Toolkit
  Module:    TestNamedColorsIntegration.py

  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
  All rights reserved.
  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.

     This software is distributed WITHOUT ANY WARRANTY; without even
     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
     PURPOSE.  See the above copyright notice for more information.

=========================================================================
'''

import vtk
import vtk.test.Testing
from vtk.util.misc import vtkGetDataRoot

VTK_DATA_ROOT = vtkGetDataRoot()


class reconstructSurface(vtk.test.Testing.vtkTest):

    def testReconstructSurface(self):
        """Reconstruct a surface from the cactus point cloud and compare the
        rendering against the stored baseline image."""

        # Read some points. Use a programmable filter to read them.
        pointSource = vtk.vtkProgrammableSource()

        def readPoints():
            # Use a context manager so the data file is always closed; the
            # original opened it and never closed the handle.
            points = vtk.vtkPoints()
            with open(VTK_DATA_ROOT + "/Data/cactus.3337.pts", "r") as fp:
                while True:
                    line = fp.readline().split()
                    if len(line) == 0:
                        break
                    # Only "p x y z" records carry point coordinates.
                    if line[0] == "p":
                        points.InsertNextPoint(float(line[1]), float(line[2]), float(line[3]))
            pointSource.GetPolyDataOutput().SetPoints(points)

        pointSource.SetExecuteMethod(readPoints)

        # Construct the surface and create isosurface
        surf = vtk.vtkSurfaceReconstructionFilter()
        surf.SetInputConnection(pointSource.GetOutputPort())

        cf = vtk.vtkContourFilter()
        cf.SetInputConnection(surf.GetOutputPort())
        cf.SetValue(0, 0.0)

        # The reconstruction comes out inside-out; flip cells and normals.
        reverse = vtk.vtkReverseSense()
        reverse.SetInputConnection(cf.GetOutputPort())
        reverse.ReverseCellsOn()
        reverse.ReverseNormalsOn()

        # Renamed from `map` so the builtin is not shadowed.
        mapper = vtk.vtkPolyDataMapper()
        mapper.SetInputConnection(reverse.GetOutputPort())
        mapper.ScalarVisibilityOff()

        surfaceActor = vtk.vtkActor()
        surfaceActor.SetMapper(mapper)
        surfaceActor.GetProperty().SetDiffuseColor(1.0000, 0.3882, 0.2784)
        surfaceActor.GetProperty().SetSpecularColor(1, 1, 1)
        surfaceActor.GetProperty().SetSpecular(.4)
        surfaceActor.GetProperty().SetSpecularPower(50)

        # Create the RenderWindow, Renderer and both Actors
        ren = vtk.vtkRenderer()
        renWin = vtk.vtkRenderWindow()
        renWin.AddRenderer(ren)

        # Add the actors to the renderer, set the background and size
        ren.AddActor(surfaceActor)
        ren.SetBackground(1, 1, 1)
        renWin.SetSize(300, 300)
        ren.GetActiveCamera().SetFocalPoint(0, 0, 0)
        ren.GetActiveCamera().SetPosition(1, 0, 0)
        ren.GetActiveCamera().SetViewUp(0, 0, 1)
        ren.ResetCamera()
        ren.GetActiveCamera().Azimuth(20)
        ren.GetActiveCamera().Elevation(30)
        ren.GetActiveCamera().Dolly(1.2)
        ren.ResetCameraClippingRange()

        # render and interact with data
        iRen = vtk.vtkRenderWindowInteractor()
        iRen.SetRenderWindow(renWin)
        renWin.Render()

        img_file = "reconstructSurface.png"
        vtk.test.Testing.compareImage(iRen.GetRenderWindow(), vtk.test.Testing.getAbsImagePath(img_file), threshold=25)
        vtk.test.Testing.interact()


if __name__ == "__main__":
    vtk.test.Testing.main([(reconstructSurface, 'test')])
# -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-09-01 11:41
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):
    # Adds two tunable password-recovery settings to UserProfilesSettings.

    dependencies = [
        ('profiles', '0006_add_show_security_question_field'),
    ]

    operations = [
        # How many security questions a user is asked when recovering a
        # password (default: 3).
        migrations.AddField(
            model_name='userprofilessettings',
            name='num_security_questions',
            field=models.PositiveSmallIntegerField(default=3, verbose_name='Number of security questions asked for password recovery'),
        ),
        # How many failed recovery attempts are allowed before the account is
        # locked out (default: 5).
        migrations.AddField(
            model_name='userprofilessettings',
            name='password_recovery_retries',
            field=models.PositiveSmallIntegerField(default=5, verbose_name='Max number of password recovery retries before lockout'),
        ),
    ]
from flask import render_template, redirect, url_for, request
from flask.views import MethodView

from nastradini import mongo, utils
from positionform import PositionForm


class Position(MethodView):
    """Render the position form and persist submissions to MongoDB."""

    methods = ['GET', 'POST']

    def get(self):
        """Show an empty position form."""
        return render_template('position.html', form=PositionForm())

    def post(self):
        """Upsert the submitted position data, then return to the listing."""
        # The document id ties this submission to the current user.
        doc_id = utils.get_doc_id()

        # Bind the submitted values; renamed from `json` to avoid confusion
        # with the stdlib module of the same name.
        payload = PositionForm(request.form).data

        # Third argument True = upsert (insert when the id is not present).
        mongo.db.positions.update({'_id': doc_id}, payload, True)

        return redirect(url_for('browse_internal_positions'))
#!/usr/bin/env python # Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Given the output of -t commands from a ninja build for a gyp and GN generated build, report on differences between the command lines.""" import os import shlex import subprocess import sys # Must be in src/. os.chdir(os.path.join(os.path.dirname(__file__), '..', '..', '..')) g_total_differences = 0 def FindAndRemoveArgWithValue(command_line, argname): """Given a command line as a list, remove and return the value of an option that takes a value as a separate entry. Modifies |command_line| in place. """ if argname not in command_line: return '' location = command_line.index(argname) value = command_line[location + 1] command_line[location:location + 2] = [] return value def MergeSpacedArgs(command_line, argname): """Combine all arguments |argname| with their values, separated by a space.""" i = 0 result = [] while i < len(command_line): arg = command_line[i] if arg == argname: result.append(arg + ' ' + command_line[i + 1]) i += 1 else: result.append(arg) i += 1 return result def NormalizeSymbolArguments(command_line): """Normalize -g arguments. If there's no -g args, it's equivalent to -g0. -g2 is equivalent to -g. Modifies |command_line| in place. """ # Strip -g0 if there's no symbols. have_some_symbols = False for x in command_line: if x.startswith('-g') and x != '-g0': have_some_symbols = True if not have_some_symbols and '-g0' in command_line: command_line.remove('-g0') # Rename -g2 to -g. if '-g2' in command_line: command_line[index('-g2')] = '-g' def GetFlags(lines): """Turn a list of command lines into a semi-structured dict.""" flags_by_output = {} for line in lines: # TODO(scottmg): Hacky way of getting only cc for now. 
if 'clang' not in line: continue command_line = shlex.split(line.strip())[1:] output_name = FindAndRemoveArgWithValue(command_line, '-o') dep_name = FindAndRemoveArgWithValue(command_line, '-MF') NormalizeSymbolArguments(command_line) command_line = MergeSpacedArgs(command_line, '-Xclang') defines = [x for x in command_line if x.startswith('-D')] include_dirs = [x for x in command_line if x.startswith('-I')] dash_f = [x for x in command_line if x.startswith('-f')] warnings = [x for x in command_line if x.startswith('-W')] cc_file = [x for x in command_line if x.endswith('.cc') or x.endswith('.c') or x.endswith('.cpp')] if len(cc_file) != 1: print 'Skipping %s' % command_line continue assert len(cc_file) == 1 others = [x for x in command_line if x not in defines and \ x not in include_dirs and \
x
not in dash_f and \ x not in warnings and \ x not in cc_file] # Filter for libFindBadConstructs.so having a relative path in one and # absolute path in the other. others_filtered = [] for x in others: if x.startswith('-Xclang ') and x.endswith('libFindBadConstructs.so'): others_filtered.append( '-Xclang ' + os.path.join(os.getcwd(), os.path.normpath( os.path.join('out/gn_flags', x.split(' ', 1)[1])))) elif x.startswith('-B'): others_filtered.append( '-B' + os.path.join(os.getcwd(), os.path.normpath(os.path.join('out/gn_flags', x[2:])))) else: others_filtered.append(x) others = others_filtered flags_by_output[cc_file[0]] = { 'output': output_name, 'depname': dep_name, 'defines': sorted(defines), 'include_dirs': sorted(include_dirs), # TODO(scottmg): This is wrong. 'dash_f': sorted(dash_f), 'warnings': sorted(warnings), 'other': sorted(others), } return flags_by_output def CompareLists(gyp, gn, name, dont_care_gyp=None, dont_care_gn=None): """Return a report of any differences between gyp and gn lists, ignoring anything in |dont_care_{gyp|gn}| respectively.""" global g_total_differences if not dont_care_gyp: dont_care_gyp = [] if not dont_care_gn: dont_care_gn = [] output = '' if gyp[name] != gn[name]: gyp_set = set(gyp[name]) gn_set = set(gn[name]) missing_in_gyp = gyp_set - gn_set missing_in_gn = gn_set - gyp_set missing_in_gyp -= set(dont_care_gyp) missing_in_gn -= set(dont_care_gn) if missing_in_gyp or missing_in_gn: output += ' %s differ:\n' % name if missing_in_gyp: output += ' In gyp, but not in GN:\n %s' % '\n '.join( sorted(missing_in_gyp)) + '\n' g_total_differences += len(missing_in_gyp) if missing_in_gn: output += ' In GN, but not in gyp:\n %s' % '\n '.join( sorted(missing_in_gn)) + '\n\n' g_total_differences += len(missing_in_gn) return output def Run(command_line): """Run |command_line| as a subprocess and return stdout. 
Raises on error.""" return subprocess.check_output(command_line, shell=True) def main(): if len(sys.argv) != 2 and len(sys.argv) != 3: print 'usage: %s gyp_target gn_target' % __file__ print ' or: %s target' % __file__ return 1 if len(sys.argv) == 2: sys.argv.append(sys.argv[1]) print >>sys.stderr, 'Regenerating...' # Currently only Release, non-component. Run('gn gen out/gn_flags --args="is_debug=false is_component_build=false"') os.environ.pop('GYP_DEFINES', None) Run('python build/gyp_chromium -Goutput_dir=out_gyp_flags -Gconfig=Release') gn = Run('ninja -C out/gn_flags -t commands %s' % sys.argv[2]) gyp = Run('ninja -C out_gyp_flags/Release -t commands %s' % sys.argv[1]) all_gyp_flags = GetFlags(gyp.splitlines()) all_gn_flags = GetFlags(gn.splitlines()) gyp_files = set(all_gyp_flags.keys()) gn_files = set(all_gn_flags.keys()) different_source_list = gyp_files != gn_files if different_source_list: print 'Different set of sources files:' print ' In gyp, not in GN:\n %s' % '\n '.join( sorted(gyp_files - gn_files)) print ' In GN, not in gyp:\n %s' % '\n '.join( sorted(gn_files - gyp_files)) print '\nNote that flags will only be compared for files in both sets.\n' file_list = gyp_files & gn_files files_with_given_differences = {} for filename in sorted(file_list): gyp_flags = all_gyp_flags[filename] gn_flags = all_gn_flags[filename] differences = CompareLists(gyp_flags, gn_flags, 'dash_f') differences += CompareLists(gyp_flags, gn_flags, 'defines') differences += CompareLists(gyp_flags, gn_flags, 'include_dirs') differences += CompareLists(gyp_flags, gn_flags, 'warnings', dont_care_gn=[ # More conservative warnings in GN we consider to be OK. 
'-Wendif-labels', '-Wextra', '-Wsign-compare', ]) differences += CompareLists(gyp_flags, gn_flags, 'other') if differences: files_with_given_differences.setdefault(differences, []).append(filename) for diff, files in files_with_given_differences.iteritems(): print '\n'.join(sorted(files)) print diff print 'Total differences:', g_total_differences # TODO(scottmg): Return failure on difference once we're closer to identical. return 0 if __name__ == '__main__': sys.exit(main())
from sender import *

if __name__ == '__main__':
    # Open a connection and publish one message to the default exchange.
    connection = Connection().initialize()
    try:
        connection.send('Default exchange message!')
    finally:
        # Always tear the connection down, even if the send raises
        # (the original leaked the connection on error).
        connection.destroy()
parse import urlparse from wal_e import log_help from wal_e.pipeline import get_download_pipeline from wal_e.piper import PIPE from wal_e.retries import retry, retry_with_count assert calling_format logger = log_help.WalELogger(__name__) _Key = collections.namedtuple('_Key', ['size']) WABS_CHUNK_SIZE = 4 * 1024 * 1024 def uri_put_file(creds, uri, fp, content_encoding=None): assert fp.tell() == 0 assert uri.startswith('wabs://') def log_upload_failures_on_error(exc_tup, exc_processor_cxt): def standard_detail_message(prefix=''): return (prefix + ' There have been {n} attempts to upload ' 'file {url} so far.'.format(n=exc_processor_cxt, url=uri)) typ, value, tb = exc_tup del exc_tup # Screen for certain kinds of known-errors to retry from if issubclass(typ, socket.error): socketmsg = value[1] if isinstance(value, tuple) else value logger.info( msg='Retrying upload because of a socket error', detail=standard_detail_message( "The socket error's message is '{0}'." .format(socketmsg))) else: # For all otherwise untreated exceptions, report them as a # warning and retry anyway -- all exceptions that can be # justified should be treated and have error messages # listed. logger.warning( msg='retrying file upload from unexpected exception', detail=standard_detail_message( 'The exception type is {etype} and its value is ' '{evalue} and its traceback is {etraceback}' .format(etype=typ, evalue=value, etraceback=''.join(traceback.format_tb(tb))))) # Help Python GC by resolving possible cycles del tb # Because we're uploading in chunks, catch rate limiting and # connection errors which occur for each individual chunk instead of # failing the whole file and restarting. 
@retry(retry_with_count(log_upload_failures_on_error)) def upload_chunk(chunk, block_id): check_sum = base64.encodestring(md5(chunk).digest()).strip('\n') conn.put_block(url_tup.netloc, url_tup.path, chunk, block_id, content_md5=check_sum) url_tup = urlparse(uri) kwargs = dict(x_ms_blob_type='BlockBlob') if content_encoding is not None: kwargs['x_ms_blob_content_encoding'] = content_encoding conn = BlobService(creds.account_name, creds.account_key, protocol='https') conn.put_blob(url_tup.netloc, url_tup.path, '', **kwargs) # WABS requires large files to be uploaded in 4MB chunks block_ids = [] length, index = 0, 0 pool_size = os.getenv('WABS_UPLOAD_POOL_SIZE', 5) p = gevent.pool.Pool(size=pool_size) while True: data = fp.read(WABS_CHUNK_SIZE) if data: length += len(data) block_id = base64.b64encode(str(index)) p.wait_available() p.spawn(upload_chunk, data, block_id) block_ids.append(block_id) index += 1 else: p.join() break conn.put_block_list(url_tup.netloc, url_tup.path, block_ids) # To maintain consistency with the S3 version of this function we must # return an object with a certain set of attributes. Currently, that set # of attributes consists of only 'size' return _Key(size=len(data)) def uri_get_file(creds, uri, conn=None): assert uri.startswith('wabs://') url_tup = urlparse(uri) if conn is None: conn = BlobService(creds.account_name, creds.account_key, protocol='https') # Determin the size of the target blob props = conn.get_blob_properties(url_tup.netloc, url_tup.path) blob_size = int(props['content-length']) ret_size = 0 data = '' # WABS requires large files to be downloaded in 4MB chunks while ret_size < blob_size: ms_range = 'bytes={}-{}'.format(ret_size, ret_size + WABS_CHUNK_SIZE - 1) while True: # Because we're downloading in chunks, catch rate limiting and # connection errors here instead of letting them bubble up to the # @retry decorator so that we don't have to start downloading the # whole file over again. try:
part = conn.get_blob(url_tup.netloc, url_tup.path, x_ms_range=ms_range) except EnvironmentError as e: if e.errno in (errno.EBUSY, errno.ECONNRESET): logger.warning( msg="retrying after encountering exception", detail=("Exception traceback:\n{0}".format(
traceback.format_exception(*sys.exc_info()))), hint="") gevent.sleep(30) else: raise else: break length = len(part) ret_size += length data += part if length > 0 and length < WABS_CHUNK_SIZE: break elif length == 0: break return data def do_lzop_get(creds, url, path, decrypt): """ Get and decompress a S3 URL This streams the content directly to lzop; the compressed version is never stored on disk. """ assert url.endswith('.lzo'), 'Expect an lzop-compressed file' assert url.startswith('wabs://') conn = BlobService(creds.account_name, creds.account_key, protocol='https') def log_wal_fetch_failures_on_error(exc_tup, exc_processor_cxt): def standard_detail_message(prefix=''): return (prefix + ' There have been {n} attempts to fetch wal ' 'file {url} so far.'.format(n=exc_processor_cxt, url=url)) typ, value, tb = exc_tup del exc_tup # Screen for certain kinds of known-errors to retry from if issubclass(typ, socket.error): socketmsg = value[1] if isinstance(value, tuple) else value logger.info( msg='Retrying fetch because of a socket error', detail=standard_detail_message( "The socket error's message is '{0}'." .format(socketmsg))) else: # For all otherwise untreated exceptions, report them as a # warning and retry anyway -- all exceptions that can be # justified should be treated and have error messages # listed. logger.warning( msg='retrying WAL file fetch from unexpected exception', detail=standard_detail_message( 'The exception type is {etype} and its value is ' '{evalue} and its traceback is {etraceback}' .format(etype=typ, evalue=value, etraceback=''.join(traceback.format_tb(tb))))) # Help Python GC by resolving possible cycles del tb @retry(retry_with_count(log_wal_fetch_failures_on_error)) def download(): with open(path, 'wb') as decomp_out: with get_download_pipeline(PIPE, decomp_out, decrypt) as pl: g = gevent.spawn(write_and_return_error, url, conn, pl.stdin) try: # Raise any exceptions guarded by # write_and_return_error. 
exc = g.get() if exc is not None: raise exc except WindowsAzureMissingResourceError: # Short circuit any re-try attempts under certain race # conditions. pl.abort() logger.warning( msg=('could no longer locate object while ' 'performing wal restore'), detail=('The absolute URI that could not be '
from django.urls import path

from . import dashboard_views

# URL namespace, e.g. reverse('exam:assignment_new').
app_name = 'exam'

urlpatterns = [
    # Form for creating a new assignment.
    path('assignment/new/', dashboard_views.MakeAssignmentView.as_view(),
         name='assignment_new'),
    # Confirmation page shown after an assignment is created.
    path('assignment/success/', dashboard_views.MakeAssignmentSuccess.as_view(),
         name='assignment_success'),
    # Name list for a single assignment, looked up by integer id.
    path('assignment/<int:assignment_id>/name_list/',
         dashboard_views.AssignmentNameListView.as_view(),
         name='assignment_name_list'),
]
# Generated by the protocol buffer compiler. DO NOT EDIT! # source: pogoprotos/networking/requests/messages/release_pokemon_message.proto import sys _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() DESCRIPTOR = _descriptor.FileDescriptor( name='pogoprotos/networking/requests/messages/release_pokemon_message.proto', package='pogoprotos.networking.requests.messages', syntax='proto3', serialized_pb=_b('\nEpogoprotos/networking/requests/messages/release_pokemon_message.proto\x12\'pogoprotos.networking.requests.messages\"@\n\x15ReleasePokemonMessage\x12\x12\n\npokemon_id\x18\x01 \x01(\x06\x12\x13\n\x0bpokemon_ids\x18\x02 \x03(\x06\x62\x06proto3') ) _sym_db.RegisterFileDescriptor(DESCRIPTOR) _RELEASEPOKEMONMESSAGE = _descriptor.Descriptor( name='ReleasePokemonMessage', full_name='pogoprotos.networking.requests.messages.ReleasePokemonMessage', filename=None, file=DESCRIPTOR, containing_type=None, fields=[ _descriptor.FieldDescriptor( name='pokemon_id', full_name='pogoprotos.networking.requests.messages.ReleasePokemonMessage.pokemon_id', index=0, number=1, type=6, cpp_type=4, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='pokemon_ids', full_name='pogoprotos.networking.requests.messages.Rel
easePokemonMessage.pokemon_ids', index=1, number=2, type=6, cpp_type=4, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), ], extensions=[ ], nested_types=[], enum_t
ypes=[ ], options=None, is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], serialized_start=114, serialized_end=178, ) DESCRIPTOR.message_types_by_name['ReleasePokemonMessage'] = _RELEASEPOKEMONMESSAGE ReleasePokemonMessage = _reflection.GeneratedProtocolMessageType('ReleasePokemonMessage', (_message.Message,), dict( DESCRIPTOR = _RELEASEPOKEMONMESSAGE, __module__ = 'pogoprotos.networking.requests.messages.release_pokemon_message_pb2' # @@protoc_insertion_point(class_scope:pogoprotos.networking.requests.messages.ReleasePokemonMessage) )) _sym_db.RegisterMessage(ReleasePokemonMessage) # @@protoc_insertion_point(module_scope)
import os
import sys
import textwrap
from collections import OrderedDict
from argparse import ArgumentParser, RawDescriptionHelpFormatter

from faice.tools.run.__main__ import main as run_main
from faice.tools.run.__main__ import DESCRIPTION as RUN_DESCRIPTION
from faice.tools.vagrant.__main__ import main as vagrant_main
from faice.tools.vagrant.__main__ import DESCRIPTION as VAGRANT_DESCRIPTION

VERSION = '1.2'

# Maps the tool name given on the command line to its entry point.
TOOLS = OrderedDict([
    ('run', run_main),
    ('vagrant', vagrant_main)
])


def main():
    """Top-level CLI: print help/version or dispatch to a registered tool."""
    description = [
        'FAICE Copyright (C) 2017 Christoph Jansen',
        '',
        # BUG FIX: the two implicitly concatenated literals were missing a
        # separating space, producing "...redistribute itunder certain...".
        'This program comes with ABSOLUTELY NO WARRANTY. This is free software, and you are welcome to redistribute it '
        'under certain conditions. See the LICENSE file distributed with this software for details.',
    ]

    parser = ArgumentParser(
        description=os.linesep.join([textwrap.fill(block) for block in description]),
        formatter_class=RawDescriptionHelpFormatter
    )
    parser.add_argument(
        '-v', '--version', action='version', version=VERSION
    )

    subparsers = parser.add_subparsers(title="tools")
    sub_parser = subparsers.add_parser('run', help=RUN_DESCRIPTION, add_help=False)
    _ = subparsers.add_parser('vagrant', help=VAGRANT_DESCRIPTION, add_help=False)

    if len(sys.argv) < 2:
        parser.print_help()
        exit()

    _ = parser.parse_known_args()
    sub_args = sub_parser.parse_known_args()

    # The first unparsed token names the tool; rewrite argv so the tool's own
    # argparse sees "faice <tool>" as its program name.
    tool = TOOLS[sub_args[1][0]]
    sys.argv[0] = 'faice {}'.format(sys.argv[1])
    del sys.argv[1]
    exit(tool())


if __name__ == '__main__':
    main()
from django.contrib.gis.db import models

# Create your models here.


class GeoWaterUse(models.Model):
    """Water use for a hydraulic fracturing event at a single well.

    One row per frac event; `geometry` is the well location as a point.
    Ordered by API well number by default.
    """

    id = models.AutoField(primary_key=True)
    # Well location as a GIS point (lat/lon also stored redundantly below).
    geometry = models.PointField()
    # API well number (industry-standard well identifier).
    api = models.CharField(max_length=20, null=False)
    well_name = models.CharField(max_length=100, null=True)
    # Date of the hydraulic fracturing event.
    frac_date = models.DateField(auto_now=False, auto_now_add=False)
    state = models.CharField(max_length=20, null=True)
    county = models.CharField(max_length=20, null=True)
    latitude = models.DecimalField(max_digits=20, decimal_places=6)
    longitude = models.DecimalField(max_digits=20, decimal_places=6)
    # NOTE(review): units of horizontal_length and water_use are not recorded
    # here — presumably feet and barrels/gallons; confirm against the loader.
    horizontal_length = models.DecimalField(max_digits=20, decimal_places=3)
    water_use = models.DecimalField(max_digits=20, decimal_places=3)
    # GeoManager enables spatial queryset lookups (pre-Django-2.0 style).
    objects = models.GeoManager()

    class Meta:
        ordering = ["api"]


class GeoProducedWater(models.Model):
    """Produced-water volume report for a well over a reporting period.

    One row per (well, volume_date); `is_prediction` distinguishes modeled
    values from reported ones.
    """

    id = models.AutoField(primary_key=True)
    geometry = models.PointField()
    api = models.CharField(max_length=20, null=False)
    well_name = models.CharField(max_length=100, null=True)
    latitude = models.DecimalField(max_digits=20, decimal_places=6)
    longitude = models.DecimalField(max_digits=20, decimal_places=6)
    # Reporting date for the produced-water volume below.
    volume_date = models.DateField(auto_now=False, auto_now_add=False)
    h2o_volume = models.DecimalField(max_digits=10, decimal_places=2)
    # Number of producing days in the reporting period.
    days_on = models.PositiveIntegerField()
    # True when the volume is a model prediction rather than a measurement.
    is_prediction = models.BooleanField()
    objects = models.GeoManager()

    class Meta:
        ordering = ["api"]
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# random code that helps with debugging/testing the python interfaces and examples
# this is not meant to be run by normal users
from __future__ import with_statement # for python 2.5
__copyright__ = 'Copyright (C) 2009-2010'
__license__ = 'Apache License, Version 2.0'

from openravepy import *
from numpy import *
from itertools import izip
# NOTE(review): `time` is used below but never imported directly — presumably
# re-exported by `from openravepy import *`; confirm before running standalone.


def test_drawjoints(robot):
    """draws the joint axes of the robot

    Loops forever (interrupt to stop), redrawing every 0.1s:
    - active joints as full-length lines, blue channel scaled by DOF index
    - passive joints as quarter-length, thicker, green-scaled lines
    The handle list `h` keeps the drawings alive; it is dropped on exit so
    the environment erases them.
    """
    env = robot.GetEnv()
    h = None
    try:
        while True:
            h = [env.drawlinelist(array([j.GetAnchor()-j.GetAxis(0),j.GetAnchor()+j.GetAxis(0)]),5,array([0,0,float(j.GetDOFIndex())/robot.GetDOF()])) for j in robot.GetJoints() if not j.IsStatic()]
            h += [env.drawlinelist(array([j.GetAnchor()-0.25*j.GetAxis(0),j.GetAnchor()+0.25*j.GetAxis(0)]),20,array([0,float(j.GetDOFIndex())/robot.GetDOF(),0])) for j in robot.GetPassiveJoints() if not j.IsStatic()]
            time.sleep(0.1)
    finally:
        # releasing the handles removes the drawn lines from the viewer
        h = None


def test_drawmanip(manip):
    """Draw the axes of a manipulator's arm joints, refreshing forever.

    Like test_drawjoints but restricted to the joints in manip's arm chain;
    blue channel encodes position within the chain (i/8).
    """
    robot=manip.GetRobot()
    env=robot.GetEnv()
    while True:
        h = None
        joints = [robot.GetJoints()[i] for i in manip.GetArmJoints()]
        h = [env.drawlinelist(array([j.GetAnchor()-j.GetAxis(0),j.GetAnchor()+j.GetAxis(0)]),5,array([0,0,i/8.0])) for i,j in enumerate(joints)]
        time.sleep(0.1)


def derive_normalizeAxisRotation():
    """Find the rotation theta around axis v such that rot(v,theta) * q is closest to the identity"""
    # Symbolic derivation (sympy): build the axis-angle quaternion q0, its
    # derivative w.r.t. theta, compose with an arbitrary quaternion q1 and
    # set d/dtheta of the distance to the identity quaternion to zero.
    from sympy import *
    vx,vy,vz = Symbol('vx'),Symbol('vy'),Symbol('vz')
    v = Matrix(3,1,[vx,vy,vz])
    theta = Symbol('theta')
    # q0 = quaternion for rotation of angle theta about unit axis v
    q0 = Matrix(4,1,[cos(theta/2),sin(theta/2)*v[0],sin(theta/2)*v[1],sin(theta/2)*v[2]])
    # d q0 / d theta
    q0dtheta = Matrix(4,1,[-sin(theta/2)/2,cos(theta/2)*v[0]/2,cos(theta/2)*v[1]/2,cos(theta/2)*v[2]/2])
    qx,qy,qz,qw = Symbol('qx'),Symbol('qy'),Symbol('qz'),Symbol('qw')
    q1 = Matrix(4,1,[qx,qy,qz,qw])
    qidentity = Matrix(4,1,[S.One,S.Zero,S.Zero,S.Zero])
    # Hamilton product qfinal = q0 * q1, expanded component-wise
    qfinal = Matrix(4,1,[q0[0]*q1[0] - q0[1]*q1[1] - q0[2]*q1[2] - q0[3]*q1[3],
                         q0[0]*q1[1] + q0[1]*q1[0] + q0[2]*q1[3] - q0[3]*q1[2],
                         q0[0]*q1[2] + q0[2]*q1[0] + q0[3]*q1[1] - q0[1]*q1[3],
                         q0[0]*q1[3] + q0[3]*q1[0] + q0[1]*q1[2] - q0[2]*q1[1]])
    # d qfinal / d theta (same product with q0 replaced by its derivative)
    qfinaldtheta = Matrix(4,1,[q0dtheta[0]*q1[0] - q0dtheta[1]*q1[1] - q0dtheta[2]*q1[2] - q0dtheta[3]*q1[3],
                               q0dtheta[0]*q1[1] + q0dtheta[1]*q1[0] + q0dtheta[2]*q1[3] - q0dtheta[3]*q1[2],
                               q0dtheta[0]*q1[2] + q0dtheta[2]*q1[0] + q0dtheta[3]*q1[1] - q0dtheta[1]*q1[3],
                               q0dtheta[0]*q1[3] + q0dtheta[3]*q1[0] + q0dtheta[1]*q1[2] - q0dtheta[2]*q1[1]])
    # stationarity condition: derivative of |qidentity - qfinal|^2 (up to sign)
    solveeq = qfinaldtheta.dot(qidentity-qfinal).expand()
    sthetad2 = Symbol('sthetad2') # sin(theta/2)
    cthetad2 = Symbol('cthetad2') # cos(theta/2)
    finaleq = Poly(solveeq.subs([(sin(theta/2),sthetad2),(cos(theta/2),cthetad2)]),sthetad2,cthetad2)
    # should be:
    # Poly((qw**2/2 + qx**2/2 + qy**2/2 + qz**2/2 - qw**2*vx**2/2 - qw**2*vy**2/2 - qw**2*vz**2/2 - qx**2*vx**2/2 - qx**2*vy**2/2 - qx**2*vz**2/2 - qy**2*vx**2/2 - qy**2*vy**2/2 - qy**2*vz**2/2 - qz**2*vx**2/2 - qz**2*vy**2/2 - qz**2*vz**2/2)*sthetad2*cthetad2 - qx/2*sthetad2 + (-qw*vz/2 - qy*vx/2 - qz*vy/2)*cthetad2, sthetad2, cthetad2)
    # sthetad2*cthetad2 coefficient reduces to 0
    # Poly(- qx/2*sthetad2 + (-qw*vz/2 - qy*vx/2 - qz*vy/2)*cthetad2, sthetad2, cthetad2)
    # theta = 2*atan2(-qw*vz-qz*vy-qy*vx,qx)
# -*- coding: utf-8 -*-
#
# codimension - graphics python two-way code editor and analyzer
# Copyright (C) 2010-2017 Sergey Satskiy <sergey.satskiy@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#

"""A few constants which do not depend on other project files"""

# Default encoding for the cases when:
# - the encoding could not be detected
# - replaces ascii to be on the safe side
DEFAULT_ENCODING = 'utf-8'

# File encoding used for various settings and project files
SETTINGS_ENCODING = 'utf-8'

# Directory to store Codimension settings and projects
# (created under the user's home directory; the "3" suffix keeps it separate
# from settings of earlier major versions)
CONFIG_DIR = '.codimension3'
# Phase-space grid configuration.
# NOTE(review): naming (q1/q2 spatial, p1-p3 momentum, N_ghost ghost zones)
# matches the convention of Boltzmann-solver frameworks such as Bolt —
# confirm against the consuming solver.

# Spatial dimension q1: domain [q1_start, q1_end] with N_q1 grid points
q1_start = 0
q1_end = 1
N_q1 = 128

# Spatial dimension q2: effectively coarse (3 points)
q2_start = 0
q2_end = 1
N_q2 = 3

# Momentum dimension p1: domain [-4, 4], 4 points
p1_start = -4
p1_end = 4
N_p1 = 4

# Momentum dimension p2: single point (dimension collapsed)
p2_start = -0.5
p2_end = 0.5
N_p2 = 1

# Momentum dimension p3: single point (dimension collapsed)
p3_start = -0.5
p3_end = 0.5
N_p3 = 1

# Number of ghost zones padding each spatial boundary
N_ghost = 3
from ..provider.constants import Provider, string_to_provider
from ..services.base import Service
from .context import DisconnectOnException
from .errors import (
    AlreadyConnectedException,
    ClusterError,
    MultipleClustersConnectionError,
    NotConnectedError,
    PleaseDisconnectError,
)


class ClusterService(Service):
    """Orchestrates Kubernetes cluster lifecycle: connect/create/update/
    destroy/disconnect, using the provider service resolved from a provider
    string and the shared `self.services` registry.

    Invariant enforced throughout: at most one cluster connection at a time.
    """

    def connected_clusters(self):
        """Return the list of cluster names with a local kube config."""
        return self.services.kubernetes_service.get_cluster_names()

    def assert_is_connected(self):
        """Return the single connected cluster name.

        Raises NotConnectedError when none, MultipleClustersConnectionError
        when more than one config is present.
        """
        connected_clusters = self.connected_clusters()
        if not connected_clusters:
            raise NotConnectedError()
        if len(connected_clusters) > 1:
            raise MultipleClustersConnectionError(connected_clusters)
        return connected_clusters[0]

    def assert_is_disconnected(self):
        """Raise unless no cluster is currently connected."""
        connected_clusters = self.connected_clusters()
        if connected_clusters:
            if len(connected_clusters) == 1:
                raise PleaseDisconnectError(connected_clusters[0])
            raise MultipleClustersConnectionError(connected_clusters)

    def connect(self, cluster_name, provider_string, kubeconfig, registry):
        """Connect to an existing cluster and verify the connection.

        When `kubeconfig` is None it is generated by the provider; an
        explicit kubeconfig is only allowed with the custom provider.
        Returns the cluster object from `self.test()`.
        """
        try:
            self.assert_is_disconnected()
        except PleaseDisconnectError as e:
            # Re-connecting to the same cluster is a distinct, friendlier error.
            if e.current_cluster_name == cluster_name:
                raise AlreadyConnectedException(e.current_cluster_name) from e
            raise
        provider = string_to_provider(provider_string)
        provider_service = self.services.provider_broker.get_provider_service(provider)
        if kubeconfig is None:
            kubeconfig = provider_service.create_kubeconfig(cluster_name)
        else:
            assert provider == Provider.CUSTOM, "Must use --provider custom to connect with a kubeconfig"
        # Roll back to a disconnected state if anything below fails.
        with DisconnectOnException(cluster_name, self.services):
            self.services.kubernetes_service.write_config(cluster_name, kubeconfig)
            self.services.kubernetes_service.ensure_orchestrate_namespace()
            cluster = provider_service.create_cluster_object(
                services=self.services,
                name=cluster_name,
                registry=registry,
            )
            self.services.cluster_metadata_service.write_metadata(cluster)
            return self.test()

    def create(self, options):
        """Provision a new cluster from an options dict; return its name."""
        try:
            self.assert_is_disconnected()
        except PleaseDisconnectError as e:
            if e.current_cluster_name == options.get('cluster_name', ''):
                raise AlreadyConnectedException(e.current_cluster_name) from e
            raise
        self.services.options_validator_service.validate_cluster_options(**options)
        cluster_name = options.get('cluster_name', '')
        provider_string = options.get('provider', '')
        provider = string_to_provider(provider_string)
        provider_service = self.services.provider_broker.get_provider_service(provider)
        with DisconnectOnException(cluster_name, self.services):
            cluster = provider_service.create_kubernetes_cluster(options)
            self.services.kubernetes_service.ensure_orchestrate_namespace()
            self.services.cluster_metadata_service.write_metadata(cluster)
            # Block until all nodes report Ready before declaring success.
            self.services.kubernetes_service.wait_until_nodes_are_ready()
            return cluster.name

    def update(self, options):
        """Apply updated options to an existing cluster; return its name.

        Unlike create(), does not require a disconnected state.
        """
        self.services.options_validator_service.validate_cluster_options(**options)
        cluster_name = options.get('cluster_name', '')
        provider_string = options.get('provider', '')
        provider = string_to_provider(provider_string)
        provider_service = self.services.provider_broker.get_provider_service(provider)
        with DisconnectOnException(cluster_name, self.services):
            cluster = provider_service.update_kubernetes_cluster(options)
            self.services.kubernetes_service.ensure_orchestrate_namespace()
            self.services.kubernetes_service.wait_until_nodes_are_ready()
            return cluster.name

    def destroy(self, cluster_name, provider_string):
        """Tear down the named cluster and delete its local metadata."""
        provider = string_to_provider(provider_string)
        provider_service = self.services.provider_broker.get_provider_service(provider)
        provider_service.destroy_kubernetes_cluster(cluster_name=cluster_name)
        self.services.cluster_metadata_service.ensure_metadata_deleted(cluster_name=cluster_name)

    def disconnect(self, cluster_name, disconnect_all):
        """Remove local config/metadata for one cluster or for all.

        Exactly one of `cluster_name` / `disconnect_all` must be provided.
        """
        if (cluster_name and disconnect_all) or (not cluster_name and not disconnect_all):
            raise ClusterError('Must provide exactly one of --cluster-name <cluster_name> and --all')
        try:
            current_cluster_name = self.assert_is_connected()
            if cluster_name is not None and current_cluster_name != cluster_name:
                raise PleaseDisconnectError(current_cluster_name)
        except MultipleClustersConnectionError:
            # Multiple configs present is only acceptable with --all.
            if not disconnect_all:
                raise
        for cname in self.connected_clusters():
            try:
                self.services.cluster_metadata_service.ensure_metadata_deleted(cluster_name=cname)
                self.services.kubernetes_service.ensure_config_deleted(cluster_name=cname)
                # NOTE(review): success is logged at warning level — presumably
                # to guarantee visibility; "occured" typo in the message below
                # is user-facing text and left unchanged in this doc-only pass.
                self.services.logging_service.warning(f'Successfully disconnected from {cname}')
            except Exception as e:
                raise ClusterError(
                    f'Looks like an error occured while attempting to disconnect from cluster "{cname}".'
                ) from e

    def get_connected_cluster(self):
        """Return the metadata object of the single connected cluster."""
        cluster_name = self.assert_is_connected()
        return self.services.cluster_metadata_service.read_metadata(cluster_name)

    def test(self):
        """Verify the connected cluster is reachable; return its metadata."""
        cluster = self.get_connected_cluster()
        provider_service = self.services.provider_broker.get_provider_service(cluster.provider)
        try:
            provider_service.test_kubernetes_cluster(cluster_name=cluster.name)
        except Exception as e:
            raise ClusterError(
                f'Looks like an error occured while testing cluster "{cluster.name}".'
            ) from e
        return cluster
# Python 2 script: integrates a two-level (neutrino-oscillation-style)
# Schroedinger equation with a doubly-periodic perturbation and appends the
# transition probabilities to a timestamped CSV as it goes.
import numpy as np
from scipy.integrate import odeint
from scipy.integrate import ode
import matplotlib.pylab as plt
import csv
import time

endpoint = 1000000000; # integration range
dx = 10.0; # step size
lam0 = 0.845258; # in unit of omegam, omegam = 3.66619*10^-17
dellam = np.array([0.00003588645221954444, 0.06486364865874367]); # deltalambda/omegam
ks = [1.0,1.0/90]; # two k's
thm = 0.16212913985547778; # theta_m
psi0, x0 = [1.0+0.j, 0.0], 0 # initial condition
savestep = 100000; # save to file every savestep steps

xlin = np.arange(dx,endpoint+1*dx, dx)
psi = np.zeros([len(xlin) , 2], dtype='complex_')
# NOTE(review): len(xlin)/savestep relies on Python 2 integer division;
# psi/psisave are allocated but never used below.
xlinsave = np.zeros(len(xlin)/savestep);
psisave = np.zeros([len(xlinsave) , 2], dtype='complex_')
# probsave rows are [x, P(flavor 2), P(flavor 1)]
probsave = np.zeros([len(xlinsave) , 3])

def hamiltonian(x, deltalambda, k, thetam):
    # 2x2 off-diagonal Hamiltonian in the interaction picture: the
    # perturbation mixes the two levels with a phase accumulated from the
    # two cosine terms (one per frequency component k[0], k[1]).
    return [[ 0, 0.5* np.sin(2*thetam) * ( deltalambda[0] * np.sin(k[0]*x) + deltalambda[1] * np.sin(k[1]*x) ) * np.exp( 1.0j * ( - x - np.cos(2*thetam) * ( ( deltalambda[0]/k[0] * np.cos(k[0]*x) + deltalambda[1]/k[1] * np.cos(k[1]*x) ) ) ) ) ],  [ 0.5* np.sin(2*thetam) * ( deltalambda[0] * np.sin(k[0]*x) + deltalambda[1] * np.sin(k[1]*x) ) * np.exp( -1.0j * ( - x - np.cos(2*thetam) * ( deltalambda[0] /k[0] * np.cos(k[0]*x) + deltalambda[1] /k[1] * np.cos(k[1]*x) ) ) ), 0 ]]   # Hamiltonian for double frequency

def deripsi(t, psi, deltalambda, k , thetam):
    # RHS of i dpsi/dt = H psi  =>  dpsi/dt = -i H psi
    return -1.0j * np.dot( hamiltonian(t, deltalambda,k,thetam), [psi[0], psi[1]] )

# Complex-valued stiff integrator (BDF) from scipy
sol = ode(deripsi).set_integrator('zvode', method='bdf', atol=1e-8, with_jacobian=False)
sol.set_initial_value(psi0, x0).set_f_params(dellam,ks,thm)

flag = 0       # index into xlin (integration step counter)
flagsave = 0   # index into probsave (rows written so far)
timestampstr = time.strftime("%Y%m%d-%H%M%S")
print timestampstr

while sol.successful() and sol.t < endpoint:
    sol.integrate(xlin[flag])
    # Every savestep steps, record and append probabilities to the CSV.
    if np.mod(flag,savestep)==0:
        probsave[flagsave] = [sol.t, np.absolute(sol.y[1])**2, np.absolute(sol.y[0])**2]
        # Append mode so a long run can be monitored/recovered mid-flight;
        # savetxt writes the 3 values of the row on separate lines.
        with open(r'probtrans-test-'+timestampstr+'.csv', 'a') as f_handle:
            np.savetxt(f_handle, probsave[flagsave])
        flagsave = flagsave + 1
    flag = flag + 1

print "CONGRATS"

# # ploting using probsave array inside file
# plt.figure(figsize=(18,13))
# plt.plot(probsave[:,0], probsave[:,1],'-')
# plt.title("Probabilities",fontsize=20)
# plt.xlabel("$\hat x$",fontsize=20)
# plt.ylabel("Probability",fontsize=20)
# plt.show()

# # Template for reading the csv file
# # Ploting using data file
# probsavefromfile = np.loadtxt("probtrans-test-"+timestampstr+".csv")
# # print test
# # print len(test[1::2]), test[1::2], len(test[::2]), test[::2]
# plt.figure(figsize=(18,13))
# plt.plot(probsavefromfile[::2], probsavefromfile[1::2],'-')
# plt.title("Probabilities",fontsize=20)
# plt.xlabel("$\hat x$",fontsize=20)
# plt.ylabel("Probability",fontsize=20)
# plt.show()
#!/usr/bin/env python
# Usage parse_shear sequences.fna a2t.txt emb_output.b6
#
# Builds a per-species table of taxonomy-depth hit counts from an alignment
# file, normalized against the number of reads per species in the input
# FASTA, and writes it to sheared_bayes.txt.
import sys
import csv
from collections import Counter, defaultdict

sequences = sys.argv[1]
accession2taxonomy = sys.argv[2]
alignment = sys.argv[3]

# Map accession (base name without trailing _suffix and version) -> taxonomy.
with open(accession2taxonomy) as handle:
    next(handle)  # skip header line
    a2t = {
        '_'.join(record[0].split()[0].split('_')[:-1]).split('.')[0]: record[-1]
        for record in csv.reader(handle, delimiter="\t")
    }
print("Loaded accession2taxonomy.")

# Count reads per species from the FASTA headers.
reads_counter = Counter()
with open(sequences) as handle:
    for lineno, text in enumerate(handle):
        if lineno % 100000 == 0:
            print("Processed %d lines" % lineno)
            print(text)
        if not text.startswith('>'):
            continue
        accession = '_'.join(text.split()[0][1:].split('_')[:-1]).split('.')[0]
        if accession in a2t:
            reads_counter[a2t[accession]] += 1
print("Loaded read counter")

# Tally, per species, how often each aligned taxonomy string was hit.
counts_dict = defaultdict(Counter)
with open(alignment) as handle:
    for recno, record in enumerate(csv.reader(handle, delimiter="\t")):
        if recno % 100000 == 0:
            print("Processed %d records" % recno)
            print(record)
        hit_taxonomy = record[-1]
        if not hit_taxonomy.startswith('k'):
            continue
        accession = "_".join(record[0].split('_')[:-1]).split('.')[0]
        if accession in a2t:
            counts_dict[a2t[accession]][hit_taxonomy] += 1
print("Loaded counts_dict.")

# Emit one row per species: species, counts by taxonomy depth (1..8), total reads.
with open("sheared_bayes.txt", "w") as outf:
    for recno, species in enumerate(counts_dict.keys()):
        out_row = [0] * 10
        out_row[0] = species
        out_row[-1] = reads_counter[species]
        tallies = counts_dict[species]
        if recno % 10000 == 0:
            print("Processed %d records" % recno)
            print(tallies)
        for taxonomy in tallies.keys():
            depth = taxonomy.count(';')
            out_row[depth + 1] = tallies[taxonomy]
        outf.write("\t".join(map(str, out_row)) + "\n")
# NOTE(review): this chunk begins mid-statement — "n_mod" below is the tail of
# a truncated assignment from the enclosing constructor (presumably
# `self._clean_mod = clean_mod`); tokens left untouched.
n_mod
        self._result_queue = result_queue
        self._startworkingflag_ = True
        # Per-labor-thread task queue of capacity 1: a labor accepts at most
        # one queued task at a time (the dispatcher probes with full()).
        self._task_queue = Queue(1)
        self._count_lock = Lock()

    #----------------------------------------------------------------------
    def get_result_queue(self):
        """Return the shared result queue this labor reports into."""
        return self._result_queue

    #----------------------------------------------------------------------
    def get_task_queue(self):
        """Return this labor's private (capacity-1) task queue."""
        return self._task_queue

    #----------------------------------------------------------------------
    def feed(self, function, *vargs, **kwargs):
        """Offer one task to this labor without blocking.

        Returns True when accepted, False when the labor is busy (queue full).
        """
        try:
            self._task_queue.put_nowait(tuple([function, vargs, kwargs]))
            return True
        except Full:
            #format_exc()
            return False

    #----------------------------------------------------------------------
    def run(self):
        """Worker loop: pull a task, execute it, report a result dict.

        The result dict carries origin/state/result/exception info; in clean
        mode it is stripped down to just state and result. The master's
        executed-task counter is bumped under a lock either way.
        """
        while self._startworkingflag_:
            #pprint('Running')
            try:
                # timeout keeps the loop responsive to stop()
                _task = self._task_queue.get(timeout=3)
                result = {}
                result['from'] = self.name
                result['state'] = False
                result['result'] = None
                result['current_task'] = _task.__str__()
                result['exception'] = tuple()
                try:
                    ret = self._process_task(_task)
                    result['state'] = True
                    result['result'] = ret
                    #self._result_queue.put(result)
                except Exception as e:
                    result['state'] = False
                    result['result'] = None
                    # exception is serialized as (type-string, message)
                    exception_i = (str(type(e)), str(e))
                    result['exception'] = exception_i
                finally:
                    if self._clean_mod:
                        _result = {}
                        _result['state'] = result['state']
                        _result['result'] = result['result']
                        result = _result
                    self._result_queue.put(result)
                    self._count_lock.acquire()
                    self._master._executed_task_count = \
                        self._master._executed_task_count + 1
                    self._count_lock.release()
            except Empty:
                pass

    #----------------------------------------------------------------------
    def _process_task(self, task):
        """Execute a (function, vargs, kwargs) tuple and return its result."""
        try:
            ret = task[0](*task[1], **task[2])
            return ret
        except Exception as e:
            # re-raised so run() records the exception in the result dict
            raise e

    #----------------------------------------------------------------------
    def stop(self):
        """Ask the worker loop to exit after the current task/timeout."""
        #self.stop()
        self._startworkingflag_ = False

    #----------------------------------------------------------------------
    def __del__(self):
        """Best-effort stop when the labor object is garbage collected."""
        self.stop()

    #----------------------------------------------------------------------
    def _exception_process(self):
        """Placeholder for exception post-processing (not implemented)."""


########################################################################
class Pool(object):
    """Thread pool: a daemon keeps `thread_max` labors alive, a dispatcher
    daemon hands queued tasks to the first idle labor, results are collected
    on a shared queue."""

    #----------------------------------------------------------------------
    def __init__(self, thread_max=30, clean_mod=True):
        """Constructor"""
        self.thread_max = thread_max
        self._current_thread = []
        self._daemon_thread = []
        self._clean_mod = clean_mod
        self._result_queue = Queue()
        self._task_queue = Queue()
        self.is_alive = True
        self._executed_task_count = 0
        self._task_count = 0

    #----------------------------------------------------------------------
    def _restart_thread_daemon(self):
        """Daemon loop: top the pool up to thread_max labor threads."""
        #pprint('threads daemon started!')
        while self.is_alive:
            if len(self._current_thread) < self.thread_max:
                self._start_new_labor()
            else:
                sleep(0.5)

    #----------------------------------------------------------------------
    def _start_new_labor(self):
        """Spawn one daemonized LaborThread and register it."""
        #pprint('start new labor')
        _tmp_labor = LaborThread(result_queue=self._result_queue, master=self,
                                 clean_mod=self._clean_mod)
        _tmp_labor.daemon = True
        _tmp_labor.start()
        self._current_thread.append(_tmp_labor)

    #----------------------------------------------------------------------
    def feed(self, target_func, *vargs, **kwargs):
        """Enqueue a task for the dispatcher and bump the task counter."""
        self._task_queue.put(tuple([target_func, vargs, kwargs]))
        self._task_count = self._task_count + 1

    #----------------------------------------------------------------------
    def _dispatcher(self):
        """Daemon loop: busy-poll labors until one accepts each task.

        NOTE(review): `i == None` should be `i is None`, and the inner
        while/for is a spin-wait; left as-is in this doc-only pass.
        """
        #pprint('dispatcher start!')
        while self.is_alive:
            try:
                ret = self._task_queue.get()
                while True:
                    availible_threads = [None if x.get_task_queue().full() \
                                         else x for x in self._current_thread]
                    for i in availible_threads:
                        if i == None:
                            pass
                        else:
                            i.feed(ret[0], *ret[1], **ret[2])
                            ret = None
                            break
                    if ret == None:
                        break
                    else:
                        continue
            except Empty:
                # NOTE(review): time.sleep() takes no `seconds` keyword —
                # this line would raise TypeError if ever reached.
                sleep(seconds=0.5)

    #----------------------------------------------------------------------
    def stop(self):
        """Stop all labors and both daemon loops.

        NOTE(review): `del i` only unbinds the loop variable; it does not
        remove the thread from self._current_thread.
        """
        for i in self._current_thread:
            i.stop()
            del i
        self.is_alive = False

    #----------------------------------------------------------------------
    def start(self):
        """Start the labor-maintenance and dispatcher daemon threads."""
        self.is_alive = True
        _ = Thread(name='restart_labor_daemon', target=self._restart_thread_daemon)
        _.daemon = True
        _.start()
        self._daemon_thread.append(_)
        _ = Thread(name='dispatcher_daemon', target=self._dispatcher)
        _.daemon = True
        _.start()

    #----------------------------------------------------------------------
    def get_result_queue(self):
        """Return the shared result queue."""
        return self._result_queue

    #----------------------------------------------------------------------
    def get_task_queue(self):
        """Return the pool's pending-task queue."""
        return self._task_queue

    #----------------------------------------------------------------------
    def get_result_generator(self):
        """Yield results as they arrive; stop once all fed tasks finished."""
        while True:
            try:
                ret = self._result_queue.get(timeout=1)
                yield ret
            except Empty:
                if self._task_count == self._executed_task_count:
                    break
                else:
                    pass

    #----------------------------------------------------------------------
    @property
    def task_count(self):
        """The amount of tasks"""
        return self._task_count

    #----------------------------------------------------------------------
    @property
    def executed_task_count(self):
        """Number of tasks whose execution has completed (ok or failed)."""
        return self._executed_task_count

    #----------------------------------------------------------------------
    @property
    def percent(self):
        """Fraction of fed tasks already executed (ZeroDivision if none fed)."""
        return float(self._executed_task_count)/float(self._task_count)


########################################################################
class PoolTest(unittest.case.TestCase):
    """Ad-hoc tests for the pool (truncated in this view)."""

    #----------------------------------------------------------------------
    def runTest(self):
        """Constructor"""
        self.test_laborprocess()

    #----------------------------------------------------------------------
    def test_pool(self):
        """Smoke-test feeding a trivial function through the pool."""
        def func1(arg1):
            print('func1 called!')
            return arg1
# Python 2 SPADE example: an agent whose default behaviour blocks waiting
# for incoming messages and reports whether one arrived.
import spade
import time

class MyAgent(spade.Agent.Agent):
    class ReceiveBehav(spade.Behaviour.Behaviour):
        """This behaviour will receive all kind of messages"""
        def _process(self):
            self.msg = None

            # Blocking receive for 10 seconds
            self.msg = self._receive(True, 10)

            # Check whether the message arrived
            if self.msg:
                print "I got a message!"
            else:
                print "I waited but got no message"

    def _setup(self):
        # Called once by SPADE when the agent starts.
        print "MyAgent starting . . ."
        # Add the "ReceiveBehav" as the default behaviour
        rb = self.ReceiveBehav()
        self.setDefaultBehaviour(rb)

if __name__ == "__main__":
    # JID and password for the local XMPP server.
    a = MyAgent("agent2@127.0.0.1", "secret")
    a.start()
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-09-20 15:04
# Auto-generated migration: only adjusts verbose_name/default/help_text of
# Plan.point_label. Do not edit by hand beyond comments.
from __future__ import unicode_literals

from django.db import migrations
from django.db import models


class Migration(migrations.Migration):

    dependencies = [
        ('meinberlin_plans', '0017_rename_cost_field'),
    ]

    operations = [
        migrations.AlterField(
            model_name='plan',
            name='point_label',
            field=models.CharField(default='Label of the location', help_text='This could be an address or the name of a landmark.', max_length=255, verbose_name='Label of the location'),
        ),
    ]
from unittest.mock import Mock

from django.db.models import QuerySet

from datagrowth.resources import HttpResource
from core.tests.mocks.requests import MockRequests


# A QuerySet stand-in whose count() always reports zero rows.
MockErrorQuerySet = Mock(QuerySet)
MockErrorQuerySet.count = Mock(return_value=0)


class HttpResourceMock(HttpResource):
    """Test double for HttpResource: requests go through the MockRequests
    session instead of the network, and args/kwargs are validated against
    the schemas below."""

    URI_TEMPLATE = "http://localhost:8000/{}/?q={}"
    PARAMETERS = {
        "param": 1
    }
    HEADERS = {
        "Accept": "application/json"
    }
    # JSON-schema for GET: exactly [language, alphanumeric term]; no kwargs.
    GET_SCHEMA = {
        "args": {
            "title": "resource mock arguments",
            "type": "array",  # a single alphanumeric element
            "items": [
                {
                    "type": "string",
                    "enum": ["en", "nl"]
                },
                {
                    "type": "string",
                    "pattern": "[A-Za-z0-9]+"
                }
            ],
            "additionalItems": False,
            "minItems": 2
        },
        "kwargs": None  # not allowed
    }
    # JSON-schema for POST: same args as GET, plus a required "query" kwarg.
    POST_SCHEMA = {
        "args": {
            "title": "resource mock arguments",
            "type": "array",  # a single alphanumeric element
            "items": [
                {
                    "type": "string",
                    "enum": ["en", "nl"]
                },
                {
                    "type": "string",
                    "pattern": "[A-Za-z0-9]+"
                }
            ],
            "additionalItems": False,
            "minItems": 2
        },
        "kwargs": {
            "title": "resource mock keyword arguments",
            "type": "object",
            "properties": {
                "query": {"type": "string"}
            },
            "required": ["query"]
        }
    }
    CONFIG_NAMESPACE = "mock"

    def __init__(self, *args, **kwargs):
        # Swap the real HTTP session for the shared mock and clear any calls
        # recorded by previous tests.
        super(HttpResourceMock, self).__init__(*args, **kwargs)
        self.session = MockRequests
        self.session.send.reset_mock()

    def send(self, method, *args, **kwargs):
        """Prefix args with the configured source language (and, for POST,
        append the query kwarg as a positional arg) before delegating."""
        if method == "post":
            query = kwargs.get("query")
            if query:
                args += (query,)
            args = (self.config.source_language,) + args
        elif method == "get":
            args = (self.config.source_language,) + args
        return super(HttpResourceMock, self).send(method, *args, **kwargs)

    def auth_parameters(self):
        # Static auth marker plus the secret from configuration.
        return {
            "auth": 1,
            "key": self.config.secret
        }

    def next_parameters(self):
        """Return pagination parameters from the response body's "next" key,
        or {} when absent or the body is not a mapping."""
        content_type, data = self.content
        try:
            nxt = data["next"]
        except (KeyError, TypeError):
            return {}
        return {"next": nxt}

    @property
    def meta(self):
        # Second positional request arg (the search term), if any.
        return self.variables()["meta"]

    def data(self, **kwargs):
        # POST body payload derived from the query kwarg.
        return {
            "test": kwargs.get("query")
        }

    def variables(self, *args):
        """Expose request args as template variables; falls back to the
        stored request when called without arguments."""
        args = args or (self.request["args"] if self.request else tuple())
        return {
            "url": args,
            "meta": args[1] if len(args) > 1 else None
        }
# PyMOL script: builds a compiled graphics object (CGO) showing the three
# coordinate axes as cylinders with text labels, and loads it as "axes".
from pymol.cgo import *
from pymol import cmd
from pymol.vfont import plain

# create the axes object, draw axes with cylinders coloured red, green,
#blue for X, Y and Z
# CYLINDER entries: x1,y1,z1, x2,y2,z2, radius, r1,g1,b1, r2,g2,b2
obj = [
   CYLINDER, 0., 0., 0., 10., 0., 0., 0.2, 1.0, 1.0, 1.0, 1.0, 0.0, 0.,
   CYLINDER, 0., 0., 0., 0., 10., 0., 0.2, 1.0, 1.0, 1.0, 0., 1.0, 0.,
   CYLINDER, 0., 0., 0., 0., 0., 10., 0.2, 1.0, 1.0, 1.0, 0., 0.0, 1.0,
   ]

# add labels to axes object
# cyl_text appends cylinder-stroke glyphs to obj; `axes` sets glyph scale.
cyl_text(obj,plain,[-5.,-5.,-1],'Origin',0.20,axes=[[3.0,0.0,0.0],[0.0,3.0,0.0],[0.0,0.0,3.0]])
cyl_text(obj,plain,[10.,0.,0.],'X',0.20,axes=[[3.0,0.0,0.0],[0.0,3.0,0.0],[0.0,0.0,3.0]])
cyl_text(obj,plain,[0.,10.,0.],'Y',0.20,axes=[[3.0,0.0,0.0],[0.0,3.0,0.0],[0.0,0.0,3.0]])
cyl_text(obj,plain,[0.,0.,10.],'Z',0.20,axes=[[3.0,0.0,0.0],[0.0,3.0,0.0],[0.0,0.0,3.0]])

# then we load it into PyMOL
cmd.load_cgo(obj,'axes')
# NOTE(review): this chunk begins mid-method — the fragment below is the tail
# of the sub-process worker's update step (NCCL all-reduce of gradients, then
# broadcast of updated parameters from the master device); tokens untouched.
                null_stream = cuda.Stream.null
                self.comm.reduce(gg.data.ptr, gg.data.ptr, gg.size,
                                 nccl_data_type, nccl.NCCL_SUM, 0,
                                 null_stream.ptr)
                del gg
                self.model.cleargrads()
                gp = gather_params(self.model)
                nccl_data_type = _get_nccl_data_type(gp.dtype)
                self.comm.bcast(gp.data.ptr, gp.size, nccl_data_type, 0,
                                null_stream.ptr)
                scatter_params(self.model, gp)
                gp = None


class MultiprocessParallelUpdater(standard_updater.StandardUpdater):

    """Implementation of a multiprocess parallel GPU Updater.

    This is an implementation of :class:`Updater` that uses multiple GPUs
    with multi-process data parallelism. It uses Nvidia NCCL for communication
    between multiple GPUs.

    It behaves similarly to
    :class:`~chainer.training.updaters.StandardUpdater`.
    The update routine is modified to support data-parallel
    computation on multiple GPUs in one machine.
    It is based on synchronous parallel SGD: it
    parallelizes the gradient computation over a mini-batch, and updates the
    parameters only in the main device.

    It does not transfer the values collected by :class:`Reporter` in the sub
    devices to the main device. So you can only see the reported values in
    the main device.

    Args:
        iterators: List of dataset iterator for the training dataset. The
            number of the iterators must be same to the number of GPUs you
            use.
        optimizer: Optimizer to update parameters. The model should be
            attached to the optimizer.
        converter: Converter function to build input arrays. Each batch
            extracted by the iterator is split equally between the devices
            and then passed with corresponding ``device`` option to this
            function. :func:`~chainer.dataset.concat_examples` is used by
            default.
        devices: Dictionary or list of devices to which the training data is
            sent. The master device will be the first one in the list or the
            value attached to the key ``'main'``.

    """

    def __init__(self, iterators, optimizer, converter=convert.concat_examples,
                 devices=None):
        if not MultiprocessParallelUpdater.available():
            raise Exception(
                'NCCL is not enabled. MultiprocessParallelUpdater '
                'requires NCCL.\n'
                'Please reinstall chainer after you install NCCL.\n'
                '(see https://github.com/chainer/chainer#installation).')

        # one iterator per device; all must cover the same dataset
        assert len(iterators) == len(devices)
        for iterator in iterators[1:]:
            assert len(iterator.dataset) == len(iterators[0].dataset)

        # Correct optimizer parameters for new minibatch size
        optim = optimizer.__class__.__name__
        if optim in ('Adam', 'AdaGrad', 'RMSprop'):
            optimizer.eps *= len(devices)
            warnings.warn('optimizer.eps is changed to {} '
                          'by MultiprocessParallelUpdater for new batch size.'.
                          format(optimizer.eps))
        elif optim in ('RMSpropGraves', 'AdaDelta'):
            optimizer.eps *= len(devices) ** 2  # not quite right for AdaDelta
            warnings.warn('optimizer.eps is changed to {} '
                          'by MultiprocessParallelUpdater for new batch size.'.
                          format(optimizer.eps))
        elif hasattr(optimizer, 'lr'):
            optimizer.lr /= len(devices)
            warnings.warn('optimizer.lr is changed to {} '
                          'by MultiprocessParallelUpdater for new batch size.'.
                          format(optimizer.lr))

        super(MultiprocessParallelUpdater, self).__init__(
            iterator=iterators[0],
            optimizer=optimizer,
            converter=converter
        )

        # Normalize devices to a list with the master device first.
        if isinstance(devices, dict):
            main = devices.pop('main')
            devices = list(six.itervalues(devices))
            devices = [main] + devices
        if devices is None or any(device is None for device in devices):
            raise ValueError('must specify GPU devices')

        self._master = optimizer.target
        self._devices = devices
        self._mpu_iterators = iterators
        self._initialized = False

        self._pipes = []
        self._workers = []
        self.comm = None

    @staticmethod
    def available():
        # True when chainer was built with NCCL support.
        return _available

    def _send_message(self, message):
        # Broadcast a control message to every worker process.
        for pipe in self._pipes:
            pipe.send(message)

    def setup_workers(self):
        """Lazily fork one worker process per non-master device and set up
        the NCCL communicator (only on the first update)."""
        if self._initialized:
            return
        self._initialized = True

        self._master.cleargrads()
        for i in six.moves.range(1, len(self._devices)):
            pipe, worker_end = multiprocessing.Pipe()
            worker = _Worker(i, worker_end, self)
            worker.start()
            self._workers.append(worker)
            self._pipes.append(pipe)

        with cuda.Device(self._devices[0]):
            self._master.to_gpu(self._devices[0])
            if len(self._devices) > 1:
                comm_id = nccl.get_unique_id()
                self._send_message(("set comm_id", comm_id))
                self.comm = nccl.NcclCommunicator(len(self._devices),
                                                  comm_id, 0)

    def update_core(self):
        """One synchronous data-parallel SGD step on the master device."""
        self.setup_workers()

        self._send_message(('update', None))
        with cuda.Device(self._devices[0]):
            # For reducing memory
            self._master.cleargrads()

            optimizer = self.get_optimizer('main')
            batch = self.get_iterator('main').next()
            batch = self.converter(batch, self._devices[0])

            loss = _calc_loss(self._master, batch)

            self._master.cleargrads()
            loss.backward()

            # NCCL: reduce grads
            null_stream = cuda.Stream.null
            if self.comm is not None:
                # sum gradients from all workers into the master copy
                gg = gather_grads(self._master)
                nccl_data_type = _get_nccl_data_type(gg.dtype)
                self.comm.reduce(gg.data.ptr, gg.data.ptr, gg.size,
                                 nccl_data_type, nccl.NCCL_SUM,
                                 0, null_stream.ptr)
                scatter_grads(self._master, gg)
                del gg
            optimizer.update()
            if self.comm is not None:
                # broadcast updated parameters back to all workers
                gp = gather_params(self._master)
                nccl_data_type = _get_nccl_data_type(gp.dtype)
                self.comm.bcast(gp.data.ptr, gp.size, nccl_data_type,
                                0, null_stream.ptr)

    def finalize(self):
        # Ask workers to exit and wait for them.
        self._send_message(('finalize', None))

        for worker in self._workers:
            worker.join()


def _calc_loss(model, in_arrays):
    # Dispatch on converter output shape: tuple -> positional, dict ->
    # keyword, otherwise a single array argument.
    if isinstance(in_arrays, tuple):
        return model(*in_arrays)
    elif isinstance(in_arrays, dict):
        return model(**in_arrays)
    else:
        return model(in_arrays)


def size_num_grads(link):
    """Count total size of all gradient arrays of a given link

    Args:
        link (chainer.link.Link): Target link object.
    """
    size = 0
    num = 0
    for param in link.params():
        if param.size == 0:
            continue
        size += param.size
        num += 1
    return size, num


def _memcpy_gather():
    # Elementwise CUDA kernel packing many per-parameter arrays into one
    # contiguous float32 buffer; `info` holds cumulative offsets that are
    # binary-searched to find the source array for each output index.
    # NOTE(review): truncated in this view — the kernel string is cut off
    # mid-branch below; remainder lies outside this chunk.
    return cuda.cupy.ElementwiseKernel(
        'raw T ptrs, raw X dtypes, raw Y info',
        'raw float32 dst',
        '''
            int id_min = id_pre;
            int id_max = num_src;
            while (id_max - id_min > 1) {
                int id = (id_max + id_min) / 2;
                if (i < info[id]) id_max = id;
                else              id_min = id;
            }
            int id = id_min;
            int i_dst = i;
            int i_src = i;
            if (id > 0) i_src -= info[id];
            dst[i_dst] = 0;
            if (ptrs[id] != NULL) {
                if (dtypes[id] == 0) { // fp32
                    float *src = reinterpret_cast<float *>(ptrs[id]);
#Ret Samys, creator of this program, can be found at RetSamys.deviantArt.com
#Please feel free to change anything or to correct me or to make requests... I'm a really bad coder. =)
#Watch Andrew Huang's video here: https://www.youtube.com/watch?v=4IAZY7JdSHU

# Python 2 script: tonally inverts ("flips") the note numbers of a MIDI file
# and writes the result to <input>_midiflip_<n>.mid.  It works on the raw
# bytes of the file, splitting on the "MTrk" track-chunk magic.

changecounter=0  # total note-off/aftertouch events adjusted, reported at the end
path="for_elise_by_beethoven.mid"  # default input used when the user enters nothing
print """Welcome to my horribly inefficient program to tonal invert MIDI files according to Andrew Huang's #MIDIFLIP challenge as seen on https://www.youtube.com/watch?v=4IAZY7JdSHU
"""
pth=raw_input("Please enter your MIDI file's path here (save the file in the same directory as this program if you want to avoid typing the entire path): ")
if pth!="":path=pth
try:
    f=open(path,"rb")
except:
    try:
        # second chance: maybe the user omitted the .mid extension
        f=open(path+".mid","rb")
    except:
        # NOTE(review): on a second failure `f` stays undefined, so f.read()
        # below raises NameError instead of exiting cleanly — confirm intended.
        print "Sorry, but are you sure this is where the file is?"
cset=raw_input("As a standard setting, this program will flip all notes around C', which will flip the 'hands'. To use this mode press enter. You can use the old mode, which keeps the 'hands' where they are, but also creates a whole bunch of errors, by entering anything at all: ")
print "Program running"
print "You may abort any time by hitting CTRL+C"
midi=f.read()
writeme="".join(midi.split("MTrk")[0]) #final string to be written into new file
for i in midi.split("MTrk")[1:]: #skip header chunk and jump directly into track chunk
    print "Editing Track "+str(midi.split("MTrk").index(i))+" of "+str(len(midi.split("MTrk"))-1)
    lowcount=0   # notes clamped at the bottom of the 0-127 MIDI range
    highcount=0  # notes clamped at the top of the range
    i=list(i) #split string into list of characters
    delta=True #default value for checking delta_time
    offset=0 #default value for flipping pitch according to last event - since there is no such event at the beginning of a track
    for byte in range(len(i[4:])): #skip length bytes
        if delta: #delta_time checking mode
            # NOTE(review): per the MIDI spec a variable-length-quantity byte
            # with the high bit SET means MORE bytes follow; treating >127 as
            # the terminator matches the author's "bunch of errors" caveat.
            if ord(i[4:][byte])>127: #determine if this is the last byte of the variable-length quantity for delta_time
                delta=False #found last byte! next byte should be event
            else:
                pass
        else: #event checking mode
            if ord(i[4:][byte])==255 and ord(i[4:][byte+1])==81 and ord(i[4:][byte+2])==3: #check for set tempo meta-event
                byte+=5 #skip set tempo meta-event
                # NOTE(review): rebinding the loop variable of a range() loop
                # does not skip iterations — `byte` is reset on the next pass.
            elif ord(i[4:][byte])>=144 and ord(i[4:][byte])<=159: #check for note on event (status 0x90-0x9F)
                byte+=1 #go to note byte
                if cset=="":
                    offset=(60-ord(i[4:][byte]))*2 #calculate offset to c'
                else:
                    try: #skipped if lastnote is not defined
                        offset+=(ord(lastnote)-ord(i[4:][byte]))*2 #calculate offset
                    except NameError:
                        pass
                lastnote=i[4:][byte] #set current note to compare to next note before it's changed!
                try:i[byte+4]=chr(ord(i[4:][byte])+offset) #change note
                except:
                    # chr() raised: flipped pitch fell outside 0-255, clamp to MIDI range
                    if ord(i[4:][byte])+offset>127:
                        i[byte+4]=chr(127)
                        highcount+=1
                    else:
                        i[byte+4]=chr(0)
                        lowcount+=1
                #journey to note off starts here
                for offbyte in range(len(i[byte+4:])):
                    if ord(i[byte+4:][offbyte])==255 and ord(i[byte+4:][offbyte+1])==81 and ord(i[byte+4:][offbyte+1])==3: #check for set tempo meta-event
                        offbyte+=5 #skip set tempo meta-event
                    elif ord(i[byte+4:][offbyte])>=128 and ord(i[byte+4:][offbyte])<=137 and i[byte+4:][offbyte+1]==lastnote: #check if the same note is off (status 0x80-0x89)
                        try:i[byte+4+offbyte+1]=chr(ord(i[byte+4:][offbyte+1])+offset) #change note
                        except:
                            if ord(i[4:][byte])+offset>127:
                                i[byte+4]=chr(127)
                            else:
                                i[byte+4]=chr(0)
                        changecounter+=1
                        break
                    elif ord(i[byte+4:][offbyte])==123: #all notes off
                        changecounter+=1
                        break
                    elif ord(i[byte+4:][offbyte])>=160 and ord(i[byte+4:][offbyte])<=175 and i[byte+4:][offbyte+1]==lastnote: #polyphonic aftertouch - just in case? Urgh, I don't actually understand this enough, when is this activated and is there a way to deactivate!?!?
                        try:i[offbyte+1+byte+4]=chr(ord(i[byte+4:][offbyte+1])+offset) #change note
                        except:
                            i[offbyte+1+byte+4]=chr(127)
                    else:
                        pass
                byte+=1 #skip velocity byte
            else:
                pass
    if lowcount or highcount:print "WARNING: There were notes out of range: "+str(lowcount)+" too low and "+str(highcount)+" too high."
    writeme=writeme+"MTrk"+"".join(i) #join list of characters to final string
counter=1
path=path.replace(".mid","")
# find a free output filename <path>_midiflip_<n>.mid: probe by opening for
# read; the first IOError means the name is unused, so write there.
while True:
    try:
        newfile = open(path+"_midiflip_"+str(counter)+".mid")
        newfile.close()
        counter+=1
    except IOError as e:
        newfile = open(path+"_midiflip_"+str(counter)+".mid","wb")
        newfile.write(writeme)
        newfile.close()
        break
print "End of the line..."
print str(changecounter)+" notes changed"
# -*- coding: utf-8 -*-
# Copyright 2014, 2015 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from synapse.types import EventID, RoomID, UserID
from synapse.api.errors import SynapseError
from synapse.api.constants import EventTypes, Membership


class EventValidator(object):
    """Sanity-checks events, raising SynapseError(400) on the first problem
    found."""

    def validate(self, event):
        """Validate the basic shape of an event.

        Checks that the event and room IDs parse, that the required keys are
        present, that the metadata fields are strings, and that membership
        events carry a valid membership value.
        """
        EventID.from_string(event.event_id)
        RoomID.from_string(event.room_id)

        # keys every event must carry
        # (auth_events / hashes / prev_events deliberately not enforced here)
        for key in ("content", "origin", "sender", "type"):
            if not hasattr(event, key):
                raise SynapseError(400, "Event does not have key %s" % (key,))

        # these attributes must be string-typed; state_key only when present
        string_keys = ["origin", "sender", "type"]
        if hasattr(event, "state_key"):
            string_keys.append("state_key")

        for key in string_keys:
            if not isinstance(getattr(event, key), basestring):
                raise SynapseError(400, "Not '%s' a string type" % (key,))

        if event.type == EventTypes.Member:
            try:
                membership = event.content["membership"]
            except KeyError:
                raise SynapseError(400, "Content has not membership key")
            if membership not in Membership.LIST:
                raise SynapseError(400, "Invalid membership key")

        # Checking dictionary-valued keys and DAG-format keys is still TODO.

    def validate_new(self, event):
        """Validate an event being newly created locally: everything in
        validate(), plus a parseable sender and type-specific content keys."""
        self.validate(event)

        UserID.from_string(event.sender)

        # per-event-type content fields that must be present and string-typed
        required_content = {
            EventTypes.Message: ["body", "msgtype"],
            EventTypes.Topic: ["topic"],
            EventTypes.Name: ["name"],
        }
        keys = required_content.get(event.type)
        if keys is not None:
            self._ensure_strings(event.content, keys)

    def _ensure_strings(self, d, keys):
        """Require each key in ``keys`` to exist in ``d`` with a string value."""
        for key in keys:
            if key not in d:
                raise SynapseError(400, "'%s' not in content" % (key,))
            if not isinstance(d[key], basestring):
                raise SynapseError(400, "Not '%s' a string type" % (key,))
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# RawSpeed documentation build configuration file, created by
# sphinx-quickstart on Mon Aug 14 18:30:09 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import os
import sys
sys.path.insert(0, os.path.abspath('.'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# NOTE(review): Sphinx imports each extension by module name, and
# 'sphinx-pyexec' contains a hyphen, which is not a valid Python module name.
# Confirm the package registers that exact entry point, or whether this
# should be 'sphinx_pyexec'.
extensions = ['sphinx.ext.githubpages', 'sphinx-pyexec']

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'RawSpeed'
copyright = '2009-2016 Klaus Post, 2016-2019 Roman Lebedev'
author = '(c) Authors'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = ''
# The full version, including alpha/beta/rc tags.
release = ''

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
""" Functions for calculating statistics and handling uncertainties. (c) Oscar Branson : https://github.com/oscarbranson """ import numpy as np import uncertainties.unumpy as un import scipy.interpolate as interp from scipy.stats import pearsonr def nan_pearsonr(x, y): xy = np.vstack([x, y]) xy = xy[:, ~np.any(np.isnan(xy),0)] n = len(x) if xy.shape[-1] < n // 2: return np.nan, np.nan return pearsonr(xy[0], xy[1]) def R2calc(meas, model, force_zero=False): if force_zero: SStot = np.sum(meas**2) else: SStot = np.sum((meas - np.nanmean(meas))**2) SSres = np.sum((meas - model)**2) return 1 - (SSres / SStot) # uncertainties unpackers def unpack_uncertainties(uarray): """ Convenience function to unpack nominal values and uncertainties from an ``uncertainties.uarray``. Returns: (nominal_values, std_devs) """ try: return un.nominal_values(uarray), un.std_devs(uarray) except: return uarray, None def nominal_values(a): try: return un.nominal_values(a) except: return a def std_devs(a): try: return un.std_devs(a) except: return a def gauss_weighted_stats(x, yarray, x_new, fwhm): """ Calculate gaussian weigted moving mean, SD and SE. Parameters ---------- x : array-like The independent variable yarray : (n,m) array Where n = x.size, and m is the number of dependent variables to smooth. x_new : array-like The new x-scale to interpolate the data fwhm : int FWHM of the gaussian kernel. 
Returns ------- (mean, std, se) : tuple """ sigma = fwhm / (2 * np.sqrt(2 * np.log(2))) # create empty mask array mask = np.zeros((x.size, yarray.shape[1], x_new.size)) # fill mask for i, xni in enumerate(x_new): mask[:, :, i] = gauss(x[:, np.newaxis], 1, xni, sigma) # normalise mask nmask = mask / mask.sum(0) # sum of each gaussian = 1 # calculate moving average av = (nmask * yarray[:, :, np.newaxis]).sum(0) # apply mask to data # sum along xn axis to get means # calculate moving sd diff = np.power(av - yarray[:, :, np.newaxis], 2) std = np.sqrt((diff * nmask).sum(0)) # sqrt of weighted average of data-mean # calculate moving se se = std / np.sqrt(mask.sum(0)) # max amplitude of weights is 1, so sum of weights scales # a fn of how many points are nearby. Use this as 'n' in # SE calculation. return av, std, se def gauss(x, *p): """ Gaussian function. Parameters ---------- x : array_like Independent variable. *p : parameters unpacked to A, mu, sigma A = amplitude, mu = centre, sigma = width Return ------ array_like gaussian descriped by *p. """ A, mu, sigma = p return A * np.exp(-0.5 * (-mu + x)**2 / sigma**2) # Statistical Functions def stderr(a): """ Calculate the standard error of a. """ return np.nanstd(a) / np.sqrt(sum(np.isfinite(a))) # Robust Statistics. See: # - https://en.wikipedia.org/wiki/Robust_statistics # - http://www.cscjp.co.jp/fera/document/ANALYSTVol114Decpgs1693-97_1989.pdf # - http://www.rsc.org/images/robust-statistics-technical-brief-6_tcm18-214850.pdf # - http://www.itl.nist.gov/div898/software/dataplot/refman2/auxillar/h15.htm def H15_mean(x): """ Calculate the Huber (H15) Robust mean of x. 
For details, see: http://www.cscjp.co.jp/fera/document/ANALYSTVol114Decpgs1693-97_1989.pdf http://www.rsc.org/images/robust-statistics-technical-brief-6_tcm18-214850.pdf """ mu = np.nanmean(x) sd = np.nanstd(x) * 1.134 sig = 1.5 hi = x > mu + sig * sd lo = x < mu - sig * sd if any(hi | lo): x[hi] = mu + sig * sd x[lo] = mu - sig * sd return H15_mean(x) else: return mu def H15_std(x): """ Calculate the Huber (H15) Robust standard deviation of x. For details, see: http://www.cscjp.co.jp/fera/document/ANALYSTVol114Decpgs1693-97_1989.pdf http://www.rsc.org/images/robust-statistics-technical-brief-6_tcm18-214850.pdf """ mu = np.nanmean(x) sd = np.nanstd(x) * 1.134 sig = 1.5 hi = x > mu + sig * sd lo = x < mu - sig * sd if any(hi | lo): x[hi] = mu + s
ig * sd x[lo] = mu - sig * sd return H15_std(x) else: return sd def H15_se(x): """ Calculate the Hube
r (H15) Robust standard deviation of x. For details, see: http://www.cscjp.co.jp/fera/document/ANALYSTVol114Decpgs1693-97_1989.pdf http://www.rsc.org/images/robust-statistics-technical-brief-6_tcm18-214850.pdf """ sd = H15_std(x) return sd / np.sqrt(sum(np.isfinite(x))) def get_total_n_points(d): """ Returns the total number of data points in values of dict. Paramters --------- d : dict """ n = 0 for di in d.values(): n += len(di) return n def get_total_time_span(d): """ Returns total length of analysis. """ tmax = 0 for di in d.values(): if di.uTime.max() > tmax: tmax = di.uTime.max() return tmax class un_interp1d(object): """ object for handling interpolation of values with uncertainties. """ def __init__(self, x, y, fill_value=np.nan, **kwargs): if isinstance(fill_value, tuple): nom_fill = tuple([un.nominal_values(v) for v in fill_value]) std_fill = tuple([un.std_devs(v) for v in fill_value]) else: nom_fill = std_fill = fill_value self.nom_interp = interp.interp1d(un.nominal_values(x), un.nominal_values(y), fill_value=nom_fill, **kwargs) self.std_interp = interp.interp1d(un.nominal_values(x), un.std_devs(y), fill_value=std_fill, **kwargs) def new(self, xn): yn = self.nom_interp(xn) yn_err = self.std_interp(xn) return un.uarray(yn, yn_err) def new_nom(self, xn): return self.nom_interp(xn) def new_std(self, xn): return self.std_interp(xn) def stack_keys(ddict, keys, extra=None): """ Combine elements of ddict into an array of shape (len(ddict[key]), len(keys)). Useful for preparing data for sklearn. Parameters ---------- ddict : dict A dict containing arrays or lists to be stacked. Must be of equal length. keys : list or str The keys of dict to stack. Must be present in ddict. extra : list (optional) A list of additional arrays to stack. Elements of extra must be the same length as arrays in ddict. Extras are inserted as the first columns of output. 
""" if isinstance(keys, str): d = [ddict[keys]] else: d = [ddict[k] for k in keys] if extra is not None: d = extra + d return np.vstack(d).T
gger class TestPortalProjectBase(TestPortalProjectBase): def setUp(self): super(TestPortalProjectBase, self).setUp() cr, uid = self.cr, self.uid # Useful models self.project_issue = self.registry('project.issue') # Various test issues self.issue_1_id = self.project_issue.create(cr, uid, { 'name': 'Test1', 'user_id': False, 'project_id': self.project_pigs_id}, {'mail_create_nolog': True}) self.issue_2_id = self.project_issue.create(cr, uid, { 'name': 'Test2', 'user_id': False, 'project_id': self.project_pigs_id}, {'mail_create_nolog': True}) self.issue_3_id = self.project_issue.create(cr, uid, { 'name': 'Test3', 'user_id': False, 'project_id': self.project_pigs_id}, {'mail_create_nolog': True}) self.issue_4_id = self.project_issue.create(cr, uid, { 'name': 'Test4', 'user_id': self.user_projectuser_id, 'project_id': self.project_pigs_id}, {'mail_create_nolog': True}) self.issue_5_id = self.project_issue.create(cr, uid, { 'name': 'Test5', 'user_id': self.user_portal_id, 'project_id': self.project_pigs_id}, {'mail_create_nolog': True}) self.issue_6_id = self.project_issue.create(cr, uid, { 'name': 'Test6', 'user_id': self.user_public_id, 'project_id': self.project_pigs_id}, {'mail_create_nolog': True}) class TestPortalIssue(TestPortalProjectBase): @mute_logger('openerp.addons.base.ir.ir_model', 'openerp.osv.orm') def test_00_project_access_rights(self): """ Test basic project access rights, for project and portal_project """ cr, uid, pigs_id = self.cr, self.uid, self.project_pigs_id # ---------------------------------------- # CASE1: public project # ---------------------------------------- # Do: Alfred reads project -> ok (employee ok public) # Test: all project issues visible issue_ids = self.project_issue.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)]) test_issue_ids = set([self.issue_1_id, self.issue_2_id, self.issue_3_id, self.issue_4_id, self.issue_5_id, self.issue_6_id]) self.assertEqual(set(issue_ids), test_issue_ids, 'access rights: 
project user cannot see all issues of a public project') # Test: all project issues readable self.project_issue.read(cr, self.user_projectuser_id, issue_ids, ['name']) # Test: all project issues writable self.project_issue.write(cr, self.user_projectuser_id, issue_ids, {'description': 'TestDescription'}) # Do: Bert reads project -> crash, no group # Test: no project issue visible self.assertRaises(AccessError, self.project_issue.search, cr, self.user_none_id, [('project_id', '=', pigs_id)]) # Test: no project issue readable self.assertRaises(AccessError, self.project_issue.read, cr, self.user_none_id, issue_ids, ['name']) # Test: no project issue writable self.assertRaises(AccessError, self.project_issue.write, cr, self.user_none_id, issue_ids, {'description': 'TestDescription'}) # Do: Chell reads project -> ok (portal ok public) # Test: all project issues visible issue_ids = self.project_issue.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)]) self.assertEqual(set(issue_ids), test_issue_ids, 'access rights: project user cannot see all issues of a public project') # Test: all project issues readable self.project_issue.read(cr, self.user_portal_id, issue_ids, ['name']) # Test: no project issue writable self.assertRaises(AccessError, self.project_issue.write, cr, self.user_portal_id, issue_ids, {'description': 'TestDescription'}) # Do: Donovan reads project -> ok (public ok public) # Test: all project issues visible issue_ids = self.project_issue.search(cr, self.user_public_id, [('project_id', '=', pigs_id)]) self.assertEqual(set(issue_ids), test_issue_ids, 'access rights: project user cannot see all issues of a public project') # ---------------------------------------- # CASE2: portal project # ---------------------------------------- self.project_project.write(cr, uid, [pigs_id], {'privacy_visibility': 'portal'}) # Do: Alfred reads project -> ok (employee ok public) # Test: all project issues visible issue_ids = self.project_issue.search(cr, 
self.user_projectuser_id, [('project_id', '=', pigs_id)]) self.assertEqual(set(issue_ids), test_issue_ids, 'access rights: project user cannot see all issues of a portal project') # Do: Bert reads project -> crash, no group # Test: no project issue searchable self.assertRaises(AccessError, self.project_issue.search, cr, self.user_none_id, [('project_id', '=', pigs_id)]) # Data: issue follower self.project_issue.message_subscribe_users(cr, self.user_projectuser_id, [self.issue_1_id, self.issue_3_id], [self.user_portal_id]) # Do: Chell reads project -> ok (portal ok public) # Test: only followed project issues visible + assigned issue_ids = self.project_issue.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)]) test_issue_ids = set([self.issue_1_id, self.issue_3_id, self.issue_5_id]) self.assertEqual(set(issue_ids), test_issue_ids, 'access rights: portal user should see the followed issues of a portal project') # Data: issue follower cleaning self.project_issue.message_unsubscribe_users(cr, self.user_projectuser_id, [self.issue_1_id, self.issue_3_id], [self.user_portal_id]) # ---------------------------------------- # CASE3: employee project # ---------------------------------------- self.project_project.write(cr, uid, [pigs_id], {'privacy_visibility': 'employees'}) # Do: Alfred reads project -> ok (employee ok employee) # Test: all project issues visible issue_ids = self.project_issue.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)])
test_issue_ids = set([self.issue_1_id, self.issue_2_id, self.issue_3_id, self.issue_4_id, self.issue_5_id, self.issue_6_id]) self.assertEqual(set(issue_ids), test_issue_ids, 'access rights: project user cannot see all issues of an employees project') # Do: Chel
l reads project -> ko (portal ko employee) # Test: no project issue visible + assigned issue_ids = self.project_issue.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)]) self.assertFalse(issue_ids, 'access rights: portal user should not see issues of an employees project, even if assigned') # ---------------------------------------- # CASE4: followers project # ---------------------------------------- self.project_project.write(cr, uid, [pigs_id], {'privacy_visibility': 'followers'}) # Do: Alfred reads project -> ko (employee ko followers) # Test: no project issue visible issue_ids = self.project_issue.search(cr, self.user_projectuser_id, [('project_id', '=', pigs_id)]) test_issue_ids = set([self.issue_4_id]) self.assertEqual(set(issue_ids), test_issue_ids, 'access rights: employee user should not see issues of a not-followed followers project, only assigned') # Do: Chell reads project -> ko (portal ko employee) # Test: no project issue visible issue_ids = self.project_issue.search(cr, self.user_portal_id, [('project_id', '=', pigs_id)]) test_issue_ids = set([self.issue_5_id]) self.assertEqual(set(issue_ids), test_issue_ids, 'access rights: portal user should not see issues of a not-followed followers project, only assigned') # Data: subscribe Alfred, Chell and Donovan as follower self.project_project.message_subscribe_users(cr, uid, [pigs_id], [self.user_projec
#!/u
sr/bin/env python # -*- coding: utf-8 -*- # # Proprietary and confidential. # Copyright 2011 Perfect Search Corporation. # All rights reserved. # import sys sys.dont_write_bytecode = True #import clientplugin import fastbranches import serverp
lugin
( self._parse_jsonpath_field, parts) def parse_jsonpath(self, field): try: parts = self.JSONPATH_RW_PARSER.parse(field) except Exception as e: raise MeterDefinitionException(_LE( "Parse error in JSONPath specification " "'%(jsonpath)s': %(err)s") % dict(jsonpath=field, err=e), self.cfg) return parts def match_type(self, meter_name): for t in self._event_type: if fnmatch.fnmatch(meter_name, t): return True def parse_fields(self, field, message, all_values=False): getter = self._field_getter.get(field) if not getter: return elif isinstance(getter, dict): dict_val = {} for key, val in getter.items(): dict_val[key] = val(message, all_values) return dict_val elif callable(getter): return getter(message, all_values) else: return getter @staticmethod def _parse_jsonpath_field(parts, message, all_values): values = [match.value for match in parts.find(message) if match.value is not None] if values: if not all_values: return values[0] return values def get_config_file(): config_file = cfg.CONF.meter.meter_definitions_cfg_file if not os.path.exists(config_file): config_file = cfg.CONF.find_file(config_file) if not config_file: config_file = pkg_resources.resource_filename( __name__, "data/meters.yaml") return config_file def setup_meters_config(): """Setup the meters definitions from yaml config file.""" config_file = get_config_file() if config_fi
le is not None: LOG.debug(_LE("Meter Definitions configuration file: %s"), config_file) with open(config_file) as cf: config = cf.read() try: meters_config = yaml.safe_load(config) except yaml.YAMLError as err: if hasattr(err, 'pr
oblem_mark'): mark = err.problem_mark errmsg = (_LE("Invalid YAML syntax in Meter Definitions file " "%(file)s at line: %(line)s, column: %(column)s.") % dict(file=config_file, line=mark.line + 1, column=mark.column + 1)) else: errmsg = (_LE("YAML error reading Meter Definitions file " "%(file)s") % dict(file=config_file)) LOG.error(errmsg) raise else: LOG.debug(_LE("No Meter Definitions configuration file found!" " Using default config.")) meters_config = {} LOG.info(_LI("Meter Definitions: %s"), meters_config) return meters_config def load_definitions(config_def): if not config_def: return [] meter_defs = [] for event_def in reversed(config_def['metric']): try: if (event_def['volume'] != 1 or not cfg.CONF.notification.disable_non_metric_meters): meter_defs.append(MeterDefinition(event_def)) except MeterDefinitionException as me: errmsg = (_LE("Error loading meter definition : %(err)s") % dict(err=me.message)) LOG.error(errmsg) return meter_defs class InvalidPayload(Exception): pass class ProcessMeterNotifications(plugin_base.NotificationBase): event_types = [] def __init__(self, manager): super(ProcessMeterNotifications, self).__init__(manager) self.definitions = load_definitions(setup_meters_config()) def get_targets(self, conf): """Return a sequence of oslo_messaging.Target It is defining the exchange and topics to be connected for this plugin. :param conf: Configuration. 
#TODO(prad): This should be defined in the notification agent """ targets = [] exchanges = [ conf.nova_control_exchange, conf.cinder_control_exchange, conf.glance_control_exchange, conf.neutron_control_exchange, conf.heat_control_exchange, conf.keystone_control_exchange, conf.sahara_control_exchange, conf.trove_control_exchange, conf.zaqar_control_exchange, conf.swift_control_exchange, conf.magnetodb_control_exchange, conf.ceilometer_control_exchange, conf.magnum_control_exchange, ] for exchange in exchanges: targets.extend(oslo_messaging.Target(topic=topic, exchange=exchange) for topic in conf.notification_topics) return targets @staticmethod def _normalise_as_list(value, d, body, length): values = d.parse_fields(value, body, True) if not values: if value in d.cfg.get('lookup'): LOG.warning('Could not find %s values', value) raise InvalidPayload values = [d.cfg[value]] elif value in d.cfg.get('lookup') and length != len(values): LOG.warning('Not all fetched meters contain "%s" field', value) raise InvalidPayload return values if isinstance(values, list) else [values] def process_notification(self, notification_body): for d in self.definitions: if d.match_type(notification_body['event_type']): userid = self.get_user_id(d, notification_body) projectid = self.get_project_id(d, notification_body) resourceid = d.parse_fields('resource_id', notification_body) ts = d.parse_fields('timestamp', notification_body) metadata = d.parse_fields('metadata', notification_body) if d.cfg.get('lookup'): meters = d.parse_fields('name', notification_body, True) if not meters: # skip if no meters in payload break try: resources = self._normalise_as_list( 'resource_id', d, notification_body, len(meters)) volumes = self._normalise_as_list( 'volume', d, notification_body, len(meters)) units = self._normalise_as_list( 'unit', d, notification_body, len(meters)) types = self._normalise_as_list( 'type', d, notification_body, len(meters)) users = (self._normalise_as_list( 'user_id', d, 
notification_body, len(meters)) if 'user_id' in d.cfg['lookup'] else [userid]) projs = (self._normalise_as_list( 'project_id', d, notification_body, len(meters)) if 'project_id' in d.cfg['lookup'] else [projectid]) times = (self._normalise_as_list( 'timestamp', d, notification_body, len(meters)) if 'timestamp' in d.cfg['lookup'] else [ts]) except InvalidPayload: break for m, v, unit, t, r, p, user, ts in zip( meters, volumes, itertools.cycle(units), itertools.cycle(types), itertools.cycle(resources), itertools.cycle(projs), itertools.cycle(users), itertools.cycle(times)): yield sample.Sample.from_notification( name=m, type=t, unit=unit, volume=v, resource_id=r, user_id=user, project_id=p, message=notification_body, timestamp=ts, metadata=metadata)
# -*- coding: utf-8 -*-

# ########################## Copyrights and license ############################
#                                                                              #
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net>                 #
# Copyright 2012 Zearin <zearin@gonk.net>                                      #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net>                 #
#                                                                              #
# This file is part of PyGithub.                                               #
# http://pygithub.github.io/PyGithub/v1/index.html                             #
#                                                                              #
# PyGithub is free software: you can redistribute it and/or modify it under   #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option)   #
# any later version.                                                           #
#                                                                              #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS   #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details.                                                                     #
#                                                                              #
# You should have received a copy of the GNU Lesser General Public License    #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>.            #
#                                                                              #
# ##############################################################################

import datetime

import Framework


class Issue54(Framework.TestCase):
    # NOTE(review): appears to be a regression test for PyGithub issue #54,
    # checking that git commit dates are converted correctly (the fixture
    # commit message cites both GMT and GMT+2 times) — confirm against the
    # upstream issue.

    def setUp(self):
        Framework.TestCase.setUp(self)
        # fixture repository owned by the authenticated test user
        self.repo = self.g.get_user().get_repo("TestRepo")

    def testConversion(self):
        commit = self.repo.get_git_commit("73f320ae06cd565cf38faca34b6a482addfc721b")
        self.assertEqual(commit.message, "Test commit created around Fri, 13 Jul 2012 18:43:21 GMT, that is vendredi 13 juillet 2012 20:43:21 GMT+2\n")
        # the author date is expected as a naive datetime in GMT
        self.assertEqual(commit.author.date, datetime.datetime(2012, 7, 13, 18, 47, 10))
# Desktop reminder script: cycles a fixed list of messages through a single
# notify2 notification, two minutes apart, forever (exit with CTRL+C).
import notify2
import os
from time import *

start_time = time()  # epoch seconds at program start
notify2.init('')
r = notify2.Notification('', '')  # one notification object, reused via update()
while True:
    for i in [
            ('TO DO', 'Write JavaScript'),
            ('TO DO', 'Write Python'),
            ('Thought of the Day', 'Support Open Source'),
            ('Learn. . .', 'Use Linux'),
            ('Thought of the Day', 'Stay Cool'),
            ('Thought of the Day', 'Stop running for cheese'),
            ('Thought of the Day', 'You are cool')]:
        r.update(i[0], i[1])  # i is (summary, body)
        sleep(120)
        # seconds into the current 120 s cycle
        x = int(time() - start_time)%120
        if x == 119:
            # beep via SoX: 0.5 s, 500 Hz sine
            # NOTE(review): x only equals exactly 119 if the loop wakes at the
            # right second, so the sound is not guaranteed every cycle — confirm
            # whether this is intended.
            os.system('play --no-show-progress --null --channels 1 synth %s sine %f' % ( 0.5, 500))
        # NOTE(review): show() is called after the 120 s sleep, so each updated
        # message is displayed at the END of its interval — verify ordering.
        r.show()
import statsmodels.tsa.stattools as st
import matplotlib.pylab as plt
import numpy as np
import pandas as pd

# Mean-reversion pair trade on GLD/USO: hold the spread against the
# rolling z-score of the price ratio.
df = pd.read_csv('gld_uso.csv')
cols = ['GLD', 'USO']

# Hedge ratio = price ratio of the two legs.
df['hedgeRatio'] = df['USO'] / df['GLD']

# FIX: pd.rolling_mean / pd.rolling_std were removed in pandas 0.23;
# the Series.rolling() accessor computes the same values.
data_mean = df['hedgeRatio'].rolling(window=20).mean()
data_std = df['hedgeRatio'].rolling(window=20).std()

# Negative rolling z-score of the ratio = units of the spread to hold.
df['numUnits'] = -1 * (df['hedgeRatio'] - data_mean) / data_std

# Per-leg positions: short GLD / long USO, scaled by numUnits.
positions = df[['numUnits', 'numUnits']].copy()
positions = positions * np.array([-1., 1.])

# Daily P&L: yesterday's position times today's percentage return per leg.
pnl = positions.shift(1) * np.array((df[cols] - df[cols].shift(1)) / df[cols].shift(1))
pnl = pnl.fillna(0).sum(axis=1)

# Return on gross capital deployed.
ret = pnl / np.sum(np.abs(positions.shift(1)), axis=1)

# FIX: Python 2 print statements -> print() function calls.
print('APR', ((np.prod(1. + ret)) ** (252. / len(ret))) - 1.)
print('Sharpe', np.sqrt(252.) * np.mean(ret) / np.std(ret))
a connection.', # -- 200's -- CMD_OK: '200 Command OK', TYPE_SET_OK: '200 Type set to %s.', ENTERING_PORT_MODE: '200 PORT OK', SYS_STATUS_OR_HELP_REPLY: '211 System status reply', DIR_STATUS: '212 %s', FILE_STATUS: '213 %s', HELP_MSG: '214 help: %s', NAME_SYS_TYPE: '215 UNIX Type: L8', WELCOME_MSG: "220 %s", SVC_READY_FOR_NEW_USER: '220 Service ready', GOODBYE_MSG: '221 Goodbye.', DATA_CNX_OPEN_NO_XFR_IN_PROGRESS: '225 data connection open, no transfer in progress', CLOSING_DATA_CNX: '226 Abort successful', TXFR_COMPLETE_OK: '226 Transfer Complete.', ENTERING_PASV_MODE: '227 Entering Passive Mode (%s).', # where is epsv defined in the rfc's? ENTERING_EPSV_MODE: '229 Entering Extended Passive Mode (|||%s|).', USR_LOGGED_IN_PROCEED: '230 User logged in, proceed', GUEST_LOGGED_IN_PROCEED: '230 Anonymous login ok, access restrictions apply.', #i.e. CWD completed ok REQ_FILE_ACTN_COMPLETED_OK: '250 Requested File Action Completed OK', PWD_REPLY: '257 "%s"', # -- 300's -- USR_NAME_OK_NEED_PASS: '331 Password required for %s.', GUEST_NAME_OK_NEED_EMAIL: '331 Guest login ok, type your email address as password.', REQ_FILE_ACTN_PENDING_FURTHER_INFO: '350 Requested file action pending further information.', # -- 400's -- CANT_OPEN_DATA_CNX: "425 Can't open data connection.", CNX_CLOSED_TXFR_ABORTED: '426 Transfer aborted. 
Data connection closed.', # -- 500's -- SYNTAX_ERR: "500 Syntax error: %s", SYNTAX_ERR_IN_ARGS: '501 syntax error in argument(s) %s.', CMD_NOT_IMPLMNTD: "502 Command '%s' not implemented", BAD_CMD_SEQ: '503 Incorrect sequence of commands: %s', CMD_NOT_IMPLMNTD_FOR_PARAM: "504 Not implemented for parameter '%s'.", NOT_LOGGED_IN: '530 Please login with USER and PASS.', AUTH_FAILURE: '530 Sorry, Authentication failed.', FILE_NOT_FOUND: '550 %s: No such file or directory.', PERMISSION_DENIED: '550 %s: Permission denied.', } class ftpd(connection): UNAUTH, INAUTH, AUTHED, RENAMING = range(4) def __init__ (self, proto='tcp'): connection.__init__(self, proto) logger.debug("ftp test") self.state = self.UNAUTH self.user = 'bar' self.dtp = None self.cwd = '/' self.basedir = '/tmp/ranz' self.dtp = None self.dtf = None self.limits = {}#{ '_out' : 8192 } def chroot(self, p): self.basedir = p def sendline(self, data): self.send(data + '\r\n') def reply(self, key, *args): msg = RESPONSE[key] % args self.sendline(msg) def handle_origin(self, parent): logger.debug("setting basedir to %s" % parent.basedir) self.basedir = parent.basedir def handle_established(self): self.processors() self.reply(WELCOME_MSG, "Welcome to the ftp service") def handle_io_in(self, data): # try: # data = data.decode() # except UnicodeDecodeError: # logger.warn("error decoding") # logger.debug("io_in" + data) logger.debug(data) lastsep = data.rfind(b"\n") if lastsep == -1: logger.debug("data without linebreak") return 0 lastsep += 1 # add last \n logger.debug("input size %i, can do %i" % (len(data), lastsep)) data = data[:lastsep] lines = data.splitlines(0) for line in lines: logger.debug("processing line '%s'" % line) if len(line) == 0: continue space = line.find(b' ') if space != -1: cmd = line[:space] args = (line[space + 1:],) else: cmd = line args = () logger.warn("cmd '%s'" % cmd) r = self.processcmd(cmd, args) if isinstance(r,tuple): self.reply(*r) elif r is not None: self.reply(r) return lastsep def 
processcmd(self, cmd, args): logger.debug("cmd '%s'" % cmd) l = [i.decode() for i in args] cmd = cmd.upper() if self.state == self.UNAUTH: if cmd != b'USER': return NOT_LOGGED_IN self.ftp_USER(*args) elif self.state == self.INAUTH: if cmd != b'PASS': return (BAD_CMD_SEQ, "PASS required after USER") self.ftp_PASS(*l) method = getattr(self, "ftp_" + cmd.decode(), None) if method is not None: return method(*l) else: return (CMD_NOT_IMPLMNTD, cmd.decode()) def ftp_USER(self, username): if not username: return (SYNTAX_ERR, 'USER requires an argument') self.state = self.INAUTH self.user = username if username == 'anonymous': return GUEST_NAME_OK_NEED_EMAIL else: return (USR_NAME_OK_NEED_PASS, username) def ftp_PASS(self, password): if not password: return (SYNTAX_ERR, 'PASS requires an argument') self.state = self.AUTHED if self.user == 'anonymous': return GUEST_LOGGED_IN_PROCEED else: return USR_LOGGED_IN_PROCEED def ftp_FEAT(self): self.send('211-Features:\r\n' + ' PASV\r\n' + ' PORT\r\n' + '211 End\r\n') return None def ftp_PORT(self, address): if self.dtf: self.dtf.close() self.dtf = None if self.dtp: self.dtp.close() self.dtp = None addr = list(map(int, address.split(','))) ip = '%d.%d.%d.%d' % tuple(addr[:4]) port = addr[4] << 8 | addr[5] logger.debug("PORT cmd for port %i" % port) if self.remote.host != ip and "::ffff:" + self.remote.host != ip: logger.warn("Potential FTP Bounce Scan detected") return None self.dtp = ftpdataconnect(ip, port, self) return None def ftp_PASV(self): if self.dtf: self.dtf.close() self.dtf = None if self.dtp: self.dtp.close() self.dtp = None self.dtf = ftpdatalisten(host=self.local.host, port=0, ctrl=self) host = self.dtf.local.host port = self.dtf.local.port self.
reply(ENTERING_PASV_MODE, encodeHostPort(host, port)) def ftp_QUIT(self): self.reply(GOODBYE_MSG) self.close() def real_path(self, p=None): if p: name = os.path.join(self.cwd, p) else: name = self.cwd if len(name) >= 1 and name[0] == '/': name = name[1:] name = os.path.join(self.basedir, name) name = os.path.normpath(nam
e) return name def ftp_RETR(self, p): if not p: return (SYNTAX_ERR_IN_ARGS, RETR) name = self.real_path(p) if not name.startswith(self.basedir): return (PERMISSION_DENIED, p) if os.path.exists(name) and os.path.isfile(name): if self.dtp: if self.dtp.status == 'established': self.reply(FILE_STATUS_OK_OPEN_DATA_CNX) self.dtp.send_file(name) else: logger.warn("dtp state %s %s:%i <-> %s:%i!" % (self.dtp.status, self.dtp.remote.host, self.dtp.remote.port, self.dtp.local.host, self.
import collections

import numpy as np
import sympy

from sym2num import function, var


def reload_all():
    """Reload the sym2num modules, for interactive testing."""
    import imp
    for module in (var, function):
        imp.reload(module)


if __name__ == '__main__':
    reload_all()

    # Symbolic callables referenced inside the printed expressions.
    g = var.UnivariateCallable('g')
    h = var.UnivariateCallable('h')

    from sympy.abc import t, w, x, y, z, m

    # Expressions that become the body of the generated function.
    output = [
        x**2 + sympy.erf(x) + g(x),
        sympy.cos(y) + 2*t + sympy.GoldenRatio,
        z*sympy.sqrt(sympy.sin(w) + 2)*h(x, 2),
    ]
    obj = {'data': [w], 'extra': {'other': [m, z]}, 'gg': g}
    arguments = function.Arguments(self=obj, t=t, state=[x, y], H=h)

    # Generate and display the printed function definition.
    f = function.FunctionPrinter('f', output, arguments)
    print(f.print_def())

    # Demonstrate symbolic substitution of call arguments.
    sf = function.SymbolicSubsFunction(function.Arguments(t=t, m=[x, y]), t**2 + x)
    print("\n" + "*" * 80 + "\n")
    print(sf(w**4, [2*x, 3*z]))
#!/usr/bin/python
#======================================================================
#
# Project : hpp_IOStressTest
# File : IOST_WRun_CTRL.py
# Date : Oct 25, 2016
# Author : HuuHoang Nguyen
# Contact : hhnguyen@apm.com
# : hoangnh.hpp@gmail.com
# License : MIT License
# Copyright : 2016
# Description: The hpp_IOStressTest is under the MIT License, a copy of license which may be found in LICENSE
#
#======================================================================

import io
import os
import sys
import time
from IOST_Prepare import IOST_Prepare
from IOST_Config import *
from IOST_Basic import *
from IOST_Host import *
from IOST_Terminal import *
import gtk
import gobject
import gtk.glade
import vte

#======================================================================
# Non-zero enables debug behavior for this module.
IOST_WRun_CTRL_Debug_Enable = 1

#======================================================================
class IOST_WRun_CTRL():
    """Controller for the "Workload Run" window: loads the Glade UI and
    embeds a VTE terminal for the test console.

    NOTE(review): methods read ``self.IOST_Objs``, which is not set in
    this class — presumably supplied by a subclass or mixin; confirm.
    """
    #----------------------------------------------------------------------
    def __init__(self, glade_filename, window_name, builder=None):
        """Create the controller.

        :param glade_filename: path of the Glade XML file, used only when
            no ``builder`` is supplied.
        :param window_name: key of this window in the shared object table.
        :param builder: optional, already-populated gtk.Builder to reuse.
        """
        self.IOST_WRun_CTRL_window = window_name
        # Host object backing the embedded console terminal.
        self.WRun_Host = IOST_Host("", "IOST_Console")
        # print self.WRun_Host.name
        if not builder:
            # Build the UI from the Glade file and connect signal
            # handlers defined on this instance.
            self.WRun_CTRL_Builder = gtk.Builder()
            self.WRun_CTRL_Builder.add_from_file(glade_filename)
            self.WRun_CTRL_Builder.connect_signals(self)
        else:
            self.WRun_CTRL_Builder = builder

    def WRun_GetCTRL_Obj(self, window_name):
        """Look up this window's widgets in the builder and cache them in
        the shared object table ``self.IOST_Objs``."""
        self.IOST_Objs[window_name][window_name+"_NoteBook"] = self.WRun_CTRL_Builder.get_object(self.IOST_Objs[window_name]["_NoteBook"])
        self.IOST_Objs[window_name][window_name+"_Terminal_ScrolledWindow"] = self.WRun_CTRL_Builder.get_object(self.IOST_Objs[window_name]["_Terminal_ScrolledWindow"])
        self.IOST_Objs[window_name][window_name+"_Terminal_L"] = self.WRun_CTRL_Builder.get_object(self.IOST_Objs[window_name]["_Terminal_L"])

    def WRun_InitCTRL_Obj(self, window_name):
        """Create the VTE terminal and attach it to the named window's
        scrolled-window container."""
        # Swallow button presses on the scrolled window so clicks do not
        # steal focus from the embedded terminal.
        self.IOST_Objs[window_name][window_name+"_Terminal_ScrolledWindow"].connect('button_press_event', lambda *args: True)
        self.WRun_Term = IOST_Terminal(self.IOST_Objs[window_name][window_name+"_NoteBook"], self.WRun_Host.name)
        self.IOST_Objs[window_name][window_name+"_Terminal_ScrolledWindow"].add(self.WRun_Term.IOST_vte)

    def WRun_IOST_VTE_show(self):
        """Make the embedded VTE terminal widget visible."""
        self.WRun_Term.IOST_vte.show()
#!/usr/bin/env jython from __future__ import with_statement from contextlib import
contextmanager import logging from plugins import __all__ log = logging.getLogger('kahuna') class PluginManager: """ Manages available plugins """ def __init__(self): """ Initialize the plugin list """ self.__plugins = {} def load_plugin(self, plugin_name): """ Loads a single plugin given its name """ if not plugin_na
me in __all__: raise KeyError("Plugin " + plugin_name + " not found") try: plugin = self.__plugins[plugin_name] except KeyError: # Load the plugin only if not loaded yet log.debug("Loading plugin: %s" % plugin_name) module = __import__("plugins." + plugin_name, fromlist=["plugins"]) plugin = module.load() self.__plugins[plugin_name] = plugin return plugin def call(self, plugin_name, command_name, args): """ Encapsulate the call into a context already loaded. """ try: plugin = self.load_plugin(plugin_name) except KeyError: # Plugin not found, pring generic help self.help_all() if not command_name: self.help(plugin) else: try: command = plugin._commands()[command_name] except KeyError: # Command not found in plugin. Print only plugin help self.help(plugin) with opencontext(plugin): return command(args) def help(self, plugin): """ Prints the help for the given plugin """ commands = plugin._commands() plugin_name = plugin.__module__.split('.')[-1] print "%s" % plugin.__doc__ for command in sorted(commands.iterkeys()): print " %s %s\t%s" % (plugin_name, command, commands[command].__doc__) def help_all(self): """ Prints the help for all registered plugins """ for name in sorted(__all__): plugin = self.load_plugin(name) self.help(plugin) print @contextmanager def opencontext(plugin): """ Loads the context each plugin needs to be initialized in order to be executed """ plugin._load_context() yield plugin._close_context()
# -*- coding: utf-8 -*- #
Copyright 2016, 2017 Kevin Reid and the ShinySDR contributors # # This file is part of ShinySDR. # # ShinySDR is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foun
dation, either version 3 of the License, or # (at your option) any later version. # # ShinySDR is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with ShinySDR. If not, see <http://www.gnu.org/licenses/>. """ Minimal units library. Used only for expressing units for display. Does not provide calculation or dimensions. """ from __future__ import absolute_import, division, print_function, unicode_literals from collections import namedtuple as _namedtuple from zope.interface import implementer as _implements from shinysdr.i.json import IJsonSerializable as _IJsonSerializable __all__ = [] # appended later class Unit(_namedtuple('Unit', [ 'symbol', 'si_prefix_ok'])): # TODO allow requesting binary prefixes? _implements(_IJsonSerializable) def to_json(self): return { 'type': 'Unit', 'symbol': self.symbol, 'si_prefix_ok': self.si_prefix_ok } def __str__(self): return self.symbol __all__.append('Unit') # TODO: reflectively put units into __all__ none = Unit('', True) s = Unit('s', True) degree = Unit('°', False) # degree of angle degC = Unit('°C', False) degF = Unit('°F', False) dB = Unit('dB', False) dBm = Unit('dBm', False) dBFS = Unit('dBFS', False) Hz = Unit('Hz', True) MHz = Unit('MHz', False) # TODO: Remove or refine this when si_prefix_ok is actually used ppm = Unit('ppm', False)
g_value( 'passwd', vm_, __opts__, search_global=False ), search_global=False ) def get_key(): ''' Returns the ssh private key for VM access ''' return config.get_cloud_config_value( 'private_key', get_configured_provider(), __opts__, search_global=False ) def get_keypair(vm_): ''' Return the keypair to use ''' keypair = config.get_cloud_config_value('keypair', vm_, __opts__) if keypair: return keypair else: return False def get_ip(data): ''' Return the IP address of the VM If the VM has public IP as defined by libcloud module then use it Otherwise try to extract the private IP and use that one. ''' try: ip = data.public_ips[0] except Exception: ip = data.private_ips[0] return ip def get_networkid(vm_): ''' Return the networkid to use, only valid for Advanced Zone ''' networkid = config.get_cloud_config_value('networkid', vm_, __opts__) if networkid is not None: return networkid else: return False def get_project(conn, vm_): ''' Return the project to use. ''' try: projects = conn.ex_list_projects() except AttributeError: # with versions <0.15 of libcloud this is causing an AttributeError. log.warning('Cannot get projects, you may need to update libcloud to 0.15 or later') return False projid = config.get_cloud_config_value('projectid', vm_, __opts__) if not projid: return False for project in projects: if str(projid) in (str(project.id), str(project.name)): return project log.warning("Couldn't find project {0} in projects".format(projid)) return False def create(vm_): ''' Create a single VM from a data dict ''' try: # Check for required profile parameters before sending any API calls. 
if vm_['profile'] and config.is_profile_configured(__opts__, __active_provider_name__ or 'cloudstack', vm_['profile'], vm_=vm_) is False: return False except AttributeError: pass # Since using "provider: <provider-engine>" is deprecated, alias provider # to use driver: "driver: <provider-engine>" if 'provider' in vm_: vm_['driver'] = vm_.pop('provider') salt.utils.cloud.fire_event( 'event', 'starting create', 'salt/cloud/{0}/creating'.format(vm_['name']), { 'name': vm_['name'], 'profile': vm_['profile'], 'provider': vm_['driver'], }, transport=__opts__['transport'] ) log.info('Creating Cloud VM {0}'.format(vm_['name'])) conn = get_conn() kwargs = { 'name': vm_['name'], 'image': get_image(conn, vm_), 'size': get_size(conn, vm_), 'location': get_location(conn, vm_), } if get_keypair(vm_) is not False: kwargs['ex_keyname'] = get_keypair(vm_) if get_networkid(vm_) is not False: kwargs['networkids'] = get_networkid(vm_) kwargs['networks'] = ( # The only attr that is used is 'id'. CloudStackNetwork(None, None, None, kwargs['networkids'], None, None), ) if get_project(conn, vm_) is not False: kwargs['project'] = get_project(conn, vm_) salt.utils.cloud.fire_event( 'event', 'requesting instance', 'salt/cloud/{0}/requesting'.format(vm_['name']), {'kwargs': {'name': kwargs['name'], 'image': kwargs['image'].name, 'size': kwargs['size'].name}}, transport=__opts__['transport'] ) displayname = cloudstack_displayname(vm_) if displayname: kwargs['ex_displayname'] = displayname else: kwargs['ex_displayname'] = kwargs['name'] volumes = {} ex_blockdevicemappings = block_device_mappings(vm_) if ex_blockdevicemappings: for ex_blockdevicemapping in ex_blockdevicemappings: if 'VirtualName' not in ex_blockdevicemapping: ex_blockdevicemapping['VirtualName'] = '{0}-{1}'.format(vm_['name'], len(volumes)) salt.utils.cloud.fire_event( 'event', 'requesting volume', 'salt/cloud/{0}/requesting'.format(ex_blockdevicemapping['VirtualName']), {'kwargs': {'name': ex_blockdevicemapping['VirtualName'], 
'device': ex_blockdevicemapping['DeviceName'], 'size': ex_blockdevicemapping['VolumeSize']}}, ) try: volumes[ex_blockdevicemapping['DeviceName']] = conn.create_volume( ex_blockdevicemapping['VolumeSize'], ex_blockdevicemapping['VirtualName'] ) except Exception as exc: log.error( 'Error creating volume {0} on CLOUDSTACK\n\n' 'The following exception was thrown by libcloud when trying to ' 'requesting a volume: \n{1}'.format( ex_blockdevicemapping['VirtualName'], exc ), # Show the traceback if the debug logging level is enabled exc_info_on_loglevel=logging.DEBUG ) return False else: ex_blockdevicemapping = {} try: data = conn.create_node(**kwargs) except Exception as exc: log.error( 'Error creating {0} on CLOUDSTACK\n\n' 'The following exception was thrown by libcloud when trying to ' 'run the initi
al deployment: \n{1}'.format( vm_['name'], str(exc) ), # Show the traceback if the debug logging level is enabled exc_in
fo_on_loglevel=logging.DEBUG ) return False for device_name in six.iterkeys(volumes): try: conn.attach_volume(data, volumes[device_name], device_name) except Exception as exc: log.error( 'Error attaching volume {0} on CLOUDSTACK\n\n' 'The following exception was thrown by libcloud when trying to ' 'attach a volume: \n{1}'.format( ex_blockdevicemapping.get('VirtualName', 'UNKNOWN'), exc ), # Show the traceback if the debug logging level is enabled exc_info=log.isEnabledFor(logging.DEBUG) ) return False ssh_username = config.get_cloud_config_value( 'ssh_username', vm_, __opts__, default='root' ) vm_['ssh_host'] = get_ip(data) vm_['password'] = data.extra['password'] vm_['key_filename'] = get_key() ret = salt.utils.cloud.bootstrap(vm_, __opts__) ret.update(data.__dict__) if 'password' in data.extra: del data.extra['password'] log.info('Created Cloud VM {0[name]!r}'.format(vm_)) log.debug( '{0[name]!r} VM creation details:\n{1}'.format( vm_, pprint.pformat(data.__dict__) ) ) salt.utils.cloud.fire_event( 'event', 'created instance', 'salt/cloud/{0}/created'.format(vm_['name']), { 'name': vm_['name'], 'profile': vm_['profile'], 'provider': vm_['driver'], }, transport=__opts__['transport'] ) return ret def destroy(name, conn=None, call=None): ''' Delete a single VM, and all of its volumes ''' if call == 'function': raise SaltCloudSystemExit( 'The destroy action must be called with -d, --destroy, ' '-a or --action.' ) salt.utils.cloud.fire_event( 'event', 'destroying instance', 'salt/cloud/{0}/destroying'.format(nam
#!/usr/bin/env python # -*- coding: utf-8 -*- # -*- Author: ClarkYAN -*- from get_connection import * from get_key import * import Tkinter import tkMessageBox class mainFrame: def __init__(self): self.root = Tkinter.Tk() self.root.title('Secure Protocol Systems') self.root.geometry('600x300') self.root.resizable(width=False, height=True) # self.scroll = Tkinter.Scrollbar(self.root).pack(side=Tkinter.RIGHT) # TOP self.frm = Tkinter.Frame(self.root) labelTitle = Tkinter.Label(self.root, text="Data Owner 1", font=("Arial", 26)) labelTitle.pack() # LEFT self.frm_l = Tkinter.Frame(self.frm) labelControl = Tkinter.Label(self.frm_l, text="Control Panel", font=("Arial", 20), height=2) labelControl.pack() buttonSetUp = Tkinter.Button(self.frm_l, text="Set Up Connection", font=("Arial", 18), height=2, command=setUpConnection) buttonSetUp.pack(side=Tkinter.LEFT) buttonReceiveKey = Tkinter.Button(self.frm_l, text="Receive Public Key", font=("Arial", 18), height=2, command=getKey) buttonReceiveKey.pack(side=Tkinter.LEFT) self.frm_l.pack(side=Tkinter.TOP) s
elf.frm_r = Tkinter.Frame(self.frm) buttonEncrypted = Tkinter.Button(self.frm_r, text="Encrypting Original data", font=("Arial", 18), height=2) buttonEncrypted.pack(side=Tkinter.LEFT) buttonSend = Tkinter.Button(self.frm_r, text="Send to Cloud", font=("Arial"
, 18), height=2) buttonSend.pack(side=Tkinter.LEFT) self.frm_r.pack(side=Tkinter.BOTTOM) self.frm.pack(side=Tkinter.TOP) self.root.mainloop() def setUpConnection(): url = 'http://127.0.0.1:4000/' sender = 'data_owner_1' result = str(set_up_connection(url, sender)) tkMessageBox.showinfo("Recent Event", result) print result def getKey(): url = 'http://127.0.0.1:5000/key' sender = 'data_owner_1' result = send_info(url, sender) tkMessageBox.showinfo("Recent Event", result) print result def main(): db1 = mainFrame() if __name__ == "__main__": main()
from os.path import join, dirname

from setuptools import setup

# Long description comes straight from the README next to this file.
readme_path = join(dirname(__file__), 'README.txt')
long_description = open(readme_path).read()

setup(
    name='xmppgcm',
    packages=['xmppgcm'],  # this must be the same as the name above
    version='0.2.3',
    description='Client Library for Firebase Cloud Messaging using XMPP',
    long_description=long_description,
    install_requires=['sleekxmpp'],
    author='Winster Jose',
    author_email='wtjose@gmail.com',
    url='https://github.com/winster/xmppgcm',
    keywords=['gcm', 'fcm', 'xmpp', 'xmppgcm', 'xmppfcm'],  # arbitrary keywords
    classifiers=[],
)
#!/usr/bin/env python

"""
Service Subpackage

Re-exports the service submodules so they register on package import.
"""

from . import test
from . import detect
from . import device
from . import object
from . import cov
from . import file
# Pizza please
import pyaudiogame
from pyaudiogame import storage

spk = pyaudiogame.speak
MyApp = pyaudiogame.App("Pizza Please")

# Initial UI state and topping data.
storage.screen = ["start"]
storage.toppings = ["cheese", "olives", "mushrooms", "Pepperoni", "french fries"]
storage.your_toppings = ["cheese"]
storage.did_run = False


def is_number(number, topping_list):
    """Will check that what the user enters is really a number and not a letter, also that it is within our list"""
    if number not in "0123456789":
        return None
    index = int(number)
    if index <= len(topping_list) - 1:
        return index
    return None


def say_message(message):
    """Will check if the message has been read and if so, passes. Else, it will read the message"""
    if storage.did_run:
        return
    spk(message)
    storage.did_run = True


def add_topping(key):
    """Will add a topping to your pizza"""
    index = is_number(key, storage.toppings)
    if index is None:
        return
    topping = storage.toppings[index]
    storage.your_toppings.append(topping)
    spk("You added %s to your pizza. Your pizza currently has %s on top" % (topping, storage.your_toppings))


def remove_topping(key):
    """Removes toppings from the pizza"""
    index = is_number(key, storage.your_toppings)
    if index is None:
        return
    removed = storage.your_toppings.pop(index)
    if removed == "cheese":
        # Cheese is mandatory: put it back at the front of the list.
        spk("You can't remove cheese, what are you, Italian?")
        storage.your_toppings.insert(0, "cheese")
    else:
        spk("You removed %s from your pizza. Now your pizza has %s on top" % (removed, storage.your_toppings))


def logic(actions):
    """Press a and d to switch from adding and removing toppings, press 0-9 to deal with the toppings and press space to eat the pizza"""
    key = actions['key']
    if key == "d":
        spk("Press a number to remove a topping from your pizza, press a to add toppings again")
        storage.screen[0] = "remove"
        storage.did_run = False
    elif key == "a":
        spk("Press a number to add a topping to your pizza. Press d to remove a topping you don't like")
        storage.screen[0] = "add"
        storage.did_run = False
    elif key == "space":
        spk("You sit down to enjoy a yummy pizza. You eat... eat... eat... eat... and are finally done. That was good! Now it's time for another!")
        storage.your_toppings = ['cheese']
        storage.did_run = False
    elif storage.screen[0] == "start":
        spk("Welcom to pizza madness! Here you can build your own pizza to eat! Press a to add toppings, press d to remove them and when you are done, press space to eat your yummy pizza!!!")
        storage.screen.remove("start")
        storage.screen.append("add")
    elif storage.screen[0] == "add":
        say_message("Please choose a number of toppings to add! Press d to start removing toppings. Toppings are %s" % storage.toppings)
        if key:
            add_topping(key)
    elif storage.screen[0] == "remove" and key:
        remove_topping(key)


MyApp.logic = logic
MyApp.run()
from __future__ import unicode_literals

from django.db import models

# BUG FIX: `reverse` was never imported, so get_absolute_url raised
# NameError at runtime.  Support both old and new import locations.
try:
    from django.urls import reverse  # Django >= 1.10
except ImportError:  # Django < 1.10
    from django.core.urlresolvers import reverse

from modpacks.models.modpack import Modpack


class Server(models.Model):
    """
    Minecraft Server details for display on the server page
    """
    name = models.CharField(verbose_name='Server Name', max_length=200)
    desc = models.TextField(verbose_name='Server Description', blank=True)
    modpack = models.ForeignKey(Modpack, verbose_name='Server Modpack')
    address = models.CharField(verbose_name='Server Address', max_length=200, blank=True)
    screenshot = models.ImageField(verbose_name='Screenshot', blank=True)
    dynmap = models.CharField(verbose_name='DynMap URL', max_length=200, blank=True)
    slug = models.SlugField()

    def get_absolute_url(self):
        """Return the canonical URL of this server's detail page.

        BUG FIX: was ``reverse("server", self.slug)``, which passes the
        slug as ``reverse``'s ``urlconf`` argument; the slug must be
        supplied via ``args``.
        """
        return reverse("server", args=[self.slug])

    def __str__(self):
        return self.name
#!/usr/bin/python3

# Copyright (c) 2018-2021 Dell Inc. or its subsidiaries.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import json
import logging
import os
import re
import sys

import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning

from credential_helper import CredentialHelper
from dell_nfv import ConfigOvercloud
from ironic_helper import IronicHelper
from logging_helper import LoggingHelper
from utils import Utils

logging.basicConfig()
# Logger named after this script's basename (without extension).
logger = logging.getLogger(os.path.splitext(os.path.basename(sys.argv[0]))[0])
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
home_dir = os.path.expanduser('~')
UC_USERNAME = UC_PASSWORD = UC_PROJECT_ID = UC_AUTH_URL = ''


class ConfigEdge(ConfigOvercloud):
    """
    Description: Class responsible for overcloud configurations.

    Computes the NFV-related deployment parameters (hugepages, NUMA CPU
    partitioning, OVS-DPDK settings) for one edge-site node type.
    """
    # NOTE(review): these class attributes run Ironic/credential API calls
    # at import time — presumably intentional so all instances share one
    # client/node list; confirm.
    ironic = IronicHelper()
    ironic_client = ironic.get_ironic_client()
    nodes = ironic_client.node.list()
    get_drac_credential = CredentialHelper()

    def __init__(self, overcloud_name, node_type, node_type_data):
        """Store the edge-site metadata and derive per-site file paths.

        :param overcloud_name: name of the overcloud (passed to the base class).
        :param node_type: name of the edge site being configured.
        :param node_type_data: JSON string of the edge-site metadata.
        """
        self.node_type = node_type
        self.node_type_data = json.loads(node_type_data)
        self.mtu = int(self.node_type_data["nfv_mtu"])
        # Directory name: non-alphanumerics collapsed to underscores.
        _dir = (re.sub(r'[^a-z0-9]', " ", node_type.lower()).replace(" ", "_"))
        # File-name token: non-alphanumerics stripped entirely.
        _ntl = re.sub(r'[^a-z0-9]', "", node_type.lower())
        ne_name = "nic_environment_{}.yaml".format(_ntl)
        instack_name = "instackenv_{}.json".format(_ntl)
        nic_env_file = os.path.join(home_dir, _dir, ne_name)
        instackenv_file = os.path.join(home_dir, _dir, instack_name)
        self.instackenv = instackenv_file
        self.nic_env = nic_env_file
        super().__init__(overcloud_name)

    def fetch_nfv_parameters(self):
        """Build and return the NFV heat parameters for this edge site.

        Returns a dict with a "dell_env" sub-dict (kernel args, isolated /
        dedicated CPU lists) and, when OVS-DPDK is enabled, a "dpdk"
        sub-dict (core lists and socket memory).
        """
        logger.debug("Retrieving NFV parameters")
        ntd = self.node_type_data
        enable_hugepage = Utils.string_to_bool(ntd["hpg_enable"])
        enable_numa = Utils.string_to_bool(ntd["numa_enable"])
        nfv_type = self._get_nfv_type(ntd)
        is_ovs_dpdk = bool(nfv_type and nfv_type in ["dpdk", "both"])
        hostos_cpu_count = int(ntd["numa_hostos_cpu_count"])
        # Re-derive the per-site nic-environment path (same scheme as __init__).
        _dir = (re.sub(r'[^a-z0-9]', " ", self.node_type.lower()).replace(" ", "_"))
        ntl = re.sub(r'[^a-z0-9]', "", self.node_type.lower())
        _f_name = "nic_environment_{}.yaml".format(ntl)
        nic_env_file = os.path.join(home_dir, _dir, _f_name)
        params = {}
        params_dell_env = params["dell_env"] = {}
        # Base kernel arguments required for VFIO/IOMMU passthrough.
        kernel_args = "iommu=pt intel_iommu=on"
        if enable_hugepage:
            hpg_num = self.nfv_params.calculate_hugepage_count(
                ntd["hpg_size"])
            # hpg_size[0:-1] drops the trailing unit letter (e.g. "1G" -> "1").
            kernel_args += (" default_hugepagesz={} hugepagesz={}"
                            " hugepages={}").format(ntd["hpg_size"],
                                                    ntd["hpg_size"][0:-1],
                                                    str(hpg_num))
        if enable_numa:
            # Partition the CPUs of a representative compute node into
            # host-OS, Nova and (optionally) DPDK PMD sets.
            _, node_data = self.nfv_params.select_compute_node(self.node_type,
                                                              self.instackenv)
            self.nfv_params.parse_data(node_data)
            self.nfv_params.get_all_cpus()
            self.nfv_params.get_host_cpus(hostos_cpu_count)
            self.nfv_params.get_nova_cpus()
            self.nfv_params.get_isol_cpus()
            if is_ovs_dpdk:
                dpdk_nics = self.find_ifaces_by_keyword(nic_env_file, 'Dpdk')
                logger.debug("DPDK-NICs >>" + str(dpdk_nics))
                self.nfv_params.get_pmd_cpus(self.mtu, dpdk_nics)
                self.nfv_params.get_socket_memory(self.mtu, dpdk_nics)
            kernel_args += " isolcpus={}".format(self.nfv_params.isol_cpus)
            # dell-environmment role specific cpu parameters
            params_dell_env["IsolCpusList"] = self.nfv_params.isol_cpus
            params_dell_env["NovaComputeCpuDedicatedSet"] = self.nfv_params.nova_cpus
            if is_ovs_dpdk:
                params_dpdk = params["dpdk"] = {}
                params_dpdk["OvsDpdkCoreList"] = self.nfv_params.host_cpus
                params_dpdk["OvsPmdCoreList"] = self.nfv_params.pmd_cpus
                params_dpdk["OvsDpdkSocketMemory"] = self.nfv_params.socket_mem
                # params_dpdk["IsolCpusList"] = self.nfv_params.isol_cpus  # Populated in dell_env file
                # params_dpdk["NovaComputeCpuDedicatedSet"] = self.nfv_params.nova_cpus  # Populated in dell_env file
                # params_dpdk["NovaComputeCpuSharedSet"] = self.nfv_params.shared_cpus  # Not used in current Architecture
        params_dell_env["KernelArgs"] = kernel_args
        return params

    def _get_nfv_type(self, node_type_data):
        """Return the validated nfv_type ("dpdk", "sriov" or "both"), or
        None when absent/blank/unrecognized."""
        if ("nfv_type" in node_type_data
                and len(node_type_data["nfv_type"].strip()) != 0
                and node_type_data["nfv_type"].strip() in ("dpdk", "sriov",
                                                           "both")):
            return node_type_data["nfv_type"].strip()
        return None


def main():
    """Parse CLI arguments, compute the edge-site NFV parameters and
    return them as a JSON string."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--overcloud_name",
                        default=None,
                        help="The name of the overcloud")
    parser.add_argument("--edge_site",
                        default=None,
                        dest="node_type",
                        help="The name of edge site being configured")
    parser.add_argument("--edge_site_data",
                        default=None,
                        dest="node_type_data",
                        help="The edge site metadata")
    parser.add_argument("--debug",
                        default=False,
                        action='store_true',
                        help="Turn on debugging for this script")
    LoggingHelper.add_argument(parser)
    args = parser.parse_args()
    LoggingHelper.configure_logging(args.logging_level)
    config_edge = ConfigEdge(args.overcloud_name, args.node_type,
                             args.node_type_data)
    params = config_edge.fetch_nfv_parameters()
    logger.debug(">>>>>> nfv parameters {}".format(str(params)))
    return json.dumps(params)


if __name__ == "__main__":
    # The resulting JSON is written to stdout for the calling process.
    res = main()
    logger.debug(">>>>>> res {}".format(str(res)))
    sys.stdout.write(res)
from sqlalchemy import *
from test.lib import *


class FoundRowsTest(fixtures.TestBase, AssertsExecutionResults):
    """Tests rowcount functionality: the dialect must report accurate
    matched/affected row counts for UPDATE and DELETE statements
    (hence the ``sane_rowcount`` requirement)."""

    __requires__ = ('sane_rowcount', )

    @classmethod
    def setup_class(cls):
        # Build a throwaway ``employees`` table once for the whole class.
        global employees_table, metadata
        metadata = MetaData(testing.db)
        employees_table = Table('employees', metadata,
            Column('employee_id', Integer,
                   Sequence('employee_id_seq', optional=True),
                   primary_key=True),
            Column('name', String(50)),
            Column('department', String(1)),
        )
        metadata.create_all()

    def setup(self):
        # Re-populate before every test: exactly three rows per department
        # so the WHERE clauses below always match three rows.
        global data
        data = [('Angela', 'A'),
                ('Andrew', 'A'),
                ('Anand', 'A'),
                ('Bob', 'B'),
                ('Bobette', 'B'),
                ('Buffy', 'B'),
                ('Charlie', 'C'),
                ('Cynthia', 'C'),
                ('Chris', 'C')]
        i = employees_table.insert()
        i.execute(*[{'name': n, 'department': d} for n, d in data])

    def teardown(self):
        employees_table.delete().execute()

    @classmethod
    def teardown_class(cls):
        metadata.drop_all()

    def testbasic(self):
        # Sanity check: a plain SELECT returns every inserted row.
        s = employees_table.select()
        r = s.execute().fetchall()
        assert len(r) == len(data)

    def test_update_rowcount1(self):
        # WHERE matches 3, 3 rows changed
        department = employees_table.c.department
        r = employees_table.update(department == 'C').execute(department='Z')
        # print converted from the Python-2-only statement form so the
        # module also parses on Python 3; output is identical on Python 2.
        print("expecting 3, dialect reports %s" % r.rowcount)
        assert r.rowcount == 3

    def test_update_rowcount2(self):
        # WHERE matches 3, 0 rows actually change value; a "sane" dialect
        # still reports 3 matched rows.
        department = employees_table.c.department
        r = employees_table.update(department == 'C').execute(department='C')
        print("expecting 3, dialect reports %s" % r.rowcount)
        assert r.rowcount == 3

    def test_delete_rowcount(self):
        # WHERE matches 3, 3 rows deleted
        department = employees_table.c.department
        r = employees_table.delete(department == 'C').execute()
        print("expecting 3, dialect reports %s" % r.rowcount)
        assert r.rowcount == 3
#!/usr/bin/python
import RPi.GPIO as GPIO
import signal
import time
from on_off import *


class keypad():
    """Matrix-keypad driver for Raspberry Pi GPIO (BCM numbering).

    Supports a 3-column (3x4) or 4-column (4x4) keypad; the key layout and
    the BCM pin lists are selected by ``columnCount``.
    """

    def __init__(self, columnCount=3):
        GPIO.setmode(GPIO.BCM)

        # CONSTANTS: key layout plus BCM pin numbers for rows/columns.
        # Fixed: the original compared with ``is``; identity comparison
        # against int literals only works by accident in CPython.
        if columnCount == 3:
            self.KEYPAD = [
                [1, 2, 3],
                [4, 5, 6],
                [7, 8, 9],
                ["*", 0, "#"]
            ]
            self.ROW = [27, 23, 22, 24]
            self.COLUMN = [10, 25, 9]
        elif columnCount == 4:
            self.KEYPAD = [
                [1, 2, 3, "A"],
                [4, 5, 6, "B"],
                [7, 8, 9, "C"],
                ["*", 0, "#", "D"]
            ]
            self.ROW = [18, 23, 24, 25]
            self.COLUMN = [4, 17, 22, 21]
        else:
            # Unsupported column count: object is left without pin tables.
            # (Kept for backward compatibility; raising would be clearer.)
            return

    def timer(self, sig, frame):
        # SIGALRM handler: abort code entry when the caller's alarm fires.
        raise Exception('Time is up')

    def KeyPadAuthor(self):
        """Collect a 5-key code from the keypad; return it as a list of
        strings.  Re-arms the caller's SIGALRM timer on every keypress.

        (A redundant ``keypad()`` instantiation was removed: this method
        already runs on an initialized instance, and the extra instance
        was never used.)
        """
        arr = []

        # Loop while waiting for a keypress; ``digit`` tracks the last
        # seen key so a held-down key is only registered once.
        digit = None
        while len(arr) < 5:
            tempDigit = self.getKey()
            if tempDigit != digit:
                digit = tempDigit
                if digit is not None:
                    # every time a key is pressed it resets the timer
                    signal.alarm(7)
                    arr.append(str(digit))
                    blinkKey()  # visual feedback; provided by on_off
                    print(digit)

        # return the result
        return arr

    def getKey(self):
        """Scan the matrix once; return the pressed key's value, or None
        (implicitly) when no key is currently pressed."""
        # Set all columns as output low
        for j in range(len(self.COLUMN)):
            GPIO.setup(self.COLUMN[j], GPIO.OUT)
            GPIO.output(self.COLUMN[j], GPIO.LOW)

        # Set all rows as input
        for i in range(len(self.ROW)):
            GPIO.setup(self.ROW[i], GPIO.IN, pull_up_down=GPIO.PUD_UP)

        # Scan rows for pushed key/button: a valid press pulls one row low.
        rowVal = -1
        for i in range(len(self.ROW)):
            tmpRead = GPIO.input(self.ROW[i])
            if tmpRead == 0:
                rowVal = i

        # If rowVal is out of range then no button was pressed; bounds are
        # now derived from the pin table instead of the former hard-coded
        # 0..3 so any layout works.
        if rowVal < 0 or rowVal >= len(self.ROW):
            self.exit()
            return

        # Convert columns to input
        for j in range(len(self.COLUMN)):
            GPIO.setup(self.COLUMN[j], GPIO.IN, pull_up_down=GPIO.PUD_DOWN)

        # Switch the row found by the scan to output high
        GPIO.setup(self.ROW[rowVal], GPIO.OUT)
        GPIO.output(self.ROW[rowVal], GPIO.HIGH)

        # Scan columns for the still-pushed key/button
        colVal = -1
        for j in range(len(self.COLUMN)):
            tmpRead = GPIO.input(self.COLUMN[j])
            if tmpRead == 1:
                colVal = j

        # Former code hard-coded ``colVal > 2`` here, which wrongly rejected
        # the 4th column of a 4-column keypad; use the real column count.
        if colVal < 0 or colVal >= len(self.COLUMN):
            self.exit()
            return

        # Return the value of the key pressed
        self.exit()
        return self.KEYPAD[rowVal][colVal]

    def exit(self):
        # Reinitialize all rows and columns as input at exit
        for i in range(len(self.ROW)):
            GPIO.setup(self.ROW[i], GPIO.IN, pull_up_down=GPIO.PUD_UP)
        for j in range(len(self.COLUMN)):
            GPIO.setup(self.COLUMN[j], GPIO.IN, pull_up_down=GPIO.PUD_UP)
from __future__ import print_function from builtins import range import sys sys.path.insert(1,"../../") import h2o from tests import pyunit_utils import random import os def javapredict_dynamic_data(): # Generate random dataset dataset_params = {} dataset_params['rows'] = random.sample(list(range(5000,15001)),1)[0] dataset_params['cols'] = random.sample(list(range(10,21)),1)[0] dataset_params['categorical_fraction'] = round(random.random(),1) left_over = (1 - dataset_params['categorical_fraction']) dataset_params['integer_fraction'] = round(left_over - round(random.uniform(0,left_over),1),1) if dataset_params['integer_fraction'] + dataset_params['categorical_fraction'] == 1: if dataset_params['integer_fraction'] > dataset_params['categorical_fraction']: dataset_params['integer_fraction'] = dataset
_params['integer_fraction'] - 0.1 else: dataset_params['categorical_fraction'] = dataset_para
ms['categorical_fraction'] - 0.1 dataset_params['missing_fraction'] = random.uniform(0,0.5) dataset_params['has_response'] = True dataset_params['randomize'] = True dataset_params['factors'] = random.randint(2,2000) print("Dataset parameters: {0}".format(dataset_params)) train = h2o.create_frame(**dataset_params) print("Training dataset:") print(train) # Save dataset to results directory results_dir = pyunit_utils.locate("results") h2o.download_csv(train,os.path.join(results_dir,"kmeans_dynamic_training_dataset.log")) # Generate random parameters params = {} params['k'] = random.sample(list(range(1,10)),1)[0] if random.randint(0,1): params['max_iterations'] = random.sample(list(range(1,1000)),1)[0] if random.randint(0,1): params['standardize'] = random.sample([True, False],1)[0] if random.randint(0,1): params['seed'] = random.sample(list(range(1,1000)),1)[0] if random.randint(0,1): params['init'] = random.sample(['Random','PlusPlus','Furthest'],1)[0] print("Parameter list: {0}".format(params)) x = train.names x.remove("response") y = "response" pyunit_utils.javapredict(algo="kmeans", equality=None, train=train, test=None, x=x, y=y, compile_only=True, **params) if __name__ == "__main__": pyunit_utils.standalone_test(javapredict_dynamic_data) else: javapredict_dynamic_data()
) file.close() self.close() return True except IOError: return False def MoranI(self): QApplication.setOverrideCursor(Qt.WaitCursor) if len(self.nb)==0: if self.comboBox_5.currentText()!='within distance': nb = self.poly2nb() else: if self.lineEdit.text() == '': QApplication.restoreOverrideCursor() QMessageBox.information(None, 'Missing data', 'Within distance must be set up!') return nb = self.point2nb() self.nb = nb else: nb = self.nb cardnb = self.card(nb) zero = 0 if len(cardnb)==0: zero += 1 return glist = [] for m in cardnb: s = [] if m>0: s = [1]*m glist.append(s) n = len(cardnb) effn = n-zero if effn<1: return # vlist = [None]*n # vlist = [[None]]*n vlist = [[0]] * n if self.comboBox_2.currentText()=='B': for i in xrange(n): g = glist[i] if cardnb[i]>0: vlist[i] = g elif self.comboBox_2.currentText()=='C' or self.comboBox_2.currentText()=='U': D = sum(list(itertools.chain.from_iterable(glist))) if D<1: return if self.comboBox_2.currentText()=='C': nu = float(effn) else: nu = 1.0 qr = nu/float(D) for i in xrange(n): if cardnb[i]>0: vlist[i] = [x * qr for x in glist[i]] elif self.comboBox_2.currentText()=='S': q = [] for i in xrange(len(glist)): gg = [] for j in xrange(len(glist[i])): gg.append(power(2*glist[i][j],2)) q.append(sqrt(sum(gg))) for i in xrange(n): if cardnb[i]>0: if q[i]>0: mpl = (1.0/float(q[i])) else: mpl = 0.0 v = [x * mpl for x in glist[i]] glist[i] = v Q = sum(list(itertools.chain.from_iterable(glist))) if Q<1: return qr = float(effn)/float(Q) for i in xrange(n): if cardnb[i]>0: vlist[i] = [x * qr for x in glist[i]] elif self.comboBox_2.currentText()=='W': for i in xrange(n): g = glist[i] d = sum(g) if cardnb[i]>0: if d>0: mpl = (1.0/float(d)) else: mpl = 0.0 vlist[i] = [x * mpl for x in g] listw = vlist # self.plainTextEdit.insertPlainText("listw: %s\n" % listw) # return # S0 = sum(sum(filter(None, listw))) S0 = sum(sum(listw)) S1 = 0 rS = [0]*len(nb) cS = [0]*len(nb) for i in xrange(len(nb)): ij = nb[i] wij = listw[i] rS[i] = sum(wij) for 
j in xrange(len(ij)): dij = wij[j] ijj = ij[j] cS[ijj] = cS[ijj] + dij try: ijlkup = nb[ijj].index(i) dji = listw[ijj][ijlkup] except ValueError: dji = 0 S1 = S1 + (dij * dij) + (dij * dji) S2 = sum(power([x + y for x, y in zip(rS, cS)],2)) S02 = float(S0) * float(S0) n1 = n-1 n2 = n-2 n3 = n-3 nn = n*n x = self.datRead() if len(x)!=len(nb): return x = array(x) z = x-mean(x) zz = sum(power(z,2)) K = (len(x)*sum(power(z,4)))/power(zz,2) ans = empty([n]) for i in xrange(n): if cardnb[i]==0: ans[i] = 0 else: sm = 0 for j in xrange(cardnb[i]): k = int(nb[i][j]) wt = listw[i][j] tmp = z[k] sm = sm+(tmp*wt) ans[i] = sm lz = ans I = (float(n)/float(S0)) * ((sum(z * lz))/float(zz)) EI = (-1.0)/float(n1) if self.comboBox_4.currentText()=='randomization': VI = float(n) * (float(S1) * (float(nn) - 3.0 * float(n) + 3.0) - float(n) * float(S2) + 3.0 * float(S02)) tmp = float(K) * (float(S1) * (float(nn) - float(n)) - 2.0 * float(n) * float(S2) + 6.0 * float(S02)) if tmp>VI: self.plainTextEdit.insertPlainText('Kurtosis overflow, distribution of variable does not meet test assumptions\n') VI = (VI - tmp)/(float(n1) * float(n2) * float(n3) * float(S02)) tmp = (VI - power(EI,2)) if tmp<0: self.plainTextEdit.insertPlainText('Negative variance, ndistrib
ution of variable does not
meet test assumptions\n') VI = tmp else: VI = (float(nn) * float(S1) - float(n) * float(S2) + 3.0 * float(S02))/(float(S02) * (float(nn) - 1.0)) tmp = (VI - power(EI,2)) if tmp < 0: self.plainTextEdit.insertPlainText('Negative variance, ndistribution of variable does not meet test assumptions\n') VI = tmp ZI = (I - EI)/sqrt(VI) if self.comboBox_3.currentText()=='less': PrI = self.pnorm(ZI) elif self.comboBox_3.currentText()=='greater': PrI = 1.0-self.pnorm(ZI) else: PrI = 2.0*(1.0-self.pnorm(abs(ZI))) self.plainTextEdit.insertPlainText("Moran's I: %s\n" % I) self.plainTextEdit.insertPlainText("Expectation: %s\n" % EI) self.plainTextEdit.insertPlainText("Variance: %s\n" % VI) self.plainTextEdit.insertPlainText("Moran's I standard deviate: %s\n" % ZI) self.plainTextEdit.insertPlainText("p-value: %s\n" % PrI) ans = empty([n]) for i in xrange(n): if cardnb[i]==0: ans[i] = 0 else: sm = 0 for j in xrange(cardnb[i]): k = int(nb[i][j]) wt = listw[i][j] diff = x[i]-x[k] res = diff*diff sm = sm+(res*wt) ans[i] = sm res = ans C = (float(n1)/(2.0*float(S0))) * ((sum(res))/float(zz)) EC = 1.0 if self.comboBox_4.currentText()=='randomization': VC = (float(n1) * float(S1) * (float(nn) - 3.0 * float(n) + 3.0 - float(K) * float(n1))) VC = VC - ((1.0/4.0) * (float(n1) * float(S2) * (float(nn) + 3.0 * float(n) - 6.0 - float(K) * (float(nn) - float(n) + 2.0)))) VC = VC + (float(S02) * (float(nn) - 3.0 - float(K) * (power(n1,2)))) VC = VC/(float(n) * float(n2) * float(n3) * float(S02)) else: VC = ((2.0 * float(S1) + float(S2)) * float(n1) - 4.0 * float(S02))/(2.0 * (float(n) + 1.0) * float(S02)) ZC = (EC - C)/sqrt(VC) if self.comboBox_3.currentText()=='less': PrI = self.pnorm(ZC) elif self.comboBox_3.currentText()=='greater': PrI = 1.0-self.pnorm(ZC) else: PrI = 2.0*(1.0-self.pnorm(abs(ZC))) self.plainTextEdit.insertPlainText('\n\n') self.plainTextEdit.insertPlainText("Geary's c: %s\n" % C) self.plainTextEdit.insertPlainText("Expectation: %s\n" % EC) 
self.plainTextEdit.insertPlainText("Variance: %s\n" % VC) self.plainTextEdit.insertPlainText("Geary's c standard deviate: %s\n" % ZC) self.plainTextEdit.insertPlainText("p-value: %s\n" % PrI) QApplication.restoreOverrideCursor() def pnorm(self, z): return (1.0 + math.erf(z / sqrt(2.0))) / 2.0 def normpdf(x, mean, sd): var = float(sd)**2 pi = 3.1
def mm_loops(X, Y, Z):
    """Multiply X by the transpose of Y into the pre-allocated matrix Z.

    Computes Z[i][j] = sum_k X[i][k] * Y[j][k].  Z must already be a
    len(X)-by-len(Y) matrix with assignable (list) rows; it is filled in
    place and also returned for convenience.

    Fixed: ``xrange`` (a NameError on Python 3) replaced with ``range``,
    and the unused locals ``m``/``n`` removed.
    """
    for i in range(len(X)):
        xi = X[i]
        for j in range(len(Y)):
            yj = Y[j]
            # Dot product of row i of X with row j of Y.
            total = 0
            for k in range(len(yj)):
                total += xi[k] * yj[k]
            Z[i][j] = total
    return Z


def make_matrix(m, n):
    """Return an m-row matrix whose every row is the list [0, 1, ..., n-1].

    Rows are built with ``list(range(n))`` rather than the former bare
    ``range(n)`` so the rows remain mutable lists on Python 3 (a Python 3
    ``range`` object would make ``Z[i][j] = total`` fail); on Python 2
    the result is identical.
    """
    mat = []
    for i in range(m):
        mat.append(list(range(n)))
    return mat


if __name__ == '__main__':
    # Smoke benchmark: multiply a 200x200 matrix by itself (transposed).
    n = 200
    x = make_matrix(n, n)
    z = make_matrix(n, n)
    mm_loops(x, x, z)
one] try: Use(ve, error='should not raise').validate('x') except SchemaError as e: assert e.autos == ["ve('x') raised ValueError()"] assert e.errors == ['should not raise'] try: Use(se).validate('x') except SchemaError as e: assert e.autos == [None, 'first auto'] assert e.errors == [None, 'first error'] try: Use(se, error='second error').validate('x') except SchemaError as e: assert e.autos == [None, 'first auto'] assert e.errors == ['second error', 'first error'] def test_or_error_handling(): try: Or(ve).validate('x') except SchemaError as e: assert e.autos[0].startswith('Or(') assert e.autos[0].endswith(") did not validate 'x'") assert e.autos[1] == "ve('x') raised ValueError()" assert len(e.autos) == 2 assert e.errors == [None, None] try: Or(ve, error='should not raise').validate('x') except SchemaError as e: assert e.autos[0].startswith('Or(') assert e.autos[0].endswith(") did not validate 'x'") assert e.autos[1] == "ve('x') raised ValueError()" assert len(e.autos) == 2 assert e.errors == ['should not raise', 'should not raise'] try: Or('o').validate('x') except SchemaError as e: assert e.autos == ["Or('o') did not validate 'x'", "'o' does not match 'x'"] assert e.errors == [None, None] try: Or('o', error='second error').validate('x') except SchemaError as e: assert e.autos == ["Or('o') did not validate 'x'", "'o' does not match 'x'"] assert e.errors == ['second error', 'second error'] def test_and_error_handling(): try: And(ve).validate('x') except SchemaError as e: assert e.autos == ["ve('x') raised ValueError()"] assert e.errors == [None] try: And(ve, error='should not raise').validate('x') except SchemaError as e: assert e.autos == ["ve('x') raised ValueError()"] assert e.errors == ['should not raise'] try: And(str, se).validate('x') except SchemaError as e: assert e.autos == [None, 'first auto'] assert e.errors == [None, 'first error'] try: And(str, se, error='second error').validate('x') except SchemaError as e: assert e.autos == [None, 'first auto'] 
assert e.errors == ['second error', 'first error'] def test_schema_error_handling(): try: Schema(Use(ve)).validate('x') except SchemaError as e: assert e.autos == [None, "ve('x') raised ValueError()"] assert e.errors == [None, None] try: Schema(Use(ve), error='should not raise').validate('x') except SchemaError as e: assert e.autos == [None, "ve('x') raised ValueError()"] assert e.errors == ['should not raise', None] try: Schema(Use(se)).validate('x') except SchemaError as e: assert e.autos == [None, None, 'first auto'] assert e.errors == [None, None, 'first error'] try: Schema(Use(se), error='second error').validate('x') except SchemaError as e: assert e.autos == [None, None, 'first auto'] assert e.errors == ['second error', None, 'first error'] def test_use_json(): import json gist_schema = Schema(And(Use(json.loads), # first convert from JSON {Optional('description'): basestring, 'public': bool, 'files': {basestring: {'content': basestring}}})) gist = '''{"description": "the description for this gist", "public": true, "files": { "file1.txt": {"content": "String file contents"}, "other.txt": {"content": "Another file contents"}}}''' assert gist_schema.validate(gist) def test_error_reporting(): s = Schema({'<files>': [Use(open, error='<files> should be readable')], '<path>': And(os.path.exists, error='<path> should exist'), '--count': Or(None, And(Use(int), lambda n: 0 < n < 5), error='--count should be integer 0 < n < 5')}, error='Error:') s.validate({'<files>': [], '<path>': './', '--count': 3}) try: s.validate({'<files>': [], '<path>': './', '--count': '10'}) except SchemaError as e: assert e.code == 'Error:\n--count should be integer 0 < n < 5' try: s.validate({'<files>': [], '<path>': './hai', '--count': '2'}) except SchemaError as e: assert e.code == 'Error:\n<path> should exist' try: s.validate({'<files>': ['hai'], '<path>': './', '--count': '2'}) except SchemaError as e: assert e.code == 'Error:\n<files> should be readable' def test_schema_repr(): # what 
about repr with `error`s? schema = Schema([Or(None, And(str, Use(float)))]) repr_ = "Schema([Or(None, And(<type 'str'>, Use(<type 'float'>)))])" # in Python 3 repr contains <class 'str'>, not <type 'str'> assert repr(schema).replace('class', 'type') == repr_ def test_validate_object(): schema = Schema({object: str}) assert schema.validate({42: 'str'}) == {42: 'str'} with SE: schema.validate({42: 777}) def test_issue_9_prioritized_key_comparison(): validate = Schema({'key': 42, object: 42}).validate assert validate({'key': 42, 777: 42}) == {'key': 42, 777: 42} def test_issue_9_prioritized_key_comparison_in_dicts(): # http://stackoverflow.com/q
uestions/14588098/docopt-schema-validation s = Schema({'ID': Use(int, error='ID should be an int'), 'FILE': Or(None, Use(open, error='FILE should be readable')), Optional(str): object}) data = {'ID': 10, 'FILE': None, 'other': 'other', 'other2': 'other2'} assert s.validate(data) == data data = {'ID': 10, 'FILE': None} assert s.validate(data) == data def test_missing_keys_exception_with_non_str_dict_keys(): s = Schema({And(str, Use(str.lower
), 'name'): And(str, len)}) with SE: s.validate(dict()) with SE: try: Schema({1: 'x'}).validate(dict()) except SchemaMissingKeyError as e: assert e.args[0] == "Missing keys: 1" raise # PyPy does have a __name__ attribute for its callables. @mark.skipif(platform.python_implementation() == 'PyPy', reason='Running on PyPy') def test_issue_56_cant_rely_on_callables_to_have_name(): s = Schema(methodcaller('endswith', '.csv')) assert s.validate('test.csv') == 'test.csv' with SE: try: s.validate('test.py') except SchemaError as e: assert "operator.methodcaller" in e.args[0] raise def test_exception_handling_with_bad_validators(): BadValidator = namedtuple("BadValidator", ["validate"]) s = Schema(BadValidator("haha")) with SE: try: s.validate("test") except SchemaError as e: assert "TypeError" in e.args[0] raise def test_issue_83_iterable_validation_return_type(): TestSetType = type("TestSetType", (set,), dict()) data = TestSetType(["test", "strings"]) s = Schema(set([str])) assert isinstance(s.validate(data), TestSetType) def test_optional_key_convert_failed_randomly_while_with_another_optional_object(): """ In this test, created_at string "2015-10-10 00:00:00" is expected to be converted to a datetime instance. - it works when the schema is s = Schema({ 'created_at': _datetime_validator, Optional(basestring): object, }) - but when wrapping the key 'created_at' with Optional, it fails randomly :return: """ import datetime fmt = '%Y-%m-%d %H:%M:%S' _datetime_validator = Or(None, Use(lambda i: datetime.datetime.strptime(i, fmt))) # FIXME given tests enough for i in range(1024): s = Schema({ Optional('created_at'): _datetime_validator, Optional('updated_at'): _
S, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """Module to build p
ipeline fragment that produces given PCollections. For internal use only; no backwards-compatibility guarantees. """ from __future__ import absolute_import import apache_beam as beam from apache_beam.pipeline i
mport PipelineVisitor from apache_beam.testing.test_stream import TestStream class PipelineFragment(object): """A fragment of a pipeline definition. A pipeline fragment is built from the original pipeline definition to include only PTransforms that are necessary to produce the given PCollections. """ def __init__(self, pcolls, options=None): """Constructor of PipelineFragment. Args: pcolls: (List[PCollection]) a list of PCollections to build pipeline fragment for. options: (PipelineOptions) the pipeline options for the implicit pipeline run. """ assert len(pcolls) > 0, ( 'Need at least 1 PCollection as the target data to build a pipeline ' 'fragment that produces it.') for pcoll in pcolls: assert isinstance(pcoll, beam.pvalue.PCollection), ( '{} is not an apache_beam.pvalue.PCollection.'.format(pcoll)) # No modification to self._user_pipeline is allowed. self._user_pipeline = pcolls[0].pipeline # These are user PCollections. Do not use them to deduce anything that # will be executed by any runner. Instead, use # `self._runner_pcolls_to_user_pcolls.keys()` to get copied PCollections. self._pcolls = set(pcolls) for pcoll in self._pcolls: assert pcoll.pipeline is self._user_pipeline, ( '{} belongs to a different user pipeline than other PCollections ' 'given and cannot be used to build a pipeline fragment that produces ' 'the given PCollections.'.format(pcoll)) self._options = options # A copied pipeline instance for modification without changing the user # pipeline instance held by the end user. This instance can be processed # into a pipeline fragment that later run by the underlying runner. 
self._runner_pipeline = self._build_runner_pipeline() _, self._context = self._runner_pipeline.to_runner_api( return_context=True, use_fake_coders=True) from apache_beam.runners.interactive import pipeline_instrument as instr self._runner_pcoll_to_id = instr.pcolls_to_pcoll_id( self._runner_pipeline, self._context) # Correlate components in the runner pipeline to components in the user # pipeline. The target pcolls are the pcolls given and defined in the user # pipeline. self._id_to_target_pcoll = self._calculate_target_pcoll_ids() self._label_to_user_transform = self._calculate_user_transform_labels() # Below will give us the 1:1 correlation between # PCollections/AppliedPTransforms from the copied runner pipeline and # PCollections/AppliedPTransforms from the user pipeline. # (Dict[PCollection, PCollection]) ( self._runner_pcolls_to_user_pcolls, # (Dict[AppliedPTransform, AppliedPTransform]) self._runner_transforms_to_user_transforms ) = self._build_correlation_between_pipelines( self._runner_pcoll_to_id, self._id_to_target_pcoll, self._label_to_user_transform) # Below are operated on the runner pipeline. 
(self._necessary_transforms, self._necessary_pcollections) = self._mark_necessary_transforms_and_pcolls( self._runner_pcolls_to_user_pcolls) self._runner_pipeline = self._prune_runner_pipeline_to_fragment( self._runner_pipeline, self._necessary_transforms) def deduce_fragment(self): """Deduce the pipeline fragment as an apache_beam.Pipeline instance.""" return beam.pipeline.Pipeline.from_runner_api( self._runner_pipeline.to_runner_api(use_fake_coders=True), self._runner_pipeline.runner, self._options) def run(self, display_pipeline_graph=False, use_cache=True, blocking=False): """Shorthand to run the pipeline fragment.""" try: preserved_skip_display = self._runner_pipeline.runner._skip_display preserved_force_compute = self._runner_pipeline.runner._force_compute preserved_blocking = self._runner_pipeline.runner._blocking self._runner_pipeline.runner._skip_display = not display_pipeline_graph self._runner_pipeline.runner._force_compute = not use_cache self._runner_pipeline.runner._blocking = blocking return self.deduce_fragment().run() finally: self._runner_pipeline.runner._skip_display = preserved_skip_display self._runner_pipeline.runner._force_compute = preserved_force_compute self._runner_pipeline.runner._blocking = preserved_blocking def _build_runner_pipeline(self): return beam.pipeline.Pipeline.from_runner_api( self._user_pipeline.to_runner_api(use_fake_coders=True), self._user_pipeline.runner, self._options) def _calculate_target_pcoll_ids(self): pcoll_id_to_target_pcoll = {} for pcoll in self._pcolls: pcoll_id_to_target_pcoll[self._runner_pcoll_to_id.get(str(pcoll), '')] = pcoll return pcoll_id_to_target_pcoll def _calculate_user_transform_labels(self): label_to_user_transform = {} class UserTransformVisitor(PipelineVisitor): def enter_composite_transform(self, transform_node): self.visit_transform(transform_node) def visit_transform(self, transform_node): if transform_node is not None: label_to_user_transform[transform_node.full_label] = transform_node v = 
UserTransformVisitor() self._runner_pipeline.visit(v) return label_to_user_transform def _build_correlation_between_pipelines( self, runner_pcoll_to_id, id_to_target_pcoll, label_to_user_transform): runner_pcolls_to_user_pcolls = {} runner_transforms_to_user_transforms = {} class CorrelationVisitor(PipelineVisitor): def enter_composite_transform(self, transform_node): self.visit_transform(transform_node) def visit_transform(self, transform_node): self._process_transform(transform_node) for in_pcoll in transform_node.inputs: self._process_pcoll(in_pcoll) for out_pcoll in transform_node.outputs.values(): self._process_pcoll(out_pcoll) def _process_pcoll(self, pcoll): pcoll_id = runner_pcoll_to_id.get(str(pcoll), '') if pcoll_id in id_to_target_pcoll: runner_pcolls_to_user_pcolls[pcoll] = (id_to_target_pcoll[pcoll_id]) def _process_transform(self, transform_node): if transform_node.full_label in label_to_user_transform: runner_transforms_to_user_transforms[transform_node] = ( label_to_user_transform[transform_node.full_label]) v = CorrelationVisitor() self._runner_pipeline.visit(v) return runner_pcolls_to_user_pcolls, runner_transforms_to_user_transforms def _mark_necessary_transforms_and_pcolls(self, runner_pcolls_to_user_pcolls): necessary_transforms = set() all_inputs = set() updated_all_inputs = set(runner_pcolls_to_user_pcolls.keys()) # Do this until no more new PCollection is recorded. while len(updated_all_inputs) != len(all_inputs): all_inputs = set(updated_all_inputs) for pcoll in all_inputs: producer = pcoll.producer while producer: if producer in necessary_transforms: break # Mark the AppliedPTransform as necessary. necessary_transforms.add(producer) # Record all necessary input and side input PCollections. 
updated_all_inputs.update(producer.inputs) # pylint: disable=map-builtin-not-iterating side_input_pvalues = set( map(lambda side_input: side_input.pvalue, producer.side_inputs)) updated_all_inputs.update(side_input_pvalues) # Go to its parent AppliedPTransform. producer = producer.parent return nec
"""This test checks for correct wait4() behavior.

For a normally-exiting child, os.wait4() must report the child's pid, a
zero exit status, and a non-empty resource-usage tuple.
"""
import os
import time
from test.fork_wait import ForkWait
from test.test_support import run_unittest, reap_children, get_attribute

# If either of these do not exist, skip this test.
get_attribute(os, 'fork')
get_attribute(os, 'wait4')


class Wait4Test(ForkWait):

    def wait_impl(self, cpid):
        # Poll with WNOHANG instead of blocking: wait4() shouldn't hang,
        # but some of the buildbots seem to hang in the forking tests, so
        # give the child up to ~10 seconds to be reaped.
        max_polls = 10
        for _attempt in range(max_polls):
            spid, status, rusage = os.wait4(cpid, os.WNOHANG)
            if spid == cpid:
                break
            time.sleep(1.0)

        self.assertEqual(spid, cpid)
        self.assertEqual(status, 0,
                         "cause = %d, exit = %d" % (status&0xff, status>>8))
        self.assertTrue(rusage)


def test_main():
    # Run the suite, then clean up any straggler child processes.
    run_unittest(Wait4Test)
    reap_children()


if __name__ == "__main__":
    test_main()
from flask import render_template
from app import app, db, models
import json


@app.route('/')
@app.route('/index')
def index():
    """Render the landing page with the 50 most frequent words.

    Groups the Words table by word, orders by occurrence count
    descending, and passes parallel ``words``/``count`` lists to the
    template.
    """
    # Use SQL LIMIT instead of the former ``.all()[:50]``, which fetched
    # every grouped row from the database just to discard most of them.
    # NOTE(review): ordering by the raw string "total DESC" is deprecated
    # in newer SQLAlchemy; confirm the project's version before switching
    # to db.desc("total").
    data = (db.session.query(models.Words,
                             db.func.count(models.Words.id).label("total"))
            .group_by(models.Words.word)
            .order_by("total DESC")
            .limit(50)
            .all())

    # Each row is a (Words, total) tuple.
    words = [word_row.word for word_row, total in data]
    count = [total for word_row, total in data]
    return render_template('index.html', words=words, count=count)
, gxparam_extra_kwargs, default=None): from argparse import FileType """Based on a type, convert to appropriate gxtp class """ if default is None and (param.type in (int, float)): default = 0 if param.type == int: mn = None mx = None if param.choices is not None: mn =
min(param.choices) mx = max(param.choices) gxparam = gxtp.IntegerParam(flag, default, label=label, min=mn, max=mx, num_dashes=num_dashes, **gxparam_extra_kwargs) elif param.choices is not None: choices = {k: k for k in param.choices} gxparam = gxtp.SelectParam(flag, default=default, label=label, num_dashes=num_dashes, options=choices, **gxp
aram_extra_kwargs) elif param.type == float: gxparam = gxtp.FloatParam(flag, default, label=label, num_dashes=num_dashes, **gxparam_extra_kwargs) elif param.type is None or param.type == str: gxparam = gxtp.TextParam(flag, value=default, label=label, num_dashes=num_dashes, **gxparam_extra_kwargs) elif param.type == locate('file'): gxparam = gxtp.DataParam(flag, label=label, num_dashes=num_dashes, **gxparam_extra_kwargs) elif isinstance(param.type, FileType): if 'w' in param.type._mode: gxparam = gxtp.OutputParameter( flag, format='data', default=default, label=label, num_dashes=num_dashes, **gxparam_extra_kwargs ) else: gxparam = gxtp.DataParam( flag, default=default, label=label, num_dashes=num_dashes, **gxparam_extra_kwargs ) else: gxparam = None return gxparam def __args_from_nargs(self, param, repeat_name, repeat_var_name, positional, flag): """Based on param.nargs, return the appropriate overrides """ gxrepeat_args = [] gxrepeat_kwargs = {} gxrepeat_cli_after = None gxrepeat_cli_before = None gxrepeat_cli_actual = None gxparam_cli_before = None gxparam_cli_after = None if positional: gxrepeat_cli_actual = '"$%s"' % (repeat_var_name) else: gxrepeat_cli_actual = '%s "$%s"' % (param.option_strings[0], repeat_var_name) if isinstance(param.nargs, int): # N (an integer). N arguments from the command line will be # gathered together into a list. For example: if param.nargs > 1: gxrepeat_args = [repeat_name, 'repeat_title'] gxrepeat_kwargs = { 'min': param.nargs, 'max': param.nargs, } else: # If we have only one, we don't want a gxrepeat, so we leave well # enough alone gxrepeat_args = None elif param.nargs == '?': # '?'. One argument will be consumed from the command line if # possible, and produced as a single item. If no command-line # argument is present, the value from default will be produced. # Note that for optional arguments, there is an additional case - # the option string is present but not followed by a command-line # argument. 
In this case the value from const will be produced # This does NOT provide a way to access the value in const, but # that seems like a HORRIBLE idea anyway. Seriously, who does that. gxparam_cli_before = """\n#if $%s and $%s is not None:""" % (flag, flag) gxparam_cli_after = '#end if' gxrepeat_args = None elif param.nargs is None: # Very similar to '?' but without the case of "optional + specified # withouth an argument" being OK # # This has changed over time, we're (probably) going overboard here. gxparam_cli_before = """\n#if $%s and $%s is not None:""" % (flag, flag) gxparam_cli_after = '#end if' gxrepeat_args = None elif param.nargs == '*': # '*'. All command-line arguments present are gathered into a list. # Note that it generally doesn't make much sense to have more than # one positional argument with nargs='*', but multiple optional # arguments with nargs='*' is possible. For example: # This needs to be handled with a # set files = '" "'.join( [ str( $file ) for $file in $inputB ] ) gxrepeat_args = [repeat_name, 'repeat_title'] # gxrepeat_cli_after = '#end if\n' gxrepeat_cli_after = '' gxrepeat_cli_before = """\n#set %s = '" "'.join([ str($var.%s) for $var in $%s ])""" % (repeat_var_name, flag, repeat_name) elif param.nargs == '+': # '+'. Just like '*', all command-line args present are gathered # into a list. Additionally, an error message will be generated if # there wasn't at least one command-line argument present. 
For # example: gxrepeat_args = [repeat_name, 'repeat_title'] gxrepeat_kwargs = {'min': 1} gxrepeat_cli_after = '' gxrepeat_cli_before = """\n#set %s = '" "'.join([ str($var.%s) for $var in $%s ])""" % (repeat_var_name, flag, repeat_name) else: raise Exception("TODO: Handle argparse.REMAINDER") return (gxrepeat_args, gxrepeat_kwargs, gxrepeat_cli_after, gxrepeat_cli_before, gxrepeat_cli_actual, gxparam_cli_before, gxparam_cli_after) def __init__(self): self.repeat_count = 0 self.positional_count = Counter() def _VersionAction(self, param, tool=None): # passing tool is TERRIBLE, I know. # TODO handle their templating of version # This is kinda ugly but meh. tool.root.attrib['version'] = param.version # Count the repeats for unique names # TODO improve def _StoreAction(self, param, tool=None): """ Parse argparse arguments action type of "store", the default. param: argparse.Action """ gxparam = None gxrepeat = None self.repeat_count += 1 gxparam_extra_kwargs = {} if not param.required: gxparam_extra_kwargs['optional'] = True # Positional arguments don't have an option strings positional = len(param.option_strings) == 0 if not positional: flag = max(param.option_strings, key=len) # Pick the longest of the options strings else: flag = '' self.positional_count['param.dest'] += 1 repeat_name = 'repeat_%s' % self.repeat_count repeat_var_name = 'repeat_var_%s' % self.repeat_count # TODO: Replace with logic supporting characters other than - flag_wo_dashes = flag.lstrip('-') num_dashes = len(flag) - len(flag_wo_dashes) # Moved because needed in developing repeat CLI if positional: v = self.positional_count[param.dest] flag_wo_dashes = '%s%s' % (param.dest, '_' + str(v) if v > 1 else '') # SO unclean gxparam_extra_kwargs['positional'] = True # Figure out parameters and overrides from param.nargs, mainly. # This is really unpleasant. 
(gxrepeat_args, gxrepeat_kwargs, gxrepeat_cli_after, gxrepeat_cli_before, gxrepeat_cli_actual, gxparam_cli_before, gxparam_cli_after) = \ self.__args_from_nargs(param, repeat_name, repeat_var_name, positional, flag_wo_dashes) # Build the gxrepeat if it's needed if gxrepeat_args is not None: gxrepeat = gxtp.Repeat(*gxrepeat_args, **gxrepeat_kwargs) if gxrepeat_cli_before is not None: gxrepeat.command_line_before_override = gxrepeat_cli_before if gxrepeat_cli_after is not None: gxrepeat.command_line_after_override = gxrepeat_cli_after i
from model import Event
from geo.geomodel import geotypes


def get(handler, response):
    """Populate ``response.events`` with events near the requested point.

    Reads the ``lat`` and ``lng`` request parameters from *handler*,
    converts them to floats, and runs a proximity query over all Event
    entities centred on that point.
    """
    latitude = float(handler.request.get('lat'))
    longitude = float(handler.request.get('lng'))
    center = geotypes.Point(latitude, longitude)
    response.events = Event.proximity_fetch(Event.all(), center)
# Copyright (c) 2015-2016 Cisco Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.

import click

from molecule import util
from molecule.command import base
from molecule.dependency import ansible_galaxy
from molecule.dependency import shell


class Dependency(base.Base):
    def execute(self, exit=True):
        """
        Execute the actions that should run prior to a converge and return a
        tuple.

        :param exit: (Unused) Provided to complete method signature.
        :return: Return a tuple provided by :meth:`.AnsiblePlaybook.execute`.
        """
        debug = self.args.get('debug')
        if self.molecule.state.installed_deps:
            # Dependencies were fetched on a previous run; nothing to do.
            return (None, None)

        dependency_name = self.molecule.dependency
        if dependency_name == 'galaxy':
            dd = self.molecule.config.config.get('dependency')
            # Only run ansible-galaxy when a requirements file is configured.
            if dd.get('requirements_file'):
                msg = "Downloading dependencies with '{}'...".format(
                    dependency_name)
                util.print_info(msg)
                g = ansible_galaxy.AnsibleGalaxy(
                    self.molecule.config.config, debug=debug)
                g.execute()
                self.molecule.state.change_state('installed_deps', True)
        elif dependency_name == 'shell':
            dd = self.molecule.config.config.get('dependency')
            # Only run the shell dependency manager when a command is set.
            if dd.get('command'):
                msg = "Downloading dependencies with '{}'...".format(
                    dependency_name)
                util.print_info(msg)
                s = shell.Shell(self.molecule.config.config, debug=debug)
                s.execute()
                self.molecule.state.change_state('installed_deps', True)

        return (None, None)


@click.command()
@click.pass_context
def dependency(ctx):  # pragma: no cover
    """ Perform dependent actions on the current role. """
    d = Dependency(ctx.obj.get('args'), {})
    # Fix: removed a stray `d.execute` expression statement that merely
    # accessed the bound method without calling it (a silent no-op).
    util.sysexit(d.execute()[0])
import save import client def start(): def callback(): client.client.chat('/novice') found_nations = [ (name, style, id) for name, style, id in client.get_nations() if name == 'Poles' ] if found_nations: name, style, id = found_nations[0] print 'change nation to', name, style, id client.freeciv.func.
set_nation_settings(id, '
Player', style, 2) return True save.load_game('data/tutorial.sav', before_callback=callback)
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.

import os

import pytest

from translate.filters import checks

from django.db import IntegrityError

from pytest_pootle.factories import LanguageDBFactory

from pootle.core.delegate import revision
from pootle_app.models import Directory
from pootle_language.models import Language
from pootle_project.models import Project
from pootle_store.models import Store
from pootle_translationproject.models import TranslationProject


@pytest.mark.django_db
def test_tp_create_fail(po_directory, tutorial, english):
    """A TranslationProject needs a language, a project, and a unique
    (project, language) pair."""
    # Trying to create a TP with no Language raises a RelatedObjectDoesNotExist
    # which can be caught with Language.DoesNotExist
    with pytest.raises(Language.DoesNotExist):
        TranslationProject.objects.create()

    # TP needs a project set too...
    with pytest.raises(Project.DoesNotExist):
        TranslationProject.objects.create(language=english)

    # There is already an english tutorial was automagically set up
    with pytest.raises(IntegrityError):
        TranslationProject.objects.create(project=tutorial, language=english)


@pytest.mark.django_db
def test_tp_create_parent_dirs(tp0):
    """create_parent_dirs materialises every intermediate Directory of a
    nested pootle_path and returns the immediate parent."""
    parent = tp0.create_parent_dirs("%sfoo/bar/baz.po" % tp0.pootle_path)
    assert (
        parent
        == Directory.objects.get(
            pootle_path="%sfoo/bar/" % tp0.pootle_path))


@pytest.mark.django_db
def test_tp_create_templates(project0_nongnu, project0, templates,
                             no_templates_tps, complex_ttk):
    """init_from_templates copies every template store, with identical
    source/target units, into a freshly created TP."""
    # As there is a tutorial template it will automatically create stores for
    # our new TP
    template_tp = TranslationProject.objects.create(
        language=templates, project=project0)
    template = Store.objects.create(
        name="foo.pot",
        translation_project=template_tp,
        parent=template_tp.directory)
    template.update(complex_ttk)
    tp = TranslationProject.objects.create(
        project=project0, language=LanguageDBFactory())
    tp.init_from_templates()
    assert tp.stores.count() == template_tp.stores.count()
    # Unit-by-unit comparison of (source, target) pairs.
    assert (
        [(s, t)
         for s, t
         in template_tp.stores.first().units.values_list(
             "source_f", "target_f")]
        == [(s, t)
            for s, t
            in tp.stores.first().units.values_list("source_f", "target_f")])


@pytest.mark.django_db
def test_tp_init_from_template_po(project0, templates, no_templates_tps,
                                  complex_ttk):
    """A gnu-ish project renames `template.pot` to `<langcode>.po` when a TP
    is initialised from templates."""
    # When initing a tp from a file called `template.pot` the resulting
    # store should be called `langcode.po` if the project is gnuish
    project0.config["pootle_fs.translation_mappings"] = dict(
        default="/<dir_path>/<language_code>.<ext>")
    template_tp = TranslationProject.objects.create(
        language=templates, project=project0)
    template = Store.objects.create(
        name="template.pot",
        translation_project=template_tp,
        parent=template_tp.directory)
    template.update(complex_ttk)
    tp = TranslationProject.objects.create(
        project=project0, language=LanguageDBFactory())
    tp.init_from_templates()
    store = tp.stores.get()
    assert store.name == "%s.po" % tp.language.code


@pytest.mark.django_db
def test_tp_create_with_files(project0_directory, project0, store0, settings):
    """Creating a TP succeeds when matching files already exist on disk."""
    # lets add some files by hand
    trans_dir = settings.POOTLE_TRANSLATION_DIRECTORY
    language = LanguageDBFactory()
    tp_dir = os.path.join(trans_dir, "%s/project0" % language.code)
    os.makedirs(tp_dir)
    with open(os.path.join(tp_dir, "store0.po"), "w") as f:
        f.write(store0.serialize())
    TranslationProject.objects.create(project=project0, language=language)


@pytest.mark.django_db
def test_tp_stats_created_from_template(po_directory, templates, tutorial):
    """A TP freshly initialised from templates reports untranslated stats."""
    language = LanguageDBFactory(code="foolang")
    tp = TranslationProject.objects.create(language=language, project=tutorial)
    tp.init_from_templates()
    assert tp.stores.all().count() == 1
    stats = tp.data_tool.get_stats()
    assert stats['total'] == 2  # there are 2 words in test template
    assert stats['translated'] == 0
    assert stats['fuzzy'] == 0
    assert stats['suggestions'] == 0
    assert stats['critical'] == 0


@pytest.mark.django_db
def test_can_be_inited_from_templates(po_directory, tutorial, templates):
    """A TP can be inited from templates when the project has a templates TP."""
    language = LanguageDBFactory()
    tp = TranslationProject(project=tutorial, language=language)
    assert tp.can_be_inited_from_templates()


@pytest.mark.django_db
def test_cannot_be_inited_from_templates(project0, no_templates_tps):
    """Without a templates TP, init-from-templates is not available."""
    language = LanguageDBFactory()
    tp = TranslationProject(project=project0, language=language)
    assert not tp.can_be_inited_from_templates()


@pytest.mark.django_db
def test_tp_checker(po_directory, tp_checker_tests):
    """The TP's quality checker matches the project's configured checkstyle,
    falling back to StandardChecker."""
    language = Language.objects.get(code="language0")
    checker_name_, project = tp_checker_tests
    tp = TranslationProject.objects.create(project=project, language=language)

    checkerclasses = [
        checks.projectcheckers.get(tp.project.checkstyle,
                                   checks.StandardChecker)
    ]
    assert [x.__class__ for x in tp.checker.checkers] == checkerclasses


@pytest.mark.django_db
def test_tp_cache_on_delete(tp0):
    """Deleting a TP bumps the project directory's stats revision."""
    proj_revision = revision.get(
        tp0.project.directory.__class__)(
            tp0.project.directory)
    orig_revision = proj_revision.get("stats")
    tp0.delete()
    assert (
        proj_revision.get("stats")
        != orig_revision)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import pytest
import numpy as np

import torch.nn as nn
import torch as T
from torch.autograd import Variable as var
import torch.nn.functional as F
from torch.nn.utils import clip_grad_norm_
import torch.optim as optim
# NOTE(review): numpy is imported twice (as np above and again here);
# harmless but redundant.
import numpy as np

import sys
import os
import math
import time
sys.path.insert(0, '.')

import functools

from dnc import DNC
from test_utils import generate_data, criterion


def test_rnn_1():
    """Smoke-test a minimal single-layer DNC: one forward/backward pass and
    a check of the output/hidden/memory/read-vector shapes."""
    T.manual_seed(1111)

    input_size = 100
    hidden_size = 100
    rnn_type = 'rnn'
    num_layers = 1
    num_hidden_layers = 1
    dropout = 0
    nr_cells = 1
    cell_size = 1
    read_heads = 1
    gpu_id = -1  # -1 => CPU
    debug = True
    lr = 0.001
    sequence_max_length = 10
    batch_size = 10
    cuda = gpu_id
    clip = 10
    length = 10

    rnn = DNC(
        input_size=input_size,
        hidden_size=hidden_size,
        rnn_type=rnn_type,
        num_layers=num_layers,
        num_hidden_layers=num_hidden_layers,
        dropout=dropout,
        nr_cells=nr_cells,
        cell_size=cell_size,
        read_heads=read_heads,
        gpu_id=gpu_id,
        debug=debug
    )

    optimizer = optim.Adam(rnn.parameters(), lr=lr)
    optimizer.zero_grad()

    input_data, target_output = generate_data(batch_size, length, input_size, cuda)
    # Make the target time-major to match the transposed network output.
    target_output = target_output.transpose(0, 1).contiguous()

    output, (chx, mhx, rv), v = rnn(input_data, None)
    output = output.transpose(0, 1)

    loss = criterion((output), target_output)
    loss.backward()

    T.nn.utils.clip_grad_norm_(rnn.parameters(), clip)
    optimizer.step()

    # Expected shapes: (seq_len, batch, input_size) target; per-layer hidden
    # state; (batch, nr_cells, cell_size) memory; (batch, read_heads*cell_size)
    # read vectors. Sequence length 21 comes from generate_data — TODO confirm.
    assert target_output.size() == T.Size([21, 10, 100])
    assert chx[0][0].size() == T.Size([10,100])
    assert mhx['memory'].size() == T.Size([10,1,1])
    assert rv.size() == T.Size([10, 1])


def test_rnn_n():
    """Same smoke test with a deeper DNC (3 layers, 5 hidden layers each,
    12x17 memory, 3 read heads) to exercise multi-layer shape handling."""
    T.manual_seed(1111)

    input_size = 100
    hidden_size = 100
    rnn_type = 'rnn'
    num_layers = 3
    num_hidden_layers = 5
    dropout = 0.2
    nr_cells = 12
    cell_size = 17
    read_heads = 3
    gpu_id = -1
    debug = True
    lr = 0.001
    sequence_max_length = 10
    batch_size = 10
    cuda = gpu_id
    clip = 20
    length = 13

    rnn = DNC(
        input_size=input_size,
        hidden_size=hidden_size,
        rnn_type=rnn_type,
        num_layers=num_layers,
        num_hidden_layers=num_hidden_layers,
        dropout=dropout,
        nr_cells=nr_cells,
        cell_size=cell_size,
        read_heads=read_heads,
        gpu_id=gpu_id,
        debug=debug
    )

    optimizer = optim.Adam(rnn.parameters(), lr=lr)
    optimizer.zero_grad()

    input_data, target_output = generate_data(batch_size, length, input_size, cuda)
    target_output = target_output.transpose(0, 1).contiguous()

    output, (chx, mhx, rv), v = rnn(input_data, None)
    output = output.transpose(0, 1)

    loss = criterion((output), target_output)
    loss.backward()

    T.nn.utils.clip_grad_norm_(rnn.parameters(), clip)
    optimizer.step()

    # read vector width = read_heads * cell_size = 3 * 17 = 51
    assert target_output.size() == T.Size([27, 10, 100])
    assert chx[1].size() == T.Size([num_hidden_layers,10,100])
    assert mhx['memory'].size() == T.Size([10,12,17])
    assert rv.size() == T.Size([10, 51])


def test_rnn_no_memory_pass():
    """Run the DNC repeatedly with pass_through_memory=False, threading the
    (hidden, memory, read-vector) state manually; read vectors should then
    stay None."""
    T.manual_seed(1111)

    input_size = 100
    hidden_size = 100
    rnn_type = 'rnn'
    num_layers = 3
    num_hidden_layers = 5
    dropout = 0.2
    nr_cells = 12
    cell_size = 17
    read_heads = 3
    gpu_id = -1
    debug = True
    lr = 0.001
    sequence_max_length = 10
    batch_size = 10
    cuda = gpu_id
    clip = 20
    length = 13

    rnn = DNC(
        input_size=input_size,
        hidden_size=hidden_size,
        rnn_type=rnn_type,
        num_layers=num_layers,
        num_hidden_layers=num_hidden_layers,
        dropout=dropout,
        nr_cells=nr_cells,
        cell_size=cell_size,
        read_heads=read_heads,
        gpu_id=gpu_id,
        debug=debug
    )

    optimizer = optim.Adam(rnn.parameters(), lr=lr)
    optimizer.zero_grad()

    input_data, target_output = generate_data(batch_size, length, input_size, cuda)
    target_output = target_output.transpose(0, 1).contiguous()

    (chx, mhx, rv) = (None, None, None)
    outputs = []
    # Six passes over the same input, carrying the state forward each time.
    for x in range(6):
        output, (chx, mhx, rv), v = rnn(input_data, (chx, mhx, rv), pass_through_memory=False)
        output = output.transpose(0, 1)
        outputs.append(output)

    # Sum the per-pass outputs into a single prediction.
    output = functools.reduce(lambda x,y: x + y, outputs)
    loss = criterion((output), target_output)
    loss.backward()

    T.nn.utils.clip_grad_norm_(rnn.parameters(), clip)
    optimizer.step()

    assert target_output.size() == T.Size([27, 10, 100])
    assert chx[1].size() == T.Size([num_hidden_layers,10,100])
    assert mhx['memory'].size() == T.Size([10,12,17])
    # NOTE(review): PEP 8 prefers `rv is None`; left unchanged here.
    assert rv == None
import fileinput
import operator
from collections import defaultdict

# Register-machine interpreter for instructions of the form
#   "<reg> inc|dec <amount> if <reg> <cmp> <value>"
# Registers spring into existence holding 0.
mem = defaultdict(int)

s1 = -100000  # largest register value after the program has run
s2 = -100000  # largest value held in any register at any point

# Dispatch table for the comparison mnemonics (replaces the six-way
# if/elif chain that repeated the same mem[...] lookup pattern).
_COMPARE = {
    '==': operator.eq,
    '<': operator.lt,
    '>': operator.gt,
    '<=': operator.le,
    '>=': operator.ge,
    '!=': operator.ne,
}


def condition(line):
    """Evaluate the trailing "if <reg> <cmp> <value>" clause of a
    whitespace-tokenized instruction and return its boolean result.

    Only the last three tokens of `line` are inspected. An unknown
    comparison operator now raises KeyError instead of silently
    returning None.
    """
    reg, cmp_op, value = line[-3:]
    return _COMPARE[cmp_op](mem[reg], int(value))


def _run():
    """Execute instructions from stdin/argv files and print both maxima."""
    global s1, s2
    for raw in fileinput.input():
        line = raw.split()
        if condition(line):
            if line[1] == "inc":
                mem[line[0]] += int(line[2])
            elif line[1] == "dec":
                mem[line[0]] -= int(line[2])
            # Track the running all-time maximum as values change.
            if mem[line[0]] > s2:
                s2 = mem[line[0]]
    # Final maximum over all registers.
    for k in mem.keys():
        if mem[k] > s1:
            s1 = mem[k]
    print(s1)
    print(s2)


if __name__ == '__main__':
    # Guarding the entry point makes the module importable (e.g. for
    # tests) without consuming stdin; running it as a script is unchanged.
    _run()
import numpy, sys
import scipy.linalg, scipy.special

'''
VBLinRegARD: Linear basis regression with automatic relevance priors using
Variational Bayes.

For more details on the algorithm see Apprendix of Roberts, McQuillan, Reece
& Aigrain, 2013, MNRAS, 354, 3639.

History:
2011: Translated by Thomas Evans from original Matlab code by Stephen J Roberts
2013: Documentation added by Suzanne Aigrain
'''

def logdet(a):
    '''
    Compute log of determinant of matrix a using Cholesky decomposition
    '''
    # First make sure that matrix is symmetric:
    # (diagnostic print only; computation proceeds regardless)
    if numpy.allclose(a.T,a) == False:
        print 'MATRIX NOT SYMMETRIC'
    # Second make sure that matrix is positive definite:
    eigenvalues = scipy.linalg.eigvalsh(a)
    if min(eigenvalues) <=0:
        print 'Matrix is NOT positive-definite'
        print ' min eigv = %.16f' % min(eigenvalues)
    # log|A| = 2 * sum(log(diag(chol(A)))) for symmetric positive-definite A.
    step1 = scipy.linalg.cholesky(a)
    step2 = numpy.diag(step1.T)
    out = 2. * numpy.sum(numpy.log(step2), axis=0)
    return out

def bayes_linear_fit_ard(X, y):
    '''
    Fit linear basis model with design matrix X to data y.
    Calling sequence:
    w, V, invV, logdetV, an, bn, E_a, L = bayes_linear_fit_ard(X, y)
    Inputs:
        X: design matrix (numpy.matrix, N samples x D basis functions --
           TODO confirm; the `X.T * X` below relies on matrix semantics)
        y: target data
    Outputs
        w: basis function weights
        ***need to document the others!***
        V, invV: posterior weight covariance and its inverse
        logdetV: log-determinant of V
        an, bn: noise-precision Gamma posterior parameters
        E_a: expected relevance (precision) of each basis function
        L: final variational lower bound
    '''
    # uninformative priors
    a0 = 1e-2
    b0 = 1e-4
    c0 = 1e-2
    d0 = 1e-4
    # pre-process data
    [N, D] = X.shape
    X_corr = X.T * X
    Xy_corr = X.T * y
    # an and cn depend only on the data dimensions, so they are fixed here.
    an = a0 + N / 2.
    gammaln_an = scipy.special.gammaln(an)
    cn = c0 + 1 / 2.
    D_gammaln_cn = D * scipy.special.gammaln(cn)
    # iterate to find hyperparameters
    L_last = -sys.float_info.max
    max_iter = 500
    E_a = numpy.matrix(numpy.ones(D) * c0 / d0).T
    for iter in range(max_iter):
        # covariance and weight of linear model
        invV = numpy.matrix(numpy.diag(numpy.array(E_a)[:,0])) + X_corr
        V = numpy.matrix(scipy.linalg.inv(invV))
        logdetV = -logdet(invV)
        w = numpy.dot(V, Xy_corr)[:,0]
        # parameters of noise model (an remains constant)
        sse = numpy.sum(numpy.power(X*w-y, 2), axis=0)
        if numpy.imag(sse)==0:
            sse = numpy.real(sse)[0]
        else:
            print 'Something went wrong'
        bn = b0 + 0.5 * (sse + numpy.sum((numpy.array(w)[:,0]**2) * numpy.array(E_a)[:,0], axis=0))
        E_t = an / bn
        # hyperparameters of covariance prior (cn remains constant)
        dn = d0 + 0.5 * (E_t * (numpy.array(w)[:,0]**2) + numpy.diag(V))
        E_a = numpy.matrix(cn / dn).T
        # variational bound, ignoring constant terms for now
        # NOTE(review): scipy.log / scipy.multiply are deprecated aliases of
        # the numpy functions and are removed in modern scipy.
        L = -0.5 * (E_t*sse + numpy.sum(scipy.multiply(X,X*V))) + \
            0.5 * logdetV - b0 * E_t + gammaln_an - an * scipy.log(bn) + an + \
            D_gammaln_cn - cn * numpy.sum(scipy.log(dn))
        # variational bound must grow!
        if L_last > L:
            # if this happens, then something has gone wrong....
            # NOTE(review): `file` shadows the builtin, and the `return`
            # after `raise` is unreachable.
            file = open('ERROR_LOG','w')
            file.write('Last bound %6.6f, current bound %6.6f' % (L, L_last))
            file.close()
            raise Exception('Variational bound should not reduce - see ERROR_LOG')
            return
        # stop if change in variation bound is < 0.001%
        if abs(L_last - L) < abs(0.00001 * L):
            break
        # print L, L_last
        L_last = L
    # NOTE(review): `iter` never equals max_iter (range stops at
    # max_iter - 1), so this warning is unreachable; also `warnings` is
    # not imported in this module and would raise NameError if reached.
    if iter == max_iter:
        warnings.warn('Bayes:maxIter ... Bayesian linear regression reached maximum number of iterations.')
    # augment variational bound with constant terms
    L = L - 0.5 * (N * numpy.log(2 * numpy.pi) - D) - scipy.special.gammaln(a0) + \
        a0 * numpy.log(b0) + D * (-scipy.special.gammaln(c0) + c0 * numpy.log(d0))
    return w, V, invV, logdetV, an, bn, E_a, L
,'P',pic,Ref) s[2]=Props('S','T',T[2],'P',pic,Ref) rho[2]=Props('D','T',T[2],'P',pic,Ref) T[3]=288 p[3]=pic h[3]=Props('H','T',T[3],'P',pic,Ref) s[3]=Props('S','T',T[3],'P',pic,Ref) rho[3]=Props('D','T',T[3],'P',pic,Ref) rho3=Props('D','T',T[3],'P',pic,Ref) h4s=Props('H','T',s[3],'P',pc,Ref) Wdot2=mdot*(h4s-h[3])/eta_oi h[4]=h[3]+(1-f_p)*Wdot2/mdot T[4]=Props('T','H',h[4],'P',pc,Ref) s[4]=Props('S','T',T[4],'P',pc,Ref) rho[4]=Props('D','T',T[4],'P',pc,Ref) sbubble_e=Props('S','T',Tbubble_e,'Q',0,Ref) sbubble_c=Props('S','T',Tbubble_c,'Q',0,Ref) sdew_e=Props('S','T',Te,'Q',1,Ref) sdew_c=Props('S','T',Tc,'Q',1,Ref) hsatL=Props('H','T',Tbubble_e,'Q',0,Ref) hsatV=Props('H','T',Te,'Q',1,Ref) ssatL=Props('S','T',Tbubble_e,'Q',0,Ref) ssatV=Props('S','T',Te,'Q',1,Ref) vsatL=1/Props('D','T',Tbubble_e,'Q',0,Ref) vsatV=1/Props('D','T',Te,'Q',1,Ref) x=(h[5]-hsatL)/(hsatV-hsatL) s[6]=x*ssatV+(1-x)*ssatL T[6]=x*Te+(1-x)*Tbubble_e rho[6]=1.0/(x*vsatV+(1-x)*vsatL) h[6]=h[5] h[7]=h[1] s[7]=s[1] T[7]=T[1] p=[numpy.nan,pe,pic,pic,pc,pc,pe,pe] COP=Q/(Wdot1+Wdot2) RE=h[1]-h[6] if prints==True: print('x5:',x) print('COP:', COP) print('COPH', (Q+Wdot1+Wdot2)/(Wdot1+Wdot2)) print(T[2]-273.15,T[4]-273.15,p[2]/p[1],p[4]/p[3]) print(mdot,mdot*(h[4]-h[5]),pic) print('Vdot1',mdot/rho1,'Vdisp',mdot/rho1/(3500/60.)*1e6/0.7) print('Vdot2',mdot/rho3,'Vdisp',mdot/rho3/(3500/60.)*1e6/0.7) print(mdot*(h[4]-h[5]),Tc-273.15) for i in range(1,len(T)-1): print('%d & %g & %g & %g & %g & %g \\\\' %(i,T[i]-273.15,p[i],h[i],s[i],rho[i])) else: print(Tsat_ic,COP) if skipPlot==False: if axis==None: ax=matplotlib.pyplot.gca() else: ax=axis if Ts_Ph in ['ph','Ph']: ax.plot(h,p) elif Ts_Ph in ['Ts','ts']: s_copy=s.copy() T_copy=T.copy() for i in range(1,len(s)-1): ax.plot(s[i],T[i],'bo',mfc='b',mec='b') dT=[0,-5,5,-20,5,5,5] ds=[0,0.05,0,0,0,0,0] ax.text(s[i]+ds[i],T[i]+dT[i],str(i)) s=list(s) T=list(T) s.insert(7,sdew_e) T.insert(7,Te) s.insert(5,sbubble_c) T.insert(5,Tbubble_c) s.insert(5,sdew_c) T.insert(5,Tc) 
ax.plot(s,T) s=s_copy T=T_copy else: raise TypeError('Type of Ts_Ph invalid') return COP def EconomizedCycle(Ref,Qin,Te,Tc,DTsh,DTsc,eta_oi,f_p,Ti,Ts_Ph='Ts',skipPlot=False,axis=None,**kwargs): """ This function plots an economized cycle, on the current axis, or that given by the optional parameter *axis* Required parameters: * Ref : Refrigerant [string] * Qin : Cooling capacity [W] * Te : Evap Temperature [K] * Tc : Condensing Temperature [K] * DTsh : Evaporator outlet superheat [K] * DTsc : Condenser outlet subcooling [K] * eta_oi : Adiabatic efficiency of compressor (no units) in range [0,1] * f_p : fraction of compressor power lost as ambient heat transfer in range [0,1] * Ti : Saturation temperature corresponding to intermediate pressure [K] Optional parameters: * Ts_Ph : 'Ts' for a Temperature-Entropy plot, 'Ph' for a Pressure-Enthalpy * axis : An axis to use instead of the active axis * skipPlot : If True, won't actually plot anything, just print COP """ m=1 T=numpy.zeros((11)) h=numpy.zeros_like(T) p=numpy.zeros_like(T) s=numpy.zeros_like(T) rho=numpy.zeros_like(T) T[0]=numpy.NAN s[0]=numpy.NAN T[1]=Te+DTsh pe=Props('P','T',Te,'Q',1.0,Ref) pc=Props('P','T',Tc,'Q',1.0,Ref) pi=Props('P','T',Ti,'Q',1.0,Ref) p[1]=pe h[1]=Props('H','T',T[1],'P',pe,Ref) s[1]=Props('S','T',T[1],'P',pe,Ref) rho[1]=Props('D','T',T[1],'P',pe,Ref) h2s=Props('H','S',s[1],'P',pi,Ref) wdot1=(h2s-h[1])/eta_oi h[2]=h[1]+(1-f_p[0])*wdot1 p[2]=pi T[2]=T_hp(Ref,h[2],pi,T2s) s[2]=Props('S','T',T[2],'P',pi,Ref) rho[2]=Props('D','T',T[2],'P',pi,Ref) T[5]=Tc-DTsc h[5]=Props('H','T',T[5],'P',pc,Ref)
s[5]=Props('S','T',T[5],'P',pc,Ref) rho[5]=Props('D','T',T[5],'P',pc,Ref) p[5]=pc p[6]=pi h[6]=h[5] p[7]=pi p[8]=pi p[6]=pi T[7]=Ti h[7]=Props('H','T',Ti,'Q',1,Ref) s[7]=Props('S','T',Ti,'Q',1,Ref) rho[7]=Props('D','T',Ti,'Q',1,Ref) T[8]=Ti h[8]=Props('H','T',Ti,'Q',0,Ref) s[8]=Props('S','T',Ti,'Q',0,Ref) rho[8]=Props('D','T',Ti,'Q',0,Ref) x6=(h[6]-h[8])/(h[7]-h[8]) #Vapor Quality s[6]=s[7]*x6+s[8]*(1-x6) rho[6]=1.0/(x6/r
ho[7]+(1-x6)/rho[8]) T[6]=Ti #Injection mass flow rate x=m*(h[6]-h[8])/(h[7]-h[6]) p[3]=pi h[3]=(m*h[2]+x*h[7])/(m+x) T[3]=T_hp(Ref,h[3],pi,T[2]) s[3]=Props('S','T',T[3],'P',pi,Ref) rho[3]=Props('D','T',T[3],'P',pi,Ref) T4s=newton(lambda T: Props('S','T',T,'P',pc,Ref)-s[3],T[2]+30) h4s=Props('H','T',T4s,'P',pc,Ref) p[4]=pc wdot2=(h4s-h[3])/eta_oi h[4]=h[3]+(1-f_p[1])*wdot2 T[4]=T_hp(Ref,h[4],pc,T4s) s[4]=Props('S','T',T[4],'P',pc,Ref) rho[4]=Props('D','T',T[4],'P',pc,Ref) p[9]=pe h[9]=h[8] T[9]=Te hsatL_e=Props('H','T',Te,'Q',0,Ref) hsatV_e=Props('H','T',Te,'Q',1,Ref) ssatL_e=Props('S','T',Te,'Q',0,Ref) ssatV_e=Props('S','T',Te,'Q',1,Ref) vsatL_e=1/Props('D','T',Te,'Q',0,Ref) vsatV_e=1/Props('D','T',Te,'Q',1,Ref) x9=(h[9]-hsatL_e)/(hsatV_e-hsatL_e) #Vapor Quality s[9]=ssatV_e*x9+ssatL_e*(1-x9) rho[9]=1.0/(x9*vsatV_e+(1-x9)*vsatL_e) s[10]=s[1] T[10]=T[1] h[10]=h[1] p[10]=p[1] Tbubble_e=Te Tbubble_c=Tc sbubble_e=Props('S','T',Tbubble_e,'Q',0,Ref) sbubble_c=Props('S','T',Tbubble_c,'Q',0,Ref) sdew_e=Props('S','T',Te,'Q',1,Ref) sdew_c=Props('S','T',Tc,'Q',1,Ref) Wdot1=m*wdot1 Wdot2=(m+x)*wdot2 if skipPlot==False: if axis==None: ax=matplotlib.pyplot.gca() else: ax=axis if Ts_Ph in ['ph','Ph']: ax.plot(h,p) ax.set_yscale('log') elif Ts_Ph in ['Ts','ts']: ax.plot(numpy.r_[s[7],s[3]],numpy.r_[T[7],T[3]],'b') s_copy=s.copy() T_copy=T.copy() dT=[0,-5,5,-12,5,12,-12,0,0,0] ds=[0,0.05,0.05,0,0.05,0,0.0,0.05,-0.05,-0.05] for i in range(1,len(s)-1): ax.plot(s[i],T[i],'bo',mfc='b',mec='b') ax.text(s[i]+ds[i],T[i]+dT[i],str(i),ha='center',va='center') s=list(s) T=list(T) s.insert(10,sdew_e) T.insert(10,Te) s.insert(5,sbubble_c) T.insert(5,Tbubble_c) s.insert(5,sdew_c) T.insert(5,Tc) ax.plot(s,T,'b') s=s_copy T=T_copy else: raise TypeError('Type of Ts_Ph invalid') COP=m*(h[1]-h[9])/(m*(h[2]-h[1])+(m+x)*(h[4]-h[3])) for i in range(1,len(T)-1): print('%d & %g & %g & %g & %g & %g \\\\' %(i,T[i]-273.15,p[i],h[i],s[i],rho[i])) print(x,m*(h[1]-h[9]),(m*(h[2]-h[1])+(m+x)*(h[4]-h[3])),COP) 
mdot=Qin/(h[1]-h[9]) mdot_inj=x*mdot print('x9',x9,) print('Qcond',(mdot+mdot_inj)*(h[4]-h[5]),'T4',T[4]-273.15) print(mdot,mdot+mdot_inj) f=3500/60. eta_v=0.7 print('Vdisp1: ',mdot/(rho[1]*f*eta_v)*1e6,'cm^3') print('Vdisp2: ',(mdot+mdot_inj)/(rho[1]*f*eta_v)*1e6,'cm^3') return COP if __name__=='__main__': Ph = CoolProp.Plots.Plots.Ph Ts = CoolProp.Plots.Plots.Ts Ref='R290' fig=matplotlib.pyplot.figure(figsize=(4,3)) ax=fig.add_axes((0.15,0.15,0.8,0.8)) Ph(Ref,Tmin=273.15-30,hbounds=[0,600],axis=ax) COP=TwoStage('Propane',10000,273.15-5,273.15+43.3,5,7,0.7,0.3,15+273.15,3,prints = True) matplotlib.pyplot.show() Ref='R290' fig=matplotlib.pyplot.figure(figsize=(4,3)) ax=fig.add_axes((0.15,0.15,0.8,0.8)
jango.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, u'maasserver.bootimage': { 'Meta': {'unique_together': "((u'nodegroup', u'architecture', u'subarchitecture', u'release', u'purpose'),)", 'object_name': 'BootImage'}, 'architecture': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'nodegroup': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['maasserver.NodeGroup']"}), 'purpose': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'release': ('django.db.models.fields.CharField', 
[], {'max_length': '255'}), 'subarchitecture': ('django.db.models.fields.CharField', [], {'max_length': '255'}) }, u'maasserver.componenterror': { 'Meta': {'object_name': 'ComponentError'}, 'component': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}), 'created': ('django.db.models.fields.DateTimeField', [], {}), 'error': ('django.db.models.fields.CharField', [], {'max_length': '1000'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'updated': ('django.db.models.fields.DateTimeField', [], {}) }, u'maasserver.config': { 'Meta': {'object_name': 'Config'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'value': ('maasserver.fields.JSONObjectField', [], {'null': 'True'}) }, u'maasserver.dhcplease': { 'Meta': {'object_name': 'DHCPLease'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'ip': ('django.db.models.fields.IPAddressField', [], {'unique': 'True', 'max_length': '15'}), 'mac': ('maasserver.fields.MACAddressField', [], {}), 'nodegroup': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['maasserver.NodeGroup']"}) }, u'maasserver.filestorage': { 'Meta': {'object_name': 'FileStorage'}, 'content': ('metadataserver.fields.BinaryField', [], {}), 'filename': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) }, u'maasserver.macaddress': { 'Meta': {'object_name': 'MACAddress'}, 'created': ('django.db.models.fields.DateTimeField', [], {}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'mac_address': ('maasserver.fields.MACAddressField', [], {'unique': 'True'}), 'node': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['maasserver.Node']"}), 'updated': ('django.db.models.fields.DateTimeField', [], {}) }, u'maasserver.node': { 
'Meta': {'object_name': 'Node'}, 'after_commissioning_action': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'architecture': ('django.db.models.fields.CharFie
ld', [], {'default': "u'i386/generic'", 'max_length': '31'}), 'cpu_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'created': ('django.db.models.fields.DateTimeField', [], {}), 'distro_series': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '10', 'null': 'True', 'blank': 'True'}), 'error': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}), 'hardware_details': ('maasserver.fields.XMLField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}), 'hostname': ('django.db.models.fields.CharField', [], {'default': "u''", 'unique': 'True', 'max_length': '255', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'memory': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'netboot': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'nodegroup': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['maasserver.NodeGroup']", 'null': 'True'}), 'owner': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}), 'power_parameters': ('maasserver.fields.JSONObjectField', [], {'default': "u''", 'blank': 'True'}), 'power_type': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '10', 'blank': 'True'}), 'status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'max_length': '10'}), 'system_id': ('django.db.models.fields.CharField', [], {'default': "u'node-2cd56f00-3548-11e2-b1cb-9c4e363b1c94'", 'unique': 'True', 'max_length': '41'}), 'tags': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['maasserver.Tag']", 'symmetrical': 'False'}), 'token': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['piston.Token']", 'null': 'True'}), 'updated': ('django.db.models.fields.DateTimeField', [], {}) }, u'maasserver.nodegroup': { 'Meta': {'object_name': 'NodeGroup'}, 
'api_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '18'}), 'api_token': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['piston.Token']", 'unique': 'True'}), 'cluster_name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100', 'blank': 'True'}), 'created': ('django.db.models.fields.DateTimeField', [], {}), 'dhcp_key': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'maas_url': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}), 'status': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'updated': ('django.db.models.fields.DateTimeField', [], {}), 'uuid'
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):
    """Make FreeBasicsTemplateData.site_name_url unique but optional
    (nullable and blank-able)."""

    dependencies = [
        ('freebasics', '0005_remove_selected_template_field'),
    ]

    operations = [
        migrations.AlterField(
            model_name='freebasicstemplatedata',
            name='site_name_url',
            # null=True allows unsaved/empty values to coexist with the
            # unique constraint (NULLs don't collide in SQL).
            field=models.CharField(max_length=255, unique=True, null=True,
                                   blank=True),
        ),
    ]
#!/usr/bin/env python
import os
import sys

# Make the shared security-features generator framework importable.
sys.path.insert(
    0,
    os.path.join(
        os.path.dirname(os.path.abspath(__file__)), '..', '..', '..',
        'common', 'security-features', 'tools'))
import generate


class ReferrerPolicyConfig(object):
    """Configuration consumed by the shared `generate` framework to expand
    the referrer-policy spec into concrete test files."""

    def __init__(self):
        # Path template identifying one generated test variant; the
        # %(...)s fields are filled in per spec-JSON selection.
        self.selection_pattern = \
            '%(source_context_list)s.%(delivery_type)s/' + \
            '%(delivery_value)s/' + \
            '%(subresource)s/' + \
            '%(origin)s.%(redirection)s.%(source_scheme)s'
        # Where each generated test file is written.
        self.test_file_path_pattern = 'gen/' + self.selection_pattern + '.html'
        self.test_description_template = 'Referrer Policy: Expects %(expectation)s for %(subresource)s to %(origin)s origin and %(redirection)s redirection from %(source_scheme)s context.'
        self.test_page_title_template = 'Referrer-Policy: %s'
        # Scripts injected into every generated test page.
        self.helper_js = '/referrer-policy/generic/test-case.sub.js'
        # For debug target only.
        self.sanity_checker_js = '/referrer-policy/generic/sanity-checker.js'
        self.spec_json_js = '/referrer-policy/spec_json.js'
        self.test_case_name = 'TestCase'
        # The spec directory is two levels up from this script.
        script_directory = os.path.dirname(os.path.abspath(__file__))
        self.spec_directory = os.path.abspath(
            os.path.join(script_directory, '..', '..'))


if __name__ == '__main__':
    generate.main(ReferrerPolicyConfig())
import os.path
import shutil
import zipfile

import click

from pros.config import ConfigNotFoundException
from .depot import Depot
from ..templates import BaseTemplate, Template, ExternalTemplate
from pros.common.utils import logger


class LocalDepot(Depot):
    """Depot backed by the local filesystem: templates are fetched from a
    directory, a zip archive, or a template.pros file already on disk."""

    def fetch_template(self, template: BaseTemplate, destination: str, **kwargs) -> Template:
        """Materialize a template into ``destination`` and return it.

        Requires ``kwargs['location']``; the branches below resolve it as,
        in order: a directory containing template.pros, a zip archive
        (extracted into ``destination``), a direct file path, or — as a
        fallback — the paths carried by an ExternalTemplate.

        Raises KeyError if no location was given, ConfigNotFoundException
        if a directory lacks template.pros, and ValueError if the location
        cannot be interpreted at all.
        """
        if 'location' not in kwargs:
            logger(__name__).debug(f"Template not specified. Provided arguments: {kwargs}")
            raise KeyError('Location of local template must be specified.')
        location = kwargs['location']
        if os.path.isdir(location):
            location_dir = location
            if not os.path.isfile(os.path.join(location_dir, 'template.pros')):
                raise ConfigNotFoundException(f'A template.pros file was not found in {location_dir}.')
            template_file = os.path.join(location_dir, 'template.pros')
        elif zipfile.is_zipfile(location):
            # Extract straight into the destination; location_dir is set to
            # destination so the copy step below is skipped for this branch.
            with zipfile.ZipFile(location) as zf:
                with click.progressbar(length=len(zf.namelist()),
                                       label=f"Extracting {location}") as progress_bar:
                    for file in zf.namelist():
                        zf.extract(file, path=destination)
                        progress_bar.update(1)
            template_file = os.path.join(destination, 'template.pros')
            location_dir = destination
        elif os.path.isfile(location):
            location_dir = os.path.dirname(location)
            template_file = location
        elif isinstance(template, ExternalTemplate):
            location_dir = template.directory
            template_file = template.save_file
        else:
            raise ValueError(f"The specified location was not a file or directory ({location}).")
        if location_dir != destination:
            # Count files up front so the progress bar has a fixed length.
            n_files = len([os.path.join(dp, f) for dp, dn, fn in os.walk(location_dir) for f in fn])
            with click.progressbar(length=n_files, label='Copying to local cache') as pb:
                # copy_function hook: tick the bar once per file copied.
                def my_copy(*args):
                    pb.update(1)
                    shutil.copy2(*args)

                # NOTE(review): shutil.copytree fails if ``destination``
                # already exists (no dirs_exist_ok here) — presumably callers
                # always pass a fresh cache directory; verify.
                shutil.copytree(location_dir, destination, copy_function=my_copy)
        return ExternalTemplate(file=template_file)

    def __init__(self):
        # Both the depot name and display name are 'local'.
        super().__init__('local', 'local')
import numpy as np


def array_generator():
    """Return the fixed 2x5 integer demo array."""
    return np.array([(1, 2, 3, 4, 5), (10, 20, 30, 40, 50)])


def multiply_by_number(array, number):
    """Print *array*, scale it element-wise by *number*, print and return."""
    print(array)
    product = array * number
    print(product)
    return product


def divide_by_number(array, number):
    # Either the numer or the elements of the array need to be double
    # to get a double value
    print(array)
    quotient = array / number
    print(quotient)
    return quotient


def addition(array_1, array_2):
    """Element-wise sum of the two arrays."""
    return array_1 + array_2


def elemtwise_mul(array_1, array_2):
    """Element-wise (Hadamard) product of the two arrays."""
    return array_1 * array_2


if __name__ == "__main__":
    # -----------------------------------------------------
    x = array_generator()
    two_x = multiply_by_number(x, 2)
    half_x = divide_by_number(x, 2)
    added = addition(two_x, half_x)
    element_multiplied = elemtwise_mul(x, two_x)
    # -----------------------------------------------------
    print('Y')
    y = np.array([(1, 2 ,3), (4, 5, 6)])  # !
    print(y)
    print('Z')
    z = np.array([(1, 2), (3, 4), (5, 6)])  # !
    print(z)
    print('D')
    d = np.dot(y, z)
    print(d)
"""
calc.py

>>> import calc
>>> s='2+4+8+7-5+3-1'
>>> calc.calc(s)
18
>>> calc.calc('2*3+4-5*4')
-10
"""
import re
from functools import reduce  # fix: reduce is not a builtin on Python 3
from operator import concat

# Maps an operator token to its binary function. Operands are always ints
# (see calc_helper), so '/' uses floor division — this preserves the original
# Python 2 int/int semantics and keeps the intermediate str()/int() round-trip
# in calc() from choking on a float like '2.0'.
operator_function_table = {
    '+': lambda x, y: x + y,
    '-': lambda x, y: x - y,
    '*': lambda x, y: x * y,
    '/': lambda x, y: x // y,
}

op_re_add_sub = r'\+|\-'
op_re_mult_div = r'\*|\/'
op_re = op_re_add_sub + '|' + op_re_mult_div


def calc(s):
    """Evaluate expression string *s* with standard operator precedence.

    Splits on +/- first, collapses each multiplicative run with
    calc_helper(), reassembles a flat +/- expression, then evaluates it
    left-to-right. Returns an int.
    """
    add_sub_operands = re.split(op_re_add_sub, s)
    add_sub_operators = re.findall(op_re_add_sub, s)
    # Evaluate every */ sub-expression so only + and - remain.
    post_mult_div = [str(calc_helper(operand)) for operand in add_sub_operands]
    new_calc_l = [reduce(concat, list(x))
                  for x in zip(post_mult_div[:-1], add_sub_operators)]
    # fix: append the final operand as one string (extend() spread its
    # characters as separate list elements).
    new_calc_l.append(post_mult_div[-1])
    new_calc_s = reduce(concat, new_calc_l)
    return calc_helper(new_calc_s)


def calc_helper(s):
    """Evaluate *s* strictly left-to-right, ignoring precedence.

    Suitable for expressions containing operators of equal precedence.
    Returns an int.
    """
    operands = [int(k) for k in re.split(op_re, s)]
    # Splitting on every digit leaves the operator tokens (and empty
    # strings between adjacent digits, which the filter drops).
    operators = [x for x in re.split(r'\d', s) if x]
    operator_functions = [operator_function_table[x] for x in operators]
    for f in operator_functions:
        # fix: apply() was removed in Python 3 — call the function directly.
        result = f(operands[0], operands[1])
        operands = [result] + operands[2:]
    return operands[0]
#!/usr/bin/env python
# Python 2 script: builds Arithmetic_intensity_model.csv comparing measured
# bytes/LUP of diamond-tiled stencil runs against an analytic model.

def main():
    """Entry point: load the CSV named on the command line and emit the table."""
    import sys
    raw_data = load_csv(sys.argv[1])
    create_table(raw_data)

def get_stencil_num(k):
    """Map a result row's coefficient/bandwidth description to a kernel id 0-5."""
    # add the stencil operator
    # NOTE(review): `k[...] in 'constant'` tests whether the row value is a
    # SUBSTRING of 'constant' (so '' also matches) — presumably
    # `'constant' in k[...]` was intended; confirm before changing.
    if k['Stencil Kernel coefficients'] in 'constant':
        if int(k['Stencil Kernel semi-bandwidth'])==4:
            stencil = 0
        else:
            stencil = 1
    elif 'no-symmetry' in k['Stencil Kernel coefficients']:
        stencil = 5
    elif 'sym' in k['Stencil Kernel coefficients']:
        if int(k['Stencil Kernel semi-bandwidth'])==1:
            stencil = 3
        else:
            stencil = 4
    else:
        stencil = 2
    return stencil

def create_table(raw_data):
    """Convert raw rows to typed tuples, attach model/measurement columns,
    and write the sorted result to Arithmetic_intensity_model.csv."""
    from operator import itemgetter
    import matplotlib.pyplot as plt   # NOTE(review): plt/pylab appear unused here
    import pylab
    from csv import DictWriter

    # Distinct time steppers (collected but only used implicitly below).
    ts_l = set()
    for k in raw_data:
        ts_l.add(k['Time stepper orig name'])
    ts_l = list(ts_l)

    #tb_l = [3, 7]
    tb_l = set()
    for k in raw_data:
        tb_l.add(k['Time unroll'])
    tb_l = list(tb_l)
    tb_l = map(int,tb_l)   # Python 2: map returns a list, so .sort() works
    tb_l.sort()
    #print tb_l

    # (field name, type tag): 0 = string, 1 = int, 2 = float
    req_fields = [('WD main-loop RANK0 MStencil/s MAX', 2), ('Time stepper orig name', 0), ('Stencil Kernel semi-bandwidth', 1), ('Stencil Kernel coefficients', 0), ('Precision', 0), ('Time unroll',1), ('Number of time steps',1), ('Number of tests',1), ('Local NX',1), ('Local NY',1), ('Local NZ',1), ('Total Memory Transfer', 2), ('Thread group size' ,1), ('Intra-diamond prologue/epilogue MStencils',1), ('Total cache block size (kB):',1)]

    data = []
    for k in raw_data:
        tup = dict()
        # defaults
        if k['Intra-diamond prologue/epilogue MStencils'] == '':
            k['Intra-diamond prologue/epilogue MStencils'] = 0
        if k['Total cache block size (kB):'] == '':
            k['Total cache block size (kB):'] = 0
        # add the general fileds
        for f in req_fields:
            try:
                v = k[f[0]]
                if f[1]==1: v = int(k[f[0]])
                if f[1]==2: v = float(k[f[0]])
            except:
                # NOTE(review): bare except; on failure the previous loop
                # iteration's `v` is silently reused after the print.
                print f[0]
            tup[f[0]] = v
        # add the stencil operator
        tup['Kernel'] = get_stencil_num(k)
        data.append(tup)

#    data = sorted(data, key=itemgetter(0, 1, 2, 3,4))
#    for i in data: print i

    # Keep only sufficiently large domains; attach measured and modeled BpU.
    data2 = []
    for tup in data:
        if tup['Local NX'] > 96:
            tup['Actual Bytes/LUP'] = actual_BpU(tup)
            tup['Model'] = models(tup)
            # model error
            tup['Err %'] = 100 * (tup['Model'] - tup['Actual Bytes/LUP'])/tup['Actual Bytes/LUP']
            # Diamond width derived from unroll factor and semi-bandwidth.
            tup['D_width'] = (tup['Time unroll']+1)*2*tup['Stencil Kernel semi-bandwidth']
            tup['Performance'] = tup['WD main-loop RANK0 MStencil/s MAX']
            data2.append(tup)
    #for i in data2: print i

    from operator import itemgetter
    data2 = sorted(data2, key=itemgetter('Time stepper orig name', 'Kernel', 'Thread group size', 'Local NX', 'D_width'))

    fields = ['Time stepper orig name', 'Kernel', 'Thread group size', 'Local NX', 'Precision', 'D_width', 'Total cache block size (kB):', 'Actual Bytes/LUP', 'Model', 'Err %', 'Performance']
    with open('Arithmetic_intensity_model.csv', 'w') as output_file:
        r = DictWriter(output_file,fieldnames=fields)
        r.writeheader()
        for k in data2:
            # Project each row down to the declared field list.
            k2 = dict()
            for f in k.keys():
                for f2 in fields:
                    if f == f2:
                        k2[f] = k[f]
            r.writerow(k2)

def actual_BpU(tup):
    """Measured bytes per lattice update, derived from total memory traffic
    minus the prologue/epilogue overhead stencils."""
    total_mem = tup['Total Memory Transfer']
    R = tup['Stencil Kernel semi-bandwidth']
    nt = tup['Number of time steps'] * tup['Number of tests']
    nx = tup['Local NX']
    ny = tup['Local NY']
    nz = tup['Local NZ']
    oh = tup['Intra-diamond prologue/epilogue MStencils']
    stencil_size = 2*ny*nz + ny*nz*(nx+2*R)
    BpU = (total_mem * 10**9) / ( stencil_size * nt - oh*10**6*tup['Number of tests'])
    return BpU

def models(tup):
    """Analytic bytes-per-update model for naive and temporally blocked runs."""
    # NOTE(review): word_size is unbound for precisions other than DP/SP, and
    # nb is unbound for kernels 2 and 3 — presumably those combinations never
    # occur in the input; verify.
    if tup['Precision'] == 'DP':
        word_size = 8
    elif tup['Precision'] == 'SP':
        word_size = 4

    R = tup['Stencil Kernel semi-bandwidth']
    TB = tup['Time unroll']
    ny = tup['Local NY']

    # number of streamed copies of the domain (buffers)
    if tup['Kernel'] == 0:
        nb = 3
    elif tup['Kernel'] == 1:
        nb = 2
    elif tup['Kernel'] == 4:
        nb = 2+13
    elif tup['Kernel'] == 5:
        nb = 2+7

    width = (TB+1)*2*R
    YT_section = float((TB+1)**2 * 2 * R)

    # no temporal blocking model
    if tup['Time stepper orig name'] == 'Naive':
        bpu = (1 + nb) * word_size
    else:
        # temporal blocking model
        bpu = ( ((width - 2*R) + width) + (nb*width + 2*R) ) * word_size / YT_section
    return bpu

def load_csv(data_file):
    """Read the results CSV into a list of dicts (Python 2 binary mode)."""
    from csv import DictReader
    with open(data_file, 'rb') as output_file:
        data = DictReader(output_file)
        data = [k for k in data]
    return data

if __name__ == "__main__":
    main()

# Earlier model variants kept for reference:
#    if 'constant' in tup['Stencil Kernel coefficients']:
#        BpU1 = (YT_section + width + 2*R + (t_order-1)*width) * word_size / YT_section
#        tup['No TS'] = BpU1
#
#        BpU2 = (width + 2*R + (t_order-1)*width) * word_size / YT_section
#        tup['All TS'] = BpU2
#
#        BpU3 = ((width - 2*R) + 2*width + 2*R + (t_order-1)*width) * word_size / YT_section
#        tup['Interior TS'] = BpU3
#
#        BpU4 = ( ((width - 2*R) + width) + ((t_order+1)*width + 2*R) ) * word_size / YT_section
#        tup['Interior TS 2'] = BpU4
#
#    elif 'variable' in tup['Stencil Kernel coefficients']:
#        BpU1 = (YT_section + width + 2*R + width*(6*R+1) + (t_order-1)*width) * word_size / YT_section
#        tup['No TS'] = BpU1
#
#        BpU2 = (width + 2*R + width*(6*R+1) + (t_order-1)*width) * word_size / YT_section
#        tup['All TS'] = BpU2
#
#        BpU3 = ((width - 2*R) + 2*width + 2*R + width*(6*R+1) + (t_order-1)*width) * word_size / YT_section
#        tup['Interior TS'] = BpU3
#
#        BpU4 = ( ((width - 2*R) + width) + ((t_order+1)*width + 2*R) + ((R*6+1)*width) ) * word_size / YT_section
#        tup['Interior TS 2'] = BpU4
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
#     http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.

from ..lib import elasticbeanstalk
from ..core import io
from ..resources.strings import prompts, strings
from ..objects.exceptions import TimeoutError
from . import commonops


def scale(app_name, env_name, number, confirm, timeout=None):
    """Scale an Elastic Beanstalk environment to ``number`` instances.

    Sets both the autoscaling MinSize and MaxSize to ``number``. If the
    environment is single-instance, offers to convert it to LoadBalanced
    first (skipped when ``confirm`` is truthy). Waits up to ``timeout``
    minutes (default 5) for the update to succeed, logging on timeout.
    """
    options = []
    # get environment
    env = elasticbeanstalk.describe_configuration_settings(
        app_name, env_name
    )['OptionSettings']

    # if single instance, offer to switch to load-balanced
    namespace = 'aws:elasticbeanstalk:environment'
    setting = next((n for n in env if n["Namespace"] == namespace), None)
    # NOTE(review): if no setting matches the namespace, `setting` is None and
    # the subscript below raises TypeError — presumably EB always returns this
    # namespace; verify.
    value = setting['Value']
    if value == 'SingleInstance':
        if not confirm:
            ## prompt to switch to LoadBalanced environment type
            io.echo(prompts['scale.switchtoloadbalance'])
            io.log_warning(prompts['scale.switchtoloadbalancewarn'])
            switch = io.get_boolean_response()
            if not switch:
                return

        options.append({'Namespace': namespace,
                        'OptionName': 'EnvironmentType',
                        'Value': 'LoadBalanced'})

    # change autoscaling min AND max to number
    namespace = 'aws:autoscaling:asg'
    max = 'MaxSize'   # NOTE(review): shadows builtins max/min (locals only)
    min = 'MinSize'
    for name in [max, min]:
        options.append(
            {'Namespace': namespace,
             'OptionName': name,
             'Value': str(number)
             }
        )
    request_id = elasticbeanstalk.update_environment(env_name, options)
    try:
        commonops.wait_for_success_events(request_id,
                                          timeout_in_minutes=timeout or 5,
                                          can_abort=True)
    except TimeoutError:
        io.log_error(strings['timeout.error'])
#!/usr/bin/python

# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Script for generating the Android framework's version of Skia from gyp
files.
"""

import android_framework_gyp
import os
import shutil
import sys
import tempfile

# Find the top of trunk
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
SKIA_DIR = os.path.normpath(os.path.join(SCRIPT_DIR, os.pardir, os.pardir,
                                         os.pardir))

# Find the directory with our helper files, and add it to the path.
GYP_GEN_DIR = os.path.join(SKIA_DIR, 'platform_tools', 'android', 'gyp_gen')
sys.path.append(GYP_GEN_DIR)

import gypd_parser
import generate_user_config
import makefile_writer
import vars_dict_lib

# Folder containing all gyp files and generated gypd files.
GYP_FOLDER = 'gyp'

# TODO(scroggo): Update the docstrings to match the style guide:
# http://google-styleguide.googlecode.com/svn/trunk/pyguide.html#Comments

def clean_gypd_files(folder):
  """
  Remove the gypd files generated by android_framework_gyp.main().
  @param folder Folder in which to delete all files ending with 'gypd'.
  """
  assert os.path.isdir(folder)
  files = os.listdir(folder)
  for f in files:
    if f.endswith('gypd'):
      os.remove(os.path.join(folder, f))

def generate_var_dict(target_dir, target_file, skia_arch_type, have_neon):
  """
  Create a VarsDict for a particular arch type. Each paramater is passed
  directly to android_framework_gyp.main().
  @param target_dir Directory containing gyp files.
  @param target_file Target gyp file.
  @param skia_arch_type Target architecture.
  @param have_neon Whether the target should build for neon.
  @return a VarsDict containing the variable definitions determined by gyp.
  """
  result_file = android_framework_gyp.main(target_dir, target_file,
                                           skia_arch_type, have_neon)
  var_dict = vars_dict_lib.VarsDict()
  gypd_parser.parse_gypd(var_dict, result_file)
  # Delete the generated gypd files so a later pass starts clean.
  clean_gypd_files(target_dir)
  print '.',    # progress indicator (Python 2 print statement)
  return var_dict

def main(target_dir=None):
  """
  Read gyp files and create Android.mk for the Android framework's
  external/skia.
  @param target_dir Directory in which to place 'Android.mk'. If None, the file
                    will be placed in skia's root directory.
  """
  # Create a temporary folder to hold gyp and gypd files. Create it in SKIA_DIR
  # so that it is a sibling of gyp/, so the relationships between gyp files and
  # other files (e.g. platform_tools/android/gyp/dependencies.gypi, referenced
  # by android_deps.gyp as a relative path) is unchanged.
  # Use mkdtemp to find an unused folder name, but then delete it so copytree
  # can be called with a non-existent directory.
  tmp_folder = tempfile.mkdtemp(dir=SKIA_DIR)
  os.rmdir(tmp_folder)
  shutil.copytree(os.path.join(SKIA_DIR, GYP_FOLDER), tmp_folder)

  try:
    main_gyp_file = 'android_framework_lib.gyp'

    print 'Creating Android.mk',

    # Generate a separate VarsDict for each architecture type.  For each
    # archtype:
    # 1. call android_framework_gyp.main() to generate gypd files
    # 2. call parse_gypd to read those gypd files into the VarsDict
    # 3. delete the gypd files
    #
    # Once we have the VarsDict for each architecture type, we combine them all
    # into a single Android.mk file, which can build targets of any
    # architecture type.

    # The default uses a non-existant archtype, to find all the general
    # variable definitions.
    default_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'other',
                                         False)
    arm_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'arm', False)
    arm_neon_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'arm',
                                          True)
    x86_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'x86', False)

    mips_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'mips', False)

    arm64_var_dict = generate_var_dict(tmp_folder, main_gyp_file, 'arm64',
                                       False)

    # Compute the intersection of all targets. All the files in the intersection
    # should be part of the makefile always. Each dict will now contain trimmed
    # lists containing only variable definitions specific to that configuration.
    var_dict_list = [default_var_dict, arm_var_dict, arm_neon_var_dict,
                     x86_var_dict, mips_var_dict, arm64_var_dict]
    common = vars_dict_lib.intersect(var_dict_list)

    # Create SkUserConfig
    user_config = os.path.join(SKIA_DIR, 'include', 'config', 'SkUserConfig.h')
    if target_dir:
      dst_dir = target_dir
    else:
      dst_dir = os.path.join(SKIA_DIR, 'include', 'core')

    generate_user_config.generate_user_config(
        original_sk_user_config=user_config, target_dir=dst_dir,
        ordered_set=common.DEFINES)

    # Now that the defines have been written to SkUserConfig, they are not
    # needed in Android.mk.
    common.DEFINES.reset()

    # Further trim arm_neon_var_dict with arm_var_dict. After this call,
    # arm_var_dict (which will now be the intersection) includes all definitions
    # used by both arm and arm + neon, and arm_neon_var_dict will only contain
    # those specific to arm + neon.
    arm_var_dict = vars_dict_lib.intersect([arm_var_dict, arm_neon_var_dict])

    # Now create a list of VarsDictData holding everything but common.
    deviations_from_common = []
    deviations_from_common.append(makefile_writer.VarsDictData(
        arm_var_dict, 'arm'))
    deviations_from_common.append(makefile_writer.VarsDictData(
        arm_neon_var_dict, 'arm', 'ARCH_ARM_HAVE_NEON'))
    deviations_from_common.append(makefile_writer.VarsDictData(x86_var_dict,
                                                               'x86'))
    # Currently, x86_64 is identical to x86
    deviations_from_common.append(makefile_writer.VarsDictData(x86_var_dict,
                                                               'x86_64'))

    deviations_from_common.append(makefile_writer.VarsDictData(mips_var_dict,
                                                               'mips'))

    deviations_from_common.append(makefile_writer.VarsDictData(arm64_var_dict,
                                                               'arm64'))

    makefile_writer.write_android_mk(target_dir=target_dir,
        common=common, deviations_from_common=deviations_from_common)

  finally:
    # Always remove the temporary gyp tree, even if generation failed.
    shutil.rmtree(tmp_folder)

if __name__ == '__main__':
  main()
"""
Code : Remove the dependency for Kodak Bank, default excel parser macros and just GNU/Linux to acheive it.
Authors : Ramaseshan, Anandhamoorthy , Engineers, Fractalio Data Pvt Ltd, Magadi, Karnataka.
Licence : GNU GPL v3.
Code Repo URL : https://github.com/ramaseshan/kodak_bank_excel_parser
"""
# Python 2 script: converts a bank-payment spreadsheet (argv[1]) into a
# '~'-delimited .txt file with fixed company/account fields filled in.
import pyexcel as pe
import pyexcel.ext.xls
import unicodedata
import sys
import time

def delete_content(pfile):
    # Truncate an already-open file back to empty (used on abort so a
    # half-written output file is not left behind).
    pfile.seek(0)
    pfile.truncate()

filename = sys.argv[1]
fileout = filename.split('.')[0]+".txt"
print "Reading file ",filename
records = pe.get_array(file_name=filename)
f = open(fileout,'w')
print "Starting to process data. Hold your breath"
# Skip the header row; count is used to report a 1-based spreadsheet row
# number (count+2) in the error message below.
for count,rec in enumerate(records[1:]):
    # Fixed constants for every payment row.
    rec[0] = "DATALIFE"
    rec[1] = "RPAY"
    rec[5] = "04182010000104"
    rec[4] = time.strftime("%d/%m/%Y")
    line = ""
    for value in rec:
        # Python 2 unicode cells are flattened to ASCII.
        if value and type(value) is unicode:
            value = unicodedata.normalize('NFKD', value).encode('ascii','ignore')
        # NOTE(review): this rec[6] normalization runs once per cell rather
        # than once per row — presumably harmless but verify intent.
        if rec[6] % 2 == 0:
            rec[6] = int(rec[6])
        # Cross check payment types with mahesh
        if rec[2] == "NEFT" or rec[2] == "IFT":
            line = line + str(value)+"~"
        else:
            print "Your Payment Type is Wrong in column %d. Please correct it and run the script again."%(count+2)
            print "Exiting Script"
            delete_content(f)
            f.close()
            sys.exit()
    # Drop the trailing '~' before writing the row.
    f.write(line[:-1])
    f.write("\n")
f.close()
print "Finished writing ",fileout
# Demo script: talks to a running GATE PythonSlave JVM over py4j.
from py4j.java_gateway import JavaGateway, GatewayParameters

# Connect to the Java gateway the GATE python plugin listens on (port 25333).
gateway = JavaGateway(gateway_parameters=GatewayParameters(port=25333))

# Create a new GATE document from a literal string and show its content.
doc1 = gateway.jvm.gate.Factory.newDocument("initial text")
print(doc1.getContent().toString())

# Load an existing document from disk via the PythonSlave helper.
doc2 = gateway.jvm.gate.plugin.python.PythonSlave.loadDocument("docs/doc1.xml")
print(doc2.getContent().toString())

# Serialize the loaded document to BdocDocument JSON and print it.
js1 = gateway.jvm.gate.plugin.python.PythonSlave.getBdocDocumentJson(doc2)
print(js1)

gateway.close()
from django.http import HttpResponse,HttpResponseRedirect
from django.shortcuts import render_to_response
from django import forms
from django.forms import ModelForm
from django.db.models import F
from django.db import connection
from django.utils import simplejson
from django.contrib import messages
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from django.core.exceptions import ObjectDoesNotExist
from django.core.mail import send_mass_mail
from django.utils.translation import ugettext as _

from notebook.notes.models import create_model, create_model_form
from notebook.scraps.models import Scrap, Folder
from notebook.notes.views import getT, getW, getNote, get_public_notes, get_public_tags, remove_private_tag_notes, __get_ws_tags
from notebook.notes.views import folders_index, settings_tag_add, settings_tag_update, settings_tag, settings_tags
from notebook.notes.views import getSearchResults, getlogger, __getQStr, __get_notes_context
from notebook.notes.views import __get_folder_context, __get_pre_url

import notebook
import datetime
from datetime import date

log = getlogger('scraps.views')

#this method is used for processing the request users send via the browser button
@login_required
def add_scrap(request):
    """Create a scrap for the logged-in user.

    POST: builds a note from the posted title/desc/url/vote/private fields,
    creating any tags named in item[tags][], and returns a small result page.
    GET: renders the add-scrap form pre-filled from the query string.
    """
    username = request.user.username
    # Per-user dynamic model classes for notes and tags in 'scrapbook'.
    N = getNote(username, 'scrapbook')
    T = getT(username)
    #W = getW(username)
    #w = W.objects.get(name='scrapbook')
    if request.method == 'POST':
        tags = T.objects.all()
        #form require tags to be required. So don't use form now, and use the code from add_note in notebook.notes.views for adding a snippet
        #AddNForm = create_model_form("AddNForm_add_scrap_post_"+str(username), N, fields={'tags':forms.ModelMultipleChoiceField(queryset=tags)})
        n = N()
        post = request.POST.copy()
        tag_names = post.getlist('item[tags][]')
        tags = []
        for tag_name in tag_names:
            # Create missing tags on the fly; collect names for add_tags below.
            t, created = T.objects.get_or_create(name=tag_name)
            #==============Don't need below any more since add_tags will do this logic=================================================================
            # if created or not w.tags.filter(name=t.name).exists():
            #     w.tags.add(t)
            #===============================================================================
            #tags.append(t.id)
            tags.append(t.name)
        #if not tag_names:
        #    tags = [T.objects.get(name='untagged').id]
        # Treat an empty list or a single empty string as "no tags".
        if not tags or (len(tags) == 1 and tags[0] == u''):
            tags = None
        #f = AddNForm(post, instance=n)
        #log.debug("f.errors:"+str(f.errors))
        #TODO:handle errors such as url broken
        #n = f.save(commit=False)
        n.title = post.get('title')
        n.desc = post.get('desc')
        n.url = post.get('url')
        private = post.get('private', False)
        if private in ['true', 'on']:
            n.private = True
        else:
            n.private = False
        n.vote = post.get('vote')
        n.save()
        n.add_tags(tags, 'scrapbook')
        n.save()  #called this specifically to save the url to the social db as well
        return render_to_response("include/notes/addNote_result.html",\
                {'message':_('Scrap is successfully added! You can close this window, or it will be closed for you in 1 second.')})
    else:
        tags = __get_ws_tags(request, username, 'scrapbook')
        from django.forms import TextInput
        #by adding the tags field specifically here, we avoided it using tags of another user (a strange error which repeat even after changing class names and variable names)
        AddNForm_scrap = create_model_form("AddNForm_add_scrap_get_"+str(username), N,
                            fields={#'tags':forms.ModelMultipleChoiceField(queryset=tags)
                                    },
                            options={'exclude':['deleted'],
                                     'fields':['url','title','tags','desc','vote','private'],
                                     'widgets':{'title': TextInput(attrs={'size': 80}),
                                                }})
        url = request.GET.get('url')
        title = request.GET.get('title')
        desc = request.GET.get('desc')
        #default_tag_id = T.objects.get(name='untagged').id
        addNoteForm = AddNForm_scrap(initial={'url': url, 'title':title, 'desc':desc#, 'tags': [default_tag_id]
                                              })
        #no need of the custimized form in the scrapbook template
        return render_to_response('scrapbook/notes/addNote.html', {'addNoteForm': addNoteForm, 'desc':desc, 'url':url, 'tags':tags})


@login_required
def share(request, username):
    """Email the selected scraps (POST note_ids) via send_mass_mail.

    NOTE(review): each tuple passed to send_mass_mail is
    (subject, message, from_email, recipients) — here the note text is sent
    as the SUBJECT and the body is empty; confirm this is intended.
    """
    print 'share in note called'    # Python 2 print statement
    note_ids = request.POST.getlist('note_ids')
    # NOTE(review): getNote is called with one argument here but two in
    # add_scrap — presumably a default bookname applies; verify.
    N = getNote(request.user.username)
    msgs = []
    for note_id in note_ids:
        note = N.objects.get(id=note_id)
        message = 'From osl scraps:'+' '+note.title+' '+note.url+' '
        # Strip newlines so the text fits a one-line mail subject.
        desc = note.desc
        desc = desc.replace('\r','')
        desc = desc.replace('\n','')#TODO:
        if len(desc) > 100:
            desc = desc[:300] + '...... view more from http://new.notebook.opensourcelearning.org/'+\
                username+'/scrapbook/scraps/note/'+unicode(note.id)+'/'
        message = message+desc
        msg = (message.encode('utf8'), '', 'yuanliangliu@gmail.com', ['buzz@gmail.com'])
        msgs.append(msg)
        #share_note(note_id, username)
    send_mass_mail(tuple(msgs), fail_silently=False)
    return HttpResponse('success', mimetype="text/plain")