prefix
stringlengths
0
918k
middle
stringlengths
0
812k
suffix
stringlengths
0
962k
from sevenbridges.meta.resource import Resource
from sevenbridges.meta.fields import StringField, DictField


class BatchGroup(Resource):
    """Group assigned to a child task of a batch task.

    Holds the group value and criteria fields derived from the batching
    configuration that was used when the parent task was started.
    """

    # Populated by the API; callers cannot modify these.
    value = StringField(read_only=True)
    fields = DictField(read_only=True)

    def __str__(self):
        return '<Batch group>'
from django.views import generic
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.core.urlresolvers import reverse_lazy
from django.shortcuts import render, redirect
from django.contrib.auth import authenticate, login
from django.views.generic import View
from .forms import UserForm
from .models import Album


class IndexView(generic.ListView):
    """List all albums."""
    template_name = "music/index.html"

    def get_queryset(self):
        return Album.objects.all()


class DetailView(generic.DetailView):
    """Show one album (looked up by pk from the URL)."""
    model = Album
    template_name = "music/detail.html"


class AlbumCreate(CreateView):
    """Create an album; uses the default ModelForm for the listed fields."""
    model = Album
    fields = ['artist', 'title', 'genre', 'logo']


class AlbumUpdate(UpdateView):
    """Edit an existing album."""
    model = Album
    fields = ['artist', 'title', 'genre', 'logo']


class AlbumDelete(DeleteView):
    """Delete an album, then return to the index."""
    model = Album
    success_url = reverse_lazy('music:index')


class UserFormView(View):
    """Register a new user and log them in on success."""
    form_class = UserForm
    template_name = 'music/registration_form.html'

    def get(self, request):
        # Display a blank registration form (GET).
        # (The original comment mislabeled this branch as POST.)
        form = self.form_class(None)
        return render(request, self.template_name, {'form': form})

    def post(self, request):
        # Process submitted form data (POST).
        form = self.form_class(request.POST)
        if form.is_valid():
            user = form.save(commit=False)
            # Cleaned (normalized) form data.
            username = form.cleaned_data['username']
            password = form.cleaned_data['password']
            # set_password hashes; never store the raw password.
            user.set_password(password)
            user.save()
            # authenticate() returns a User object only for valid credentials.
            user = authenticate(username=username, password=password)
            if user is not None and user.is_active:
                login(request, user)
                return redirect('music:index')
        # Invalid form, failed auth, or inactive user: redisplay with errors.
        return render(request, self.template_name, {'form': form})
import sys
from logging import warning
from glob import iglob
import json
import os
import shutil
from ..common import chdir, run
from .cache import cache_specs
from .dirs import get_specs_dir


def load_all_specs(*, basedir=get_specs_dir(), skip_update_check=True):
    """Load every cached spec as a ``{name: spec}`` dict.

    When ``skip_update_check`` is false, fetch the specs repo and pull if
    origin/master has new commits. NOTE: the ``basedir`` default is evaluated
    once, at import time.
    """
    os.makedirs(basedir, exist_ok=True)

    if not skip_update_check:
        with chdir(basedir):
            res, _, _ = run(['git', 'fetch', 'origin'])
            if res != 'success':
                print("Error fetching specs", file=sys.stderr)
            # Any output from `git log HEAD..origin/master` means we are behind.
            _, res, _ = run(['git', 'log', 'HEAD..origin/master'])
            if res != '':
                print("Spec updates found - Updating", file=sys.stderr)
                with chdir(basedir):
                    run(['git', 'pull', 'origin', 'master'])

    # the repo has a /specs folder
    basedir = os.path.join(basedir, 'specs')

    cache_specs(basedir)

    spec_files = iglob(os.path.join(basedir, '_cache', '*.json'))

    # load_spec returns a (name, spec) tuple, so we just let the dict()
    # constructor turn that into the {name: spec} pairs of a dictionary for us
    return dict(load_spec(filename, basedir) for filename in spec_files)


def load_some_specs(idents, *, basedir=get_specs_dir()):
    """Load only the specs named in ``idents`` as a ``{name: spec}`` dict.

    Unknown idents are silently ignored (only files present in the cache
    are loaded).
    """
    # the repo has a /specs folder
    basedir = os.path.join(basedir, 'specs')

    cache_specs(basedir)

    wanted_spec_files = [os.path.join(basedir, '_cache', '{}.json'.format(ident))
                         for ident in idents]
    all_spec_files = iglob(os.path.join(basedir, '_cache', '*.json'))
    loadable_spec_files = set(all_spec_files).intersection(wanted_spec_files)

    # load_spec returns a (name, spec) tuple, so we just let the dict()
    # constructor turn that into the {name: spec} pairs of a dictionary for us.
    # BUG FIX: load_spec() requires basedir; it was previously called with a
    # single argument here, raising TypeError for every loadable spec.
    return dict(load_spec(filename, basedir) for filename in loadable_spec_files)


def load_spec(filename, basedir):
    """Parse one cached spec file and return ``(assignment_name, spec_dict)``.

    If the file name disagrees with the spec's 'assignment' field, offer to
    wipe and rebuild the cache interactively.
    """
    with open(filename, 'r', encoding='utf-8') as specfile:
        loaded_spec = json.load(specfile)

    name = os.path.splitext(os.path.basename(filename))[0]
    assignment = loaded_spec['assignment']

    # Ask if user wants to re-cache specs to fix discrepancy
    if name != assignment:
        warning('assignment "{}" does not match the filename {}'.format(assignment, filename))
        recache = input("Re-cache specs? (Y/N)")
        if recache and recache.lower()[0] == "y":
            shutil.rmtree(os.path.join(basedir, '_cache'))
            cache_specs(basedir)

    return assignment, loaded_spec
from django.apps import AppConfig


class MainConfig(AppConfig):
    """Application configuration for the ``main`` Django app."""

    name = 'main'
import unittest
from chat.commands.commandlist import CommandList
from chat.command import Command
from tests.structs.dummychat import DummyChat


class TestCommands(unittest.TestCase):
    """Exercise CommandList lookup and name validation."""

    def setUp(self):
        self.chat = DummyChat()

    def test_get(self):
        # Looking up a known command must yield a Command instance.
        command = CommandList.get('help', self.chat, 'message')
        self.assertTrue(command and isinstance(command, Command), 'Command get failed')

    def test_validate(self):
        fail_msg = 'Command validate failed'
        # Known commands validate both bare and with the '!' prefix.
        for name in ('help', 'song', 'restart'):
            self.assertTrue(CommandList.validate(name), fail_msg)
            self.assertTrue(CommandList.validate('!' + name), fail_msg)
        # Unknown names must be rejected in both forms.
        for bad in ('not a function', '!not a function'):
            self.assertFalse(CommandList.validate(bad), fail_msg)
class TestRailTestCase:
    """Builder for a TestRail test-case payload.

    Collects the fields needed to create a case and serializes them with
    :meth:`to_json_dict`.

    Args:
        title: Case title.
        section: Name of the TestRail section the case belongs to.
        suite: Name of the TestRail suite the case belongs to.
        steps: Separated steps, as expected by 'custom_steps_separated'.
        type_id: TestRail case type id (default 1, the historical
            hard-coded value -- confirm its meaning against the instance).
        priority_id: TestRail priority id (default 4, likewise the
            historical hard-coded value).
    """

    def __init__(self, title, section, suite, steps, type_id=1, priority_id=4):
        self.title = title
        self.section_name = section
        self.suite_name = suite
        self.steps = steps
        # Generalized: previously hard-coded to 1 and 4; the defaults keep
        # existing callers' behavior unchanged.
        self.type_id = type_id
        self.priority_id = priority_id

    def to_json_dict(self):
        """Return the dict payload for TestRail's add_case endpoint."""
        return {
            'title': self.title,
            'type_id': self.type_id,
            'priority_id': self.priority_id,
            'custom_steps_separated': self.steps,
        }
""" Tools for sending email. """ from django.conf import settings from django.core.exceptions import ImproperlyConfigured from django.utils.importlib import import_module # Imported for backwards compatibility, and for the sake # of a cleaner namespace. These symbols used to be in # django/core/mail.py before the introduction of email # backends and the subsequent reorganization (See #10355) from django.core.mail.utils import CachedDnsName, DNS_NAME from django.core.mail.message import \ EmailMessage, EmailMultiAlternatives, \ SafeMIMEText, SafeMIMEMultipart, \ DEFAULT_ATTACHMENT_MIME_TYPE, make_msgid, \ BadHeaderError, forbid_multi_line_headers from django.core.mail.backends.smtp import EmailBackend as _SMTPConnection def get_connection(backend=None, fail_silently=False, **kwds): """Load an e-mail backend and return an instance of it. If backend is None (default) settings.EMAIL_BACKEND is used. Both fail_silently and other keyword arguments are used in the constructor of the backend. """ path = backend or settings.EMAIL_BACKEND try: mod_name, klass_name = path.rsplit('.', 1) mod = import_module(mod_name) except ImportError, e: raise ImproperlyConfigured(('Error importing email backend module %s: "%s"' % (mod_name, e))) try: klass = getattr(mod, klass_name) except AttributeError: raise ImproperlyConfigured(('Module "%s" does not define a ' '"%s" class' % (mod_name, klass_name))) return klass(fail_silently=fail_silently, **kwds) def send_mail(subject, message, from_email, recipient_list, fail_silently=False, auth_user=None, auth_password=None, connection=None): """ Easy wrapper for sending a single message to a recipient list. All members of the recipient list wi
ll see the other recipients in the 'To' field. If auth_user is None, the EMAIL_HOST_USER setting is used. If auth_passwo
rd is None, the EMAIL_HOST_PASSWORD setting is used. Note: The API for this method is frozen. New code wanting to extend the functionality should use the EmailMessage class directly. """ connection = connection or get_connection(username=auth_user, password=auth_password, fail_silently=fail_silently) return EmailMessage(subject, message, from_email, recipient_list, connection=connection).send() def send_mass_mail(datatuple, fail_silently=False, auth_user=None, auth_password=None, connection=None): """ Given a datatuple of (subject, message, from_email, recipient_list), sends each message to each recipient list. Returns the number of e-mails sent. If from_email is None, the DEFAULT_FROM_EMAIL setting is used. If auth_user and auth_password are set, they're used to log in. If auth_user is None, the EMAIL_HOST_USER setting is used. If auth_password is None, the EMAIL_HOST_PASSWORD setting is used. Note: The API for this method is frozen. New code wanting to extend the functionality should use the EmailMessage class directly. 
""" connection = connection or get_connection(username=auth_user, password=auth_password, fail_silently=fail_silently) messages = [EmailMessage(subject, message, sender, recipient) for subject, message, sender, recipient in datatuple] return connection.send_messages(messages) def mail_admins(subject, message, fail_silently=False, connection=None, html_message=None): """Sends a message to the admins, as defined by the ADMINS setting.""" if not settings.ADMINS: return mail = EmailMultiAlternatives(u'%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject), message, settings.SERVER_EMAIL, [a[1] for a in settings.ADMINS], connection=connection) if html_message: mail.attach_alternative(html_message, 'text/html') mail.send(fail_silently=fail_silently) def mail_managers(subject, message, fail_silently=False, connection=None, html_message=None): """Sends a message to the managers, as defined by the MANAGERS setting.""" if not settings.MANAGERS: return mail = EmailMultiAlternatives(u'%s%s' % (settings.EMAIL_SUBJECT_PREFIX, subject), message, settings.SERVER_EMAIL, [a[1] for a in settings.MANAGERS], connection=connection) if html_message: mail.attach_alternative(html_message, 'text/html') mail.send(fail_silently=fail_silently) class SMTPConnection(_SMTPConnection): def __init__(self, *args, **kwds): import warnings warnings.warn( 'mail.SMTPConnection is deprecated; use mail.get_connection() instead.', DeprecationWarning ) super(SMTPConnection, self).__init__(*args, **kwds)
port': raise ImportError, 'test_support must be imported from the test package' import sys class Error(Exception): """Base class for regression test exceptions.""" class TestFailed(Error): """Test failed.""" class TestSkipped(Error): """Test skipped. This can be raised to indicate that a test was deliberatly skipped, but not because a feature wasn't available. For example, if some resource can't be used, such as the network appears to be unavailable, this should be raised instead of TestFailed. """ class ResourceDenied(TestSkipped): """Test skipped because it requested a disallowed resource. This is raised when a test calls requires() for a resource that has not be enabled. It is used to distinguish between expected and unexpected skips. """ verbose = 1 # Flag set to 0 by regrtest.py use_resources = None # Flag set to [] by regrtest.py max_memuse = 0 # Disable bigmem tests (they will still be run with # small sizes, to make sure they work.) # _original_stdout is meant to hold stdout at the time regrtest began. # This may be "the real" stdout, or IDLE's emulation of stdout, or whatever. # The point is to have some flavor of stdout the user can actually see. _original_stdout = None def record_original_stdout(stdout): global _original_stdout _original_stdout = stdout def get_original_stdout(): return _original_stdout or sys.stdout def unload(name): try: del sys.modules[name] except KeyError: pass def unlink(filename): import os try: os.unlink(filename) except OSError: pass def forget(modname): '''"Forget" a module was ever imported by removing it from sys.modules and deleting any .pyc and .pyo files.''' unload(modname) import os for dirname in sys.path: unlink(os.path.join(dirname, modname + os.extsep + 'pyc')) # Deleting the .pyo file cannot be within the 'try' for the .pyc since # the chance exists that there is no .pyc (and thus the 'try' statement # is exited) but there is a .pyo file. 
unlink(os.path.join(dirname, modname + os.extsep + 'pyo')) def is_resource_enabled(resource): """Test whether a resource is enabled. Known resources are set by regrtest.py.""" return use_resources is not None and resource in use_resources def requires(resource, msg=None): """Raise ResourceDenied if the specified resource is not available. If the caller's module is __main__ then automatically return True. The possibility of False being returned occurs when regrtest.py is executing.""" # see if the caller's module is __main__ - if so, treat as if # the resource was set if sys._getframe().f_back.f_globals.get("__name__") == "__main__": return if not is_resource_enabled(resource): if msg is None: msg = "Use of the `%s' resource not enabled" % resource raise ResourceDenied(msg) def bind_port(sock, host='', preferred_port=54321): """Try to bind the sock to a port. If we are running multiple tests and we don't try multiple ports, the test can fails. This makes the test more robust.""" import socket, errno # some random ports that hopefully no one is listening on. 
for port in [preferred_port, 9907, 10243, 32999]: try: sock.bind((host, port)) return port except socket.error, (err, msg): if err != errno.EADDRINUSE: raise print >>sys.__stderr__, \ ' WARNING: failed to listen on port %d, trying another' % port raise TestFailed, 'unable to find port to listen on' FUZZ = 1e-6 def fcmp(x, y): # fuzzy comparison function if type(x) == type(0.0) or type(y) == type(0.0): try: x, y = coerce(x, y) fuzz = (abs(x) + abs(y)) * FUZZ if abs(x-y) <= fuzz: return 0 except: pass elif type(x) == type(y) and type(x) in (type(()), type([])): for i in range(min(len(x), len(y))): outcome = fcmp(x[i], y[i]) if outcome != 0: return outcome return cmp(len(x), len(y)) return cmp(x, y) try: unicode have_unicode = 1 except NameError: have_unicode = 0 is_jython = sys.platform.startswith('java') import os # Filename used for testing if os.name == 'java': # Jython disallows @ in module names TESTFN = '$test' elif os.name == 'riscos': TESTFN = 'testfile' else: TESTFN = '@test' # Unicode name only used if TEST_FN_ENCODING exists for the platform. if have_unicode: # Assuming sys.getfilesystemencoding()!=sys.getdefaultencoding() # TESTFN_UNICODE is a filename that can be encoded using the # file system encoding, but *not* with the default (ascii) encoding if isinstance('', unicode): # python -U # XXX perhaps unicode() should accept Unicode strings? TESTFN_UNICODE = "@test-\xe0\xf2" else: # 2 latin characters. TESTFN_UNICODE = unicode("@test-\xe0\xf2", "latin-1") TESTFN_ENCODING = sys.getfilesystemencoding() # TESTFN_UNICODE_UNENCODEABLE is a filename that should *not* be # able to be encoded by *either* the default or filesystem encoding. # This test really only makes sense on Windows NT platforms # which have special Unicode support in posixmodule. 
if (not hasattr(sys, "getwindowsversion") or sys.getwindowsversion()[3] < 2): # 0=win32s or 1=9x/ME TESTFN_UNICODE_UNENCODEABLE = None else: # Japanese characters (I think - from bug 846133) TESTFN_UNICODE_UNENCODEABLE = eval('u"@test-\u5171\u6709\u3055\u308c\u308b"') try: # XXX - Note - should be using TESTFN_ENCODING here - but for # Windows, "mbcs" currently always operates as if in # errors=ignore' mode - hence we get '?' characters rather than # the exception. 'Latin1' operates as we expect - ie, fails. # See [ 850997 ] mbcs encoding ignores errors TESTFN_UNICODE_UNENCODEABLE.encode("Latin1") except UnicodeEncodeError: pass else: print \ 'WARNING: The filename %r CAN be encoded by the filesystem. ' \
'Unicode filename tests may not be effective' \ % TESTFN_UNICODE_UNENCODEABLE # Make sure we can write to TESTFN, try in /tmp if we can't fp = None try: fp = open(TESTFN, 'w+') except IOError: TMP_TESTFN = os.path.join('/tmp', TESTFN) try: fp = open(TMP_TESTFN, 'w+')
TESTFN = TMP_TESTFN del TMP_TESTFN except IOError: print ('WARNING: tests will fail, unable to write to: %s or %s' % (TESTFN, TMP_TESTFN)) if fp is not None: fp.close() unlink(TESTFN) del os, fp def findfile(file, here=__file__): """Try to find a file on sys.path and the working directory. If it is not found the argument passed to the function is returned (this does not necessarily signal failure; could still be the legitimate path).""" import os if os.path.isabs(file): return file path = sys.path path = [os.path.dirname(here)] + path for dn in path: fn = os.path.join(dn, file) if os.path.exists(fn): return fn return file def verify(condition, reason='test failed'): """Verify that condition is true. If not, raise TestFailed. The optional argument reason can be given to provide a better error text. """ if not condition: raise TestFailed(reason) def vereq(a, b): """Raise TestFailed if a == b is false. This is better than verify(a == b) because, in case of failure, the error message incorporates repr(a) and repr(b) so you can see the inputs. Note that "not (a == b)" isn't necessarily the same as "a != b"; the former is tested. """
"""Tests for pycobertura's Cobertura report parsing.

Expected values below match the fixture reports built by
``make_cobertura`` (see tests/utils.py).

Fixes: removed an exact duplicate definition of ``test_total_misses``
(the second definition silently shadowed the first), and renamed the
misleading ``expected_total_misses`` dict in ``test_total_hits_by_class``
to ``expected_total_hits``.
"""
import mock
import lxml.etree as ET

from .utils import make_cobertura


def test_parse_path():
    from pycobertura import Cobertura

    xml_path = 'foo.xml'

    with mock.patch('pycobertura.cobertura.os.path.exists', return_value=True):
        with mock.patch('pycobertura.cobertura.ET.parse') as mock_parse:
            cobertura = Cobertura(xml_path)

    assert cobertura.xml is mock_parse.return_value.getroot.return_value


def test_version():
    cobertura = make_cobertura()
    assert cobertura.version == '1.9'


def test_line_rate():
    cobertura = make_cobertura()
    assert cobertura.line_rate() == 0.9


def test_line_rate_by_class():
    cobertura = make_cobertura()
    expected_line_rates = {
        'Main': 1.0,
        'search.BinarySearch': 0.9166666666666666,
        'search.ISortedArraySearch': 1.0,
        'search.LinearSearch': 0.7142857142857143,
    }
    for class_name in cobertura.classes():
        assert cobertura.line_rate(class_name) == \
            expected_line_rates[class_name]


def test_branch_rate():
    cobertura = make_cobertura()
    assert cobertura.branch_rate() == 0.75


def test_branch_rate_by_class():
    cobertura = make_cobertura()
    expected_branch_rates = {
        'Main': 1.0,
        'search.BinarySearch': 0.8333333333333334,
        'search.ISortedArraySearch': 1.0,
        'search.LinearSearch': 0.6666666666666666,
    }
    for class_name in cobertura.classes():
        assert cobertura.branch_rate(class_name) == \
            expected_branch_rates[class_name]


def test_total_misses():
    cobertura = make_cobertura()
    assert cobertura.total_misses() == 3


def test_missed_statements_by_class_name():
    cobertura = make_cobertura()
    expected_missed_statements = {
        'Main': [],
        'search.BinarySearch': [24],
        'search.ISortedArraySearch': [],
        'search.LinearSearch': [19, 24],
    }
    for class_name in cobertura.classes():
        assert cobertura.missed_statements(class_name) == \
            expected_missed_statements[class_name]


def test_list_packages():
    cobertura = make_cobertura()
    packages = cobertura.packages()
    assert packages == ['', 'search']


def test_list_classes():
    cobertura = make_cobertura()
    classes = cobertura.classes()
    assert classes == [
        'Main',
        'search.BinarySearch',
        'search.ISortedArraySearch',
        'search.LinearSearch',
    ]


def test_hit_lines__by_iterating_over_classes():
    cobertura = make_cobertura()
    expected_lines = {
        'Main': [10, 16, 17, 18, 19, 23, 25, 26, 28, 29, 30],
        'search.BinarySearch': [12, 16, 18, 20, 21, 23, 25, 26, 28, 29, 31],
        'search.ISortedArraySearch': [],
        'search.LinearSearch': [9, 13, 15, 16, 17],
    }
    for class_name in cobertura.classes():
        assert cobertura.hit_statements(class_name) == expected_lines[class_name]


def test_missed_lines():
    cobertura = make_cobertura()
    expected_lines = {
        'Main': [],
        'search.BinarySearch': [24],
        'search.ISortedArraySearch': [],
        'search.LinearSearch': [19, 20, 21, 22, 23, 24],
    }
    for class_name in cobertura.classes():
        assert cobertura.missed_lines(class_name) == expected_lines[class_name]


def test_total_statements():
    cobertura = make_cobertura()
    assert cobertura.total_statements() == 30


def test_total_statements_by_class():
    cobertura = make_cobertura()
    expected_total_statements = {
        'Main': 11,
        'search.BinarySearch': 12,
        'search.ISortedArraySearch': 0,
        'search.LinearSearch': 7,
    }
    for class_name in cobertura.classes():
        assert cobertura.total_statements(class_name) == \
            expected_total_statements[class_name]


def test_total_misses_by_class():
    cobertura = make_cobertura()
    expected_total_misses = {
        'Main': 0,
        'search.BinarySearch': 1,
        'search.ISortedArraySearch': 0,
        'search.LinearSearch': 2,
    }
    for class_name in cobertura.classes():
        assert cobertura.total_misses(class_name) == \
            expected_total_misses[class_name]


def test_total_hits():
    cobertura = make_cobertura()
    assert cobertura.total_hits() == 27


def test_total_hits_by_class():
    cobertura = make_cobertura()
    expected_total_hits = {
        'Main': 11,
        'search.BinarySearch': 11,
        'search.ISortedArraySearch': 0,
        'search.LinearSearch': 5,
    }
    for class_name in cobertura.classes():
        assert cobertura.total_hits(class_name) == \
            expected_total_hits[class_name]


def test_filename():
    cobertura = make_cobertura()
    expected_filenames = {
        'Main': 'Main.java',
        'search.BinarySearch': 'search/BinarySearch.java',
        'search.ISortedArraySearch': 'search/ISortedArraySearch.java',
        'search.LinearSearch': 'search/LinearSearch.java',
    }
    for class_name in cobertura.classes():
        assert cobertura.filename(class_name) == \
            expected_filenames[class_name]


def test_filepath():
    base_path = 'foo/bar/baz'
    cobertura = make_cobertura(base_path=base_path)
    expected_filepaths = {
        'Main': 'foo/bar/baz/Main.java',
        'search.BinarySearch': 'foo/bar/baz/search/BinarySearch.java',
        'search.ISortedArraySearch': 'foo/bar/baz/search/ISortedArraySearch.java',
        'search.LinearSearch': 'foo/bar/baz/search/LinearSearch.java',
    }
    for class_name in cobertura.classes():
        assert cobertura.filepath(class_name) == \
            expected_filepaths[class_name]


def test_class_source__sources_not_found():
    cobertura = make_cobertura('tests/cobertura.xml')
    expected_sources = {
        'Main': [(0, 'tests/Main.java not found', None)],
        'search.BinarySearch': [(0, 'tests/search/BinarySearch.java not found', None)],
        'search.ISortedArraySearch': [(0, 'tests/search/ISortedArraySearch.java not found', None)],
        'search.LinearSearch': [(0, 'tests/search/LinearSearch.java not found', None)],
    }
    for class_name in cobertura.classes():
        assert cobertura.class_source(class_name) == expected_sources[class_name]


def test_line_statuses():
    cobertura = make_cobertura('tests/dummy.source1/coverage.xml')
    expected_line_statuses = {
        'dummy/__init__': [],
        'dummy/dummy': [
            (1, True),
            (2, True),
            (4, True),
            (5, False),
            (6, False),
        ],
        'dummy/dummy2': [
            (1, True),
            (2, True),
        ],
        'dummy/dummy4': [
            (1, False),
            (2, False),
            (4, False),
            (5, False),
            (6, False),
        ],
    }
    for class_name in cobertura.classes():
        assert cobertura.line_statuses(class_name) == \
            expected_line_statuses[class_name]


def test_class_source__sources_found():
    cobertura = make_cobertura('tests/dummy.source1/coverage.xml')
    expected_sources = {
        'dummy/__init__': [],
        'dummy/dummy': [
            (1, 'def foo():\n', True),
            (2, '    pass\n', True),
            (3, '\n', None),
            (4, 'def bar():\n', True),
            (5, "    a = 'a'\n", False),
            (6, "    b = 'b'\n", False),
        ],
        'dummy/dummy2': [
            (1, 'def baz():\n', True),
            (2, '    pass\n', True),
        ],
        'dummy/dummy4': [
            (1, 'def barbaz():\n', False),
            (2, '    pass\n', False),
            (3, '\n', None),
            (4, 'def foobarbaz():\n', False),
            (5, '    a = 1 + 3\n', False),
            (6, '    pass\n', False),
        ],
    }
    for class_name in cobertura.classes():
        assert cobertura.class_source(class_name) == \
            expected_sources[class_name]
from optparse import make_option from django.core.management.base import BaseCommand from crits.core.mongo_tools import mongo_connector import pprint class Command(BaseCommand): """ Gets a count of indicator types and object types in CRITs """ help = "Gets a count of indicator types and object types in CRITs" option_list = BaseCommand.option_list + ( make_option('--sort_count', '-s', dest='sort_count', default=False, action="store_true", help='Sort by count instead of by the type\'s name.' ), make_option('--agg_obj_by_collection', '-a', dest='agg_obj_by_collection', default=False, action="store_true", help='For object types: Aggregate by collection instead of ' 'combining all results.' ), ) all_object_collections = [ "actors", "backdoors", "campaigns", "certificates", "domains", "email", "events", "exploits", "indicators", "ips", "pcaps", "raw_data", "sample", "screenshots", "targets", "yara_rules" ] def handle(self, *args, **kwargs): sort_count = kwargs.get('sort_count') agg_obj_by_collection = kwargs.get('agg_obj_by_collection') pp = pprint.PrettyPrinter(indent=4) self.aggregate_indicator_types(sort_count, pp) self.aggregate_object_types(sort_count, agg_obj_by_collection, pp) def aggregate_indicator_types(self, sort_count, pp): collection = "indicators" pipe = [ { "$group": {"_id":"$type" , "count":{"$sum": 1}}}, {"$sort": {"_id": 1}} ] if sort_count is True: pipe.append({"$sort": {"count": 1}}) else: pipe.append({"$sort": {"_id": 1}}) db = mongo_connector(collection) results = db.aggregate(pipeline=pipe) print "INDICATOR TYPES IN COLLECTION [%s]" % collection pp.pprint(results) print def aggregate_object_for_collection(self, collection, sort_count): pipe = [ {"$unwind": "$objects"}, {"$group" : {"_id": {"obj_type": {"$cond": {"if": {"$and": [{"$gt":["$objects.name", None] }, {"$ne": ["$objects.type", "$objects.name"]}] }, "then": {"$concat": [ "$objects.type", " - ", "$objects.name" ]}, "else": "$objects.type" } } }, "count": {"$sum": 1} } } ] if sort_count is 
True: pipe.append({"$sort": {"count": 1}}) else: pipe.append({"$sort": {"_id": 1}}) db = mongo_connector(collection) results = db.aggregate(pipeline=pipe) return results def aggregate_object_types(self, sort_count, is_agg_per_collection, pp): results = {} for collection in self.all_object_collections: object_types = self.aggregate_object_for_collection(collection, sort_count) results[collection] = object_types if is_agg
_per_collection: for collection in self.all_object_collections: print "OBJECT TYPES FOR COLLECTION: [%s]" % collection.upper() if len(results[collection]['re
sult']) != 0: pp.pprint(results[collection]['result']) else: print "None found." print else: all_obj_types = {} for collection in self.all_object_collections: collection_results = results[collection] for collection_result in collection_results['result']: obj_type = collection_result['_id']['obj_type'] all_obj_types[obj_type] = collection_result['count'] + all_obj_types.get(obj_type, 0); print "OBJECT TYPES FOR ALL COLLECTIONS" if(sort_count): import operator sorted_x = sorted(all_obj_types.items(), key=operator.itemgetter(1)) pp.pprint(sorted_x) else: pp.pprint(all_obj_types) print print
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
                        unicode_literals, with_statement)

from pants.backend.jvm.tasks.jvm_task import JvmTask
from pants.backend.jvm.tasks.jvm_tool_task_mixin import JvmToolTaskMixin
from pants.base.target import Target
from pants.console.stty_utils import preserve_stty_settings
from pants.java.util import execute_java


class ScalaRepl(JvmToolTaskMixin, JvmTask):
  """Task that launches an interactive Scala REPL over the targets' classpath."""

  @classmethod
  def register_options(cls, register):
    """Register the --main entry-point option and the scala-repl JVM tool."""
    super(ScalaRepl, cls).register_options(register)
    register('--main', default='scala.tools.nsc.MainGenericRunner',
             help='The entry point for running the repl.')
    cls.register_jvm_tool(register, 'scala-repl', default=['//:scala-repl'])

  @classmethod
  def prepare(cls, options, round_manager):
    super(ScalaRepl, cls).prepare(options, round_manager)

    # TODO(John Sirois): these are fake requirements in order to force compile run before this
    # goal. Introduce a RuntimeClasspath product for JvmCompile and PrepareResources to populate
    # and depend on that.
    # See: https://github.com/pantsbuild/pants/issues/310
    round_manager.require_data('resources_by_target')
    round_manager.require_data('classes_by_target')

  def execute(self):
    """Run the REPL over homogeneous JVM targets, if any are in scope.

    Releases the pants global lock before dropping into the interactive
    session and restores terminal (stty) settings afterwards.
    """
    (accept_predicate, reject_predicate) = Target.lang_discriminator('java')
    targets = self.require_homogeneous_targets(accept_predicate, reject_predicate)
    if targets:
      tools_classpath = self.tool_classpath('scala-repl')
      # Allow other pants invocations to proceed while the REPL is open.
      self.context.release_lock()
      with preserve_stty_settings():
        classpath = self.classpath(targets, cp=tools_classpath)

        # The scala repl requires -Dscala.usejavacp=true since Scala 2.8 when launching in the way
        # we do here (not passing -classpath as a program arg to scala.tools.nsc.MainGenericRunner).
        jvm_options = self.jvm_options
        if not any(opt.startswith('-Dscala.usejavacp=') for opt in jvm_options):
          jvm_options.append('-Dscala.usejavacp=true')

        print('')  # Start REPL output on a new line.
        try:
          # NOTE: We execute with no workunit, as capturing REPL output makes it very sluggish.
          execute_java(classpath=classpath,
                       main=self.get_options().main,
                       jvm_options=jvm_options,
                       args=self.args)
        except KeyboardInterrupt:
          # TODO(John Sirois): Confirm with Steve Gury that finally does not work on mac and an
          # explicit catch of KeyboardInterrupt is required.
          pass
e if self.__move_mode != 'vertical': new_time = self.track.xToTime(pos.x()) else: new_time = None if self.__move_mode != 'horizontal': new_value = self.track.yToValue(pos.y()) else: new_value = None with self.project.apply_mutations('%s: Change control point' % self.track.track.name): self.track.highlightedPoint().point.time = new_time self.track.highlightedPoint().point.value = new_value evt.accept() return super().mouseReleaseEvent(evt) def mouseDoubleClickEvent(self, evt: QtGui.QMouseEvent) -> None: if evt.button() == Qt.LeftButton and evt.modifiers() == Qt.NoModifier: # If the first half of the double click initiated a move, # cancel that move now. if self.__moving_point is not None: self.track.setPointPos(self.__moving_point, self.__moving_point_original_pos) self.__moving_point = None time = self.track.xToTime(evt.pos().x()) for point in self.track.track.points: if point.time == time: with self.project.apply_mutations( '%s: Change control point' % self.track.track.name): point.value = self.track.yToValue(evt.pos().y()) break else: with self.project.apply_mutations( '%s: Insert control point' % self.track.track.name): self.track.track.create_control_point( self.track.xToTime(evt.pos().x()), self.track.yToValue(evt.pos().y())) evt.accept() return super().mouseDoubleClickEvent(evt) class ControlTrackToolBox(tools.ToolBox): def __init__(self, **kwargs: Any) -> None: super().__init__(**kwargs) self.addTool(EditControlPointsTool) class ControlPoint(core.AutoCleanupMixin, object): def __init__(self, track_editor: 'ControlTrackEditor', point: model.ControlPoint) -> None: super().__init__() self.__track_editor = track_editor self.__point = point self.__pos = QtCore.QPoint( self.__track_editor.timeToX(self.__point.time), self.__track_editor.valueToY(self.__point.value)) self.__listeners = core.ListenerList() self.add_cleanup_function(self.__listeners.cleanup) self.__listeners.add(self.__point.time_changed.add(self.onTimeChanged)) 
self.__listeners.add(self.__point.value_changed.add(self.onValueChanged)) def onTimeChanged(self, change: music.PropertyValueChange[audioproc.MusicalTime]) -> None: self.__pos = QtCore.QPoint( self.__track_editor.timeToX(change.new_value), self.__pos.y()) self.__track_editor.update() def onValueChanged(self, cha
nge: music.PropertyValueChange[float]) -> None: self.__pos = QtCore.QPoint( self.__pos.x(), self.__track_editor.valueToY(change.new_value)) self.__track_editor.update() @property def index(self) -> int: return self.__point.index @property def point(self) -> model.ControlPoint: return self.__point @property def point_id(self) -> int: return self.__point.id @property
def time(self) -> audioproc.MusicalTime: return self.__point.time def pos(self) -> QtCore.QPoint: return self.__pos def setPos(self, pos: QtCore.QPoint) -> None: if pos is None: self.__pos = QtCore.QPoint( self.__track_editor.timeToX(self.__point.time), self.__track_editor.valueToY(self.__point.value)) else: self.__pos = pos def recomputePos(self) -> None: self.__pos = QtCore.QPoint( self.__track_editor.timeToX(self.__point.time), self.__track_editor.valueToY(self.__point.value)) class ControlTrackEditor(time_view_mixin.ContinuousTimeMixin, base_track_editor.BaseTrackEditor): def __init__(self, **kwargs: Any) -> None: super().__init__(**kwargs) self.__mouse_pos = None # type: QtCore.QPoint self.__highlighted_point = None # type: ControlPoint self.__playback_time = None # type: audioproc.MusicalTime self.__listeners = core.ListenerList() self.points = [] # type: List[ControlPoint] for point in self.track.points: self.addPoint(len(self.points), point) self.__listeners.add(self.track.points_changed.add(self.onPointsChanged)) self.setDefaultHeight(120) self.scaleXChanged.connect(self.__onScaleXChanged) self.playbackPositionChanged.connect(self.__playbackPositionChanged) def cleanup(self) -> None: for points in self.points: points.cleanup() self.points.clear() super().cleanup() def createToolBox(self) -> ControlTrackToolBox: return ControlTrackToolBox(track=self, context=self.context) def __onScaleXChanged(self, scale_x: fractions.Fraction) -> None: for cpoint in self.points: cpoint.recomputePos() self.update() @property def track(self) -> model.ControlTrack: return down_cast(model.ControlTrack, super().track) def setHighlightedPoint(self, cpoint: ControlPoint) -> None: if cpoint is not self.__highlighted_point: self.__highlighted_point = cpoint self.update() def highlightedPoint(self) -> ControlPoint: return self.__highlighted_point def updateHighlightedPoint(self) -> None: if self.__mouse_pos is None: self.setHighlightedPoint(None) return closest_cpoint = None # type: 
ControlPoint closest_dist = None # type: int for cpoint in self.points: dist = ((cpoint.pos().x() - self.__mouse_pos.x()) ** 2 + (cpoint.pos().y() - self.__mouse_pos.y()) ** 2) if dist < 20**2 and (closest_dist is None or dist < closest_dist): closest_dist = dist closest_cpoint = cpoint self.setHighlightedPoint(closest_cpoint) def setPointPos(self, cpoint: ControlPoint, pos: QtCore.QPoint) -> None: cpoint.setPos(pos) self.update() def addPoint(self, insert_index: int, point: model.ControlPoint) -> None: cpoint = ControlPoint(track_editor=self, point=point) self.points.insert(insert_index, cpoint) self.update() def removePoint(self, remove_index: int, point: QtCore.QPoint) -> None: cpoint = self.points.pop(remove_index) cpoint.cleanup() self.update() def onPointsChanged(self, change: music.PropertyListChange[model.ControlPoint]) -> None: if isinstance(change, music.PropertyListInsert): self.addPoint(change.index, change.new_value) self.updateHighlightedPoint() elif isinstance(change, music.PropertyListDelete): self.removePoint(change.index, change.old_value) self.updateHighlightedPoint() else: raise TypeError(type(change)) def __playbackPositionChanged(self, time: audioproc.MusicalTime) -> None: if self.__playback_time is not None: x = self.timeToX(self.__playback_time) self.update(x - self.xOffset(), 0, 2, self.height()) self.__playback_time = time if self.__playback_time is not None: x = self.timeToX(self.__playback_time) self.update(x - self.xOffset(), 0, 2, self.height()) def valueToY(self, value: float) -> int: return int(self.height() - int(self.height() * value)) def yToValue(self, y: int) -> float: return float(self.height() - y) / self.height() def leaveEvent(self, evt: QtCore.QEvent) -> None: self.__mouse_pos = None self.setHighlightedPoint(None) super().leaveEvent(evt) def mousePressEvent(self, evt: QtGu
# -*- coding: utf-8 -*-
# Copyright 2015 AvanzOsc (http://www.avanzosc.es)
# Copyright 2015-2017 - Pedro M. Baeza <pedro.baeza@tecnativa.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl)
# Odoo addon manifest for the "Procurement Purchase No Grouping" module.
{
    'name': 'Procurement Purchase No Grouping',
    'version': '10.0.1.0.0',
    # Adjacent string literals concatenate into one comma-separated author
    # string, as required by the OCA convention.
    'author': 'AvanzOSC,'
              'Tecnativa,'
              'Odoo Community Association (OCA)',
    'website': 'https://github.com/OCA/purchase-workflow',
    'category': 'Procurements',
    'depends': [
        'purchase',
        'procurement',
    ],
    'data': [
        'views/product_category_view.xml',
    ],
    'installable': True,
    'license': 'AGPL-3',
}
# Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Extract from notebook for Serving Optimization on Keras
"""

from __future__ import print_function

from datetime import datetime
import os
import sh
import sys

import tensorflow as tf
from tensorflow import data
from tensorflow.python.saved_model import tag_constants
from tensorflow.python.tools import freeze_graph
from tensorflow.python import ops
from tensorflow.tools.graph_transforms import TransformGraph

from inference_test import inference_test, load_mnist_keras
from optimize_graph import (run_experiment, get_graph_def_from_saved_model,
                            describe_graph, get_size, get_metagraph,
                            get_graph_def_from_file,
                            convert_graph_def_to_saved_model, freeze_model,
                            optimize_graph, TRANSFORMS)

# Number of MNIST digit classes.
NUM_CLASSES = 10
# Local directory layout: models/mnist/keras_classifier/...
MODELS_LOCATION = 'models/mnist'
MODEL_NAME = 'keras_classifier'


def keras_model_fn(params):
    """Build the MNIST classifier as an (uncompiled) Keras functional model.

    Args:
        params: hyper-parameter object with `num_conv_layers`, `init_filters`,
            `hidden_units` and `dropout` attributes (see `tf.contrib.training.
            HParams` built in `train_and_export_model`).

    Returns:
        A `tf.keras.models.Model` mapping a (28, 28) float image to a
        `NUM_CLASSES`-way softmax output named 'softmax'.
    """
    inputs = tf.keras.layers.Input(shape=(28, 28), name='input_image')
    # Add the channel dimension expected by Conv2D.
    input_layer = tf.keras.layers.Reshape(target_shape=(28, 28, 1),
                                          name='reshape')(inputs)

    # Convolutional stack: filter count doubles at each layer, each followed
    # by 2x2 max-pooling and batch normalization.
    conv_inputs = input_layer
    for i in range(params.num_conv_layers):
        filters = params.init_filters * (2**i)
        conv = tf.keras.layers.Conv2D(kernel_size=3, filters=filters,
                                      strides=1, padding='SAME',
                                      activation='relu')(conv_inputs)
        max_pool = tf.keras.layers.MaxPool2D(pool_size=2, strides=2,
                                             padding='SAME')(conv)
        batch_norm = tf.keras.layers.BatchNormalization()(max_pool)
        conv_inputs = batch_norm

    flatten = tf.keras.layers.Flatten(name='flatten')(conv_inputs)

    # Fully-connected stack with dropout after each dense layer.
    dense_inputs = flatten
    for i in range(len(params.hidden_units)):
        dense = tf.keras.layers.Dense(units=params.hidden_units[i],
                                      activation='relu')(dense_inputs)
        dropout = tf.keras.layers.Dropout(params.dropout)(dense)
        dense_inputs = dropout

    # Softmax classifier head; the op names ('logits', 'softmax') are relied
    # on later when freezing/optimizing the graph ('softmax/Softmax').
    logits = tf.keras.layers.Dense(units=NUM_CLASSES,
                                   name='logits')(dense_inputs)
    softmax = tf.keras.layers.Activation('softmax', name='softmax')(logits)

    model = tf.keras.models.Model(inputs, softmax)
    return model


def create_estimator_keras(params, run_config):
    """Compile the Keras model and wrap it as a `tf.estimator.Estimator`.

    Args:
        params: hyper-parameter object; `learning_rate` configures Adam.
        run_config: `tf.estimator.RunConfig` controlling checkpointing.

    Returns:
        A `tf.estimator.Estimator` backed by the compiled Keras model.
    """
    keras_model = keras_model_fn(params)
    # summary() prints the layer table itself and returns None; wrapping it
    # in print() previously emitted a spurious trailing "None" line.
    keras_model.summary()

    optimizer = tf.keras.optimizers.Adam(lr=params.learning_rate)
    # Bug fix: the Adam instance built above was previously discarded and the
    # string 'adam' (library-default learning rate) passed instead, silently
    # ignoring params.learning_rate.  Pass the configured optimizer.
    keras_model.compile(loss='sparse_categorical_crossentropy',
                        optimizer=optimizer,
                        metrics=['accuracy'])

    mnist_classifier = tf.keras.estimator.model_to_estimator(
        keras_model=keras_model,
        config=run_config
    )
    return mnist_classifier

##
## Train and Export Model

def train_and_export_model(train_data, train_labels):
    """Train the Keras/estimator classifier and export it as a SavedModel.

    Deletes any previous artifacts under MODELS_LOCATION/MODEL_NAME, trains
    via `run_experiment`, then exports with a raw serving input receiver.
    Returns the export directory *base* (the actual SavedModel lives in a
    timestamped subdirectory underneath it).
    """
    model_dir = os.path.join(MODELS_LOCATION, MODEL_NAME)

    # Hyper-parameters consumed by keras_model_fn / create_estimator_keras
    # and by run_experiment (training length, eval throttling).
    hparams = tf.contrib.training.HParams(
        batch_size=100,
        hidden_units=[512, 512],
        num_conv_layers=3,
        init_filters=64,
        dropout=0.2,
        max_training_steps=50,
        eval_throttle_secs=10,
        learning_rate=1e-3,
        debug=True
    )

    run_config = tf.estimator.RunConfig(
        tf_random_seed=19830610,  # fixed seed for reproducible training
        save_checkpoints_steps=1000,
        keep_checkpoint_max=3,
        model_dir=model_dir
    )

    # Start from a clean model directory so old checkpoints/exports can't
    # leak into this run.
    if tf.gfile.Exists(model_dir):
        print('Removing previous artifacts...')
        tf.gfile.DeleteRecursively(model_dir)
    os.makedirs(model_dir)

    estimator = run_experiment(hparams, train_data, train_labels, run_config, create_estimator_keras)

    def make_serving_input_receiver_fn():
        # Raw tensor input: serving requests feed (batch, 28, 28) float
        # images under the 'input_image' key (matches the Keras input name).
        inputs = {'input_image': tf.placeholder(
            shape=[None,28,28], dtype=tf.float32, name='serving_input_image')}
        return tf.estimator.export.build_raw_serving_input_receiver_fn(inputs)

    export_dir = os.path.join(model_dir, 'export')

    if tf.gfile.Exists(export_dir):
        tf.gfile.DeleteRecursively(export_dir)

    estimator.export_savedmodel(
        export_dir_base=export_dir,
        serving_input_receiver_fn=make_serving_input_receiver_fn()
    )

    return export_dir


def setup_model():
    """Load MNIST, train+export the model; return (export_dir, eval_data)."""
    train_data, train_labels, eval_data, eval_labels = load_mnist_keras()
    export_dir = train_and_export_model(train_data, train_labels)
    return export_dir, eval_data


# Number of timing runs averaged in the inference benchmark below.
NUM_TRIALS = 10

def main(args):
    """Entry point.

    With `--inference <export_dir>`: benchmark average load/serve time of the
    original and the optimized SavedModel over NUM_TRIALS runs.
    Otherwise: train/export a fresh model, then freeze, optimize and
    re-export it, describing the graph at each stage.
    """
    if len(args) > 1 and args[1] == '--inference':
        export_dir = args[2]
        _, _, eval_data, _ = load_mnist_keras()

        total_load_time = 0.0
        total_serve_time = 0.0
        # export_savedmodel writes the model into a timestamped (all-digit)
        # subdirectory; pick the first one found.
        saved_model_dir = os.path.join(
            export_dir, [f for f in os.listdir(export_dir) if f.isdigit()][0])
        for i in range(0, NUM_TRIALS):
            load_time, serving_time = inference_test(saved_model_dir, eval_data, repeat=10000)
            total_load_time += load_time
            total_serve_time += serving_time

        print("****************************************")
        print("*** Load time on original model: {:.2f}".format(total_load_time / NUM_TRIALS))
        print("*** Serve time on original model: {:.2f}".format(total_serve_time / NUM_TRIALS))
        print("****************************************")

        total_load_time = 0.0
        total_serve_time = 0.0
        optimized_export_dir = os.path.join(export_dir, 'optimized')
        for i in range(0, NUM_TRIALS):
            load_time, serving_time = inference_test(optimized_export_dir, eval_data, signature='serving_default', repeat=10000)
            total_load_time += load_time
            total_serve_time += serving_time

        print("****************************************")
        print("*** Load time on optimized model: {:.2f}".format(total_load_time / NUM_TRIALS))
        print("*** Serve time on optimized model: {:.2f}".format(total_serve_time / NUM_TRIALS))
        print("****************************************")
    else:
        # generate and output original model
        export_dir, eval_data = setup_model()
        saved_model_dir = os.path.join(export_dir, os.listdir(export_dir)[-1])
        describe_graph(get_graph_def_from_saved_model(saved_model_dir))
        get_size(saved_model_dir, 'saved_model.pb')
        get_metagraph(saved_model_dir)

        # freeze model and describe it
        freeze_model(saved_model_dir, 'softmax/Softmax', 'frozen_model.pb')
        frozen_filepath = os.path.join(saved_model_dir, 'frozen_model.pb')
        describe_graph(get_graph_def_from_file(frozen_filepath))
        get_size(saved_model_dir, 'frozen_model.pb', include_vars=False)

        # optimize model and describe it
        optimize_graph(saved_model_dir, 'frozen_model.pb', TRANSFORMS, 'softmax/Softmax')
        optimized_filepath = os.path.join(saved_model_dir, 'optimized_model.pb')
        describe_graph(get_graph_def_from_file(optimized_filepath))
        get_size(saved_model_dir, 'optimized_model.pb', include_vars=False)

        # convert to saved model and output metagraph again
        optimized_export_dir = os.path.join(export_dir, 'optimized')
        convert_graph_def_to_saved_model(optimized_export_dir, optimized_filepath, 'softmax', 'softmax/Softmax:0')
        get_size(optimized_export_dir, 'saved_model.pb')
        get_metagraph(optimized_export_dir)


if __name__ == '__main__':
    main(sys.argv)
# Copyright 2015 Dell Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

'''Volume driver for Dell Storage Center.'''

from oslo_log import log as logging
from oslo_utils import excutils

from cinder import exception
from cinder.i18n import _, _LE, _LI
from cinder.volume.drivers.dell import dell_storagecenter_common
from cinder.volume.drivers import san

LOG = logging.getLogger(__name__)


class DellStorageCenterISCSIDriver(san.SanISCSIDriver,
                                   dell_storagecenter_common.DellCommonDriver):
    '''Implements commands for Dell StorageCenter ISCSI management.

    To enable the driver add the following line to the cinder configuration:
        volume_driver=cinder.volume.drivers.dell.DellStorageCenterISCSIDriver
    '''

    VERSION = '1.0.2'

    def __init__(self, *args, **kwargs):
        super(DellStorageCenterISCSIDriver, self).__init__(*args, **kwargs)
        # Backend name falls back to a fixed label when not configured.
        self.backend_name = (
            self.configuration.safe_get('volume_backend_name') or 'Dell-iSCSI')

    def initialize_connection(self, volume, connector):
        """Map ``volume`` to the server identified by ``connector``.

        Finds (or creates) a server for the connector's initiator on the
        Dell backend, maps the volume to it and returns the iSCSI
        connection properties::

            {'driver_volume_type': 'iscsi',
             'data': {'target_discovered': False,
                      'target_iqn': preferred iqn,
                      'target_iqns': all iqns,
                      'target_portal': preferred portal,
                      'target_portals': all portals,
                      'target_lun': preferred lun,
                      'target_luns': all luns,
                      'access_mode': access_mode}}

        Raises:
            VolumeBackendAPIException: on any backend failure, or when the
                volume cannot be mapped.
        """
        # We use id to name the volume name as it is a known unique name.
        volume_name = volume.get('id')
        initiator_name = connector.get('initiator')
        multipath = connector.get('multipath', False)
        # Fixed: message previously contained a typo ('initialize_ connection'
        # with a stray space).
        LOG.info(_LI('initialize_connection: %(vol)s:%(initiator)s'),
                 {'vol': volume_name, 'initiator': initiator_name})

        with self._client.open_connection() as api:
            try:
                # Find our server.
                server = api.find_server(initiator_name)
                # No?  Create it.
                if server is None:
                    server = api.create_server(initiator_name)
                # Find the volume on the storage center.
                scvolume = api.find_volume(volume_name)

                # If we have a server and a volume lets bring them together.
                if server is not None and scvolume is not None:
                    mapping = api.map_volume(scvolume, server)
                    if mapping is not None:
                        # Since we just mapped our volume we had best update
                        # our sc volume object.
                        scvolume = api.find_volume(volume_name)

                        # Our return.
                        iscsiprops = {}

                        ip = None
                        port = None
                        if not multipath:
                            # We want to make sure we point to the specified
                            # ip address for our target_portal return.  This
                            # isn't an issue with multipath since it should
                            # try all the alternate portal.
                            ip = self.configuration.iscsi_ip_address
                            port = self.configuration.iscsi_port

                        # Three cases that should all be satisfied with the
                        # same return of Target_Portal and Target_Portals.
                        # 1. Nova is calling us so we need to return the
                        #    Target_Portal stuff.  It should ignore the
                        #    Target_Portals stuff.
                        # 2. OS brick is calling us in multipath mode so we
                        #    want to return Target_Portals.  It will ignore
                        #    the Target_Portal stuff.
                        # 3. OS brick is calling us in single path mode so
                        #    we want to return Target_Portal and
                        #    Target_Portals as alternates.
                        iscsiprops = (api.find_iscsi_properties(scvolume,
                                                                ip,
                                                                port))

                        # Return our iscsi properties.
                        return {'driver_volume_type': 'iscsi',
                                'data': iscsiprops}
            except Exception:
                error = (_('Failed to initialize connection '
                           '%(initiator)s %(vol)s') %
                         {'initiator': initiator_name,
                          'vol': volume_name})
                LOG.error(error)
                raise exception.VolumeBackendAPIException(error)

        # We get here because our mapping is none or we have no valid iqn to
        # return so blow up.
        raise exception.VolumeBackendAPIException(
            _('Unable to map volume'))

    def terminate_connection(self, volume, connector, force=False, **kwargs):
        """Unmap ``volume`` from the server identified by ``connector``.

        Raises:
            VolumeBackendAPIException: when the server/volume cannot be
                found or the unmap fails.
        """
        # Grab some initial info.
        initiator_name = connector.get('initiator')
        volume_name = volume.get('id')
        LOG.debug('Terminate connection: %(vol)s:%(initiator)s',
                  {'vol': volume_name, 'initiator': initiator_name})

        with self._client.open_connection() as api:
            try:
                scserver = api.find_server(initiator_name)
                # Find the volume on the storage center.
                scvolume = api.find_volume(volume_name)

                # If we have a server and a volume lets pull them apart.
                if (scserver is not None and
                        scvolume is not None and
                        api.unmap_volume(scvolume, scserver) is True):
                    LOG.debug('Connection terminated')
                    return
            except Exception:
                with excutils.save_and_reraise_exception():
                    LOG.error(_LE('Failed to terminate connection '
                                  '%(initiator)s %(vol)s'),
                              {'initiator': initiator_name,
                               'vol': volume_name})
        raise exception.VolumeBackendAPIException(
            _('Terminate connection failed'))
from itertools import combinations

from pytest import fixture

import msgpack as pymsgpack

# Sample values covering every serializable category: ints, floats, strings,
# booleans, None, binary blobs, and (nested) sequences and mappings.
values = [
    42,
    7,
    3.14,
    2.71,
    'lorem',
    'ipsum',
    True,
    False,
    None,
    b'lorem',
    b'ipsum',
    [],
    ['lorem', 42, 3.14, True, None, ['ipsum']],
    {},
    {
        'lorem': 'ipsum',
        'dolor': 42,
        'sit': 3.14,
        'amet': [True, None],
        'consectetur': {'adipisicing': 'elit'},
    },
]

# Every unordered pair of distinct sample values.
pairs = tuple(combinations(values, 2))


@fixture
def cxxjson():
    """Yield the project's C++ json binding module."""
    from cxx import json
    return json


@fixture
def cxxmsgpack():
    """Yield the project's C++ msgpack binding module."""
    from cxx import msgpack
    return msgpack
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:

# Copyright 2014-2017 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.

# pylint: disable=unused-import,wildcard-import,unused-wildcard-import

"""The qutebrowser test suite conftest file."""

import os
import sys
import warnings

import pytest
import hypothesis
from PyQt5.QtCore import PYQT_VERSION

# Must be registered before the helpers package is imported anywhere, so
# pytest rewrites its assert statements too.
pytest.register_assert_rewrite('helpers')

from helpers import logfail
from helpers.logfail import fail_on_logging
from helpers.messagemock import message_mock
from helpers.fixtures import *
from qutebrowser.utils import qtutils


# Set hypothesis settings
hypothesis.settings.register_profile('default',
                                     hypothesis.settings(strict=True))
hypothesis.settings.load_profile('default')


def _apply_platform_markers(config, item):
    """Apply a skip marker to a given item."""
    # Each entry: (marker name, condition under which to skip, reason).
    markers = [
        ('posix', os.name != 'posix', "Requires a POSIX os"),
        ('windows', os.name != 'nt', "Requires Windows"),
        ('linux', not sys.platform.startswith('linux'), "Requires Linux"),
        ('mac', sys.platform != 'darwin', "Requires macOS"),
        ('not_mac', sys.platform == 'darwin', "Skipped on macOS"),
        ('not_frozen', getattr(sys, 'frozen', False),
         "Can't be run when frozen"),
        ('frozen', not getattr(sys, 'frozen', False),
         "Can only run when frozen"),
        ('ci', 'CI' not in os.environ, "Only runs on CI."),
        ('issue2478', os.name == 'nt' and config.webengine,
         "Broken with QtWebEngine on Windows"),
    ]

    for searched_marker, condition, default_reason in markers:
        marker = item.get_marker(searched_marker)
        # Only act when the item carries the marker AND the skip condition
        # actually holds on this platform/config.
        if not marker or not condition:
            continue

        # A user-supplied reason on the marker is appended to the default
        # one; it must be removed from kwargs before re-passing them to
        # pytest.mark.skipif (which has its own `reason` parameter).
        if 'reason' in marker.kwargs:
            reason = '{}: {}'.format(default_reason, marker.kwargs['reason'])
            del marker.kwargs['reason']
        else:
            reason = default_reason + '.'
        skipif_marker = pytest.mark.skipif(condition, *marker.args,
                                           reason=reason, **marker.kwargs)
        item.add_marker(skipif_marker)


def pytest_collection_modifyitems(config, items):
    """Handle custom markers.

    pytest hook called after collection has been performed.

    Adds a marker named "gui" which can be used to filter gui tests from the
    command line.

    For example:

        pytest -m "not gui"  # run all tests except gui tests
        pytest -m "gui"  # run only gui tests

    It also handles the platform specific markers by translating them to
    skipif markers.

    Args:
        items: list of _pytest.main.Node items, where each item represents
               a python test that will be executed.

    Reference:
        http://pytest.org/latest/plugins.html
    """
    remaining_items = []
    deselected_items = []

    for item in items:
        # NOTE(review): `deselected` is never set to True in this loop, so no
        # item is ever moved to deselected_items here.
        deselected = False

        # Any test using the qapp fixture (directly or indirectly) is a GUI
        # test.
        if 'qapp' in getattr(item, 'fixturenames', ()):
            item.add_marker('gui')

        if hasattr(item, 'module'):
            # Classify the test by its top-level directory relative to the
            # test-suite root (end2end / unit / helpers / this conftest).
            module_path = os.path.relpath(
                item.module.__file__,
                os.path.commonprefix([__file__, item.module.__file__]))

            module_root_dir = module_path.split(os.sep)[0]
            assert module_root_dir in ['end2end', 'unit', 'helpers',
                                       'test_conftest.py']
            if module_root_dir == 'end2end':
                item.add_marker(pytest.mark.end2end)

        _apply_platform_markers(config, item)
        if item.get_marker('xfail_norun'):
            item.add_marker(pytest.mark.xfail(run=False))
        if item.get_marker('js_prompt'):
            # JS prompts need a newer PyQt with QtWebEngine than with
            # QtWebKit.
            if config.webengine:
                js_prompt_pyqt_version = 0x050700
            else:
                js_prompt_pyqt_version = 0x050300
            item.add_marker(pytest.mark.skipif(
                PYQT_VERSION <= js_prompt_pyqt_version,
                reason='JS prompts are not supported with this PyQt '
                       'version'))

        if deselected:
            deselected_items.append(item)
        else:
            remaining_items.append(item)

    config.hook.pytest_deselected(items=deselected_items)
    items[:] = remaining_items


def pytest_ignore_collect(path):
    """Ignore BDD tests if we're unable to run them."""
    skip_bdd = hasattr(sys, 'frozen')
    rel_path = path.relto(os.path.dirname(__file__))
    return rel_path == os.path.join('end2end', 'features') and skip_bdd


@pytest.fixture(scope='session')
def qapp(qapp):
    """Change the name of the QApplication instance."""
    qapp.setApplicationName('qute_test')
    return qapp


def pytest_addoption(parser):
    # Command-line options consumed by the end2end helpers.
    parser.addoption('--qute-delay', action='store', default=0, type=int,
                     help="Delay between qutebrowser commands.")
    parser.addoption('--qute-profile-subprocs', action='store_true',
                     default=False, help="Run cProfile for subprocesses.")
    parser.addoption('--qute-bdd-webengine', action='store_true',
                     help='Use QtWebEngine for BDD tests')


def pytest_configure(config):
    # config.webengine is read throughout the suite (e.g. in
    # _apply_platform_markers above); either the CLI flag or the
    # QUTE_BDD_WEBENGINE environment variable turns it on.
    webengine_arg = config.getoption('--qute-bdd-webengine')
    webengine_env = os.environ.get('QUTE_BDD_WEBENGINE', '')
    config.webengine = bool(webengine_arg or webengine_env)
    # Fail early if QtWebEngine is not available
    # pylint: disable=unused-variable
    if config.webengine:
        import PyQt5.QtWebEngineWidgets


@pytest.fixture(scope='session', autouse=True)
def check_display(request):
    # Sanity checks for GUI test runs: no stray Xvfb on the buildbot, and a
    # usable display on Linux.
    if (not request.config.getoption('--no-xvfb') and
            'QUTE_BUILDBOT' in os.environ and
            request.config.xvfb is not None):
        raise Exception("Xvfb is running on buildbot!")

    if sys.platform == 'linux' and not os.environ.get('DISPLAY', ''):
        raise Exception("No display and no Xvfb available!")


@pytest.hookimpl(tryfirst=True, hookwrapper=True)
def pytest_runtest_makereport(item, call):
    """Make test information available in fixtures.

    See http://pytest.org/latest/example/simple.html#making-test-result-information-available-in-fixtures
    """
    # Stores the report for each phase as item.rep_setup / rep_call /
    # rep_teardown so fixtures can inspect the test outcome.
    outcome = yield
    rep = outcome.get_result()
    setattr(item, "rep_" + rep.when, rep)
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
"""Tests for the event-log marshmallow schemas in airflow.api_connexion."""

import unittest

from airflow import DAG
from airflow.api_connexion.schemas.event_log_schema import (
    EventLogCollection,
    event_log_collection_schema,
    event_log_schema,
)
from airflow.models import Log, TaskInstance
from airflow.operators.dummy import DummyOperator
from airflow.utils import timezone
from airflow.utils.session import create_session, provide_session


class TestEventLogSchemaBase(unittest.TestCase):
    """Shared setup: empty Log table and fixed reference timestamps."""

    def setUp(self) -> None:
        # Start each test with an empty Log table so row ids/ordering are
        # deterministic.
        with create_session() as session:
            session.query(Log).delete()
        # Reference timestamps asserted against by the serialization tests.
        self.default_time = "2020-06-09T13:00:00+00:00"
        self.default_time2 = '2020-06-11T07:00:00+00:00'

    def tearDown(self) -> None:
        # Clean up any Log rows created by the test.
        with create_session() as session:
            session.query(Log).delete()

    def _create_task_instance(self):
        """Build a TaskInstance (dag TEST_DAG_ID, task TEST_TASK_ID) at the
        default time, for attaching to Log rows."""
        with DAG(
            'TEST_DAG_ID',
            start_date=timezone.parse(self.default_time),
            end_date=timezone.parse(self.default_time),
        ):
            op1 = DummyOperator(task_id="TEST_TASK_ID", owner="airflow")
        return TaskInstance(task=op1, execution_date=timezone.parse(self.default_time))


class TestEventLogSchema(TestEventLogSchemaBase):
    """Serialization of a single Log row via event_log_schema."""

    @provide_session
    def test_serialize(self, session):
        event_log_model = Log(event="TEST_EVENT", task_instance=self._create_task_instance())
        session.add(event_log_model)
        session.commit()
        # Overwrite the auto-set creation timestamp with the known reference
        # time so the dumped "when" field is predictable.
        event_log_model.dttm = timezone.parse(self.default_time)
        log_model = session.query(Log).first()
        deserialized_log = event_log_schema.dump(log_model)
        self.assertEqual(
            deserialized_log,
            {
                "event_log_id": event_log_model.id,
                "event": "TEST_EVENT",
                "dag_id": "TEST_DAG_ID",
                "task_id": "TEST_TASK_ID",
                "execution_date": self.default_time,
                "owner": 'airflow',
                "when": self.default_time,
                "extra": None,
            },
        )


class TestEventLogCollection(TestEventLogSchemaBase):
    """Serialization of multiple Log rows via event_log_collection_schema."""

    @provide_session
    def test_serialize(self, session):
        event_log_model_1 = Log(event="TEST_EVENT_1", task_instance=self._create_task_instance())
        event_log_model_2 = Log(event="TEST_EVENT_2", task_instance=self._create_task_instance())
        event_logs = [event_log_model_1, event_log_model_2]
        session.add_all(event_logs)
        session.commit()
        # Pin each row's timestamp so the dumped "when" fields are
        # predictable (see single-row test above).
        event_log_model_1.dttm = timezone.parse(self.default_time)
        event_log_model_2.dttm = timezone.parse(self.default_time2)
        instance = EventLogCollection(event_logs=event_logs, total_entries=2)
        deserialized_event_logs = event_log_collection_schema.dump(instance)
        self.assertEqual(
            deserialized_event_logs,
            {
                "event_logs": [
                    {
                        "event_log_id": event_log_model_1.id,
                        "event": "TEST_EVENT_1",
                        "dag_id": "TEST_DAG_ID",
                        "task_id": "TEST_TASK_ID",
                        "execution_date": self.default_time,
                        "owner": 'airflow',
                        "when": self.default_time,
                        "extra": None,
                    },
                    {
                        "event_log_id": event_log_model_2.id,
                        "event": "TEST_EVENT_2",
                        "dag_id": "TEST_DAG_ID",
                        "task_id": "TEST_TASK_ID",
                        "execution_date": self.default_time,
                        "owner": 'airflow',
                        "when": self.default_time2,
                        "extra": None,
                    },
                ],
                "total_entries": 2,
            },
        )
lf._autoname) def _register_keybindings(self): """Register accelerators for static labels that must change the focus""" newId_t = wx.NewId() newId_n = wx.NewId() newId_a = wx.NewId() newId_d = wx.NewId() newId_o = wx.NewId() self.Bind(wx.EVT_MENU, self.OnActivateType, id=newId_t) self.Bind(wx.EVT_MENU, self.OnActivateName, id=newId_n) self.Bind(wx.EVT_MENU, self.OnActivateAccess, id=newId_a) self.Bind(wx.EVT_MENU, self.OnActivateDefault, id=newId_d) self.Bind(wx.EVT_MENU, self.OnActivateNotes, id=newId_o) aTable = wx.AcceleratorTable([ wx.AcceleratorEntry(wx.ACCEL_ALT, ord('T'), newId_t), wx.AcceleratorEntry(wx.ACCEL_ALT, ord('N'), newId_n), wx.AcceleratorEntry(wx.ACCEL_ALT, ord('A'), newId_a), wx.AcceleratorEntry(wx.ACCEL_ALT, ord('D'), newId_d), wx.AcceleratorEntry(wx.ACCEL_ALT, ord('O'), newId_o) ]) self.SetAcceleratorTable(aTable) def OnActivateType(self, event): """activate type combo""" self.m_type.SetFocus() def OnActivateName(self, event): """activate name entry""" self.m_name.SetFocus() def OnActivateAccess(self, event): """activate acces combo""" self.m_choice2.SetFocus() def OnActivateDefault(self, event): """activate default value""" self.m_textCtrl8.SetFocus() def OnActivateNotes(self, event): """Activate notes""" self.m_richText1.SetFocus() def OnEnterName(self, event): """This event is generated when the enter is pressed in the name entry""" self.m_choice2.SetFocus() def OnTypeChanged(self, event): """This event happens when the return type is changed. 
The main goal of this callback is handling template types for argument specification""" iSel = self.m_type.GetCurrentSelection() _type = self._types.get(self.m_type.GetString(iSel), None) template_args = False if _type is not None: if _type._template is not None: template_args = True if template_args is True: self.m_staticText67.Enable(True) self.m_template_args.Enable(True) self.m_staticText68.Enable(True) else: self.m_staticText67.Enable(False) self.m_template_args.Enable(False) self.m_staticText68.Enable(False) self.m_template_args.SetValue('') self.AutoName() def CopyAttributes(self, member): """Get the atributes""" member._name = self._name member._typei = copy.copy(self._typei) member._access = self._access member._static = self._static member._default = self._default member._volatile = self._volatile member._mutable = self._mutable member._bitField = self._bitField if self._bitField: member._bitFieldSize = self._bitFieldSize member._note = self._note member.inner_class.AutoInit() def SetAttributes(self, member): """Set the attributes""" self.m_name.SetValue(member._name) ti = member._typei iSel = self.m_type.FindString(ti.scoped) self.m_type.SetSelection(iSel) iSel = self.m_choice2.FindString(member._access) self.m_choice2.SetSelection(iSel) self.m_checkBox105.SetValue(member._static) self.m_textCtrl8.SetValue(member._default) self.m_checkBox49.SetValue(member._volatile) self.m_checkBox48.SetValue(member._mutable) self.m_const.SetValue(ti._const) self.m_ptr.SetValue(ti._ptr) self.m_reference.SetValue(ti._ref) self.m_pptr.SetValue(ti._ptr_to_ptr) self.m_constptr.SetValue(ti._const_ptr) self.m_array.SetValue(ti._array) if ti._array is True: self.m_textCtrl7.Show(True) self.m_textCtrl7.Enable(True) self.m_textCtrl7.SetValue(str(ti._array_size)) else: self.m_textCtrl7.SetValue('0') self.m_checkBox51.SetValue(member._bitField) if ti._type_args is not None: self.m_staticText67.Enable(True) self.m_template_args.Enable(True) self.m_staticText68.Enable(True) 
self.m_template_args.SetValue(ti._type_args) if member._bitField is True: self.m_textCtrl39.Show(True) self.m_textCtrl39.Enable(True) self.m_textCtrl39.SetValue(str(member._bitFieldSize)) self.m_richText1.SetValue(member._note) self.SetTitle("Edit member") def Validate(self): """Dialog validation""" self._name = self.m_name.GetValue() if len(self._name) == 0: wx.MessageBox("Member name must not be empty", "Error", wx.OK | wx.CENTER | wx.ICON_ERROR, self) return False iSel = self.m_type.GetCurrentSelection() if iSel == wx.NOT_FOUND: wx.MessageBox("Invalid type", "Error", wx.OK | wx.CENTER | wx.ICON_ERROR, self) return False typename = self.m_type.GetString(iSel) iSel = self.m_choice2.GetCurrentSelection() if iSel == wx.NOT_FOUND: wx.MessageBox("Invalid access", "Error", wx.OK | wx.CENTER | wx.ICON_ERROR, self) return False self._static = self.m_checkBox105.IsChecked() self._access = self.m_choice2.GetString(iSel) self._default = self.m_textCtrl8.GetValue() self._volatile = self.m_checkBox49.GetValue() self._mutable = self.m_checkBox48.GetValue() if self.m_array.IsChecked(): try: asize = int(self.m_textCtrl7.GetValue()) except: asize = '' else: asize = None if typename in self._nested_template_types: self._typei = model.cc.typeinst( type=self._types['@'], type_alias=typename, const=self.m_const.IsChecked(), ptr=self.m_ptr.IsChecked(), ref=self.m_reference.IsChecked(), ptrptr=self.m_pptr.IsChecked(), constptr=self.m_constptr.IsChecked(), array=self.m_array.IsChecked(), arraysize=asize ) else: _type = self._types[typename] if _type._template is not None: #we construct type instance with explicit arguments type_args = self.m_template_args.GetValue() self._typei = model.cc.typeinst( type=_type, type_args=type_args, const=self.m_const.IsChecked(), ptr=self.m_ptr.IsChecked(), ref=self.m_reference.IsChecked(), ptrptr=self.m_pptr.IsChecked(), constptr=self.m_constptr.IsChecked(), array=self.m_ar
ray.IsChecked(), arraysize=asize ) else: self._typei = model.cc.typeinst( type=self._types[typename], const=self.m_const.IsChecked(), ptr=self.m_ptr.IsChecked(), ref=self.m_referenc
e.IsChecked(), ptrptr=self.m_pptr.IsChecked(), constptr=self.m_constptr.IsChecked(), array=self.m_array.IsChecked(), arraysize=asize ) self._bitField = self.m_checkBox51.IsChecked() if self._bitField is True: self._bitFieldSize = int(self.m_textCtrl39.GetValue()) else: self._bitFieldSize = 0 self._note = self.m_richText1.GetValue() return True def get_kwargs(self): """return arguments for object instance""" return {'parent': self._container, 'name': self._name, 'type': self._typei, 'access': self._access, 'static': self._static, 'volatile': self._volatile, 'mutable'
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
"""Defines a top-level glue class that operates the Transport and Flasher classes."""

import logging
import time

from .._ffi import get_global_func
from ..contrib import graph_runtime
from ..rpc import RPCSession
from .transport import TransportLogger

try:
    from .base import _rpc_connect
except ImportError:
    raise ImportError("micro tvm is not enabled. Set USE_MICRO to ON in config.cmake")

# Seconds to wait after flashing before opening the transport; gives the
# device time to reset into the freshly-flashed firmware.
_POST_FLASH_SETTLE_TIME_SECS = 3.0


class Session:
    """MicroTVM Device Session

    Parameters
    ----------
    config : dict
        configuration for this session (as generated by
        `tvm.micro.device.host.default_config()`, for example)

    Example
    --------
    .. code-block:: python

      c_mod = ...  # some module generated with "c" as the target
      dev_config = micro.device.arm.stm32f746xx.default_config('127.0.0.1', 6666)
      with tvm.micro.Session(dev_config) as sess:
          micro_mod = sess.create_micro_mod(c_mod)
    """

    def __init__(
        self, binary=None, flasher=None, transport_context_manager=None, session_name="micro-rpc"
    ):
        """Configure a new session.

        Parameters
        ----------
        binary : MicroBinary
            If given, `flasher` must also be given. During session initialization, this
            binary will be flashed to the device before the transport is created.
        flasher : Flasher
            If given, `binary` must also be given. Used to flash `binary` during session
            initialization.
        transport_context_manager : ContextManager[transport.Transport]
            If given, `flasher` and `binary` should not be given. On entry, this context
            manager should establish a transport between this TVM instance and the device.
        session_name : str
            Name of the session, used for debugging.
        """
        self.binary = binary
        self.flasher = flasher
        self.transport_context_manager = transport_context_manager
        self.session_name = session_name

        self._rpc = None
        self._graph_runtime = None

    def get_system_lib(self):
        """Return the system library module from the on-device runtime."""
        return self._rpc.get_function("runtime.SystemLib")()

    def __enter__(self):
        """Initialize this session and establish an RPC session with the on-device RPC server.

        Returns
        -------
        Session :
            Returns self.
        """
        if self.flasher is not None:
            self.transport_context_manager = self.flasher.flash(self.binary)
            # Give the device time to reboot before attempting to talk to it.
            time.sleep(_POST_FLASH_SETTLE_TIME_SECS)

        self.transport = TransportLogger(
            self.session_name, self.transport_context_manager, level=logging.INFO
        ).__enter__()
        self._rpc = RPCSession(
            _rpc_connect(self.session_name, self.transport.write, self.transport.read)
        )
        self.context = self._rpc.cpu(0)
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        """Tear down this session and associated RPC session resources."""
        self.transport.__exit__(exc_type, exc_value, exc_traceback)


def create_local_graph_runtime(graph_json_str, mod, ctx):
    """Create a local graph runtime driving execution on the remote CPU context given.

    Parameters
    ----------
    graph_json_str : str
        A string containing the graph representation.

    mod : tvm.runtime.Module
        The remote module containing functions in graph_json_str.

    ctx : tvm.Context
        The remote CPU execution context.

    Returns
    -------
    tvm.contrib.GraphRuntime :
        A local graph runtime instance that executes on the remote device.
    """
    device_type_id = [ctx.device_type, ctx.device_id]
    fcreate = get_global_func("tvm.graph_runtime.create")
    return graph_runtime.GraphModule(fcreate(graph_json_str, mod, *device_type_id))
one: b_values = numpy.zeros((n_out,), dtype=theano.config.floatX) b = theano.shared(value=b_values, name='b', borrow=True) self.W = W self.b = b lin_output = T.dot(input, self.W) + self.b self.output = ( lin_output if activation is None else activation(lin_output) ) # parameters of the model self.params = [self.W, self.b] # start-snippet-2 class MLP(object): """Multi-Layer Perceptron Class A multilayer perceptron is a feedforward artificial neural network model that has one layer or more of hidden units and nonlinear activations. Intermediate layers usually have as activation function tanh or the sigmoid function (defined here by a ``HiddenLayer`` class) while the top layer is a softmax layer (defined here by a ``LogisticRegression`` class). """ def __init__(self, rng, input, n_in, n_hidden, n_out): """Initialize the parameters for the multilayer perceptron :type rng: numpy.random.RandomState :param rng: a random number generator used to initialize weights :type input: theano.tensor.TensorType :param input: symbolic variable that describes the input of the architecture (one minibatch) :type n_in: int :param n_in: number of input units, the dimension of the space in which the datapoints lie :type n_hidden: int :param n_hidden: number of hidden units :type n_out: int :param n_out: number of output units, the dimension of the space in which the labels lie """ # Since we are dealing with a one hidden layer MLP, this will translate # into a HiddenLayer with a tanh activation function connected to the # LogisticRegression layer; the activation function can be replaced by # sigmoid or any other nonlinear function sel
f.hiddenLayer = HiddenLayer( rng=rng, input=input, n_in=n_in, n_out=n_hidden, activation=T.tanh ) # The logistic regression
layer gets as input the hidden units # of the hidden layer self.logRegressionLayer = LogisticRegression( input=self.hiddenLayer.output, n_in=n_hidden, n_out=n_out ) # end-snippet-2 start-snippet-3 # L1 norm ; one regularization option is to enforce L1 norm to # be small self.L1 = ( abs(self.hiddenLayer.W).sum() + abs(self.logRegressionLayer.W).sum() ) # square of L2 norm ; one regularization option is to enforce # square of L2 norm to be small self.L2_sqr = ( (self.hiddenLayer.W ** 2).sum() + (self.logRegressionLayer.W ** 2).sum() ) # negative log likelihood of the MLP is given by the negative # log likelihood of the output of the model, computed in the # logistic regression layer self.negative_log_likelihood = ( self.logRegressionLayer.negative_log_likelihood ) # same holds for the function computing the number of errors self.errors = self.logRegressionLayer.errors # the parameters of the model are the parameters of the two layer it is # made out of self.params = self.hiddenLayer.params + self.logRegressionLayer.params # end-snippet-3 # keep track of model input self.input = input def test_mlp(learning_rate=0.01, L1_reg=0.00, L2_reg=0.0001, n_epochs=1000, dataset='mnist.pkl.gz', batch_size=200, n_hidden=100): """ Demonstrate stochastic gradient descent optimization for a multilayer perceptron This is demonstrated on MNIST. 
:type learning_rate: float :param learning_rate: learning rate used (factor for the stochastic gradient :type L1_reg: float :param L1_reg: L1-norm's weight when added to the cost (see regularization) :type L2_reg: float :param L2_reg: L2-norm's weight when added to the cost (see regularization) :type n_epochs: int :param n_epochs: maximal number of epochs to run the optimizer :type dataset: string :param dataset: the path of the MNIST dataset file from http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz """ datasets = load_data(dataset) train_set_x, train_set_y = datasets[0] valid_set_x, valid_set_y = datasets[1] test_set_x, test_set_y = datasets[2] # compute number of minibatches for training, validation and testing n_train_batches = train_set_x.get_value(borrow=True).shape[0] // batch_size n_valid_batches = valid_set_x.get_value(borrow=True).shape[0] // batch_size n_test_batches = test_set_x.get_value(borrow=True).shape[0] // batch_size ###################### # BUILD ACTUAL MODEL # ###################### print('... 
building the model') # allocate symbolic variables for the data index = T.lscalar() # index to a [mini]batch x = T.matrix('x') # the data is presented as rasterized images y = T.ivector('y') # the labels are presented as 1D vector of # [int] labels rng = numpy.random.RandomState(1234) # construct the MLP class classifier = MLP( rng=rng, input=x, n_in=28 * 28, n_hidden=n_hidden, n_out=10 ) # start-snippet-4 # the cost we minimize during training is the negative log likelihood of # the model plus the regularization terms (L1 and L2); cost is expressed # here symbolically cost = ( classifier.negative_log_likelihood(y) + L1_reg * classifier.L1 + L2_reg * classifier.L2_sqr ) # end-snippet-4 # compiling a Theano function that computes the mistakes that are made # by the model on a minibatch test_model = theano.function( inputs=[index], outputs=classifier.errors(y), givens={ x: test_set_x[index * batch_size:(index + 1) * batch_size], y: test_set_y[index * batch_size:(index + 1) * batch_size] } ) validate_model = theano.function( inputs=[index], outputs=classifier.errors(y), givens={ x: valid_set_x[index * batch_size:(index + 1) * batch_size], y: valid_set_y[index * batch_size:(index + 1) * batch_size] } ) # start-snippet-5 # compute the gradient of cost with respect to theta (sorted in params) # the resulting gradients will be stored in a list gparams gparams = [T.grad(cost, param) for param in classifier.params] # specify how to update the parameters of the model as a list of # (variable, update expression) pairs # given two lists of the same length, A = [a1, a2, a3, a4] and # B = [b1, b2, b3, b4], zip generates a list C of same size, where each # element is a pair formed from the two lists : # C = [(a1, b1), (a2, b2), (a3, b3), (a4, b4)] updates = [ (param, param - learning_rate * gparam) for param, gparam in zip(classifier.params, gparams) ] # compiling a Theano function `train_model` that returns the cost, but # in the same time updates the parameter of the model 
based on the rules # defined in `updates` train_model = theano.function( inputs=[index], outputs=cost, updates=updates, givens={ x: train_set_x[index * batch_size: (index + 1) * batch_size], y: train_set_y[index * batch_size: (index + 1) * batch_size] } ) # end-snippet-5 ############### # TRAIN MODEL # ############### print('... training') # early-stopping parameters patience = 10000 # look as this many examples regardless patience_increase = 2 # wait this much longer when a new best is # found improvement_threshold = 0.995 # a relative improvement of this much is # considered significant validation_frequency = min(n_t
import time
from threading import Thread

import pytest
import requests
from bottle import default_app, WSGIRefServer

from tomviz.acquisition import server


class Server(Thread):
    """Runs the acquisition HTTP server on a background thread for tests."""

    def __init__(self, dev=False, port=9999):
        super(Server, self).__init__()
        self.host = 'localhost'
        self.port = port
        self.base_url = 'http://%s:%d' % (self.host, self.port)
        self.url = '%s/acquisition' % self.base_url
        self.dev = dev
        self._server = WSGIRefServer(host=self.host, port=self.port)

    def run(self):
        # Thread body: configure the acquisition server, then serve requests.
        self.setup()
        self._server.run(app=default_app())

    def start(self, timeout=30):
        """Start the server thread and block until it accepts connections.

        Raises RuntimeError if the server does not come up within `timeout`
        seconds, instead of spinning forever and hanging the whole test run.
        """
        super(Server, self).start()
        deadline = time.time() + timeout
        # Wait for bottle to start
        while True:
            try:
                requests.get(self.base_url)
                break
            except requests.ConnectionError:
                if time.time() > deadline:
                    raise RuntimeError(
                        'Test server failed to start within %r seconds' % timeout)
                time.sleep(0.1)

    def setup(self, adapter=None):
        server.setup(dev=self.dev, adapter=adapter)

    def stop(self):
        self._server.srv.shutdown()
        # Force the socket to close so we can reuse the same port
        self._server.srv.socket.close()


@pytest.fixture(scope="module")
def acquisition_server():
    srv = Server()
    srv.start()
    yield srv
    srv.stop()
    srv.join()


@pytest.fixture(scope="module")
def acquisition_dev_server():
    srv = Server(dev=True, port=9998)
    srv.start()
    yield srv
    srv.stop()
    srv.join()
# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Compare a txt file of predictions with gold targets from a TSV file."""

from absl import app
from absl import flags

from language.compgen.nqg.tasks import tsv_utils

from tensorflow.io import gfile

FLAGS = flags.FLAGS

flags.DEFINE_string("gold", "", "tsv file containing gold targets.")

flags.DEFINE_string("predictions", "", "txt file with predicted targets.")


def main(unused_argv):
  """Print mismatched examples and the overall prediction accuracy."""
  gold_examples = tsv_utils.read_tsv(FLAGS.gold)
  preds = []
  with gfile.GFile(FLAGS.predictions, "r") as f:
    for line in f:
      preds.append(line.rstrip())

  correct = 0
  incorrect = 0
  # NOTE: zip stops at the shorter of the two sequences, so any trailing
  # unmatched predictions or gold examples are silently ignored.
  for pred, gold_example in zip(preds, gold_examples):
    if pred == gold_example[1]:
      correct += 1
    else:
      incorrect += 1
      print("Incorrect for example %s.\nTarget: %s\nPrediction: %s" %
            (gold_example[0], gold_example[1], pred))

  print("correct: %s" % correct)
  print("incorrect: %s" % incorrect)
  total = correct + incorrect
  # Guard against empty inputs; previously this divided by zero when no
  # examples were compared.
  if total:
    print("pct: %s" % str(float(correct) / float(total)))
  else:
    print("pct: n/a (no examples compared)")


if __name__ == "__main__":
  app.run(main)
return "{%s}" % ", ".join(items) def assert_instance(obj, class_, label): """Raises ArgumentError when `obj` is not instance of `cls`""" if not isinstance(obj, class_): raise ModelInconsistencyError("%s should be sublcass of %s, " "provided: %s" % (label, class_.__name__, type(obj).__name__)) def assert_all_instances(list_, class_, label="object"): """Raises ArgumentError when objects in `list_` are not instances of `cls`""" for obj in list_ or []: assert_instance(obj, class_, label="object") class MissingPackageError(Exception): """Exception raised when encountered a missing package.""" pass class MissingPackage(object): """Bogus class to handle missing optional packages - packages that are not necessarily required for Cubes, but are needed for certain features.""" def __init__(self, package, feature = None, source = None, comment = None): self.package = package self.feature = feature self.source = source self.comment = comment def __call__(self, *args, **kwargs): self._fail() def __getattr__(self, name): self._fail() def _fail(self): if self.feature: use = " to be able to use: %s" % self.feature else: use = "" if self.source: source = " from %s" % self.source else: source = "" if self.comment: comment = ". %s" % self.comment else: comment = "" raise MissingPackageError("Optional package '%s' is not installed. " "Please install the package%s%s%s" % (self.package, source, use, comment)) def optional_import(name, feature=None, source=None, comment=None): """Optionally import package `name`. If package does not exist, import a placeholder object, that raises an exception with more detailed description about the missing package.""" try: return __import__(
name) except ImportError: return MissingPackage(name, feature, source, comment) def expand_dictionary(record, separator = '.'): """Return expanded dictionary: treat keys are paths separated by `separator`, create sub-dictionaries as necessa
ry""" result = {} for key, value in record.items(): current = result path = key.split(separator) for part in path[:-1]: if part not in current: current[part] = {} current = current[part] current[path[-1]] = value return result def localize_common(obj, trans): """Localize common attributes: label and description""" if "label" in trans: obj.label = trans["label"] if "description" in trans: obj.description = trans["description"] def localize_attributes(attribs, translations): """Localize list of attributes. `translations` should be a dictionary with keys as attribute names, values are dictionaries with localizable attribute metadata, such as ``label`` or ``description``.""" for (name, atrans) in translations.items(): attrib = attribs[name] localize_common(attrib, atrans) def get_localizable_attributes(obj): """Returns a dictionary with localizable attributes of `obj`.""" # FIXME: use some kind of class attribute to get list of localizable attributes locale = {} try: if obj.label: locale["label"] = obj.label except: pass try: if obj.description: locale["description"] = obj.description except: pass return locale def decamelize(name): s1 = re.sub('(.)([A-Z][a-z]+)', r'\1 \2', name) return re.sub('([a-z0-9])([A-Z])', r'\1 \2', s1) def to_identifier(name): return re.sub(r' ', r'_', name).lower() def to_label(name, capitalize=True): """Converts `name` into label by replacing underscores by spaces. If `capitalize` is ``True`` (default) then the first letter of the label is capitalized.""" label = name.replace("_", " ") if capitalize: label = label.capitalize() return label def coalesce_option_value(value, value_type, label=None): """Convert string into an object value of `value_type`. The type might be: `string` (no conversion), `integer`, `float`, `list` – comma separated list of strings. 
""" value_type = value_type.lower() try: if value_type in ('string', 'str'): return_value = str(value) elif value_type == 'list': if isinstance(value, compat.string_type): return_value = value.split(",") else: return_value = list(value) elif value_type == "float": return_value = float(value) elif value_type in ["integer", "int"]: return_value = int(value) elif value_type in ["bool", "boolean"]: if not value: return_value = False elif isinstance(value, compat.string_type): return_value = value.lower() in ["1", "true", "yes", "on"] else: return_value = bool(value) else: raise ArgumentError("Unknown option value type %s" % value_type) except ValueError: if label: label = "parameter %s " % label else: label = "" raise ArgumentError("Unable to convert %svalue '%s' into type %s" % (label, astring, value_type)) return return_value def coalesce_options(options, types): """Coalesce `options` dictionary according to types dictionary. Keys in `types` refer to keys in `options`, values of `types` are value types: string, list, float, integer or bool.""" out = {} for key, value in options.items(): if key in types: out[key] = coalesce_option_value(value, types[key], key) else: out[key] = value return out def read_json_file(path, kind=None): """Read a JSON from `path`. This is convenience function that provides more descriptive exception handling.""" kind = "%s " % str(kind) if kind else "" if not os.path.exists(path): raise ConfigurationError("Can not find %sfile '%s'" % (kind, path)) try: f = compat.open_unicode(path) except IOError: raise ConfigurationError("Can not open %sfile '%s'" % (kind, path)) try: content = json.load(f) except ValueError as e: raise SyntaxError("Syntax error in %sfile %s: %s" % (kind, path, str(e))) finally: f.close() return content def sorted_dependencies(graph): """Return keys from `deps` ordered by dependency (topological sort). 
`deps` is a dictionary where keys are strings and values are list of strings where keys is assumed to be dependant on values. Example:: A ---> B -+--> C | +--> D --> E Will be: ``{"A": ["B"], "B": ["C", "D"], "D": ["E"],"E": []}`` """ graph = dict((key, set(value)) for key, value in graph.items()) # L ← Empty list that will contain the sorted elements L = [] # S ← Set of all nodes with no dependencies (incoming edges) S = set(parent for parent, req in graph.items() if not req) while S: # remove a node n from S n = S.pop() # insert n into L L.append(n) # for each node m with an edge e from n to m do # (n that depends on m) parents = [parent for parent, req in graph.items() if n in req] for parent in parents: graph[parent].remove(n) # remove edge e from the graph # if m has no other incoming edges then insert m into S if not graph[parent]: S.add(pare
import math


class P4(object):
    """Accessor mix-in for a particle's 4-momentum.

    Assumes the host object stores a TLorentzVector-like object in
    ``self._tlv`` (set elsewhere) — TODO confirm against the subclasses.
    """

    def p4(self):
        '''4-momentum, px, py, pz, E'''
        return self._tlv

    def p3(self):
        '''3-momentum px, py, pz'''
        return self._tlv.Vect()

    def e(self):
        '''energy'''
        return self._tlv.E()

    def pt(self):
        '''transverse momentum (magnitude of p3 in transverse plane)'''
        return self._tlv.Pt()

    def theta(self):
        '''angle w/r to transverse plane'''
        return math.pi / 2 - self._tlv.Theta()

    def eta(self):
        '''pseudo-rapidity (-ln(tan self._tlv.Theta()/2)).

        theta = 0    -> eta = +inf
        theta = pi/2 -> 0
        theta = pi   -> eta = -inf
        '''
        return self._tlv.Eta()

    def phi(self):
        '''azymuthal angle (from x axis, in the transverse plane)'''
        return self._tlv.Phi()

    def m(self):
        '''mass'''
        return self._tlv.M()

    def __str__(self):
        # BUG FIX: the pt field previously used the {e} placeholder, so the
        # energy was printed twice and the transverse momentum never appeared
        # (the pt= keyword argument was passed but unused).
        return ('pt = {pt:5.1f}, e = {e:5.1f}, eta = {eta:5.2f}, '
                'theta = {theta:5.2f}, phi = {phi:5.2f}, mass = {m:5.2f}').format(
            pt=self.pt(),
            e=self.e(),
            eta=self.eta(),
            theta=self.theta(),
            phi=self.phi(),
            m=self.m()
        )
from django.conf.urls import url
from .viewsets import BookmarkViewSet

# Bind the viewset's collection actions to HTTP methods:
# GET -> list, POST -> create.
bookmark_list = BookmarkViewSet.as_view({
    'get': 'list',
    'post': 'create'
})

# Bind the single-object actions:
# GET -> retrieve, PATCH -> update, DELETE -> destroy.
bookmark_detail = BookmarkViewSet.as_view({
    'get': 'retrieve',
    'patch': 'update',
    'delete': 'destroy'
})

# /bookmarks/       -> collection endpoint
# /bookmarks/<pk>/  -> detail endpoint (numeric primary key only)
urlpatterns = [
    url(r'^bookmarks/$', bookmark_list, name='bookmarks'),
    url(r'^bookmarks/(?P<pk>[0-9]+)/$', bookmark_detail, name='bookmark'),
]
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright (c) 2018 Dell EMC Inc. # GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'status': ['preview'], 'supported_by': 'community', 'metadata_version': '1.1'} DOCUMENTATION = ''' --- module: idrac_redfish_command version_added: "2.8" short_description: Manages Out-Of-Band controllers using iDRAC OEM Redfish APIs description: - Builds Redfish URIs locally and sends them to remote OOB controllers to perform an action. - For use with Dell iDRAC operations that require Redfish OEM extensions options: category: required: true description: - Category to execute on OOB controller command: required: true description: - List of commands to execute on OOB controller baseuri: required: true description: - Base URI of OOB controller username: required: true description: - User for authentication with OOB controller password: required: true description: - Password for authentication with OOB controller timeout: description: - Timeout in seconds for URL requests to OOB controller default: 10 type: int version_added: '2.8' author: "Jose Delarosa (@jose-delarosa)" ''' EXAMPLES = ''' - name: Create BIOS configuration job (schedule BIOS setting update) idrac_redfish_command: category: Systems command: CreateBiosConfigJob baseuri: "{{ baseuri }}" username: "{{ username }}" password: "{{ password }}" ''' RETURN = ''' msg: description: Message with action result or error description returned: always type: str sample: "Action was successful" ''' import re from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.redfish_utils import RedfishUtils, HEADERS from ansible.module_utils._text import to_native class IdracRedfishUtils(RedfishUtils): def create_bios_config_job(self): result = {} key = "Bios" jobs = "Jobs" # Search for 'key' entry and extract URI from it response = 
self.get_request(self.root_uri + self.systems_uris[0]) if response['ret'] is False: return response result['ret'] = True data = response['data'] if key not in data: return {'ret': False, 'msg': "Key %s not found" % key} bios_uri = data[key]["@odata.id"] # Ex
tract proper URI response = self.get_request(self.root_uri + bios_uri) if response['ret'] is False: return response result['ret'] = True data = response['data'] set_bios_attr_uri = data["@Redfish.Settings"]["SettingsObject"][
"@odata.id"] payload = {"TargetSettingsURI": set_bios_attr_uri} response = self.post_request( self.root_uri + self.manager_uri + "/" + jobs, payload, HEADERS) if response['ret'] is False: return response response_output = response['resp'].__dict__ job_id = response_output["headers"]["Location"] job_id = re.search("JID_.+", job_id).group() # Currently not passing job_id back to user but patch is coming return {'ret': True, 'msg': "Config job %s created" % job_id} CATEGORY_COMMANDS_ALL = { "Systems": ["CreateBiosConfigJob"], "Accounts": [], "Manager": [] } def main(): result = {} module = AnsibleModule( argument_spec=dict( category=dict(required=True), command=dict(required=True, type='list'), baseuri=dict(required=True), username=dict(required=True), password=dict(required=True, no_log=True), timeout=dict(type='int', default=10) ), supports_check_mode=False ) category = module.params['category'] command_list = module.params['command'] # admin credentials used for authentication creds = {'user': module.params['username'], 'pswd': module.params['password']} # timeout timeout = module.params['timeout'] # Build root URI root_uri = "https://" + module.params['baseuri'] rf_uri = "/redfish/v1/" rf_utils = IdracRedfishUtils(creds, root_uri, timeout) # Check that Category is valid if category not in CATEGORY_COMMANDS_ALL: module.fail_json(msg=to_native("Invalid Category '%s'. Valid Categories = %s" % (category, CATEGORY_COMMANDS_ALL.keys()))) # Check that all commands are valid for cmd in command_list: # Fail if even one command given is invalid if cmd not in CATEGORY_COMMANDS_ALL[category]: module.fail_json(msg=to_native("Invalid Command '%s'. 
Valid Commands = %s" % (cmd, CATEGORY_COMMANDS_ALL[category]))) # Organize by Categories / Commands if category == "Systems": # execute only if we find a System resource result = rf_utils._find_systems_resource(rf_uri) if result['ret'] is False: module.fail_json(msg=to_native(result['msg'])) for command in command_list: if command == "CreateBiosConfigJob": # execute only if we find a Managers resource result = rf_utils._find_managers_resource(rf_uri) if result['ret'] is False: module.fail_json(msg=to_native(result['msg'])) result = rf_utils.create_bios_config_job() # Return data back or fail with proper message if result['ret'] is True: del result['ret'] module.exit_json(changed=True, msg='Action was successful') else: module.fail_json(msg=to_native(result['msg'])) if __name__ == '__main__': main()
# -*- coding: utf-8 -*-
# Copyright 2015 Eficent - Jordi Ballester Alomar
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).

from odoo import api, fields, models


class AnalyticAccountOpen(models.TransientModel):
    # Wizard that opens the analytic-account act_window filtered down to one
    # selected account, optionally including all of its descendants.
    _name = 'analytic.account.open'
    _description = 'Open single analytic account'

    analytic_account_id = fields.Many2one(
        'account.analytic.account', 'Analytic Account', required=True
    )
    include_child = fields.Boolean(
        'Include child accounts', default=True
    )

    @api.model
    def _get_child_analytic_accounts(self, curr_id):
        """Return a dict keyed by `curr_id` plus the ids of all descendant
        analytic accounts (the dict is used as an ordered id set; values
        are always True)."""
        result = {}
        result[curr_id] = True
        # Now add the children
        # Recursive CTE walks the parent_id tree downward from curr_id.
        self.env.cr.execute('''
        WITH RECURSIVE children AS (
        SELECT parent_id, id
        FROM account_analytic_account
        WHERE parent_id = %s
        UNION ALL
        SELECT a.parent_id, a.id
        FROM account_analytic_account a
        JOIN children b ON(a.parent_id = b.id)
        )
        SELECT * FROM children order by parent_id
        ''', (curr_id,))
        res = self.env.cr.fetchall()
        for x, y in res:
            # x is the parent_id (unused), y is the child account id.
            result[y] = True
        return result

    @api.multi
    def analytic_account_open_window(self):
        """Return the analytic-account act_window description with its domain
        restricted to the chosen account (and its children if requested)."""
        self.ensure_one()
        act_window_id = self.env.ref(
            'analytic.action_account_analytic_account_form')
        result = act_window_id.read()[0]
        acc_id = self.analytic_account_id.id
        acc_ids = []
        if self.include_child:
            # NOTE: this returns a dict; iterating it below yields the ids.
            acc_ids = self._get_child_analytic_accounts(acc_id)
        else:
            acc_ids.append(acc_id)
        result['domain'] = "[('id','in', ["+','.join(map(str, acc_ids))+"])]"
        return result
# -*- coding: utf-8 -*-
### BEGIN LICENSE
# Copyright (C) 2009 Philip Peitsch <philip.peitsch@gmail.com>
#This program is free software: you can redistribute it and/or modify it
#under the terms of the GNU General Public License version 3, as published
#by the Free Software Foundation.
#
#This program is distributed in the hope that it will be useful, but
#WITHOUT ANY WARRANTY; without even the implied warranties of
#MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
#PURPOSE.  See the GNU General Public License for more details.
#
#You should have received a copy of the GNU General Public License along
#with this program.  If not, see <http://www.gnu.org/licenses/>.
### END LICENSE

import sys
import os
import gtk

from hudsonnotifier.hudsonnotifierconfig import getdatapath


class AboutHudsonnotifierDialog(gtk.AboutDialog):
    __gtype_name__ = "AboutHudsonnotifierDialog"

    def __init__(self):
        """__init__ - This function is typically not called directly.
        Creation of a AboutHudsonnotifierDialog requires reading the
        associated ui file and parsing the ui definition externally,
        and then calling AboutHudsonnotifierDialog.finish_initializing().

        Use the convenience function NewAboutHudsonnotifierDialog to create
        NewAboutHudsonnotifierDialog objects.
        """
        pass

    def finish_initializing(self, builder):
        """finish_initializing should be called after parsing the ui
        definition and creating a AboutHudsonnotifierDialog object with it in
        order to finish initializing the start of the new
        AboutHudsonnotifierDialog instance.
        """
        # get a reference to the builder and set up the signals
        self.builder = builder
        self.builder.connect_signals(self)

        # code for other initialization actions should be added here


def NewAboutHudsonnotifierDialog():
    """NewAboutHudsonnotifierDialog - returns a fully instantiated
    AboutHudsonnotifierDialog object. Use this function rather than creating
    a AboutHudsonnotifierDialog instance directly.

    Raises IOError when the ui definition file cannot be found.
    """
    # look for the ui file that describes the ui
    ui_filename = os.path.join(getdatapath(), 'ui',
                               'AboutHudsonnotifierDialog.ui')
    # BUG FIX: a missing ui file used to set ui_filename to None and then
    # crash inside builder.add_from_file(None) with an opaque error; fail
    # early with a clear message instead.
    if not os.path.exists(ui_filename):
        raise IOError("UI definition file not found: %s" % ui_filename)

    builder = gtk.Builder()
    builder.add_from_file(ui_filename)
    dialog = builder.get_object("about_hudsonnotifier_dialog")
    dialog.finish_initializing(builder)
    return dialog


if __name__ == "__main__":
    dialog = NewAboutHudsonnotifierDialog()
    dialog.show()
    gtk.main()
self.source is None: mod_type = self.etc[2] if mod_type==imp.PY_SOURCE: self._reopen() try: self.source = self.file.read() finally: self.file.close() elif mod_type==imp.PY_COMPILED: if os.path.exists(self.filename[:-1]): f = open(self.filename[:-1], 'rU') self.source = f.read() f.close() elif mod_type==imp.PKG_DIRECTORY: self.source = self._get_delegate().get_source() return self.source def _get_delegate(self): return ImpImporter(self.filename).find_module('__init__') def get_filename(self, fullname=None): if self.etc[2]==imp.PKG_DIRECTORY: return self._get_delegate().get_filename() elif self.etc[2] in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION): return self.filename return None try: import zipimport from zipimport import zipimporter def iter_zipimport_modules(importer, prefix=''): dirlist = zipimport._zip_directory_cache[importer.archive].keys() dirlist.sort() _prefix = importer.prefix plen = len(_prefix) yielded = {} import inspect for fn in dirlist: if not fn.startswith(_prefix): continue fn = fn[plen:].split(os.sep) if len(fn)==2 and fn[1].startswith('__init__.py'): if fn[0] not in yielded: yielded[fn[0]] = 1 yield fn[0], True if len(fn)!=1: continue modname = inspect.getmodulename(fn[0]) if modname=='__init__': continue if modname and '.' not in modname and modname not in yielded: yielded[modname] = 1 yield prefix + modname, False iter_importer_modules.register(zipimporter, iter_zipimport_modules) except ImportError: pass def get_importer(path_item): """Retrieve a PEP 302 importer for the given path item The returned importer is cached in sys.path_importer_cache if it was newly created by a path hook. If there is no importer, a wrapper around the basic import machinery is returned. This wrapper is never inserted into the importer cache (None is inserted instead). The cache (or part of it) can be cleared manually if a rescan of sys.path_hooks is necessary. 
""" if type(path_item) == unicode: path_item = path_item.encode(sys.getfilesystemencoding()) try: importer = sys.path_importer_cache[path_item] except KeyError: for path_hook in sys.path_hooks: try: importer = path_hook(path_item) break except ImportError: pass else: importer = None sys.path_importer_cache.setdefault(path_item, importer) if importer is None: try: importer = ImpImporter(path_item) except ImportError: importer = None return importer def iter_importers(fullname=""): """Yield PEP 302 importers for the given module name If fullname contains a '.', the importers will be for the package containing fullname, otherwise they will be importers for sys.meta_path, sys.path, and Python's "classic" import machinery, in that order. If the named module is in a package, that package is imported as a side effect of invoking this function. Non PEP 302 mechanisms (e.g. the Windows registry) used by the standard import machinery to find files in alternative locations are partially supported, but are searched AFTER sys.path. Normally, these locations are searched BEFORE sys.path, preventing sys.path entries from shadowing them. For this to
cause a visible difference in behaviour, there must be a module or package name that is accessible via both sys.path and one of the non PEP 302 file system mechanisms. In this case, the emulation will find the form
er version, while the builtin import mechanism will find the latter. Items of the following types can be affected by this discrepancy: imp.C_EXTENSION, imp.PY_SOURCE, imp.PY_COMPILED, imp.PKG_DIRECTORY """ if fullname.startswith('.'): raise ImportError("Relative module names not supported") if '.' in fullname: # Get the containing package's __path__ pkg = '.'.join(fullname.split('.')[:-1]) if pkg not in sys.modules: __import__(pkg) path = getattr(sys.modules[pkg], '__path__', None) or [] else: for importer in sys.meta_path: yield importer path = sys.path for item in path: yield get_importer(item) if '.' not in fullname: yield ImpImporter() def get_loader(module_or_name): """Get a PEP 302 "loader" object for module_or_name If the module or package is accessible via the normal import mechanism, a wrapper around the relevant part of that machinery is returned. Returns None if the module cannot be found or imported. If the named module is not already imported, its containing package (if any) is imported, in order to establish the package __path__. This function uses iter_importers(), and is thus subject to the same limitations regarding platform-specific special import locations such as the Windows registry. """ if module_or_name in sys.modules: module_or_name = sys.modules[module_or_name] if isinstance(module_or_name, ModuleType): module = module_or_name loader = getattr(module, '__loader__', None) if loader is not None: return loader fullname = module.__name__ else: fullname = module_or_name return find_loader(fullname) def find_loader(fullname): """Find a PEP 302 "loader" object for fullname If fullname contains dots, path must be the containing package's __path__. Returns None if the module cannot be found or imported. This function uses iter_importers(), and is thus subject to the same limitations regarding platform-specific special import locations such as the Windows registry. 
""" for importer in iter_importers(fullname): loader = importer.find_module(fullname) if loader is not None: return loader return None def extend_path(path, name): """Extend a package's path. Intended use is to place the following code in a package's __init__.py: from pkgutil import extend_path __path__ = extend_path(__path__, __name__) This will add to the package's __path__ all subdirectories of directories on sys.path named after the package. This is useful if one wants to distribute different parts of a single logical package as multiple directories. It also looks for *.pkg files beginning where * matches the name argument. This feature is similar to *.pth files (see site.py), except that it doesn't special-case lines starting with 'import'. A *.pkg file is trusted at face value: apart from checking for duplicates, all entries found in a *.pkg file are added to the path, regardless of whether they are exist the filesystem. (This is a feature.) If the input path is not a list (as is the case for frozen packages) it is returned unchanged. The input path is not modified; an extended copy is returned. Items are only appended to the copy at the end. It is assumed that sys.path is a sequence. Items of sys.path that are not (unicode or 8-bit) strings referring to existing directories are ignored. Unicode items of sys.path that cause errors when used as filenames may cause this function to raise an exception (in line with os.path.isdir() behavior). """ if not isinstance(path, list): # This could happen e.g. when this is called from inside a # frozen package. Return the path unchanged in that case. return
out.insert(0, _read_memory_buffer((layer.n, ), layer.mask, dtype='int32')) layer_outshape = (layer.batch, layer.out_c, layer.out_h, layer.out_w) out.insert(0, _read_memory_buffer(layer_outshape, layer.output)) elif i == net.n-1: if layer.type == LAYERTYPE.CONNECTED: darknet_outshape = (layer.batch, layer.out_c) elif layer.type in [LAYERTYPE.SOFTMAX]: darknet_outshape = (layer.batch, layer.outputs) else: darknet_outshape = (layer.batch, layer.out_c, layer.out_h, layer.out_w) out.insert(0, _read_memory_buffer(darknet_outshape, layer.output)) return out dtype = 'float32' img = LIB.letterbox_image(LIB.load_image_color(DARKNET_TEST_IMAGE_PATH.encode('utf-8'), 0, 0), net.w, net.h) darknet_output = get_darknet_output(net, img) batch_size = 1 data = np.empty([batch_size, img.c, img.h, img.w], dtype) i = 0 for c in range(img.c): for h in range(img.h): for k in range(img.w): data[0][c][h][k] = img.data[i] i = i + 1 tvm_out = _get_tvm_output(net, data, build_dtype) for tvm_outs, darknet_out in zip(tvm_out, darknet_output): tvm.testing.assert_allclose(darknet_out, tvm_outs, rtol=1e-3, atol=1e-3) def verify_rnn_forward(net): '''Test network with given input data on both darknet and tvm''' def get_darknet_network_predict(net, data): return LIB.network_predict(net, data) from cffi import FFI ffi = FFI() np_arr = np.zeros([1, net.inputs], dtype='float32') np_arr[0,
84] = 1 cffi_arr = ffi.cast('float*', np_arr.ctypes.data) tvm_out = _get_tvm_output(net, np_arr)[0] darknet_output = get_darknet_network_predict(net, cffi_arr) darknet_out = np.zeros(net.outputs, dtype='float32') for i in range(net.outputs): darknet_out[i] = darknet_output[i] last_layer = net.layers[net.n-1] darknet_o
utshape = (last_layer.batch, last_layer.outputs) darknet_out = darknet_out.reshape(darknet_outshape) tvm.testing.assert_allclose(darknet_out, tvm_out, rtol=1e-4, atol=1e-4) def test_forward_extraction(): '''test extraction model''' model_name = 'extraction' cfg_name = model_name + '.cfg' weights_name = model_name + '.weights' cfg_url = 'https://github.com/pjreddie/darknet/blob/master/cfg/' + cfg_name + '?raw=true' weights_url = 'http://pjreddie.com/media/files/' + weights_name + '?raw=true' net = _load_net(cfg_url, cfg_name, weights_url, weights_name) verify_darknet_frontend(net) LIB.free_network(net) def test_forward_alexnet(): '''test alexnet model''' model_name = 'alexnet' cfg_name = model_name + '.cfg' weights_name = model_name + '.weights' cfg_url = 'https://github.com/pjreddie/darknet/blob/master/cfg/' + cfg_name + '?raw=true' weights_url = 'http://pjreddie.com/media/files/' + weights_name + '?raw=true' net = _load_net(cfg_url, cfg_name, weights_url, weights_name) verify_darknet_frontend(net) LIB.free_network(net) def test_forward_resnet50(): '''test resnet50 model''' model_name = 'resnet50' cfg_name = model_name + '.cfg' weights_name = model_name + '.weights' cfg_url = 'https://github.com/pjreddie/darknet/blob/master/cfg/' + cfg_name + '?raw=true' weights_url = 'http://pjreddie.com/media/files/' + weights_name + '?raw=true' net = _load_net(cfg_url, cfg_name, weights_url, weights_name) verify_darknet_frontend(net) LIB.free_network(net) def test_forward_yolov2(): '''test yolov2 model''' model_name = 'yolov2' cfg_name = model_name + '.cfg' weights_name = model_name + '.weights' cfg_url = 'https://github.com/pjreddie/darknet/blob/master/cfg/' + cfg_name + '?raw=true' weights_url = 'http://pjreddie.com/media/files/' + weights_name + '?raw=true' net = _load_net(cfg_url, cfg_name, weights_url, weights_name) build_dtype = {} verify_darknet_frontend(net, build_dtype) LIB.free_network(net) def test_forward_yolov3(): '''test yolov3 model''' model_name = 'yolov3' 
cfg_name = model_name + '.cfg' weights_name = model_name + '.weights' cfg_url = 'https://github.com/pjreddie/darknet/blob/master/cfg/' + cfg_name + '?raw=true' weights_url = 'http://pjreddie.com/media/files/' + weights_name + '?raw=true' net = _load_net(cfg_url, cfg_name, weights_url, weights_name) build_dtype = {} verify_darknet_frontend(net, build_dtype) LIB.free_network(net) def test_forward_convolutional(): '''test convolutional layer''' net = LIB.make_network(1) layer = LIB.make_convolutional_layer(1, 224, 224, 3, 32, 1, 3, 2, 0, 1, 0, 0, 0, 0) net.layers[0] = layer net.w = net.h = 224 LIB.resize_network(net, 224, 224) verify_darknet_frontend(net) LIB.free_network(net) def test_forward_dense(): '''test fully connected layer''' net = LIB.make_network(1) layer = LIB.make_connected_layer(1, 75, 20, 1, 0, 0) net.layers[0] = layer net.w = net.h = 5 LIB.resize_network(net, 5, 5) verify_darknet_frontend(net) LIB.free_network(net) def test_forward_dense_batchnorm(): '''test fully connected layer with batchnorm''' net = LIB.make_network(1) layer = LIB.make_connected_layer(1, 12, 2, 1, 1, 0) for i in range(5): layer.rolling_mean[i] = np.random.rand(1) layer.rolling_variance[i] = np.random.rand(1) layer.scales[i] = np.random.rand(1) net.layers[0] = layer net.w = net.h = 2 LIB.resize_network(net, 2, 2) verify_darknet_frontend(net) LIB.free_network(net) def test_forward_maxpooling(): '''test maxpooling layer''' net = LIB.make_network(1) layer = LIB.make_maxpool_layer(1, 224, 224, 3, 2, 2, 0) net.layers[0] = layer net.w = net.h = 224 LIB.resize_network(net, 224, 224) verify_darknet_frontend(net) LIB.free_network(net) def test_forward_avgpooling(): '''test avgerage pooling layer''' net = LIB.make_network(1) layer = LIB.make_avgpool_layer(1, 224, 224, 3) net.layers[0] = layer net.w = net.h = 224 LIB.resize_network(net, 224, 224) verify_darknet_frontend(net) LIB.free_network(net) def test_forward_batch_norm(): '''test batch normalization layer''' net = LIB.make_network(1) 
layer = LIB.make_convolutional_layer(1, 224, 224, 3, 32, 1, 3, 2, 0, 1, 1, 0, 0, 0) for i in range(32): layer.rolling_mean[i] = np.random.rand(1) layer.rolling_variance[i] = np.random.rand(1) net.layers[0] = layer net.w = net.h = 224 LIB.resize_network(net, 224, 224) verify_darknet_frontend(net) LIB.free_network(net) def test_forward_shortcut(): '''test shortcut layer''' net = LIB.make_network(3) layer_1 = LIB.make_convolutional_layer(1, 224, 224, 3, 32, 1, 3, 2, 0, 1, 0, 0, 0, 0) layer_2 = LIB.make_convolutional_layer(1, 111, 111, 32, 32, 1, 1, 1, 0, 1, 0, 0, 0, 0) layer_3 = LIB.make_shortcut_layer(1, 0, 111, 111, 32, 111, 111, 32) layer_3.activation = 1 layer_3.alpha = 1 layer_3.beta = 1 net.layers[0] = layer_1 net.layers[1] = layer_2 net.layers[2] = layer_3 net.w = net.h = 224 LIB.resize_network(net, 224, 224) verify_darknet_frontend(net) LIB.free_network(net) def test_forward_reorg(): '''test reorg layer''' net = LIB.make_network(2) layer_1 = LIB.make_convolutional_layer(1, 222, 222, 3, 32, 1, 3, 2, 0, 1, 0, 0, 0, 0) layer_2 = LIB.make_reorg_layer(1, 110, 110, 32, 2, 0, 0, 0) net.layers[0] = layer_1 net.layers[1] = layer_2 net.w = net.h = 222 LIB.resize_network(net, 222, 222) verify_darknet_frontend(net) LIB.free_network(net) def test_forward_region(): '''test region layer''' net = LIB.make_network(2) layer_1 = LIB.make_convolutional_layer(1, 19, 19, 3, 425, 1, 1, 1, 0, 1, 0, 0, 0, 0) layer_2 = LIB.make_region_layer(1, 19, 19, 5, 80, 4) layer_2.softmax = 1 net.layers[0] = layer_
# -*- coding: utf-8 -*-
"""
Layer.py - base layer for gabbs maps
======================================================================
AUTHOR: Wei Wan, Purdue University
EMAIL: rcac-help@purdue.edu
Copyright (c) 2016 Purdue University
See the file "license.terms" for information on usage and
redistribution of this file, and for a DISCLAIMER OF ALL WARRANTIES.
======================================================================
"""

from os.path import isfile

from PyQt4.QtGui import QAction, QIcon
from qgis.gui import *
from gabbs.layers.LayerProperty import *
from gabbs.MapUtils import iface, debug_trace
import math


class Layer(object):
    """Common base class for gabbs map layers."""

    # Layer type name shown in the menu.
    layerName = None
    # Group icon used in the menu.
    layerIcon = None
    # Layer type identificator used to store in project.
    layerTypeName = None
    # Numerical ID used in versions < 2.3.
    layerTypeId = None
    layerId = None
    # The two underlying qgis objects held by this wrapper.
    layer = None
    layerAction = None
    layerAttribution = None

    def __init__(self):
        object.__init__(self)

    def getLayer(self):
        """Return the wrapped qgis layer object."""
        return self.layer

    def getLayerId(self):
        """Return the stored layer id."""
        return self.layerId

    def setAddLayerCallback(self, addLayerCallback):
        """Register post-processing for the canvas add-layer step."""
        self.addLayerCallback = addLayerCallback

    def loadStyleFile(self, symPath):
        """Apply a QGIS named style file; return True only on success."""
        if not isfile(symPath):
            return False
        result = self.layer.loadNamedStyle(symPath)
        # loadNamedStyle returns a (message, ok-flag) pair; only the
        # flag decides success here.
        return True if result[1] else False

    def getScale(self, zoomlevel):
        """Convert a web-map zoom level into a map scale denominator."""
        dpi = iface.mainWindow.physicalDpiX()
        inches_per_meter = 39.37
        max_scale_per_pixel = 156543.04
        try:
            level = int(zoomlevel)
            denominator = (dpi * inches_per_meter * max_scale_per_pixel) / (math.pow(2, level))
            return int(denominator)
        except TypeError:
            raise
        except Exception as e:
            raise e
cos
t, zeros = map(int, input().split()) print(int(round(cost, -zeros
)))
# Copyright 2015 Mirantis, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

# Configuration option definitions for the vSphere DVS agent/driver.
# Options are registered on the oslo.config groups "DVS", "DVS_AGENT" and
# "ML2_VMWARE" at the bottom of this module.

from oslo_config import cfg

from networking_vsphere._i18n import _

from neutron.agent.common import config

# Empty defaults for the list-typed mapping/range options.
DEFAULT_BRIDGE_MAPPINGS = []
DEFAULT_UPLINK_MAPPINGS = []
# NOTE(review): the three defaults below are defined but not referenced in
# this module -- presumably kept for parity with other agents; verify.
DEFAULT_VLAN_RANGES = []
DEFAULT_TUNNEL_RANGES = []
DEFAULT_TUNNEL_TYPES = []

# Agent main-loop and state-reporting options ("DVS_AGENT" group).
agent_opts = [
    cfg.IntOpt('polling_interval', default=2,
               help=_("The number of seconds the agent will wait between "
                      "polling for local device changes.")),
    cfg.IntOpt('quitting_rpc_timeout', default=10,
               help=_("Set new timeout in seconds for new rpc calls after "
                      "agent receives SIGTERM. If value is set to 0, rpc "
                      "timeout won't be changed")),
    cfg.BoolOpt('log_agent_heartbeats', default=False,
                help=_("Log agent heartbeats")),
    cfg.IntOpt('report_interval', default=30,
               help='Seconds between nodes reporting state to server.'),
]

# vSphere endpoint, credentials and connection behaviour ("ML2_VMWARE" group).
vmware_opts = [
    cfg.FloatOpt(
        'task_poll_interval',
        default=2,
        help=_('The interval of task polling in seconds.')),
    cfg.IntOpt(
        'api_retry_count',
        default=10,
        help=_('number of times an API must be retried upon '
               'session/connection related errors')),
    cfg.IntOpt(
        'connections_pool_size',
        default=100,
        help=_('number of vsphere connections pool '
               'must be higher for intensive operations')),
    cfg.StrOpt('vsphere_login', default='administrator',
               help=_("Vsphere login.")),
    cfg.ListOpt('network_maps',
                default=DEFAULT_BRIDGE_MAPPINGS,
                help=_("List of <physical_network>:<bridge>.")),
    cfg.ListOpt('uplink_maps',
                default=DEFAULT_UPLINK_MAPPINGS,
                help=_("List of <physical_network>:<active uplinks>:"
                       "<failover uplinks>."
                       "Use semicolon between uplink names")),
    cfg.StrOpt('vsphere_hostname', default='vsphere',
               help=_("Vsphere host name or IP.")),
    cfg.StrOpt('vsphere_password', default='',
               help=_("Vsphere password.")),
]

# DVS housekeeping behaviour ("DVS" group).
dvs_opts = [
    cfg.BoolOpt('clean_on_restart',
                default=True,
                help=_("Run DVS cleaning procedure on agent restart.")),
    cfg.BoolOpt('precreate_networks',
                default=False,
                help=_("Precreate networks on DVS")),
]

cfg.CONF.register_opts(dvs_opts, "DVS")
cfg.CONF.register_opts(agent_opts, "DVS_AGENT")
cfg.CONF.register_opts(vmware_opts, "ML2_VMWARE")
# Also register the standard neutron agent state-reporting options.
config.register_agent_state_opts_helper(cfg.CONF)
CONF = cfg.CONF
#!/usr/bin/env python
#
# Copyright (C) 2012 Jay Sigbrandt <jsigbrandt@slb.com>
#                    Martin Owens <doctormo@gmail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library.
#
"""
Test crontab usage.
"""

import os
import sys
import unittest
import crontab

from datetime import date, time, datetime, timedelta

# Python 2 exposes test.test_support; Python 3 renamed it to test.support.
try:
    from test import test_support
except ImportError:
    from test import support as test_support

# Silence crontab's own logging so expected failures don't spam the output.
crontab.LOG.setLevel(crontab.logging.ERROR)

TEST_DIR = os.path.dirname(__file__)


class DummyStdout(object):
    # Stand-in for sys.stdout that swallows everything written to it.
    def write(self, text):
        pass


# Fixture crontab contents served back by the fake cron command below.
BASIC = '@hourly firstcommand\n\n'
USER = '\n*/4 * * * * user_command # user_comment\n\n\n'

# Point the module at a fake cron binary so no real crontab is touched.
crontab.CRONCMD = "%s %s" % (sys.executable, os.path.join(TEST_DIR, 'data', 'crontest'))


def flush():
    # No-op flush() patched onto DummyStdout in test_03_usage.
    pass


class Attribute(object):
    """Context manager that temporarily sets obj.attr to value."""

    def __init__(self, obj, attr, value):
        self.obj = obj
        self.attr = attr
        self.value = value

    def __enter__(self, *args, **kw):
        # Remember the old value only if the attribute existed, so
        # __exit__ knows whether to restore it or delete the attribute.
        if hasattr(self.obj, self.attr):
            self.previous = getattr(self.obj, self.attr)
        setattr(self.obj, self.attr, self.value)

    def __exit__(self, *args, **kw):
        if hasattr(self, 'previous'):
            setattr(self.obj, self.attr, self.previous)
        else:
            delattr(self.obj, self.attr)


class UseTestCase(unittest.TestCase):
    """Test use documentation in crontab."""

    def setUp(self):
        # Files created during a test; removed again in tearDown.
        self.filenames = []

    def test_01_empty(self):
        """Open system crontab"""
        cron = crontab.CronTab()
        self.assertEqual(cron.render(), "")
        self.assertEqual(cron.__unicode__(), "")
        self.assertEqual(repr(cron), "<Unattached CronTab>")

    def test_02_user(self):
        """Open a user's crontab"""
        cron = crontab.CronTab(user='basic')
        self.assertEqual(cron.render(), BASIC)
        self.assertEqual(repr(cron), "<User CronTab 'basic'>")

    def test_03_usage(self):
        """Dont modify crontab"""
        cron = crontab.CronTab(tab='')
        # Execute the module docstring's usage examples with stdout
        # silenced; they must not modify the (empty) crontab.
        sys.stdout = DummyStdout()
        sys.stdout.flush = flush
        try:
            exec(crontab.__doc__)
        except ImportError:
            pass
        sys.stdout = sys.__stdout__
        self.assertEqual(cron.render(), '')

    def test_04_username(self):
        """Username is True"""
        cron = crontab.CronTab(user=True)
        # user=True means "current user"; it must be resolved to a name.
        self.assertNotEqual(cron.user, True)
        self.assertEqual(cron.render(), USER)
        self.assertEqual(repr(cron), "<My CronTab>")

    def test_05_nouser(self):
        """Username doesn't exist"""
        cron = crontab.CronTab(user='nouser')
        self.assertEqual(cron.render(), '')

    def test_06_touser(self):
        """Write to use API"""
        cron = crontab.CronTab(tab=USER)
        self.assertEqual(repr(cron), "<Unattached CronTab>")
        cron.write_to_user('bob')
        filename = os.path.join(TEST_DIR, 'data', 'spool', 'bob')
        self.filenames.append(filename)
        self.assertTrue(os.path.exists(filename))
        self.assertEqual(repr(cron), "<User CronTab 'bob'>")

    def test_07_ioerror_read(self):
        """No filename ioerror"""
        with self.assertRaises(IOError):
            cron = crontab.CronTab(user='error')
            cron.read()

    def test_07_ioerror_write(self):
        """User not specified, nowhere to write to"""
        cron = crontab.CronTab()
        with self.assertRaises(IOError):
            cron.write()

    def test_08_cronitem(self):
        """CronItem Standalone"""
        item = crontab.CronItem(line='noline')
        self.assertTrue(item.is_enabled())
        # A standalone item has no parent crontab, so delete() must fail.
        with self.assertRaises(UnboundLocalError):
            item.delete()
        item.command = str('nothing')
        self.assertEqual(item.render(), '* * * * * nothing')

    def test_10_time_object(self):
        """Set slices using time object"""
        item = crontab.CronItem(command='cmd')
        self.assertEqual(str(item.slices), '* * * * *')
        item.setall(time(1, 2))
        self.assertEqual(str(item.slices), '2 1 * * *')
        self.assertTrue(item.is_valid())
        item.setall(time(0, 30, 0, 0))
        self.assertEqual(str(item.slices), '30 0 * * *')
        self.assertTrue(item.is_valid())
        self.assertEqual(str(item), '30 0 * * * cmd')

    def test_11_date_object(self):
        """Set slices using date object"""
        item = crontab.CronItem(command='cmd')
        self.assertEqual(str(item.slices), '* * * * *')
        item.setall(date(2010, 6, 7))
        self.assertEqual(str(item.slices), '0 0 7 6 *')
        self.assertTrue(item.is_valid())

    def test_12_datetime_object(self):
        """Set slices using datetime object"""
        item = crontab.CronItem(command='cmd')
        self.assertEqual(str(item.slices), '* * * * *')
        item.setall(datetime(2009, 8, 9, 3, 4))
        self.assertTrue(item.is_valid())
        self.assertEqual(str(item.slices), '4 3 9 8 *')

    def test_20_slice_validation(self):
        """CronSlices class and objects can validate"""
        CronSlices = crontab.CronSlices
        self.assertTrue(CronSlices('* * * * *').is_valid())
        self.assertTrue(CronSlices.is_valid('* * * * *'))
        self.assertTrue(CronSlices.is_valid('*/2 * * * *'))
        self.assertTrue(CronSlices.is_valid('* 1,2 * * *'))
        self.assertTrue(CronSlices.is_valid('* * 1-5 * *'))
        self.assertTrue(CronSlices.is_valid('* * * * MON-WED'))
        self.assertTrue(CronSlices.is_valid('@reboot'))

        # Corrupt one slice in place; the object must notice on re-check.
        sliced = CronSlices('* * * * *')
        sliced[0].parts = [300]
        self.assertEqual(str(sliced), '300 * * * *')
        self.assertFalse(sliced.is_valid())
        self.assertFalse(CronSlices.is_valid('P'))
        self.assertFalse(CronSlices.is_valid('*/61 * * * *'))
        self.assertFalse(CronSlices.is_valid('* 1,300 * * *'))
        self.assertFalse(CronSlices.is_valid('* * 50-1 * *'))
        self.assertFalse(CronSlices.is_valid('* * * * FRO-TOO'))
        self.assertFalse(CronSlices.is_valid('@retool'))

    def test_25_open_pipe(self):
        """Test opening pipes"""
        from crontab import open_pipe, CRONCMD
        # The fake cron binary echoes back the arguments it was given.
        pipe = open_pipe(CRONCMD, h=None, a='one', abc='two')
        (out, err) = pipe.communicate()
        self.assertEqual(err, b'')
        self.assertEqual(out, b'--abc=two|-a|-h|one\n')

    def test_07_zero_padding(self):
        """Can we get zero padded output"""
        cron = crontab.CronTab(tab="02 3-5 2,4 */2 01 cmd")
        self.assertEqual(str(cron), '2 3-5 2,4 */2 1 cmd\n')
        # ZERO_PAD is toggled only inside this context manager.
        with Attribute(crontab, 'ZERO_PAD', True):
            self.assertEqual(str(cron), '02 03-05 02,04 */2 01 cmd\n')

    def tearDown(self):
        for filename in self.filenames:
            if os.path.exists(filename):
                os.unlink(filename)


if __name__ == '__main__':
    test_support.run_unittest(
        UseTestCase,
    )
from .. import BaseForm
from wtforms import StringField, TextAreaField
from wtforms.validators import DataRequired


class CategoryForm(BaseForm):
    """Create/edit form for a category; both fields are mandatory."""

    name = StringField('name', validators=[DataRequired()])
    description = TextAreaField('description', validators=[DataRequired()])
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2013 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This file contains the Property List (Plist) Parser.

Plaso's engine calls PlistParser when it encounters Plist files to be
processed.
"""

import binascii
import logging

from binplist import binplist

from plaso.lib import errors
from plaso.lib import utils
from plaso.parsers import interface
from plaso.parsers import manager


class PlistParser(interface.BasePluginsParser):
    """De-serializes plists; the event objects are generated by plugins.

    The Plaso engine calls parsers by their Parse() method. This parser's
    Parse() uses GetTopLevel(), which deserializes plist files using the
    binplist library, and calls plugins (PlistPlugin) registered through
    the interface by their Process() method to produce event objects.

    Plugins are how this parser understands the content inside a plist
    file; each plugin holds logic specific to a particular plist file.
    See the interface and plist_plugins/ directory for examples of how
    plist plugins are implemented.
    """

    NAME = 'plist'
    DESCRIPTION = u'Parser for binary and text plist files.'

    # Plugin class registry populated through the BasePluginsParser
    # registration machinery.
    _plugin_classes = {}

    def __init__(self):
        """Initializes a parser object."""
        super(PlistParser, self).__init__()
        # Instantiate every registered plist plugin once, up front.
        self._plugins = PlistParser.GetPluginObjects()

    def GetTopLevel(self, file_object, file_name=''):
        """Returns the deserialized content of a plist as a dictionary object.

        Args:
            file_object: A file-like object to parse.
            file_name: The name of the file-like object.

        Returns:
            A dictionary object representing the contents of the plist.

        Raises:
            UnableToParseFile: when the file cannot be deserialized as a
                plist, or deserializes to an empty/falsy object.
        """
        try:
            top_level_object = binplist.readPlist(file_object)
        except binplist.FormatError as exception:
            raise errors.UnableToParseFile(
                u'[{0:s}] File is not a plist file: {1:s}'.format(
                    self.NAME, utils.GetUnicodeString(exception)))
        except (
            LookupError, binascii.Error, ValueError,
            AttributeError) as exception:
            raise errors.UnableToParseFile(
                u'[{0:s}] Unable to parse XML file, reason: {1:s}'.format(
                    self.NAME, exception))
        except OverflowError as exception:
            raise errors.UnableToParseFile(
                u'[{0:s}] Unable to parse: {1:s} with error: {2:s}'.format(
                    self.NAME, file_name, exception))

        if not top_level_object:
            raise errors.UnableToParseFile(
                u'[{0:s}] File is not a plist: {1:s}'.format(
                    self.NAME, utils.GetUnicodeString(file_name)))

        # Since we are using readPlist from binplist now instead of manually
        # opening up the BinarPlist file we loose this option. Keep it
        # commented out for now but this needs to be tested a bit more.
        # TODO: Re-evaluate if we can delete this or still require it.
        #if bpl.is_corrupt:
        #  logging.warning(
        #      u'[{0:s}] corruption detected in binary plist: {1:s}'.format(
        #          self.NAME, file_name))

        return top_level_object

    def Parse(self, parser_context, file_entry):
        """Parse and extract values from a plist file.

        Args:
            parser_context: A parser context object (instance of
                ParserContext).
            file_entry: A file entry object (instance of dfvfs.FileEntry).

        Raises:
            UnableToParseFile: when the file is empty, larger than 50 MB,
                or cannot be deserialized as a plist.
        """
        # TODO: Should we rather query the stats object to get the size here?
        file_object = file_entry.GetFileObject()
        file_size = file_object.get_size()

        if file_size <= 0:
            file_object.close()
            raise errors.UnableToParseFile(
                u'[{0:s}] file size: {1:d} bytes is less equal 0.'.format(
                    self.NAME, file_size))

        # 50MB is 10x larger than any plist seen to date.
        if file_size > 50000000:
            file_object.close()
            raise errors.UnableToParseFile(
                u'[{0:s}] file size: {1:d} bytes is larger than 50 MB.'.format(
                    self.NAME, file_size))

        top_level_object = None
        try:
            top_level_object = self.GetTopLevel(file_object, file_entry.name)
        except errors.UnableToParseFile:
            file_object.close()
            raise

        if not top_level_object:
            file_object.close()
            raise errors.UnableToParseFile(
                u'[{0:s}] unable to parse: {1:s} skipping.'.format(
                    self.NAME, file_entry.name))

        file_system = file_entry.GetFileSystem()
        plist_name = file_system.BasenamePath(file_entry.name)

        # Offer the top-level object to every plugin; a plugin that does
        # not recognize the plist raises WrongPlistPlugin and is skipped.
        for plugin_object in self._plugins:
            try:
                plugin_object.Process(
                    parser_context, plist_name=plist_name,
                    top_level=top_level_object)
            except errors.WrongPlistPlugin as exception:
                # NOTE(review): indexing an exception (exception[0]) only
                # works on Python 2; Python 3 requires exception.args[0].
                logging.debug(u'[{0:s}] Wrong plugin: {1:s} for: {2:s}'.format(
                    self.NAME, exception[0], exception[1]))

        file_object.close()


manager.ParsersManager.RegisterParser(PlistParser)
# -*- coding: utf-8 -*- # @author: vuolter from __future__ import absolute_import, unicode_literals import os import re import sys from future import standard_library standard_library.install_aliases() def char(text, chars, repl=''): return re.sub(r'[{0}]+'.format(chars), repl, text) _UNIXBADCHARS = ('\0', '/', '\\') _MACBADCHARS = _UNIXBADCHARS + (':',) _WINBADCHARS = _MACBADCHARS + ('<', '>', '"', '|', '?', '*') _WINBADWORDS = ( 'com1', 'com2', 'com3', 'com4', 'com5', 'com6', 'com7', 'com8', 'com9', 'lpt1', 'lpt2', 'lpt3', 'lpt4', 'lpt5', 'lpt6', 'lpt7', 'lpt8', 'lpt9', 'con', 'prn') def name(text, sep='_', allow_whitespaces=False): """Remove invalid characters.""" if os.name == 'nt': bc = _WINBADCHARS elif sys.platform == 'darwin': bc = _MACBADCHARS else: bc = _UNIXBADCHARS repl = r''.join(bc) if not allow_whitespaces: repl += ' ' res = char(te
xt, repl, sep).strip() if os.name == 'nt' and res.lower() in _WINBADWORDS: res = sep + res return res def pattern(text, rules): for rule in rules: try: pattr, repl, flags = r
ule except ValueError: pattr, repl = rule flags = 0 text = re.sub(pattr, repl, text, flags) return text def truncate(text, offset): maxtrunc = len(text) // 2 if offset > maxtrunc: raise ValueError('String too short to truncate') trunc = (len(text) - offset) // 3 return '{0}~{1}'.format(text[:trunc * 2], text[-trunc:]) def uniquify(seq): """Remove duplicates from list preserving order.""" seen = set() seen_add = seen.add return type(seq)(x for x in seq if x not in seen and not seen_add(x))
# Package facade: re-export the patched subprocess Popen implementation.
from .killableprocess import Popen, mswindows

# ``mswindows`` is presumably a bool flag (True on Windows hosts) defined
# in killableprocess -- verify there. Only on Windows do the win32
# startup-info constants below exist.
if mswindows:
    from .winprocess import STARTUPINFO, STARTF_USESHOWWINDOW
#!/usr/bin/env python
# -*- coding: utf8 -*-

import os
import argparse
import tensorflow as tf
from gym import wrappers

from yarll.environment.registration import make


class ModelRunner(object):
    """
    Run an already learned model.
    Currently only supports one variation of an environment.
    """

    def __init__(self, env, model_directory: str, save_directory: str, **usercfg) -> None:
        """Restore a saved TensorFlow graph and prepare it for execution.

        Args:
            env: Gym environment to run the model on.
            model_directory: Directory containing the "model.meta"/"model"
                checkpoint files.
            save_directory: Directory where results of running the model
                are saved.
            **usercfg: Overrides for the default configuration (e.g. n_iter).
        """
        super(ModelRunner, self).__init__()
        self.env = env
        self.model_directory = model_directory
        self.save_directory = save_directory
        self.config = dict(
            episode_max_length=self.env.spec.tags.get('wrapper_config.TimeLimit.max_episode_steps'),
            repeat_n_actions=1
        )
        self.config.update(usercfg)

        self.session = tf.Session()
        # Rebuild the graph from the meta file, then restore the weights.
        self.saver = tf.train.import_meta_graph(os.path.join(self.model_directory, "model.meta"))
        self.saver.restore(self.session, os.path.join(self.model_directory, "model"))
        # Tensors exported by the training code via tf.add_to_collection.
        self.action = tf.get_collection("action")[0]
        self.states = tf.get_collection("states")[0]

    def choose_action(self, state):
        """Choose an action for the given observation."""
        return self.session.run([self.action], feed_dict={self.states: [state]})[0]

    def get_trajectory(self, render: bool = False):
        """
        Run agent-environment loop for one whole episode (trajectory).
        """
        state = self.env.reset()
        # Initialize so a zero-length episode cannot leave `done` unbound.
        done = False
        for _ in range(self.config["episode_max_length"]):
            action = self.choose_action(state)
            for _ in range(self.config["repeat_n_actions"]):
                # BUG FIX: the observation returned by env.step used to be
                # discarded, so every action was computed from the initial
                # state; track the current observation instead.
                state, _, done, _ = self.env.step(action)
                if done:  # Don't continue if episode has already ended
                    break
            if done:
                break
            if render:
                self.env.render()
        return

    def run(self):
        """Execute n_iter episodes with the restored policy."""
        for _ in range(self.config["n_iter"]):
            self.get_trajectory()


parser = argparse.ArgumentParser()
parser.add_argument("environment", metavar="env", type=str, help="Gym environment to execute the model on.")
parser.add_argument("model_directory", type=str, help="Directory from where model files are loaded.")
parser.add_argument("save_directory", type=str, help="Directory where results of running the model are saved")
parser.add_argument("--iterations", default=100, type=int, help="Number of iterations to run the algorithm.")


def main():
    args = parser.parse_args()
    env = make(args.environment)
    runner = ModelRunner(env, args.model_directory, args.save_directory, n_iter=args.iterations)
    try:
        runner.env = wrappers.Monitor(runner.env, args.save_directory, video_callable=False, force=True)
        runner.run()
    except KeyboardInterrupt:
        pass


if __name__ == "__main__":
    main()
ype): rng = jtu.rand_default(self.rng()) args_maker = lambda: [rng(shape, dtype)] self._CheckAgainstNumpy(np.linalg.slogdet, jnp.linalg.slogdet, args_maker, tol=1e-3) self._CompileAndCheck(jnp.linalg.slogdet, args_maker) @parameterized.named_parameters(jtu.cases_from_list( {"testcase_name": "_shape={}".format(jtu.format_shape_
dtype_string(shape, dtype)), "shape": shape, "dtype": dtype} for shape in [(1, 1), (4, 4), (5, 5), (2, 7, 7)] for dtype in float_types + complex_types)) @jtu.skip_on_devices("tpu") @jtu.skip_on_flag("jax_skip_slow_tests", True) def testSlogdetGrad(self, shape, dtype): rng = jtu.rand_default(self.rng()) a = rng(shape, dtype) jtu.check_grads(jnp.linalg.slogdet, (a,), 2, atol=1e-1, rtol=2e-1) def testIssue1213(self): for n in range(5): mat = jnp.array([np.d
iag(np.ones([5], dtype=np.float32))*(-.01)] * 2) args_maker = lambda: [mat] self._CheckAgainstNumpy(np.linalg.slogdet, jnp.linalg.slogdet, args_maker, tol=1e-3) @parameterized.named_parameters(jtu.cases_from_list( {"testcase_name": "_shape={}_leftvectors={}_rightvectors={}".format( jtu.format_shape_dtype_string(shape, dtype), compute_left_eigenvectors, compute_right_eigenvectors), "shape": shape, "dtype": dtype, "compute_left_eigenvectors": compute_left_eigenvectors, "compute_right_eigenvectors": compute_right_eigenvectors} for shape in [(0, 0), (4, 4), (5, 5), (50, 50), (2, 6, 6)] for dtype in float_types + complex_types for compute_left_eigenvectors, compute_right_eigenvectors in [ (False, False), (True, False), (False, True), (True, True) ])) # TODO(phawkins): enable when there is an eigendecomposition implementation # for GPU/TPU. @jtu.skip_on_devices("gpu", "tpu") def testEig(self, shape, dtype, compute_left_eigenvectors, compute_right_eigenvectors): rng = jtu.rand_default(self.rng()) n = shape[-1] args_maker = lambda: [rng(shape, dtype)] # Norm, adjusted for dimension and type. 
def norm(x): norm = np.linalg.norm(x, axis=(-2, -1)) return norm / ((n + 1) * jnp.finfo(dtype).eps) def check_right_eigenvectors(a, w, vr): self.assertTrue( np.all(norm(np.matmul(a, vr) - w[..., None, :] * vr) < 100)) def check_left_eigenvectors(a, w, vl): rank = len(a.shape) aH = jnp.conj(a.transpose(list(range(rank - 2)) + [rank - 1, rank - 2])) wC = jnp.conj(w) check_right_eigenvectors(aH, wC, vl) a, = args_maker() results = lax.linalg.eig(a, compute_left_eigenvectors, compute_right_eigenvectors) w = results[0] if compute_left_eigenvectors: check_left_eigenvectors(a, w, results[1]) if compute_right_eigenvectors: check_right_eigenvectors(a, w, results[1 + compute_left_eigenvectors]) self._CompileAndCheck(partial(jnp.linalg.eig), args_maker, rtol=1e-3) @parameterized.named_parameters(jtu.cases_from_list( {"testcase_name": "_shape={}".format( jtu.format_shape_dtype_string(shape, dtype)), "shape": shape, "dtype": dtype} for shape in [(4, 4), (5, 5), (8, 8), (7, 6, 6)] for dtype in float_types + complex_types)) # TODO(phawkins): enable when there is an eigendecomposition implementation # for GPU/TPU. @jtu.skip_on_devices("gpu", "tpu") def testEigvalsGrad(self, shape, dtype): # This test sometimes fails for large matrices. I (@j-towns) suspect, but # haven't checked, that might be because of perturbations causing the # ordering of eigenvalues to change, which will trip up check_grads. So we # just test on small-ish matrices. 
rng = jtu.rand_default(self.rng()) args_maker = lambda: [rng(shape, dtype)] a, = args_maker() tol = 1e-4 if dtype in (np.float64, np.complex128) else 1e-1 jtu.check_grads(lambda x: jnp.linalg.eigvals(x), (a,), order=1, modes=['fwd', 'rev'], rtol=tol, atol=tol) @parameterized.named_parameters(jtu.cases_from_list( {"testcase_name": "_shape={}".format( jtu.format_shape_dtype_string(shape, dtype)), "shape": shape, "dtype": dtype} for shape in [(4, 4), (5, 5), (50, 50)] for dtype in float_types + complex_types)) # TODO: enable when there is an eigendecomposition implementation # for GPU/TPU. @jtu.skip_on_devices("gpu", "tpu") def testEigvals(self, shape, dtype): rng = jtu.rand_default(self.rng()) args_maker = lambda: [rng(shape, dtype)] a, = args_maker() w1, _ = jnp.linalg.eig(a) w2 = jnp.linalg.eigvals(a) self.assertAllClose(w1, w2, rtol={np.complex128: 1e-14}) @jtu.skip_on_devices("gpu", "tpu") def testEigvalsInf(self): # https://github.com/google/jax/issues/2661 x = jnp.array([[jnp.inf]]) self.assertTrue(jnp.all(jnp.isnan(jnp.linalg.eigvals(x)))) @parameterized.named_parameters(jtu.cases_from_list( {"testcase_name": "_shape={}".format(jtu.format_shape_dtype_string(shape, dtype)), "shape": shape, "dtype": dtype} for shape in [(1, 1), (4, 4), (5, 5)] for dtype in float_types + complex_types)) @jtu.skip_on_devices("gpu", "tpu") def testEigBatching(self, shape, dtype): rng = jtu.rand_default(self.rng()) shape = (10,) + shape args = rng(shape, dtype) ws, vs = vmap(jnp.linalg.eig)(args) self.assertTrue(np.all(np.linalg.norm( np.matmul(args, vs) - ws[..., None, :] * vs) < 1e-3)) @parameterized.named_parameters(jtu.cases_from_list( {"testcase_name": "_n={}_lower={}".format( jtu.format_shape_dtype_string((n,n), dtype), lower), "n": n, "dtype": dtype, "lower": lower} for n in [0, 4, 5, 50] for dtype in float_types + complex_types for lower in [True, False])) def testEigh(self, n, dtype, lower): rng = jtu.rand_default(self.rng()) tol = 1e-3 args_maker = lambda: [rng((n, n), 
dtype)] uplo = "L" if lower else "U" a, = args_maker() a = (a + np.conj(a.T)) / 2 w, v = jnp.linalg.eigh(np.tril(a) if lower else np.triu(a), UPLO=uplo, symmetrize_input=False) self.assertLessEqual( np.linalg.norm(np.eye(n) - np.matmul(np.conj(T(v)), v)), 1e-3) with jax.numpy_rank_promotion('allow'): self.assertLessEqual(np.linalg.norm(np.matmul(a, v) - w * v), tol * np.linalg.norm(a)) self._CompileAndCheck(partial(jnp.linalg.eigh, UPLO=uplo), args_maker, rtol=1e-3) def testEighZeroDiagonal(self): a = np.array([[0., -1., -1., 1.], [-1., 0., 1., -1.], [-1., 1., 0., -1.], [1., -1., -1., 0.]], dtype=np.float32) w, v = jnp.linalg.eigh(a) with jax.numpy_rank_promotion('allow'): self.assertLessEqual(np.linalg.norm(np.matmul(a, v) - w * v), 1e-3 * np.linalg.norm(a)) @parameterized.named_parameters(jtu.cases_from_list( {"testcase_name": "_shape={}".format( jtu.format_shape_dtype_string(shape, dtype)), "shape": shape, "dtype": dtype} for shape in [(4, 4), (5, 5), (50, 50)] for dtype in float_types + complex_types)) def testEigvalsh(self, shape, dtype): rng = jtu.rand_default(self.rng()) n = shape[-1] def args_maker(): a = rng((n, n), dtype) a = (a + np.conj(a.T)) / 2 return [a] self._CheckAgainstNumpy(np.linalg.eigvalsh, jnp.linalg.eigvalsh, args_maker, tol=1e-3) @parameterized.named_parameters(jtu.cases_from_list( {"testcase_name": "_shape={}_lower={}".format(jtu.format_shape_dtype_string(shape, dtype), lower), "shape": shape, "dtype": dtype, "lower":lower} for shape in [(1, 1), (4, 4), (5, 5), (50, 50), (2, 10, 10)] for dtype in float_types + complex_types for lower in [True, False])) def testEighGrad(self, shape, dtype, lower): r
""" Kodi resolveurl plugin Copyright (C) 2014 smokdpi This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import re import urllib import urllib2 from lib import jsunpack from urlparse import urlparse from resolveurl import common from resolveurl.resolver import ResolveUrl, ResolverError from resolveurl.hmf import HostedMediaFile class VideoZooResolver(ResolveUrl): name = "videozoo" domains = ["byzoo.org", "playpanda.net", "videozoo.me", "videowing.me", "easyvideo.me", "play44.net", "playbb.me", "video44.net"] pattern = 'http://((?:www\.)*(?:play44|playbb|video44|byzoo|playpanda|videozoo|videowing|easyvideo)\.(?:me|org|net|eu)/(?:embed[/0-9a-zA-Z]*?|gplus|picasa|gogo/)(?:\.php)*)\?.*?((?:vid|video|id|file)=[%0-9a-zA-Z_\-\./]+|.*)[\?&]*.*' def __init__(self): self.net = common.Net() def get_url(self, host, media_id): return self._default_get_url(host, media_id, 'http://{host}?vid={media_id}') def get_media_url(self, host, media_id): web_url = self.get_url(host, media_id) headers = { 'User-Agent': common.IOS_USER_AGENT, 'Referer': web_url } stream_url = '' new_host = urlparse(web_url).netloc html = self.net.http_GET(web_url, headers=headers).content if 'videozoo' not in new_host: r = re.search('(?:playlist:|timer\s*=\s*null;).+?url\s*[:=]+\s*[\'"]+(.+?)[\'"]+', html, re.DOTALL) else: r = re.search('\*/\s+?(eval\(function\(p,a,c,k,e,d\).+)\s+?/\*', html) if r: try: r = jsunpack.unpack(r.group(1)) if r: r = 
re.search('\[{"url":"(.+?)"', r.replace('\\', '')) except: if r: re_src = re.search('urlResolvers\|2F(.+?)\|', r.group(1)) re_url = re.search('php\|3D(.+?)\|', r.group(1)) if re_src and re_url: stream_url = 'http://%s/%s.php?url=%s' % (new_host, re_src.group(1), re_url.group(1))
stream_url = self._redirect_test(stream_url) else: raise ResolverError('File not found') if r: stream_url = urllib.unquote_p
lus(r.group(1)) if 'http' not in stream_url: stream_url = 'http://' + host + '/' + stream_url.replace('/gplus.php', 'gplus.php').replace('/picasa.php', 'picasa.php') stream_url = self._redirect_test(stream_url) if stream_url: if 'google' in stream_url: return HostedMediaFile(url=stream_url).resolve() else: return stream_url else: raise ResolverError('File not found') def _redirect_test(self, url): opener = urllib2.build_opener() opener.addheaders = [('User-agent', common.IOS_USER_AGENT)] opener.addheaders = [('Referer', urlparse(url).netloc)] try: resp = opener.open(url) if url != resp.geturl(): return resp.geturl() else: return url except urllib2.HTTPError, e: if e.code == 403: if url != e.geturl(): return e.geturl() raise ResolverError('File not found')
#!/usr/bin/env python

# Standard packages
import os
import sys
import argparse

# Third-party packages
from toil.job import Job

# Package methods
from ddb import configuration
from ddb_ngsflow import gatk
from ddb_ngsflow import annotation
from ddb_ngsflow import pipeline
from ddb_ngsflow.align import bwa
from ddb_ngsflow.utils import utilities
from ddb_ngsflow.qc import qc
from ddb_ngsflow.coverage import sambamba
from ddb_ngsflow.variation import variation
from ddb_ngsflow.variation import freebayes
from ddb_ngsflow.variation import mutect
from ddb_ngsflow.variation import platypus
from ddb_ngsflow.variation import vardict
from ddb_ngsflow.variation import scalpel
from ddb_ngsflow.variation.sv import pindel


if __name__ == "__main__":
    # Command line: sample sheet, settings file, plus standard toil options.
    parser = argparse.ArgumentParser()
    parser.add_argument('-s', '--samples_file', help="Input configuration file for samples")
    parser.add_argument('-c', '--configuration', help="Configuration file for various settings")
    Job.Runner.addToilOptions(parser)
    args = parser.parse_args()
    args.logLevel = "INFO"

    sys.stdout.write("Setting up analysis directory\n")
    # Create every expected output directory if it does not already exist.
    for dirname in ("Logs", "FinalVCFs", "FinalBAMs", "Intermediates",
                    "Coverage", "Reports"):
        if not os.path.exists(dirname):
            os.makedirs(dirname)

    sys.stdout.write("Parsing configuration data\n")
    config = configuration.configure_runtime(args.configuration)

    sys.stdout.write("Parsing sample data\n")
    samples = configuration.configure_samples(args.samples_file, config)

    # Workflow graph definition: a root job fans out to one vcfanno
    # annotation job per sample (valid DAG).
    root_job = Job.wrapJobFn(pipeline.spawn_batch_jobs, cores=1)

    for sample in samples:
        vcfanno_job = Job.wrapJobFn(
            annotation.vcfanno, config, sample, samples,
            "{}.snpEff.{}.vcf".format(sample, config['snpeff']['reference']),
            cores=int(config['vcfanno']['num_cores']),
            memory="{}G".format(config['vcfanno']['max_mem']))
        # NOTE(review): attaching each per-sample job to the root here; the
        # flattened original is ambiguous about whether addChild sat inside
        # the loop — confirm against upstream history.
        root_job.addChild(vcfanno_job)

    # Start workflow execution
    Job.Runner.startToil(root_job, args)
import importlib

from .base import BaseTransport
from ..service import Service


class LocalTransport(BaseTransport):
    """Transport that dispatches requests to a service living in-process.

    ``configure`` imports ``<service_name>.service``, finds the class that
    duck-types as the requested service, and instantiates it once; ``send``
    then hands requests straight to that instance.
    """

    def __init__(self):
        super(LocalTransport, self).__init__()
        self.__service = None  # populated via the `service` property setter

    def __repr__(self):
        return self.__class__.__name__

    def configure(self, service_name='', service_version='', service_meta=None, **kwargs):
        """Locate and instantiate the service; name AND version are required."""
        instance = self._import_service_and_instantiate_service(service_name, service_version)
        self.service = instance

    @property
    def service(self):
        # The service instance is deliberately write-only from the outside.
        raise AttributeError("Cannot access service property directly")

    @service.setter
    def service(self, service_instance):
        self.__service = service_instance

    def _import_service_and_instantiate_service(self, service_name, service_version):
        """Import ``<service_name>.service`` and return a matching Service instance.

        Raises:
            Exception: if either argument is missing, or no matching class
                is found in the imported module.
        """
        # BUGFIX: the original condition `not service_name and service_version`
        # parsed as `(not service_name) and service_version`, so a missing
        # service_version (or both missing) slipped past validation. Require
        # both explicitly.
        if not (service_name and service_version):
            raise Exception(
                'service_name and service_version are required '
                'arguments for local transport')
        module = importlib.import_module('%s.service' % (service_name,))
        for name in dir(module):
            if name.startswith('_'):
                continue
            obj = getattr(module, name)
            if not self._looks_like_service_class(obj, service_name, service_version):
                continue
            instance = obj()
            # uber-safe final check to make sure we have the correct service class
            if not isinstance(instance, Service):
                continue
            return instance
        raise Exception(
            'Could not find appropriate Service class. Services '
            'must subclass servant.Service and define an action_map, '
            'name and version.'
        )

    def _looks_like_service_class(self, obj, service_name, service_version):
        """Heuristic duck-type check for a candidate service class."""
        return (
            getattr(obj, 'name', '') == service_name and
            getattr(obj, 'version', -1) == service_version and
            isinstance(getattr(obj, 'action_map', None), dict) and
            hasattr(obj, 'run_actions')
        )

    def is_connected(self):
        # An in-process transport is always "connected".
        return True

    def send(self, request):
        """Hand the request directly to the in-process service."""
        return self.__service.handle_request(request)
import squeakspace.common.util as ut
import squeakspace.common.util_http as ht
import squeakspace.proxy.server.db_sqlite3 as db
import squeakspace.common.squeak_ex as ex
import config


def post_handler(environ):
    """Create/update the address record for a node owned by the session user."""
    query = ht.parse_post_request(environ)
    cookies = ht.parse_cookies(environ)
    user_id = ht.get_required_cookie(cookies, 'user_id')
    session_id = ht.get_required_cookie(cookies, 'session_id')
    node_name = ht.get_required(query, 'node_name')
    url = ht.get_required(query, 'url')
    real_node_name = ht.get_required(query, 'real_node_name')
    fingerprint = ht.get_optional(query, 'fingerprint')

    conn = db.connect(config.db_path)
    try:
        cursor = db.cursor(conn)
        db.set_node_addr(cursor, user_id, session_id,
                         node_name, url, real_node_name, fingerprint)
        db.commit(conn)
        # Responses are raised as exceptions by convention in this codebase.
        raise ht.ok_json({'status' : 'ok'})
    except ex.SqueakException as e:
        raise ht.convert_squeak_exception(e)
    finally:
        db.close(conn)


def get_handler(environ):
    """Fetch the stored address for one of the session user's nodes."""
    query = ht.parse_get_request(environ)
    cookies = ht.parse_cookies(environ)
    user_id = ht.get_required_cookie(cookies, 'user_id')
    session_id = ht.get_required_cookie(cookies, 'session_id')
    node_name = ht.get_required(query, 'node_name')

    conn = db.connect(config.db_path)
    try:
        cursor = db.cursor(conn)
        addr = db.read_node_addr(cursor, user_id, session_id, node_name)
        raise ht.ok_json({'status' : 'ok', 'addr' : addr})
    except ex.SqueakException as e:
        raise ht.convert_squeak_exception(e)
    finally:
        db.close(conn)


def delete_handler(environ):
    """Delete the stored address for one of the session user's nodes."""
    # NOTE(review): parses the body like a POST even for DELETE — presumably
    # deliberate (DELETE bodies carry the parameters here); confirm.
    query = ht.parse_post_request(environ)
    cookies = ht.parse_cookies(environ)
    user_id = ht.get_required_cookie(cookies, 'user_id')
    session_id = ht.get_required_cookie(cookies, 'session_id')
    node_name = ht.get_required(query, 'node_name')

    conn = db.connect(config.db_path)
    try:
        cursor = db.cursor(conn)
        db.delete_node_addr(cursor, user_id, session_id, node_name)
        db.commit(conn)
        raise ht.ok_json({'status' : 'ok'})
    except ex.SqueakException as e:
        raise ht.convert_squeak_exception(e)
    finally:
        db.close(conn)


def main_handler(environ):
    """Route by HTTP method."""
    ht.dispatch_on_method(environ, {
            'POST' : post_handler,
            'GET' : get_handler,
            'DELETE' : delete_handler})


def application(environ, start_response):
    """WSGI entry point."""
    return ht.respond_with_handler(environ, start_response, main_handler)
from .pathutils import grep_r
from . import project
import os
import re


def is_partial(path):
    """A file is a Sass partial when its basename starts with an underscore."""
    return os.path.basename(path).startswith('_')


def partial_import_regex(partial):
    """Return a closure mapping a directory to a compiled @import regex.

    The closure, given a working directory, computes how `partial` would be
    named in an @import statement relative to that directory and returns a
    compiled pattern matching such a statement.
    """
    def from_curdir(cwd):
        relative = os.path.relpath(partial, cwd)
        directory, filename = os.path.split(relative)
        # Drop the extension and the leading underscore from the basename.
        stem = os.path.splitext(filename)[0][1:]
        import_name = os.path.join(directory, stem).replace("\\", "/")
        return re.compile('''@import\s+['"]{0}['"]'''.format(import_name))
    return from_curdir


def get_rec(file_path, start, files=None, partials=None):
    '''
    Recursively find files importing `partial` in `start` and if any
    are partials themselves, find those importing them.
    '''
    files = [] if files is None else files
    partials = [] if partials is None else partials
    # A non-partial is a compilable leaf: record it and stop recursing.
    if not is_partial(file_path):
        files.append(file_path)
        return (files, partials)
    partials.append(file_path)
    matcher = partial_import_regex(os.path.join(start, file_path))
    for importer in grep_r(matcher, start, exts=['.sass', '.scss']):
        if importer not in files and importer not in partials:
            files, partials = get_rec(importer, start, files, partials)
    return (files, partials)


def get(path):
    '''Get files affected by change in contents of `path`'''
    rel, root = project.splitpath(path)
    affected, _ = get_rec(rel, root)
    return (affected, root)
# # Copyright (C) 2014 National Institute For Space Research (INPE) - Brazil. # # This f
ile is part of Python Client API for Web Time Series Service. # # Web Time Series Service for Python is free software: you can # redistr
ibute it and/or modify it under the terms of the # GNU Lesser General Public License as published by # the Free Software Foundation, either version 3 of the License, # or (at your option) any later version. # # Web Time Series Service for Python is distributed in the hope that # it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with Web Time Series Service for Python. See LICENSE. If not, write to # e-sensing team at <esensing-team@dpi.inpe.br>. # """Python Client API for Web Time Series Services (WTSS).""" from .wtss import wtss from .wtss import time_series
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):
    # Auto-generated South migration: adds the `frequency` column to
    # billing.Subscription (defaulting to 'MONTHLY'). The `models` dict below
    # is South's frozen snapshot of the schema at migration time — do not
    # edit it by hand.

    def forwards(self, orm):
        # Adding field 'Subscription.frequency'
        db.add_column('billing_subscription', 'frequency',
                      self.gf('django.db.models.fields.CharField')(default='MONTHLY', max_length=10),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'Subscription.frequency'
        db.delete_column('billing_subscription', 'frequency')

    # Frozen ORM snapshot (auto-generated).
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'billing.subscription': {
            'Meta': {'object_name': 'Subscription'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'currency': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'frequency': ('django.db.models.fields.CharField', [], {'default': "'MONTHLY'", 'max_length': '10'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'price': ('django.db.models.fields.DecimalField', [], {'max_digits': '10', 'decimal_places': '2'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'})
        },
        'billing.usersubscription': {
            'Meta': {'object_name': 'UserSubscription'},
            'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'subscription': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['billing.Subscription']"}),
            'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        }
    }

    complete_apps = ['billing']
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""Sugar activity: an editor combining a pygame map view with a Gtk point list.

The left pane hosts the `conozco` pygame surface; the right pane is a
`points_list.Data` table. Points can be added/removed and saved to disk.
"""

import gi
gi.require_version('Gtk', '3.0')
import sys
import pygame
from gi.repository import Gtk
from sugar3.activity.activity import Activity
from sugar3.graphics.toolbarbox import ToolbarBox
from sugar3.activity.widgets import ActivityToolbarButton
from sugar3.graphics.toolbutton import ToolButton
from sugar3.activity.widgets import StopButton
from sugar3.graphics.objectchooser import ObjectChooser
from gettext import gettext as _

import sugargame.canvas
import conozco
from points_list import Data
from save_util import save, fixValues


class IknowEditor(Activity):
    """Activity window wiring together toolbar, pygame canvas and point table."""

    def __init__(self, handle):
        Activity.__init__(self, handle)

        self.init_vars()
        self.build_toolbar()

        # The pygame-side application; created before the canvas that hosts it.
        self.actividad = conozco.Conozco(self)

        self.build_canvas()
        self.run_canvas()

        self.show_all()

    def init_vars(self):
        # Background picture chosen via the object chooser; None until set.
        self._image = None

    def build_toolbar(self):
        """Create the toolbar: new-picture, add/remove point, save, stop."""
        self.max_participants = 1  # single-user activity (no collaboration)

        toolbar_box = ToolbarBox()
        self.set_toolbar_box(toolbar_box)
        toolbar_box.show()

        activity_button = ActivityToolbarButton(self)
        toolbar_box.toolbar.insert(activity_button, -1)
        activity_button.show()

        # new pic button
        new_pic = ToolButton('new-pic')
        new_pic.connect('clicked', self._new_picture)
        new_pic.set_tooltip(_('New picture'))
        toolbar_box.toolbar.insert(new_pic, -1)

        # add / remove point buttons
        add_point = ToolButton("row-insert")
        add_point.connect("clicked", self._add_point)
        add_point.set_tooltip(_("Add a point"))
        toolbar_box.toolbar.insert(add_point, -1)

        rem_point = ToolButton("row-remove")
        rem_point.connect("clicked", self._remove_point)
        rem_point.set_tooltip(_("Remove the selected point"))
        toolbar_box.toolbar.insert(rem_point, -1)

        # save list button
        # NOTE(review): this local name shadows the imported save_util.save
        # within this method only; _save() still sees the imported function.
        save = ToolButton('filesave')
        save.connect('clicked', self._save)
        save.set_tooltip(_('Save data'))
        toolbar_box.toolbar.insert(save, -1)

        # separator and stop button
        separator = Gtk.SeparatorToolItem()
        separator.props.draw = False
        separator.set_expand(True)
        toolbar_box.toolbar.insert(separator, -1)
        separator.show()

        stop_button = StopButton(self)
        toolbar_box.toolbar.insert(stop_button, -1)
        stop_button.show()

    def build_canvas(self):
        # Two-column layout: pygame surface (left), point table (right).
        self.table = Gtk.Table(1, 2, False)
        self.box1 = Gtk.HBox()
        self.box1.set_size_request(350, 350)
        self.box1.show()
        self.box2 = Gtk.HBox()
        self.box2.set_size_request(50, 200)
        self.box2.show()
        self.table.attach(self.box1, 0, 1, 0, 1)
        self.table.attach(self.box2, 1, 2, 0, 1)
        self.labels_and_values = Data(self)
        self.labels_and_values.connect("some-changed", self._some_changed)
        self.box2.add(self.labels_and_values)
        self.set_canvas(self.table)

    def run_canvas(self):
        # Embed the pygame main loop inside the Gtk activity.
        self.actividad.canvas = sugargame.canvas.PygameCanvas(self,
            main=self.actividad.run,
            modules=[pygame.display, pygame.font])
        self.box1.add(self.actividad.canvas)
        self.actividad.canvas.grab_focus()

    def _save(self, widget):
        """Convert the table values back to map coordinates and write them out."""
        l = self.labels_and_values.get_info()
        scale = self.actividad.getScale()
        shiftx = self.actividad.getShiftX()
        shifty = self.actividad.getShiftY()
        ready = fixValues(l, scale, shiftx, shifty)
        save(ready)

    def _new_picture(self, widget):
        """Pick an image from the journal and use it as the background."""
        try:
            chooser = ObjectChooser(parent=self)
        except:
            # Presumably older Sugar versions reject this signature — confirm.
            chooser = None
        f = None
        if chooser is not None:
            result = chooser.run()
            if result == Gtk.ResponseType.ACCEPT:
                dsobject = chooser.get_selected_object()
                f = dsobject.file_path
        if f is not None:
            self._image = pygame.image.load(f)
            self.actividad.set_background(self._image)

    def _add_point(self, widget, label="", value="City", dx='0', dy='-14'):
        # NOTE(review): add_value's return value is captured but unused.
        pos = self.labels_and_values.add_value(label, value, dx, dy)

    def _remove_point(self, widget):
        # NOTE(review): remove_selected_value's return value is unused.
        path = self.labels_and_values.remove_selected_value()
        self._update_points()

    def _add_coor(self, pos):
        # Only meaningful once a background image has been chosen.
        if self._image is not None:
            self.labels_and_values.update_selected_value(pos)

    def _some_changed(self, treeview, path, new_label):
        # Any table edit refreshes the pygame view.
        self._update_points()

    def _update_points(self):
        # Push the current table contents into the pygame view.
        l = self.labels_and_values.get_info()
        self.actividad.update_points(l)
import unittest
import os

from PIL import Image

from SUASSystem.utils import crop_target


class SUASSystemUtilsDataFunctionsTestCase(unittest.TestCase):

    def test_crop_image(self):
        """
        Crop a known region from a fixture image and verify corner and
        interior pixels of the crop match the corresponding source pixels.
        """
        source_path = "tests/images/image2_test_image_bounder.jpg"
        crop_path = "tests/images/test_crop.jpg"
        top_left = [250.0, 200.0]
        bottom_right = [350.0, 300.0]

        crop_target(source_path, crop_path, top_left, bottom_right)

        cropped = Image.open(crop_path).load()
        original = Image.open(source_path).load()
        # Each crop pixel should equal the source pixel offset by (250, 200).
        for (cx, cy), (ox, oy) in (((0, 0), (250, 200)),
                                   ((1, 1), (251, 201)),
                                   ((50, 50), (300, 250)),
                                   ((99, 99), (349, 299))):
            self.assertEqual(cropped[cx, cy], original[ox, oy])
        # Clean up the generated crop so the test is repeatable.
        os.remove("tests/images/test_crop.jpg")
# -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2015 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
"""Hook scripts handling"""

import os.path
import subprocess

from weblate.trans.util import get_clean_env


def get_script_name(name):
    '''
    Returns script name from string possibly containing full path and
    parameters.
    '''
    return os.path.basename(name).split()[0]


def run_post_push_script(component):
    """Run post push hook"""
    run_hook(component, component.post_push_script)


def run_post_update_script(component):
    """Run post update hook"""
    run_hook(component, component.post_update_script)


def run_pre_commit_script(component, filename):
    """
    Pre commit hook
    """
    run_hook(component, component.pre_commit_script, filename)


def run_post_commit_script(component, filename):
    """
    Post commit hook
    """
    run_hook(component, component.post_commit_script, filename)


def run_hook(component, script, *args):
    """
    Generic script hook executor.

    Returns True on success, False on failure, and None when no script
    is configured.
    """
    if not script:
        return
    command = [script]
    command.extend(args)

    # Repository links execute against the linked subproject's repo.
    target = component.linked_subproject if component.is_repo_link else component

    environment = get_clean_env()
    environment['WL_VCS'] = target.vcs
    environment['WL_REPO'] = target.repo
    environment['WL_PATH'] = target.get_path()
    environment['WL_FILEMASK'] = component.filemask
    environment['WL_FILE_FORMAT'] = component.file_format

    try:
        subprocess.check_call(
            command,
            env=environment,
            cwd=component.get_path(),
        )
    except (OSError, subprocess.CalledProcessError) as err:
        component.log_error(
            'failed to run hook script %s: %s',
            script,
            err
        )
        return False
    return True
import sy
s #line = sys.stdin.read() #print line datas = [] for line in sys.stdin: datas.append(line)
print datas
#!/usr/bin/python
# -*- coding: utf-8 -*-


def add(x, y):
    """Return x + y for non-negative integers using only bitwise operations.

    XOR adds without carries; AND shifted left one bit is the carry, which
    is folded back in until no carries remain.

    NOTE: Python ints are unbounded, so a negative operand would never
    terminate; callers here only pass non-negative values.
    """
    total = x ^ y
    carry = x & y
    while carry:
        x, y = total, carry << 1
        total = x ^ y
        carry = x & y
    return total


def vowel_count(word):
    """Count the vowels (a, e, i, o, u, y — case-insensitive) in `word`.

    Non-alphabetic characters are ignored.
    """
    vowels_counter = 0
    for letter in word:
        if letter.isalpha() and letter.upper() in 'AEIOUY':
            vowels_counter += 1
    return vowels_counter


if __name__ == '__main__':
    # Assignment N 1: vowel count per word, plus the maximum.
    text = "Proin eget tortor risus. Cras ultricies ligula sed magna dictum porta. Proin eget tortor risus. Curabitur non nulla sit amet nisl tempus convallis quis ac lectus. Donec rutrum congue leo eget malesuada."
    word_list = text.split()
    max_vowel_number = 0
    # BUGFIX: original iterated range(0, len-1) and skipped the last word.
    for word in word_list:
        count = vowel_count(word)
        print("word=", word, " number of vowels", count)
        if count > max_vowel_number:
            max_vowel_number = count
    print("Maximum number of vowels is", max_vowel_number)

    # Assignment N 2: the longest word(s).
    length = len(word_list[0])
    words = [word_list[0]]
    # BUGFIX: original range(1, len-1) skipped the last word here too.
    for word in word_list[1:]:
        if length < len(word):
            length = len(word)
            words = [word]
        elif length == len(word):
            words.append(word)
    print("maximum length=", length, "words are", words)

    # Assignment N 3: print the text reversed (slice replaces the manual loop).
    text = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nulla quis lorem ut libero malesuada feugiat. Lorem ipsum dolor sit amet, consectetur adipiscing elit. Donec rutrum congue leo eget malesuada. Cras ultricies ligula sed magna dictum porta."
    print(text[::-1])

    # Assignment N 4: docstring of every attribute of os and sys.
    # BUGFIX: original used eval() on built strings and skipped the last
    # attribute; getattr is safer and covers the whole dir() listing.
    import os
    import sys
    for module in (os, sys):
        for attr_name in dir(module):
            print(getattr(module, attr_name).__doc__)

    # Assignment N 5: digit sum computed exclusively with add().
    number = 12345
    digits = str(number)
    total = int(digits[0])
    i = 0
    while i < len(digits) - 1:
        total = add(total, int(digits[add(i, 1)]))
        i = add(i, 1)
    print(total)
# Copyright 2011 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).

"""All the interfaces that are exposed through the webservice.

There is a declaration in ZCML somewhere that looks like:
<webservice:register module="lp.patchwebservice" />

which tells `lazr.restful` that it should look for webservice
exports here.
"""

__metaclass__ = type
__all__ = [
    'ITemporaryBlobStorage',
    'ITemporaryStorageManager',
    ]

from lp.services.temporaryblobstorage.interfaces import (
    ITemporaryBlobStorage,
    ITemporaryStorageManager,
    )
from lp.services.webservice.apihelpers import (
    patch_operations_explicit_version,
    )

# ITemporaryBlobStorage
# Pin these named operations to the 'beta' webservice version.
patch_operations_explicit_version(
    ITemporaryBlobStorage, 'beta', "getProcessedData", "hasBeenProcessed")

# ITemporaryStorageManager
patch_operations_explicit_version(
    ITemporaryStorageManager, 'beta', "fetch")
.qt import * from aqt.utils import ( TR, HelpPage, disable_help_button, openHelp, showInfo, showWarning, tr, ) def video_driver_name_for_platform(driver: VideoDriver) -> str: if driver == VideoDriver.ANGLE: return tr(TR.PREFERENCES_VIDEO_DRIVER_ANGLE) elif driver == VideoDriver.Software: if isMac: return tr(TR.PREFERENCES_VIDEO_DRIVER_SOFTWARE_MAC) else: return tr(TR.PREFERENCES_VIDEO_DRIVER_SOFTWARE_OTHER) else: if isMac: return tr(TR.PREFERENCES_VIDEO_DRIVER_OPENGL_MAC) else: return tr(TR.PREFERENCES_VIDEO_DRIVER_OPENGL_OTHER) class Preferences(QDialog): def __init__(self, mw: AnkiQt) -> None: QDialog.__init__(self, mw, Qt.Window) self.mw = mw self.prof = self.mw.pm.profile self.form = aqt.forms.preferences.Ui_Preferences() self.form.setupUi(self) disable_help_button(self) self.form.buttonBox.button(QDialogButtonBox.Help).setAutoDefault(False) self.form.buttonBox.button(QDialogButtonBox.Close).setAutoDefault(False) qconnect( self.form.buttonBox.helpRequested, lambda: openHelp(HelpPage.PREFERENCES) ) self.silentlyClose = True self.prefs = self.mw.col.get_preferences() self.setupLang() self.setupCollection() self.setupNetwork() self.setupBackup() self.setupOptions() self.show() def accept(self) -> None: # avoid exception if main window is already closed if not self.mw.col: return self.updateCollection() self.updateNetwork() self.updateBackup() self.updateOptions() self.mw.pm.save() self.mw.reset() self.done(0) aqt.dialogs.markClosed("Preferences") def reject(self) -> None: self.accept() # Language ###################################################################### def setupLang(self) -> None: f = self.form f.lang.addItems([x[0] for x in anki.lang.langs]) f.lang.setCurrentIndex(self.langIdx()) qconnect(f.lang.currentIndexChanged, self.onLangIdxChanged) def langIdx(self) -> int: codes = [x[1] for x in anki.lang.langs] lang = anki.lang.currentLang if lang in anki.lang.compatMap: lang = anki.lang.compatMap[lang] else: lang = lang.replace("-", "_") try: return 
codes.index(lang) except: return codes.index("en_US") def onLangIdxChanged(self, idx: int) -> None: code = anki.lang.langs[idx][1] self.mw.pm.setLang(code) showInfo( tr(TR.PREFERENCES_PLEASE_RESTART_ANKI_TO_COMPLETE_LANGUAGE), parent=self ) # Collection options ###################################################################### def setupCollection(self) -> None: import anki.consts as c f = self.form qc = self.mw.col.conf self.setup_video_driver() f.newSpread.addItems(list(c.newCardSchedulingLabels(self.mw.col).values())) f.useCurrent.setCurrentIndex(int(not qc.get("addToCur", True))) s = self.prefs.sched f.lrnCutoff.setValue(int(s.learn_ahead_secs / 60.0)) f.timeLimit.setValue(int(s.time_limit_secs / 60.0)) f.showEstimates.setChecked(s.show_intervals_on_buttons) f.showProgress.setChecked(s.show_remaining_due_counts) f.newSpread.setCurrentIndex(s.new_review_mix) f.dayLearnFirst.setChecked(s.day_learn_first) f.dayOffset.setValue(s.rollover) if s.scheduler_version < 2: f.dayLearnFirst.setVisible(False) f.legacy_timezone.setVisible(False) else: f.legacy_timezone.setChecked(not s.new_timezone) def setup_video_driver(self) -> None: self.video_drivers = VideoDriver.all_for_platform() names = [ tr(TR.PREFERENCES_VIDEO_DRIVER, driver=video_driver_name_for_platform(d))
for d in self.video_drivers ] self.form.video_driver.addItems(names) self.form.video_driver.setCurrentIndex( self.video_drivers.index(self.mw.pm.video_driver()) ) def update_video_driver(self) -> None: new_driver = self.video_drivers[self.form.video_driver.currentIndex
()] if new_driver != self.mw.pm.video_driver(): self.mw.pm.set_video_driver(new_driver) showInfo(tr(TR.PREFERENCES_CHANGES_WILL_TAKE_EFFECT_WHEN_YOU)) def updateCollection(self) -> None: f = self.form d = self.mw.col self.update_video_driver() qc = d.conf qc["addToCur"] = not f.useCurrent.currentIndex() s = self.prefs.sched s.show_remaining_due_counts = f.showProgress.isChecked() s.show_intervals_on_buttons = f.showEstimates.isChecked() s.new_review_mix = f.newSpread.currentIndex() s.time_limit_secs = f.timeLimit.value() * 60 s.learn_ahead_secs = f.lrnCutoff.value() * 60 s.day_learn_first = f.dayLearnFirst.isChecked() s.rollover = f.dayOffset.value() s.new_timezone = not f.legacy_timezone.isChecked() self.mw.col.set_preferences(self.prefs) d.setMod() # Network ###################################################################### def setupNetwork(self) -> None: self.form.media_log.setText(tr(TR.SYNC_MEDIA_LOG_BUTTON)) qconnect(self.form.media_log.clicked, self.on_media_log) self.form.syncOnProgramOpen.setChecked(self.prof["autoSync"]) self.form.syncMedia.setChecked(self.prof["syncMedia"]) self.form.autoSyncMedia.setChecked(self.mw.pm.auto_sync_media_minutes() != 0) if not self.prof["syncKey"]: self._hideAuth() else: self.form.syncUser.setText(self.prof.get("syncUser", "")) qconnect(self.form.syncDeauth.clicked, self.onSyncDeauth) self.form.syncDeauth.setText(tr(TR.SYNC_LOG_OUT_BUTTON)) def on_media_log(self) -> None: self.mw.media_syncer.show_sync_log() def _hideAuth(self) -> None: self.form.syncDeauth.setVisible(False) self.form.syncUser.setText("") self.form.syncLabel.setText( tr(TR.PREFERENCES_SYNCHRONIZATIONNOT_CURRENTLY_ENABLED_CLICK_THE_SYNC) ) def onSyncDeauth(self) -> None: if self.mw.media_syncer.is_syncing(): showWarning("Can't log out while sync in progress.") return self.prof["syncKey"] = None self.mw.col.media.force_resync() self._hideAuth() def updateNetwork(self) -> None: self.prof["autoSync"] = self.form.syncOnProgramOpen.isChecked() 
self.prof["syncMedia"] = self.form.syncMedia.isChecked() self.mw.pm.set_auto_sync_media_minutes( self.form.autoSyncMedia.isChecked() and 15 or 0 ) if self.form.fullSync.isChecked(): self.mw.col.modSchema(check=False) self.mw.col.setMod() # Backup ###################################################################### def setupBackup(self) -> None: self.form.numBackups.setValue(self.prof["numBackups"]) def updateBackup(self) -> None: self.prof["numBackups"] = self.form.numBackups.value() # Basic & Advanced Options ###################################################################### def setupOptions(self) -> None: self.form.pastePNG.setChecked(self.prof.get("pastePNG", False)) self.form.uiScale.setValue(int(self.mw.pm.uiScale() * 100)) self.form.pasteInvert.setChecked(self.prof.get("pasteInvert", False)) self.form.showPlayButtons.setChecked(self.prof.get("showPlayButtons", True)) self.form.nightMode.setChecked(self.mw.pm.night_mode()) self.form.interrupt_audio.setChecked(self.mw.pm.interrupt_audio()) self._recording_drivers = [ RecordingDriver.QtAudioInput, RecordingDriver.PyAudio, ] # The plan is to phase out PyAudio
"""Custom urls.py for django-registration.""" from django.conf import settings from django.conf.urls import include, url from django.views.generic import TemplateView from regi
stration.backends.default.views import ( Activatio
nView, RegistrationView, ) from registration_email.forms import EmailRegistrationForm urlpatterns = [ # django-registration views url(r'^activate/complete/$', TemplateView.as_view( template_name='registration/activation_complete.html'), name='registration_activation_complete'), url(r'^activate/(?P<activation_key>\w+)/$', ActivationView.as_view( template_name='registration/activate.html', get_success_url=getattr( settings, 'REGISTRATION_EMAIL_ACTIVATE_SUCCESS_URL', lambda request, user: '/'), ), name='registration_activate'), url(r'^register/$', RegistrationView.as_view( form_class=EmailRegistrationForm, get_success_url=getattr( settings, 'REGISTRATION_EMAIL_REGISTER_SUCCESS_URL', lambda request, user: '/'), ), name='registration_register'), url(r'^register/complete/$', TemplateView.as_view( template_name='registration/registration_complete.html'), name='registration_complete'), url(r'^register/closed/$', TemplateView.as_view( template_name='registration/registration_closed.html'), name='registration_disallowed'), # django auth urls url(r'', include('registration_email.auth_urls')), ]
#!/usr/bin/env python
"""Stream TransJakarta-related tweets into a timestamped JSON-lines file."""
import json
import sys
import time

import tweepy

DEBUG = False

# NOTE(security): API credentials are hard-coded below (and an older,
# commented-out set was previously left in this file). They should be
# loaded from environment variables or an untracked config file, and the
# keys that were committed here should be revoked/rotated.
consumer_key = 'Mcof8aJtJVDqQwz4OMDn2AyZu'
consumer_secret = 'mjsHber2Gj79uc2unbzSRdwGyNyZGjEPBEn4ZHXQZW8FeGeSkv'
access_key = '833745600743079936-hK2K3umAtnfYYuLGLDwD7uzj9ssPCDU'
access_secret = '2Odz7Cky2gb3dZJsO1E65zNL8i84ZnoxLrM9uihSEDb6M'

auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_key, access_secret)
api = tweepy.API(auth)


class CustomStreamListener(tweepy.StreamListener):
    """Appends every raw tweet payload to one JSON-lines file per run."""

    def __init__(self, data_dir):
        # BUGFIX: the base class initializer was never called, leaving
        # tweepy.StreamListener's internal state (e.g. its API handle) unset.
        super(CustomStreamListener, self).__init__()
        time_now = time.strftime("%Y-%m-%d_%H.%M.%S")
        # One output file per run, named after the start timestamp.
        self.outfile = "%s/stream_%s.json" % (data_dir, time_now)

    def on_data(self, data):
        """Append the raw payload to the output file; never kill the stream."""
        try:
            with open(self.outfile, 'a') as f:
                f.write(data)
            print(data)
            return True
        except BaseException as e:
            # Log and back off briefly, but keep streaming.
            print("Error on_data: %s" % str(e), file=sys.stderr)
            time.sleep(5)
            return True

    def on_error(self, status_code):
        # BUGFIX: this used the Python 2 'print >> sys.stderr' statement,
        # which is a SyntaxError under Python 3 (the rest of the file
        # already uses print() as a function).
        print('Encountered error with status code:', status_code,
              file=sys.stderr)
        return True  # Don't kill the stream

    def on_timeout(self):
        print('Timeout...', file=sys.stderr)  # BUGFIX: was a py2 print
        return True  # Don't kill the stream


# run the code with try to handle the exception
try:
    sapi = tweepy.streaming.Stream(auth, CustomStreamListener('twitter-data'))
    sapi.filter(track=["transjakarta", "trans jakarta", "bus way", "busway"],
                languages=["in"])
except Exception as e:
    # Best-effort shutdown: the original bare 'except: pass' swallowed every
    # failure silently; at least report why the stream stopped.
    print('Stream terminated: %s' % e, file=sys.stderr)
# coding=utf-8
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2016
"""
Publish and subscribe to MQTT messages.

Additional information at http://mqtt.org and
http://ibmstreams.github.io/streamsx.messaging
"""

from future.builtins import *
from streamsx.topology.topology import *
from streamsx.topology import schema
# BUGFIX: publish() references streamsx.topology.functions.identity, but the
# star-import above does not bind the name 'streamsx'; import it explicitly.
import streamsx.topology.functions


class MqttStreams(object):
    """
    A simple connector to a MQTT broker for publishing string tuples to MQTT
    topics, and subscribing to MQTT topics and creating streams.

    A connector is for a specific MQTT Broker as specified in the
    configuration object config. Any number of publish() and subscribe()
    connections may be created from a single MqttStreams connector.

    Sample use:
    ::

        topo = Topology("An MQTT application")
        # define configuration information
        config = {}
        config['clientID'] = "test_MQTTpublishClient"
        config['qos'] = int("1")  # (needs to be int vs long)
        config['keepAliveInterval'] = int(20)  # (needs to be int vs long)
        config['commandTimeout'] = 30000  # (needs to be int vs long)
        config['period'] = 5000  # (needs to be int vs long)
        config['messageQueueSize'] = 10  # (needs to be int vs long)
        config['reconnectionBound'] = int(20)
        config['retain'] = True
        config['trustStore'] = "/tmp/no-such-trustStore"
        config['trustStorePassword'] = "woohoo"
        config['keyStore'] = "/tmp/no-such-keyStore"
        config['keyStorePassword'] = "woohoo"
        # create the connector's configuration property map
        config['serverURI'] = "tcp://localhost:1883"
        config['userID'] = "user1id"
        config['password'] = "user1passwrd"
        # create the connector
        mqstream = MqttStreams(topo, config)
        # publish a python source stream to the topic "python.topic1"
        topic = "python.topic1"
        src = topo.source(test_functions.mqtt_publish)
        mqs = mqstream.publish(src, topic)
        # subscribe to the topic "python.topic1"
        topic = ["python.topic1", ]
        mqs = mqstream.subscribe(topic)
        mqs.print()

    Configuration properties apply to publish and subscribe unless stated
    otherwise.

    serverURI
        Required String. URI to the MQTT server, either
        tcp://<hostid>[:<port>] or ssl://<hostid>[:<port>]. The port defaults
        to 1883 for "tcp:" and 8883 for "ssl:" URIs.
    clientID
        Optional String. A unique identifier for a connection to the MQTT
        server. The MQTT broker only allows a single connection for a
        particular clientID. By default a unique client ID is automatically
        generated for each use of publish() and subscribe(). The specified
        clientID is used for the first publish() or subscribe() use and a
        suffix is added for each subsequent use.
    keepAliveInterval
        Optional Integer. Automatically generate a MQTT ping message to the
        server if a message or ping hasn't been sent or received in the last
        keepAliveInterval seconds. Enables the client to detect if the server
        is no longer available without having to wait for the TCP/IP timeout.
        A value of 0 disables keepalive processing. The default is 60.
    commandTimeout
        Optional Long. The maximum time in milliseconds to wait for a MQTT
        connect or publish action to complete. A value of 0 causes the client
        to wait indefinitely. The default is 0.
    period
        Optional Long. The time in milliseconds before attempting to
        reconnect to the server following a connection failure. The default
        is 60000.
    userID
        Optional String. The identifier to use when authenticating with a
        server configured to require that form of authentication.
    password
        Optional String. The identifier to use when authenticating with a
        server configured to require that form of authentication.
    trustStore
        Optional String. The pathname to a file containing the public
        certificate of trusted MQTT servers. If a relative path is specified,
        the path is relative to the application directory. Required when
        connecting to a MQTT server with an ssl:/... serverURI.
    trustStorePassword
        Required String when trustStore is used. The password needed to
        access the encrypted trustStore file.
    keyStore
        Optional String. The pathname to a file containing the MQTT client's
        public private key certificates. If a relative path is specified, the
        path is relative to the application directory. Required when an MQTT
        server is configured to use SSL client authentication.
    keyStorePassword
        Required String when keyStore is used. The password needed to access
        the encrypted keyStore file.
    messageQueueSize
        [subscribe] Optional Integer. The size, in number of messages, of the
        subscriber's internal receive buffer. Received messages are added to
        the buffer prior to being converted to a stream tuple. The receiver
        blocks when the buffer is full. The default is 50.
    retain
        [publish] Optional Boolean. Indicates if messages should be retained
        on the MQTT server. Default is false.
    qos
        Optional Integer. The default MQTT quality of service used for
        message handling. The default is 0.
    """

    def __init__(self, topology, config):
        self.topology = topology
        self.config = config.copy()
        # Counts publish()/subscribe() uses so each SPL operator can be
        # given a distinct MQTT clientID.
        self.opCnt = 0

    def _next_op_params(self):
        """Return a fresh parameter dict with a unique clientID for this op.

        The configured clientID is used unchanged for the first operator and
        suffixed with the connector id and operator count afterwards, since
        an MQTT broker allows only one connection per clientID.
        """
        parms = self.config.copy()
        # BUGFIX: the original used 'if (++self.opCnt > 1)'. In Python '++x'
        # is two unary plus operators (a no-op), so the counter never
        # incremented and unique clientIDs were never generated.
        self.opCnt += 1
        if self.opCnt > 1:
            # .get() instead of [] so a missing clientID is simply skipped
            # rather than raising KeyError.
            clientId = parms.get('clientID')
            if clientId:
                parms['clientID'] = (clientId + "-" + str(id(self)) +
                                     "-" + str(self.opCnt))
        return parms

    def publish(self, pub_stream, topic):
        """Publish the tuples of pub_stream to the given MQTT topic.

        Returns None (the sink has no output stream).
        """
        parms = self._next_op_params()
        parms['topic'] = topic
        parms['dataAttributeName'] = "string"
        # convert pub_stream output port schema from spl po to spl rstring type
        forOp = pub_stream._map(streamsx.topology.functions.identity,
                                schema.CommonSchema.String)
        op = self.topology.graph.addOperator(
            kind="com.ibm.streamsx.messaging.mqtt::MQTTSink")
        op.addInputPort(outputPort=forOp.oport)
        op.setParameters(parms)
        return None

    def subscribe(self, topic):
        """Subscribe to the given MQTT topic(s) and return a String stream."""
        parms = self._next_op_params()
        # 'retain' only applies to publish; drop it if present. pop() avoids
        # the KeyError the original 'parms["retain"] is not None' check
        # raised when the key was absent.
        parms.pop('retain', None)
        parms['topics'] = topic
        parms['topicOutAttrName'] = "topic"
        parms['dataAttributeName'] = "string"
        op = self.topology.graph.addOperator(
            kind="com.ibm.streamsx.messaging.mqtt::MQTTSource")
        oport = op.addOutputPort(
            schema=schema.StreamSchema("tuple<rstring topic, rstring string>"))
        op.setParameters(parms)
        # Pass-thru operator narrows the schema to the plain String stream
        # that is handed back to the caller.
        pop = self.topology.graph.addPassThruOperator()
        pop.addInputPort(outputPort=oport)
        pOport = pop.addOutputPort(schema=schema.CommonSchema.String)
        return Stream(self.topology, pOport)
def fat(n):
    """Return n! (the factorial of n).

    For n <= 0 this returns 1, which matches the 0! == 1 convention
    (negative inputs are not rejected, preserving the original behavior).
    """
    result = 1
    while n > 0:
        result = result * n
        n = n - 1
    return result


# testes
if __name__ == '__main__':
    # Guarded so importing this module no longer prints as a side effect;
    # also dropped the stray trailing semicolon.
    print("Fatorial de 3: ", fat(3))
from datetime import datetime


def days_diff(date1, date2):
    """Return the absolute difference in whole days between two dates.

    date1, date2: (year, month, day) tuples; the values are splatted
    directly into datetime(), so optional time fields are also accepted.
    """
    # BUGFIX: the original body contained 'print abs(days)', the Python 2
    # print *statement*, which is a SyntaxError on Python 3. The debug
    # print is removed; the computed result is unchanged.
    delta = datetime(*date1) - datetime(*date2)
    return abs(delta.days)


if __name__ == '__main__':
    # These "asserts" using only for self-checking and not necessary for auto-testing
    assert days_diff((1982, 4, 19), (1982, 4, 22)) == 3
    assert days_diff((2014, 1, 1), (2014, 8, 27)) == 238
    assert days_diff((2014, 8, 27), (2014, 1, 1)) == 238
import sys

import requests

try:
    from .helper import *
except SystemError:
    from helper import *


def compareRequestsAndSelenium(url):
    """Fetch *url* twice -- once with requests and once with Selenium
    (Firefox) -- and display a diff of the two HTML documents via the
    helper module's view_diff().
    """
    html1 = str(requests.get(url).text)
    # BUGFIX: the driver used to be created *inside* the try block, so if
    # webdriver.Firefox() itself raised, the finally clause hit a NameError
    # on the undefined 'driver' and masked the real startup error.
    driver = webdriver.Firefox()
    try:
        driver.maximize_window()
        driver.get(url)
        html2 = str(driver.page_source)
    finally:
        # quit() (not close()) ends the whole session, so the browser and
        # geckodriver processes are not leaked.
        driver.quit()
    view_diff(url, html1, html2)


if __name__ == '__main__':
    compareRequestsAndSelenium(sys.argv[1])
""" This module contains several handy functions primarily meant for internal use. """ from datetime import date, datetime, timedelta from time import mktime import re import sys from types import MethodType __all__ = ('asint', 'asbool', 'convert_to_datetime', 'timedelta_seconds', 'time_difference', 'datetime_ceil', 'combine_opts', 'get_callable_name', 'obj_to_ref', 'ref_to_obj', 'maybe_ref', 'to_unicode', 'iteritems', 'itervalues', 'xrange') def asint(text): """ Safely converts a string to an integer, returning None if the string is None. :type text: str :rtype: int """ if text is not None: return int(text) def asbool(obj): """ Interprets an object as a boolean value. :rtype: bool """ if isinstance(obj, str): obj = obj.strip().lower() if obj in ('true', 'yes', 'on', 'y', 't', '1'): return True if obj in ('false', 'no', 'off', 'n', 'f', '0'): return False raise ValueError('Unable to interpret value "%s" as boolean' % obj) return bool(obj) _DATE_REGEX = re.compile( r'(?P<year>\d{4})-(?P<month>\d{1,2})-(?P<day>\d{1,2})' r'(?: (?P<hour>\d{1,2}):(?P<minute>\d{1,2}):(?P<second>\d{1,2})' r'(?:\.(?P<microsecond>\d{1,6}))?)?') def convert_to_datetime(input): """ Converts the given object to a datetime object, if possible. If an actual datetime object is passed, it is returned unmodified. If the input is a string, it is parsed as a datetime. Date strings are accepted in three different forms: date only (Y-m-d), date with time (Y-m-d H:M:S) or with date+time with microseconds (Y-m-d H:M:S.micro). 
:rtype: datetime """ if isinstance(input, datetime): return input elif isinstance(input, date): return datetime.fromordinal(input.toordinal()) elif isinstance(input, str): m = _DATE_REGEX.match(input) if not m: raise ValueError('Invalid date string') values = [(k, int(v or 0)) for k, v in m.groupdict().items()] values = dict(values) return datetime(**values) raise TypeError('Unsupported input type: %s' % type(input)) def timedelta_seconds(delta): """ Converts the given timedelta to seconds. :type delta: timedelta :rtype: float """ return delta.days * 24 * 60 * 60 + delta.seconds + \ delta.microseconds / 1000000.0 def time_difference(date1, date2): """ Returns the time difference in seconds between the given two datetime objects. The difference is calculated as: date1 - date2. :param date1: the later datetime :type date1: datetime :param date2: the earlier datetime :type date2: datetime :rtype: float """ later = mktime(date1.timetuple()) + date1.microsecond / 1000000.0 earlier = mktime(date2.timetuple()) + date2.microsecond / 1000000.0 return later - earlier def datetime_ceil(dateval): """ Rounds the given datetime object upwards. :type dateval: datetime """ if dateval.microsecond > 0: return dateval + timedelta(seconds=1, microseconds= -dateval.microsecond) return dateval def combine_opts(global_config, prefix, local_config={}): """ Returns a subdictionary from keys and values of ``global_config`` where the key starts with the given prefix, combined with options from local_config. The keys in the subdictionary have the prefix removed. :type global_config: dict :type prefix: str :type local_config: dict :rtype: dict """ prefixlen = len(prefix) subconf = {} for key, value in global_config.items(): if key.startswith(prefix): key = key[prefixlen:] subconf[key] = value subconf.update(local_config) return subconf def get_callable_name(func): """ Returns the best available display name for the given function/callable. 
""" f_self = getattr(func, '__self__', None) or getattr(func, 'im_self', None) if f_self and hasattr(func, '__name__'): if isinstance(f_self, type): # class method return '%s.%s' % (f_self.__name__, func.__name__) # bound method return '%s.%s' % (f_self.__class__.__name__, func.__name__) if hasattr(func, '__call__'): if hasattr(func, '__name__'): # function, unbound method or a class with a __call__ method return func.__name__ # instance of a class with a __call__ method return func.__class__.__name__ raise TypeError('Unable to determine a name for %s -- ' 'maybe it is not a callable?' % repr(func)) def obj_to_ref(obj): """ Returns the path to the given object. """ ref = '%s:
%s' % (obj.__module__, get_callable_name(obj)) try: obj2 = ref_to_obj(ref) if obj != obj2: raise ValueError except Exception: raise ValueError('Cannot determine the reference to %s' % repr(obj)) return ref def ref_to_obj(ref): """ Returns the object pointed to by ``ref``. """ if not isinstance(ref, basestring): raise TypeError('References must be strings')
if not ':' in ref: raise ValueError('Invalid reference') modulename, rest = ref.split(':', 1) try: obj = __import__(modulename) except ImportError: raise LookupError('Error resolving reference %s: ' 'could not import module' % ref) try: for name in modulename.split('.')[1:] + rest.split('.'): obj = getattr(obj, name) return obj except Exception: raise LookupError('Error resolving reference %s: ' 'error looking up object' % ref) def maybe_ref(ref): """ Returns the object that the given reference points to, if it is indeed a reference. If it is not a reference, the object is returned as-is. """ if not isinstance(ref, str): return ref return ref_to_obj(ref) def to_unicode(string, encoding='ascii'): """ Safely converts a string to a unicode representation on any Python version. """ if hasattr(string, 'decode'): return string.decode(encoding, 'ignore') return string # pragma: nocover if sys.version_info < (3, 0): # pragma: nocover iteritems = lambda d: d.iteritems() itervalues = lambda d: d.itervalues() xrange = xrange basestring = basestring else: # pragma: nocover iteritems = lambda d: d.items() itervalues = lambda d: d.values() xrange = range basestring = str
# -*- coding: utf-8
-*- # © <YEA
R(S)> ClearCorp # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). import account_move_line
#! /usr/bin/env python

"""Read infofiles.
"""

import glob
import os, os.path
import sys
import threading
import time

import skytools

import cc.util
from cc import json
from cc.daemon import CCDaemon
from cc.message import is_msg_req_valid
from cc.reqs import InfofileMessage


class InfoStamp:
    """Tracks the last-seen stat() of one info file and whether its
    contents still need to be sent."""

    def __init__(self, fn, st):
        self.filename = fn
        self.filestat = st
        # New files start out as "needs sending".
        self.modified = 1

    def check_send(self, st):
        """Record *st*; return 1 only when the file has settled and a
        previously observed modification is still unsent."""
        if (st.st_mtime != self.filestat.st_mtime
                or st.st_size != self.filestat.st_size):
            # st changed, new mod -- wait for the file to stop changing
            # before sending it.
            self.modified = 1
            self.filestat = st
            return 0
        elif self.modified:
            return 1
        else:
            return 0


class InfofileCollector(CCDaemon):
    """Daemon that watches a directory of info files and publishes any
    changed file contents over CC."""

    log = skytools.getLogger('d:InfofileCollector')

    def reload(self):
        """Re-read configuration values."""
        super(InfofileCollector, self).reload()
        self.infodir = self.cf.getfile('infodir')
        self.infomask = self.cf.get('infomask')
        self.compression = self.cf.get('compression', 'none')
        if self.compression not in (None, '', 'none', 'gzip', 'bzip2'):
            self.log.error("unknown compression: %s", self.compression)
        self.compression_level = self.cf.getint('compression-level', '')
        self.maint_period = self.cf.getint('maint-period', 60 * 60)
        self.stats_period = self.cf.getint('stats-period', 30)
        self.msg_suffix = self.cf.get('msg-suffix', '')
        if self.msg_suffix and not is_msg_req_valid(self.msg_suffix):
            self.log.error("invalid msg-suffix: %s", self.msg_suffix)
            self.msg_suffix = None
        self.use_blob = self.cf.getbool('use-blob', True)

    def startup(self):
        super(InfofileCollector, self).startup()
        # fn -> stamp
        self.infomap = {}
        # activate periodic maintenance
        self.do_maint()

    def process_file(self, fs):
        """Send the file's contents if it changed and has settled.

        BUGFIX: the file handle used to leak on the early return (partial
        read) and on any exception, since f.close() was only reached on the
        fall-through path; 'with' closes it in every case.
        """
        with open(fs.filename, 'rb') as f:
            st = os.fstat(f.fileno())
            if not fs.check_send(st):
                return
            body = f.read()
        if len(body) != st.st_size:
            # File changed between stat and read; retry on a later pass.
            return
        fs.modified = 0
        self.log.debug('Sending: %s', fs.filename)
        self.send_file(fs, body)
        self.stat_inc('count')

    def send_file(self, fs, body):
        """Compress *body* and publish it as an InfofileMessage."""
        cfb = cc.util.compress(body, self.compression,
                               {'level': self.compression_level})
        self.log.debug("file compressed from %i to %i", len(body), len(cfb))
        if self.use_blob:
            data = ''
            blob = cfb
        else:
            data = cfb.encode('base64')
            blob = None
        msg = InfofileMessage(
                filename = fs.filename.replace('\\', '/'),
                mtime = fs.filestat.st_mtime,
                comp = self.compression,
                data = data)
        if self.msg_suffix:
            msg.req += '.' + self.msg_suffix
        self.ccpublish(msg, blob)
        self.stat_inc('infosender.bytes.read', len(body))
        self.stat_inc('infosender.bytes.sent', len(cfb))

    def find_new(self):
        """Stat all matching files; return the known ones that need sending
        (new files are registered but sent only once they have settled)."""
        fnlist = glob.glob(os.path.join(self.infodir, self.infomask))
        newlist = []
        for fn in fnlist:
            try:
                st = os.stat(fn)
            except OSError as e:  # was py2-only 'except OSError, e'
                self.log.info('%s: %s', fn, e)
                continue
            if fn not in self.infomap:
                fstamp = InfoStamp(fn, st)
                self.infomap[fn] = fstamp
            else:
                old = self.infomap[fn]
                if old.check_send(st):
                    newlist.append(old)
        self.log.debug("files found - all: %i, new: %i",
                       len(fnlist), len(newlist))
        return newlist

    def _work(self):
        self.connect_cc()
        newlist = self.find_new()
        for fs in newlist:
            try:
                self.process_file(fs)
            except (OSError, IOError) as e:  # was py2-only comma syntax
                self.log.info('%s: %s', fs.filename, e)
        self.stat_inc('changes', len(newlist))

    def work(self):
        # Loop for one stats period, scanning roughly once per second.
        t = time.time()
        while self.looping and self.stats_period > time.time() - t:
            self._work()
            self.sleep(1)
        return 1

    def stop(self):
        """ Called from signal handler """
        super(InfofileCollector, self).stop()
        self.log.info("stopping")
        self.maint_timer.cancel()

    def do_maint(self):
        """ Drop removed files from our cache """
        self.log.info("cleanup")
        current = glob.glob(os.path.join(self.infodir, self.infomask))
        removed = set(self.infomap) - set(current)
        for fn in removed:
            self.log.debug("forgetting file %s", fn)
            del self.infomap[fn]
        self.log.info("current: %i, removed: %i", len(current), len(removed))
        # Re-arm the maintenance timer.
        self.maint_timer = threading.Timer(self.maint_period, self.do_maint)
        self.maint_timer.start()


if __name__ == '__main__':
    s = InfofileCollector('infofile_collector', sys.argv[1:])
    s.start()
"""empty message Revision ID: ded3fd1d7f9d Revises: b70e85abec53 Cr
eate Date: 2020-12-30 22:46:59.418950 """ from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import mysql # revision identifiers, used by Alembic. revision = 'ded3fd1d7f9d' dow
n_revision = 'b70e85abec53' branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('hashfiles', sa.Column('checksum', sa.String(length=256), nullable=False)) op.drop_column('hashfiles', 'hash_str') # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('hashfiles', sa.Column('hash_str', mysql.VARCHAR(length=256), nullable=False)) op.drop_column('hashfiles', 'checksum') # ### end Alembic commands ###
s: dict of numpy arrays (one per output) """ @abstractmethod def getOutputElementCount(self, name): """Return the number of elements in the given output of the region Called from the scope of the region's PyRegion.getOutputElementCount() method. name: the name of the output """ @abstractmethod def getName(self): """ Return the name of the region """ class TestRegion(PyRegion): """ TestRegion is designed for testing and exploration of CLA Network mechanisms. Each TestRegion instance takes on a specific role via the associated TestRegionRole policy (TBD). """ def __init__(self, **kwargs): super(PyRegion, self).__init__(**kwargs) # Learning, inference, and other parameters. # By default we start out in stage learn with inference disabled # The specialization policy is what gives this region instance its identity. # Users set this via setIdentityPolicyInstance() before running the network self.identityPolicy = None # Debugging support, used in _conditionalBreak self.breakPdb = False self.breakKomodo = False # Construct ephemeral variables (those that aren't serialized) self.__constructEphemeralInstanceVars() # Variables set up in initialize() #self._sfdr = None # FDRCSpatial instance return def __constructEphemeralInstanceVars(self): """ Initialize ephemeral instance variables (those that aren't serialized) """ assert not hasattr(self, 'ephemeral') self.ephemeral = DictObj() self.ephemeral.logPathInput = '' self.ephemeral.logPathOutput = '' self.ephemeral.logPathOutputDense = '' self.ephemeral._fpLogInput = None self.ephemeral._fpLogOutput = None self.ephemeral._fpLogOutputDense = None return ############################################################################# # # Initialization code # ############################################################################# def initialize(self, dims, splitterMaps): """ Called by network after all links have been set up dims, splitterMaps: Unused legacy args """ self.identityPolicy.initialize(self) 
_debugOut(self.identityPolicy.getName()) return ############################################################################# # # Core compute methods: learning, inference, and prediction # ############################################################################# def compute(self, inputs, outputs): """ Run one iteration of the region's compute. The guts of the compute are contained in the _compute() call so that we can profile it if requested. """ # Uncomment this to find out who is generating divide by 0, or other numpy warnings # numpy.seterr(divide='raise', invalid='raise', over='raise') self.identityPolicy.compute(inputs, outputs) _debugOut(("%s: inputs=%s; outputs=%s") % \ (self.identityPolicy.getName(),inputs, outputs)) return ############################################################################# # # NuPIC 2 Support # These methods are required by NuPIC 2 # ############################################################################# def getOutputElementCount(self, name): nOutputElements = self.identityPolicy.getOutputElementCount(name) return nOutputElements # TODO: as a temporary hack, getParameterArrayCount checks to see if there's a # variable, private or not, with that name. If so, it attempts to return the # length of that variable. def getParameterArrayCount(self, name, index): p = self.getParameter(name) if (not hasattr(p, '__len__')): raise Exception("Attempt to access parameter '{0!s}' as an array but it is not an array".format(name)) return len(p) # TODO: as a temporary hack, getParameterArray checks to see if there's a # variable, private or not, with that name. If so, it returns the value of the # variable. 
def getParameterArray(self, name, index, a): p = self.getParameter(name) if (not hasattr(p, '__len__')): raise Exception("Attempt to access parameter '{0!s}' as an array but it is not an array".format(name)) if len(p) > 0: a[:] = p[:] return ############################################################################# # # Region API support methods: getSpec, getParameter, and setParameter # ############################################################################# @classmethod def getSpec(cls): """Return the base Spec for TestRegion. """ spec = dict( description="TestRegion", singleNodeOnly=True, inputs=dict( bottomUpIn=dict( description="""The input vector.""", dataType='Real32', count=0, required=False, regionLevel=True, isDefaultInput=True, requireSplitterMap=False), topDownIn=dict( description="""The top-down input signal, generated from feedback from upper levels""", dataType='Real32', count=0, required = False, regionLevel=True, isDefaultInput=False, requireSplitterMap=False), ), outputs=dict( bottomUpOut=dict( description="""The output signal generated from the bottom-up inputs from lower levels.""", dataType='Real32', count=0, regionLevel=True, isDefaultOutput=True), topDownOut=dict( description="""The top-down output signal, generated from feedback from upper levels""", dataType='Real32', count=0, regionLevel=True, isDefaultOutput=False), ), parameters=dict( logPathInput=dict( description='Optional name of input log file. If set, every input vector' ' will be logged to this file.', accessMode='ReadWrite', dataType='Byte', count=0, constraints=''), logPathOutput=dict( description='Optional name of output log file. If set, every output vector' ' will be logged to this file.', accessMode='ReadWrite', dataType='Byte', count=0, constraints=''), logPathOutputDense=dict( description='Optional name of output log file. 
If set, every output vector' ' will be logged to this file as a dense vector.', accessMode='ReadWrite', dataType='Byte', count=0, constraints=''), breakPdb=dict( description='Set to 1 to stop in the pdb debugger on the next compute', dataType='UInt32', count=1, constraints='bool', defaultValue=0, accessMode='ReadWrite'), breakKomodo=dict( description='Set to 1 to stop in the Komodo debugger on the ne
xt compute', dataType='UInt32', count=1, constraints='bool', defaultValue=0, accessMode='ReadWrite'), ), commands=dict( setIdentityPolicyInstance=dict(description= "Set identity policy instance BERORE running the network. " + \ "The instance MUST be derived from TestRegion's " +
\ "RegionIdentityPolicyBase class."), getIdentityPolicyInstance=dict(description= "Returns identity policy instance that was associated with " + \ "the TestRegion instance via the setIdentityPolicyInstance " + \ "command."), ) ) return spec def getParameter(self, parameterName, index=-1): """ Get the value of a NodeSpec parameter. Most parameters are handled automatically by PyRegion's parameter get mechanism. The ones that need special treatment are explicitly handled here. """ assert not (parameterName in self.__dict__ and parameterName in self.ephemeral)
import unittest
import os, sys, imp

from qgis import utils
from qgis.core import QgsVectorLayer, QgsField, QgsProject, QGis
from qgis.PyQt.QtCore import QVariant

from .qgis_models import set_up_interface
from mole3.qgisinteraction import layer_interaction as li
from mole3.qgisinteraction import plugin_interaction as pi
from mole3.tests.qgis_models import HybridLayer


class PstPluginInteractionTest(unittest.TestCase):
    """Integration tests for the PointSamplingTool plugin wrapper."""

    def create_layer_with_features(self, name, type='Polygon'):
        """Create an in-memory layer *name* with four color attributes."""
        v_layer_name = li.biuniquify_layer_name(name)
        if type == 'Point':
            v_layer = QgsVectorLayer('{}?crs=EPSG:3857'.format(type), v_layer_name, 'memory', False)
        else:
            v_layer = HybridLayer(type, v_layer_name)
        provider = v_layer.dataProvider()
        v_layer.startEditing()
        attributes = [QgsField('COLOR_RED', QVariant.String), QgsField('COLOR_GRE', QVariant.String),
                      QgsField('COLOR_BLU', QVariant.String), QgsField('COLOR_ALP', QVariant.String)]
        provider.addAttributes(attributes)
        v_layer.commitChanges()
        return v_layer

    def add_pointsamplingtool_to_plugins(self):
        """Load the pointsamplingtool package from the first plugin path."""
        plugin_folder = os.path.join(utils.plugin_paths[0], 'pointsamplingtool', '__init__.py')
        self.assertTrue(os.path.exists(str(plugin_folder)),
                        'Path to plugin not found. ({})'.format(str(plugin_folder)))
        sys.modules['pointsamplingtool'] = imp.load_source('pointsamplingtool', plugin_folder)

    def setUp(self):
        self.qgis_app, self.canvas, self.iface = set_up_interface()
        utils.plugin_paths = [os.path.expanduser('~/.qgis2/python/plugins')]
        utils.updateAvailablePlugins()
        utils.loadPlugin('pointsamplingtool')
        utils.iface = self.iface
        utils.startPlugin('pointsamplingtool')

    def tearDown(self):
        if self.qgis_app is not None:
            del(self.qgis_app)

    def test_if_plugin_is_available(self):
        self.assertNotEqual(utils.available_plugins, [], 'No plugins were loaded.')
        self.assertIn('pointsamplingtool', utils.available_plugins)

    def test_if_plugin_is_accessible(self):
        self.add_pointsamplingtool_to_plugins()
        psti = pi.PstInteraction(utils.iface)
        self.assertIsNotNone(psti)

    def test_if_all_fields_are_selected(self):
        self.add_pointsamplingtool_to_plugins()
        registry = QgsProject.instance()
        point_layer = self.create_layer_with_features('point', 'Point')
        poly_layer1 = self.create_layer_with_features('poly1')
        poly_layer2 = self.create_layer_with_features('poly2')
        registry.addMapLayer(point_layer)
        registry.addMapLayer(poly_layer1)
        registry.addMapLayer(poly_layer2)
        psti = pi.PstInteraction(utils.iface)
        psti.set_input_layer(point_layer.name())
        selected_fields = psti.pst_dialog.fieldsTable
        psti.select_and_rename_files_for_sampling()
        fields_point = point_layer.dataProvider().fields()
        fields_poly1 = poly_layer1.dataProvider().fields()
        fields_poly2 = poly_layer2.dataProvider().fields()
        # Every field of every registered layer should appear in the table.
        rows_expected = fields_point.count() + fields_poly1.count() + fields_poly2.count()
        self.assertEqual(selected_fields.rowCount(), rows_expected)

    def test_if_field_names_are_unique(self):
        self.add_pointsamplingtool_to_plugins()
        registry = QgsProject.instance()
        point_layer = self.create_layer_with_features('test_pointlayer', 'Point')
        poly_layer1 = self.create_layer_with_features('test_polygonlayer1')
        poly_layer2 = self.create_layer_with_features('test_polygonlayer2')
        registry.addMapLayer(point_layer)
        registry.addMapLayer(poly_layer1)
        registry.addMapLayer(poly_layer2)
        psti = pi.PstInteraction(utils.iface)
        psti.set_input_layer(point_layer.name())
        # renamed from `map` to avoid shadowing the builtin
        name_map = psti.select_and_rename_files_for_sampling()
        appendix = ['R', 'G', 'B', 'a']
        poly_fields = psti.pst_dialog.rastItems[poly_layer1.name()]
        for i in range(1, len(poly_fields)):
            self.assertEqual(poly_fields[i][1], '01{}_{}'.format(poly_layer1.name()[:6], appendix[i-1]))
        poly_fields = psti.pst_dialog.rastItems[poly_layer2.name()]
        for i in range(1, len(poly_fields)):
            # FIX: the expected prefix must come from poly_layer2 (was
            # poly_layer1; it only passed because both test names share
            # their first six characters).
            self.assertEqual(poly_fields[i][1], '02{}_{}'.format(poly_layer2.name()[:6], appendix[i-1]))
        self.assertEqual(name_map[poly_layer1.name()], '01{}'.format(poly_layer1.name()[:6]))
        self.assertEqual(name_map[poly_layer2.name()], '02{}'.format(poly_layer2.name()[:6]))


if __name__ == '__main__':
    unittest.main()
# GPAW regression script: 32/64 H2O molecules, comparing RMM-DIIS with
# keep_htpsit=False (run 'A') against a restart from a converged
# keep_htpsit=True run (run 'B').
#
# the problem described below was fixed in 9758!
# keep_htpsit=False fails since 9473,
# on some installations (?) with:
# case A (see below in the code):
# RuntimeError: Could not locate the Fermi level!
# or the energies from the 2nd one behave strange, no convergence:
#  iter:   1 18:21:49  +1.7           -3608.512512  0      19
#  iter:   2 18:22:31  +1.9           -3148.936317  0
#  iter:   3 18:23:13  +2.1           -2375.137532  0
#  iter:   4 18:23:58  +2.4  -0.9     -1040.851545  216    11
#  iter:   5 18:24:43  +2.6  -1.0       822.569589  597    14
# case B (see below in the code):
# No convergence when starting from a converged (keep_htpsit=True) run!
# WFS error grows to positive values!
# Is it an extreme case of https://trac.fysik.dtu.dk/projects/gpaw/ticket/51 ?

import os
import sys

from ase import Atoms
from gpaw import GPAW
from gpaw import ConvergenceError
from gpaw.mpi import rank
from gpaw.eigensolvers.rmm_diis_old import RMM_DIIS
from gpaw import setup_paths
# NOTE(review): ConvergenceError and rank are imported but unused in this
# visible chunk — presumably kept for interactive use; verify before removing.

# Select case 'A' (default) or 'B' from the command line.
if len(sys.argv) == 1:
    run = 'A'
else:
    run = sys.argv[1]

assert run in ['A', 'B']

# Use setups from the $PWD and $PWD/.. first
setup_paths.insert(0, '.')
setup_paths.insert(0, '../')

# Cartesian positions of 32 water molecules (O, H, H per molecule),
# in Angstrom — assumed; units not stated here, TODO confirm.
positions=[
    (-0.069, 0.824,-1.295), ( 0.786, 0.943,-0.752), (-0.414,-0.001,-0.865),
    (-0.282,-0.674,-3.822), ( 0.018,-0.147,-4.624), (-0.113,-0.080,-3.034),
    ( 2.253, 1.261, 0.151), ( 2.606, 0.638,-0.539), ( 2.455, 0.790, 1.019),
    ( 3.106,-0.276,-1.795), ( 2.914, 0.459,-2.386), ( 2.447,-1.053,-1.919),
    ( 6.257,-0.625,-0.626), ( 7.107,-1.002,-0.317), ( 5.526,-1.129,-0.131),
    ( 5.451,-1.261,-2.937), ( 4.585,-0.957,-2.503), ( 6.079,-0.919,-2.200),
    (-0.515, 3.689, 0.482), (-0.218, 3.020,-0.189), ( 0.046, 3.568, 1.382),
    (-0.205, 2.640,-3.337), (-1.083, 2.576,-3.771), (-0.213, 1.885,-2.680),
    ( 0.132, 6.301,-0.278), ( 1.104, 6.366,-0.068), (-0.148, 5.363,-0.112),
    (-0.505, 6.680,-3.285), (-0.674, 7.677,-3.447), (-0.965, 6.278,-2.517),
    ( 4.063, 3.342,-0.474), ( 4.950, 2.912,-0.663), ( 3.484, 2.619,-0.125),
    ( 2.575, 2.404,-3.170), ( 1.694, 2.841,-3.296), ( 3.049, 2.956,-2.503),
    ( 6.666, 2.030,-0.815), ( 7.476, 2.277,-0.316), ( 6.473, 1.064,-0.651),
    ( 6.860, 2.591,-3.584), ( 6.928, 3.530,-3.176), ( 6.978, 2.097,-2.754),
    ( 2.931, 6.022,-0.243), ( 3.732, 6.562,-0.004), ( 3.226, 5.115,-0.404),
    ( 2.291, 7.140,-2.455), ( 1.317, 6.937,-2.532), ( 2.586, 6.574,-1.669),
    ( 6.843, 5.460, 1.065), ( 7.803, 5.290, 0.852), ( 6.727, 5.424, 2.062),
    ( 6.896, 4.784,-2.130), ( 6.191, 5.238,-2.702), ( 6.463, 4.665,-1.259),
    ( 0.398, 0.691, 4.098), ( 0.047, 1.567, 3.807), ( 1.268, 0.490, 3.632),
    ( 2.687, 0.272, 2.641), ( 3.078, 1.126, 3.027), ( 3.376,-0.501, 2.793),
    ( 6.002,-0.525, 4.002), ( 6.152, 0.405, 3.660), ( 5.987,-0.447, 4.980),
    ( 0.649, 3.541, 2.897), ( 0.245, 4.301, 3.459), ( 1.638, 3.457, 3.084),
    (-0.075, 5.662, 4.233), (-0.182, 6.512, 3.776), (-0.241, 5.961, 5.212),
    ( 3.243, 2.585, 3.878), ( 3.110, 2.343, 4.817), ( 4.262, 2.718, 3.780),
    ( 5.942, 2.582, 3.712), ( 6.250, 3.500, 3.566), ( 6.379, 2.564, 4.636),
    ( 2.686, 5.638, 5.164), ( 1.781, 5.472, 4.698), ( 2.454, 6.286, 5.887),
    ( 6.744, 5.276, 3.826), ( 6.238, 5.608, 4.632), ( 7.707, 5.258, 4.110),
    ( 8.573, 8.472, 0.407), ( 9.069, 7.656, 0.067), ( 8.472, 8.425, 1.397),
    ( 8.758, 8.245, 2.989), ( 9.294, 9.091, 3.172), ( 7.906, 8.527, 3.373),
    ( 4.006, 7.734, 3.021), ( 4.685, 8.238, 3.547), ( 3.468, 7.158, 3.624),
    ( 5.281, 6.089, 6.035), ( 5.131, 7.033, 6.378), ( 4.428, 5.704, 5.720),
    ( 5.067, 7.323, 0.662), ( 5.785, 6.667, 0.703), ( 4.718, 7.252, 1.585)]

prefix = 'b256H2O'
L = 9.8553729  # cubic cell edge
atoms = Atoms('32(OH2)', positions=positions)
atoms.set_cell((L,L,L),scale_atoms=False)
atoms.set_pbc(1)
# Repeat the 32-molecule cell along z to get 64 molecules.
r = [1, 1, 2]
atoms = atoms.repeat(r)
n = [56 * ri for ri in r]  # grid points per axis
# nbands (>=128) is the number of bands per 32 water molecules
nbands = 2*6*11 # 132
for ri in r:
    nbands = nbands*ri
# the next line decreases memory usage
es = RMM_DIIS(keep_htpsit=False)
calc = GPAW(nbands=nbands,
            # uncomment next two lines to use lcao/sz
            #mode='lcao',
            #basis='sz',
            gpts=tuple(n),
            #maxiter=5,
            width = 0.01,
            eigensolver = es,
            txt=prefix + '.txt',
            )
if run == 'A':
    # Case A: single run directly with keep_htpsit=False.
    atoms.set_calculator(calc)
    pot = atoms.get_potential_energy()
elif run == 'B':
    # Case B: converge first with keep_htpsit=True ...
    calc.set(eigensolver='rmm-diis')
    calc.set(txt=prefix + '_True.txt')
    atoms.set_calculator(calc)
    pot = atoms.get_potential_energy()
    # ... then restart; fails to converge with keep_htpsit=False
    calc.set(eigensolver=es)
    calc.set(maxiter=200)
    calc.set(txt=prefix + '_False.txt')
    atoms.set_calculator(calc)
    pot = atoms.get_potential_energy()
from flask import Flask
# NOTE(review): the `flask.ext.*` import shim is deprecated and removed in
# modern Flask; these imports only work against the old extension loader.
# Verify the pinned Flask version before modernizing to `flask_bootstrap` etc.
from flask.ext.bootstrap import Bootstrap
from flask.ext.mail import Mail
from flask.ext.moment import Moment
from flask.ext.sqlalchemy import SQLAlchemy
from config import config
from flask.ext.redis import Redis

# Extension singletons; each is bound to the app inside create_app().
bootstrap = Bootstrap()
mail = Mail()
moment = Moment()
db = SQLAlchemy()
redis1 = Redis()


def create_app(config_name):
    """Application factory: build and configure a Flask app.

    `config_name` is a key into the `config` mapping selecting which
    configuration class to load. Returns the configured Flask app.
    """
    app = Flask(__name__)
    app.config.from_object(config[config_name])
    config[config_name].init_app(app)
    # Redis settings are hard-coded here, overriding anything loaded from
    # the configuration object above.
    app.config['REDIS_HOST'] = 'localhost'
    app.config['REDIS_PORT'] = 6379
    app.config['REDIS_DB'] = 0

    # Bind the module-level extension singletons to this app instance.
    bootstrap.init_app(app)
    mail.init_app(app)
    moment.init_app(app)
    db.init_app(app)
    redis1.init_app(app)

    # Imported here (not at module top) to avoid a circular import with
    # the blueprint modules.
    from .main import main as main_blueprint
    # from .main.common import common
    app.register_blueprint(main_blueprint)
    # app.register_blueprint(common)

    return app
# -*- coding:
utf-8 -*- from .ba
se import WatershedBEM
""" A tool for converting kv6 models into pmf. GreaseMonkey, 2013 - Public Domain WARNING: I haven't checked to ensure that X,Y are around the right way. If you find your models have been flipped inadvertently, let me know! --GM """ from __future__ import print_function import sys, struct # Backwards compatibility - make new code work on old version, not vice-versa PY2 = sys.version_info[0] == 2 PY3 = sys.version_info[0] == 3 if PY2: # This script didn't use range() anyway, so no problem overwriting it in Py2 import __builtin__ range = getattr(__builtin__, "xrange") _ord = ord else: _ord = lambda x: x USAGE_MSG = """ usage: python2 kv62pmf.py in.kv6 out.pmf ptsize ptspacing bonename """ if len(sys.argv) <= 4: print(USAGE_MSG) exit() if not sys.argv[3].isdigit(): raise Exception("expected a number for the 3rd argument") if not sys.argv[4].isdigit(): raise Exception("expected a number for the 4th argument") ptsize = int(sys.argv[3]) ptspacing = int(sys.argv[4]) if ptsize < 1 or ptsize > 65535: raise Exception("point size out of range (1..65535)") bonename = sys.argv[4] if PY3: bonename = bonename.encode() if len(bonename) > 15: raise Exception("bone name too large") infp = open(sys.argv[1],"rb") if infp.read(4) != b"Kvxl": raise Exception("not a KV6 file") xsiz, ysiz, zsiz, xpivot, ypivot, zpivot, blklen = struct.unpack("<IIIfffI", infp.read(28)) print(xsiz, ysiz, zsiz, xpivot, ypivot, zpivot) xpivot = int(xpivot*ptspacing+0.5) ypivot = int(ypivot*ptspacing+0.5) zpivot = int(zpivot*ptspacing+0.5) # yeah i know this is basically worst case assuming x,y,z pivot is within the model bounds if max(max(xsiz,ysiz),zsiz)*ptspacing > 65535: raise Exception("point size a bit TOO large to fit into a pmf") if blklen > 4096: raise Exception("kv6 has too many blocks to fit into a pmf") def parseblk(s): return struct.unpack("<BBBBHBB",s) blkdata = [parseblk(infp.read(8)) for i in range(blklen)] xoffset = [struct.unpack("<I", infp.read(4))[0] for i in range(xsiz)] xyoffset = 
[struct.unpack("<H", infp.read(2))[0] for i in range(xsiz*ysiz)] assert blklen == sum(xoffset) assert blklen == sum(xyoffset) # Corollary: sum(xoffset) == sum(xyoffset) # Proof: Left as an exercise to the reader. magic_spal = infp.read(4) palette = None if magic_spal == b"": pass # no palette elif magic_spal == b"SPal": palette = [[_ord(v) for v in infp.read(3)] for i in range(256)] else: raise Exception("expected palette at end of file") infp.close() # # # # pretty simple really outfp = open(sys.argv[2], "wb") # start with the header of "PMF",0x1A,1,0,0,0 outfp.write(b"PMF\x1A\x01\x00\x00\x00") # then there's a uint32_t denoting how many body parts there are outfp.write(struct.pack("<I",1)) # then, for each body part, # there's a null-terminated 16-byte string (ma
x 15 chars) denoting the part outfp.write(bonename + b"\x00"
*(16-len(bonename))) # then there's a uint32_t denoting how many points there are in this body part outfp.write(struct.pack("<I",blklen)) # then there's a whole bunch of this: # uint16_t radius; # int16_t x,y,z; # uint8_t b,g,r,reserved; bi = 0 oi = 0 for cx in range(xsiz): for cy in range(ysiz): for i in range(xyoffset[oi]): b,g,r,l,ypos,vis,unk1 = blkdata[bi] outfp.write(struct.pack("<HhhhBBBB" ,ptsize ,cx*ptspacing-xpivot ,ypos*ptspacing-zpivot ,cy*ptspacing-ypivot ,b,g,r,0)) bi += 1 oi += 1 # rinse, lather, repeat outfp.close()
from mrjob.job import MRJob
from mrjob.step import MRStep


def get_id_from_line(line):
    """Return the numeric Message-ID embedded in *line*, or None.

    The id starts 13 characters past "Message-ID" and runs while the
    characters are digits or dots; the original offset arithmetic
    (stop = i+start-2) is preserved.
    FIX: previously `start`/`stop` could be unbound (NameError) when the
    marker was absent or the id was never terminated; now returns None.
    """
    if line.find('.","Message-ID: <') > 0:
        start = line.find("Message-ID")+13
        i = 0
        for char in line[start:]:
            i = i+1
            if (not (char.isdigit() or (char == '.'))):
                stop = i+start-2
                return line[start:stop]
    return None


class MRMultilineInput(MRJob):
    """Two-step job: stitch multi-line mail records together, then classify
    each record as parent/child by its Subject line."""

    def steps(self):
        # FIX: the step list was missing its closing bracket (syntax error).
        return [
            MRStep(mapper_init=self.mapper_init_count, mapper=self.mapper_count),
            MRStep(mapper=self.mapper_child),
        ]

    # STEP 1
    def mapper_init_count(self):
        # Parser state carried across mapper_count() calls.
        self.message_id = ''
        self.in_body = False
        self.body = []
        self.after_key = False
        self.beginning = False
        self.key = False

    def mapper_count(self, _, line):
        """Accumulate raw lines into (message_id, body) records.

        NOTE(review): control-flow nesting reconstructed from a
        whitespace-mangled source — verify against the original job.
        """
        line = line.strip()
        if (line.find('.","Message-ID: <') > 0) and self.in_body and not self.beginning:
            # A new record starts: emit the finished one and reset state.
            yield self.message_id, self.body
            self.message_id = ''
            self.body = []
            self.in_body = False
            self.after_key = False
            self.beginning = False
            self.key = False
        if self.in_body and not self.after_key:
            self.beginning = False
            self.body.append(line)
        if line.find('.","Message-ID: <') > 0 and not self.key:
            if not self.in_body:
                self.in_body = True
                self.beginning = True
            self.after_key = True
            self.key = True
            # Consolidated with get_id_from_line() (was duplicated inline).
            self.message_id = get_id_from_line(line)
            self.after_key = False

    # STEP 2
    def mapper_child(self, message_id, values):
        """Extract Date/From headers, build a cleaned record, and classify
        the message as "child" (Re:) or "parent" by its subject."""
        clean_body = ''
        clean_date = ''
        clean_from = ''
        clean_to = ''
        clean_values = []
        start = 0
        for idx, line in enumerate(values):
            if "Date:" in line:
                clean_date = line[5:].strip()
            if line.find("From:") == 0:
                clean_from = line[5:].strip()
            if line.find("To:") == 0:
                clean_to = line[3:].strip()
            if "X-FileName:" in line:
                # Body starts after the X-FileName header.
                start = idx+1
                break
        for i in range(start, len(values)):
            if "-Original Message-" in values[i]:
                break
            clean_body = clean_body + values[i] + " "
        clean_values.append(clean_date)
        clean_values.append(clean_from)
        #clean_values.append(clean_to)
        #clean_values.append(clean_body.strip())
        clean_values.append("TEST BODY")
        # NOTE(review): newval aliases values — the appends below mutate the
        # input list, and `relation` is then read back from values[-1].
        newval = values
        # FIX: subject could be unbound (NameError) when no line contained
        # "subject:"; default to the empty string (classified as "parent").
        subject = ''
        for element in values:
            if "subject:" in element.lower():
                subject = element
                break
        if "re:" in subject.lower():
            newval.append("child")
        elif "fw:" not in subject.lower():
            newval.append("parent")
        for element in newval:
            if "Subject:" in element:
                subject = element
                break
        relation = values[-1]
        i = 0
        colon = 0
        if "<" not in subject:
            # Find the index just past the last ':' in the subject.
            for char in subject:
                i = i+1
                if char == ":":
                    colon = i
        sub = subject[colon+1:].strip()
        sub_relation = []
        sub_relation.append(sub)
        sub_relation.append(relation)
        yield sub_relation, (message_id, clean_values)


if __name__ == '__main__':
    MRMultilineInput.run()
# NOTE(review): this chunk is truncated at both ends — it begins mid-way
# through a method of an unseen Homebrew-builder class (only the tail of an
# error message and the yield remain) and ends mid-way through
# DockerBuilder.set_remote_tag(). Truncated fragments kept verbatim.
ze output line: {line}'
            )
        name, version = parts
        # Resolve the sdist URL and checksum from PyPI for this pin.
        url, sha256 = self._get_pypi_info(name, version)
        dep = HomebrewDependency(
            name=name, url=url, sha256=sha256, version=version
        )
        yield dep

    def _remove_dbt_resource(self, lines: List[str]) -> Iterator[str]:
        """Yield formula lines with the 'resource "dbt" do' block removed."""
        # TODO: fork poet or extract the good bits to avoid this
        line_iter = iter(lines)
        # don't do a double-newline or "brew audit" gets mad
        for line in line_iter:
            # skip the contents of the "dbt" resource block.
            if line.strip() == 'resource "dbt" do':
                for skip in line_iter:
                    if skip.strip() == 'end':
                        # skip the newline after 'end'
                        next(line_iter)
                        break
            else:
                yield line

    def create_versioned_formula_file(self):
        """Write the dbt@<version> formula file (overwrites if present)."""
        formula_contents = self.get_formula_data(versioned=True)
        if self.versioned_formula_path.exists():
            print('Homebrew formula path already exists, overwriting')
        self.versioned_formula_path.write_text(formula_contents)

    def commit_versioned_formula(self):
        # add a commit for the new formula
        run_command(
            ['git', 'add', self.versioned_formula_path],
            cwd=self.homebrew_path
        )
        run_command(
            ['git', 'commit', '-m', f'add dbt@{self.version}'],
            cwd=self.homebrew_path
        )

    def commit_default_formula(self):
        # Commit the updated default (unversioned) formula.
        run_command(
            ['git', 'add', self.default_formula_path],
            cwd=self.homebrew_path
        )
        run_command(
            ['git', 'commit', '-m', f'upgrade dbt to {self.version}'],
            cwd=self.homebrew_path
        )

    @staticmethod
    def run_tests(formula_path: Path, audit: bool = True):
        """Uninstall any existing dbt kegs, then install/test/audit the formula."""
        path = os.path.normpath(formula_path)
        run_command(['brew', 'uninstall', '--force', path])
        versions = [
            l.strip() for l in
            collect_output(['brew', 'list']).split('\n')
            if l.strip().startswith('dbt@') or l.strip() == 'dbt'
        ]
        if versions:
            run_command(['brew', 'unlink'] + versions)
        run_command(['brew', 'install', path])
        run_command(['brew', 'test', path])
        if audit:
            run_command(['brew', 'audit', '--strict', path])

    def create_default_package(self):
        # Replace the default formula with a freshly generated one.
        os.remove(self.default_formula_path)
        formula_contents = self.get_formula_data(versioned=False)
        self.default_formula_path.write_text(formula_contents)

    def build(self):
        """Create and commit the versioned formula; optionally promote it
        to the default formula as well."""
        self.create_versioned_formula_file()
        # self.run_tests(formula_path=self.versioned_formula_path)
        self.commit_versioned_formula()
        if self.set_default:
            self.create_default_package()
            # self.run_tests(formula_path=self.default_formula_path, audit=False)
            self.commit_default_formula()


class WheelInfo:
    """Reads package metadata out of a built wheel file."""

    def __init__(self, path):
        self.path = path

    @staticmethod
    def _extract_distinfo_path(wfile: zipfile.ZipFile) -> zipfile.Path:
        # The .dist-info directory sits at the top level of the wheel zip.
        zpath = zipfile.Path(root=wfile)
        for path in zpath.iterdir():
            if path.name.endswith('.dist-info'):
                return path
        raise ValueError('Wheel with no dist-info?')

    def get_metadata(self) -> Dict[str, str]:
        """Parse the wheel's METADATA file into a header->value dict."""
        with zipfile.ZipFile(self.path) as wf:
            distinfo = self._extract_distinfo_path(wf)
            metadata = distinfo / 'METADATA'
            metadata_dict: Dict[str, str] = {}
            for line in metadata.read_text().split('\n'):
                parts = line.split(': ', 1)
                if len(parts) == 2:
                    metadata_dict[parts[0]] = parts[1]
        return metadata_dict

    def package_name(self) -> str:
        metadata = self.get_metadata()
        if 'Name' not in metadata:
            raise ValueError('Wheel with no name?')
        return metadata['Name']


class DockerBuilder:
    """The docker builder requires the existence of a dbt package"""

    def __init__(self, dbt_path: Path, version: Version) -> None:
        self.dbt_path = dbt_path
        self.version = version

    @property
    def docker_path(self) -> Path:
        return self.dbt_path / 'docker'

    @property
    def dockerfile_name(self) -> str:
        return f'Dockerfile.{self.version}'

    @property
    def dockerfile_path(self) -> Path:
        return self.docker_path / self.dockerfile_name

    @property
    def requirements_path(self) -> Path:
        return self.docker_path / 'requirements'

    @property
    def requirements_file_name(self) -> str:
        return f'requirements.{self.version}.txt'

    @property
    def dockerfile_venv_path(self) -> Path:
        return self.dbt_path / 'build' / 'docker-venv'

    @property
    def requirements_txt_path(self) -> Path:
        return self.requirements_path / self.requirements_file_name

    def make_venv(self) -> DistFolderEnv:
        # Build a venv from the dist folder wheels.
        env = DistFolderEnv(self.dbt_path)
        env.create(self.dockerfile_venv_path)
        return env

    def get_frozen(self) -> str:
        """Return `pip freeze` output minus the dbt wheels themselves."""
        env = self.make_venv()
        pip_path = self.dockerfile_venv_path / 'bin/pip'
        cmd = [pip_path, 'freeze']
        wheel_names = {
            WheelInfo(wheel_path).package_name() for wheel_path in env.wheels
        }
        # remove the dependencies in dbt itself
        return '\n'.join([
            dep for dep in collect_output(cmd).split('\n')
            if dep.split('==')[0] not in wheel_names
        ])

    def write_lockfile(self):
        """Write the frozen requirements file; refuse to overwrite."""
        freeze = self.get_frozen()
        path = self.requirements_txt_path
        if path.exists():
            raise ValueError(f'Found existing requirements file at {path}!')
        os.makedirs(path.parent, exist_ok=True)
        path.write_text(freeze)

    def get_dockerfile_contents(self):
        """Render the Dockerfile text for this version."""
        dist_path = (self.dbt_path / 'dist').relative_to(Path.cwd())
        wheel_paths = ' '.join(
            os.path.join('.', 'dist', p.name)
            for p in _require_wheels(self.dbt_path)
        )
        requirements_path = self.requirements_txt_path.relative_to(Path.cwd())
        return textwrap.dedent(
            f'''\
            FROM python:3.8.1-slim-buster
            RUN apt-get update && \
                apt-get dist-upgrade -y && \
                apt-get install -y  --no-install-recommends \
                git software-properties-common make build-essential \
                ca-certificates libpq-dev && \
                apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
            COPY {requirements_path} ./{self.requirements_file_name}
            COPY {dist_path} ./dist
            RUN pip install --upgrade pip setuptools
            RUN pip install --requirement ./{self.requirements_file_name}
            RUN pip install {wheel_paths}
            RUN useradd -mU dbt_user
            ENV PYTHONIOENCODING=utf-8
            ENV LANG C.UTF-8
            WORKDIR /usr/app
            VOLUME /usr/app
            USER dbt_user
            ENTRYPOINT dbt
            '''
        )

    def write_dockerfile(self):
        """Write the Dockerfile; refuse to overwrite an existing one."""
        dockerfile = self.get_dockerfile_contents()
        path = self.dockerfile_path
        if path.exists():
            raise ValueError(f'Found existing docker file at {path}!')
        os.makedirs(path.parent, exist_ok=True)
        path.write_text(dockerfile)

    @property
    def image_tag(self):
        return f'dbt:{self.version}'

    @property
    def remote_tag(self):
        return f'fishtownanalytics/{self.image_tag}'

    def create_docker_image(self):
        run_command(
            [
                'docker', 'build',
                '-f', self.dockerfile_path,
                '--tag', self.image_tag,
                # '--no-cache',
                self.dbt_path,
            ],
            cwd=self.dbt_path
        )

    def set_remote_tag(self):
        # tag it
        # NOTE(review): truncated here in this chunk.
        run_command(
            ['docker', 'tag', self.image_tag, self.remote_tag],
            cwd=self.
#!/usr/bin/python

# Generate .js files defining Blockly core and language messages.
#
# Copyright 2013 Google Inc.
# https://developers.google.com/blockly/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import argparse
import codecs
import os
import re
import sys
from common import read_json_file

_NEWLINE_PATTERN = re.compile('[\n\r]')


def string_is_ascii(s):
  """Return True iff *s* contains only ASCII characters.

  FIX: the original called s.decode('ascii') and caught only
  UnicodeEncodeError; for byte strings the failure mode is
  UnicodeDecodeError (which went uncaught), and under Python 3 `str`
  has no .decode at all. Handle both text and bytes, catching both
  error types.
  """
  try:
    if isinstance(s, bytes):
      s.decode('ascii')
    else:
      s.encode('ascii')
    return True
  except (UnicodeDecodeError, UnicodeEncodeError):
    return False


def load_constants(filename):
  """Read in constants file, which must be output in every language."""
  constant_defs = read_json_file(filename)
  constants_text = '\n'
  for key in constant_defs:
    value = constant_defs[key]
    # Escape double quotes so the value survives inside a JS string literal.
    value = value.replace('"', '\\"')
    constants_text += u'\nBlockly.Msg["{0}"] = \"{1}\";'.format(
        key, value)
  return constants_text


def main():
  """Generate .js files defining Blockly core and language messages."""
  # Process command-line arguments.
  parser = argparse.ArgumentParser(description='Convert JSON files to JS.')
  parser.add_argument('--source_lang', default='en',
                      help='ISO 639-1 source language code')
  parser.add_argument('--source_lang_file',
                      default=os.path.join('json', 'en.json'),
                      help='Path to .json file for source language')
  parser.add_argument('--source_synonym_file',
                      default=os.path.join('json', 'synonyms.json'),
                      help='Path to .json file with synonym definitions')
  parser.add_argument('--source_constants_file',
                      default=os.path.join('json', 'constants.json'),
                      help='Path to .json file with constant definitions')
  parser.add_argument('--output_dir', default='js/',
                      help='relative directory for output files')
  parser.add_argument('--key_file', default='keys.json',
                      help='relative path to input keys file')
  parser.add_argument('--quiet', action='store_true', default=False,
                      help='do not write anything to standard output')
  parser.add_argument('files', nargs='+', help='input files')
  args = parser.parse_args()
  if not args.output_dir.endswith(os.path.sep):
    args.output_dir += os.path.sep

  # Read in source language .json file, which provides any values missing
  # in target languages' .json files.
  source_defs = read_json_file(os.path.join(os.curdir, args.source_lang_file))
  # Make sure the source file doesn't contain a newline or carriage return.
  for key, value in source_defs.items():
    if _NEWLINE_PATTERN.search(value):
      print('ERROR: definition of {0} in {1} contained a newline character.'.
            format(key, args.source_lang_file))
      sys.exit(1)
  # FIX: was `keys()` followed by list.sort(), which breaks under Python 3
  # (dict views have no .sort()); sorted() yields the same ordering.
  sorted_keys = sorted(source_defs.keys())

  # Read in synonyms file, which must be output in every language.
  synonym_defs = read_json_file(os.path.join(
      os.curdir, args.source_synonym_file))
  synonym_text = '\n'.join([u'Blockly.Msg["{0}"] = Blockly.Msg["{1}"];'
      .format(key, synonym_defs[key]) for key in synonym_defs])

  # Read in constants file, which must be output in every language.
  constants_text = load_constants(os.path.join(os.curdir, args.source_constants_file))

  # Create each output file.
  for arg_file in args.files:
    (_, filename) = os.path.split(arg_file)
    target_lang = filename[:filename.index('.')]
    if target_lang not in ('qqq', 'keys', 'synonyms', 'constants'):
      target_defs = read_json_file(os.path.join(os.curdir, arg_file))

      # Verify that keys are 'ascii'
      bad_keys = [key for key in target_defs if not string_is_ascii(key)]
      if bad_keys:
        print(u'These keys in {0} contain non ascii characters: {1}'.format(
            filename, ', '.join(bad_keys)))

      # If there's a '\n' or '\r', remove it and print a warning.
      for key, value in target_defs.items():
        if _NEWLINE_PATTERN.search(value):
          print(u'WARNING: definition of {0} in {1} contained '
                'a newline character.'.
                format(key, arg_file))
          target_defs[key] = _NEWLINE_PATTERN.sub(' ', value)

      # Output file.
      outname = os.path.join(os.curdir, args.output_dir, target_lang + '.js')
      with codecs.open(outname, 'w', 'utf-8') as outfile:
        outfile.write(
            """// This file was automatically generated.  Do not modify.

'use strict';

goog.provide('Blockly.Msg.{0}');

goog.require('Blockly.Msg');

""".format(target_lang.replace('-', '.')))
        # For each key in the source language file, output the target value
        # if present; otherwise, output the source language value with a
        # warning comment.
        for key in sorted_keys:
          if key in target_defs:
            value = target_defs[key]
            comment = ''
            del target_defs[key]
          else:
            value = source_defs[key]
            comment = '  // untranslated'
          value = value.replace('"', '\\"')
          outfile.write(u'Blockly.Msg["{0}"] = "{1}";{2}\n'
              .format(key, value, comment))

        # Announce any keys defined only for target language.
        if target_defs:
          extra_keys = [key for key in target_defs if key not in synonym_defs]
          synonym_keys = [key for key in target_defs if key in synonym_defs]
          if not args.quiet:
            if extra_keys:
              print(u'These extra keys appeared in {0}: {1}'.format(
                  filename, ', '.join(extra_keys)))
            if synonym_keys:
              print(u'These synonym keys appeared in {0}: {1}'.format(
                  filename, ', '.join(synonym_keys)))

        outfile.write(synonym_text)
        outfile.write(constants_text)

      if not args.quiet:
        print('Created {0}.'.format(outname))


if __name__ == '__main__':
  main()
# -*- coding: utf-8 -*-
"""
pythoner.net
Copyright (C) 2013 PYTHONER.ORG

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.contrib import admin
# NOTE(review): wildcard import — presumably provides UserProfile; an
# explicit `from models import UserProfile` would be clearer (verify no
# other names from models are relied upon here).
from models import *


class ProfileAdmin(admin.ModelAdmin):
    # Columns shown on the user-profile changelist page in the admin.
    list_display = ('screen_name','city','introduction')

# Register UserProfile with the customized admin options above.
admin.site.register(UserProfile,ProfileAdmin)
#     Copyright 2015 Netflix, Inc.
#
#     Licensed under the Apache License, Version 2.0 (the "License");
#     you may not use this file except in compliance with the License.
#     You may obtain a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#     Unless required by applicable law or agreed to in writing, software
#     distributed under the License is distributed on an "AS IS" BASIS,
#     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#     See the License for the specific language governing permissions and
#     limitations under the License.
"""
.. module: security_monkey.watchers.keypair
    :platform: Unix

.. version:: $$VERSION$$
.. moduleauthor:: Mike Grima <mgrima@netflix.com>

"""
import json

from security_monkey.decorators import record_exception
from security_monkey.decorators import iter_account_region
from security_monkey.watcher import Watcher, ChangeItem
from security_monkey.datastore import Account
from security_monkey import app, ARN_PREFIX


class ElasticSearchService(Watcher):
    """Watcher that records the access policy of every ES domain."""
    index = 'elasticsearchservice'
    i_am_singular = 'ElasticSearch Service Access Policy'
    i_am_plural = 'ElasticSearch Service Access Policies'

    def __init__(self, accounts=None, debug=False):
        super(ElasticSearchService, self).__init__(accounts=accounts, debug=debug)

    def slurp(self):
        """
        :returns: item_list - list of ElasticSearchService Items
        :return: exception_map - A dict where the keys are a tuple containing the
            location of the exception and the value is the actual exception
        """
        self.prep_for_slurp()

        @iter_account_region(index=self.index, accounts=self.accounts, service_name='es')
        def slurp_items(**kwargs):
            item_list = []
            exception_map = {}
            kwargs['exception_map'] = exception_map
            account_db = Account.query.filter(Account.name == kwargs['account_name']).first()
            account_num = account_db.identifier
            es_info = self.get_all_es_domains_in_region(**kwargs)
            # None means the call failed and was recorded by @record_exception.
            if es_info is None:
                return item_list, exception_map
            (client, domains) = es_info

            app.logger.debug("Found {} {}".format(len(domains), ElasticSearchService.i_am_plural))
            for domain in domains:
                if self.check_ignore_list(domain["DomainName"]):
                    continue

                # Fetch the policy:
                item = self.build_item(domain["DomainName"], client, account_num, **kwargs)
                if item:
                    item_list.append(item)

            return item_list, exception_map
        return slurp_items()

    @record_exception(source='{index}-watcher'.format(index=index), pop_exception_fields=False)
    def get_all_es_domains_in_region(self, **kwargs):
        """Return (client, domains) for the account/region in kwargs."""
        from security_monkey.common.sts_connect import connect
        client = connect(kwargs['account_name'], "boto3.es.client", region=kwargs['region'])
        app.logger.debug("Checking {}/{}/{}".format(ElasticSearchService.index,
                                                    kwargs['account_name'], kwargs['region']))
        # No need to paginate according to: client.can_paginate("list_domain_names")
        domains = self.wrap_aws_rate_limited_call(client.list_domain_names)["DomainNames"]

        return client, domains

    @record_exception(source='{index}-watcher'.format(index=index), pop_exception_fields=False)
    def build_item(self, domain, client, account_num, **kwargs):
        """Build one ElasticSearchServiceItem holding the domain's policy."""
        arn = ARN_PREFIX + ':es:{region}:{account_number}:domain/{domain_name}'.format(
            region=kwargs['region'],
            account_number=account_num,
            domain_name=domain)
        config = {
            'arn': arn
        }

        domain_config = self.wrap_aws_rate_limited_call(client.describe_elasticsearch_domain_config,
                                                        DomainName=domain)

        # Does the cluster have a policy?
        if domain_config["DomainConfig"]["AccessPolicies"]["Options"] == "":
            config['policy'] = {}
        else:
            config['policy'] = json.loads(domain_config["DomainConfig"]["AccessPolicies"]["Options"])

        config['name'] = domain

        return ElasticSearchServiceItem(region=kwargs['region'],
                                        account=kwargs['account_name'],
                                        name=domain,
                                        arn=arn,
                                        config=config)


class ElasticSearchServiceItem(ChangeItem):
    def __init__(self, region=None, account=None, name=None, arn=None, config=None):
        # FIX: the default was the mutable literal `config={}`, shared across
        # all calls; use None and create a fresh dict per instance.
        if config is None:
            config = {}
        super(ElasticSearchServiceItem, self).__init__(
            index=ElasticSearchService.index,
            region=region,
            account=account,
            name=name,
            arn=arn,
            new_config=config)
# Tuple-unpacking walkthrough: divmod, star-unpacking, and namedtuple.

import os
from collections import namedtuple

# divmod called with plain positional arguments
outdata1 = divmod(20,8)

# Prefixing an argument with a star unpacks the tuple into positional args
t = (20,8)
outdata2 = divmod(*t)

# os.path.split returns (head, tail); keep only the tail (filename = hh.grad)
_, filename = os.path.split('/nfs/j3/hh.grad')

# Starred targets grab excess items — Python 3 only, not Python 2:
#   a, b, *rest = range(5)
#   a, b, *rest = range(3)
#   a, b, *rest = range(2)
#   a, *body, c, d = range(5)
#   *head, b, c, d = range(5)

# Nested tuple unpacking inside the loop body
a = [('good', (334,213)), ('bad', (231,234))]
for record in a:
    cond, (x, y) = record
    print('x = {0}, y = {1}'.format(x, y))

# namedtuple builds a lightweight, immutable record class
place = namedtuple('place', 'condition coordinate')
tokyo = place('good', (334,213))
print(tokyo)

# _fields class attribute, _make(iterable) class method, _asdict() instance method
print(place._fields)
LatLong = namedtuple('LatLong', 'lat long')
delhi_data = ('Delhi NCR', LatLong(28.61, 77.21))
delhi = place._make(delhi_data)
as_dict = delhi._asdict()
for key in as_dict:
    value = as_dict[key]
    print(key + ':', value)
import datetime
import decimal
from time import time

from django.utils.hashcompat import md5_constructor
from django.utils.log import getLogger

# Module-level logger shared by the cursor wrapper below.
logger = getLogger('django.db.backends')


class CursorDebugWrapper(object):
    """Proxy around a DB-API cursor that records every query.

    Each execute()/executemany() call is timed, appended to
    ``db.queries`` and logged, then delegated to the wrapped cursor.
    Any other attribute access falls through to the underlying cursor.
    """

    def __init__(self, cursor, db):
        self.cursor = cursor
        self.db = db # Instance of a BaseDatabaseWrapper subclass

    def execute(self, sql, params=()):
        """Run a single statement, recording its SQL and wall-clock time.

        The bookkeeping lives in ``finally`` so the query is recorded
        even when the backend raises.
        """
        start = time()
        try:
            return self.cursor.execute(sql, params)
        finally:
            stop = time()
            duration = stop - start
            # Ask the backend for the SQL as actually sent (params
            # interpolated) so the recorded text matches the server's view.
            sql = self.db.ops.last_executed_query(self.cursor, sql, params)
            self.db.queries.append({
                'sql': sql,
                'time': "%.3f" % duration,
            })
            logger.debug('(%.3f) %s; args=%s' % (duration, sql, params),
                extra={'duration':duration, 'sql':sql, 'params':params}
            )

    def executemany(self, sql, param_list):
        """Run a statement once per parameter set, recording a summary line."""
        start = time()
        try:
            return self.cursor.executemany(sql, param_list)
        finally:
            stop = time()
            duration = stop - start
            self.db.queries.append({
                'sql': '%s times: %s' % (len(param_list), sql),
                'time': "%.3f" % duration,
            })
            logger.debug('(%.3f) %s; args=%s' % (duration, sql, param_list),
                extra={'duration':duration, 'sql':sql, 'params':param_list}
            )

    def __getattr__(self, attr):
        # NOTE(review): __getattr__ only fires after normal attribute lookup
        # fails, so the self.__dict__ branch is effectively never taken;
        # kept byte-identical regardless.
        if attr in self.__dict__:
            return self.__dict__[attr]
        else:
            return getattr(self.cursor, attr)

    def __iter__(self):
        # Iterating the wrapper iterates the wrapped cursor's result rows.
        return iter(self.cursor)

###############################################
# Converters from database (string) to Python #
###############################################

def typecast_date(s):
    """Parse an ISO 'YYYY-MM-DD' string into a datetime.date."""
    return s and datetime.date(*map(int, s.split('-'))) or None # returns None if s is null

def typecast_time(s): # does NOT store time zone information
    """Parse an 'HH:MM:SS[.ffffff]' string into a datetime.time."""
    if not s:
        return None
    hour, minutes, seconds = s.split(':')
    if '.' in seconds: # check whether seconds have a fractional part
        seconds, microseconds = seconds.split('.')
    else:
        microseconds = '0'
    # Fractional part is re-parsed as a float so any precision is scaled
    # into whole microseconds.
    return datetime.time(int(hour), int(minutes), int(seconds), int(float('.'+microseconds) * 1000000))

def typecast_timestamp(s): # does NOT store time zone information
    """Parse a 'YYYY-MM-DD[ HH:MM:SS[.ffffff]][+/-TZ]' string.

    Returns a datetime.datetime, or a datetime.date when no time part
    is present (delegates to typecast_date).
    """
    # "2005-07-29 15:48:00.590358-05"
    # "2005-07-29 09:56:00-05"
    if not s:
        return None
    if not ' ' in s:
        return typecast_date(s)
    d, t = s.split()
    # Extract timezone information, if it exists. Currently we just throw
    # it away, but in the future we may make use of it.
    if '-' in t:
        t, tz = t.split('-', 1)
        tz = '-' + tz
    elif '+' in t:
        t, tz = t.split('+', 1)
        tz = '+' + tz
    else:
        tz = ''
    dates = d.split('-')
    times = t.split(':')
    seconds = times[2]
    if '.' in seconds: # check whether seconds have a fractional part
        seconds, microseconds = seconds.split('.')
    else:
        microseconds = '0'
    # Right-pad the fractional digits to exactly six so e.g. '.59' becomes
    # 590000 microseconds rather than 59.
    return datetime.datetime(int(dates[0]), int(dates[1]), int(dates[2]),
        int(times[0]), int(times[1]), int(seconds), int((microseconds + '000000')[:6]))

def typecast_boolean(s):
    """Map a DB boolean representation to True/False/None ('t...' is true)."""
    if s is None: return None
    if not s: return False
    return str(s)[0].lower() == 't'

def typecast_decimal(s):
    """Parse a numeric string into decimal.Decimal (None/'' -> None)."""
    if s is None or s == '':
        return None
    return decimal.Decimal(s)

###############################################
# Converters from Python to database (string) #
###############################################

def rev_typecast_boolean(obj, d):
    """Serialize a truthy value as '1', falsy as '0' (d is unused)."""
    return obj and '1' or '0'

def rev_typecast_decimal(d):
    """Serialize a Decimal (or None) back to its string form."""
    if d is None:
        return None
    return str(d)

def truncate_name(name, length=None, hash_len=4):
    """Shortens a string to a repeatable mangled version with the given length.
    """
    if length is None or len(name) <= length:
        return name
    # NOTE(review): 'hash' shadows the builtin; left unchanged here.
    hash = md5_constructor(name).hexdigest()[:hash_len]
    return '%s%s' % (name[:length-hash_len], hash)

def format_number(value, max_digits, decimal_places):
    """
    Formats a number into a string with the requisite number of digits and
    decimal places.
    """
    if isinstance(value, decimal.Decimal):
        # Quantize within a context limited to max_digits of precision.
        context = decimal.getcontext().copy()
        context.prec = max_digits
        return u'%s' % str(value.quantize(decimal.Decimal(".1") ** decimal_places, context=context))
    else:
        return u"%.*f" % (decimal_places, value)
# cElementTree was removed in Python 3.9; plain ElementTree transparently
# uses the C accelerator when available.
import xml.etree.ElementTree as et
from collections import OrderedDict

from tabletopscanner.boardgamegeekapi.parsers import Deserializer


class SearchParser(Deserializer):
    """Deserialize a BoardGameGeek search-results XML document.

    Each <item> element becomes an OrderedDict with the keys
    'geekid', 'name' and 'yearpublished', in that order.
    """

    def deserialize(self, xml):
        """Parse *xml* (an XML string) and return a list of OrderedDicts,
        one per <item> element, in document order."""
        tree = et.fromstring(xml)
        return [SearchParser.__make_search_result(el) for el in tree.findall('item')]

    @staticmethod
    def __make_search_result(el):
        """Map a single <item> element to an OrderedDict.

        Assumes the element carries an 'id' attribute and has <name> and
        <yearpublished> children with 'value' attributes; a missing child
        raises AttributeError. TODO confirm against the BGG API schema.
        """
        # Fixed: the original read "geekid = geekid = el.attrib['id']"
        # (duplicated assignment).
        geekid = el.attrib['id']
        name = el.find('name').attrib['value']
        yearpublished = el.find('yearpublished').attrib['value']
        # Build from a sequence of pairs so insertion order is guaranteed on
        # every Python version (a dict literal was only ordered from 3.7 on,
        # which defeated the point of using OrderedDict here).
        return OrderedDict([
            ('geekid', geekid),
            ('name', name),
            ('yearpublished', yearpublished),
        ])
assert_false # pylint: disable=E0611 from auth.authz import get_user_by_email, get_course_groupname_for_role from django.conf import settings from selenium.webdriver.common.keys import Keys import time import os from django.contrib.auth.models import Group from logging import getLogger logger = getLogger(__name__) from terrain.browser import reset_data TEST_ROOT = settings.COMMON_TEST_DATA_ROOT @step('I (?:visit|access|open) the Studio homepage$') def i_visit_the_studio_homepage(_step): # To make this go to port 8001, put # LETTUCE_SERVER_PORT = 8001 # in your settings.py file. world.visit('/') signin_css = 'a.action-signin' assert world.is_css_present(signin_css) @step('I am logged into Studio$') def i_am_logged_into_studio(_step): log_into_studio() @step('I confirm the alert$') def i_confirm_with_ok(_step): world.browser.get_alert().accept() @step(u'I press the "([^"]*)" delete icon$') def i_press_the_category_delete_icon(_step, category): if category == 'section': css = 'a.delete-button.delete-section-button span.delete-icon' elif category == 'subsection': css = 'a.delete-button.delete-subsection-button span.delete-icon' else: assert False, 'Invalid category: %s' % category world.css_click(css) @step('I have opened a new course in Studio$') def i_have_opened_a_new_course(_step): open_new_course() @step('(I select|s?he selects) the new course') def select_new_course(_step, whom): course_link_css = 'a.course-link' world.css_click(course_link_css) @step(u'I press the "([^"]*)" notification button$') def press_the_notification_button(_step, name): # Because the notification uses
a CSS transition, # Selenium will always report it as being visible. # This makes it very difficult to successfully click # the "Save" button at the UI level. # Instead, we use JavaScript to reliably click # the button. btn_css
= 'div#page-notification a.action-%s' % name.lower() world.trigger_event(btn_css, event='focus') world.browser.execute_script("$('{}').click()".format(btn_css)) world.wait_for_ajax_complete() @step('I change the "(.*)" field to "(.*)"$') def i_change_field_to_value(_step, field, value): field_css = '#%s' % '-'.join([s.lower() for s in field.split()]) ele = world.css_find(field_css).first ele.fill(value) ele._element.send_keys(Keys.ENTER) @step('I reset the database') def reset_the_db(_step): """ When running Lettuce tests using examples (i.e. "Confirmation is shown on save" in course-settings.feature), the normal hooks aren't called between examples. reset_data should run before each scenario to flush the test database. When this doesn't happen we get errors due to trying to insert a non-unique entry. So instead, we delete the database manually. This has the effect of removing any users and courses that have been created during the test run. """ reset_data(None) @step('I see a confirmation that my changes have been saved') def i_see_a_confirmation(step): confirmation_css = '#alert-confirmation' assert world.is_css_present(confirmation_css) def open_new_course(): world.clear_courses() create_studio_user() log_into_studio() create_a_course() def create_studio_user( uname='robot', email='robot+studio@edx.org', password='test', is_staff=False): studio_user = world.UserFactory( username=uname, email=email, password=password, is_staff=is_staff) registration = world.RegistrationFactory(user=studio_user) registration.register(studio_user) registration.activate() return studio_user def fill_in_course_info( name='Robot Super Course', org='MITx', num='101', run='2013_Spring'): world.css_fill('.new-course-name', name) world.css_fill('.new-course-org', org) world.css_fill('.new-course-number', num) world.css_fill('.new-course-run', run) def log_into_studio( uname='robot', email='robot+studio@edx.org', password='test', name='Robot Studio'): world.log_in(username=uname, 
password=password, email=email, name=name) # Navigate to the studio dashboard world.visit('/') assert_in(uname, world.css_text('h2.title', timeout=10)) def add_course_author(user, course): """ Add the user to the instructor group of the course so they will have the permissions to see it in studio """ for role in ("staff", "instructor"): groupname = get_course_groupname_for_role(course.location, role) group, __ = Group.objects.get_or_create(name=groupname) user.groups.add(group) user.save() def create_a_course(): course = world.CourseFactory.create(org='MITx', course='999', display_name='Robot Super Course') world.scenario_dict['COURSE'] = course user = world.scenario_dict.get("USER") if not user: user = get_user_by_email('robot+studio@edx.org') add_course_author(user, course) # Navigate to the studio dashboard world.visit('/') course_link_css = 'a.course-link' world.css_click(course_link_css) course_title_css = 'span.course-title' assert_true(world.is_css_present(course_title_css)) def add_section(name='My Section'): link_css = 'a.new-courseware-section-button' world.css_click(link_css) name_css = 'input.new-section-name' save_css = 'input.new-section-name-save' world.css_fill(name_css, name) world.css_click(save_css) span_css = 'span.section-name-span' assert_true(world.is_css_present(span_css)) def add_subsection(name='Subsection One'): css = 'a.new-subsection-item' world.css_click(css) name_css = 'input.new-subsection-name-input' save_css = 'input.new-subsection-name-save' world.css_fill(name_css, name) world.css_click(save_css) def set_date_and_time(date_css, desired_date, time_css, desired_time): world.css_fill(date_css, desired_date) # hit TAB to get to the time field e = world.css_find(date_css).first # pylint: disable=W0212 e._element.send_keys(Keys.TAB) world.css_fill(time_css, desired_time) e = world.css_find(time_css).first e._element.send_keys(Keys.TAB) time.sleep(float(1)) @step('I have enabled the (.*) advanced module$') def 
i_enabled_the_advanced_module(step, module): step.given('I have opened a new course section in Studio') world.css_click('.nav-course-settings') world.css_click('.nav-course-settings-advanced a') type_in_codemirror(0, '["%s"]' % module) press_the_notification_button(step, 'Save') @world.absorb def create_course_with_unit(): """ Prepare for tests by creating a course with a section, subsection, and unit. Performs the following: Clear out all courseware Create a course with a section, subsection, and unit Create a user and make that user a course author Log the user into studio Open the course from the dashboard Expand the section and click on the New Unit link The end result is the page where the user is editing the new unit """ world.clear_courses() course = world.CourseFactory.create() world.scenario_dict['COURSE'] = course section = world.ItemFactory.create(parent_location=course.location) world.ItemFactory.create( parent_location=section.location, category='sequential', display_name='Subsection One', ) user = create_studio_user(is_staff=False) add_course_author(user, course) log_into_studio() world.css_click('a.course-link') world.wait_for_js_to_load() css_selectors = [ 'div.section-item a.expand-collapse-icon', 'a.new-unit-item' ] for selector in css_selectors: world.css_click(selector) world.wait_for_mathjax() world.wait_for_xmodule() assert world.is_css_present('ul.new-component-type') @step('I have clicked the new unit button$') @step(u'I am in Studio editing a new unit$') def edit_new_unit(step): create_course_wi
"""Integration tests for the MyContigFilter KBase module.

These tests talk to a live KBase workspace service (URL taken from the
deployment config), so they require KB_AUTH_TOKEN and
KB_DEPLOYMENT_CONFIG in the environment.  Python 2 module layout
(ConfigParser) -- unchanged.
"""
import unittest
import os
import json
import time
from os import environ
from ConfigParser import ConfigParser
from pprint import pprint

from biokbase.workspace.client import Workspace as workspaceService
from MyContigFilter.MyContigFilterImpl import MyContigFilter


class MyContigFilterTest(unittest.TestCase):
    """Exercises MyContigFilter.filter_contigs against a real workspace."""

    @classmethod
    def setUpClass(cls):
        """Build a service context and clients from env vars + deploy config."""
        token = environ.get('KB_AUTH_TOKEN', None)
        # Fake provenance so these calls are identifiable server-side.
        cls.ctx = {'token': token, 'provenance': [{'service': 'MyContigFilter',
            'method': 'please_never_use_it_in_production', 'method_params': []}],
            'authenticated': 1}
        config_file = environ.get('KB_DEPLOYMENT_CONFIG', None)
        cls.cfg = {}
        config = ConfigParser()
        config.read(config_file)
        for nameval in config.items('MyContigFilter'):
            cls.cfg[nameval[0]] = nameval[1]
        cls.wsURL = cls.cfg['workspace-url']
        cls.wsClient = workspaceService(cls.wsURL, token=token)
        cls.serviceImpl = MyContigFilter(cls.cfg)

    @classmethod
    def tearDownClass(cls):
        # Only delete the workspace if some test actually created one
        # (getWsName sets cls.wsName lazily).
        if hasattr(cls, 'wsName'):
            cls.wsClient.delete_workspace({'workspace': cls.wsName})
            print('Test workspace was deleted')

    def getWsClient(self):
        """Return the shared workspace client."""
        return self.__class__.wsClient

    def getWsName(self):
        """Return the test workspace name, creating the workspace on first use."""
        if hasattr(self.__class__, 'wsName'):
            return self.__class__.wsName
        # Millisecond timestamp keeps concurrent test runs from colliding.
        suffix = int(time.time() * 1000)
        wsName = "test_MyContigFilter_" + str(suffix)
        ret = self.getWsClient().create_workspace({'workspace': wsName})
        self.__class__.wsName = wsName
        return wsName

    def getImpl(self):
        """Return the shared MyContigFilter implementation under test."""
        return self.__class__.serviceImpl

    def getContext(self):
        """Return the shared call context (token + provenance)."""
        return self.__class__.ctx

    def test_filter_contigs_ok(self):
        """Happy path: of three contigs, the one shorter than 10 bp is dropped."""
        obj_name = "contigset.1"
        contig1 = {'id': '1', 'length': 10, 'md5': 'md5', 'sequence': 'agcttttcat'}
        contig2 = {'id': '2', 'length': 5, 'md5': 'md5', 'sequence': 'agctt'}
        contig3 = {'id': '3', 'length': 12, 'md5': 'md5', 'sequence': 'agcttttcatgg'}
        obj1 = {'contigs': [contig1, contig2, contig3], 'id': 'id', 'md5': 'md5', 'name': 'name',
            'source': 'source', 'source_id': 'source_id', 'type': 'type'}
        self.getWsClient().save_objects({'workspace': self.getWsName(), 'objects':
            [{'type': 'KBaseGenomes.ContigSet', 'name': obj_name, 'data': obj1}]})
        # min_length is passed as a string on purpose: the service parses it.
        ret = self.getImpl().filter_contigs(self.getContext(), {'workspace': self.getWsName(),
            'contigset_id': obj_name, 'min_length': '10', 'output_name': 'my_output'})
        obj2 = self.getWsClient().get_objects([{'ref': self.getWsName()+'/'+'my_output'}])[0]['data']
        self.assertEqual(len(obj2['contigs']), 2)
        self.assertTrue(len(obj2['contigs'][0]['sequence']) >= 10)
        self.assertTrue(len(obj2['contigs'][1]['sequence']) >= 10)
        self.assertEqual(ret[0]['n_initial_contigs'], 3)
        self.assertEqual(ret[0]['n_contigs_removed'], 1)
        self.assertEqual(ret[0]['n_contigs_remaining'], 2)

    def test_filter_contigs_err1(self):
        """A nonexistent contigset_id must raise a load error."""
        with self.assertRaises(ValueError) as context:
            self.getImpl().filter_contigs(self.getContext(), {'workspace': self.getWsName(),
                'contigset_id': 'fake', 'min_length': 10, 'output_name': 'fake'})
        self.assertTrue('Error loading original ContigSet object' in str(context.exception))

    def test_filter_contigs_err2(self):
        """A negative min_length must be rejected before any workspace I/O."""
        with self.assertRaises(ValueError) as context:
            self.getImpl().filter_contigs(self.getContext(), {'workspace': self.getWsName(),
                'contigset_id': 'fake', 'min_length': '-10', 'output_name': 'fake'})
        self.assertTrue('min_length parameter shouldn\'t be negative' in str(context.exception))

    def test_filter_contigs_err3(self):
        """A non-numeric min_length must be rejected as unparsable."""
        with self.assertRaises(ValueError) as context:
            self.getImpl().filter_contigs(self.getContext(), {'workspace': self.getWsName(),
                'contigset_id': 'fake', 'min_length': 'ten', 'output_name': 'fake'})
        self.assertTrue('Cannot parse integer from min_length parameter' in str(context.exception))
ere are all the test parameters and values for the each `~astropy.modeling.FittableModel` defined. There is a dictionary for 1D and a dictionary for 2D models. Explanation of keywords of the dictionaries: "parameters" : list or dict Model parameters, the model is tested with. Make sure you keep the right order. For polynomials you can also use a dict to specify the coefficients. See examples below. "x_values" : list x values where the model is evaluated. "y_values" : list Reference y values for the in x_values given positions. "z_values" : list Reference z values for the in x_values and y_values given positions. (2D model option) "x_lim" : list x test range for the model fitter. Depending on the model this can differ e.g. the PowerLaw model should be tested over a few magnitudes. "y_lim" : list y test range for the model fitter. Depending on the model this can differ e.g. the PowerLaw model should be tested over a few magnitudes. (2D model option) "log_fit" : bool PowerLaw models should be tested over a few magnitudes. So log_fit should be true. "requires_scipy" : bool If a model requires scipy (Bessel functions etc.) set this flag. "integral" : float Approximate value of the integral in the range x_lim (and y_lim). "deriv_parameters" : list If given the test of the derivative will use these parameters to create a model (optional) "deriv_initial" : list If given the test of the derivative will use these parameters as initial values for the fit (optional) """ from __future__ import (absolute_import, division, print_function, unicode_literals) from ..functional_models import ( Gaussian1D, Sine1D, Box1D,
Linear1D, Lorentz1D, MexicanHat1D, Trapezoid1D, Const1D, Moffat1D, Gaussian2D, Const2D, Box2D, MexicanHat2D, TrapezoidDisk2D, AiryDisk2D
, Moffat2D, Disk2D, Ring2D) from ..polynomial import Polynomial1D, Polynomial2D from ..powerlaws import ( PowerLaw1D, BrokenPowerLaw1D, ExponentialCutoffPowerLaw1D, LogParabola1D) import numpy as np #1D Models models_1D = { Gaussian1D: { 'parameters': [1, 0, 1], 'x_values': [0, np.sqrt(2), -np.sqrt(2)], 'y_values': [1.0, 0.367879, 0.367879], 'x_lim': [-10, 10], 'integral': np.sqrt(2 * np.pi) }, Sine1D: { 'parameters': [1, 0.1], 'x_values': [0, 2.5], 'y_values': [0, 1], 'x_lim': [-10, 10], 'integral': 0 }, Box1D: { 'parameters': [1, 0, 10], 'x_values': [-5, 5, 0, -10, 10], 'y_values': [1, 1, 1, 0, 0], 'x_lim': [-10, 10], 'integral': 10 }, Linear1D: { 'parameters': [1, 0], 'x_values': [0, np.pi, 42, -1], 'y_values': [0, np.pi, 42, -1], 'x_lim': [-10, 10], 'integral': 0 }, Lorentz1D: { 'parameters': [1, 0, 1], 'x_values': [0, -1, 1, 0.5, -0.5], 'y_values': [1., 0.2, 0.2, 0.5, 0.5], 'x_lim': [-10, 10], 'integral': 1 }, MexicanHat1D: { 'parameters': [1, 0, 1], 'x_values': [0, 1, -1, 3, -3], 'y_values': [1.0, 0.0, 0.0, -0.088872, -0.088872], 'x_lim': [-20, 20], 'integral': 0 }, Trapezoid1D: { 'parameters': [1, 0, 2, 1], 'x_values': [0, 1, -1, 1.5, -1.5, 2, 2], 'y_values': [1, 1, 1, 0.5, 0.5, 0, 0], 'x_lim': [-10, 10], 'integral': 3 }, Const1D: { 'parameters': [1], 'x_values': [-1, 1, np.pi, -42., 0], 'y_values': [1, 1, 1, 1, 1], 'x_lim': [-10, 10], 'integral': 20 }, Moffat1D: { 'parameters': [1, 0, 1, 2], 'x_values': [0, 1, -1, 3, -3], 'y_values': [1.0, 0.25, 0.25, 0.01, 0.01], 'x_lim': [-10, 10], 'integral': 1, 'deriv_parameters': [23.4, 1.2, 2.1, 2.3], 'deriv_initial': [10, 1, 1, 1] }, PowerLaw1D: { 'parameters': [1, 1, 2], 'constraints': {'fixed': {'x_0': True}}, 'x_values': [1, 10, 100], 'y_values': [1.0, 0.01, 0.0001], 'x_lim': [1, 10], 'log_fit': True, 'integral': 0.99 }, BrokenPowerLaw1D: { 'parameters': [1, 1, 2, 3], 'constraints': {'fixed': {'x_break': True}}, 'x_values': [0.1, 1, 10, 100], 'y_values': [1e2, 1.0, 1e-3, 1e-6], 'x_lim': [0.1, 100], 'log_fit': True 
}, ExponentialCutoffPowerLaw1D: { 'parameters': [1, 1, 2, 3], 'constraints': {'fixed': {'x_0': True}}, 'x_values': [0.1, 1, 10, 100], 'y_values': [9.67216100e+01, 7.16531311e-01, 3.56739933e-04, 3.33823780e-19], 'x_lim': [0.01, 100], 'log_fit': True }, LogParabola1D: { 'parameters': [1, 2, 3, 0.1], 'constraints': {'fixed': {'x_0': True}}, 'x_values': [0.1, 1, 10, 100], 'y_values': [3.26089063e+03, 7.62472488e+00, 6.17440488e-03, 1.73160572e-06], 'x_lim': [0.1, 100], 'log_fit': True }, Polynomial1D: { 'parameters': {'degree': 2, 'c0': 1., 'c1': 1., 'c2': 1.}, 'x_values': [1, 10, 100], 'y_values': [3, 111, 10101], 'x_lim': [-3, 3] } } #2D Models models_2D = { Gaussian2D: { 'parameters': [1, 0, 0, 1, 1], 'constraints': {'fixed': {'theta': True}}, 'x_values': [0, np.sqrt(2), -np.sqrt(2)], 'y_values': [0, np.sqrt(2), -np.sqrt(2)], 'z_values': [1, 1. / np.exp(1) ** 2, 1. / np.exp(1) ** 2], 'x_lim': [-10, 10], 'y_lim': [-10, 10], 'integral': 2 * np.pi, 'deriv_parameters': [137., 5.1, 5.4, 1.5, 2., np.pi/4], 'deriv_initial': [10, 5, 5, 4, 4, .5] }, Const2D: { 'parameters': [1], 'x_values': [-1, 1, np.pi, -42., 0], 'y_values': [0, 1, 42, np.pi, -1], 'z_values': [1, 1, 1, 1, 1], 'x_lim': [-10, 10], 'y_lim': [-10, 10], 'integral': 400 }, Box2D: { 'parameters': [1, 0, 0, 10, 10], 'x_values': [-5, 5, -5, 5, 0, -10, 10], 'y_values': [-5, 5, 0, 0, 0, -10, 10], 'z_values': [1, 1, 1, 1, 1, 0, 0], 'x_lim': [-10, 10], 'y_lim': [-10, 10], 'integral': 100 }, MexicanHat2D: { 'parameters': [1, 0, 0, 1], 'x_values': [0, 0, 0, 0, 0, 1, -1, 3, -3], 'y_values': [0, 1, -1, 3, -3, 0, 0, 0, 0], 'z_values': [1.0, 0.303265, 0.303265, -0.038881, -0.038881, 0.303265, 0.303265, -0.038881, -0.038881], 'x_lim': [-10, 11], 'y_lim': [-10, 11], 'integral': 0 }, TrapezoidDisk2D: { 'parameters': [1, 0, 0, 1, 1], 'x_values': [0, 0.5, 0, 1.5], 'y_values': [0, 0.5, 1.5, 0], 'z_values': [1, 1, 0.5, 0.5], 'x_lim': [-3, 3], 'y_lim': [-3, 3] }, AiryDisk2D: { 'parameters': [7, 0, 0, 10], 'x_values': [0, 1, -1, 
-0.5, -0.5], 'y_values': [0, -1, 0.5, 0.5, -0.5], 'z_values': [7., 6.50158267, 6.68490643, 6.87251093, 6.87251093], 'x_lim': [-10, 10], 'y_lim': [-10, 10], 'requires_scipy': True }, Moffat2D: { 'parameters': [1, 0, 0, 1, 2], 'x_values': [0, 1, -1, 3, -3], 'y_values': [0, -1, 3, 1, -3], 'z_values': [1.0, 0.111111, 0.008264, 0.008264, 0.00277], 'x_lim': [-3, 3], 'y_lim': [-3, 3] }, Polynomial2D: { 'parameters': {'degree': 1, 'c0_0': 1., 'c1_0': 1., 'c0_1': 1.}, 'x_values': [1, 2, 3], 'y_values': [1, 3, 2], 'z_values': [3, 6, 6], 'x_lim': [1, 100], 'y_lim': [1, 100] }, Disk2D: { 'parameters': [1, 0, 0, 5], 'x_values': [-5, 5, -5, 5, 0, -10, 10], 'y_values': [-5, 5, 0, 0, 0, -10, 10], 'z_values': [0, 0, 1, 1, 1, 0, 0], 'x_lim': [-10, 10], 'y_lim': [-10, 10], 'integral': np.pi * 5 ** 2 }, Ring2D: { 'parameters': [1, 0, 0, 5, 5], 'x_values': [-5, 5, -5, 5, 0, -
# Root directory holding the DSTL dataset on the local machine.
# NOTE(review): machine-specific absolute path -- adjust per environment.
DATA_DIR = '/media/d/ssd2/dstl/'
t(':')[0] value = [] else: key = line.strip().split('- ')[1].split(': ')[0] value = line.split(key)[1][2:] if key in data: if hasattr(data[key],'__iter__'): value = data[key] + [value] else: value = [data[key],value] if value: data[key] = value return data class PickledObjectField(models.Field): """ Django snippet - http://www.djangosnippets.org/snippets/513/ """ __metaclass__ = models.SubfieldBase def to_python(self, value): try: return pickle.loads(str(value)) except: # If an error was raised, just return the plain value return value def get_db_prep_save(self, value): if value is not None: value = pickle.dumps(value) return str(value) def get_internal_type(self): return 'TextField' def get_db_prep_lookup(self, lookup_type, value): if lookup_type == 'exact': value = self.get_db_prep_save(value) return super(PickledObjectField, self).get_db_prep_lookup(lookup_type, value) elif lookup_type == 'in': value = [self.get_db_prep_save(v) for v in value] return super(PickledObjectField, self).get_db_prep_lookup(lookup_type, value) else: raise TypeError('Lookup type %s is not supported.' 
% lookup_type) class Media(models.Model): title = models.CharField(max_length=255) slug = AutoSlugField(max_length=50, overwrite=True, populate_from=("title",)) creation_date = models.DateTimeField(auto_now_add=True) author = models.ForeignKey(User, blank=True, null=True, limit_choices_to={'is_staff':True}) one_off_author = models.CharField('one-off author', max_length=100, blank=True) credit = models.CharField(max_length=150, blank=True) caption = models.TextField(blank=True) metadata = PickledObjectField(blank=True) sites = models.ManyToManyField(Site,related_name='%(class)s_sites') categories = models.ManyToManyField(CATEGORIES_MODULE, blank=True) reproduction_allowed = models.BooleanField("we have reproduction rights for this media", default=True) public = models.BooleanField(help_text="this media is publicly available", default=True) external_url = models.URLField(blank=True,null=True,help_text="If this URLField is set, the media will be pulled externally") mime_type = models.CharField(max_length=150,blank=True,null=True) width = models.IntegerField(blank=True, null=True) height = models.IntegerField(blank=True, null=True) widget_template = models.CharField(max_length=255,blank=True,null=True, help_text='The template name used to generate the widget (defaults to mime_type layout)') class Meta: ordering = ('-creation_date',) abstract = True unique_together = (('slug', 'creation_date'),) def __unicode__(self): return self.title def get_absolute_url(self): if self.external_url: return self.external_url if hasattr(self,'file') and getattr(self,'file',None): return self.absolute_url(( settings.MEDIA_URL, '/'.join([self.creation_date.strftime("%Y"), self.creation_date.strftime("%b").lower(), self.creation_date.strftime("%d")]), os.path.basename(self.file.path))) return '' def absolute_url(self, format): raise NotImplementedError def save(self, *args, **kwargs): if self.file and not self.mime_type: self.mime_type = mimetypes.guess_type(self.file.path)[0] if 
not(self.metadata) and self.file and extractMetadata: self.metadata = parse_metadata(self.file.path) or '' super(Media, self).save(*args, **kwargs) def get_mime_type(self): if self.mime_type: return self.mime_type if self.metadata and 'mime_type' in self.metadata: return self.metadata['mime_type'] return def get_template(self): mime_type = self.get_mime_type() if self.widget_template: if appsettings.TEMPLATE_MODE == appsettings.FILE_SYSTEM: return get_template(self.widget_template) else: return MediaTemplate.objects.get(name=self.widget_template).template() elif mime_type is None: if appsettings.TEMPLATE_MODE == appsettings.FILE_SYSTEM: if appsettings.USE_VOXANT and isinstance(self, VoxantVideo): return get_template('massmedia/voxant.html') else: return get_template('massmedia/generic.html') else: return MediaTemplate.objects.get(mimetype='').tempate() else: if appsettings.TEMPLATE_MODE == appsettings.FILE_SYSTEM: try: return get_template('massmedia/%s.html'%mime_type) except TemplateDoesNotExist: try: return get_template('massmedia/%s/generic.html'%mime_type.split('/')[0]) except TemplateDoesNotExist: return get_template('massmedia/generic.html') else: try: return MediaTemplate.objects.get(mimetype=mime_type) except MediaTemplate.DoesNotExist: try: return MediaTemplate.objects.get(mimetype=mime_type.split('/')[0]) except MediaTemplate.DoesNotExist: return MediaTemplate.objects.get(mimetype='').tempate() def render_template(self): return self.get_template().render(Context({ 'media':self, 'MEDIA_URL':settings.MEDIA_URL })) class Image(Media): file = models.ImageField(upload_to=upload_to('img/%Y/%b/%d'), blank=True, null=True) def save(self, *args, **kwargs): if iptc: try: data.update(IPTCInfo(path).__dict__['_data']) except: pass super(Image, self).save(*args, **kwargs) def thumb(self): if self.file: thumbnail = '%s.thumb%s'%os.path.splitext(self.file.path) thumburl = thumbnail[len(settings.MEDIA_ROOT)-1:] if not os.path.exists(thumbnail): im = 
PilImage.open(self.file) im.thumbnail(appsettings.THUMB_SIZE,PilImage.ANTIALIAS) try: im.save(thumbnail,im.format) except KeyError: pass return '<a href="%s"><img src="%s%s"/></a>'%\ (self.get_absolute_url(),settings.MEDIA_URL,thumburl) elif self.external_url: return '<a href="%s"><img src="%s"/></a>'%\ (self.get_absolute_url(),self.get_absolute_url()) thumb.allow_tags = True thumb.short_description = 'Thumbnail' def absolute_url(self, format): return "%simg/%s/%s" % format class Video(Media): file = models.FileField(upload_to=upload_to('video/%Y/%b/%d'), blank=True, null=True) thumbnail = models.ForeignKey(Image, null=True, blank=True) def thumb(self): return self.thumbnail.thumb() thumb.allow_tags = True thumb.short_description = 'Thumbnail' def absolute_url(self, format)
: return "%svideo/%s/%s" % format if appsettings.USE_VOXANT: class VoxantVideo(Video):
asset_id = models.CharField(max_length=255,help_text='Voxant video asset ID (the `a` parameter)') layout_id = models.CharField(max_length=255,help_text='Voxant video asset ID (the `m` parameter)') def absolute_url(self, format): return "%svoxantvideo/%s/%s" % format class Audio(Media): file = models.FileField(upload_to=upload_to('audio/%Y/%b/%d'), blank=True, null=True) class Meta: verbose_name_plural = 'audio' def absolute_url(self, format): return "%saudio/%s/%s" % format class Flash(Media): file = m
#!/usr/bin/env python
'''
2D Group Members:
> Charlotte Phang
> Lau Wenkie
> Mok Jun Neng
> Martin Tan
> Dicson Candra
'''
# Python 2 script: drives a pump and a fan via PWM from two PID state
# machines, using a 1-wire temperature sensor as feedback.

#Import relevant modules
import RPi.GPIO as GPIO
import os
import glob
import time
from PIDsm import PID_ControllerSM

### PIN NUMBERS ###
tempPin = 4
motorPin = 12
fanPin = 13

### PARAMETERS ###
pwmFreq = 100  # PWM carrier frequency in Hz for both pump and fan

#Code to read temperature from the ####################### sensor
class tempSensor:
    '''Reads degrees Celsius from a DS18x20-style 1-wire sensor via sysfs.

    Calling the instance returns the current temperature as a float.
    '''
    #Location of file to read from for temperature: /sys/bus/w1/devices/28-000008ae29b8/w1_slave
    #to manually read, "cat /sys/bus/w1/devices/28-000008ae29b8/w1_slave" in terminal
    def __init__(self):
        # Load the kernel modules that expose the 1-wire bus in sysfs.
        os.system('modprobe w1-gpio')
        os.system('modprobe w1-therm')
        #define directory of the temperature data in the linux filesystem
        # NOTE(review): glob(...)[0] raises IndexError if no sensor is
        # attached; assumes exactly one 28-* device is present.
        self.base_dir = '/sys/bus/w1/devices/'
        self.device_folder = glob.glob(self.base_dir + '28*')[0]
        self.device_file = self.device_folder + '/w1_slave'

    def read_temp_raw(self): #reading raw output of the 1 wire bus
        f = open(self.device_file, 'r') #open file defined in self.device_file
        lines = f.readlines()
        f.close() #close file to reset the file pointer
        return lines

    def __call__(self): #function to extract temperature data from the raw data in string
        lines = self.read_temp_raw()
        # First line ends in 'YES' only when the CRC check passed; retry
        # until the sensor delivers a valid reading.
        while lines[0].strip()[-3:] != 'YES':
            time.sleep(0.2)
            lines = self.read_temp_raw()
        equals_pos = lines[1].find('t=')
        if equals_pos != -1:
            # Value after 't=' is in millidegrees Celsius.
            temp_string = lines[1][equals_pos+2:]
            temp_c = float(temp_string) / 1000.0
            return temp_c
        # NOTE(review): implicitly returns None when 't=' is missing,
        # but callers assume a float -- confirm this can't happen in practice.

#Set up global variables
GPIO.setmode(GPIO.BCM) #use BCM pin numbering system
GPIO.setup(tempPin, GPIO.IN, GPIO.PUD_UP) #set up the 1 wire interface
GPIO.setup(motorPin, GPIO.OUT) #setup the motor pin
GPIO.setup(fanPin, GPIO.OUT) #setup the fan pin

#define the fan and pump pins as PWM pins and initialise them at 0% PWM (off)
pump = GPIO.PWM(motorPin, pwmFreq)
pump.start(0.0)
fan = GPIO.PWM(fanPin, pwmFreq)
fan.start(0.0)

#create controller object from MotorSM class
# NOTE(review): no validation of user input -- a non-numeric entry makes
# float() raise below.
targetTemperature = raw_input('Please key in your desired target temperature: ')
motorController = PID_ControllerSM(float(targetTemperature),30,0,10)
motorController.start()
fanController = PID_ControllerSM(float(targetTemperature),50,0,5)
fanController.start()

#create sensor object
temp = tempSensor()

def main():
    '''One control-loop iteration: read temperature, step both PIDs,
    and apply the resulting duty cycles to pump and fan.'''
    #main code to loop indefinitely here
    #check current temperature
    currentTemp = temp()
    print 'Current temp: %.3f' %(currentTemp) #for monitoring in the terminal
    motorOutput = motorController.step(currentTemp) #get the amount of PWM to output to fan and pump from the state machine
    fanOutput = fanController.step(currentTemp)
    pump.ChangeDutyCycle(motorOutput) #output the pump PWM. ChangeDutyCycle takes a value from 0 to 100%
    fan.ChangeDutyCycle(fanOutput) #output the fan PWM

#####################################################################################
### Run the main code unless user terminates using Ctrl+C.                        ###
### Before exiting, code will reset and release GPIO control to deactivate motor. ###
#####################################################################################
while True:
    try:
        main() #execute main()
    except KeyboardInterrupt:
        print 'Cleaning and Exiting...'
        GPIO.cleanup() #clean up the pins and exit the program
        print 'Done'
        exit()
from django.conf import settings as django_settings


# noinspection PyPep8Naming
class LazySettings:
    """Read-through accessors for this app's Django settings.

    Each property resolves against ``django.conf.settings`` at access time
    (so overrides applied after import are respected) and falls back to the
    documented default when the name is not configured.
    """

    @staticmethod
    def _resolve(name, default):
        # Single lookup point: a configured value wins, else the default.
        return getattr(django_settings, name, default)

    @property
    def REQUIRE_MAIN_NAME(self):
        return self._resolve('REQUIRE_MAIN_NAME', 'main')

    @property
    def DEFAULT_PAGINATE_BY(self):
        return self._resolve('DEFAULT_PAGINATE_BY', 30)

    @property
    def FILTER_SEARCH_INPUT_BY(self):
        return self._resolve('FILTER_SEARCH_INPUT_BY', 10)

    @property
    def AUTO_PAGE_SIZE(self):
        return self._resolve('AUTO_PAGE_SIZE', True)

    @property
    def AUTO_FORM_HEADLINE(self):
        return self._resolve('AUTO_FORM_HEADLINE', True)

    @property
    def CREATE_FORM_HEADLINE_PREFIX(self):
        return self._resolve('CREATE_FORM_HEADLINE_PREFIX', 'Add')

    @property
    def UPDATE_FORM_HEADLINE_PREFIX(self):
        return self._resolve('UPDATE_FORM_HEADLINE_PREFIX', 'Edit')

    @property
    def FORM_RELATED_OBJECT_IDS(self):
        return self._resolve('FORM_RELATED_OBJECT_IDS', True)

    @property
    def GENERIC_FORM_BASE_TEMPLATE(self):
        return self._resolve('GENERIC_FORM_BASE_TEMPLATE', 'ajaxviews/generic_form.html')

    @property
    def AUTO_DELETE_URL(self):
        return self._resolve('AUTO_DELETE_URL', True)

    @property
    def FORM_DELETE_CONFIRMATION(self):
        return self._resolve('FORM_DELETE_CONFIRMATION', True)

    @property
    def AUTO_SUCCESS_URL(self):
        return self._resolve('AUTO_SUCCESS_URL', True)


settings = LazySettings()
import re
import time


class BaseCounters:
    """Base class for counter backends.

    Subclasses implement ``do_ping(key, timestamp)`` and ``do_hit(key, n)``;
    this base class validates keys before delegating to them.
    """

    def __init__(self):
        # Allowed keys: one or more word characters or dots, nothing else.
        self.keyre = re.compile(r'\A[\w.]+\Z')

    def ping(self, key):
        """Record a ping for *key* at the current unix time (whole seconds)."""
        self.validate_key(key)
        self.do_ping(key, int(time.time()))

    def hit(self, key, n=1):
        """Record *n* hits for *key* (default 1)."""
        self.validate_key(key)
        self.do_hit(key, n)

    def validate_key(self, key):
        """Raise ValueError unless *key* matches the allowed pattern."""
        # Call the precompiled pattern's .match directly (idiomatic) and use
        # a guard clause instead of the old ``if match: pass / else: raise``.
        if not self.keyre.match(key):
            raise ValueError(
                "Counters keys must only contain letters, numbers, the "
                "underscore (_) and fullstop (.), received \"%s\"" % key)
# If the first sample didn't turn out large enough, keep trying to take samples; # this shouldn't happen often because we use a big multiplier for their initial size. # See: scala/spark/RDD.scala while len(samples) < num: # TODO: add log warning for when more than one iteration was run seed = rand.randint(0, sys.maxsize) samples = self.sample(withReplacement, fraction, seed).collect() rand.shuffle(samples) return samples[0:num] @staticmethod def _computeFractionForSampleSize( sampleSizeLowerBound: int, total: int, withReplacement: bool ) -> float: """ Returns a sampling rate that guarantees a sample of size >= sampleSizeLowerBound 99.99% of the time. How the sampling rate is determined: Let p = num / total, where num is the sample size and total is the total number of data points in the RDD. We're trying to compute q > p such that - when sampling with replacement, we're drawing each data point with prob_i ~ Pois(q), where we want to guarantee Pr[s < num] < 0.0001 for s = sum(prob_i for i from 0 to total), i.e. the failure rate of not having a sufficiently large sample < 0.0001. Setting q = p + 5 * sqrt(p/total) is sufficient to guarantee 0.9999 success rate for num > 12, but we need a slightly larger q (9 empirically determined). - when sampling without replacement, we're drawing each data point with prob_i ~ Binomial(total, fraction) and our choice of q guarantees 1-delta, or 0.9999 success rate, where success rate is defined the same as in sampling with replacement. """ fraction = float(sampleSizeLowerBound) / total if withReplacement: numStDev = 5 if sampleSizeLowerBound < 12: numStDev = 9 return fraction + numStDev * sqrt(fraction / total) else: delta = 0.00005 gamma = -log(delta) / total return min(1, fraction + gamma + sqrt(gamma * gamma + 2 * gamma * fraction)) def union(self: "RDD[T]", other: "RDD[U]") -> "RDD[Union[T, U]]": """ Return the union of this RDD and another one. 
Examples -------- >>> rdd = sc.parallelize([1, 1, 2, 3]) >>> rdd.union(rdd).collect() [1, 1, 2, 3, 1, 1, 2, 3] """ if self._jrdd_deserializer == other._jrdd_deserializer: rdd: "RDD[Union[T, U]]" = RDD( self._jrdd.union(other._jrdd), self.ctx, self._jrdd_deserializer ) else: # These RDDs contain data in different serialized formats, so we # must normalize them to the default serializer. self_copy = self._reserialize() other_copy = other._reserialize() rdd = RDD(self_copy._jrdd.union(other_copy._jrdd), self.ctx, self.ctx.serializer) if ( self.partitioner == other.partitioner and self.getNumPartitions() == rdd.getNumPartitions() ): rdd.partitioner = self.partitioner return rdd def intersection(self: "RDD[T]", other: "RDD[T]") -> "RDD[T]": """ Return the intersection of this RDD and another one. The output will not contain any duplicate elements, even if the input RDDs did. Notes ----- This method performs a shuffle internally. Examples -------- >>> rdd1 = sc.parallelize([1, 10, 2, 3, 4, 5]) >>> rdd2 = sc.parallelize([1, 6, 2, 3, 7, 8]) >>> rdd1.intersection(rdd2).collect() [1, 2, 3] """ return ( self.map(lambda v: (v, None)) .cogroup(other.map(lambda v: (v, None))) .filter(lambda k_vs: all(k_vs[1])) .keys() ) def _reserialize(self: "RDD[T]", serializer: Optional[Serializer] = None) -> "RDD[T]": serializer = serializer or self.ctx.serializer if self._jrdd_deserializer != serializer: self = self.map(lambda x: x, preservesPartitioning=True) self._jrdd_deserializer = serializer return self def __add__(self: "RDD[T]", other: "RDD[U]") -> "RDD[Union[T, U]]": """ Return the union of this RDD and another one. 
Examples -------- >>> rdd = sc.parallelize([1, 1, 2, 3]) >>> (rdd + rdd).collect() [1, 1, 2, 3, 1, 1, 2, 3] """ if not isinstance(other, RDD): raise TypeError return self.union(other) @overload def repartitionAndSortWithinPartitions( self: "RDD[Tuple[S, V]]", numPartitions: Optional[int] = ..., partitionFunc: Callable[["S"], int] = ..., ascending: bool = ..., ) -> "RDD[Tuple[S, V]]": ... @overload def repartitionAndSortWithinPartitions( self: "RDD[Tuple[K, V]]", numPartitions: Optional[int], partitionFunc: Callable[[K], int], ascending: bool, keyfunc: Callable[[K], "S"], ) -> "RDD[Tuple[K, V]]": ... @overload def repartitionAndSortWithinPartitions( self: "RDD[Tuple[K, V]]", numPartitions: Optional[int] = ..., partitionFunc: Callable[[K], int] = ..., ascending: bool = ..., *, keyfunc: Callable[[K], "S"], ) -> "RDD[Tuple[K, V]]": ... def repartitionAndSortWithinPartitions( self: "RDD[Tuple[Any, Any]]", numPartitions: Optional[int] = None, partitionFunc: Callable[[Any], int] = portable_hash, ascending: bool = True, keyfunc: Callable[[Any], Any] = lambda x: x, ) -> "RDD[Tuple[Any, Any]]": """ Repartition the RDD according to the given partitioner and, within each resulting partition, sort records by their keys. Examples -------- >>> rdd = sc.parallelize([(0, 5), (3, 8), (2, 6), (0, 8), (3, 8), (1, 3)]) >>> rdd2 = rdd.repartitionAndSortWithinPartitions(2, lambda x: x % 2, True) >>> rdd2.glom().collect() [[(0, 5), (0, 8), (2, 6)], [(1, 3), (3, 8), (3, 8)]] """ if numPartitions is None: numPartitions = self._defaultReducePartitions() memory = self._memory_limit() serializer = self._jrdd_deserializer def sortPartition(iterator: Iterable[Tuple[K, V]]) -> Iterable[Tuple[K, V]]: sort = ExternalSorter(memory * 0.9, serializer).sorted return iter(sort(iterator
, key=lambda k_v: keyfunc(k_v[0]), reverse=(not ascending))) return self.partitionBy(numPartitions, partitionFunc).mapPartitions(sortPartition, True) @overload def sortByKey( self: "RDD[Tuple[S, V]]"
, ascending: bool = ..., numPartitions: Optional[int] = ..., ) -> "RDD[Tuple[K, V]]": ... @overload def sortByKey( self: "RDD[Tuple[K, V]]", ascending: bool, numPartitions: int, keyfunc: Callable[[K], "S"], ) -> "RDD[Tuple[K, V]]": ... @overload def sortByKey( self: "RDD[Tuple[K, V]]", ascending: bool = ..., numPartitions: Optional[int] = ..., *, keyfunc: Callable[[K], "S"], ) -> "RDD[Tuple[K, V]]": ... def sortByKey( self: "RDD[Tuple[K, V]]", ascending: Optional[bool] = True, numPartitions: Optional[int] = None, keyfunc: Callable[[Any], Any] = lambda x: x, ) -> "RDD[Tuple[K, V]]": """ Sorts this RDD, which is assumed to consist of (key, value) pairs. Examples -------- >>> tmp = [('a', 1), ('b', 2), ('1', 3), ('d', 4), ('2', 5)] >>> sc.parallelize(tmp).sortByKey().first() ('1', 3) >>> sc.parallelize(tmp).sortByKey(True, 1).collect() [('1', 3), ('2', 5), ('a', 1), ('b', 2), ('d', 4)] >>> sc.parallelize(tmp).sortByKey(True, 2).collect() [('1', 3), ('2', 5), ('a', 1), ('b', 2), ('d', 4
from datetime import datetime


class PanoplyException(Exception):
    """Base exception; ``retryable`` tells callers whether a retry may help."""

    def __init__(self, args=None, retryable=True):
        super(PanoplyException, self).__init__(args)
        self.retryable = retryable


class IncorrectParamError(Exception):
    """Raised when an input parameter fails validation."""

    # NOTE: default message typo fixed ("parametr" -> "parameter").
    def __init__(self, msg: str = "Incorrect input parameter"):
        super().__init__(msg)


class DataSourceException(Exception):
    """Carries structured context about a failure in a specific data source.

    Attributes mirror the constructor arguments; ``created_at`` records the
    (naive) UTC time the exception object was built.
    """

    def __init__(self, message, code, exception_cls, phase,
                 source_type, source_id, database_id):
        super().__init__(message)
        self.message = message
        self.code = code
        self.phase = phase
        self.source_type = source_type
        self.source_id = source_id
        self.database_id = database_id
        self.exception_cls = exception_cls
        # Kept as naive UTC (datetime.utcnow) to match existing consumers;
        # switching to timezone-aware datetimes would change comparisons.
        self.created_at = datetime.utcnow()


class TokenValidationException(PanoplyException):
    """Token validation failure wrapping the originating error."""

    def __init__(self, original_error, args=None, retryable=True):
        super().__init__(args, retryable)
        self.original_error = original_error
""" =========================================== Robust linear model estimation using RANSAC =========================================== In this example we see how to robustly fit a linear model to faulty data using the RANSAC algorithm. """ import numpy as np from matplotlib import pyplot as plt from sklearn import linear_model, datasets n_samples = 1000 n_outliers = 50 X, y, coef = datasets.make_regression(n_samples=n_samples, n_features=1, n_informative=1
, noise=10, coef=True, random_state=0) # Add outlier d
ata np.random.seed(0) X[:n_outliers] = 3 + 0.5 * np.random.normal(size=(n_outliers, 1)) y[:n_outliers] = -3 + 10 * np.random.normal(size=n_outliers) # Fit line using all data model = linear_model.LinearRegression() model.fit(X, y) # Robustly fit linear model with RANSAC algorithm model_ransac = linear_model.RANSACRegressor(linear_model.LinearRegression()) model_ransac.fit(X, y) inlier_mask = model_ransac.inlier_mask_ outlier_mask = np.logical_not(inlier_mask) # Predict data of estimated models line_X = np.arange(-5, 5) line_y = model.predict(line_X[:, np.newaxis]) line_y_ransac = model_ransac.predict(line_X[:, np.newaxis]) # Compare estimated coefficients print "Estimated coefficients (true, normal, RANSAC):" print coef, model.coef_, model_ransac.estimator_.coef_ plt.plot(X[inlier_mask], y[inlier_mask], '.g', label='Inliers') plt.plot(X[outlier_mask], y[outlier_mask], '.r', label='Outliers') plt.plot(line_X, line_y, '-k', label='Linear regressor') plt.plot(line_X, line_y_ransac, '-b', label='RANSAC regressor') plt.legend(loc='lower right') plt.show()
from Channel import Channel
import telepot


class AmbrosioBot(telepot.Bot):
    """Telegram bot that queues incoming text commands for a channel."""

    def __init__(self, token):
        super(AmbrosioBot, self).__init__(token)
        self.clist = None    # external list that receives incoming commands
        self.chat_id = None  # chat id of the most recent sender

    def set_list(self, clist):
        """Register the list that incoming text commands are appended to."""
        self.clist = clist

    def on_chat_message(self, msg):
        """telepot callback: queue text messages and remember the sender chat."""
        content_type, chat_type, chat_id = telepot.glance(msg)
        if content_type == 'text':
            command = msg['text']
            if self.clist is not None:
                self.clist.append(command)
            self.chat_id = chat_id

    def respond(self, response):
        """Send *response* back to the most recent chat, if there is one."""
        if self.chat_id is not None:
            self.sendMessage(self.chat_id, response)


class TelegramChannel(Channel):
    """Channel implementation that receives commands from Telegram."""

    def __init__(self, name="TelegramChannel"):
        super(TelegramChannel, self).__init__(name)
        # SECURITY: the bot token is hard-coded and committed; it should be
        # loaded from an environment variable/config and this token revoked.
        self.bot = AmbrosioBot("189884221:AAHls9d0EkCDfU0wgQ-acs5Z39aibA7BZmc")
        self.messages = []
        self.bot.set_list(self.messages)
        self.bot.notifyOnMessage()

    def get_msg(self):
        """Pop and return the oldest queued message, or None when empty."""
        if self.msg_avail():
            return self.messages.pop(0)
        return None

    def msg_avail(self):
        """Return True if at least one message is queued."""
        return len(self.messages) > 0

    def respond(self, response):
        """Relay *response* to the user; substitute a default error text."""
        if response is None:
            # Fixed grammar of the user-facing fallback message.
            response = "Command not understood"
        self.bot.respond(response)
             limit_bids=0, limit_asks=0):
        """
        Send a request to get the public order book, return the response.

        Arguments:
        symbol -- currency symbol (default 'btcusd')
        limit_bids -- limit the number of bids returned (default 0)
        limit_asks -- limit the number of asks returned (default 0)
        """
        url = self.base_url + '/v1/book/' + symbol
        params = {
            'limit_bids': limit_bids,
            'limit_asks': limit_asks
        }
        # requests.get's second positional argument is ``params`` (the query
        # string), so this sends them as URL parameters.
        return requests.get(url, params)

    def trades(self, symbol='btcusd', since=0, limit_trades=50,
               include_breaks=0):
        """
        Send a request to get all public trades, return the response.

        Arguments:
        symbol -- currency symbol (default 'btcusd')
        since -- only return trades after this unix timestamp (default 0)
        limit_trades -- maximum number of trades to return (default 50).
        include_breaks -- whether to display broken trades (default False)
        """
        url = self.base_url + '/v1/trades/' + symbol
        params = {
            'since': since,
            'limit_trades': limit_trades,
            'include_breaks': include_breaks
        }
        return requests.get(url, params)

    def auction(self, symbol='btcusd'):
        """Send a request for latest auction info, return the response."""
        url = self.base_url + '/v1/auction/' + symbol
        return requests.get(url)

    def auction_history(self, symbol='btcusd', since=0,
                        limit_auction_results=50, include_indicative=1):
        """
        Send a request for auction history info, return the response.

        Arguments:
        symbol -- currency symbol (default 'btcusd')
        since -- only return auction events after this timestamp (default 0)
        limit_auction_results -- maximum number of auction events to return
        (default 50).
        include_indicative -- whether to include publication of indicative
        prices and quantities. (default True)
        """
        url = self.base_url + '/v1/auction/' + symbol + '/history'
        params = {
            'since': since,
            'limit_auction_results': limit_auction_results,
            'include_indicative': include_indicative
        }
        return requests.get(url, params)

    # authenticated requests: every payload carries the request path and a
    # fresh nonce and is signed/encoded by self.prepare() into headers.
    def new_order(self, amount, price, side, client_order_id=None,
                  symbol='btcusd', type='exchange limit', options=None):
        """
        Send a request to place an order, return the response.

        Arguments:
        amount -- quoted decimal amount of BTC to purchase
        price -- quoted decimal amount of USD to spend per BTC
        side -- 'buy' or 'sell'
        client_order_id -- an optional client-specified order id (default None)
        symbol -- currency symbol (default 'btcusd')
        type -- the order type (default 'exchange limit')
        """
        request = '/v1/order/new'
        url = self.base_url + request
        params = {
            'request': request,
            'nonce': self.get_nonce(),
            'symbol': symbol,
            'amount': amount,
            'price': price,
            'side': side,
            'type': type
        }
        # Optional fields are only included when explicitly provided.
        if client_order_id is not None:
            params['client_order_id'] = client_order_id
        if options is not None:
            params['options'] = options
        return requests.post(url, headers=self.prepare(params))

    def cancel_order(self, order_id):
        """
        Send a request to cancel an order, return the response.

        Arguments:
        order_id - the order id to cancel
        """
        request = '/v1/order/cancel'
        url = self.base_url + request
        params = {
            'request': request,
            'nonce': self.get_nonce(),
            'order_id': order_id
        }
        return requests.post(url, headers=self.prepare(params))

    def cancel_session(self):
        """Send a request to cancel all session orders, return the response."""
        request = '/v1/order/cancel/session'
        url = self.base_url + request
        params = {
            'request': request,
            'nonce': self.get_nonce()
        }
        return requests.post(url, headers=self.prepare(params))

    def cancel_all(self):
        """Send a request to cancel all orders, return the response."""
        request = '/v1/order/cancel/all'
        url = self.base_url + request
        params = {
            'request': request,
            'nonce': self.get_nonce()
        }
        return requests.post(url, headers=self.prepare(params))

    def order_status(self, order_id):
        """
        Send a request to get an order status, return the response.

        Arguments:
        order_id -- the order id to get information on
        """
        request = '/v1/order/status'
        url = self.base_url + request
        params = {
            'request': request,
            'nonce': self.get_nonce(),
            'order_id': order_id
        }
        return requests.post(url, headers=self.prepare(params))

    def active_orders(self):
        """Send a request to get active orders, return the response."""
        request = '/v1/orders'
        url = self.base_url + request
        params = {
            'request': request,
            'nonce': self.get_nonce()
        }
        return requests.post(url, headers=self.prepare(params))

    def past_trades(self, symbol='btcusd', limit_trades=50, timestamp=0):
        """
        Send a trade history request, return the response.

        Arguments:
        symbol -- currency symbol (default 'btcusd')
        limit_trades -- maximum number of trades to return (default 50)
        timestamp -- only return trades after this unix timestamp (default 0)
        """
        request = '/v1/mytrades'
        url = self.base_url + request
        params = {
            'request': request,
            'nonce': self.get_nonce(),
            'symbol': symbol,
            'limit_trades': limit_trades,
            'timestamp': timestamp
        }
        return requests.post(url, headers=self.prepare(params))

    def tradevolume(self):
        """Send a request to get your trade volume, return the response."""
        request = '/v1/tradevolume'
        url = self.base_url + request
        params = {
            'request': request,
            'nonce': self.get_nonce()
        }
        return requests.post(url, headers=self.prepare(params))

    def balances(self):
        """Send an account balance request, return the response."""
        request = '/v1/balances'
        url = self.base_url + request
        params = {
            'request': request,
            'nonce': self.get_nonce()
        }
        return requests.post(url, headers=self.prepare(params))

    def newAddress(self, currency='btc', label=''):
        """
        Send a request for a new cryptocurrency deposit address
        with an optional label. Return the response.

        Arguments:
        currency -- a Gemini supported cryptocurrency (btc, eth)
        label -- optional label for the deposit address
        """
        request = '/v1/deposit/' + currency + '/newAddress'
        url = self.base_url + request
        params = {
            'request': request,
            'nonce': self.get_nonce()
        }
        if label != '':
            params['label'] = label
        return requests.post(url, headers=self.prepare(params))

    def fees(self):
        """Send a request to get fee and notional volume, return the response."""
        request = '/v1/notionalvolume'
        url = self.base_url + request
        params = {
            'request': request,
            'nonce': self.get_nonce()
        }
        return requests.post(url, headers=self.prepare(params))

    def heartbeat(self):
        """Send a heartbeat message, return the response."""
        request = '/v1/heartbeat'
        url = self.base_url + request
        params = {
            'request': request,
            'nonce': self.get_nonce()
import click

from complex.cli import pass_context


@click.command('status', short_help='Shows file changes.')
@pass_context
def cli(ctx):
    """Shows file changes in the current working directory."""
    # Placeholder implementation: emits a static status line plus a
    # verbose-only debug line via the shared context logger.
    ctx.log('Changed files: none')
    ctx.vlog('bla bla bla, debug info')
from __future__ import absolute_import
import six
import logging

from .. import py3_errmsg

logger = logging.getLogger(__name__)

# enaml is treated as an optional dependency here: if it cannot be imported
# on Python 3, the failure is logged (using the package-wide py3 error
# message) and the GUI models/views are simply not exported; on Python 2
# the ImportError propagates to the caller.
try:
    import enaml
except ImportError:
    if six.PY3:
        logger.exception(py3_errmsg)
    else:
        raise
else:
    from .model import (GetLastModel, DisplayHeaderModel, WatchForHeadersModel,
                        ScanIDSearchModel)
    # enaml.imports() makes .enaml view definitions importable inside
    # the with-block.
    with enaml.imports():
        from .view import (GetLastView, GetLastWindow, WatchForHeadersView,
                           ScanIDSearchView)
import typer

from controller import log
from controller.app import Application
from controller.deploy.docker import Docker


@Application.app.command(help="Provide instructions to join new nodes")
def join(
    manager: bool = typer.Option(
        False, "--manager", show_default=False, help="join new node with manager role"
    )
) -> None:
    # Print the `docker swarm join` command for a new worker or manager node.
    Application.print_command(
        Application.serialize_parameter("--manager", manager, IF=manager),
    )
    Application.get_controller().controller_init()

    docker = Docker()

    # Find an address to join through: the last ready, active manager wins.
    manager_address = "N/A"
    for swarm_node in docker.client.node.list():
        usable_manager = (
            swarm_node.spec.role == "manager"
            and swarm_node.status.state == "ready"
            and swarm_node.spec.availability == "active"
            and swarm_node.manager_status
        )
        if usable_manager:
            manager_address = swarm_node.manager_status.addr

    node_role = "manager" if manager else "worker"
    log.info(f"To add a {node_role} to this swarm, run the following command:")
    token = docker.swarm.get_token(node_role)

    print("")
    print(f"docker swarm join --token {token} {manager_address}")
    print("")
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):
    """Creates the Group model, renames ProfileDataPoint.kwh to kw, and adds
    a many-to-many ``groups`` relation to Meter."""

    dependencies = [
        ('viewer', '0006_meter_on_auditlist'),
    ]

    operations = [
        migrations.CreateModel(
            name='Group',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False,
                                        auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=64)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # Field held kW (power), not kWh (energy) — rename only, data kept.
        migrations.RenameField(
            model_name='profiledatapoint',
            old_name='kwh',
            new_name='kw',
        ),
        migrations.AddField(
            model_name='meter',
            name='groups',
            field=models.ManyToManyField(to='viewer.Group'),
            preserve_default=True,
        ),
    ]
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Converting code to AST. Adapted from Tangent. """ from __future__ impo
rt absolute_import from __future__ import division from __future__ import print_func
tion import textwrap import gast from tensorflow.python.util import tf_inspect def parse_object(obj): """Return the AST of given object.""" return parse_str(tf_inspect.getsource(obj)) def parse_str(src): """Return the AST of given piece of code.""" return gast.parse(textwrap.dedent(src))
# -*- coding: utf-8 -*-

import os
import ConfigParser
import smtplib
from email.MIMEMultipart import MIMEMultipart
from email.MIMEText import MIMEText
from email.MIMEBase import MIMEBase
from email import encoders

from global_functions import app_dir


class Mailer():
    """ Instance to manage the mailing (Python 2 code base). """

    def __init__(self):
        """ Setup all needed info.

        Reads server, port and credentials from the [Mail] section of
        institution.ini next to the application.
        """
        # Gets all the connection info from the .ini file
        self.Config = ConfigParser.ConfigParser()
        self.Config.read(os.path.join(app_dir, "institution.ini"))
        self.server = unicode(self.Config.get("Mail", "server"))
        self.port = int(self.Config.get("Mail", "port"))
        self.email = unicode(self.Config.get("Mail", "email"))
        self.password = unicode(self.Config.get("Mail", "password"))

    def connect(self):
        """ Connects to the mail server using the .ini info.

        Returns 1 on successful login, 0 on failure (legacy int protocol
        kept for existing callers).
        """
        self.smtp_server = smtplib.SMTP(self.server, self.port)
        self.smtp_server.ehlo()
        self.smtp_server.starttls()
        try:
            self.smtp_server.login(self.email, self.password)
            return 1
        except Exception:
            # Was a bare ``except:``; narrowed so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            return 0

    def send_certificate(self, path, send_to):
        """ Send each certificate from the configured email.

        path -- filesystem path of the certificate to attach
        send_to -- recipient email address
        """
        # Email info
        msg = MIMEMultipart()
        msg["From"] = self.email
        msg["To"] = send_to
        msg["Subject"] = u"Certificado"
        body = u"""Em anexo a este e-mail encontra-se o seu certificado de participação de um de nossos eventos. Qualquer problema, entre em contato respondendo a este e-mail ou procure-nos em: {address} Fone: {phone} """.format(
            address=unicode(self.Config.get("Contact", "address")),
            phone=unicode(self.Config.get("Contact", "phone"))
        )
        msg.attach(MIMEText(unicode(body), 'plain', 'utf-8'))

        # Add the certificate file; ``with`` guarantees the handle is closed
        # (the previous code leaked the open file object).
        filename = os.path.basename(unicode(path))
        part = MIMEBase('application', 'octet-stream')
        with open(unicode(path), "rb") as attachment:
            part.set_payload(attachment.read())
        encoders.encode_base64(part)
        part.add_header(u'Content-Disposition',
                        "attachment; filename= %s" % filename)
        msg.attach(part)

        text = msg.as_string()

        # Send the email
        self.smtp_server.sendmail(self.email, send_to, text)

    def quit(self):
        # Quits the connection
        self.smtp_server.quit()
') self.thetaIterationValue = self.getWidget('spinBox_thetaIterationValue') self.phiIterationValue = self.getWidget('spinBox_phiIterationValue') self.medialMesh = self.getWidget('checkBox_medialMesh') # Advanced Post Processed Segmentation self.CollapsibleButton_AdvancedPostProcessedSegmentation = self.getWidget('CollapsibleButton_AdvancedPostProcessedSegmentation') self.GaussianFiltering = self.getWidget('checkBox_GaussianFiltering') self.label_VarianceX = self.getWidget('label_VarianceX') self.VarianceX = self.getWidget('SliderWidget_VarianceX') self.label_VarianceY = self.getWidget('label_VarianceY') self.VarianceY = self.getWidget('SliderWidget_VarianceY') self.label_VarianceZ = self.getWidget('label_VarianceZ') self.VarianceZ = self.getWidget('SliderWidget_VarianceZ') # Advanced Parameters to SPHARM Mesh self.CollapsibleButton_AdvancedParametersToSPHARMMesh = self.getWidget('CollapsibleButton_AdvancedParametersToSPHARMMesh') self.useRegTemplate = self.getWidget('checkBox_useRegTemplate') self.label_regTemplate = self.getWidget('label_regTemplate') self.regTemplate = self.getWidget('PathLineEdit_regTemplate') self.useFlipTemplate = self.getWidget('checkBox_useFlipTemplate') self.label_flipTemplate = self.getWidget('label_flipTemplate') self.flipTemplate = self.getWidget('PathLineEdit_flipTemplate') self.choiceOfFlip = self.getWidget('comboBox_choiceOfFlip') self.sameFlipForAll = self.getWidget('checkBox_sameFlipForAll') self.tableWidget_ChoiceOfFlip = self.getWidget('tableWidget_ChoiceOfFlip') # Visualization self.CollapsibleButton_Visualization = self.getWidget('CollapsibleButton_Visualization') self.visualizationInSPV = self.getWidget('pushButton_visualizationInSPV') self.CheckableComboBox_visualization = self.getWidget('CheckableComboBox_visualization') self.tableWidget_visualization = self.getWidget('tableWidget_visualization') # Apply CLIs self.ApplyButton = self.getWidget('applyButton') self.progress_layout = self.getWidget('progress_layout') # 
Connections # Group Project IO self.CollapsibleButton_GroupProjectIO.connect('clicked()', lambda: self.onSelectedCollapsibleButtonOpen( self.CollapsibleButton_GroupProjectIO)) self.GroupProjectInputDirectory.connect('directoryChanged(const QString &)', self.onInputDirectoryChanged) self.GroupProjectOutputDirectory.connect('directoryChanged(const QString &)', self.onOutputDirectoryChanged) self.Debug.connect('clicked(bool)', self.onDebug) # Post Processed Segmentation self.CollapsibleButton_SegPostProcess.connect('clicked()', lambda: self.onSelectedCollapsibleButtonOpen( self.CollapsibleButton_SegPostProcess)) self.OverwriteSegPostProcess.connect('clicked(bool)', self.onOverwriteFilesSegPostProcess) self.RescaleSegPostProcess.connect('stateChanged(int)', self.onSelectSpacing) self.sx.connect('valueChanged(double)', self.onSxValueChanged) self.sy.connect('valueChanged(double)', self.onSyValueChanged) self.sz.connect('valueChanged(double)', self.onSzValueChanged) self.LabelState.connect('clicked(bool)', self.onSelectValueLabelNumber) self.ValueLabelNumber.connect('valueChanged(double)', self.onLabelNumberValueChanged) # Generate Mesh Parameters self.CollapsibleButton_GenParaMesh.connect('clicked()', lambda: self.onSelectedCollapsibleButtonOpen( self.CollapsibleButton_GenParaMesh)) self.OverwriteGenParaMesh.connect('clicked(bool)', self.onOverwriteFilesGenParaMesh) self.NumberofIterations.connect('valueChanged(double)', self.onNumberofIterationsValueChanged) # Parameters to SPHARM Mesh self.CollapsibleButton_ParaToSPHARMMesh.connect('clicked()', lambda: self.onSelectedCollapsibleButtonOpen( self.CollapsibleButton_ParaToSPHARMMesh)) self.OverwriteParaToSPHARMMesh.connect('clicked(bool)', self.onOverwriteFilesParaToSPHARMMesh) self.SubdivLevelValue.connect('valueChanged(double)', self.onSubdivLevelValueChanged) self.SPHARMD
egreeValue.connect('valueChanged(double)', self.onSPHARMDegreeValueChanged) self.thetaIterationValue.connect('valueChanged(int)', self.onThetaIterationValueChanged) self.phiIterationValue.connect('valueChanged(int)', self.onPhiIterationValueChanged) self.medialMesh.connect('clicked(bool)', self.onMedialMeshValueChanged) # Advanced Post Processed Segmentation self.CollapsibleButton_AdvancedPostProcessedSegmentation.connect('clicked()',
lambda: self.onSelectedCollapsibleButtonOpen( self.CollapsibleButton_AdvancedPostProcessedSegmentation)) self.GaussianFiltering.connect('clicked(bool)', self.onSelectGaussianVariance) self.VarianceX.connect('valueChanged(double)', self.onVarianceXValueChanged) self.VarianceY.connect('valueChanged(double)', self.onVarianceYValueChanged) self.VarianceZ.connect('valueChanged(double)', self.onVarianceZValueChanged) # Advanced Parameters to SPHARM Mesh self.CollapsibleButton_AdvancedParametersToSPHARMMesh.connect('clicked()', lambda: self.onSelectedCollapsibleButtonOpen( self.CollapsibleButton_AdvancedParametersToSPHARMMesh)) self.useRegTemplate.connect('clicked(bool)', self.onEnableRegTemplate) self.regTemplate.connect('currentPathChanged(const QString)', self.onRegTemplateValueChanged) self.useFlipTemplate.connect('clicked(bool)', self.onEnableFlipTemplate) self.flipTemplate.connect('currentPathChanged(const QString)', self.onFlipTemplateValueChanged) self.choiceOfFlip.connect('currentIndexChanged(int)', self.onChoiceOfFlipValueChanged) self.sameFlipForAll.connect('clicked(bool)', self.onEnableFlipChoices) # Visualization self.CollapsibleButton_Visualization.connect('clicked()', lambda: self.onSelectedCollapsibleButtonOpen( self.CollapsibleButton_Visualization)) self.CheckableComboBox_visualization.connect('checkedIndexesChanged()', self.onCheckableComboBoxValueChanged) self.visualizationInSPV.connect('clicked(bool)', self.onSPHARMMeshesVisualizationInSPV) # Apply CLIs self.ApplyButton.connect('clicked(bool)', self.onApplyButton) slicer.mrmlScene.AddObserver(slicer.mrmlScene.EndCloseEvent, self.onCloseScene) # Widget Configuration # Table for the Flip Options self.tableWidget_ChoiceOfFlip.setColumnCount(2) self.tableWidget_ChoiceOfFlip.setHorizontalHeaderLabels([' Input Files ', ' Choice of Flip ']) self.tableWidget_ChoiceOfFlip.setColumnWidth(0, 400) horizontalHeader = self.tableWidget_ChoiceOfFlip.horizontalHeader() horizontalHeader.setStretchLastSection(False) 
_setSectionResizeMode(horizontalHeader, 0, qt.QHeaderView.Stretch) _setSectionResizeMode(horizontalHeader, 1, qt.QHeaderView.ResizeToContents) self.tableWidget_ChoiceOfFlip.verticalHeader().setVisible(False) # Progress Bar self.progress_layout.addWidget(self.Logic.ProgressBar) # Table for the visualization in SPV self.tableWidget_visualization.setColumnCount(2) self.tableWidget_visualization.setHorizontalHeaderLabels([' VTK Files ', ' Visualization ']) self.tableWidget_visualization.setColumnWidth(0, 400) horizontalHeader = self.tableWidget_visualization.horizontalHeader() horizontalHeader.setStretchLastSection(False) _setSectionResizeMode(horizontalHeader, 0, qt.QHeaderView.Stretch) _setSectionResizeMode(horizontalHeader, 1, qt.QHeaderVi
r seconds', 'minutesRequired': 'You must enter minutes (after a :)', 'badNumber': 'The %(part)s value you gave is not a number: %(number)r', 'badHour': 'You must enter an hour in the range %(range)s', 'badMinute': 'You must enter a minute in the range 0-59', 'badSecond': 'You must enter a second in the range 0-59', } def _to_python(self, value, state): time = value.strip() explicit_ampm = False if self.use_ampm: last_two = time[-2:].lower() if last_two not in ('am', 'pm'): if self.use_ampm != 'optional': raise Invalid( self.message('noAMPM', state), value, state) else: offset = 0 else: explicit_ampm = True if last_two == 'pm': offset = 12 else: offset = 0 time = time[:-2] else: offset = 0 parts = time.split(':') if len(parts) > 3: raise Invalid( self.message('tooManyColon', state), value, state) if len(parts) == 3 and not self.use_seconds: raise Invalid( self.message('noSeconds', state), value, state) if (len(parts) == 2 and self.use_seconds and self.use_seconds != 'optional'): raise Invalid( self.message('secondsRequired', state), value, state) if len(parts) == 1: raise Invalid( self.message('minutesRequired', state), value, state) try: hour = int(parts[0]) except ValueError: raise Invalid( self.message('badNumber', state, number=parts[0], part='hour'), value, state) if explicit_ampm: if hour > 12 or hour < 1: raise Invalid( self.message('badHour', state, number=hour, range='1-12'), value, state) if hour == 12 and offset == 12: # 12pm == 12 pass elif hour == 12 and offset == 0: # 12am == 0 hour = 0 else: hour += offset else: if hour > 23 or hour < 0: raise Invalid( self.message('badHour', state, number=hour, range='0-23'), value, state) try: minute = int(parts[1]) except ValueError: raise Invalid( self.message('badNumber', state, number=parts[1], part='minute'), value, state) if minute > 59 or minute < 0: raise Invalid( self.message('badMinute', state, number=minute), value, state) if len(parts) == 3: try: second = int(parts[2]) except ValueError: raise Invalid( 
self.message('badNumber', state, number=parts[2], part='second')) if second > 59 or second < 0: raise Invalid( self.message('badSecond', state, number=second), value, state) else: second = None if second is None: return (hour, minute) else: return (hour, minute, second) def _from_python(self, value, state): if isinstance(value, (str, unicode)): return value if hasattr(value, 'hour'): hour, minute = value.hour, value.minute elif len(value) == 3: hour, minute, second = value elif len(value) == 2: hour, minute = value second = 0 ampm = '' if ((self.use_ampm == 'optional' and self.prefer_ampm) or (self.use_ampm and self.use_ampm != 'optional')): ampm = 'am' if hour > 12: hour -= 12 ampm = 'pm' elif hour == 12: ampm = 'pm' elif hour == 0: hour = 12 if self.use_seconds: return '%i:%02i:%02i%s' % (hour, minute, second, ampm) else: return '%i:%02i%s' % (hour, minute, ampm) class PostalCode(Regex): """ US Postal codes (aka Zip Codes). :: >>> PostalCode.to_python('55555') '55555' >>> PostalCode.to_python('55555-5555') '55555-5555' >>> PostalCode.to_python('5555') Traceback (most recent call last): ... Invalid: Please enter a zip code (5 digits) """ regex = r'^\d\d\d\d\d(?:-\d\d\d\d)?$' strip = True messages = { 'invalid': 'Please enter a zip code (5 digits)', } class StripField(FancyValidator): """ Take a field from a dictionary, removing the key from the dictionary. ``name`` is the key. The field value and a new copy of the dictionary with that field removed are returned. >>> StripField('test').to_python({'a': 1, 'test': 2}) (2, {'a': 1}) >>> StripField('test').to_python({}) Traceback (most recent call last): ... 
Invalid: The name 'test' is missing """ __unpackargs__ = ('name',) messages = { 'missing': 'The name %(name)s is missing', } def _to_python(self, valueDict, state): v = valueDict.copy() try: field = v[self.name] del v[self.name] except KeyError: raise Invalid(self.message('missing', state, name=repr(self.name)), valueDict, state) return field, v class StringBool(FancyValidator): # Originally from TurboGears """ Converts a string to a boolean. Values like 'true' and 'false' are considered True and False, respectively; anything in ``true_values`` is true, anything in ``false_values`` is false, case-insensitive). The first item of those lists is considered the preferred form. :: >>> s = StringBoolean() >>> s.to_python('yes'), s.to_python('no') (True, False) >>> s.to_python(1), s.to_python('N') (True, False) >>> s.to_python('ye') Traceback (most recent call last): ... Invalid: Value should be 'true' or 'false' """ true_values = ['true', 't', 'yes', 'y', 'on', '1'] false_values = ['false', 'f', 'no', 'n', 'off', '0'] messages = { "string" : "Value should be %(true)r or %(false)r" } def _to_python(self, value, state): if isinstance(value, (str, unicode)): value = value.strip().lower() if value in self.true_values: return True if not value or value in self.false_values: return False raise Invalid(self.message("string", state, true=self.true_values[0], false=self.false_values[0]), value, state) return bool(value) def _from_python(self, value
, state): if value: return self.true_values[0] else: return self.false_values[0] # Should deprecate: StringBoolean = StringBool class SignedString(FancyValidator): """ Encodes a string into a signed string, and base64 encodes both the
signature string and a random nonce. It is up to you to provide a secret, and to keep the secret handy and consistent. """ messages = { 'malformed': 'Value does not contain a signature', 'badsig': 'Signature is not correct', } secret = None nonce_length = 4 def _to_python(self, value, state): global sha if not sha: import sha assert self.secret is not N