#!/usr/bin/python
# coding=utf-8
'''
@author: sheng
@contact: sinotradition@gmail.com
@copyright: License according to the project license.
'''

NAME = 'guimao33'
SPELL = 'guǐmǎo'
CN = '癸卯'
SEQ = '40'

if __name__ == '__main__':
    pass
from .Commerce import Commerce
from .Transaction import Transaction
import math

import wpilib


class SharpIR2Y0A02:
    '''
    Sharp IR sensor GP2Y0A02YK0F
    Long distance sensor: 20cm to 150cm
    Output is in centimeters
    Distance can be calculated using 62.28*x ^ -1.092
    '''

    def __init__(self, num):
        self.distance = wpilib.AnalogInput(num)

    def getDistance(self):
        '''Returns distance in centimeters'''
        # Don't allow zero/negative values
        v = max(self.distance.getVoltage(), 0.00001)
        d = 62.28 * math.pow(v, -1.092)
        # Constrain output
        return max(min(d, 145.0), 22.5)

    def getVoltage(self):
        return self.distance.getVoltage()


class SharpIRGP2Y0A41SK0F:
    '''
    Sharp IR sensor GP2Y0A41SK0F
    Short distance sensor: 4cm to 40cm
    Output is in centimeters
    '''

    def __init__(self, num):
        self.distance = wpilib.AnalogInput(num)

    def getDistance(self):
        '''Returns distance in centimeters'''
        # Don't allow zero/negative values
        v = max(self.distance.getVoltage(), 0.00001)
        d = 12.84 * math.pow(v, -0.9824)
        # Constrain output
        return max(min(d, 25), 4.0)

    def getVoltage(self):
        return self.distance.getVoltage()


class CombinedSensor:
    def __init__(self, longDist, longOff, shortDist, shortOff):
        self.longDistance = longDist
        self.shortDistance = shortDist
        self.longOff = longOff
        self.shortOff = shortOff

    def getDistance(self):
        long = self.longDistance.getDistance()
        short = self.shortDistance.getDistance()
        #if short < 25:
        #    return short - self.shortOff
        #else:
        return max(long - self.longOff, 0)
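# --- Usage sketch (added for illustration, not part of the original file) ---
# Wiring the two Sharp sensors into a CombinedSensor. The analog channel
# numbers (0, 1) and the mounting offsets (5.0 cm, 2.5 cm) are hypothetical
# placeholders; substitute the values measured on your robot.
if __name__ == '__main__':
    longSensor = SharpIR2Y0A02(0)
    shortSensor = SharpIRGP2Y0A41SK0F(1)
    combined = CombinedSensor(longSensor, 5.0, shortSensor, 2.5)
    print('{:.1f} cm'.format(combined.getDistance()))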
"""Detect zmq version""" # # Copyright (C) PyZMQ Developers # # This file is part of pyzmq, copied and adapted from h5py. # h5py source used under the New BSD license # # h5py: <http://code.google.com/p/h5py/> # # Distributed under the terms of the New BSD License. The full license is in # the file COPYING.BSD, distributed as part of this software. # # # Adapted for use in pycapnp from pyzmq. See https://github.com/zeromq/pyzmq # for original project. import shutil import sys import os import logging import platform from distutils import ccompiler from distutils.ccompiler import get_default_compiler import tempfile from .misc import get_compiler, get_output_error from .patch import patch_lib_paths pjoin = os.path.join # # Utility functions (adapted from h5py: http://h5py.googlecode.com) # def test_compilation(cfile, compiler=None, **compiler_attrs): """Test simple compilation with given settings""" cc = get_compiler(compiler, **compiler_attrs) efile, _ = os.path.splitext(cfile) cpreargs = lpreargs = [] if sys.platform == 'darwin': # use appropriate arch for compiler if platform.architecture()[0] == '32bit': if platform.processor() == 'powerpc': cpu = 'ppc' else: cpu = 'i386' cpreargs = ['-arch', cpu] lpreargs = ['-arch', cpu, '-undefined', 'dynamic_lookup'] else: # allow for missing UB arch, since it will still work: lpreargs = ['-undefined', 'dynamic_lookup'] if sys.platform == 'sunos5': if platform.architecture()[0] == '32bit': lpreargs = ['-m32'] else: lpreargs = ['-m64'] extra_compile_args = compiler_attrs.get('extra_compile_args', []) if os.name != 'nt': extra_compile_args += ['--std=c++14'] extra_link_args = compiler_attrs.get('extra_link_args', []) if cc.compiler_type == 'msvc': extra_link_args += ['/MANIFEST'] objs = cc.compile([cfile], extra_preargs=cpreargs, extra_postargs=extra_compile_args) cc.link_executable(objs, efile, extra_preargs=lpreargs, extra_postargs=extra_link_args) return efile def detect_version(basedir, compiler=None, **compiler_attrs): """Compile, link & execute a test program, in empty directory `basedir`. The C compiler will be updated with any keywords given via setattr. Parameters ---------- basedir : path The location where the test program will be compiled and run compiler : str The distutils compiler key (e.g. 'unix', 'msvc', or 'mingw32') **compiler_attrs : dict Any extra compiler attributes, which will be set via ``setattr(cc)``. Returns ------- A dict of properties for zmq compilation, with the following two keys: vers : tuple The ZMQ version as a tuple of ints, e.g. (2,2,0) settings : dict The compiler options used to compile the test function, e.g. `include_dirs`, `library_dirs`, `libs`, etc. 
""" if compiler is None: compiler = get_default_compiler() cfile = pjoin(basedir, 'vers.cpp') shutil.copy(pjoin(os.path.dirname(__file__), 'vers.cpp'), cfile) # check if we need to link against Realtime Extensions library if sys.platform.startswith('linux'): cc = ccompiler.new_compiler(compiler=compiler) cc.output_dir = basedir if not cc.has_function('timer_create'): if 'libraries' not in compiler_attrs: compiler_attrs['libraries'] = [] compiler_attrs['libraries'].append('rt') cc = get_compiler(compiler=compiler, **compiler_attrs) efile = test_compilation(cfile, compiler=cc) patch_lib_paths(efile, cc.library_dirs) rc, so, se = get_output_error([efile]) if rc: msg = "Error running version detection script:\n%s\n%s" % (so, se) logging.error(msg) raise IOError(msg) handlers = {'vers': lambda val: tuple(int(v) for v in val.split('.'))} props = {} for line in (x for x in so.split('\n') if x): key, val = line.split(':') props[key] = handlers[key](val) return props def test_build(**compiler_attrs): "
""do a test build of libcapnp""" tmp_dir = tempfile.mkdtemp() # line() # info("Configu
re: Autodetecting Cap'n Proto settings...") # info(" Custom Cap'n Proto dir: %s" % prefix) try: detected = detect_version(tmp_dir, None, **compiler_attrs) finally: erase_dir(tmp_dir) # info(" Cap'n Proto version detected: %s" % v_str(detected['vers'])) return detected def erase_dir(path): """Erase directory""" try: shutil.rmtree(path) except Exception: pass
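# --- Usage sketch (added for illustration, not part of the original file) ---
# Running the detection step by hand, e.g. from a setup script. test_build()
# creates a temp dir, compiles vers.cpp there, and returns the parsed
# properties; the 'vers' key holds the detected version tuple.
if __name__ == '__main__':
    props = test_build()
    print("Detected version:", props.get('vers'))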
import torch
import torchvision.transforms as transforms
import torch.utils.data as data
import os
import json
import pickle
import argparse
from PIL import Image
import numpy as np
from utils import Vocabulary


class CocoDataset(data.Dataset):
    def __init__(self, root, anns, vocab, mode='train', transform=None):
        self.root = root
        self.anns = json.load(open(anns))
        self.vocab = pickle.load(open(vocab, 'rb'))
        self.transform = transform
        self.data = [ann for ann in self.anns if ann['split'] == mode]

    def __getitem__(self, index):
        data = self.data
        vocab = self.vocab

        # load image
        path = os.path.join(self.root, data[index]['file_path'])
        img = Image.open(path).convert('RGB')
        if self.transform is not None:
            img = self.transform(img)

        # load caption
        cap = data[index]['final_caption']
        caption = []
        caption.append(vocab('<start>'))
        caption.extend([vocab(word) for word in cap])
        caption.append(vocab('<end>'))
        target = torch.IntTensor(caption)

        return img, target, data[index]['imgid']

    def __len__(self):
        return len(self.data)


def collate_fn(data):
    # sort the data in descending order of caption length
    data.sort(key=lambda x: len(x[1]), reverse=True)
    images, captions, imgids = zip(*data)

    # merge images (from tuple of 3D tensors to 4D tensor)
    images = torch.stack(images, 0)

    # merge captions (from tuple of 1D tensors to padded 2D tensor)
    lengths = [len(cap) for cap in captions]
    targets = torch.zeros(len(captions), max(lengths)).long()
    for i, cap in enumerate(captions):
        end = lengths[i]
        targets[i, :end] = cap[:end]

    return images, targets, lengths, imgids


def get_loader(opt, mode='train', shuffle=True, num_workers=1, transform=None):
    coco = CocoDataset(root=opt.root_dir, anns=opt.data_json,
                       vocab=opt.vocab_path, mode=mode, transform=transform)
    data_loader = torch.utils.data.DataLoader(dataset=coco,
                                              batch_size=opt.batch_size,
                                              shuffle=shuffle,
                                              num_workers=num_workers,
                                              collate_fn=collate_fn)
    return data_loader


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('--root_dir', type=str, default='/home/myunggi/Research/show-and-tell',
                        help="root directory of the project")
    parser.add_argument('--data_json', type=str, default='data/data.json',
                        help='input data list which includes captions and image information')
    parser.add_argument('--vocab_path', type=str, default='data/vocab.pkl',
                        help='vocabulary wrapper')
    parser.add_argument('--crop_size', type=int, default=224, help='image crop size')
    parser.add_argument('--batch_size', type=int, default=128, help='batch size')

    args = parser.parse_args()

    transform = transforms.Compose([
        transforms.RandomCrop(args.crop_size),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225))
    ])

    data_loader = get_loader(args, transform=transform)
    total_iter = len(data_loader)
    # collate_fn yields four values per batch, so unpack all four
    for i, (img, target, lengths, imgids) in enumerate(data_loader):
        print('done')
        o.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
        'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
        'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
        'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
        'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
        'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
        'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
        'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
        'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
    },
    'auth.userlanguage': {
        'Meta': {'unique_together': "(['user', 'language'],)", 'object_name': 'UserLanguage'},
        'follow_requests': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'language': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
        'proficiency': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
        'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"})
    },
    'contenttypes.contenttype': {
        'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
        'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
        'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
    },
    'teams.application': {
        'Meta': {'unique_together': "(('team', 'user', 'status'),)", 'object_name': 'Application'},
        'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
        'history': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
        'note': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
        'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
        'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'applications'", 'to': "orm['teams.Team']"}),
        'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'team_applications'", 'to': "orm['auth.CustomUser']"})
    },
    'teams.partner': {
        'Meta': {'object_name': 'Partner'},
        'admins': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'managed_partners'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.CustomUser']"}),
        'can_request_paid_captions': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '250'}),
        'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
    },
    'teams.project': {
        'Meta': {'unique_together': "(('team', 'name'), ('team', 'slug'))", 'object_name': 'Project'},
        'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
        'description': ('django.db.models.fields.TextField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
        'guidelines': ('django.db.models.fields.TextField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'modified': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'}),
        'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
        'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
        'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'blank': 'True'}),
        'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Team']"}),
        'workflow_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
    },
    'teams.team': {
        'Meta': {'ordering': "['name']", 'object_name': 'Team'},
        'applicants': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'applicated_teams'", 'symmetrical': 'False', 'through': "orm['teams.Application']", 'to': "orm['auth.CustomUser']"}),
        'application_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
        'auth_provider_code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '24', 'blank': 'True'}),
        'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
        'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
        'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
        'header_html_text': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
        'highlight': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
        'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
        'is_moderated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
        'is_visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
        'last_notification_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
        'logo': ('utils.amazon.fields.S3EnabledImageField', [], {'max_length': '100', 'blank': 'True'}),
        'max_tasks_per_member': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
        'membership_policy': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
        'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '250'}),
        'notify_interval': ('django.db.models.fields.CharField', [], {'default': "'D'", 'max_length': '1'}),
        'page_content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
        'partner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'teams'", 'null': 'True', 'to': "orm['teams.Partner']"}),
        'points': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
        'projects_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
        'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
        'subtitle_policy': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
        'task_assign_policy': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
        'task_expiration': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
        'third_party_accounts': ('django.db.models.fields.related.ManyToMany
# -*- encoding: utf-8 -*-
import mock
import os
from shutil import rmtree
from tempfile import mkdtemp

from django.test import TestCase
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import CommandError
from django.test.utils import override_settings
from django.template.base import TemplateDoesNotExist

from paperclip.models import Attachment

from geotrek.common.models import Organism, FileType
from geotrek.common.parsers import ExcelParser, AttachmentParserMixin


class OrganismParser(ExcelParser):
    model = Organism
    fields = {'organism': 'nOm'}


class OrganismEidParser(ExcelParser):
    model = Organism
    fields = {'organism': 'nOm'}
    eid = 'organism'


class AttachmentParser(AttachmentParserMixin, OrganismEidParser):
    non_fields = {'attachments': 'photo'}


class ParserTests(TestCase):
    def test_bad_parser_class(self):
        with self.assertRaises(CommandError) as cm:
            call_command('import', 'geotrek.common.DoesNotExist', '', verbosity=0)
        self.assertEqual(unicode(cm.exception),
                         u"Failed to import parser class 'geotrek.common.DoesNotExist'")

    def test_bad_filename(self):
        with self.assertRaises(CommandError) as cm:
            call_command('import', 'geotrek.common.tests.test_parsers.OrganismParser',
                         'find_me/I_am_not_there.shp', verbosity=0)
        self.assertEqual(unicode(cm.exception),
                         u"File does not exists at: find_me/I_am_not_there.shp")

    def test_create(self):
        filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
        call_command('import', 'geotrek.common.tests.test_parsers.OrganismParser',
                     filename, verbosity=0)
        self.assertEqual(Organism.objects.count(), 1)
        organism = Organism.objects.get()
        self.assertEqual(organism.organism, u"Comité Théodule")

    def test_duplicate_without_eid(self):
        filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
        call_command('import', 'geotrek.common.tests.test_parsers.OrganismParser',
                     filename, verbosity=0)
        call_command('import', 'geotrek.common.tests.test_parsers.OrganismParser',
                     filename, verbosity=0)
        self.assertEqual(Organism.objects.count(), 2)

    def test_unmodified_with_eid(self):
        filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
        call_command('import', 'geotrek.common.tests.test_parsers.OrganismEidParser',
                     filename, verbosity=0)
        call_command('import', 'geotrek.common.tests.test_parsers.OrganismEidParser',
                     filename, verbosity=0)
        self.assertEqual(Organism.objects.count(), 1)

    def test_updated_with_eid(self):
        filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
        filename2 = os.path.join(os.path.dirname(__file__), 'data', 'organism2.xls')
        call_command('import', 'geotrek.common.tests.test_parsers.OrganismEidParser',
                     filename, verbosity=0)
        call_command('import', 'geotrek.common.tests.test_parsers.OrganismEidParser',
                     filename2, verbosity=0)
        self.assertEqual(Organism.objects.count(), 2)
        organisms = Organism.objects.order_by('pk')
        self.assertEqual(organisms[0].organism, u"Comité Théodule")
        self.assertEqual(organisms[1].organism, u"Comité Hippolyte")

    def test_report_format_text(self):
        parser = OrganismParser()
        self.assertRegexpMatches(parser.report(), '0/0 lines imported.')
        self.assertNotRegexpMatches(parser.report(),
                                    '<div id=\"collapse-\$celery_id\" class=\"collapse\">')

    def test_report_format_html(self):
        parser = OrganismParser()
        self.assertRegexpMatches(parser.report(output_format='html'),
                                 '<div id=\"collapse-\$celery_id\" class=\"collapse\">')

    def test_report_format_bad(self):
        parser = OrganismParser()
        with self.assertRaises(TemplateDoesNotExist):
            parser.report(output_format='toto')


@override_settings(MEDIA_ROOT=mkdtemp('geotrek_test'))
class AttachmentParserTests(TestCase):
    def setUp(self):
        self.filetype = FileType.objects.create(type=u"Photographie")

    def tearDown(self):
        rmtree(settings.MEDIA_ROOT)

    @mock.patch('requests.get')
    def test_attachment(self, mocked):
        mocked.return_value.status_code = 200
        mocked.return_value.content = ''
        filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
        call_command('import', 'geotrek.common.tests.test_parsers.AttachmentParser',
                     filename, verbosity=0)
        organism = Organism.objects.get()
        attachment = Attachment.objects.get()
        self.assertEqual(attachment.content_object, organism)
        self.assertEqual(attachment.attachment_file.name,
                         'paperclip/common_organism/{pk}/titi.png'.format(pk=organism.pk))
        self.assertEqual(attachment.filetype, self.filetype)

    @mock.patch('requests.get')
    def test_attachment_not_updated(self, mocked):
        mocked.return_value.status_code = 200
        mocked.return_value.content = ''
        filename = os.path.join(os.path.dirname(__file__), 'data', 'organism.xls')
        call_command('import', 'geotrek.common.tests.test_parsers.AttachmentParser',
                     filename, verbosity=0)
        call_command('import', 'geotrek.common.tests.test_parsers.AttachmentParser',
                     filename, verbosity=0)
        self.assertEqual(mocked.call_count, 1)
        self.assertEqual(Attachment.objects.count(), 1)
def override_imports_for_non_rpi():
    serial.Serial = lambda *arg: Dummy()
    global Driver
    Driver = lambda *arg: Dummy()
    global Sup800fTelemetry
    Sup800fTelemetry = lambda *arg: Dummy()
    global switch_to_nmea_mode
    switch_to_nmea_mode = lambda *arg: Dummy()

    # Ignore messages
    drop = lambda message: None
    drop2 = lambda: consume_messages(config.COMMAND_FORWARDED_EXCHANGE, drop)
    thread = threading.Thread(target=drop2)
    thread.name = config.COMMAND_FORWARDED_EXCHANGE
    thread.start()


try:
    from control.button import Button
except SystemError:
    print('Disabling button because not running on Raspberry Pi')
    override_imports_for_non_rpi()


THREADS = []
POPEN = None
DRIVER = None
EMIT_INITIALIZED = False


class CherryPyServer(threading.Thread):
    """Runs the various web apps in a thread."""

    def __init__(self, port, address, telemetry, waypoint_generator):
        super(CherryPyServer, self).__init__()
        self.name = self.__class__.__name__

        # Web monitor
        config = MonitorApp.get_config(os.path.abspath(os.getcwd()))
        status_app = cherrypy.tree.mount(
            MonitorApp(telemetry, waypoint_generator, port),
            '/',
            config
        )
        cherrypy.config.update({
            'server.socket_host': address,
            'server.socket_port': port,
            'server.ssl_module': 'builtin',
            'server.ssl_certificate': 'control/web_telemetry/cert.pem',
            'server.ssl_private_key': 'control/web_telemetry/key.pem',
            'engine.autoreload.on': False,
        })
        WebSocketPlugin(cherrypy.engine).subscribe()
        cherrypy.tools.websocket = WebSocketTool()

        # Web telemetry
        config = WebTelemetryStatusApp.get_config(os.path.abspath(os.getcwd()))
        web_telemetry_app = cherrypy.tree.mount(
            WebTelemetryStatusApp(telemetry, port),
            '/telemetry',
            config
        )

        # OMG, shut up CherryPy, nobody cares about your problems
        for app in (status_app, web_telemetry_app, cherrypy):
            app.log.access_log.setLevel(logging.ERROR)
            app.log.error_log.setLevel(logging.ERROR)

    def run(self):
        """Runs the thread and server in a thread."""
        cherrypy.engine.start()

    @staticmethod
    def kill():
        """Stops the thread and server."""
        cherrypy.engine.exit()


def terminate(signal_number, stack_frame):  # pylint: disable=unused-argument
    """Terminates the program. Used when a signal is received."""
    print(
        'Received signal {signal_number}, quitting'.format(
            signal_number=signal_number
        )
    )
    if POPEN is not None and POPEN.poll() is None:
        print('Killing image capture')
        try:
            POPEN.kill()
        except OSError:
            pass

    DRIVER.drive(0.0, 0.0)
    time.sleep(0.2)
    try:
        with open('/dev/pi-blaster', 'w') as blaster:
            time.sleep(0.1)
            blaster.write(
                '{pin}={throttle}\n'.format(
                    pin=THROTTLE_GPIO_PIN,
                    throttle=THROTTLE_NEUTRAL_US
                )
            )
            time.sleep(0.1)
            blaster.write(
                '{pin}={steering}\n'.format(
                    pin=STEERING_GPIO_PIN,
                    steering=STEERING_NEUTRAL_US
                )
            )
            time.sleep(0.1)
    except IOError:
        pass

    for socket in os.listdir(os.sep.join(('.', 'messaging', 'sockets'))):
        MessageProducer(socket).kill()
    time.sleep(0.1)

    for thread in THREADS:
        thread.kill()
        thread.join()

    # Some threads should still be active
    expected = set(('MainThread', '_TimeoutMonitor'))
    actives = set((thread.name for thread in threading.enumerate()))
    if not (actives <= expected):
        print('Trying to exit while {} threads are still active!'.format(
            threading.active_count()
        ))
        for thread in threading.enumerate():
            print(thread.name)
    sys.exit(0)


def get_configuration(value, default):
    """Returns a system configuration value."""
    if value in os.environ:
        return os.environ[value]
    return default


def start_threads(
        waypoint_generator,
        logger,
        web_socket_handler,
        max_throttle,
        kml_file_name,
):
    """Runs everything."""
    logger.info('Creating Telemetry')
    telemetry = Telemetry(kml_file_name)
    telemetry_dumper = TelemetryDumper(
        telemetry,
        waypoint_generator,
        web_socket_handler
    )
    logger.info('Done creating Telemetry')

    global DRIVER
    DRIVER = Driver(telemetry)
    DRIVER.set_max_throttle(max_throttle)

    logger.info('Setting SUP800F to NMEA mode')
    serial_ = serial.Serial('/dev/ttyAMA0', 115200)
    serial_.setTimeout(1.0)
    for _ in range(10):
        serial_.readline()
    try:
        switch_to_nmea_mode(serial_)
    except:  # pylint: disable=W0702
        logger.error('Unable to set mode')
    for _ in range(10):
        serial_.readline()
    logger.info('Done')

    # The following objects must be created in order, because of message
    # exchange dependencies:
    #   sup800f_telemetry: reads from command forwarded
    #   command: reads from command, writes to command forwarded
    #   button: writes to command
    #   cherry_py_server: writes to command
    # TODO(2016-08-21) Have something better than sleeps to work around race
    # conditions
    logger.info('Creating threads')
    sup800f_telemetry = Sup800fTelemetry(serial_)
    time.sleep(0.5)
    command = Command(telemetry, DRIVER, waypoint_generator)
    time.sleep(0.5)
    button = Button()

    port = int(get_configuration('PORT', 8080))
    address = get_configuration('ADDRESS', '0.0.0.0')
    cherry_py_server = CherryPyServer(
        port,
        address,
        telemetry,
        waypoint_generator
    )
    time.sleep(0.5)

    global THREADS
    THREADS += (
        button,
        cherry_py_server,
        command,
        sup800f_telemetry,
        telemetry_dumper,
    )
    for thread in THREADS:
        thread.start()
    logger.info('Started all threads')

    # Use a fake timeout so that the main thread can still receive signals
    sup800f_telemetry.join(100000000000)
    # Once we get here, sup800f_telemetry has died and there's no point in
    # continuing because we're not receiving telemetry messages any more, so
    # stop the command module
    command.stop()
    command.join(100000000000)
    cherry_py_server.kill()
    cherry_py_server.join(100000000000)
    button.kill()
    button.join(100000000000)


def make_parser():
    """Builds and returns an argument parser."""
    parser = argparse.ArgumentParser(
        description='Command and control software for the Sparkfun AVC.'
    )
    now = datetime.datetime.now()
    parser.add_argument(
        '-l', '--log',
        dest='log',
        help='The file to log to.',
        default=(
            '/data/sparkfun-{date}.log'.format(
                date=datetime.datetime.strftime(now, '%Y-%m-%d_%H-%M-%S')
            )
        ),
        type=str
    )
    parser.add_argument(
        '--video',
        dest='video',
        help='The video file name.',
        default=(
            '/data/video-{date}.h264'.format(
                date=datetime.datetime.strftime(now, '%Y-%m-%d_%H-%M-%S')
            )
        ),
        type=str
    )
    parser.add_argument(
        '-v', '--verbose',
        dest='verbose',
        help='Increase output.',
        action='store_true'
    )
    parser.add_argument(
        '-k', '--kml',
        dest='kml_file',
        help='The KML file from which to load waypoints.',
        default=None,
        type=str,
    )
    parser.add_argument(
        '--max-throttle',
        dest='max_throttle',
        help='The max throttle to drive at.',
        default=1.0,
        type=float,
    )
    parser.add_argument(
        '--chase',
        dest='chase',
"Add a movie to Plex." import sys from pathlib import Path from argparse import ArgumentParser from tkinter import filedialog, messagebox, simpledialog, Tk, Frame, Label from tkinter.ttk import Combobox, Button class FeaturettePicker: FEATURETTES = { "Behind the Scenes": "behindthescenes", "Deleted Scenes": "deleted", "Featurette": "featurette", "Interview": "interview", "Scene": "scene", "Short": "short", "Trailer": "trailer", "Other": "other", } def __init__(self, parent, file): self.file = Path(file) self.label = Label(parent, text=self.file.name, justify='left') # can also position using "grid" instead of "pack", but no both self.label.pack(fill="x", padx=5, pady=5) self.parent = parent self.parent.bind("<Return>", self.ok) self.parent.bind("<Escape>", self.cancel) self.box = Frame(parent) self.ok_button = Button(self.box, text="Add", command=self.ok, default='active') self.ok_button.pack(padx=5, pady=5, side='right') self.cancel_button = Button(self.box, text="Cancel", command=self.cancel) self.cancel_button.pack(padx=5, pady=5, side='right') self.combo = Combobox(parent, values=[*self.FEATURETTES]) self.combo.pack(fill="x", padx=5, pady=5) self.box.pack() self.result = None def run(self): self.parent.mainloop() return self.result def cancel(self): try: self.parent.withdraw() finally: self.parent.quit() def ok(self): self.result = self.combo.get() self.cancel() def get_parser(): parser = ArgumentParser() parser.add_argument('infile') return parser def main(infile): try: tkr = Tk() tkr.withdraw() src = Path(infile) root = Path(src.anchor) outname = simpledialog\
.askstring("Movie Name", f"Selected: {src}.\n\nName of movie (and year in parens):", initialvalue=src.name, parent=tkr) or sys.exit() dst = root/'media-library'/'movies'/outname ext = simpledialog\ .askstring("Extension", "File extension (If you want to specify a variant, do so h
ere by prepending " "it to the extension, e.g. ` - [OldVersion].mp4`):", initialvalue=src.suffix, parent=tkr) out = dst/(outname+ext) files = [] while messagebox.askyesno("Special Features", "Add more files as special features?"): for f in filedialog.askopenfilenames(parent=tkr, initialdir="."): fsrc = Path(f) ftype = FeaturettePicker(Tk(), fsrc).run() if ftype is None: return name = simpledialog.askstring("Featurette Name", "Featurette Name:", initialvalue=fsrc.name[:-len(fsrc.suffix)], parent=tkr) if name is None: return fext = simpledialog.askstring("Extension", f"File extension for {fsrc.name}:", initialvalue=fsrc.suffix, parent=tkr) fdst = dst/f"{name}-{FeaturettePicker.FEATURETTES[ftype]}{fext}" files.append((fsrc, fdst)) msg = ("\n\nFeatures:\n\n"+"\n".join(f"{s} -> {d.name}" for (s, d) in files)+"\n\n") if files else "" if not messagebox.askokcancel("Proceed?", f"Ready to link {src.name} -> {out}. {msg}Proceed?", parent=tkr): return dst.mkdir(exist_ok=True) for s, d in [(src, out)] + files: if d.exists(): overwrite = messagebox.askyesnocancel("Overwrite?", f"{d} exists. Overwrite?") if overwrite is None: return if overwrite: d.unlink() else: continue s.link_to(d) messagebox.showinfo("Success", "Success.") except Exception as e: messagebox.showerror("Fatal Error", f"Fatal error: {e}") raise if __name__ == "__main__": main(get_parser().parse_args().infile)
#!/usr/bin/env python3
import rainbow
import hashlib
import string
import time
import random


def sha256(plaintext):
    """SHA-256 hash function

    Precondition: Input plaintext as string
    Postcondition: Returns hash as string
    """
    return hashlib.sha256(bytes(plaintext, 'utf-8')).hexdigest()


def reduce_lower(n):
    """Returns a reduction function which generates an n-digit lowercase
    password from a hash
    """
    def result(hash, col):
        """Reduction function

        Precondition: hash is H(previousPlaintext)
        Postcondition: returns randomly distributed n-digit lowercase
        plaintext password
        """
        plaintextKey = (int(hash[:9], 16) ^ col) % (26 ** n)
        plaintext = ""
        for _ in range(n):
            plaintext += string.ascii_lowercase[plaintextKey % 26]
            plaintextKey //= 26
        return plaintext
    return result


def gen_lower(n):
    """Returns a function which generates a random n-digit lowercase password"""
    def result():
        password = ""
        for _ in range(n):
            password += random.choice(string.ascii_lowercase)
        return password
    return result


def test(table, hash_function, gen_password_function, password=""):
    """Precondition: Input a function which generates a random password, or
    input no arguments to generate a random password
    Postcondition: Cracks H(password) and prints elapsed time
    """
    if password == "":
        password = gen_password_function()
    print("Cracking password: {0}\nH(password): {1}".format(password, hash_function(password)))
    cracked = table.crack(hash_function(password))
    if cracked:
        print("Success! Password: {0}".format(cracked))
        return True
    else:
        print("Unsuccessful :(")
        return False


def bulk_test(table, hash_function, gen_password_function, numTests):
    """Tests random passwords multiple times and prints success rate and
    average crack time."""
    start = time.time()
    numSuccess = 0
    for i in range(numTests):
        print("\nTest {0} of {1}".format(i + 1, numTests))
        numSuccess += test(table, hash_function, gen_password_function)
    print("""\n{0} out of {1} random hashes were successful!\n
Average time per hash (including failures): {2} secs."""
          .format(numSuccess, numTests, (time.time() - start) / numTests))


table = rainbow.RainbowTable(sha256, reduce_lower(4), gen_lower(4))
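# --- Usage sketch (added for illustration, not part of the original file) ---
# Exercising the table built above with this file's own helpers; the test
# count of 10 is an arbitrary placeholder.
if __name__ == '__main__':
    bulk_test(table, sha256, gen_lower(4), 10)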
#!/usr/bin/env python3

# NOTE: this example requires PyAudio because it uses the Microphone class

import speech_recognition as sr


# this is called from the background thread
def callback(recognizer, audio):
    # received audio data, now we'll recognize it using Google Speech Recognition
    try:
        # for testing purposes, we're just using the default API key
        # to use another API key, use `r.recognize_google(audio, key="GOOGLE_SPEECH_RECOGNITION_API_KEY")`
        # instead of `r.recognize_google(audio)`
        # print("Google Speech Recognition thinks you said " + recognizer.recognize_google(audio))
        r.recognize_google(audio, key="")
    except sr.UnknownValueError:
        print("Google Speech Recognition could not understand audio")
    except sr.RequestError as e:
        print("Could not request results from Google Speech Recognition service; {0}".format(e))


r = sr.Recognizer()
m = sr.Microphone()
with m as source:
    # we only need to calibrate once, before we start listening
    r.adjust_for_ambient_noise(source)

# start listening in the background (note that we don't have to do this inside a `with` statement)
stop_listening = r.listen_in_background(m, callback)
# `stop_listening` is now a function that, when called, stops background listening

# do some other computation for 5 seconds, then stop listening and keep doing other computations
import time
for _ in range(50):
    time.sleep(0.1)  # we're still listening even though the main thread is doing other things

# calling this function requests that the background listener stop listening
stop_listening()

while True:
    time.sleep(0.1)
WHERE id = ?", (tenant_uuid, ) ) result = cur.fetchone() except sqlite3.Error as e: logging.error("Error %s when querying from tenants table for tenant_id %s", e, tenant_uuid) return str(e), None, None if result: tenant_name = result[0] logging.debug("Found tenant_uuid %s, tenant_name %s", tenant_uuid, tenant_name) return None, tenant_uuid, tenant_name def get_privileges(tenant_uuid, datastore): """ Return privileges for given (tenant_uuid, datastore) pair by querying the auth DB. """ _auth_mgr = get_auth_mgr() privileges = [] logging.debug("get_privileges tenant_uuid=%s datastore=%s", tenant_uuid, datastore) try: cur = _auth_mgr.conn.execute( "SELECT * FROM privileges WHERE tenant_id = ? and datastore = ?", (tenant_uuid, datastore) ) privileges = cur.fetchone() except sqlite3.Error as e: logging.error("Error %s when querying privileges table for tenant_id %s and datastore %s", e, tenant_uuid, datastore) return str(e), None return None, privileges def has_privilege(privileges, type): """ Check the privileges has the specific type of privilege set. """ if not privileges: return False logging.debug("%s=%d", type, privileges[type]) return privileges[type] def get_vol_size(opts): """ get volume size. """ if not opts or not opts.has_key(SIZE): logging.warning("Volume size not specified") return kv.DEFAULT_DISK_SIZE return opts[SIZE].upper() def check_max_volume_size(opts, privileges): """ Check whether the size of the volume to be created exceeds the max volume size specified in the privileges. """ if privileges: vol_size_in_MB = convert.convert_to_MB(get_vol_size(opts)) max_vol_size_in_MB = privileges[auth_data_const.COL_MAX_VOLUME_SIZE] logging.debug("vol_size_in_MB=%d max_vol_size_in_MB=%d", vol_size_in_MB, max_vol_size_in_MB) # if max_vol_size_in_MB which read from DB is 0, which means # no max_vol_size limit, function should return True if max_vol_size_in_MB == 0: return True return vol_size_in_MB <= max_vol_size_in_MB else: # no privileges return True def get_total_storage_used(tenant_uuid, datastore): """ Return total storage used by (tenant_uuid, datastore) by querying auth DB. """ _auth_mgr = get_auth_mgr() total_storage_used = 0 try: cur = _auth_mgr.conn.execute( "SELECT SUM(volume_size) FROM volumes WHERE tenant_id = ? and datastore = ?", (tenant_uuid, datastore) ) except sqlite3.Error as e: logging.error("Error %s when querying storage table for tenant_id %s and datastore %s", e, tenant_uuid, datastore) return str(e), total_storage_used result = cur.fetchone() if result: if result[0]: total_storage_used = result[0] logging.debug("total storage used for (tenant %s datastore %s) is %s MB", tenant_uuid, datastore, total_storage_used) return None, total_storage_used def check_usage_quota(opts, tenant_uuid, datastore, privileges): """ Check if the volume can be created without violating the quota. """ if privileges: vol_size_in_MB = convert.convert_to_MB(get_vol_size(opts)) error_info, total_storage_used = get_total_storage_used(tenant_uuid, datastore) if error_info: # cannot get the total_storage_used, to be safe, return False return False usage_quota = privileges[auth_data_const.COL_USAGE_QUOTA] # if usage_quota which read from DB is 0, which means # no usage_quota, function should return True if usage_quota == 0: return True return vol_size_in_MB + total_storage_used <= usage_quota else: # no privileges return True def check_privileges_for_command(cmd, opts, tenant_uuid, datastore, privileges): """ Check whether the (tenant_uuid, datastore) has the privileges to run the given command. 
""" result = None cmd_need_mount_privilege = [CMD_ATTACH, CMD_DETACH] if cmd in cmd_need_mount_privilege: if not has_privilege(privileges, auth_data_const.COL_MOUNT_VOLUME): result = "No mount privilege" if cmd == CMD_CREATE: if not has_privilege(privileges, auth_data_const.COL_CREATE_VOLUME): result = "No create privilege" if not check_max_volume_size(opts, privileges): result = "volume size exceeds the max volume size limit" if not check_usage_quota(opts, tenant_uuid, datastore, privileges): result = "The total volume size exceeds the usage quota" if cmd == CMD_REMOVE: if not has_privilege(privileges, auth_data_const.COL_DELETE_VOLUME): result = "No delete privilege" return result def tables_exist(): """ Check tables needed for authorization exist or not. """ _auth_mgr = get_auth_mgr() try: cur = _auth_mgr.conn.execute("SELECT name FROM sqlite_master WHERE type = 'table' and name = 'tenants';") result = cur.fetchall() except sqlite3.Error as e: logging.error("Error %s when checking whether table tenants exists or not", e) return str(e), False if not result: error_info = "table tenants does not exist" logging.error(error_info) return error_info, False try: cur = _auth_mgr.conn.execute("SELECT name FROM sqlite_master WHERE type = 'table' and name = 'vms';") result = cur.fetchall() except sqlite3.Error as e: logging.error("Error %s when checking whether table vms exists or not", e) return str(e), False if not result: error_info = "table vms does not exist" logging.error(error_info) return error_info, False try: cur = _auth_mgr.conn.execute("SELECT name FROM sqlite_master WHERE type = 'table' and name = 'privileges';")
result = cur.fetchall() except sqlite3.Error as e: logging.error("Error %s when checking whether table privileges exists or not", e) return str(e), False if not result: error_info = "table privileges does not exist" logging.error(error_info) return error_info, False try: cur = _auth_mgr.conn.execute("SELECT name FROM sqlite_master WHERE type = 'table' and name = 'volumes';") result = cur.fetc
hall() except sqlite3.Error as e: logging.error("Error %s when checking whether table volumes exists or not", e) return str(e), False if not result: error_info = "table volumes does not exist" logging.error(error_info) return error_info, False return None, True def authorize(vm_uuid, datastore, cmd, opts): """ Check whether the command can be run on this VM. Return value: result, tenant_uuid, tenant_name - result: return None if the command can be run on this VM, otherwise, return corresponding error message - tenant_uuid: If the VM belongs to a tenant, return tenant_uuid, otherwise, return None - tenant_name: If the VM belongs to a tenant, return tenant_name, otherwise, return None """ logging.debug("Authorize: vm_uuid=%s", vm_uuid) logging.debug("Authorize: datastore=%s", datastore) logging.debug("Authorize: cmd=%s", cmd) logging.debug("Authorize: opt=%s", opts) try: get_auth_mgr() except auth_data.DbConnectionError as e: error_info = "Failed to connect auth DB({0})".format(e) return error_info, None, None # If table "tenants", "vms", "privileges" or "volumes" does not exist # don't need auth check if not tables_exist(): logging.error("Required tables in auth db do not exist") error_info = "Require
import unittest
from fractions import Fraction as F

from abcesac.music import *


class KeyTestCase(unittest.TestCase):

    def test_get_notes(self):
        got = Key('C').get_notes()
        want = ['C', 'D', 'E', 'F', 'G', 'A', 'B']
        self.assertEquals(got, want)

        got = Key('D').get_notes()
        want = ['D', 'E', 'F#', 'G', 'A', 'B', 'C#']
        self.assertEquals(got, want)

        got = Key('E').get_notes()
        want = ['E', 'F#', 'G#', 'A', 'B', 'C#', 'D#']
        self.assertEquals(got, want)

        got = Key('Eb').get_notes()
        want = ['Eb', 'F', 'G', 'Ab', 'Bb', 'C', 'D']
        self.assertEquals(got, want)

    def test_interval(self):
        got = Key('C').interval('C', 5)
        self.assertEquals(got, 'A')
        got = Key('C').interval('B', 5)
        self.assertEquals(got, 'G')
        got = Key('C').interval('B', 6)
        self.assertEquals(got, 'A')
        got = Key('G#').interval('B', 6)
        self.assertEquals(got, 'A#')

    def test_length_value(self):
        got = length_value(F(1, 8))
        self.assertEquals(got, (F(1, 8), 0))
        got = length_value(F(2, 8))
        self.assertEquals(got, (F(1, 4), 0))
        got = length_value(F(3, 8))
        self.assertEquals(got, (F(1, 4), 1))
        got = length_value(F(4, 8))
        self.assertEquals(got, (F(1, 2), 0))
        got = length_value(F(5, 8))
        self.assertEquals(got, (F(1, 2), 1))
        got = length_value(F(6, 8))
        self.assertEquals(got, (F(1, 2), 1))
        got = length_value(F(7, 8))
        self.assertEquals(got, (F(1, 2), 2))
        got = length_value(F(8, 8))
        self.assertEquals(got, (F(1, 1), 0))
        got = length_value(F(9, 8))
        self.assertEquals(got, (F(1, 1), 1))
        got = length_value(F(10, 8))
        self.assertEquals(got, (F(1, 1), 1))
        got = length_value(F(16, 8))
        self.assertEquals(got, (F(2, 1), 0))

    def test_tuplets(self):
        tuplet = Tuplet(3, 2)
        tuplet.add_note(Note(name='C', length=F(1, 8)))
        tuplet.add_note(Note(name='C', length=F(1, 8)))
        tuplet.add_note(Note(name='C', length=F(1, 8)))
        self.assertEquals(tuplet.length, F(2, 8))

        tuplet = Tuplet(3, 2)
        tuplet.add_note(Note(name='C', length=F(1, 8)))
        tuplet.add_note(Note(name='C', length=F(1, 8)))
        tuplet.add_note(Note(name='C', length=F(1, 16)))
        tuplet.add_note(Note(name='C', length=F(1, 16)))
        self.assertEquals(tuplet.length, F(2, 8))

        tuplet = Tuplet(5, 3)
        tuplet.add_note(Note(name='C', length=F(1, 8)))
        tuplet.add_note(Note(name='C', length=F(1, 8)))
        tuplet.add_note(Note(name='C', length=F(1, 8)))
        tuplet.add_note(Note(name='C', length=F(1, 8)))
        tuplet.add_note(Note(name='C', length=F(1, 16)))
        tuplet.add_note(Note(name='C', length=F(1, 16)))
        self.assertEquals(tuplet.length, F(3, 8))

        tuplet = Tuplet(7, 3)
        for _ in range(7):
            tuplet.add_note(Note(name='C', length=F(1, 16)))
        self.assertEquals(tuplet.length, F(3, 16))

    def test_modes(self):
        got = Key('C').mode_scale('major')
        want = ['C', 'D', 'E', 'F', 'G', 'A', 'B']
        self.assertEquals(got, want)

        got = Key('C').mode_scale('ionian')
        want = ['C', 'D', 'E', 'F', 'G', 'A', 'B']
        self.assertEquals(got, want)

        got = Key('C').mode_scale('minor')
        want = ['C', 'D', 'Eb', 'F', 'G', 'Ab', 'Bb']
        self.assertEquals(got, want)

        got = Key('E').mode_scale('minor')
        want = ['E', 'F#', 'G', 'A', 'B', 'C', 'D']
        self.assertEquals(got, want)

        got = Key('C').mode_scale('dorian')
        want = ['C', 'D', 'Eb', 'F', 'G', 'A', 'Bb']
        self.assertEquals(got, want)

        got = Key('C').mode_scale('phrygian')
        want = ['C', 'Db', 'Eb', 'F', 'G', 'Ab', 'Bb']
        self.assertEquals(got, want)

        got = Key('C').mode_scale('lydian')
        want = ['C', 'D', 'E', 'F#', 'G', 'A', 'B']
        self.assertEquals(got, want)

        got = Key('C').mode_scale('mixolydian')
        want = ['C', 'D', 'E', 'F', 'G', 'A', 'Bb']
        self.assertEquals(got, want)

        got = Key('C').mode_scale('aeolian')
        want = ['C', 'D', 'Eb', 'F', 'G', 'Ab', 'Bb']
        self.assertEquals(got, want)

        got = Key('C').mode_scale('locrian')
        want = ['C', 'Db', 'Eb', 'F', 'Gb', 'Ab', 'Bb']
        self.assertEquals(got, want)

        got = Key('D').mode_scale('major')
        want = ['D', 'E', 'F#', 'G', 'A', 'B', 'C#']
        self.assertEquals(got, want)


if __name__ == '__main__':
    unittest.main()
"Legend creation helper function.""" proxies = [] descriptions = [] for label, color in items: if label == 'column-index': continue if name == 'Data Type': line = mpl.sns.mpl.lines.Line2D([], [], linestyle='none', color=color, marker='o') else: line = mpl.sns.mpl.lines.Line2D([], [], linestyle='-', color=color) proxies.append(line) descriptions.append(label) lgnd = ax.legend(proxies, descriptions, title=name, loc=loc, frameon=True) lgnd_frame = lgnd.get_frame() lgnd_frame.set_facecolor('white') lgnd_frame.set_edgecolor('black') return lgnd, ax info = self.info() info = info[info['type'] != '-'] info['size'] *= 13000/info['size'].max() info['size'] += 2000 node_size_dict = info['size'].to_dict() # Can pull all nodes from keys node_class_name_dict = info['type'].to_dict() node_type_dict = {} # Values are tuple of "underlying" type and color node_conn_dict = {} # Values are tuple of connection type and color items = self._data().items() for k0, v0 in items: n0 = k0[1:] if k0.startswith('_') else k0 node_type_dict[n0] = get_node_type_color(v0) for k1, v1 in items: if v0 is v1: continue n1 = k1[1:] if k1.startswith('_') else k1 for name in v0.index.names: # Check the index of data object 0 against the index if name is None: # and columns of data object 1 continue if name in v1.index.names: contyp = 'index-index' node_conn_dict[(n0, n1)] = (contyp, conn[contyp]) node_conn_dict[(n1, n0)] = (contyp, conn[contyp]) for col in v1.columns: # Catches index "atom", column "atom1"; does not catch atom10 if name == col or (name == col[:-1] and col[-1].isdigit()): contyp = 'index-column' node_conn_dict[(n0, n1)] = (contyp, conn[contyp]) node_conn_dict[(n1, n0)] = ('column-index', conn[contyp]) g = nx.Graph() g.add_nodes_from(node_size_dict.keys()) g.add_edges_from(node_conn_dict.keys()) node_sizes = [node_size_dict[node] for node in g.nodes()] node_labels = {node: ' {}\n({})'.format(node, node_class_name_dict[node]) for node in g.nodes()} node_colors = [node_type_dict[node][1] for node in g.nodes()] edge_colors = [node_conn_dict[edge][1] for edge in g.edges()] # Build the figure and legends if fig: fig, ax = plt.subplots(1, figsize=figsize) ax.axis('off') pos = nx.spring_layout(g) nx.draw_networkx_nodes(g, pos=pos, ax=ax, alpha=0.7, node_size=node_sizes, node_color=node_colors) nx.draw_networkx_labels(g, pos=pos, labels=node_labels, font_size=17, font_weight='bold', ax=ax) nx.draw_networkx_edges(g, pos=pos, edge_color=edge_colors, width=2, ax=ax) l1, ax = legend(set(node_conn_dict.values()), 'Connection', (1, 0), ax) _, ax = legend(set(node_type_dict.values()), 'Data Type', (1, 0.3), ax) fig.gca().add_artist(l1) g.edge_types = {node: value[0] for node, value in node_conn_dict.items()} # Attached connection information to network graph return g def save(self, path=None, complevel=1, complib='zlib'): """ Save the container as an HDF5 archive. Args: path (str): Path where to save the container """ if path is None: path = self.hexuid + '.hdf5' elif os.path.isdir(path): path += os.sep + self.hexuid + '.hdf5' elif not (path.endswith('.hdf5') or path.endswith('.hdf')): raise ValueError('File path must have a ".hdf5" or ".hdf" extension.') with pd.HDFStore(path, 'w', complevel=complevel, complib=complib) as store: store['kwargs'] = pd.Series() store.get_storer('kwargs').attrs.metadata = self._rel() fc = 0 # Field counter (see special handling of fields below) for name, data in self._data().items(): if hasattr(data, '_revert_categories'): data._revert_categories()
name = name[1:] if name.startswith('_') else name if isinstance(data, Field): # Fields are handled separately fname = 'FIELD{}_'.format(fc) + name + '/' store[fname + 'data'] = pd.DataFrame(data) for i, fi
eld in enumerate(data.field_values): ffname = fname + 'values' + str(i) if isinstance(field, pd.Series): store[ffname] = pd.Series(field) else: store[ffname] = pd.DataFrame(field) fc += 1 elif isinstance(data, Series): s = pd.Series(data) if isinstance(data.dtype, pd.types.dtypes.CategoricalDtype): s = s.astype('O') store[name] = s elif isinstance(data, DataFrame): store[name] = pd.DataFrame(data) elif isinstance(data, SparseSeries): s = pd.SparseSeries(data) if isinstance(data.dtype, pd.types.dtypes.CategoricalDtype): s = s.astype('O') store[name] = s elif isinstance(data, SparseDataFrame): store[name] = pd.SparseDataFrame(data) else: if hasattr(data, 'dtype') and isinstance(data.dtype, pd.types.dtypes.CategoricalDtype): data = data.astype('O') else: for col in data: if isinstance(data[col].dtype, pd.types.dtypes.CategoricalDtype): data[col] = data[col].astype('O') store[name] = data if hasattr(data, '_set_categories'): data._set_categories() def to_hdf(self, *args, **kwargs): """Alias of :func:`~exa.core.container.Container`.""" self.save(*args, **kwargs) @classmethod def load(cls, pkid_or_path=None): """ Load a container object from a persistent location or file path. Args: pkid_or_path: Integer pkid corresponding to the container table or file path Returns: container: The saved container object """ path = pkid_or_path if isinstance(path, (int, np.int32, np.int64)): raise NotImplementedError('Lookup via CMS not implemented.') elif not os.path.isfile(path): raise FileNotFoundError('File {} not found.'.format(path)) kwargs = {} fields = defaultdict(dict) with pd.HDFStore(path) as store: for key in store.keys(): if 'kwargs' in key: kwargs.update(store.get_storer(key).attrs.metadata) elif "FIELD" in key: name, dname = "_".join(key.split("_")[1:]).split("/") dname = dname.replace('values', '') fields[name][dname] = store[key] else: name = str(key[1:]) kwargs[name] = store[key] for name, field_data in fields.items(): fps = field_data.pop('data') kwargs[name] = Field(fps, field_values=[field_data[str(arr)] for arr in sorted(map(int, field_data.keys()))])
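# --- Usage sketch (added for illustration, not part of the original file) ---
# Round-tripping a container through HDF5 with the save()/load() pair above.
# `container` stands for an already-constructed Container instance; its
# construction is outside this excerpt.
#
#     container.save('example.hdf5')            # kwargs, fields, and frames
#     restored = container.__class__.load('example.hdf5')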
        ROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                service_endpoint_policy_name=service_endpoint_policy_name,
                service_endpoint_policy_definition_name=service_endpoint_policy_definition_name,
                cls=lambda x, y, z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceEndpointPolicyName': self._serialize.url("service_endpoint_policy_name", service_endpoint_policy_name, 'str'),
            'serviceEndpointPolicyDefinitionName': self._serialize.url("service_endpoint_policy_definition_name", service_endpoint_policy_definition_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        if polling is True:
            polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False:
            polling_method = AsyncNoPolling()
        else:
            polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/serviceEndpointPolicies/{serviceEndpointPolicyName}/serviceEndpointPolicyDefinitions/{serviceEndpointPolicyDefinitionName}'}  # type: ignore

    async def get(
        self,
        resource_group_name: str,
        service_endpoint_policy_name: str,
        service_endpoint_policy_definition_name: str,
        **kwargs: Any
    ) -> "_models.ServiceEndpointPolicyDefinition":
        """Get the specified service endpoint policy definitions from service endpoint policy.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param service_endpoint_policy_name: The name of the service endpoint policy name.
        :type service_endpoint_policy_name: str
        :param service_endpoint_policy_definition_name: The name of the service endpoint policy
         definition name.
        :type service_endpoint_policy_definition_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ServiceEndpointPolicyDefinition, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2019_07_01.models.ServiceEndpointPolicyDefinition
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ServiceEndpointPolicyDefinition"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-07-01"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceEndpointPolicyName': self._serialize.url("service_endpoint_policy_name", service_endpoint_policy_name, 'str'),
            'serviceEndpointPolicyDefinitionName': self._serialize.url("service_endpoint_policy_definition_name", service_endpoint_policy_definition_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('ServiceEndpointPolicyDefinition', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/serviceEndpointPolicies/{serviceEndpointPolicyName}/serviceEndpointPolicyDefinitions/{serviceEndpointPolicyDefinitionName}'}  # type: ignore

    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        service_endpoint_policy_name: str,
        service_endpoint_policy_definition_name: str,
        service_endpoint_policy_definitions: "_models.ServiceEndpointPolicyDefinition",
        **kwargs: Any
    ) -> "_models.ServiceEndpointPolicyDefinition":
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ServiceEndpointPolicyDefinition"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2019-07-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'serviceEndpointPolicyName': self._serialize.url("service_endpoint_policy_name", service_endpoint_policy_name, 'str'),
            'serviceEndpointPolicyDefinitionName': self._serialize.url("service_endpoint_policy_definition_name", service_endpoint_policy_definition_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(service_endpoint_policy_definitions, 'ServiceEndpointPolicyDefinition')
        body_content_kwargs['content'] = body_content
from contextlib import ContextDecorator
from unittest import mock

import httpx
import pytest

from util.working_directory import working_directory

from .http import pull_http


class MockedHttpxStreamResponse(ContextDecorator):
    """
    VCR does not like recording HTTPX stream requests so mock it.
    """

    def __init__(self, method, url, **kwargs):
        # The method and extra kwargs are ignored; a plain GET is enough
        # for these tests.
        self.response = httpx.get(url)

    def __getattr__(self, attr):
        return getattr(self.response, attr)

    def __enter__(self, *args, **kwargs):
        return self

    def __exit__(self, *args, **kwargs):
        # Return a falsy value so exceptions raised inside the `with`
        # block propagate instead of being silently suppressed.
        return False


@pytest.mark.vcr
@mock.patch("httpx.stream", MockedHttpxStreamRe
sponse) def test_extension_from_mimetype(tempdir): with working_directory(tempdir.pa
th): files = pull_http({"url": "https://httpbin.org/get"}) assert files["get.json"]["mimetype"] == "application/json" files = pull_http({"url": "https://httpbin.org/image/png"}, path="image") assert files["image.png"]["mimetype"] == "image/png" files = pull_http({"url": "https://httpbin.org/html"}, path="content") assert files["content.html"]["mimetype"] == "text/html" files = pull_http({"url": "https://httpbin.org/html"}, path="foo.bar") assert files["foo.bar"]["mimetype"] is None # For some reason the status code does not work with VCR record def test_status_codes(tempdir): with pytest.raises(RuntimeError) as excinfo: pull_http({"url": "https://httpbin.org/status/404"}) assert "Error when fetching https://httpbin.org/status/404: 404" in str( excinfo.value )
#!/usr/bin/env python3 # https://docs.python.org/3/library/modulefinder.html from modulefinder import ModuleFinder finder = Mo
duleFinder() finder.run_script('graph1.py') print('Loaded modules:') for name, mod in finder.modules.items(): print('%s: '
% name, end='') print(','.join(list(mod.globalnames.keys())[:3])) print('-'*50) print('Modules not imported:') print('\n'.join(finder.badmodules.keys()))
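# Optional: ModuleFinder also ships a ready-made summary via the stdlib
# modulefinder.ModuleFinder.report() method, an alternative to the
# manual loop above.
finder.report()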
# -*- coding: utf-8 -*-
##############################################################################
#
#    Copyright (c) 2013 Camptocamp SA (http://www.camptocamp.com)
#    @author Nicolas Bessi
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT
import time


class AccountStatementFromInvoiceLines(orm.TransientModel):
    _inherit = "account.statement.from.invoice.lines"

    def populate_statement(self, cr, uid, ids, context=None):
        """Taken from account voucher as no hook is available.
        No functional refactoring, just trimming the part that
        generates the voucher"""
        if context is None:
            context = {}
        statement_id = context.get('statement_id', False)
        if not statement_id:
            return {'type': 'ir.actions.act_window_close'}
        data = self.read(cr, uid, ids, context=context)[0]
        line_ids = data['line_ids']
        if not line_ids:
            return {'type': 'ir.actions.act_window_close'}

        line_obj = self.pool['account.move.line']
        statement_obj = self.pool['account.bank.statement']
        statement_line_obj = self.pool['account.bank.statement.line']
        currency_obj = self.pool['res.currency']
        line_date = time.strftime(DEFAULT_SERVER_DATE_FORMAT)
        statement = statement_obj.browse(
            cr, uid, statement_id, context=context)
        # for each selected move lines
        for line in line_obj.browse(cr, uid, line_ids, context=context):
            ctx = context.copy()
            # take the date for computation of currency => use payment date
            ctx['date'] = line_date
            amount = 0.0
            if line.debit > 0:
                amount = line.debit
            elif line.credit > 0:
                amount = -line.credit
            if line.amount_currency:
                amount = currency_obj.compute(
                    cr, uid, line.currency_id.id, statement.currency.id,
                    line.amount_currency, context=ctx)
            elif (line.invoice and
                    line.invoice.currency_id.id != statement.currency.id):
                amount = currency_obj.compute(
                    cr, uid, line.invoice.currency_id.id,
                    statement.currency.id, amount, context=ctx)
            context.update({'move_line_ids': [line.id],
                            'invoice_id': line.invoice.id})
            s_type = 'general'
            if line.journal_id.type in ('sale', 'sale_refund'):
                s_type = 'customer'
            elif line.journal_id.type in ('purchase', 'purchase_refund'):
                s_type = 'supplier'
            vals = self._prepare_statement_line_vals(
                cr, uid, line, s_type, statement_id, amount, context=context)
            statement_line_obj.create(cr, uid, vals, context=context)
        return {'type': 'ir.actions.act_window_close'}

    def _prepare_statement_line_vals(self, cr, uid, move_line, s_type,
                                     statement_id, amount, context=None):
        return {'name': move_line.name or '?',
                'amount': amount,
                'type': s_type,
                'partner_id': move_line.partner_id.id,
                'account_id': move_line.account_id.id,
                'statement_id': statement_id,
                'ref': move_line.ref,
                'voucher_id': False,
                'date': time.strftime('%Y-%m-%d'),
                }


class AccountPaymentPopulateStatement(orm.TransientModel):
    _inherit = "account.payment.populate.statement"

    def populate_statement(self, cr, uid, ids, context=None):
        """Taken from payment addon as no hook is available.
        No functional refactoring, just trimming the part that
        generates the voucher"""
        line_obj = self.pool['payment.line']
        statement_obj = self.pool['account.bank.statement']
        statement_line_obj = self.pool['account.bank.statement.line']
        currency_obj = self.pool['res.currency']

        if context is None:
            context = {}
        data = self.read(cr, uid, ids, [], context=context)[0]
        line_ids = data['lines']
        if not line_ids:
            return {'type': 'ir.actions.act_window_close'}
        statement = statement_obj.browse(
            cr, uid, context['active_id'], context=context)

        for line in line_obj.browse(cr, uid, line_ids, context=context):
            ctx = context.copy()
            # formerly used value_date here, but that field no longer exists
            ctx['date'] = line.ml_maturity_date
            amount = currency_obj.compute(
                cr, uid, line.currency.id, statement.currency.id,
                line.amount_currency, context=ctx)

            if not line.move_line_id.id:
                continue
            context.update({'move_line_ids': [line.move_line_id.id]})
            vals = self._prepare_statement_line_vals(
                cr, uid, line, -amount, statement, context=context)
            st_line_id = statement_line_obj.create(cr, uid, vals,
                                                   context=context)

            line_obj.write(
                cr, uid, [line.id], {'bank_statement_line_id': st_line_id})
        return {'type': 'ir.actions.act_window_close'}

    def _prepare_statement_line_vals(self, cr, uid, payment_line, amount,
                                     statement, context=None):
        return {
            'name': payment_line.order_id.reference or '?',
            'amount': amount,
            'type': 'supplier',
            'partner_id': payment_line.partner_id.id,
            'account_id': payment_line.move_line_id.account_id.id,
            'statement_id': statement.id,
            'ref': payment_line.communication,
            'date': (payment_line.date or payment_line.ml_maturity_date or
                     statement.date)
        }
from pygame.sprite import DirtySprite
from pygame import draw


class BaseWidget(DirtySprite):
    """Base class for all widgets"""
    focusable = True  # if not focusable, focusin and focusout are never called
    # (for example, a container or a text label)
    hasFocus = False  # whether the widget currently has focus.
    enabled = True  # a widget with enabled==False receives no events at all
    nombre = ''  # identifies the widget within the renderer
    hasMouseOver = False  # whether the mouse has been over the widget, used by onMouseOut
    opciones = None  # the options the widget was initialized with
    setFocus_onIn = False  # if True: Renderer.setFocus is also triggered onMouseIn.
    KeyCombination = ''
    layer = 0
    rect = None
    x, y = 0, 0

    def __init__(self, parent=None, **opciones):
        if parent is not None:
            self.parent = parent
            self.layer = self.parent.layer + 1
        self.opciones = opciones
        super().__init__()

    def on_focus_in(self):
        self.hasFocus = True

    def on_focus_out(self):
self.hasFocus = False def on_mouse_down(self, mousedata): pass def on_mouse_up(self, mousedata): pass
    def on_mouse_over(self):
        pass

    def on_mouse_in(self):
        self.hasMouseOver = True

    def on_mouse_out(self):
        self.hasMouseOver = False

    def on_key_down(self, keydata):
        pass

    def on_key_up(self, keydata):
        pass

    def on_destruction(self):
        # called when the widget is removed from the renderer.
        pass

    @staticmethod
    def _biselar(imagen, color_luz, color_sombra):
        # draws a bevel: shadow lines along the bottom/right edges and
        # light lines along the top/left edges of the image.
        w, h = imagen.get_size()
        draw.line(imagen, color_sombra, (0, h - 2), (w - 1, h - 2), 2)
        draw.line(imagen, color_sombra, (w - 2, h - 2), (w - 2, 0), 2)
        draw.lines(imagen, color_luz, 0, [(w - 2, 0), (0, 0), (0, h - 4)], 2)
        return imagen

    def reubicar_en_ventana(self, dx=0, dy=0):
        self.rect.move_ip(dx, dy)
        self.x += dx
        self.y += dy
        self.dirty = 1

    def __repr__(self):
        return self.nombre

    def is_visible(self):
        return self._visible
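# A minimal concrete widget, as a sketch only: the 'Etiqueta' class and
# its Surface/Rect setup are illustrative additions, not part of the
# original library; it assumes the renderer drives the callbacks above.
from pygame import Rect, Surface


class Etiqueta(BaseWidget):
    focusable = False  # a plain text label never takes focus

    def __init__(self, parent, x, y, w, h, **opciones):
        super().__init__(parent, **opciones)
        self.nombre = opciones.get('nombre', 'etiqueta')
        self.image = Surface((w, h))
        self.rect = Rect(x, y, w, h)
        self.x, self.y = x, y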
from django.http import HttpResponse


def index(request):
    return HttpResponse("Hello")
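# A minimal wiring sketch for the view above; the "myapp" module path is
# an assumption made for illustration.
from django.urls import path

from myapp import views

urlpatterns = [
    path("", views.index, name="index"),
]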
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import decimal
import os
import time

import pytest
from django.core.urlresolvers import reverse

from shuup.testing.browser_utils import (
    click_element, wait_until_appeared, wait_until_condition
)
from shuup.testing.factories import (
    create_order_with_product, get_default_product, get_default_shop,
    get_default_supplier
)
from shuup.testing.utils import initialize_admin_browser_test
from shuup.utils.i18n import format_money

pytestmark = pytest.mark.skipif(os.environ.get("SHUUP_BROWSER_TESTS", "0") != "1",
                                reason="No browser tests run.")


@pytest.mark.browser
@pytest.mark.django_db
def test_refunds(browser, admin_user, live_server, settings):
    order = create_order_with_product(
        get_default_product(), get_default_supplier(), 10, decimal.Decimal("10"),
        n_lines=10, shop=get_default_shop())
    order2 = create_order_with_product(
        get_default_product(), get_default_supplier(), 10, decimal.Decimal("10"),
        n_lines=10, shop=get_default_shop())
    order2.create_payment(order2.taxful_total_price)
    initialize_admin_browser_test(browser, live_server, settings)

    _test_toolbar_visibility(browser, live_server, order)
    _test_create_full_refund(browser, live_server, order)
    _test_refund_view(browser, live_server, order2)


def _check_create_refund_link(browser, order, present):
    url = reverse("shuup_admin:order.create-refund", kwargs={"pk": order.pk})
    wait_until_condition(
        browser, lambda x: (len(x.find_by_css("a[href='%s']" % url)) > 0) == present)


def _test_toolbar_visibility(browser, live_server, order):
    url = reverse("shuup_admin:order.detail", kwargs={"pk": order.pk})
    browser.visit("%s%s" % (live_server, url))
    wait_until_appeared(browser, "#order_details")
    _check_create_refund_link(browser, order, False)

    order.create_payment(order.taxful_total_price)
    browser.reload()
    wait_until_appeared(browser, "#order_details")
    _check_create_refund_link(browser, order, True)


def _test_create_full_refund(browser, live_server, order):
    url = reverse("shuup_admin:order.create-refund", kwargs={"pk": order.pk})
    browser.visit("%s%s" % (live_server, url))
    wait_until_condition(
        browser,
        lambda x: x.is_text_present("Refunded: %s" % format_money(order.shop.create_price("0.00"))))
    wait_until_condition(
        browser,
        lambda x: x.is_text_present("Remaining: %s" % for
mat_money(order.taxful_total_price))) url = reverse("shuup_admin:order.create-full-refund", kwargs={"pk
": order.pk}) click_element(browser, "a[href='%s']" % url) wait_until_condition(browser, lambda x: x.is_text_present("Refund Amount: %s" % format_money(order.taxful_total_price))) click_element(browser, "#create-full-refund") wait_until_appeared(browser, "#order_details") _check_create_refund_link(browser, order, False) order.refresh_from_db() assert not order.taxful_total_price assert order.is_paid() assert order.is_fully_shipped() def _test_refund_view(browser, live_server, order): url = reverse("shuup_admin:order.create-refund", kwargs={"pk": order.pk}) browser.visit("%s%s" % (live_server, url)) wait_until_condition(browser, lambda x: x.is_text_present("Refunded: %s" % format_money(order.shop.create_price("0.00")))) assert len(browser.find_by_css("#id_form-0-line_number option")) == 12 # blank + arbitrary amount + num lines click_element(browser, "#select2-id_form-0-line_number-container") wait_until_appeared(browser, "input.select2-search__field") browser.execute_script('$($(".select2-results__option")[1]).trigger({type: "mouseup"})') # select arbitrary amount wait_until_condition(browser, lambda x: len(x.find_by_css("#id_form-0-text"))) wait_until_condition(browser, lambda x: len(x.find_by_css("#id_form-0-amount"))) browser.find_by_css("#id_form-0-text").first.value = "test" browser.find_by_css("#id_form-0-amount").first.value = "900" click_element(browser, "#add-refund") click_element(browser, "#select2-id_form-1-line_number-container") wait_until_appeared(browser, "input.select2-search__field") browser.execute_script('$($(".select2-results__option")[2]).trigger({type: "mouseup"})') # select first line browser.find_by_css("#id_form-1-amount").first.value == "100" browser.find_by_css("#id_form-1-quantity").first.value == "10" click_element(browser, "button[form='create_refund']") _check_create_refund_link(browser, order, True) # can still refund quantity order.refresh_from_db() assert not order.taxful_total_price assert order.is_paid() assert not order.is_fully_shipped()
"""Source code used for the talk: http://www.slideshare.net/MarcGarcia11/cart-not-only-classification-and-regression-trees """ # data import pandas as pd data = {'age': [38, 49, 27, 19, 54, 29, 19, 42, 34, 64, 19, 62, 27, 77, 55, 41, 56, 32, 59, 35], 'distance': [6169.98, 7598.87, 3276.07, 1570.43, 951.76, 139.97, 4476.89, 8958.77, 1336.44, 6138.85, 2298.68, 1167.92, 676.30, 736.85, 1326.52, 712.13, 3083.07, 1382.64, 2267.55, 2844.18], 'attended': [False, False, False, True, True, True, False, True, True, True, False, True, True, True, False, True, True, True, True, False]} df = pd.DataFrame(data) # base_plot from bokeh.plotting import figure, show def base_plot(df): p = figure(title='Event attendance', plot_width=900, plot_height=400) p.xaxis.axis_label = 'Distance' p.yaxis.axis_label = 'Age' p.circle(df[df.attended]['distance'], df[df.attended]['age'], color='red', legend='Attended', fill_alpha=0.2, size=10) p.circle(df[~df.attended]['distance'], df[~df.attended]['age'], color='blue', legend="Didn't attend", fill_alpha=0.2, size=10) return p _ = show(base_plot()) # tree_to_nodes from collections import namedtuple from itertools import starmap def tree_to_nodes(dtree): nodes = starmap(namedtuple('Node', 'feature,threshold,left,right'), zip(map(lambda x: {0: 'age', 1: 'distance'}.get(x), dtree.tree_.feature), dtree.tree_.threshold, dtree.tree_.children_left, dtree.tree_.children_right)) return list(nodes) # cart_plot from collections import namedtuple, deque from functools import partial class NodeRanges(namedtuple('NodeRanges', 'node,max_x,min_x,max_y,min_y')): pass def cart_plot(df, dtree, nodes, limit=None): nodes = tree_to_nodes(dtree) plot = base_plot() add_line = partial(plot.line, line_color='black', line_width=2) stack = deque() stack.append(NodeRanges(node=nodes[0], max_x=df['distance'].max(), min_x=df['distance'].min(), max_y=df['age'].max(), min_y=df['age'].min())) count = 1 while len(stack): node, max_x, min_x, max_y, min_y = stack.pop() feature, threshold, left, right = node if feature == 'distance': add_line(x=[threshold, threshold], y=[min_y, max_y]) elif feature == 'age': add_line(x=[min_x, max_x], y=[threshold, threshold]) else: continue stack.append(NodeRanges(node=nodes[left], max_x=threshold if feature == 'distance' else max_x, min_x=min_x, max_y=threshold if feature == 'age' else max_y, min_y=min_y)) stack.append(NodeRanges(node=nodes[right], max_x=max_x, min_x=threshold if feature == 'distance' else min_x, max_y=max_y, min_y=threshold if feature == 'age' else min_y)) if limit is not None and count >= limit: break else: count += 1 show(plot) # decision_tree_model def decision_tree_model(age, distance): if distance >= 2283.11: if age >= 40.00: if distance >= 6868.86: if distance >= 8278.82: return True else: return False else: return True else: return False else: if age >= 54.50: if age >= 57.00: return True else: return False else: return True # entropy import math def entropy(a, b): total = a + b prob_a = a / total prob_b = b / total return - prob_a * math.log(prob_a, 2) - prob_b * math.log(prob_b, 2) # get_best_split def get_best_split(x, y): best_split = None best_entropy = 1. 
for feature in x.columns.values: column = x[feature] for value in column.iterrows(): a = y[column < value] == class_a_value b = y[column < value] == class_b_value left_weight = (a + b) / len(y.index) left_entropy = entropy(a, b) a = y[column >= value] == class_a_value b = y[column >= value] == class_b_value right_weight = (a + b) / len(y.index) right_entropy = entropy(a, b) split_entropy = left_weight * le
ft_entropy + right_weight * right_entropy if split_entropy < best_entropy: best_split = (feature, value) best_entropy = split_entropy return best_split # train_decision_tree def train_decision_tree(x, y): feature, value = get_best_split(x, y) x_left, y_left = x[x[feature]
< value], y[x[feature] < value] if len(y_left.unique()) > 1: left_node = train_decision_tree(x_left, y_left) else: left_node = None x_right, y_right = x[x[feature] >= value], y[x[feature] >= value] if len(y_right.unique()) > 1: right_node = train_decision_tree(x_right, y_right) else: right_node = None return (feature, value, left_node, right_node)
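# usage (a sketch): fit a scikit-learn tree on the toy data and draw its
# splits with the helpers above. Assumes scikit-learn is installed; the
# column order ['age', 'distance'] must match the {0: 'age',
# 1: 'distance'} mapping inside tree_to_nodes.
from sklearn.tree import DecisionTreeClassifier

dtree = DecisionTreeClassifier(criterion='entropy')
dtree.fit(df[['age', 'distance']], df['attended'])
cart_plot(df, dtree)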
# Copyright 1999-2013 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 from __future__ import unicode_literals import formatter import io import sys import time import portage from portage import os from portage import _encodings from portage import _unicode_encode from portage.output import xtermTitle from _emerge.getloadavg import getloadavg if sys.hexversion >= 0x3000000: basestring = str class JobStatusDisplay(object): _bound_properties = ("curval", "failed", "running") # Don't update the display unless at least this much # time has passed, in units of seconds. _min_display_latency = 2 _default_term_codes = { 'cr' : '\r', 'el' : '\x1b[K', 'nel' : '\n', } _termcap_name_map = { 'carriage_return' : 'cr', 'clr_eol' : 'el', 'newline' : 'nel', } def __init__(self, qui
et=False, xterm_titles=True): object.__setattr__(self, "quiet", quiet) object.
__setattr__(self, "xterm_titles", xterm_titles) object.__setattr__(self, "maxval", 0) object.__setattr__(self, "merges", 0) object.__setattr__(self, "_changed", False) object.__setattr__(self, "_displayed", False) object.__setattr__(self, "_last_display_time", 0) self.reset() isatty = os.environ.get('TERM') != 'dumb' and \ hasattr(self.out, 'isatty') and \ self.out.isatty() object.__setattr__(self, "_isatty", isatty) if not isatty or not self._init_term(): term_codes = {} for k, capname in self._termcap_name_map.items(): term_codes[k] = self._default_term_codes[capname] object.__setattr__(self, "_term_codes", term_codes) encoding = sys.getdefaultencoding() for k, v in self._term_codes.items(): if not isinstance(v, basestring): self._term_codes[k] = v.decode(encoding, 'replace') if self._isatty: width = portage.output.get_term_size()[1] else: width = 80 self._set_width(width) def _set_width(self, width): if width == getattr(self, 'width', None): return if width <= 0 or width > 80: width = 80 object.__setattr__(self, "width", width) object.__setattr__(self, "_jobs_column_width", width - 32) @property def out(self): """Use a lazy reference to sys.stdout, in case the API consumer has temporarily overridden stdout.""" return sys.stdout def _write(self, s): # avoid potential UnicodeEncodeError s = _unicode_encode(s, encoding=_encodings['stdio'], errors='backslashreplace') out = self.out if sys.hexversion >= 0x3000000: out = out.buffer out.write(s) out.flush() def _init_term(self): """ Initialize term control codes. @rtype: bool @return: True if term codes were successfully initialized, False otherwise. """ term_type = os.environ.get("TERM", "").strip() if not term_type: return False tigetstr = None try: import curses try: curses.setupterm(term_type, self.out.fileno()) tigetstr = curses.tigetstr except curses.error: pass except ImportError: pass if tigetstr is None: return False term_codes = {} for k, capname in self._termcap_name_map.items(): # Use _native_string for PyPy compat (bug #470258). 
code = tigetstr(portage._native_string(capname)) if code is None: code = self._default_term_codes[capname] term_codes[k] = code object.__setattr__(self, "_term_codes", term_codes) return True def _format_msg(self, msg): return ">>> %s" % msg def _erase(self): self._write( self._term_codes['carriage_return'] + \ self._term_codes['clr_eol']) self._displayed = False def _display(self, line): self._write(line) self._displayed = True def _update(self, msg): if not self._isatty: self._write(self._format_msg(msg) + self._term_codes['newline']) self._displayed = True return if self._displayed: self._erase() self._display(self._format_msg(msg)) def displayMessage(self, msg): was_displayed = self._displayed if self._isatty and self._displayed: self._erase() self._write(self._format_msg(msg) + self._term_codes['newline']) self._displayed = False if was_displayed: self._changed = True self.display() def reset(self): self.maxval = 0 self.merges = 0 for name in self._bound_properties: object.__setattr__(self, name, 0) if self._displayed: self._write(self._term_codes['newline']) self._displayed = False def __setattr__(self, name, value): old_value = getattr(self, name) if value == old_value: return object.__setattr__(self, name, value) if name in self._bound_properties: self._property_change(name, old_value, value) def _property_change(self, name, old_value, new_value): self._changed = True self.display() def _load_avg_str(self): try: avg = getloadavg() except OSError: return 'unknown' max_avg = max(avg) if max_avg < 10: digits = 2 elif max_avg < 100: digits = 1 else: digits = 0 return ", ".join(("%%.%df" % digits ) % x for x in avg) def display(self): """ Display status on stdout, but only if something has changed since the last call. This always returns True, for continuous scheduling via timeout_add. """ if self.quiet: return True current_time = time.time() time_delta = current_time - self._last_display_time if self._displayed and \ not self._changed: if not self._isatty: return True if time_delta < self._min_display_latency: return True self._last_display_time = current_time self._changed = False self._display_status() return True def _display_status(self): # Don't use len(self._completed_tasks) here since that also # can include uninstall tasks. 
curval_str = "%s" % (self.curval,) maxval_str = "%s" % (self.maxval,) running_str = "%s" % (self.running,) failed_str = "%s" % (self.failed,) load_avg_str = self._load_avg_str() color_output = io.StringIO() plain_output = io.StringIO() style_file = portage.output.ConsoleStyleFile(color_output) style_file.write_listener = plain_output style_writer = portage.output.StyleWriter(file=style_file, maxcol=9999) style_writer.style_listener = style_file.new_styles f = formatter.AbstractFormatter(style_writer) number_style = "INFORM" f.add_literal_data("Jobs: ") f.push_style(number_style) f.add_literal_data(curval_str) f.pop_style() f.add_literal_data(" of ") f.push_style(number_style) f.add_literal_data(maxval_str) f.pop_style() f.add_literal_data(" complete") if self.running: f.add_literal_data(", ") f.push_style(number_style) f.add_literal_data(running_str) f.pop_style() f.add_literal_data(" running") if self.failed: f.add_literal_data(", ") f.push_style(number_style) f.add_literal_data(failed_str) f.pop_style() f.add_literal_data(" failed") padding = self._jobs_column_width - len(plain_output.getvalue()) if padding > 0: f.add_literal_data(padding * " ") f.add_literal_data("Load avg: ") f.add_literal_data(load_avg_str) # Truncate to fit width, to avoid making the terminal scroll if the # line overflows (happens when the load average is large). plain_output = plain_output.getvalue() if self._isatty and len(plain_output) > self.width: # Use plain_output here since it's easier to truncate # properly than the color output which contains console # color codes. self._update(plain_output[:self.width]) else: self._update(color_output.getvalue()) if self.xterm_titles: # If the HOSTNAME variable is exported, include it # in the xterm title, just like emergelog() does. # See bug #390699. title_str = " ".join(plain_output.split()) hostname = os.environ.get("HOSTNAME") if hostname is not None: title_str = "%s: %s" % (hostname, title_str) xtermTitle(title_str)
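# A hedged usage sketch (not part of portage): the display is driven by
# assigning its bound properties; each assignment goes through
# __setattr__ and triggers _property_change() -> display().
if __name__ == '__main__':
    status_display = JobStatusDisplay(xterm_titles=False)
    status_display.maxval = 3
    for n in range(1, 4):
        status_display.curval = n
        time.sleep(1)
    status_display.reset()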
#!/usr/bin/env python3 from mutagen.mp3 import MP3 import sys if len(sys.argv) < 2: print('error: didn\'t pass enough arguments') print('usage: ./bitrate.py
<file name>')
    print('finds the bitrate of an mp3 file')
    exit(1)

f = MP3(sys.argv[1])
print('bitrate: %s' % (f.
info.bitrate / 1000))
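# Optional extension (a sketch): MP3.info also exposes the track length
# in seconds, which pairs naturally with the bitrate readout above.
print('length: %.1f seconds' % f.info.length)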
# -*- coding: utf-8 -*- from __future__ i
mport unicode_literals from django.db import models, migrations class Migration(migrations.M
igration): dependencies = [ ] operations = [ migrations.CreateModel( name='Item', fields=[ ('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')), ], options={ }, bases=(models.Model,), ), ]
it_matrices([(0,8),(8,8),(8,0),(0,0)]) # set up accelometer accel.init(1) # notify of progress print("P60") sys.stdout.flush() # set up buttons A = 4 B = 17 UP = 25 DOWN = 24 LEFT = 23 RIGHT = 18 START = 27 SELECT = 22 # accelometer threshold THRESHOLD = 3 class State(object): PLAYING, IDLE, SCORE, EXIT = range(4) # starting variables state = State.IDLE field = None title = led_matrix.LEDText("ASPIRIN - Press A to use accelometer or B to use buttons") # notify of progress print("P90") sys.stdout.flush() class Direction(object): LEFT, RIGHT, UP, DOWN = range(4) class Apple(object): def __init__(self, position): self.position = position def draw(self): led_matrix.point(*self.position) class Striker(object): def __init__(self, start_pos, direction): self.position = start_pos # starting position of the striker self.direction = direction def draw(self): led_matrix.point(*self.position, color=3) def move(self): # check if the striker hit the wall and needs to bounce back if self.direction == Direction.LEFT and self.position[0] == 0: self.direction = Direction.RIGHT elif self.direction == Direction.RIGHT and self.position[0] == led_matrix.width()-1: self.direction = Direction.LEFT elif self.direction == Direction.DOWN and self.position[1] == 0: self.direction = Direction.UP elif self.direction == Direction.UP and self.position[1] == led_matrix.height()-1: self.direction = Direction.DOWN if self.direction == Direction.LEFT: self.position = (self.position[0]-1, self.position[1]) elif self.direction == Direction.RIGHT: self.position = (self.position[0]+1, self.position[1]) elif self.direction == Direction.DOWN: self.position = (self.position[0], self.position[1]-1) elif self.direction == Direction.UP: self.position = (self.position[0], self.position[1]+1) class Player(object): def __init__(self, position=None, accel=False): # set position to be center of screen if position is not given if position is None: self.position = (int(led_matrix.width()/2), int(led_matrix.height()/2)) else: self.position = position self.accel = accel # True if controls are the accelometer, False if controls are buttons def draw(self): led_matrix.point(*self.position, color=8) def move(self, direction): if direction == Direction.UP: if self.position[1] < led_matrix.height()-1: self.position = (self.position[0], self.position[1]+1) elif direction == Direction.DOWN: if self.position[1] > 0: self.position = (self.position[0], self.position[1]-1) elif direction == Direction.LEFT: if self.position[0] > 0: self.position = (self.position[0]-1, self.position[1]) elif direction == Direction.RIGHT: if self.position[0] < led_matrix.width()-1: self.position = (self.position[0]+1, self.position[1]) else: raise ValueError("Invalid direction given.") class Field(object): def __init__(self, player): self.player = player empty_strikers = set() # initialize empty strikers for x_pos in range(led_matrix.width()): empty_strikers.add(Striker((x_pos, 0), Direction.UP)) for y_pos in range(led_matrix.height()): empty_strikers.add(Striker((0, y_pos), Direction.RIGHT)) self.empty_strikers = empty_strikers # strikers not used yet self.strikers = set() # active strikers self.apple = None def draw(self): self.player.draw() self.apple.draw() # strikers = self.horizontal_strikers.union(self.vertical_strikers) for striker in self.strikers: striker.draw() def player_collided_with_apple(self): return self.player.position == self.apple.position def player_collided_with_striker(self): # strikers = self.horizontal_strikers.union(self.vertical_strikers) for 
striker in self.strikers: if self.player.position == striker.position: return True return False def new_apple(self): # set up list of x and y choices x_pos = list(range(led_matrix.width())) y_pos = list(range(led_matrix.height())) # remove the position that player is currently in del x_pos[self.player.position[0]] del y_pos[self.player.position[1]] self.apple = Apple((random.choice(x_pos), random.choice(y_pos))) def add_striker(self): if len(self.empty_strikers) == 0: return False # no more strikers to make, you win!! new_striker = random.choice(list(self.empty_strikers)) self.strikers.add(new_striker) self.empty_strikers.remove(new_striker) return True # set up buttons GPIO.setmode(GPIO.BC
M) def button_handler(channel): global state global field if channel in [START, SELECT]: state = State.EXIT elif state in [State.IDLE,
State.SCORE] and channel in [A, B]: # Reset field and player to start a new game player = Player(accel=(channel == A)) field = None field = Field(player) field.new_apple() # add the first apple state = State.PLAYING # elif state == State.PLAYING and (not field.player.accel) and channel in [UP, DOWN, LEFT, RIGHT]: # if channel == UP: # field.player.move(Direction.UP) # elif channel == DOWN: # field.player.move(Direction.DOWN) # elif channel == LEFT: # field.player.move(Direction.LEFT) # elif channel == RIGHT: # field.player.move(Direction.RIGHT) for button in [UP, DOWN, LEFT, RIGHT, START, A, B, SELECT]: GPIO.setup(button, GPIO.IN, pull_up_down=GPIO.PUD_UP) GPIO.add_event_detect(button, GPIO.FALLING, callback=button_handler, bouncetime=100) # notify of progress print("P100") sys.stdout.flush() # notify menu we are ready for the led matrix print("READY") sys.stdout.flush() # FSM ======= while True: if state == State.PLAYING: led_matrix.erase() # move player with accelometer, otherwise poll the buttons if field.player.accel: angles = accel.angles() # "Simple" lowpass filter for velocity data x = angles[0] y = angles[1] # alpha = 0.2 # velocity = 0.0 # x_diff = velocity*alpha + (angles[0]*2*8/90)*(1 - alpha) # y_diff = velocity*alpha + (angles[1]*2*8/90)*(1 - alpha) if x > THRESHOLD: field.player.move(Direction.RIGHT) elif x < -THRESHOLD: field.player.move(Direction.LEFT) if y > THRESHOLD: field.player.move(Direction.DOWN) elif y < -THRESHOLD: field.player.move(Direction.UP) else: if GPIO.input(UP) == 0: field.player.move(Direction.UP) if GPIO.input(DOWN) == 0: field.player.move(Direction.DOWN) if GPIO.input(LEFT) == 0: field.player.move(Direction.LEFT) if GPIO.input(RIGHT) == 0: field.player.move(Direction.RIGHT) # move the strikers for striker in field.strikers: striker.move() # draw all the objects on the field field.draw() led_matrix.show() # check for collisions if field.player_collided_with_striker(): state = State.SCORE elif field.player_collided_with_apple(): field.new_apple() ret = field.add_striker() if ret == False:
import sqlite3 as sql from flask.json import jsonify from flask import current_app def total_entries(): with sql.connect("names.db") as con: cur = con.cursor() entries = cur.execute("SELECT count(*) FROM names").fetchone() con.commit() return '{}\n'.format('{}\n'.format(entries)[1:-3]) def select_entries_by_name(name): with sql.connect("names.db") as con: cur = con.cursor() query = cur.execute("SELECT id, year, gender, count FROM names WHERE name = '{}';".format(name)) con.commit() cached = current_app.cache.get('a_key') if cached: return cached #"The value is cached: {}\n".format(cached) result = [dict({'id': row[0], 'year': row[1], 'gender': row[2], 'count': row[3]}) for row in query.fetchall()] current_app.cache.set('a_key', result, timeout=180) return result #return jsonify({'Entries for %s' % name: entries}) def insert_name(name,year,gender,count): with sql.connect("names.db") as con: cur = con.cursor() try: cur.execute("INSERT INTO names (name,year,gender,count) VALUES ('{}',{},'{}',{})".format(
name, year, gender, count))
            con.commit()
            new_id = cur.lastrowid
            return str(new_id)
        except Exception as e:
            print(e)
            return 'The baby is already present in the database.'


def fir
st_and_last(name): with sql.connect("names.db") as con: cur = con.cursor() last = cur.execute("select MAX(year) from names where name='{}';".format(name) ).fetchone() first = cur.execute("select MIN(year) from names where name='{}';".format(name) ).fetchone() con.commit() return 'Last year is: %s \nFirst year is: %s' % ( \ '{}'.format('{}\n'.format(last)[1:-3]), \ '{}'.format('{}\n'.format(first)[1:-3]))
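# Hedged note: the string-formatted SQL above is open to SQL injection.
# A minimal sketch of the same lookup using sqlite3's "?" parameter
# binding (the function name is new, added for illustration):
def select_entries_by_name_safe(name):
    with sql.connect("names.db") as con:
        cur = con.cursor()
        query = cur.execute(
            "SELECT id, year, gender, count FROM names WHERE name = ?",
            (name,))
        return [{'id': row[0], 'year': row[1], 'gender': row[2],
                 'count': row[3]} for row in query.fetchall()]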
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BAS
IS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

"""
DAG designed to test a PythonOperator that calls a functools.partial
"""

import functools
import logging
from datetime import datetime

from airflow.models
import DAG from airflow.operators.python import PythonOperator DEFAULT_DATE = datetime(2016, 1, 1) default_args = dict(start_date=DEFAULT_DATE, owner='airflow') class CallableClass: """ Class that is callable. """ def __call__(self): """A __call__ method """ def a_function(_, __): """A function with two args """ partial_function = functools.partial(a_function, arg_x=1) class_instance = CallableClass() logging.info('class_instance type: %s', type(class_instance)) dag = DAG(dag_id='test_task_view_type_check', default_args=default_args) dag_task1 = PythonOperator( task_id='test_dagrun_functool_partial', dag=dag, python_callable=partial_function, ) dag_task2 = PythonOperator( task_id='test_dagrun_instance', dag=dag, python_callable=class_instance, )
import numpy as np

import readout_classifier


class single_shot_readout:
    """
    Single shot readout class

    Args:
        adc (Instrument): a device that measures a complex vector for each readout trigger (an ADC)
        prepare_seqs (list of pulses.sequence): a list of sequences of control pulses. The indices are used for state identification.
        ro_seq (pulses.sequence): a sequence of control pulses that is used to generate the readout pulse of the DAC.
        pulse_generator (pulses.pulse_generator): pulse generator used to concatenate and set waveform sequences on the DAC.
        ro_delay_seq (pulses.sequence): Sequence used to align the DAC and ADC (readout delay compensation)
        adc_measurement_name (str): name of measurement on ADC
    """
    def __init__(self, adc, prepare_seqs, ro_seq, pulse_generator, ro_delay_seq=None, _readout_classifier=None, adc_measurement_name='Voltage'):
        self.adc = adc
        self.ro_seq = ro_seq
        self.prepare_seqs = prepare_seqs

        self.ro_delay_seq = ro_delay_seq
        self.pulse_generator = pulse_generator
        self.repeat_samples = 2
        self.save_last_samples = False
        self.train_test_split = 0.8
        self.measurement_name = ''
        # self.dump_measured_samples = False
        self.measure_avg_samples = True
        #self.measure_cov_samples = False
        self.measure_hists = True
        self.measure_feature_w_threshold = True
        #self.measure_features = True
        #self.cutoff_start = 0
        if not _readout_classifier:
            self.readout_classifier = readout_classifier.linear_classifier()
        else:
            self.readout_classifier = _readout_classifier
        self.adc_measurement_name = adc_measurement_name
        self.filter_binary = {'get_points': lambda: (self.adc.get_points()[adc_measurement_name][0],),
                              'get_dtype': lambda: int,
                              'get_opts': lambda: {},
                              'filter': self.filter_binary_func}

    # def measure_delay(self, ro_channel):
        # import matplotlib.pyplot as plt
        # from scipy.signal import resample
        # self.pulse_generator.set_seq(self.ro_delay_seq)
        # first_nonzero = int(np.nonzero(np.abs(self.pulse_generator.channels[ro_channel].get_waveform()))[0][0]/self.pulse_generator.channels[ro_channel].get_clock()*self.adc.get_clock())
        # ro_dac_waveform = self.pulse_generator.channels[ro_channel].awg_I.get_waveform(channel=self.pulse_generator.channels[ro_channel].awg_ch_I)+\
            # 1j*self.pulse_generator.channels[ro_channel].awg_Q.get_waveform(channel=self.pulse_generator.channels[ro_channel].awg_ch_Q)
        # ro_dac_waveform = resample(ro_dac_waveform, num=int(len(ro_dac_waveform)/self.pulse_generator.channels[ro_channel].get_clock()*self.adc.get_clock()))
        # ro_adc_waveform = np.mean(self.adc.measure()['Voltage'], axis=0)
        # ro_dac_waveform = ro_dac_waveform - np.mean(ro_dac_waveform)
        # ro_adc_waveform = ro_adc_waveform - np.mean(ro_adc_waveform)
        # xc = np.abs(np.correlate(ro_dac_waveform, ro_adc_waveform, 'same'))
        # xc_max = np.argmax(xc)
        # delay = int((xc_max - first_nonzero)/2)
        # #plt.figure('delay')
        # #plt.plot(ro_dac_waveform[first_nonzero:])
        # #plt.plot(ro_adc_waveform[delay:])
        # #plt.plot(ro_adc_waveform)
        # #print ('Measured delay is {} samples'.format(delay), first_nonzero, xc_max)
        # return delay

    def calibrate(self):
        X = []
        y = []
        for class_id, prepare_seq in enumerate(self.prepare_seqs):
            for i in range(self.repeat_samples):
                # pulse sequence to prepare state
                self.pulse_generator.set_seq(prepare_seq+self.ro_seq)
                measurement = self.adc.measure()
                if type(self.adc_measurement_name) is list:
                    raise ValueError('Multiqubit readout not implemented') #need multiqubit readout implementation
                else:
                    X.append(measurement[self.adc_measurement_name])
                    y.extend([class_id]*len(self.adc.get_points()[self.adc_measurement_name][0][1]))
        X = np.reshape(X, (-1, 
len(self.adc.get_points()[self.adc_measurement_name][-1][1]))) # last dimension is the feature dimension y = np.asarray(y) # if self.dump_measured_samples or self.save_last_samples: # self.calib_X = X#np.reshape(X, (len(self.prepare_seqs), -1, len(self.adc.get_points()[self.adc_measurement_name][-1][1]))) # self.calib_y = y scores = readout_classifier.evaluate_classifier(self.readout_classifier, X, y) self.readout_classifier.fit(X, y) self.scores = scores self.confusion_matrix = readout_classifier.confusion_matrix(y, self.readout_classifier.predict(X)) def get_opts(self): opts = {} scores = {score_name:{'log':False} for score_name in readout_classifier.readout_classifier_scores} opts.update(scores) if self.measure_avg_samples: avg_samples = {'avg_sample'+str(_class):{'log':False} for _class in self.readout_classifier.class_list} #features = {'feature'+str(_class):{'log':False} for _class in self.readout_classifier.class_list} opts.update(avg_samples) #meas.update(features) if self.measure_hists: #hists = {'hists':{'log':Fas}} opts['hists'] = {'log':False} opts['proba_points'] = {'log':False} if self.measure_feature_w_threshold: opts['feature'] = {'log':False} opts['threshold'] = {'log':False} return opts def measure(self): self.calibrate() meas = {} # if self.dump_measured_samples: # self.dump_samples(name=self.measurement_name) meas.update(self.scores) if self.measure_avg_samples: avg_samples = {'avg_sample'+str(_class):self.readout_classifier.class_averages[_class] for _class in self.readout_classifier.class_list} #features = {'feature'+str(_class):self.readout_classifier.class_features[_class] for _class in self.readout_classifier.class_list} meas.update(avg_samples) #meas.update(features) if self.measure_hists: meas['hists'] = self.readout_classifier.hists meas['proba_points'] = self.readout_classifier.proba_points if self.measure_feature_w_threshold: meas['feature'] = self.readout_classifier.feature meas['threshold'] = self.readout_classifier.threshold return meas def get_points(self): points = {} scores = {score_name:[] for score_name in readout_classifier.readout_classifier_scores} points.update(scores) if self.measure_avg_samples: avg_samples = {'avg_sample'+str(_class):[('Time',np.arange(self.adc.get_nop())/self.adc.get_clock(), 's')] for _class in self.readout_classifier.class_list} #features = {'feature'+str(_class):[('Time',np.arange(self.adc.get_nop())/self.adc.get_clock(), 's')] for _class in self.readout_classifier.class_list} points.update(avg_samples) #points.update(features) if self.measure_hists: points['hists'] = [('class', self.readout_classifier.class_list, ''), ('bin', np.arange(self.readout_classifier.nbins), '')] points['proba_points'] = [('bin', np.arange(self.readout_classifier.nbins), '')] if self.measure_feature_w_threshold: points['feature'] = [('Time',np.arange(self.adc.get_nop())/self.adc.get_clock(), 's')] points['threshold'] = [] return points def get_dtype(self): dtypes = {} scores = {score_name:float for score_name in readout_classifier.readout_classifier_scores} dtypes.update(scores) if self.measure_avg_samples: avg_samples = {'avg_sample'+str(_class):self.adc.get_dtype()
[self.adc_measurement_name] for _class in self.readout_classifier.class_list} features = {'feature'+str(_class):self.adc.get_dtype()[self.adc_measurement_name] for _class in self.readout_classifier.class_list} dtypes.update(avg_samples) dtypes.update(features) if self.measure_hists:
            dtypes['hists'] = float
            dtypes['proba_points'] = float
        if self.measure_feature_w_threshold:
            # use the builtin complex type; np.complex is a deprecated alias
            dtypes['feature'] = complex
            dtypes['threshold'] = float
        return dtypes

    # def dump_samples(self, name):
        # from .save_pkl import save_pkl
        # header = {'type':'Readout classification X', 'name':name}
        # measurement = {'Readout classification X':(['Sample ID', 'time'],
                    # [np.arange(self.calib_X.shape[0]), np.arange(self.calib_X.shape[1])/self.adc.get_clock()],
                    # self.calib_X),
                    # 'Readout classification y':(['Sample ID'],
                    # [np.arange(self.calib_X.shape[0])],
                    # self.calib_y)}
        # save_pkl(header, measurement, plot=False)

    def filter_binary_func(self, x):
        return self.readout_classifier.predict(x[self.adc_measurement_name])
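# A hedged usage sketch; every object below (my_adc, the prepare and
# readout sequences, my_pulse_generator) is a hypothetical stand-in for
# the lab-specific drivers this class expects, so it is left as comments:
#
#   readout = single_shot_readout(adc=my_adc,
#                                 prepare_seqs=[ground_seq, excited_seq],
#                                 ro_seq=ro_pulse_seq,
#                                 pulse_generator=my_pulse_generator)
#   results = readout.measure()  # calibrates, then returns scores/hists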
tce_sort_4kg_4321.c') os.system('ar -r '+lib_name+' tce_sort_4kg_4321.o') for transpose_order in transpose_list: dummy = 0 A = transpose_order[0] B = transpose_order[1] C = transpose_order[2] D = transpose_order[3] driver_name = 'transpose_'+A+B+C+D print driver_name source_name = driver_name+'_driver.F' lst_name = driver_name+'_driver.lst' source_file = open(source_name,'w') source_file.write(' PROGRAM ARRAYTEST\n') source_file.write('#include "mpif.h"\n') source_file.write(' REAL*8 before('+ranks[0]+','+ranks[0]+','+ranks[0]+','+ranks[0]+')\n') source_file.write(' REAL*8 after_jeff('+sizechar+')\n') source_file.write(' REAL*8 after_hirata('+sizechar+')\n') source_file.write(' REAL*8 after_glass('+sizechar+')\n') source_file.write(' REAL*8 factor\n') source_file.write(' REAL*8 Tstart,Tfinish,Thirata,Tglass,Tjeff\n') source_file.write(' REAL*8 Tspeedup,Tbest\n') source_file.write(' INTEGER*4 i,j,k,l\n') source_file.write(' INTEGER*4 aSize(4)\n') source_file.write(' INTEGER*4 perm(4)\n') source_file.write(' INTEGER*4 fastest(4)\n') source_file.write(' INTEGER ierror\n') source_file.write(' LOGICAL glass_correct\n') source_file.write(' EXTERNAL glass_correct\n') source_file.write(' call mpi_init(ierror)\n') source_file.write(' aSize(1) = '+ranks[0]+'\n') source_file.write(' aSize(2) = '+ranks[1]+'\n') source_file.write(' aSize(3) = '+ranks[2]+'\n') source_file.write(' aSize(4) = '+ranks[3]+'\n') source_file.write(' perm(1) = '+A+'\n') source_file.write(' perm(2) = '+B+'\n') source_file.write(' perm(3) = '+C+'\n') source_file.write(' perm(4) = '+D+'\n') source_file.write(' DO 70 i = 1, '+ranks[0]+'\n') source_file.write(' DO 60 j = 1, '+ranks[1]+'\n') source_file.write(' DO 50 k = 1, '+ranks[2]+'\n') source_file.write(' DO 40 l = 1, '+ranks[3]+'\n') source_file.write(' before(i,j,k,l) = l + k*10 + j*100 + i*1000\n') source_file.write('40 CONTINUE\n') source_file.write('50 CONTINUE\n') source_file.write('60 CONTINUE\n') source_file.write('70 CONTINUE\n') source_file.write(' factor = 1.0\n') source_file.write(' Tbest=999999.0\n') source_file.write(' Tstart=0.0\n') source_file.write(' Tfinish=0.0\n') source_file.write(' CALL CPU_TIME(Tstart)\n') source_file.write(' DO 30 i = 1, '+count+'\n') source_file.write(' CALL tce_sort_4(before, after_hirata,\n') source_file.write(' & aSize(1), aSize(2), aSize(3), aSize(4),\n') source_file.write(' & perm(1), perm(2), perm(3), perm(4), factor)\n') source_file.write('30 CONTINUE\n') source_file.write(' CALL CPU_TIME(Tfinish)\n') source_file.write(' Thirata=(Tfinish-Tstart)\n') source_file.write(' Tstart=0.0\n') source_file.write(' Tfinish=0.0\n') source_file.write(' Tstart=rtc()\n') source_file.write(' IF( ((perm(1).eq.4).and.(perm(2).eq.3)).and.\n') source_file.write(' & ((perm(3).eq.2).and.(perm(4).eq.1)) ) THEN\n') source_file.write(' CALL CPU_TIME(Tstart)\n') source_file.write(' DO 31 i = 1, '+count+'\n') source_file.write(' CALL tce_sort_4kg_4321_(before, after_glass,\n') source_file.write(' & aSize(1), aSize(2), aSize(3), aSize(4),\n') source_file.write(' & factor)\n') source_file.write('31 CONTINUE\n') source_file.write(' CALL CPU_TIME(Tfinish)\n') source_file.write(' ELSEIF(glass_correct(perm(1), perm(2), perm(3), perm(4))) THEN\n') source_file.write(' CALL CPU_TIME(Tstart)\n') source_file.write(' DO 32 i = 1, '+count+'\n') source_file.write(' CALL tce_sort_4kg_(before, after_glass,\n') source_file.write(' & aSize(1), aSize(2), aSize(3), aSize(4),\n') source_file.write(' & perm(1), perm(2), perm(3), perm(4), factor)\n') source_file.write('32 
CONTINUE\n')
        source_file.write('      CALL CPU_TIME(Tfinish)\n')
        source_file.write('      ENDIF\n')
    #source_file.write('      Tfinish=rtc()\n')
    source_file.write('      Tglass=(Tfinish-Tstart)\n')
    source_file.write('      IF(glass_correct(perm(1), perm(2), perm(3), perm(4))) THEN\n')
    #source_file.write('      PRINT*,"   i  after_glass(i)\n')
    #source_file.write('     & after_hirata(i)"\n')
    source_file.write('      DO 33 i = 1, '+sizechar+'\n')
    source_file.write('      IF (after_glass(i).ne.after_hirata(i)) THEN\n')
    source_file.write('      PRINT*,"glass error ",i,after_glass(i),after_hirata(i)\n')
    source_file.write('      ENDIF\n')
    source_file.write('33    CONTINUE\n')
    source_file.write('      ENDIF\n')
    source_file.write('      write(6,*) "TESTING TRANSPOSE TYPE '+A+B+C+D+'"\n')
    source_file.write('      write(6,*) "==================="\n')
    source_file.write('      write(6,*) "The compilation flags were:"\n')
    for option in range(0,len(fortran_opt_flags.split())):
        source_file.write('      write(6,*) "'+fortran_opt_flags.split()[option]+'"\n')
    source_file.write('      write(6,*) "==================="\n')
    source_file.write('      write(6,*) "Hirata Reference = ",Thirata,"seconds"\n')
    source_file.write('      IF(glass_correct(perm(1), perm(2), perm(3), perm(4))) THEN\n')
    source_file.write('      write(6,*) "KGlass Reference = ",Tglass,"seconds"\n')
    source_file.write('      ENDIF\n')
    source_file.write('      write(6,1001) "Algorithm","Jeff","Speedup","Best","Best Speedup"\n')
    for loop_order in loop_list:
        dummy = dummy+1
        a = loop_order[0]
        b = loop_order[1]
        c = loop_order[2]
        d = loop_order[3]
        subroutine_name = 'trans_'+A+B+C+D+'_loop_'+a+b+c+d+'_'
source_file.write(' Tstart=0.0\n') source_file.write(' Tfinish=0.0\n') source_file.write('
CALL CPU_TIME(Tstart)\n') source_file.write(' DO '+str(100+dummy)+' i = 1, '+count+'\n') source_file.write(' CALL '+subroutine_name+'(before, after_jeff,\n') source_file.write(' & aSize(1), aSize(2), aSize(3), aSize(4),\n') source_file.write(' & factor)\n') source_file.write(str(100+dummy)+' CONTINUE\n') source_file.write(' CALL CPU_TIME(Tfinish)\n') source_file.write(' Tjeff=(Tfinish-Tstart)\n') source_file.write(' Tspeedup=Thirata/Tjeff\n') source_file.write(' Tbest=min(Tjeff,Tbest)\n') source_file.write(' if(Tjeff.eq.Tbest) then \n') source_file.write(' fastest(1)='+a+'\n') source_file.write(' fastest(2)='+b+'\n') source_file.write(' fastest(3)='+c+'\n') source_file.write(' fastest(4)='+d+'\n') source_file.write(' endif\n') # source_file.write(' goto 911\n') ######################## if 0 < dummy < 10: nice_dummy=' '+str(dummy) if 9 < dummy < 100: nice_dummy=' '+str(dummy) if 99 < dummy < 999: nice_dummy=''+str(dummy) #source_file.write(' PRINT*,"Loop '+a+b+c+d+' ",Tjeff,Tspeedup\n') source_file.write(' write(6,1100) "'+nice_dummy+' Loop '+a+b+c+d+' ",\n') source_file.write(' & Tjeff,Tspeedup,Tbest,Thirata/Tbest\n') #source_file.write(' DO '+str(500+dummy)+' i = 1, '+sizechar+'\n') #source_file.write(' IF (after_
import unittest from models import heliosat import numpy as np from netcdf import netcdf as nc from datetime import datetime import os import glob class TestPerformance(unittest.TestCase): def setUp(self): # os.system('rm -rf static.nc temporal_cache products') os.system('rm -rf temporal_cache products/estimated') os.system('rm -rf temporal_cache') os.system('cp -rf data_argentina mock_data') self.files = glob.glob('mock_data/goes13.*.BAND_01.nc'
) def tearDown(self): os.system('rm -rf mock_data') def test_main(self): begin = datetime.now() heliosat.workwith('mock_data/goes13.2015.*.BAND_01.nc', 32) end = datetime.now() elapsed = (end - begin).total_seconds() first, last = min(self.files), max(self.files) t
o_dt = heliosat.to_datetime
        processed = (to_dt(last) - to_dt(first)).total_seconds()
        processed_days = processed / 3600. / 24
        scale_shapes = (2245. / 86) * (3515. / 180) * (30. / processed_days)
        estimated = elapsed * scale_shapes / 3600.
        print "Scaling total time to %.2f hours." % estimated
        print "Efficiency achieved: %.2f%%" % (3.5 / estimated * 100.)


if __name__ == '__main__':
    unittest.main()
import datetime from typing import Optional, TypedDict from backend.commo
n.sitevars.sitevar import Sitevar class WebConfig(TypedDict): travis_job: str tbaClient_endpoints_sha: str cu
rrent_commit: str deploy_time: str endpoints_sha: str commit_time: str class AndroidConfig(TypedDict): min_app_version: int latest_app_version: int class IOSConfig(TypedDict): min_app_version: int latest_app_version: int class ContentType(TypedDict): current_season: int max_season: int web: Optional[WebConfig] android: Optional[AndroidConfig] ios: Optional[IOSConfig] class ApiStatus(Sitevar[ContentType]): @staticmethod def key() -> str: return "apistatus" @staticmethod def description() -> str: return "For setting max year, min app versions, etc." @staticmethod def default_value() -> ContentType: current_year = datetime.datetime.now().year return ContentType( current_season=current_year, max_season=current_year, web=None, android=None, ios=None, ) @classmethod def status(cls) -> ContentType: return cls.get()
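# A hedged usage sketch: reading the sitevar back; it assumes a
# configured datastore behind Sitevar.get(), so it is illustrative only.
def _example_read() -> ContentType:
    status = ApiStatus.status()
    assert status["current_season"] <= status["max_season"]
    return status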
from .max import max from pyramda.priva
te.asserts import assert_equal
def max_test(): assert_equal(max([1, 3, 4, 2]), 4)
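# An extra case in the same style (a sketch), covering negative values.
def max_negative_test():
    assert_equal(max([-5, -1, -3]), -1)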
""" Script for selecting a good number of basis functions. Too many or too few basis functions will introduce numerical error. True solution must be known. Run the program several times, varying the value of the -N option. There may be a way to improve on this brute force method. """ # To allow __main__ in subdirectory import sys sys.path.append(sys.path[0] + '/..') import argparse import numpy as np import ps.ps import io_util import problems import problems.boundary import copy from multiprocessing import Pool parser = argparse.ArgumentParser() io_util.add_arguments(parser, ('problem', 'N')) args = parser.parse_args() problem = problems.problem_dict[args.problem]() boundary = problems.boundary.OuterSine(prob
lem.R) problem.boundary = boundary # Options to pass to the solver options = { 'problem': problem, 'N': args.N, 'scheme_order': 4, } meta_options = {
'procedure_name': 'optimize_basis', } io_util.print_options(options, meta_options) def my_print(t): print('n_circle={} n_radius={} error={}'.format(*t)) def worker(t): options['n_circle'] = t[0] options['n_radius'] = t[1] my_solver = ps.ps.PizzaSolver(options) result = my_solver.run() t = (t[0], t[1], result.error) my_print(t) return t all_options = [] # Tweak the following ranges as needed for n_circle in range(30, 100, 5): for n_radius in range(17, n_circle, 4): all_options.append((n_circle, n_radius)) with Pool(4) as p: results = p.map(worker, all_options) min_error = float('inf') for t in results: if t[2] < min_error: min_error = t[2] min_t = t print() my_print(min_t)
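# A possible refinement (a sketch, not part of the original procedure):
# rerun a finer grid around the coarse minimum found above.
fine_options = [(nc, nr)
                for nc in range(min_t[0] - 4, min_t[0] + 5, 2)
                for nr in range(max(1, min_t[1] - 3), min_t[1] + 4, 2)]

with Pool(4) as p:
    fine_results = p.map(worker, fine_options)

my_print(min(fine_results, key=lambda t: t[2]))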
from .model import SV
C
pe).get('ping_rta_warning'):
            rules[device_type+"_rta"].update({"Severity2":["warning",{'name': device_type+"_rta_warning", 'operator': 'greater_than', 'value': float(ping_rule_dict.get(device_type).get('ping_rta_warning')) or ''}]})
            rules[device_type+"_rta"].update({"Severity3":["up",{'name': device_type+"_rta_up", 'operator': 'less_than', 'value': float(ping_rule_dict.get(device_type).get('ping_rta_warning'))},'AND',{'name': device_type+"_rta_up", 'operator': 'greater_than', 'value': 0 }]})

    # TODO: This is a separate module; it should be merged into one to prevent re-looping over rules
    for rule in rules:
        if rule in set(service_name_with_operator_in):
            #service_name = "_".join(rule.split("_")[0:4])
            service_rules = rules.get(rule)
            for i in range(1,len(service_rules)+1):
                severities = service_rules.get("Severity%s"%i)
                for x in range(1,len(severities),2):
                    if severities[x].get("name") in operator_name_with_operator_in.keys():
                        severities[x]["operator"] = operator_name_with_operator_in.get(severities[x].get("name"))
    return rules

def process_kpi_rules(all_services_dict):
    # TODO: Update this code for both ul_issue and other KPIs
    kpi_rule_dict = {}
    formula_mapper = eval(Variable.get('kpi_rule_function_mapper'))
    for service in formula_mapper:
        is_Function = False if "round" in formula_mapper.get(service) else True
        kpi_rule_dict[service] = {
            "name":service,
            "isFunction":is_Function,
            "formula":formula_mapper.get(service),
            "isarray":
[False,False], "service":service, "arraylocations":0 } print kpi_rule_dict return kpi_rule_dict def generate_service_rules(): service_threshold_query = Variable.get('q_get_thresholds') #creating Severity Rules data = execute_query(service_threshol
d_query) rules_dict = createDict(data) Variable.set("rules",str(rules_dict)) #can only be done if generate_service_rules is completed and there is a rule Variable in Airflow Variables def generate_kpi_rules(): service_rules = eval(Variable.get('rules')) processed_kpi_rules = process_kpi_rules(service_rules) #Variable.set("kpi_rules",str(processed_kpi_rules)) def generate_kpi_prev_states(): ul_tech = eval(Variable.get('ul_issue_kpi_technologies')) old_pl_data = redis_hook_2.get("all_devices_state") all_device_type_age_dict = {} for techs_bs in ul_tech: redis_hook_2.set("kpi_ul_prev_state_%s"%(ul_tech.get(techs_bs)),old_pl_data) redis_hook_2.set("kpi_ul_prev_state_%s"%(techs_bs),old_pl_data) def generate_backhaul_inventory_for_util(): backhaul_inventory_data_query=""" select device_device.ip_address, device_device.device_name, device_devicetype.name, device_device.mac_address, device_devicetype.agent_tag, site_instance_siteinstance.name, device_device.device_alias, device_devicetechnology.name as techno_name, group_concat(service_servicedatasource.name separator '$$') as port_name, group_concat(inventory_basestation.bh_port_name separator '$$') as port_alias, group_concat(inventory_basestation.bh_capacity separator '$$') as port_wise_capacity from device_device inner join (device_devicetechnology, device_devicetype, machine_machine, site_instance_siteinstance) on ( device_devicetype.id = device_device.device_type and device_devicetechnology.id = device_device.device_technology and machine_machine.id = device_device.machine_id and site_instance_siteinstance.id = device_device.site_instance_id ) inner join (inventory_backhaul) on (device_device.id = inventory_backhaul.bh_configured_on_id OR device_device.id = inventory_backhaul.aggregator_id OR device_device.id = inventory_backhaul.pop_id OR device_device.id = inventory_backhaul.bh_switch_id OR device_device.id = inventory_backhaul.pe_ip_id) left join (inventory_basestation) on (inventory_backhaul.id = inventory_basestation.backhaul_id) left join (service_servicedatasource) on (inventory_basestation.bh_port_name = service_servicedatasource.alias) where device_device.is_deleted=0 and device_device.host_state <> 'Disable' and device_devicetype.name in ('Cisco','Juniper','RiCi', 'PINE','Huawei','PE') group by device_device.ip_address; """ backhaul_data = execute_query(backhaul_inventory_data_query) bh_cap_mappng = {} for device in backhaul_data: dev_name = device.get('device_name') bh_cap_mappng[device.get('device_name')] = { 'port_name' : device.get('port_name').split("$$") if device.get('port_name') else None, 'port_wise_capacity': device.get('port_wise_capacity').split("$$") if device.get('port_wise_capacity') else None, 'ip_address':device.get('ip_address'), 'port_alias':device.get('port_alias').split("$$") if device.get('port_alias') else None, 'capacity': {} } if device.get('port_name') and device.get('port_wise_capacity'): for index,port in enumerate(bh_cap_mappng.get(device.get('device_name')).get('port_name')): #print index,bh_cap_mappng.get(device.get('device_name')).get('port_wise_capacity'),device.get('port_name') try: port_capacity = bh_cap_mappng.get(device.get('device_name')).get('port_wise_capacity')[index] except IndexError: try: port_capacity = bh_cap_mappng.get(device.get('device_name')).get('port_wise_capacity')[index-1] except Exception: port_capacity = bh_cap_mappng.get(device.get('device_name')).get('port_wise_capacity')[0] except Exception: port_capacity = 
bh_cap_mappng.get(device.get('device_name')).get('port_wise_capacity')[0] bh_cap_mappng.get(device.get('device_name')).get('capacity').update({port:port_capacity}) for key_dic in ['port_alias']: if bh_cap_mappng.get(dev_name).get(key_dic) and len(bh_cap_mappng.get(dev_name).get(key_dic)) > 1: new_ports = [] for index_m,port_v in enumerate(set(bh_cap_mappng.get(dev_name).get(key_dic))): if ',' in port_v: def_ports = port_v.split(',') new_ports.extend(def_ports) for port in def_ports: try: bh_cap_mappng.get(device.get('device_name')).get('capacity').update({port:bh_cap_mappng.get(device.get('device_name')).get('port_wise_capacity')[index_m]}) except Exception: bh_cap_mappng.get(device.get('device_name')).get('capacity').update({port:bh_cap_mappng.get(device.get('device_name')).get('port_wise_capacity')[index_m]}) else: new_ports.append(port_v) try: bh_cap_mappng.get(device.get('device_name')).get('capacity').update({port_v:bh_cap_mappng.get(device.get('device_name')).get('port_wise_capacity')[index_m]}) except Exception: bh_cap_mappng.get(device.get('device_name')).get('capacity').update({port_v:bh_cap_mappng.get(device.get('device_name')).get('port_wise_capacity')[index_m-1]}) bh_cap_mappng.get(dev_name)[key_dic] = new_ports print "Setting redis Key backhaul_capacities with backhaul capacities " #for dev in bh_cap_mappng: bh_cap_mappng.get('11389').get('capacity').update({'GigabitEthernet0_0_24':'44'}) print bh_cap_mappng.get('11389').get('capacity') redis_hook_2.set("backhaul_capacities",str(bh_cap_mappng)) print "Successfully Created Key: backhaul_capacities in Redis. " def generate_basestation_inventory_for_util(): basestation_inventory_data_query=""" select DISTINCT(device_device.ip_address), device_device.device_name, device_devicetype.name, device_device.mac_address, device_device.ip_address, device_devicetype.agent_tag, inventory_sector.name, site_instance_siteinstance.name, device_device.device_alias, device_devicetechnology.name as techno_name, inventory_circuit.qos_bandwidth as QoS_BW from device_device inner join (device_devicetechnology, device_devicemodel, device_devicetype, machine_machine, site_instance_siteinstance, inventory_sector) on ( device_devicetype.id = device_device.device_type and device_devicetechnology.id = device_device.device_technology and device_devicemodel.id = device_device.device_model and machine_machine.id = device_device.machine_id and site_instance_siteinstance.id = device_device.site_instance_id and inventory_sector.sector_configured_on_id = device_device.id ) left join (inventory_circuit)
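# A minimal, self-contained sketch of the port/capacity mapping built by
# generate_backhaul_inventory_for_util() above: the group_concat columns
# arrive as '$$'-separated strings and are zipped into a per-port capacity
# dict. The sample row below is hypothetical test data, not real inventory.
def build_capacity_map(row):
    ports = row.get('port_name').split('$$') if row.get('port_name') else []
    caps = row.get('port_wise_capacity').split('$$') if row.get('port_wise_capacity') else []
    capacity = {}
    for index, port in enumerate(ports):
        # fall back to the first capacity when the lists are ragged,
        # mirroring the IndexError handling in the original function
        try:
            capacity[port] = caps[index]
        except IndexError:
            capacity[port] = caps[0] if caps else None
    return capacity

if __name__ == '__main__':
    sample = {'port_name': 'Gi0/1$$Gi0/2', 'port_wise_capacity': '100$$1000'}
    assert build_capacity_map(sample) == {'Gi0/1': '100', 'Gi0/2': '1000'}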
#!/usr/bin/env python
from __future__ import print_function

import base64
import os
import sys

if __name__ == "__main__":
    # create font data chunk for embedding
    font = "Tests/images/courB08"
    print("    f._load_pilfont_data(")
    print("        # %s" % os.path.basename(font))
    print("        BytesIO(base64.decodestring(b'''")
    base64.encode(open(font + ".pil", "rb"), sys.stdout)
    print("''')), Image.open(BytesIO(base64.decodestring(b'''")
    base64.encode(open(font + ".pbm", "rb"), sys.stdout)
    print("'''))))")

# End of file
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'Event.photo' db.add_column('events_event', 'photo', self.gf('django.db.models.fields.files.ImageField')(default='', max_length=200, blank=True), keep_default=False) def backwards(self, orm): # Deleting field 'Event.photo' db.delete_column('events_event', 'photo') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'events.day': { 'Meta': {'object_name': 'Day'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}) }, 'events.event': { 'Meta': {'object_name': 'Event'}, 'collaborators': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'collaboration_events'", 
'null': 'True', 'symmetrical': 'False', 'to': "orm['participant.Participant']"}), 'days': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['events.Day']", 'symmetrical': 'False'}), 'description': ('django.db.models.fields.TextField', [], {}), 'end_time': ('django.db.models.fields.TimeField', [], {}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'location': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['maps.Location']", 'null': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}), 'participant': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'own_events'", 'to': "orm['participant.Participant']"}), 'photo': ('django.db.models.fields.files.ImageField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}), 'start_time': ('django.db.models.fields.TimeField', [], {}) }, 'maps.location': { 'Meta': {'unique_together': "(('user', 'name'),)", 'object_name': 'Location'}, 'area': ('django.contrib.gis.db.models.fields.PolygonField', [], {'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'marker': ('django.contrib.gis.db.models.fields.PointField'
, [], {}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '150
'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'location_set'", 'to': "orm['auth.User']"}) }, 'participant.category': { 'Meta': {'object_name': 'Category'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '20'}) }, 'participant.participant': { 'Meta': {'object_name': 'Participant'}, 'approved_on': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['participant.Category']", 'symmetrical': 'False'}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'logo': ('django.db.models.fields.files.ImageField', [], {'max_length': '200'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'photo': ('django.db.models.fields.files.ImageField', [], {'max_length': '200', 'blank': 'True'}), 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}), 'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'}), 'website': ('django.db.models.fields.CharField', [], {'max_length': '2000'}) } } complete_apps = ['events']
import settings
import mysql.connector

from domain.domain import Article
from domain.domain import Project
from domain.domain import User
from domain.domain import Tag

import service.database as db


# Article management
class ArticleService:

    # Query the most recently published articles
    def query_most_published_article(self):
        conn = db.get_connection()
        sql = "".join(["select a.id as id,a.author_id as author_id,",
                       "u.name as author_name,a.title as title,a.content as content,a.create_time as create_time,",
                       "a.publish_time as publish_time,a.last_update_time as last_update_time",
                       " from article as a left join user as u on a.author_id=u.id",
                       " order by a.publish_time desc limit 0,%(page_size)s"])
        cursor = conn.cursor()
        cursor.execute(sql, {"page_size": settings.app_settings["page_size"]})
        articles = None
        for (id, author_id, author_name, title, content, create_time,
                publish_time, last_update_time) in cursor:
            if (not articles):
                articles = []
            article = Article()
            articles.append(article)
            article.id = id
            if (author_id):
                u = User()
                article.author = u
                u.id = author_id
                u.name = author_name
            article.title = title
            article.content = content
            article.create_time = create_time
            article.publish_time = publish_time
            article.last_update_time = last_update_time
        cursor.close()
        conn.close()
        return articles

    # Query articles by tag
    def query_article_by_tag(self, tag_id):
        if (not tag_id):
            return None
        _tag_id = None
        try:
            _tag_id = int(tag_id)
        except ValueError:
            return None
        sql = "".join(["select a.id as id,a.author_id as author_id,u.name as author_name",
                       ",a.title as title,a.create_time as create_time,a.publish_time as publish_time",
                       ",a.last_update_time as last_update_time",
                       " from article as a left join user as u on a.author_id=u.id",
                       " where a.publish_time is not null and a.id in (select article_id from article_tag where tag_id=%(tag_id)s)"])
        conn = db.get_connection()
        cursor = conn.cursor()
        cursor.execute(sql, {"tag_id": _tag_id})
        articles = None
        for (id, author_id, author_name, title, create_time, publish_time,
                last_update_time) in cursor:
            if (not articles):
                articles = []
            a = Article()
            articles.append(a)
            a.id = id
            a.title = title
            a.create_time = create_time
            a.publish_time = publish_time
            a.last_update_time = last_update_time
            if (author_id):
                u = User()
                a.author = u
                u.id = author_id
                u.name = author_name
        cursor.close()
        conn.close()
        return articles

    # Find an article by its ID
    def find(self, article_id):
        conn = db.get_connection()
        sql = "".join(["select a.id as id,a.author_id as author_id,",
                       "u.name as author_name,a.title as title,a.content as content,a.create_time as create_time,",
                       "a.publish_time as publish_time,a.last_update_time as last_update_time",
                       " from article as a left join user as u on a.author_id=u.id",
                       " where a.id=%(article_id)s"])
        cursor = conn.cursor()
        cursor.execute(sql, {"article_id": article_id})
        article = None
        for (id, author_id, author_name, title, content, create_time,
                publish_time, last_update_time) in cursor:
            if (not article):
                article = Article()
            article.id = id
            article.title = title
            article.content = content
            article.create_time = create_time
            article.publish_time = publish_time
            article.last_update_time = last_update_time
            if (author_id):
                u = User()
                article.author = u
                u.id = author_id
                u.name = author_name
        cursor.close()
        conn.close()
        return article

    # Add a new article
    def add(self, article):
        # not implemented yet
        return 1


# Tag management
class TagService:

    def list_all(self):
        conn = db.get_connection()
        if (not conn):
            return None
        sql = "".join(["select t.id as id, t.name as name, t.author_id as author_id, u.name as author_name",
                       ",t.create_time as create_time,t.last_update_time as last_update_time",
                       " from tag as t left join user as u on t.author_id=u.id order by t.create_time desc"])
        cursor = conn.cursor()
        cursor.execute(sql)
        tags = None
        for (id, name, author_id, author_name, create_time,
                last_update_time) in cursor:
            if (not tags):
                tags = []
            t = Tag()
            tags.append(t)
            t.id = id
            t.name = name
            t.create_time = create_time
            t.last_update_time = last_update_time
            if (author_id):
                u = User()
                t.author = u
                u.id = author_id
                u.name = author_name
        cursor.close()
        conn.close()
        return tags


article_service = ArticleService()
tag_service = TagService()
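# Validation-only sanity checks for query_article_by_tag(): both calls below
# return before db.get_connection() is reached, so no MySQL server is needed
# (assuming this module's own imports resolve).
if __name__ == '__main__':
    assert article_service.query_article_by_tag(None) is None
    assert article_service.query_article_by_tag('not-a-number') is None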
.post(URL_CMS_PLUGIN_ADD, plugin_data) plugin_id = int(response.content) self.assertEquals(response.status_code, 200) self.assertEquals(int(response.content), CMSPlugin.objects.all()[0].pk) # there should be only 1 plugin self.assertEquals(CMSPlugin.objects.all().count(), 1) self.assertEquals(CMSPlugin.objects.filter(placeholder__page__publisher_is_draft=True).count(), 1) # publish page response = self.client.post(URL_CMS_PAGE + "%d/change-status/" % page.pk, {1: 1}) self.assertEqual(response.status_code, 200) self.assertEquals(Page.objects.count(), 2) # there should now be two plugins - 1 draft, 1 public self.assertEquals(CMSPlugin.objects.all().count(), 2) # delete the plugin plugin_data = { 'plugin_id': plugin_id } remove_url = URL_CMS_PLUGIN_REMOVE response = self.client.post(remove_url, plugin_data) self.assertEquals(response.status_code, 200) # there should be no plugins self.assertEquals(CMSPlugin.objects.all().count(), 1) self.assertEquals(CMSPlugin.objects.filter(placeholder__page__publisher_is_draft=False).count(), 1) def test_remove_plugin_not_associated_to_page(self): """ Test case for PlaceholderField """ page_data = self.get_new_page_data() response = self.client.post(URL_CMS_PAGE_ADD, page_data) page = Page.objects.all()[0]
# add a plugin plugin_data = { 'plugin_type': "TextPlugin", 'language': settings.LANGUAGES[0][0], 'placeholder': page.placeholders.get(slot="body").pk, } response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data) self.assertEquals(response.status_code, 200) self.assertEquals(int(response.content), CMSPlugin.objects.all()[0].pk) # there should be only 1 plugin
self.assertEquals(CMSPlugin.objects.all().count(), 1) ph = Placeholder(slot="subplugin") ph.save() plugin_data = { 'plugin_type': "TextPlugin", 'language': settings.LANGUAGES[0][0], 'placeholder': ph.pk, 'parent': int(response.content) } response = self.client.post(URL_CMS_PLUGIN_ADD, plugin_data) # no longer allowed for security reasons self.assertEqual(response.status_code, 404) def test_register_plugin_twice_should_raise(self): number_of_plugins_before = len(plugin_pool.get_all_plugins()) # The first time we register the plugin is should work plugin_pool.register_plugin(DumbFixturePlugin) # Let's add it a second time. We should catch and exception raised = False try: plugin_pool.register_plugin(DumbFixturePlugin) except PluginAlreadyRegistered: raised = True self.assertTrue(raised) # Let's also unregister the plugin now, and assert it's not in the # pool anymore plugin_pool.unregister_plugin(DumbFixturePlugin) # Let's make sure we have the same number of plugins as before: number_of_plugins_after = len(plugin_pool.get_all_plugins()) self.assertEqual(number_of_plugins_before, number_of_plugins_after) def test_unregister_non_existing_plugin_should_raise(self): number_of_plugins_before = len(plugin_pool.get_all_plugins()) raised = False try: # There should not be such a plugin registered if the others tests # don't leak plugins plugin_pool.unregister_plugin(DumbFixturePlugin) except PluginNotRegistered: raised = True self.assertTrue(raised) # Let's count, to make sure we didn't remove a plugin accidentally. number_of_plugins_after = len(plugin_pool.get_all_plugins()) self.assertEqual(number_of_plugins_before, number_of_plugins_after) def test_inheritplugin_media(self): """ Test case for InheritPagePlaceholder """ inheritfrompage = create_page('page to inherit from', 'nav_playground.html', 'en') body = inheritfrompage.placeholders.get(slot="body") plugin = TwitterRecentEntries( plugin_type='TwitterRecentEntriesPlugin', placeholder=body, position=1, language=settings.LANGUAGE_CODE, twitter_user='djangocms', ) plugin.insert_at(None, position='last-child', save=True) inheritfrompage.publish() page = create_page('inherit from page', 'nav_playground.html', 'en', published=True) inherited_body = page.placeholders.get(slot="body") inherit_plugin = InheritPagePlaceholder( plugin_type='InheritPagePlaceholderPlugin', placeholder=inherited_body, position=1, language=settings.LANGUAGE_CODE, from_page=inheritfrompage, from_language=settings.LANGUAGE_CODE) inherit_plugin.insert_at(None, position='last-child', save=True) page.publish() self.client.logout() response = self.client.get(page.get_absolute_url()) self.assertTrue('%scms/js/libs/jquery.tweet.js' % settings.STATIC_URL in response.content, response.content) def test_inherit_plugin_with_empty_plugin(self): inheritfrompage = create_page('page to inherit from', 'nav_playground.html', 'en', published=True) body = inheritfrompage.placeholders.get(slot="body") empty_plugin = CMSPlugin( plugin_type='TextPlugin', # create an empty plugin placeholder=body, position=1, language='en', ) empty_plugin.insert_at(None, position='last-child', save=True) other_page = create_page('other page', 'nav_playground.html', 'en', published=True) inherited_body = other_page.placeholders.get(slot="body") inherit_plugin = InheritPagePlaceholder( plugin_type='InheritPagePlaceholderPlugin', placeholder=inherited_body, position=1, language='en', from_page=inheritfrompage, from_language='en' ) inherit_plugin.insert_at(None, position='last-child', save=True) 
add_plugin(inherited_body, "TextPlugin", "en", body="foobar") # this should not fail, even if there in an empty plugin rendered = inherited_body.render(context=self.get_context(other_page.get_absolute_url()), width=200) self.assertIn("foobar", rendered) def test_render_textplugin(self): # Setup page = create_page("render test", "nav_playground.html", "en") ph = page.placeholders.get(slot="body") text_plugin = add_plugin(ph, "TextPlugin", "en", body="Hello World") link_plugins = [] for i in range(0, 10): link_plugins.append(add_plugin(ph, "LinkPlugin", "en", target=text_plugin, name="A Link %d" % i, url="http://django-cms.org")) text_plugin.text.body += '<img src="/static/cms/images/plugins/link.png" alt="Link - %s" id="plugin_obj_%d" title="Link - %s" />' % ( link_plugins[-1].name, link_plugins[-1].pk, link_plugins[-1].name, ) text_plugin.save() txt = text_plugin.text ph = Placeholder.objects.get(pk=ph.pk) with self.assertNumQueries(2): # 1 query for the CMSPlugin objects, # 1 query for each type of child object (1 in this case, all are Link plugins) txt.body = plugin_tags_to_admin_html( '\n'.join(["{{ plugin_object %d }}" % l.cmsplugin_ptr_id for l in link_plugins])) txt.sav
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock from oslo_serialization import jsonutils from nova import objects from nova.objects import base as obj_base from nova.scheduler.filters import numa_topology_filter from nova import test from nova.tests.unit import fake_instance from nova.tests.unit.scheduler import fakes from nova.virt import hardware class TestNUMATopologyFilter(test.NoDBTestCase): def setUp(self): super(TestNUMATopologyFilter, self).setUp() self.filt_cls = numa_topology_filter.NUMATopologyFilter() def test_numa_topology_filter_pass(self): instance_topology = objects.InstanceNUMATopology( cells=[objects.InstanceNUMACell(id=0, cpuset=set([1]), memory=512), objects.InstanceNUMACell(id=1, cpuset=set([3]), memory=512) ]) instance = fake_instance.fake_instance_obj(mock.sentinel.ctx) instance.numa_topology = instance_topology filter_properties = { 'request_spec': { 'instance_properties': jsonutils.to_primitive( obj_base.obj_to_primitive(instance))}} host = fakes.FakeHostState('host1', 'node1', {'numa_topology': fakes.NUMA_TOPOLOGY, 'pci_stats': None}) self.assertTrue(self.filt_cls.host_passes(host, filter_properties)) def test_numa_topology_filter_numa_instance_no_numa_host_fail(self): instance_topology = objects.InstanceNUMATopology( cells=[objects.InstanceNUMACell(id=0, cpuset=set([1]), memory=512), objects.InstanceNUMACell(id=1, cpuset=set([3]), memory=512) ]) instance = fake_instance.fake_instance_obj(mock.sentinel.ctx) instance.numa_topology = instance_topology filter_properties = { 'request_spec': { 'instance_properties': jsonutils.to_primitive( obj_base.obj_to_primitive(instance))}} host = fakes.FakeHostState('host1', 'node1', {'pci_stats': None}) self.assertFalse(self.filt_cls.host_passes(host, filter_properties)) def test_numa_topology_filter_numa_host_no_numa_instance_pass(self): instance = fake_instance.fake_instance_obj(mock.sentinel.ctx) instance.numa_topology = None filter_properties = { 'request_spec': { 'instance_properties': jsonutils.to_primitive( obj_base.obj_to_primitive(instance))}} host = fakes.FakeHostState('host1', 'node1', {'numa_topology': fakes.NUMA_TOPOLOGY}) self.assertTrue(self.filt_cls.host_passes(host, filter_properties)) def test_numa_topology_filter_fail_fit(self): instance_topology = objects.InstanceNUMATopology( cells=[objects.InstanceNUMACell(id=0, cpuset=set([1]), memory=512), objects.InstanceNUMACell(id=1, cpuset=set([2]), memory=512), objects.InstanceNUMACell(id=2, cpuset=set([3]), memory=512) ]) instance = fake_instance.fake_instance_obj(mock.sentinel.ctx) instance.numa_topology = instance_topology filter_properties = { 'request_spec': { 'instance_properties': jsonutils.to_primitive( obj_base.obj_to_primitive(instance))}} host = fakes.FakeHostState('host1', 'node1', {'numa_topology': fakes.NUMA_TOPOLOGY, 'pci_stats': None}) self.assertFalse(self.filt_cls.host_passes(host, filter_properties)) def test_numa_topology_filter_fail_memory(self): self.flags(ram_allocation_ratio=1) instance_topology = objects.InstanceNUMATopology( 
cells=[objects.InstanceNUMACell(id=0, cpuset=set([1]), memory=1024), objects.InstanceNUMACell(id=1, cpuset=set([3]), memory=512) ]) instance = fake_instance.fake_instance_obj(mock.sentinel.ctx) instance.numa_topology = instance_topology filter_properties = { 'request_spec': { 'instance_properties': jsonutils.to_primitive( obj_base.obj_to_primitive(instance))}} host = fakes.FakeHostState('host1', 'node1', {'numa_topology': fakes.NUMA_TOPOLOGY, 'pci_stats': None}) self.assertFalse(self.filt_cls.host_passes(host, filter_properti
es)) def test_numa_topology_filter_fail_cpu(self): self.flags(cpu_allocation_ratio=1) instance_topology = objects.InstanceNUMATopology( cells=[objects.InstanceNUMACell(id=0, cpuset=set([1]), memory=512),
objects.InstanceNUMACell(id=1, cpuset=set([3, 4, 5]), memory=512)]) instance = fake_instance.fake_instance_obj(mock.sentinel.ctx) instance.numa_topology = instance_topology filter_properties = { 'request_spec': { 'instance_properties': jsonutils.to_primitive( obj_base.obj_to_primitive(instance))}} host = fakes.FakeHostState('host1', 'node1', {'numa_topology': fakes.NUMA_TOPOLOGY, 'pci_stats': None}) self.assertFalse(self.filt_cls.host_passes(host, filter_properties)) def test_numa_topology_filter_pass_set_limit(self): self.flags(cpu_allocation_ratio=21) self.flags(ram_allocation_ratio=1.3) instance_topology = objects.InstanceNUMATopology( cells=[objects.InstanceNUMACell(id=0, cpuset=set([1]), memory=512), objects.InstanceNUMACell(id=1, cpuset=set([3]), memory=512) ]) instance = fake_instance.fake_instance_obj(mock.sentinel.ctx) instance.numa_topology = instance_topology filter_properties = { 'request_spec': { 'instance_properties': jsonutils.to_primitive( obj_base.obj_to_primitive(instance))}} host = fakes.FakeHostState('host1', 'node1', {'numa_topology': fakes.NUMA_TOPOLOGY, 'pci_stats': None}) self.assertTrue(self.filt_cls.host_passes(host, filter_properties)) limits_topology = hardware.VirtNUMALimitTopology.from_json( host.limits['numa_topology']) self.assertEqual(limits_topology.cells[0].cpu_limit, 42) self.assertEqual(limits_topology.cells[1].cpu_limit, 42) self.assertEqual(limits_topology.cells[0].memory_limit, 665) self.assertEqual(limits_topology.cells[1].memory_limit, 665)
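# The tests above rebuild an identical request_spec dict inline each time; a
# small helper in this style (hypothetical, not part of the original suite)
# captures that pattern using the jsonutils/obj_base imports at the top of
# this module.
def make_filter_properties(instance):
    """Wrap an instance in the request_spec shape NUMATopologyFilter reads."""
    return {
        'request_spec': {
            'instance_properties': jsonutils.to_primitive(
                obj_base.obj_to_primitive(instance))}}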
#!/usr/bin/env python

from os.path import dirname, join
import plyer

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

curdir = dirname(__file__)
packages = [
    'plyer',
    'plyer.platforms',
    'plyer.platforms.linux',
    'plyer.platforms.android',
    'plyer.platforms.win',
    'plyer.platforms.win.libs',
    'plyer.platforms.ios',
    'plyer.platforms.macosx',
]

setup(
    name='plyer',
    version=plyer.__version__,
    description='Platform-independent wrapper for platform-dependent APIs',
    long_description=open(join(curdir, 'README.rst')).read(),
    author='Kivy team',
    author_email=
'mat@kivy.org', url='https://plyer.readthedocs.org/en/latest/', packages=packages, package_data={'': ['LICENSE', 'README.rst']}, package_dir={'plyer': 'plyer'}, include_package_data=True, license=open(join(curdir, 'LICENSE')).read(),
zip_safe=False, classifiers=( 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Natural Language :: English', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', ), )
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Description: File system resilience testing application
# Author: Hubert Kario <hubert@kario.pl>
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Copyright (c) 2015 Hubert Kario. All rights reserved.
#
# This copyrighted material is made available to anyone wishing
# to use, modify, copy, or redistribute it subject to the terms
# and conditions of the GNU General Public License version 2.
#
# This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this program; if not, write to the Free
# Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
# Boston, MA 02110-1301, USA.
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

"""Handling of NBD requests."""

import struct

from .constants import Magic, RequestType
from ..compat import compat_str


class Error(Exception):
    """Exception describing what went wrong."""

    def __repr__(self):
        """Format exception."""
        return "request.{0}".format(super(Error, self).__repr__())


class NBDRequest(object):
    """Representation of single NBD protocol request."""

    def __init__(self, req_type, handle, data_from, data_length, data=None):
        """Make a NBD protocol request object."""
        self.req_type = req_type
        self.handle = handle
        self.data_from = data_from
        self.data_length = data_length
        self.data = data

    def __eq__(self, other):
        """Check if the other object is equal to this object."""
        return (isinstance(other, self.__class__)
                and self.__dict__ == other.__dict__)

    def __ne__(self, other):
        """Check if the other object is different from this object."""
        return not self.__eq__(other)


def recvexactly(sock, size, flags=0):
    """recv exactly size bytes from socket."""
    buff = bytearray(size)
    view = memoryview(buff)
    pos = 0
    while pos < size:
        read = sock.recv_into(view[pos:], size - pos, flags)
        if not read:
            raise Error("Incomplete read, expected {0}, read {1}"
                        .format(size, pos))
        pos += read
    return buff


class NBDRequestSocket(object):
    """Handle requests on NBD socket."""

    request_fmt = ">IIQQI"
    request_length = struct.calcsize(request_fmt)

    def __init__(self, sock):
        """Initialize the socket wrapper."""
        self.sock = sock

    def recv(self):
        """Receive a single request from socket and return it."""
        data = recvexactly(self.sock, self.request_length)
        assert len(data) == self.request_length
        data = compat_str(data)
        result_tuple = struct.unpack(self.request_fmt, data)
        magic, req_type, handle, data_from, data_length = result_tuple
        if magic != Magic.NBD_REQUEST_MAGIC:
            raise Error("Request magic invalid: {0}".format(magic))
        if req_type != RequestType.NBD_CMD_WRITE:
            return NBDRequest(req_type, handle, data_from, data_length)
        payload = recvexactly(self.sock, data_length)
        return NBDRequest(req_type, handle, data_from, data_length, payload)

    def send(self, request):
        """Send a single request through socket."""
        data = struct.pack(self.request_fmt,
                           Magic.NBD_REQUEST_MAGIC,
                           request.req_type,
                           request.handle,
                           request.data_from,
                           request.data_length)
        if request.req_type == RequestType.NBD_CMD_WRITE:
            data = data + request.data
        self.sock.sendall(data)
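# Round-trip sketch for NBDRequestSocket over a local socketpair. It assumes
# RequestType also defines NBD_CMD_READ alongside NBD_CMD_WRITE (standard in
# the NBD protocol); a read request carries no payload, so recv() returns
# after the fixed-size header. Not wired into the module's tests.
def _selftest_roundtrip():
    import socket
    left, right = socket.socketpair()
    request = NBDRequest(RequestType.NBD_CMD_READ, handle=1,
                         data_from=0, data_length=512)
    NBDRequestSocket(left).send(request)
    echoed = NBDRequestSocket(right).recv()
    assert echoed == request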
ites must match the ID patterns" bad_changes.append(BadChange(status, path, content=msg)) continue # At levels above the suites, can only add directories if len(names) < self.LEN_ID: if status[0] != self.ST_ADDED: msg = ( "At levels above the suites, " "can only add directories" ) bad_changes.append(BadChange(status, path, content=msg)) continue # Cannot have a file at the branch level if len(names) == self.LEN_ID + 1 and tail is None: msg = "Cannot have a file at the branch level" bad_changes.append(BadChange(status, path, content=msg)) continue # New suite should have an info file if len(names) == self.LEN_ID and status == self.ST_ADDED: if (self.ST_ADDED, path + "trunk/") not in changes: bad_changes.append( BadChange(status, path, BadChange.NO_TRUNK) ) continue path_trunk_info_file = path + self.TRUNK_INFO_FILE if (self.ST_ADDED, path_trunk_info_file) not in changes and ( self.ST_UPDATED, path_trunk_info_file, ) not in changes: bad_changes.append( BadChange(status, path, BadChange.NO_INFO) ) continue sid = "".join(names[0 : self.LEN_ID]) branch = names[self.LEN_ID] if len(names) > self.LEN_ID else None path_head = "/".join(sid) + "/" path_tail = path[len(path_head) :] is_meta_suite = sid == "ROSIE" if status != self.ST_DELETED: # Check info file if sid not in txn_info_map: try: txn_info_map[sid] = self._load_info( repos, sid, branch=branch, transaction=txn ) err = None except ConfigSyntaxError as exc: err = InfoFileError(InfoFileError.VALUE, exc) except RosePopenError as exc: err = InfoFileError(InfoFileError.NO_INFO, exc.stderr) if err: bad_changes.append(err) txn_info_map[sid] = err continue # Suite must have an owner txn_owner, txn_access_list = self._get_access_info( txn_info_map[sid] ) if not txn_owner: bad_changes.append( InfoFileError(InfoFileError.NO_OWNER) ) continue # No need to check other non-trunk changes if branch and branch != "trunk": continue # For meta suite, make sure keys in keys file can be parsed if is_meta_suite and path_tail == self.TRUNK_KNOWN_KEYS_FILE: out = self._svnlook("cat", "-t", txn, repos, path) try: shlex.split(out) except ValueError: bad_changes.append( BadChange(status, path, BadChange.VALUE) ) continue # User IDs of owner and access list must be real if ( status != self.ST_DELETED and path_tail == self.TRUNK_INFO_FILE and not isinstance(txn_info_map[sid], InfoFileError) ): txn_owner, txn_access_lis
t = self._get_access_info( txn_info_map[sid] ) if self._verify_users( status, path, txn_owner, txn_access_list, bad_changes ): conti
nue reports = DefaultValidators().validate( txn_info_map[sid], load_meta_config( txn_info_map[sid], config_type=metomi.rose.INFO_CONFIG_NAME, ), ) if reports: reports_str = get_reports_as_text({None: reports}, path) bad_changes.append( BadChange(status, path, BadChange.VALUE, reports_str) ) continue # Can only remove trunk information file with suite if status == self.ST_DELETED and path_tail == self.TRUNK_INFO_FILE: if (self.ST_DELETED, path_head) not in changes: bad_changes.append( BadChange(status, path, BadChange.NO_INFO) ) continue # Can only remove trunk with suite # (Don't allow replacing trunk with a copy from elsewhere, either) if status == self.ST_DELETED and path_tail == "trunk/": if (self.ST_DELETED, path_head) not in changes: bad_changes.append( BadChange(status, path, BadChange.NO_TRUNK) ) continue # New suite trunk: ignore the rest if (self.ST_ADDED, path_head + "trunk/") in changes: continue # See whether author has permission to make changes if author is None: author = self._svnlook("author", "-t", txn, repos).strip() if super_users is None: super_users = [] for s_key in ["rosa-svn", "rosa-svn-pre-commit"]: value = conf.get_value([s_key, "super-users"]) if value is not None: super_users = shlex.split(value) break if sid not in rev_info_map: rev_info_map[sid] = self._load_info(repos, sid, branch=branch) owner, access_list = self._get_access_info(rev_info_map[sid]) admin_users = super_users + [owner] # Only admin users can remove the suite if author not in admin_users and not path_tail: msg = "Only the suite owner can remove the suite" bad_changes.append(BadChange(status, path, content=msg)) continue # Admin users and those in access list can modify everything in # trunk apart from specific entries in the trunk info file if "*" in access_list or author in admin_users + access_list: if path_tail != self.TRUNK_INFO_FILE: continue else: msg = "User not in access list" bad_changes.append(BadChange(status, path, content=msg)) continue # Only the admin users can change owner and access list if owner == txn_owner and access_list == txn_access_list: continue if author not in admin_users: if owner != txn_owner: bad_changes.append( BadChange( status, path, BadChange.PERM, "owner=" + txn_owner ) ) else: # access list bad_change = BadChange( status, path, BadChange.PERM, "access-list=" + " ".join(txn_access_list), ) bad_changes.append(bad_change) continue if bad_changes: raise BadChanges(bad_changes) __call__ = run def main(): """Implement "rosa svn-pre-commit".""" add_meta_paths() opt_parser = RoseOptionParser() opts, args = opt_parser.parse_args() repos, txn = args report = Reporter(opts.verbosity - opt
# encoding: utf-8 # Copyright 2012 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed
under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from .Rackspace
import Rackspace as delegate_class
# From: https://gist.github.com/nathan-hoad/8966377 import os import asyncio import sys from asyncio.streams import StreamWriter, FlowControlMixin reader, writer = None, None @asyncio.coroutine def stdio(loop=None): if loop is None: loop = asyncio.get_event_loop() reader = asyncio.StreamReader() reader_protocol = asyncio.StreamReaderProtocol(reader) writer_transport, writer_protocol = yield from loop.connect_write_pipe(FlowControlMixin, os.fdopen(0, 'wb')) writer = StreamWriter(writer_transport, writer_protocol, None, loop) yield from loop.connect_read_pipe(lambda: reader_protocol, sys.stdin) return reader, writer @asyncio.coroutine def async_input(message): if isinstance(message, str): message = message.encode('utf8') global reader, writer if (reader, writer) == (None, None): reader, writer = yield from stdio() writer.write(message) yield from writer.drain(
) line = yield from
reader.readline() return line.decode('utf8').replace('\r', '').replace('\n', '')
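# Minimal usage sketch: drive async_input() from a plain event loop. This
# needs an interactive stdin, so it is guarded under __main__.
if __name__ == '__main__':
    loop = asyncio.get_event_loop()
    answer = loop.run_until_complete(async_input('Name: '))
    print('Hello, {}!'.format(answer))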
# -*- coding: utf-8 -*- ######################################################################### # # Copyright (C) 2016 OSGeo # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ######################################################################### from django import forms from django.core.validators import validate_email, ValidationError from slugify import slugify from django.utils.translation import ugettext as _ from modeltranslation.forms import TranslationModelForm from django.contrib.auth import get_user_model from geonode.groups.models import GroupProfile class GroupForm(TranslationModelForm): slug = forms.SlugField( max_length=20, help_text=_("a short version of the name consisting only of letters, numbers, underscores and hyphens."), widget=forms.HiddenInput, required=False) def clean_slug(self): if GroupProfile.objects.filter( slug__iexact=self.cleaned_data["slug"]).count() > 0: raise forms.ValidationError( _("A group already exists with that slug.")) return self.cleaned_data["slug"].lower() def clean_title(self): if GroupProfile.objects.filter( title__iexact=self.cleaned_data["title"]).count() > 0: raise forms.ValidationError( _("A group already exists with that name.")) return self.cleaned_data["title"] def clean(self): cleaned_data = self.cleaned_data name = cleaned_data.get("title") slug = slugify(name) cleaned_data["slug"] = slug return cleaned_data class Meta: model = GroupProfile exclude = ['group'] class GroupUpdateForm(forms.ModelForm): def clean_name(self): if GroupProfile.objects.filter(
name__iexact=self.cleaned_data["title"]).count() > 0: if self.cleaned_data["title"] == self.instance.name: pass # same instance else: raise forms.ValidationError( _("A group already exists with that nam
e.")) return self.cleaned_data["title"] class Meta: model = GroupProfile exclude = ['group'] class GroupMemberForm(forms.Form): role = forms.ChoiceField(choices=[ ("member", "Member"), ("manager", "Manager"), ]) user_identifiers = forms.CharField( widget=forms.TextInput( attrs={ 'class': 'user-select'})) def clean_user_identifiers(self): value = self.cleaned_data["user_identifiers"] new_members, errors = [], [] for ui in value.split(","): ui = ui.strip() try: validate_email(ui) try: new_members.append(get_user_model().objects.get(email=ui)) except get_user_model().DoesNotExist: new_members.append(ui) except ValidationError: try: new_members.append( get_user_model().objects.get( username=ui)) except get_user_model().DoesNotExist: errors.append(ui) if errors: message = ( "The following are not valid email addresses or " "usernames: %s; not added to the group" % ", ".join(errors)) raise forms.ValidationError(message) return new_members class GroupInviteForm(forms.Form): invite_role = forms.ChoiceField(label="Role", choices=[ ("member", "Member"), ("manager", "Manager"), ]) invite_user_identifiers = forms.CharField( label="E-mail addresses list", widget=forms.Textarea) def clean_user_identifiers(self): value = self.cleaned_data["invite_user_identifiers"] invitees, errors = [], [] for ui in value.split(","): ui = ui.strip() try: validate_email(ui) try: invitees.append(get_user_model().objects.get(email=ui)) except get_user_model().DoesNotExist: invitees.append(ui) except ValidationError: try: invitees.append(get_user_model().objects.get(username=ui)) except get_user_model().DoesNotExist: errors.append(ui) if errors: message = ( "The following are not valid email addresses or " "usernames: %s; no invitations sent" % ", ".join(errors)) raise forms.ValidationError(message) return invitees
t_address, server): self.user = None self.host = client_address # Client's hostname / ip. self.realname = None # Client's real name self.nick = None # Client's currently registered nickname self.send_queue = [] # Messages to send to client (strings) self.channels = {} # Channels the client is in _py2_compat.socketserver.BaseRequestHandler.__init__(self, request, client_address, server) def handle(self): log.info('Client connected: %s', self.client_ident()) self.buffer = buffer.LineBuffer() try: while True: self._handle_one() except self.Disconnect: self.request.close() def _handle_one(self): """ Handle one read/write cycle. """ ready_to_read, ready_to_write, in_error = select.select( [self.request], [self.request], [self.request], 0.1) if in_error: raise self.Disconnect() # Write any commands to the client while self.send_queue and ready_to_write: msg = self.send_queue.pop(0) self._send(msg) # See if the client has any commands for us. if ready_to_read: self._handle_incoming() def _handle_incoming(self): try: data = self.request.recv(1024) except Exception: raise self.Disconnect() if not data: raise self.Disconnect() self.buffer.feed(data) for line in self.buffer: self._handle_line(line) def _handle_line(self, line): try: log.debug('from %s: %s' % (self.client_ident(), line)) command, sep, params = line.partition(' ') handler = getattr(self, 'handle_%s' % command.lower(), None) if not handler: log.info('No handl
er for command: %s. ' 'Full line: %s' % (command, line)) raise IRCError.from_name('unknowncommand', '%s
:Unknown command' % command) response = handler(params) except AttributeError as e: log.error(_py2_compat.str(e)) raise except IRCError as e: response = ':%s %s %s' % (self.server.servername, e.code, e.value) log.error(response) except Exception as e: response = ':%s ERROR %r' % (self.server.servername, e) log.error(response) raise if response: self._send(response) def _send(self, msg): log.debug('to %s: %s', self.client_ident(), msg) self.request.send(msg + '\r\n') def handle_nick(self, params): """ Handle the initial setting of the user's nickname and nick changes. """ nick = params # Valid nickname? if re.search('[^a-zA-Z0-9\-\[\]\'`^{}_]', nick): raise IRCError.from_name('erroneusnickname', ':%s' % nick) if self.server.clients.get(nick, None) == self: # Already registered to user return if nick in self.server.clients: # Someone else is using the nick raise IRCError.from_name('nicknameinuse', 'NICK :%s' % (nick)) if not self.nick: # New connection and nick is available; register and send welcome # and MOTD. self.nick = nick self.server.clients[nick] = self response = ':%s %s %s :%s' % (self.server.servername, events.codes['welcome'], self.nick, SRV_WELCOME) self.send_queue.append(response) response = ':%s 376 %s :End of MOTD command.' % ( self.server.servername, self.nick) self.send_queue.append(response) return # Nick is available. Change the nick. message = ':%s NICK :%s' % (self.client_ident(), nick) self.server.clients.pop(self.nick) self.nick = nick self.server.clients[self.nick] = self # Send a notification of the nick change to all the clients in the # channels the client is in. for channel in self.channels.values(): self._send_to_others(message, channel) # Send a notification of the nick change to the client itself return message def handle_user(self, params): """ Handle the USER command which identifies the user to the server. """ params = params.split(' ', 3) if len(params) != 4: raise IRCError.from_name('needmoreparams', 'USER :Not enough parameters') user, mode, unused, realname = params self.user = user self.mode = mode self.realname = realname return '' def handle_ping(self, params): """ Handle client PING requests to keep the connection alive. """ response = ':%s PONG :%s' % (self.server.servername, self.server.servername) return response def handle_join(self, params): """ Handle the JOINing of a user to a channel. Valid channel names start with a # and consist of a-z, A-Z, 0-9 and/or '_'. """ channel_names = params.split(' ', 1)[0] # Ignore keys for channel_name in channel_names.split(','): r_channel_name = channel_name.strip() # Valid channel name? if not re.match('^#([a-zA-Z0-9_])+$', r_channel_name): raise IRCError.from_name('nosuchchannel', '%s :No such channel' % r_channel_name) # Add user to the channel (create new channel if not exists) channel = self.server.channels.setdefault(r_channel_name, IRCChannel(r_channel_name)) channel.clients.add(self) # Add channel to user's channel list self.channels[channel.name] = channel # Send the topic response_join = ':%s TOPIC %s :%s' % (channel.topic_by, channel.name, channel.topic) self.send_queue.append(response_join) # Send join message to everybody in the channel, including yourself and # send user list of the channel back to the user. 
response_join = ':%s JOIN :%s' % (self.client_ident(), r_channel_name) for client in channel.clients: client.send_queue.append(response_join) nicks = [client.nick for client in channel.clients] response_userlist = ':%s 353 %s = %s :%s' % (self.server.servername, self.nick, channel.name, ' '.join(nicks)) self.send_queue.append(response_userlist) response = ':%s 366 %s %s :End of /NAMES list' % (self.server.servername, self.nick, channel.name) self.send_queue.append(response) def handle_privmsg(self, params): """ Handle sending a private message to a user or channel. """ target, sep, msg = params.partition(' ') if not msg: raise IRCError.from_name('needmoreparams', 'PRIVMSG :Not enough parameters') message = ':%s PRIVMSG %s %s' % (self.client_ident(), target, msg) if target.startswith('#') or target.startswith('$'): # Message to channel. Check if the channel exists. channel = self.server.channels.get(target) if not channel: raise IRCError.from_name('nosuchnick', 'PRIVMSG :%s' % target) if not channel.name in self.channels: # The user isn't in the channel. raise IRCError.from_name('cannotsendtochan', '%s :Cannot send to channel' % channel.name) self._send_to_others(message, channel) else: # Message to user client = self.server.clients.get(target, None) if not client: raise IRCError.from_name('nosuchnick', 'PRIVMSG :%s' % target) client.send_queue.append(message) def _send_to_others(self, message, channel): """ Send the message to all clients i
import random import time import datetime from consts import * __all__ = ['gen_valid_id', 'gen_list_page', 'log'] def gen_valid_id(collection): def gen_id(): _id = '' for i in range(4): _id += random.choice('0123456789') return _id id =
gen_id() while collection.find_one({'id': id}): id = gen_id() return id def gen_list_page(collection, status, page=1): page = int(page) left = (page - 1) * 15 right = left + 15 all = collection.find({'status': status}).sort([('id', 1)]) max_page = int((all.count()-1) / 15) + 1 if all.count() else 0 if page > max_page: return PAGE_NOT_EXIST elif page < 1: return ARGS_INCORRECT header = '===== {0}/{1} =====\n'.format(page,
max_page) selected = all[left:right] return header + '\n'.join([ '{id} {title} ({comment})'.format(**i) for i in selected]) def log(m): with open('log', 'a') as f: if m.type == 'text': exp=m.content elif m.type == 'image': exp=m.img elif m.type == 'link': exp=';'.join([m.title, m.description, m.url]) else: exp=str(dict(m)) f.write(LOG.format(datetime.datetime.fromtimestamp( time.time()).strftime('%Y-%m-%d %H:%M:%S'), m.source, m.type, exp)) def add_key(key, value): from pymongo import MongoClient collection = MongoClient()['SongsDistributor']['collection'] for i in ('checked', 'pending'): collection.update_many({'status': i}, {'$set': {key: value}}) print('ok')
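# Pure-arithmetic sketch of the paging in gen_list_page(): 15 items per page,
# max_page derived from the total count. No MongoDB connection involved.
if __name__ == '__main__':
    for total, expected_max in ((0, 0), (1, 1), (15, 1), (16, 2)):
        max_page = int((total - 1) / 15) + 1 if total else 0
        assert max_page == expected_max, (total, max_page)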
# --------------------
----------------------
----------------------------------- # # This file is the copyrighted property of Tableau Software and is protected # by registered patents and other applicable U.S. and international laws and # regulations. # # Unlicensed use of the contents of this file is prohibited. Please refer to # the NOTICES.txt file for further details. # # ----------------------------------------------------------------------------- from ctypes import * from . import Libs class TableauException(Exception): def __init__(self, errorCode, message): Exception.__init__(self, message) self.errorCode = errorCode self.message = message def __str__(self): return 'TableauException ({0}): {1}'.format(self.errorCode, self.message) def GetLastErrorMessage(): common_lib = Libs.LoadLibs().load_lib('Common') common_lib.TabGetLastErrorMessage.argtypes = [] common_lib.TabGetLastErrorMessage.restype = c_wchar_p return wstring_at(common_lib.TabGetLastErrorMessage())
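# Tiny usage sketch for the exception type above; the error code and message
# here are made up, not real SDK output.
if __name__ == '__main__':
    try:
        raise TableauException(4, 'could not open extract')
    except TableauException as exc:
        print(exc)  # TableauException (4): could not open extract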
# Copyright (C) 2011 One Laptop Per Child # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA import os import logging from gi.repository import GConf from gi.repository import Gst from gi.repository import Gtk from gi.repository import Gdk from gi.repository import GObject DEFAULT_PITCH = 0 DEFAULT_RATE = 0 _speech_manager = None class SpeechManager(GObject.GObject): __gtype_name__ = 'SpeechManager' __gsignals__ = { 'play': (GObject.SignalFlags.RUN_FIRST, None, []), 'pause': (GObject.SignalFlags.RUN_FIRST, None, []), 'stop': (GObject.SignalFlags.RUN_FIRST, None, []) } MIN_PITCH = -100 MAX_PITCH = 100 MIN_RATE = -100 MAX_RATE = 100 def __init__(self, **kwargs): GObject.GObject.__init__(self, **kwargs) self._player = _GstSpeechPlayer() self._player.connect('play', self._update_state, 'play') self._player.connect('stop', self._update_state, 'stop') self._player.connect('pause', self._update_state, 'pause') self._voice_name = self._player.get_default_voice() self._pitch = DEFAULT_PITCH self._rate = DEFAULT_RATE self._is_playing = False self._is_paused = False self.restore() def _update_state(self, player, signal): self._is_playing = (signal == 'play') self._is_paused = (signal == 'pause') self.emit(signal) def get_is_playing(self): return self._is_playing is_playing = GObject.property(type=bool, getter=get_is_playing, setter=None, default=False) def get_is_paused(self): return self._is_paused is_paused = GObject.property(type=bool, getter=get_is_paused, setter=None, default=False) def get_pitch(self): return self._pitch def get_rate(self): return self._rate def set_pitch(self, pitch): self._pitch = pitch self.save() def set_rate(self, rate): self._rate = rate self.save() def say_text(self, text): if text: self._player.speak(self._pitch, self._rate, self._voice_name, text) def say_selected_text(self): clipboard = Gtk.Clipboard.get(Gdk.SELECTION_PRIMARY) clipboard.request_text(self.__primary_selection_cb, None) def pause(self): self._player.pause_sound_device() def restart(self): self._player.restart_sound_device() def stop(self): self._player.stop_sound_device() def __primary_selection_cb(self, clipboard, text, user_data): self.say_text(text) def save(self): client = GConf.Client.get_default() client.set_int('/desktop/sugar/speech/pitch', self._pitch) client.set_int('/desktop/sugar/speech/rate', self._rate) logging.debug('saving speech configuration pitch %s rate %s', self._pitch, self._rate) def restore(self): client = GConf.Client.get_default() self._pitch = client.get_int('/desktop/sugar/speech/pitch') self._rate = client.get_int('/desktop/sugar/speech/rate') logging.debug('loading speech configuration pitch %s rate %s', self._pitch, self._rate) class _GstSpeechPlayer(GObject.GObject): __gsignals__ = { 'play': (GObject.SignalFlags.RUN_FIRST, None, []), 'pause': (GObject.SignalFlags.RUN_FIRST, None, []), 'stop': (GObject.SignalFlags.RUN_FIRST, None, []) } 
def __init__(self): GObject.GObject.__init__(self) self._pipeline = None def restart_sound_device(self): if self._pipeline is None: logging.debug('Trying to restart not initialized sound device') return self._pipeline.set_state(Gst.State.PLAYING) self.emit('play') def pause_sound_device(self): if self._pipeline is None: return self._pipeline.set_state(Gst.State.PAUSED) self.emit('pause') def stop_sound_device(self): if self._pipeline is None: return self._pipeline.set_state(Gst.State.NULL) self.emit('stop') def make_pipeline(self, command): if self._pipeline is not None: self.stop_sound_device() del self._pipeline self._pipeline = Gst.parse_launch(command) bus = self._pipeline.get_bus() bus.add_signal_watch() bus.connect('message', self.__pipe_message_cb) def __pipe_message_cb(self, bus, message): if message.type == Gst.MessageType.EOS: self._pipeline.set_state(Gst.State.NULL) self.emit('stop') elif message.type == Gst.MessageType.ERROR: self._pipeline.set_state(Gst.State.NULL) self.emit('stop') def speak(self, pitch, rate, voice_name, text): # TODO workaround for http://bugs.sugarlabs.or
g/ticket/1801 if not [i for i in text if i.isalnum()]: return self.make_pipeline('espeak name=espeak ! autoaudiosink') src = self._pipeline.get_by_na
me('espeak') src.props.text = text src.props.pitch = pitch src.props.rate = rate src.props.voice = voice_name src.props.track = 2 # track for marks self.restart_sound_device() def get_all_voices(self): all_voices = {} for voice in Gst.ElementFactory.make('espeak', None).props.voices: name, language, dialect = voice if dialect != 'none': all_voices[language + '_' + dialect] = name else: all_voices[language] = name return all_voices def get_default_voice(self): """Try to figure out the default voice, from the current locale ($LANG) Fall back to espeak's voice called Default.""" voices = self.get_all_voices() locale = os.environ.get('LANG', '') language_location = locale.split('.', 1)[0].lower() language = language_location.split('_')[0] # if the language is es but not es_es default to es_la (latin voice) if language == 'es' and language_location != 'es_es': language_location = 'es_la' best = voices.get(language_location) or voices.get(language) \ or 'default' logging.debug('Best voice for LANG %s seems to be %s', locale, best) return best def get_speech_manager(): global _speech_manager if _speech_manager is None: _speech_manager = SpeechManager() return _speech_manager
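# Standalone sketch of the locale fallback in get_default_voice(): strip the
# encoding suffix from $LANG, special-case Spanish outside Spain, then fall
# back language_location -> language -> 'default'. The voices dict below is
# made-up sample data, not real espeak output.
def pick_voice(voices, lang_env):
    language_location = lang_env.split('.', 1)[0].lower()
    language = language_location.split('_')[0]
    if language == 'es' and language_location != 'es_es':
        language_location = 'es_la'
    return voices.get(language_location) or voices.get(language) or 'default'

if __name__ == '__main__':
    assert pick_voice({'es_la': 'spanish-latin-am'}, 'es_MX.UTF-8') == \
        'spanish-latin-am'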
""" PyLEMS API module
. :author: Gautham Ganapathy :organization: LEMS (https://github.com/organizations/LEMS) """ from lems.model.fundamental import * from lems.model.structure import * from lems.model.dynamics import * from lems.model.simulation import * from lems.model.component import * from lems.model.model im
port Model
nts except (ConnectionError, HTTPError) as err : sleep_time = 2**(attempt - 1) verbose("Connection attempt " + str(attempt) + " failed. " "Sleeping for " + str(sleep_time) + " second(s).") time.sleep(sleep_time) attempt = attempt + 1 except (AttributeError, TypeError) : return None return None # Add a submission to the DB def addSubmission(conn, job_id, submission) : cursor = conn.cursor() query = "REPLACE INTO s
ubmission (job_id, submission_id, subreddit_id, " \ "subreddit, title, author, url, permalink, thumbnail, name, selftext, " \ "over_18, is_self, created_utc, num_comments, ups, downs, score) VALUES " \ "(%s, %s, %s, %s, %s, %s,
%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) " values = [ job_id, submission.id, submission.subreddit_id, submission.subreddit.display_name, submission.title, submission.author.name, submission.url, submission.permalink, submission.thumbnail, submission.name, submission.selftext, submission.over_18, submission.is_self, datetime.fromtimestamp(submission.created_utc).strftime('%Y-%m-%d %H:%M:%S'), submission.num_comments, submission.ups, submission.downs, submission.score ] try : cursor.execute(query, values) conn.commit() return True except sql.Error as err : verbose("") verbose(">>>> Warning: Could not add Submission: " + str(err)) verbose(" Query: " + cursor.statement) return False finally : cursor.close() # Add an entry to the submission score history def addSubmissionScoreHistory(conn, job_id, submission) : cursor = conn.cursor() query = "INSERT INTO submission_score_history (job_id, submission_id, timestamp, ups, " \ "downs, score) VALUES (%s, %s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE job_id=job_id" values = [ job_id, submission.id, datetime.now().strftime('%Y-%m-%d %H:%M:%S'), submission.ups, submission.downs, submission.score ] try : cursor.execute(query, values) conn.commit() except sql.Error as err : verbose("") verbose(">>>> Warning: Could not add Submission score history: " + str(err)) verbose(" Query: " + cursor.statement) finally : cursor.close() # Get the submission's last run time def getSubmissionRunTime(conn, job_id, submission_id) : cursor = conn.cursor() query = "SELECT last_run FROM submission WHERE job_id=%s AND submission_id=%s LIMIT 1" values = [ job_id, submission_id ] try : cursor.execute(query, values) for(last_run) in cursor : if (last_run[0] is not None) : return last_run[0] return -1 except sql.Error as err : verbose(">>>> Warning: Could not get the submission last run time: " + str(err)) verbose(" Query: " + cursor.statement) finally: cursor.close() # Update the submission's last run time def updateSubmissionRunTime(conn, job_id, submission_id) : cursor = conn.cursor() query = "UPDATE submission SET last_run=%s WHERE job_id=%s AND submission_id=%s" values = [ datetime.now().strftime('%Y-%m-%d %H:%M:%S'), job_id, submission_id ] try : cursor.execute(query, values) conn.commit() except sql.Error as err : verbose(">>>> Warning: Could not update submission run time: " + str(err)) verbose(" Query: " + cursor.statement) finally: cursor.close() # Add a comment to the DB def addComment(conn, job_id, submission_id, comment) : cursor = conn.cursor() query = "REPLACE INTO comment (job_id, submission_id, comment_id, " \ "parent_id, author, body, created_utc, ups, downs) VALUES " \ "(%s, %s, %s, %s, %s, %s, %s, %s, %s) " values = [ job_id, submission_id, comment.id, comment.parent_id, None if comment.author is None else comment.author.name, comment.body, datetime.fromtimestamp(comment.created_utc).strftime('%Y-%m-%d %H:%M:%S'), comment.ups, comment.downs ] try : cursor.execute(query, values) conn.commit() return True except sql.Error as err : verbose("") verbose(">>>> Warning: Could not add Comment: " + str(err)) verbose(" Query: " + cursor.statement) return False finally : cursor.close() # Add an entry to the comment score history def addCommentScoreHistory(conn, job_id, comment) : cursor = conn.cursor() query = "INSERT INTO comment_score_history (job_id, comment_id, timestamp, ups, " \ "downs) VALUES (%s, %s, %s, %s, %s) ON DUPLICATE KEY UPDATE job_id=job_id" values = [ job_id, comment.id, datetime.now().strftime('%Y-%m-%d %H:%M:%S'), comment.ups, comment.downs ] try 
: cursor.execute(query, values) conn.commit() except sql.Error as err : verbose("") verbose(">>>> Warning: Could not add Comment score history: " + str(err)) verbose(" Query: " + cursor.statement) finally : cursor.close() # Add an entry into the job history table def addJobHistory(conn, job_id, success, total_results = 0) : cursor = conn.cursor() query = "INSERT INTO job_history (job_id, timestamp, status, total_results) " \ "VALUES(%s, %s, %s, %s)" values = [ job_id, datetime.now().strftime('%Y-%m-%d %H:%M:%S'), "success" if success else "failure", total_results ] try : cursor.execute(query, values) conn.commit() except sql.Error as err : verbose(">>>> Warning: Could not add job_history entry: " + str(err)) verbose(" Query: " + cursor.statement) finally: cursor.close() # Update the stored job's last run time and total results def updateJobStats(conn, job_id, total_results) : cursor = conn.cursor() query = "UPDATE job SET last_count=%s, last_run=%s WHERE job_id=%s" values = [ total_results, datetime.now().strftime('%Y-%m-%d %H:%M:%S'), job_id ] try : cursor.execute(query, values) conn.commit() except sql.Error as err : verbose(">>>> Warning: Could not update job: " + str(err)) verbose(" Query: " + cursor.statement) finally: cursor.close() # Recursively parse all of the comments def parseCommentTree(conn, job_id, submission_id, comment) : global submission_count, submission_total, comment_count, comment_total queue = collections.deque() queue.append(comment) while len(queue) > 0: next = queue.popleft() if isinstance(next, praw.objects.MoreComments) : more_comments = getComments(next) if more_comments is not None : queue.extendleft(more_comments) else : success = addComment(conn, job_id, submission_id, next) if success : comment_count = comment_count + 1 # Show status logging if args.verbose : sys.stdout.write("\rProgress: Submission: {}/{}, Comment: {}/{}".format(submission_count, submission_total, comment_count, comment_total)) addCommentScoreHistory(conn, job_id, next) queue.extend(next.replies) # Main function if __name__ == '__main__' : # Handle command line arguments parser = argparse.ArgumentParser(description="A Reddit variation of TwitterGoggles") parser.add_argument('head', type=int, help="Specify the head #") parser.add_argument('-v','--verbose', default=False, action="store_true", help="Show additional logs") parser.add_argument('-d','--delay', type=int, default=0, help="Delay execution by DELAY seconds") args = parser.parse_args() # Handle config settings config = configparser.ConfigParser() script_dir = os.path.dirname(__file__) config_file = os.path.join(script_dir, 'config/settings.cfg') config.read(config_file) # Handle file locking lock = open(config["Misc"]["lockfile"], 'a') try : flock(lock, LOCK_EX | LOCK_NB) except IOError : print("Unable to lock file", config["Misc"]["lockfile"] + ".","Terminating.") sys.exit(1) # Display startup info print("vvvvv Start:", datetime.now().strftime('%Y-%m-%d %H:%M:%S')) verbose("Verbose Mode: Enabled") print("Head:", args.head) print("Delay:", args.delay) if (args.delay > 0) : time.sleep(args.delay) print("Connecting to database...") try : run_total_count = 0 conn = connect() print("Connected") # Get all of the jobs for this head jobs = getJobs(conn) if not jobs.rowcount : print("\nUnable to find any jobs to run. Please make sure there are entries in the 'job'" + " table, that their 'zombie_head' value matches {}, and the 'state' value is greater" + " th
from django.conf.urls import patterns, include, url from django.conf import settings from django.conf.urls.static import static from django.core.urlresolvers import reverse_lazy from django.views.generic import RedirectView from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', url(r'', include('social.apps.django_app.urls', namespace='social')), # API url(r'^usernames/(?P<username>\w+)/$','api.views.username_availability',name='username_availability'), url(r'^users/social-data/(?P<backend>\w+)/$','api.views.fetch_social_data',name='fetch_social_data'), url(r'^users/refresh-social-data/(?P<backend>\w+)/$','api.views.refresh_social_data',name='refresh_social_data'), url(r'^users/save-data/(?P<resumeId>\d+)/$','api.views.save_data'), url(r'^users/get-data/(?P<resumeId>\d+)/$','api.views.get_resume_data'), url(r'^user/get-all-cv/$','dashboard.views.get_all_resumes'), url(r'^user/dashboard/$','dashboard.views.show_dashboard'), url(r'^user/create-new-cv/$','dashboard.views.create_new_resume'), # test api url(r'^fb-graph-test/$','api.views.fb_graph_test'), url(r'^github-api-test/$','api.views.github_api_test'), url(r'^linkedin-api-test/$','api.views.linkedin_api_test'), # forget password implementation url(r'^forgot-password/$','api.views.password_reset_middleware', name='forgot_password'), url(r'^users/password/reset/$', 'django.contrib.auth.views.password_reset', {'post_reset_redirect' : '/users/password/reset/done/'}), url(r'^users/password/reset/done/$', 'django.contrib.auth.views.password_reset_done'), url(r'^users/password/reset/(?P<uidb64>[0-9A-Za-z]+)-(?P<token>.+)/$', 'django.contrib.auth.views.password_reset_confirm', {'post_reset_redirect' : '/users/password/done/'}), url(r'^users
/password/done/$', 'django.contrib.auth.views.password_reset_complete'), url(r'^$', 'api.views.home',name='home'), url(r'^signup/(?P<backend>[^/]+)/$', 'api.views.signup
', name='signup'), url(r'^signup/$' , RedirectView.as_view(url='/signup/username/')), url(r'^email-sent/', 'api.views.validation_sent'), url(r'^resumizr-login/(?P<backend>[^/]+)/$', 'api.views.username_login', name='username_login'), url(r'^login/$','api.views.login', name='login'), url(r'^logout/$','api.views.logout', name='logout'), url(r'^app/$','api.views.app',name='app'), url(r'^admin/', include(admin.site.urls)), url(r'^generate/cvform/(?P<resumeNum>\d+)/$','api.views.generateForm', name='generateform'), url(r'^write/cv_to_pdf/$','pdfconvertor.views.writepdf', name='writepdf'), url(r'^preview/cv/$','api.views.previewCv', name='preview'), url(r'^landing_page/','api.views.landing_page', name='landing_page'), ) #development media server if settings.DEBUG: urlpatterns += patterns( 'django.views.static', (r'media/(?P<path>.*)', 'serve', {'document_root': settings.MEDIA_ROOT}), )
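# A small usage sketch (not from the original project): because the routes
# above are named, paths can be resolved symbolically instead of being
# hard-coded. With this pre-1.10 Django layout, `reverse` lives in
# django.core.urlresolvers (the file itself imports reverse_lazy from there);
# the expected results are shown as comments, and the argument values are
# illustrative only:
#
#     from django.core.urlresolvers import reverse
#
#     reverse('login')                                   # -> '/login/'
#     reverse('signup', kwargs={'backend': 'github'})    # -> '/signup/github/'
#     reverse('username_availability',
#             kwargs={'username': 'alice'})              # -> '/usernames/alice/'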
), scoped = False ) self.dragEndSignal().connect( Gaffer.WeakMethod( self.__dragEnd ), scoped = False ) self.keyPressSignal().connect( Gaffer.WeakMethod( self.__keyPress ), scoped = False ) self.__values = [] if isinstance( values, ( six.integer_types, float ) ) : self.__setValuesInternal( [ values ], self.ValueChangedReason.SetValues ) else : self.__setValuesInternal( values, self.ValueChangedReason.SetValues ) ## Convenience function to call setValues( [ value ] ) def setValue( self, value ) : self.setValues( [ value ] ) ## Convenience function returning getValues()[0] if there # is only one value, and raising ValueError if not. def getValue( self ) : if len( self.__values ) != 1 : raise ValueError return self.__values[0] def setValues( self, values ) : self.__setValuesInternal( values, self.ValueChangedReason.SetValues ) def getValues( self ) : return self.__values ## A signal emitted whenever a value has been changed. Slots should # have the signature slot( Slider, ValueChangedReason ). def valueChangedSignal( self ) : try : return self.__valueChangedSignal except : self.__valueChangedSignal = Gaffer.Signals.Signal2() return self.__valueChangedSignal ## Returns True if a user would expect the specified sequence # of changes to be merged into one undoable event. @classmethod def changesShouldBeMerged( cls, firstReason, secondReason ) : if type( firstReason ) != type( secondReason ) : return False return ( firstReason, secondReason ) in ( # click and drag ( cls.ValueChangedReason.Click, cls.ValueChangedReason.DragBegin ), ( cls.ValueChangedReason.DragBegin, cls.ValueChangedReason.DragMove ), ( cls.ValueChangedReason.DragMove, cls.ValueChangedReason.DragMove ), ( cls.ValueChangedReason.DragMove, cls.ValueChangedReason.DragEnd ), # increment ( cls.ValueChangedReason.Increment, cls.ValueChangedReason.Increment ), ) def setRange( self, min, max, hardMin=None, hardMax=None ) : if hardMin is None : hardMin = min if hardMax is None : hardMax = max if min==self.__min and max==self.__max and hardMin==self.__hardMin and hardMax==self.__hardMax : return self.__min = min self.__max = max self.__hardMin = hardMin self.__hardMax = hardMax self.__setValuesInte
rnal( self.__values, self.ValueChangedReason.Invalid ) # reclamps the values to the range if necessary self._
qtWidget().update() def getRange( self ) : return self.__min, self.__max, self.__hardMin, self.__hardMax def indexRemovedSignal( self ) : signal = getattr( self, "_indexRemovedSignal", None ) if signal is None : signal = GafferUI.WidgetEventSignal() self._indexRemovedSignal = signal return signal def setSelectedIndex( self, index ) : if self.__selectedIndex == index : return if index is not None : if not len( self.__values ) or index < 0 or index >= len( self.__values ) : raise IndexError self.__selectedIndex = index self._qtWidget().update() signal = getattr( self, "_selectedIndexChangedSignal", None ) if signal is not None : signal( self ) ## May return None to indicate that no index is selected. def getSelectedIndex( self ) : return self.__selectedIndex def selectedIndexChangedSignal( self ) : signal = getattr( self, "_selectedIndexChangedSignal", None ) if signal is None : signal = GafferUI.WidgetSignal() self._selectedIndexChangedSignal = signal return signal ## Determines whether or not values may be added/removed def setSizeEditable( self, editable ) : self.__sizeEditable = editable def getSizeEditable( self ) : return self.__sizeEditable ## Sets a size after which no more values can # be removed. def setMinimumSize( self, minimumSize ) : self.__minimumSize = minimumSize def getMinimumSize( self ) : return self.__minimumSize ## Sets the value increment added/subtracted # when using the cursor keys. The default value of None # uses an increment equivalent to the size of one pixel at # the current slider size. An increment of 0 can be specified # to disable the behaviour entirely. def setIncrement( self, increment ) : self.__increment = increment def getIncrement( self ) : return self.__increment ## Sets the increment used for snapping values generated # by interactions such as drags and button presses. Snapping # can be ignored by holding the `Ctrl` modifier. def setSnapIncrement( self, increment ) : self.__snapIncrement = increment def getSnapIncrement( self ) : return self.__snapIncrement def setHoverPositionVisible( self, visible ) : self.__hoverPositionVisible = visible def getHoverPositionVisible( self ) : return self.__hoverPositionVisible ## May be overridden by derived classes to customise # the drawing of the background. def _drawBackground( self, painter ) : size = self.size() valueRange = self.__max - self.__min if valueRange == 0 : return idealSpacing = 10 idealNumTicks = float( size.x ) / idealSpacing tickStep = valueRange / idealNumTicks logTickStep = math.log10( tickStep ) flooredLogTickStep = math.floor( logTickStep ) tickStep = math.pow( 10, flooredLogTickStep ) blend = (logTickStep - flooredLogTickStep) tickValue = math.floor( self.__min / tickStep ) * tickStep i = 0 while tickValue <= self.__max : x = size.x * ( tickValue - self.__min ) / valueRange if i % 100 == 0 : height0 = height1 = 0.75 alpha0 = alpha1 = 1 elif i % 50 == 0 : height0 = 0.75 height1 = 0.5 alpha0 = alpha1 = 1 elif i % 10 == 0 : height0 = 0.75 height1 = 0.25 alpha0 = alpha1 = 1 elif i % 5 == 0 : height0 = 0.5 height1 = 0 alpha0 = 1 alpha1 = 0 else : height0 = 0.25 height1 = 0 alpha0 = 1 alpha1 = 0 alpha = alpha0 + (alpha1 - alpha0) * blend height = height0 + (height1 - height0) * blend pen = QtGui.QPen() pen.setWidth( 0 ) pen.setColor( QtGui.QColor( 0, 0, 0, alpha * 255 ) ) painter.setPen( pen ) painter.drawLine( x, size.y, x, size.y * ( 1 - height ) ) tickValue += tickStep i += 1 ## May be overridden by derived classes to customise the # drawing of the value indicator. 
# # `value` : The value itself. # `position` : The widget-relative position where the # indicator should be drawn. # `state` : A GafferUI.Style.State. DisabledState is used # to draw hover indicators, since there is # currently no dedicated state for this purpose. def _drawValue( self, painter, value, position, state ) : size = self.size() pen = QtGui.QPen( QtGui.QColor( 0, 0, 0, 255 ) ) pen.setWidth( 1 ) painter.setPen( pen ) if state == state.NormalState : color = QtGui.QColor( 128, 128, 128, 255 ) else : color = QtGui.QColor( 119, 156, 255, 255 ) painter.setBrush( QtGui.QBrush( color ) ) if state == state.DisabledState : painter.setOpacity( 0.5 ) if position < 0 : painter.drawPolygon( QtGui.QPolygonF( [ QtCore.QPointF( 8, 4 ), QtCore.QPointF( 8, size.y - 4 ), QtCore.QPointF( 2, size.y / 2 ), ] ) ) elif position > size.x : painter.drawPolygon( QtGui.QPolygonF( [ QtCore.QPointF( size.x - 8, 4 ), QtCore.QPointF( size.x - 8, size.y - 4 ), QtCore.QPointF( size.x - 2, size.y / 2 ), ] ) ) else : painter.drawEllipse( QtCore.QPoint( position, size.y / 2 ), size.y / 4, size.y / 4 ) def __indexUnderMouse( self ) : size = self.size() mousePosition = GafferUI.Widget.mousePosition( relativeTo = self ).x result = None for i, v in enumerate( self.__values ) : # clamp value inside range so we can select # handles representing points outside the widget. v = max( min( v, self.__max ), self.__min ) dist = math.fabs( mousePosition - self.__valueToPosition( v ) ) if result is None or dist < minDist : result = i minDist = dist if not self.getSizeEditable() : # when the size isn't editable, we consider the closest # position to be under the mouse, this makes it easy # to just click anywhere to move the c
from unittest import TestCase from paramiko import SSHException from pyinfra.api import Config, State from pyinfra.api.connect import connect_all from pyinfra.api.exceptions import NoGroupError, NoHostError, PyinfraError from ..paramiko_util import PatchSSHTestCase from ..util import make_inventory class TestInventoryApi(TestCase): def test_inventory_creation(self): inventory = make_inventory() # Check length assert len(inventory.hosts) == 2 # Get a host host = inventory.get_host('somehost') assert host.data.ssh_user == 'vagrant' # Check our group data assert inventory.get_group_data('test_group') == { 'group_data': 'hello world', } def test_tuple_host_group_inventory_creation(self): inventory = make_inventory( hosts=[ ('somehost', {'some_data': 'hello'}), ], tuple_group=([ ('somehost', {'another_data': 'world'}), ], { 'tuple_group_data': 'word', }), ) # Check host data host = inventory.get_host('somehost') assert host.d
ata.some_data == 'hello' assert host.data.another_data == 'world' # Che
ck group data assert host.data.tuple_group_data == 'word' def test_host_and_group_errors(self): inventory = make_inventory() with self.assertRaises(NoHostError): inventory.get_host('i-dont-exist') with self.assertRaises(NoGroupError): inventory.get_group('i-dont-exist') class TestStateApi(PatchSSHTestCase): def test_fail_percent(self): inventory = make_inventory(( 'somehost', ('thinghost', {'ssh_hostname': SSHException}), 'anotherhost', )) state = State(inventory, Config(FAIL_PERCENT=1)) # Ensure we would fail at this point with self.assertRaises(PyinfraError) as context: connect_all(state) assert context.exception.args[0] == 'Over 1% of hosts failed (33%)' # Ensure the other two did connect assert len(state.active_hosts) == 2
""" Copyright 2016 Rasmus Larsen This software may be modified and distributed under the terms of the MIT license. See the LICENSE.txt file for details. """ import sys import time from sacred import Experiment from core.ALEEmulator import ALEEmulator from dqn.Agent import Agent from dqn.DoubleDQN import DoubleDQN ex = Experiment('double-dqn') @ex.config def net_config(): conv_layers = 3
conv_units = [32, 64, 64] filter_sizes = [8, 4, 3] strides = [4, 2, 1] state_frames = 4 fc_layers = 1 fc_units = [512] in_width = 84 in_height = 84 discount = 0.99 device = '/gpu:0' lr = 0.00025 opt_decay = 0.95 momentum = 0.0 opt_eps = 0.01 target_sync = 1e4 clip_delta = 1.0 tensorboard = False tensorboard
_freq = 50 @ex.config def emu_config(): rom_path = '../ale-git/roms/' rom_name = 'breakout' display_screen = True frame_skip = 4 repeat_prob = 0.0 color_avg = True random_seed = 42 random_start = 30 @ex.config def agent_config(): hist_size = 1e5 eps = 1.0 eps_min = 0.1 eps_decay = (eps - eps_min) / 1e6 batch_size = 32 train_start = 5e3 train_frames = 5e6 test_freq = 5e4 test_frames = 5e3 update_freq = 4 @ex.command def test(_config): emu = ALEEmulator(_config) _config['num_actions'] = emu.num_actions net = DoubleDQN(_config) net.load(_config['rom_name']) agent = Agent(emu, net, _config) agent.next(0) # put a frame into the replay memory, TODO: should not be necessary agent.test() @ex.automain def main(_config, _log): sys.stdout = open('log_' + _config['rom_name'] + time.strftime('%H%M%d%m', time.gmtime()), 'w', buffering=True) print "#{}".format(_config) emu = ALEEmulator(_config) _config['num_actions'] = emu.num_actions net = DoubleDQN(_config) agent = Agent(emu, net, _config) agent.train()
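# Hedged usage sketch: sacred experiments are normally driven from the command
# line, where `python double_dqn.py` runs the @ex.automain entry point,
# `python double_dqn.py test` runs the @ex.command above, and a trailing
# `with key=value ...` overrides any config entry declared in the @ex.config
# functions (the file name double_dqn.py is an assumption). Config overrides
# can also be applied programmatically:
#
#     from double_dqn import ex  # hypothetical module name
#
#     # override emulator settings for a single run
#     ex.run(config_updates={'rom_name': 'pong', 'display_screen': False})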
import gtk from plugin_base.find_extension import FindExtension class SizeFindFiles(FindExtension): """Size extension for find files tool""" def __init__(self, parent): FindExtension.__init__(self, parent) # create container table = gtk.Table(2, 4, False) table.set_border_width(5) table.set_col_spacings(5) # create interface self._adjustment_max = gtk.Adjustment(value=50.0, lower=0.0, upper=100000.0, step_incr=0.1, page_incr=10.0) self._adjustment_min = gtk.Adjustment(value=0.0, lower=0.0, upper=10.0, step_incr=0.1, page_incr=10.0) label = gtk.Label('<b>{0}</b>'.format(_('Match file size'))) label.set_alignment(0.0, 0.5) label.set_use_markup(True) label_min = gtk.Label(_('Minimum:')) label_min.set_alignment(0, 0.5) label_min_unit = gtk.Label(_('MB')) label_max = gtk.Label(_('Maximum:')) label_max.set_alignment(0, 0.5) label_max_unit = gtk.Label(_('MB')) self._entry_max = gtk.SpinButton(adjustment=self._adjustment_max, digits=2) self._entry_min = gtk.SpinButton(adjustment=self._adjustment_min, digits=2) self._entry_max
.connect('value-changed', self._max_value_changed) self._entry_min.connect('value-changed', self._min_value_changed) self._entry_max.connect('activate', self._parent.find_files) self._entry
_min.connect('activate', lambda entry: self._entry_max.grab_focus()) # pack interface table.attach(label, 0, 3, 0, 1, xoptions=gtk.FILL) table.attach(label_min, 0, 1, 1, 2, xoptions=gtk.FILL) table.attach(self._entry_min, 1, 2, 1, 2, xoptions=gtk.FILL) table.attach(label_min_unit, 2, 3, 1, 2, xoptions=gtk.FILL) table.attach(label_max, 0, 1, 2, 3, xoptions=gtk.FILL) table.attach(self._entry_max, 1, 2, 2, 3, xoptions=gtk.FILL) table.attach(label_max_unit, 2, 3, 2, 3, xoptions=gtk.FILL) self.vbox.pack_start(table, False, False, 0) def _max_value_changed(self, entry): """Keep the minimum spinner capped at the new maximum value""" self._adjustment_min.set_upper(entry.get_value()) def _min_value_changed(self, entry): """Keep the maximum spinner floored at the new minimum value""" self._adjustment_max.set_lower(entry.get_value()) def get_title(self): """Return i18n title for extension""" return _('Size') def is_path_ok(self, path): """Check if the specified path fits the criteria""" size = self._parent._provider.get_stat(path).size size_max = self._entry_max.get_value() * 1048576 size_min = self._entry_min.get_value() * 1048576 return size_min < size < size_max
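# Standalone sketch of the comparison performed in is_path_ok() above: the
# spinner values are entered in megabytes and compared against st_size in
# bytes (1 MB = 1048576 bytes). The numbers below are illustrative only.
if __name__ == '__main__':
    size_min = 0.5 * 1048576   # Minimum: 0.5 MB
    size_max = 2.0 * 1048576   # Maximum: 2 MB
    file_size = 1572864        # a 1.5 MB file
    assert size_min < file_size < size_max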
import yaml from os import makedirs from os.path import join,dirname,realpath,isdir script_dir = dirname(realpath(__file__)) default_yml_filepath = join(script_dir,'defa
ults.yml') defaults = { "output_dir": 'output', "header_img_dir": 'imgs/headers/', "scaled_img_dir": 'imgs/scaled/', "original_img_dir": 'imgs/original/', "header_img_url": 'imgs/headers/', "scaled_img_url": 'imgs/scaled/', "original_img_url": 'imgs/original/', "template_dir": join(script_dir,'templates'), "max_article_img_width": 710, "max_avatar_width": 710, "database_file": "databa
se.yml", "static_dir": join(script_dir,'static'), "copyright_msg": None, "extra_links": [], "import_to_discourse": False, "strapline": None, } config = dict() def getConfig(): if not config: raise RuntimeError('config not loaded yet') return config def loadConfig(yml_filepath): config.update(defaults) with open(yml_filepath) as f: patch = yaml.load(f.read()) config.update(patch) # make paths absolute config['header_img_dir'] = join(config['output_dir'],config['header_img_dir']) config['scaled_img_dir'] = join(config['output_dir'],config['scaled_img_dir']) config['original_img_dir'] = join(config['output_dir'],config['original_img_dir']) config['database_file'] = join(config['output_dir'],config['database_file']) def makeDirs(): if not config: raise RuntimeError('config not loaded yet') for key in ['header_img_dir','scaled_img_dir','original_img_dir']: path = config[key] if not isdir(path): makedirs(path)
# -*- encoding: utf-8 -*- ################################################################################ # # # Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol # # # # This program is free software: you can redistribute it and/or modify # # it under the terms of the GNU Affero General Public License as published by # # the Free Software Foundation, either version 3 of the License, or # # (at your option) any later version. # # # # This program is distributed in the hope that it will be useful, # # but WITHOUT ANY WARRANTY; without even the implied warranty of # # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # # GNU Affero General Public License for more details. # # # # You should have received a copy of the GNU Affero General Public License # # along with this program. If not, see <http://www.gnu.org/licenses/>. # ################################################################################ from openerp import models, fields, api from datetime import * class clv_medicament_template_history(models.Model): _name = 'clv_medicament.template.history' medicament_template_id = fields.Many2one('clv_medicament.template', 'Medicament Template', required=True) user_id = fields.Many2one ('res.users', 'User', required=True) date = fields.Datetime("Date", required=True) state = fields.Selection([('draft','Draft'), ('revised','Revised'), ('waiting','Waiting'), ('done','Done'), ('canceled','Canceled'), ], string='Status', default='draft', readonly=True, required=True, help="") notes = fields.Text(string='Notes') _order = "date desc" _defaults = { 'user_id': lambda obj,cr,uid,context: uid, 'date': lambda *a: datetime.now().strftime('%Y-%m-%d %H:%M:%S'), } class clv_medicament_template(models.Model): _inherit = 'clv_medicament.template' history_ids = fields.One2many('clv_medicament.template.history', 'medicament_template_id', 'Medicament Template History', readonly=True) active_history = fields.Boolean('Active History', help="If unchecked, it will allow you to disable the history without removing it.", default=True) @api.one def insert_clv_me
dicament_template_history(self, medicament_template_id, state, notes): if self.active_history: values = { 'medicam
ent_template_id': medicament_template_id, 'state': state, 'notes': notes, } self.pool.get('clv_medicament.template.history').create(self._cr, self._uid, values) @api.multi def write(self, values): if (not 'state' in values) and (not 'date' in values): notes = values.keys() self.insert_clv_medicament_template_history(self.id, self.state, notes) return super(clv_medicament_template, self).write(values) @api.one def button_draft(self): self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S') self.state = 'draft' self.insert_clv_medicament_template_history(self.id, 'draft', '') @api.one def button_revised(self): self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S') self.state = 'revised' self.insert_clv_medicament_template_history(self.id, 'revised', '') @api.one def button_waiting(self): self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S') self.state = 'waiting' self.insert_clv_medicament_template_history(self.id, 'waiting', '') @api.one def button_done(self): self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S') self.state = 'done' self.insert_clv_medicament_template_history(self.id, 'done', '') @api.one def button_cancel(self): self.date = datetime.now().strftime('%Y-%m-%d %H:%M:%S') self.state = 'canceled' self.insert_clv_medicament_template_history(self.id, 'canceled', '') @api.one def set_to_draft(self, *args): self.state = 'draft' self.create_workflow() return True
import my
test print '----This is func1----' mytest.world.func1() print '----This is func2----' mytest.simple.func2() print '----This is f
unc3----' mytest.whatever.func3() print '----This is myobj using MyClass----' myobj = mytest.MyClass('nick', 'florida') myobj.hello()
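# For the demo above to run, `mytest` must be a package exposing submodules
# `world`, `simple` and `whatever` plus a `MyClass`. Everything below is an
# assumption inferred from the calls above -- one hypothetical minimal layout:
#
#     # mytest/__init__.py
#     from . import world, simple, whatever
#     from .myclass import MyClass
#
#     # mytest/world.py (simple.py and whatever.py follow the same shape)
#     def func1():
#         print 'hello from func1'
#
#     # mytest/myclass.py
#     class MyClass(object):
#         def __init__(self, name, place):
#             self.name = name
#             self.place = place
#         def hello(self):
#             print 'Hello %s from %s' % (self.name, self.place)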
# Copyright 2014 Cirruspath, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Author: James Horey # Email: jhorey@cirruspath.com # from flask import Flask, request, redirect, url_for
import json import os from pom.triggers.poster import Poster from pom.triggers.github import GitHub from pom.triggers.salesforce import Salesforce from pom.clients.oauth2 import OAuth2 import requests from requests.exceptions import ConnectionError import sys from tornado.wsgi import WSGIContainer from tornado.httpserver import HTTPServer from tornado.ioloop i
mport IOLoop import urllib from uuid import uuid4 import yaml app = Flask(__name__) # # Read in all the known oauth providers. # CONFIG_DIR = os.path.dirname(os.path.dirname(__file__)) + "/config" providers = {} for f in os.listdir(CONFIG_DIR + "/providers"): n, e = os.path.splitext(f) print "Source: " + n providers[n] = OAuth2(n, CONFIG_DIR + "/providers/" + f, os.environ['POM_APPS']) # # Instantiate all the triggers. # triggers = [] yaml_file = open(CONFIG_DIR + "/pom.yaml", 'r') config = yaml.load(yaml_file) redirect_uri = config["callback"] if 'triggers' in config: trigger_list = config["triggers"].split(",") for t in trigger_list: if t == "github": triggers.append(GitHub()) elif t == "salesforce": triggers.append(Salesforce()) elif t == "poster": triggers.append(Poster()) else: trigger_list = [] # # Responses store the oauth state machine. # responses = {} # # The default page redirects the user to the source OAuth page. # @app.route('/', methods=['GET']) def authorize(): state = str(uuid4()) if 'session' in request.args: session = request.args['session'] else: session = state if 'source' in request.args: source = providers[request.args['source']] print "Using the %s OAuth server" % request.args['source'] else: print "Using the Salesforce OAuth server" source = providers["salesforce"] payload = { 'scope' : source.scopes, 'state' : state, 'redirect_uri' : redirect_uri + '/' + source.name, 'response_type' : 'code', 'client_id' : source.consumer_key, 'access_type' : 'offline'} url = source.authorize_url + "?" + urllib.urlencode(payload) responses[state] = { 'stage' : 'authorize', 'session' : session } if 'redirect' in request.args: responses[state]['redirect'] = request.args['redirect'] print "Using the %s user redirect" % responses[state]['redirect'] return redirect(url) # # Revoke an access token using a refresh token. # @app.route('/refresh', methods=['DELETE']) def revoke_access_token(): refresh_token = request.args['refresh'] source = providers[request.args['source']] payload = { 'token' : refresh_token } resp = requests.post(source.revoke_url, params = payload) return resp.text # Fetch a new access token using a refresh token. 
# @app.route('/refresh', methods=['GET']) def refresh_access_token(): refresh_token = request.args['refresh'] source = providers[request.args['source']] print "refreshing with " + refresh_token payload = { 'client_id' : source.consumer_key, 'client_secret' : source.consumer_secret, 'grant_type' : 'refresh_token', 'refresh_token' : refresh_token } resp = requests.post(source.token_url, params = payload) return resp.text def _get_access_token(source, auth_code, state, session, redirect=None): try: payload = { 'client_id' : source.consumer_key, 'client_secret' : source.consumer_secret, 'grant_type' : 'authorization_code', 'code' : auth_code, 'redirect_uri' : redirect_uri + '/' + source.name} headers = {'Accept' : 'application/json'} # headers = {'content-type': 'application/x-www-form-urlencoded', # 'content-length' : 256} res = requests.post(source.access_token_url, data = payload, headers = headers) if res.status_code == requests.codes.ok: resp_json = res.json() print "JSON response: " + str(resp_json) if 'access_token' in resp_json: resp = None resp_json['source'] = source.name if redirect: resp_json['_user_redirect'] = redirect for t in triggers: resp = t.consume_access_key(resp_json) responses[state] = { 'stage' : 'authorized', 'resp' : resp_json } if resp: return resp.text else: return json.dumps( {'status' : 'authorized', 'session' : session } ) else: error_msg = "unauthorized" else: error_msg = "unreachable" return json.dumps( {"status" : "failed", "error" : error_msg, "session" : session } ) except ConnectionError as e: print str(e) # # The generic callback method. Should be supplemented with the provider source # name so that we know what to do. # @app.route('/callback/<source_name>', methods=['GET']) def callback(source_name): source = providers[source_name] if 'code' in request.args: auth_code = request.args["code"] state = request.args["state"] session = responses[state]['session'] if 'redirect' in responses[state]: redirect = responses[state]['redirect'] else: redirect = None responses[state]['stage'] = 'callback' return _get_access_token(source, auth_code, state, session, redirect) else: return json.dumps( {'status' : 'failed', 'error' : 'authentication' } ) # # Retrieve the access & refresh keys. # @app.route('/key', methods=['GET']) def key(): if 'session' in request.args and request.args['session'] in responses: resp = responses[request.args['session']] if resp['stage'] == 'authorized': return resp['resp']['access_key'] return json.dumps( {'status' : 'failed', 'error' : 'could not find access key' } ) def main(): if 'POM_SSL' in os.environ: key_dir = os.environ['POM_SSL'] else: key_dir = os.path.dirname(os.path.dirname(__file__)) + "/keys" if not 'POM_APPS' in os.environ: print "POM_APPS should be set to a directory with our application OAuth credentials" exit(1) print "Using SSL certificate in " + key_dir try: http_server = HTTPServer(WSGIContainer(app), ssl_options={ "certfile": key_dir + "/server.crt", "keyfile": key_dir + "/server.key", }) http_server.listen(port=int(sys.argv[2]), address=sys.argv[1]) IOLoop.instance().start() except: pass
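# Hedged client-side sketch (the endpoint paths come from the routes above;
# the host/port, verify=False and the session id are assumptions). A browser
# is first sent to '/' to start the OAuth dance; afterwards the access key
# recorded for that session can be polled from '/key':
#
#     import requests
#
#     BASE = 'https://localhost:8443'   # address/port passed to main()
#
#     # start authorization against a named provider
#     requests.get(BASE + '/', params={'source': 'github', 'session': 'abc123'},
#                  verify=False, allow_redirects=False)
#
#     # later: fetch the access key recorded for that session
#     print requests.get(BASE + '/key', params={'session': 'abc123'},
#                        verify=False).text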
import numpy as np import pandas as pd from plotnine import (ggplot, aes, geom_area, geom_ribbon, facet_wrap, scale_x_continuous, theme) n = 4 # No. of ribbons in a vertical stack m = 100 # Points width = 2*np.pi # width of each ribbon x = np.linspace(0, width, m) df = pd.DataFrame({ 'x': np.tile(x, n), 'ymin': np.hstack([np.sin(x)+2*i for i in range(n)]), 'ymax': np.hstack([np.sin(x)+2*i+1 for i in range(n)]), 'z': np.repeat(range(n), m) }) _theme = theme(subplots_adjust={'right': 0.85}) def test_ribbon_aesthetics(): p = (ggplot(df, aes('x', ymin='ymin', ymax='ymax', group='factor(z)')) + geom_ribbon() + geom_ribbon(aes('x+width', alpha='z')) + geom_ribbon(aes('x+2*width', linetype='factor(z)'), color='black', fill=None, size=2) + geom_ribbon(aes('x+3*width', color='z'), fill=None, size=2) + geom_ribbon(aes('x+4*width', fill='factor(z)')) + geom_ribbon(aes('x+5*width', size='z'), color='black', fill=None) + scale_x_continuous( breaks=[i*2*np.pi for i in range(7)], labels=['0'] + [r'${}\pi$'.format(2*i) for i in range(1, 7)]) ) assert p + _theme == 'ribbon_aesthetics' def test_area_aesthetics(): p = (ggplot(df, aes('x', 'ymax+2', group='fac
tor(z)')) + geom_area() +
geom_area(aes('x+width', alpha='z')) + geom_area(aes('x+2*width', linetype='factor(z)'), color='black', fill=None, size=2) + geom_area(aes('x+3*width', color='z'), fill=None, size=2) + geom_area(aes('x+4*width', fill='factor(z)')) + geom_area(aes('x+5*width', size='z'), color='black', fill=None) + scale_x_continuous( breaks=[i*2*np.pi for i in range(7)], labels=['0'] + [r'${}\pi$'.format(2*i) for i in range(1, 7)]) ) assert p + _theme == 'area_aesthetics' def test_ribbon_facetting(): p = (ggplot(df, aes('x', ymin='ymin', ymax='ymax', fill='factor(z)')) + geom_ribbon() + facet_wrap('~ z') ) assert p + _theme == 'ribbon_facetting'
import os import amo.search from .models import Reindexing from django.core.management.base import CommandError # shortcut functions is_reindexing_amo = Reindexing.objects.is_reindexing_amo flag_reindexing_amo = Reindexing.objects.flag_reindexing_amo unflag_reindexing_amo = Reindexing.objects.unflag_reindexing_amo get_indices = Reindexing.objects.get_indices def index_obje
cts(ids, model, search, index=None, transforms=None): if index is None: index = model._get_index() indices = Reindexing.objects.get_indices(index) if transform
s is None: transforms = [] qs = model.objects.no_cache().filter(id__in=ids) for t in transforms: qs = qs.transform(t) for ob in qs: data = search.extract(ob) for index in indices: model.index(data, bulk=True, id=ob.id, index=index) amo.search.get_es().flush_bulk(forced=True) def raise_if_reindex_in_progress(site): """Checks if the database indexation flag is on for the given site. If it's on, and if no "FORCE_INDEXING" variable is present in the env, raises a CommandError. """ already_reindexing = Reindexing.objects._is_reindexing(site) if already_reindexing and 'FORCE_INDEXING' not in os.environ: raise CommandError("Indexation already occurring. Add a FORCE_INDEXING " "variable in the environ to force it")
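# Hedged usage sketch for index_objects(): `Addon`, `AddonIndexer` and
# `attach_translations` stand in for whatever model/search pair a caller
# owns (all three names are assumptions -- only the call shape comes from
# the signature above):
#
#     index_objects(
#         ids=[1, 2, 3],
#         model=Addon,
#         search=AddonIndexer,
#         transforms=[attach_translations],  # hypothetical queryset transform
#     )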
# -*- coding: utf-8 -*- from
__future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('shows', '0001_initial'),
] operations = [ migrations.AlterModelOptions( name='show', options={'verbose_name_plural': 'shows', 'ordering': ['datetime', 'cinema', 'id'], 'verbose_name': 'show'}, ), ]
# This file is part of James CI. # # James CI is free software: you can redistribute it and/or modify it under the # terms of the GNU General Public License as published by the Free Software # Foundation, either version 3 of the License, or (at your option) any later # version. # # James CI is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABI
LITY or FITNESS FOR # A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more # details. # # You should have received a copy of the GNU Lesser General Public License along # with James CI. If not, see <http://www.gnu.org/licenses/>. # # # Copyright (C) # 2017 Alexander Haase <ahaase@alexhaase.de> # import enum @enum.unique class Status(enum.IntEnum): """ This enum class will be used for defining the status of a
:py:class:`~.Pipeline` or :py:class:`~.Job`. Multiple statuses may be compared by their value. .. note:: The minimum of a list of statuses will be the *worst* status of the list. However, if the list has a status of :py:attr:`created`, :py:attr:`pending` or :py:attr:`running`, these will have priority, indicating not all jobs have finished yet. """ created = enum.auto() pending = enum.auto() running = enum.auto() canceled = enum.auto() errored = enum.auto() failed = enum.auto() success = enum.auto() def __str__(self): """ Return the status name as string. This function is required to remove the enum's class name prefix when string representation is required. """ return self.name def final(self): """ :return: If the status is a final state or not. :rtype: bool """ return (self in [self.canceled, self.errored, self.failed, self.success])
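# A short sketch of the comparison semantics described in the docstring:
# IntEnum members compare by value (declaration order here), so min() over a
# list of statuses yields the "worst" one, and the early states sort below
# all final states, signalling that not all jobs have finished.
if __name__ == '__main__':
    assert min([Status.success, Status.failed]) is Status.failed
    assert min([Status.failed, Status.errored]) is Status.errored
    assert min([Status.pending, Status.failed]) is Status.pending  # not finished yet
    assert str(Status.failed) == 'failed'
    assert Status.failed.final() and not Status.running.final()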
key], as_iterable=as_iterable) if as_iterable: return _as_iterable(preds, output=key) return preds[key] def get_variable_names(self): return self._estimator.get_variable_names() def get_variable_value(self, name): return self._estimator.get_variable_value(name) def export(self, export_dir, input_fn=None, input_feature_key=None, use_deprecated_input_fn=True, signature_fn=None, default_batch_size=1, exports_to_keep=None): """See BaseEstimator.export.""" def default_input_fn(unused_estimator, examples): return layers.parse_feature_columns_from_examples( examples, self._feature_columns) return self._estimator.export( export_dir=export_dir, input_fn=input_fn or default_input_fn, input_feature_key=input_feature_key, use_deprecated_input_fn=use_deprecated_input_fn, signature_fn=(signature_fn or export.classification_signature_fn_with_prob), prediction_key=prediction_key.PredictionKey.PROBABILITIES, default_batch_size=default_batch_size, exports_to_keep=exports_to_keep) @property @deprecated("2016-10-30", "This method will be removed after the deprecation date. " "To inspect variables, use get_variable_names() and " "get_variable_value().") def weights_(self): values = {} optimizer_regex = r".*/"+self._optimizer.get_name() + r"(_\d)?$" for name in self.get_variable_names(): if (name.startswith("linear/") and name != "linear/bias_weight" and not re.match(optimizer_regex, name)): values[name] = self.get_variable_value(name) if len(values) == 1: return values[list(values.keys())[0]] return values @property @deprecated("2016-10-30", "This method will be removed after the deprecation date. " "To inspect variables, use get_variable_names() and " "get_variable_value().") def bias_(self): return self.get_variable_value("linear/bias_weight") @property def config(self): return self._estimator.config @property def model_dir(self): return self._estimator.model_dir class LinearRegressor(evaluable.Evaluable, trainable.Trainable): """Linear regressor model. Train a linear regression model to predict label value given observation of feature values. Example: ```python sparse_column_a = sparse_column_with_hash_bucket(...) sparse_column_b = sparse_column_with_hash_bucket(...) sparse_feature_a_x_sparse_feature_b = crossed_column(...) estimator = LinearRegressor( feature_columns=[sparse_column_a, sparse_feature_a_x_sparse_feature_b]) # Input builders def input_fn_train: # returns x, y ... def input_fn_eval: # returns x, y ... estimator.fit(input_fn=input_fn_train) estimator.evaluate(input_fn=input_fn_eval) estimator.predict(x=x) ``` Input of `fit` and `evaluate` should have following features, otherwise there will be a KeyError: * if `weight_column_name` is not `None`: key=weight_column_name, value=a `Tensor` * for column in `feature_columns`: - if isinstance(column, `SparseColumn`): key=column.name, value=a `SparseTensor` - if isinstance(column, `WeightedSparseColumn`): {key=id column name, value=a `SparseTensor`, key=weight column name, value=a `SparseTensor`} - if isinstance(column, `RealValuedColumn`): key=column.name, value=a `Tensor` """ def __init__(self, # _joint_weights: pylint: disable=invalid-name feature_columns, model_dir=None, weight_column_name=None, optimizer=None, gradient_clip_norm=None, enable_centered_bias=False, label_dimension=1, _joint_weights=False, config=None, feature_engineering_fn=None): """Construct a `LinearRegressor` estimator object. Args: feature_columns: An iterable containing all the feature columns used by the model. 
All items in the set should be instances of classes derived from `FeatureColumn`. model_dir: Directory to save model parameters, graph, etc. This can also be used to load checkpoints from the directory into a estimator to continue training a previously saved model. weight_column_name: A string defining feature column name representing weights. It is used to down weight or boost examples during training. It will be multiplied by the loss of the example. optimizer: An instance of `tf.Optimizer` used to train the model. If `None`, will use an Ftrl optimizer. gradient_clip_norm: A `float` > 0. If provided, gradients are clipped to their global norm with this clipping ratio. See `tf.clip_by_global_norm` for more details. enable_centered_bias: A bool. If True, estimator will learn a centered bias variable for each class. Rest of the model structure learns the residual after centered bias. label_dimension: dimension of the label for multilabels. _joint_weights: If True use a single (possibly partitioned) variable to store the weights. It's faster, but requires all feature columns are sparse and have the 'sum' combiner. Incompatible with SDCAOptimizer. config: `RunConfig` object to configure the runtime settings. feature_engineering_fn: Feature engineering function. Takes features and labels which are the output of `input_fn` and returns features and labels which will be fed into the model. Returns: A `LinearRegressor` estimator. """ self._feature_columns = feature_columns assert self._feature_columns self._optimizer = _get_default_optimizer(feature_columns) if optimizer: self._optimizer = _get_optimizer(optimizer) chief_hook = None if (isinstance(optimizer, sdca_optimizer.SDCAOptimizer) and enable_centered_bias): enable_centered_bias = False logging.warning("centered_bias is not supported with SDCA, " "please disable it explicitly.") head = head_lib._regression_head( # pylint: disable=protected-access weight_column_name=weight_column_name, label_dimension=label_dimension, enable_centered_bias=enable_centered_bias) params = { "head": head, "feature_columns": feature_columns, "optimizer": self._optimizer, } if isinstance(optimizer, sdca_optimizer.SDCAOptimizer): assert label_dimension == 1, "SDCA only applies for label_dimension=1." assert not _joint_weights, ("_joint_weights is incompatible with" " SDCAOptimizer.") model_fn = sdca_model_fn # The model_fn passes the model parameters to the chief_hook. We then use # the hook to update weights and shrink step only on the chief. chief_hook = _SdcaUpdateWeightsHook() params.update({ "weight_column_
name": weight_column_name, "update_weights_hoo
k": chief_hook, }) else: model_fn = _linear_model_fn params.update({ "gradient_clip_norm": gradient_clip_norm, "num_ps_replicas": config.num_ps_replicas if config else 0, "joint_weights": _joint_weights, }) self._estimator = estimator.Estimator( model_fn=model_fn, model_dir=model_dir, config=config, params=params, feature_engineering_fn=feature_engineering_fn) self._additional_run_hook = (chief_hook if self._estimator.config.is_chief else None) def fit(self, x=None, y=None, input_fn=None, steps=None, batch_size=None, monitors=None, max_steps=None): """See trainable.Trainable.""" # TODO(roumposg): Remove when deprecated monitors are removed. if monitors is None: monitors = [] deprecated_mo
################################################################################ # # Copyright 2015-2020 Félix Brezo and Yaiza Rubio # # This program is part of OSRFramework. You can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ################################################################################ __author__ = "Felix Brezo, Yaiza Rubio <contacto@i3visio.com>" __version__ = "2.0" from osrframework.utils.platforms import Platform class Papaly(Platform): """A <Platform> object for Papaly.""" def __init__(self): self.platformName = "Papaly" self.tags = ["social"] ######################## # Defining valid modes # ######################## self.isValidMode = {} self.isValidMode["phonefy"] = False self.isValidMode["usufy"] = True self.isValidMode["searchfy"] = False ###################################### # Search URL for the different modes # ###################################### # Strings with the URL for each and every mode self.url = {} #self.url["phonefy"] = "http://anyurl.com//phone/" + "<phonefy>" self.url["usufy"] = "https://papaly.com/<usufy>" #self.url["searchfy"] = "http://anyurl.com/search/" + "<searchfy>" ###################################### # Whether the user needs credentials # ###################################### self.needsCredentials = {} #self.needsCredentials["phonefy"] = False self.needsCredentials["usufy"] = False #self.needsCredentials["searchfy"] = False ################# # Valid queries # ################# # Strings that will imply that the query number is not appearing self.validQuery = {} # The regular expression '.+' will match any query #self.validQuery["phonefy"] = ".*" self.validQuery["usufy"] = ".+" #self.validQuery["searchfy"] = ".*" ################### # Not_found clues # ################### # Strings that will imply that the query number is not appearing self.notFoundText = {} #self.notFoundText["phonefy"] = [] self.notFoundText["usufy"] = ["<title>Page not found</title>"] #self.notFoundText["searchfy"] = [] ######################### # Fields to be searched # ######################### self.fieldsRegExp = {} # Definition of regular expressions to be searched in phonefy mode #self.fieldsRegExp["phonefy"] = {}
# Example of fields: #self.fieldsRegExp["phonefy"]["i3visio.location"] = "" # Definition of regular expressions to be searched in usufy mode self.fieldsRegExp["usufy"] = {} # Example of fields:
#self.fieldsRegExp["usufy"]["i3visio.location"] = "" # Definition of regular expressions to be searched in searchfy mode #self.fieldsRegExp["searchfy"] = {} # Example of fields: #self.fieldsRegExp["searchfy"]["i3visio.location"] = "" ################ # Fields found # ################ # This attribute will be feeded when running the program. self.foundFields = {}
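# Hedged sketch of how osrframework drives a <Platform> object like the one
# above: the usufy URL template is expanded with a nickname and the fetched
# page is then checked against the notFoundText markers. The direct attribute
# access below is illustrative only; the framework's engine normally does
# this internally.
if __name__ == '__main__':
    platform = Papaly()
    nick = 'someuser'  # hypothetical alias
    url = platform.url['usufy'].replace('<usufy>', nick)
    print(url)  # -> https://papaly.com/someuser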
/docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataModel.html#DataModelDataTypes """ def __init__(self, type_as_dict): self.type = list(type_as_dict.keys())[0] self.value = list(type_as_dict.values())[0] def __hash__(self): return hash((self.type, self.value)) def __eq__(self, other): return ( self.type == other.type and self.value == other.value ) def __repr__(self): return "DynamoType: {0}".format(self.to_json()) def to_json(self): return {self.type: self.value} def compare(self, range_comparison, range_objs): """ Compares this type against comparison filters """ range_values = [obj.value for obj in range_objs] comparison_func = get_comparison_func(range_comparison) return comparison_func(self.value, *range_values) class Item(object): def __init__(self, hash_key, hash_key_type, range_key, range_key_type, attrs): self.hash_key = hash_key self.hash_key_type = hash_key_type self.range_key = range_key self.range_key_type = range_key_type self.attrs = {} for key, value in attrs.items(): self.attrs[key] = DynamoType(value) def __repr__(self): return "Item: {0}".format(self.to_json()) def to_json(self): attributes = {} for attribute_key, attribute in self.attrs.items(): attributes[attribute_key] = attribute.value return { "Attributes": attributes } def describe_attrs(self, attributes): if attributes: included = {} for key, value in self.attrs.items(): if key in attributes: included[key] = value else: included = self.attrs return { "Item": included } class Table(object): def __init__(self, name, hash_key_attr, hash_key_type, range_key_attr=None, range_key_type=None, read_capacity=None, write_capacity=None): self.name = name self.hash_key_attr = hash_key_attr self.hash_key_type = hash_key_type self.range_key_attr = range_key_attr self.range_key_type = range_key_type self.read_capacity = read_capacity self.write_capacity = write_capacity self.created_at = datetime.datetime.utcnow() self.items = defaultdict(dict) @property def has_range_key(self): return self.range_key_attr is not None @property def describe(self): results = { "Table": { "CreationDateTime": unix_time(self.created_at), "KeySchema": { "HashKeyElement": { "AttributeName": self.hash_key_attr, "AttributeType": self.hash_key_type }, }, "ProvisionedThroughput": { "ReadCapacityUnits": sel
f.read_capacity, "WriteCapacityUnits": self.write_capacity }, "TableName": self.name, "TableStatus": "ACTIVE", "ItemCount": len(self), "TableSizeBytes": 0, } } if self.has_range_ke
y: results["Table"]["KeySchema"]["RangeKeyElement"] = { "AttributeName": self.range_key_attr, "AttributeType": self.range_key_type } return results def __len__(self): count = 0 for key, value in self.items.items(): if self.has_range_key: count += len(value) else: count += 1 return count def __nonzero__(self): return True def __bool__(self): return self.__nonzero__() def put_item(self, item_attrs): hash_value = DynamoType(item_attrs.get(self.hash_key_attr)) if self.has_range_key: range_value = DynamoType(item_attrs.get(self.range_key_attr)) else: range_value = None item = Item(hash_value, self.hash_key_type, range_value, self.range_key_type, item_attrs) if range_value: self.items[hash_value][range_value] = item else: self.items[hash_value] = item return item def get_item(self, hash_key, range_key): if self.has_range_key and not range_key: raise ValueError("Table has a range key, but no range key was passed into get_item") try: if range_key: return self.items[hash_key][range_key] else: return self.items[hash_key] except KeyError: return None def query(self, hash_key, range_comparison, range_objs): results = [] last_page = True # Once pagination is implemented, change this if self.range_key_attr: possible_results = self.items[hash_key].values() else: possible_results = list(self.all_items()) if range_comparison: for result in possible_results: if result.range_key.compare(range_comparison, range_objs): results.append(result) else: # If we're not filtering on range key, return all values results = possible_results return results, last_page def all_items(self): for hash_set in self.items.values(): if self.range_key_attr: for item in hash_set.values(): yield item else: yield hash_set def scan(self, filters): results = [] scanned_count = 0 last_page = True # Once pagination is implemented, change this for result in self.all_items(): scanned_count += 1 passes_all_conditions = True for attribute_name, (comparison_operator, comparison_objs) in filters.items(): attribute = result.attrs.get(attribute_name) if attribute: # Attribute found if not attribute.compare(comparison_operator, comparison_objs): passes_all_conditions = False break elif comparison_operator == 'NULL': # Comparison is NULL and we don't have the attribute continue else: # No attribute found and comparison is no NULL. This item fails passes_all_conditions = False break if passes_all_conditions: results.append(result) return results, scanned_count, last_page def delete_item(self, hash_key, range_key): try: if range_key: return self.items[hash_key].pop(range_key) else: return self.items.pop(hash_key) except KeyError: return None class DynamoDBBackend(BaseBackend): def __init__(self): self.tables = OrderedDict() def create_table(self, name, **params): table = Table(name, **params) self.tables[name] = table return table def delete_table(self, name): return self.tables.pop(name, None) def update_table_throughput(self, name, new_read_units, new_write_units): table = self.tables[name] table.read_capacity = new_read_units table.write_capacity = new_write_units return table def put_item(self, table_name, item_attrs): table = self.tables.get(table_name) if not table: return None return table.put_item(item_attrs) def get_item(self, table_name, hash_key_dict, range_key_dict): table = self.tables.get(table_name) if not table: return None hash_key = DynamoType(hash_key_dict) range_key = DynamoType(range_key_dict) if range_key_dict else None return table.get_item(hash_key, range_key) def query(self, table_name, hash_key_dict
# -*- coding: utf-8 -*- ############################################################################### # # GetTariff # Returns an individual Tariff object with a given id. # # Python versions 2.6, 2.7, 3.x # # Copyright 2014, Temboo Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, # either express or implied. See the License for the specific # language governing permissions and limitations under the License. # # ############################################################################### from temboo.core.choreography import Choreography from temboo.core.choreography import InputSet from temboo.core.choreography import ResultSet from temboo.core.choreography import ChoreographyExecution import json class GetTariff(Choreography): def __init__(self, temboo_session): """ Create a new instance of the GetTariff Choreo. A TembooSession object, containing a valid set of Temboo credentials, must be supplied. """ super(GetTariff, self).__init__(temboo_session, '/Library/Genability/TariffData/GetTariff') def new_input_set(self): return GetTariffInputSet() def _make_result_set(self, result, path): return GetTariffResultSet(result, path) def _make_execution(self, session, exec_id, path): return GetTariffChoreographyExecution(session, exec
_id, path) class GetTariffInputSet(InputSet): """ An InputSet with methods appropriate for specifying the inputs to the GetTariff Choreo. The InputSet object is used to specify input parameters when exec
uting this Choreo. """ def set_AppID(self, value): """ Set the value of the AppID input for this Choreo. ((conditional, string) The App ID provided by Genability.) """ super(GetTariffInputSet, self)._set_input('AppID', value) def set_AppKey(self, value): """ Set the value of the AppKey input for this Choreo. ((required, string) The App Key provided by Genability.) """ super(GetTariffInputSet, self)._set_input('AppKey', value) def set_MasterTariffID(self, value): """ Set the value of the MasterTariffID input for this Choreo. ((required, integer) The master tariff id. This can be retrieved in the output of the GetTariffs Choreo.) """ super(GetTariffInputSet, self)._set_input('MasterTariffID', value) def set_PopulateProperties(self, value): """ Set the value of the PopulateProperties input for this Choreo. ((optional, boolean) Set to "true" to populate the properties for the returned Tariffs.) """ super(GetTariffInputSet, self)._set_input('PopulateProperties', value) def set_PopulateRates(self, value): """ Set the value of the PopulateRates input for this Choreo. ((optional, boolean) Set to "true" to populate the rate details for the returned Tariffs.) """ super(GetTariffInputSet, self)._set_input('PopulateRates', value) class GetTariffResultSet(ResultSet): """ A ResultSet with methods tailored to the values returned by the GetTariff Choreo. The ResultSet object is used to retrieve the results of a Choreo execution. """ def getJSONFromString(self, str): return json.loads(str) def get_Response(self): """ Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Genability.) """ return self._output.get('Response', None) class GetTariffChoreographyExecution(ChoreographyExecution): def _make_result_set(self, response, path): return GetTariffResultSet(response, path)
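# Hedged usage sketch following the usual Temboo SDK pattern (the session
# credentials and the tariff id below are placeholders, not real values):
#
#     from temboo.core.session import TembooSession
#
#     session = TembooSession('ACCOUNT_NAME', 'APP_KEY_NAME', 'APP_KEY_VALUE')
#     choreo = GetTariff(session)
#     inputs = choreo.new_input_set()
#     inputs.set_AppID('GENABILITY_APP_ID')
#     inputs.set_AppKey('GENABILITY_APP_KEY')
#     inputs.set_MasterTariffID('512')
#     results = choreo.execute_with_results(inputs)
#     print(results.get_Response())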
# -*- coding: utf-8 -*- # (c) 2016 Alfredo de la Fuente - AvanzOSC # License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html from openerp import models, fields, api class SaleOrder(models.Model): _inherit = 'sale.order' @api.multi def action_button_confirm(self): procurement_obj = self.env['procurement.order'] procurement_group_obj = self.env['procurement.group'] res = super(SaleOrder, self).action_button_confirm() for line in self.order_line: valid = self._validate_service_project_for_procurement( line.product_id) if valid: if not self.procurement_group_i
d: vals = self._prepare_procurement_group(self) group = procurement_group_obj.create(vals) self.write({'procurement_group_id': group.id}) vals = self
._prepare_order_line_procurement( self, line, group_id=self.procurement_group_id.id) vals['name'] = self.name + ' - ' + line.product_id.name procurement = procurement_obj.create(vals) procurement.run() return res def _validate_service_project_for_procurement(self, product): routes = product.route_ids.filtered(lambda r: r.id in [ self.env.ref('procurement_service_project.route_serv_project').id]) return product.type == 'service' and routes class SaleOrderLine(models.Model): _inherit = 'sale.order.line' service_project_task = fields.Many2one( comodel_name='project.task', string='Generated task from procurement', copy=False)
je(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-success'><span class='fa fa-check fa-fw'></span>Finished recovering the academic units</p>",10)
                cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,True,"",80)
                cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-success'><span class='fa fa-check fa-fw'></span>The project has been created</p>",90)
                cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-success'><span class='fa fa-check fa-fw'></span>Your browser will restart</p>",97)
                cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"EOF",100)
            except:
                cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"<p class='text-danger'><span class='fa fa-times fa-fw'></span><b>PROBLEM:</b> the recovery of academic units could not be completed: {}</p>".format(traceback.format_exc()),80)
                cambia_mensaje(request.POST.get('csrfmiddlewaretoken'),request.POST.get('fraseB'),request.user.username,False,"EOF",100)
                # print traceback.format_exc()
            # messages.success(request, "The project was created successfully")
            #articulos = funciones.buscadorSimple(fraseB)
            #ac = AdministradorConsultas()
            #ac.descargar_papers(fraseB)
            #lista_scopus = ac.titulos_descargas
            #if fraseA != "" or autor != "" or words != "":
            #    articulos = funciones.buscadorAvanzado(fraseA, words, autor, after, before)
            #print articulos
            #funciones.moveFiles(modelo_proyecto.id_proyecto, request.user, articulos, lista_scopus)
            #funciones.escribir_archivo_documentos(modelo_proyecto.id_proyecto, request.user, articulos, lista_scopus)
            # messages.success(request, "The project was created successfully")
            #~ return redirect('crear_proyecto')
        else:
            messages.error(request, "Unable to create the project")
    else:
        form = FormularioCrearProyecto()
    return render(request, 'GestionProyecto/NuevoProyecto.html', {'form': form, 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}, context_instance=RequestContext(request))

# Listing of the current user's own projects.
@login_required
def ver_mis_proyectos(request):
    global model_proyecto
    global proyectos_list
    try:
        proyectos_list = get_list_or_404(proyecto, idUsuario=request.user)
    except:
        proyectos_list = None
        messages.success(request, "You do not have any projects")
    return render(request, 'GestionProyecto/verMisProyectos.html', {'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos}, context_instance=RequestContext(request))

# Listing of publicly available projects that do not belong to the current user.
@login_required
def ver_otros_proyectos(request):
    global model_proyecto
    global proyectos_list
    if (model_proyecto != None and model_proyecto.idUsuario != request.user):
        model_proyecto = None
    try:
        proyectos_list_all = get_list_or_404(proyecto)
        idUser = request.user
        otros_proyectos = []
        for project in proyectos_list_all:
            if project.idUsuario != idUser:
                otros_proyectos.append(project)
    except:
        proyectos_list_all = None
        otros_proyectos = None
    return render(request, 'GestionProyecto/OtrosProyectos.html', { 'proyectos': otros_proyectos, 'proyectos_user':proyectos_list, 'mproyecto': model_proyecto}, context_instance=RequestContext(request))

@login_required
def busqueda_navegacion(request):
    global proyectos_list
    global model_proyecto
    return render(request, 'GestionBusqueda/Busqueda_Navegacion.html', {'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos})

@login_required
def editar_proyecto(request, id_proyecto):
    global proyectos_list
    global model_proyecto
    model_proyecto = get_object_or_404(proyecto, id_proyecto=id_proyecto)
    request.session['proyecto'] = str(model_proyecto.id_proyecto)
    request.proyecto = model_proyecto
    # print "This is my project:",request.session['proyecto']
    lista = funciones.crearListaDocumentos(id_proyecto, request.user)
    if request.method == 'POST':
        proyecto_form = FormularioCrearProyecto(request.POST, instance=model_proyecto)
        #proyecto_form.fields['disponibilidad'].widget.attrs['disabled']=True
        # is_valid is a method; the bare attribute was always truthy
        if proyecto_form.is_valid():
            #print proyecto_form.cleaned_data
            #nuevoNombre=proyecto_form.cleaned_data['nombre']
            model_project = proyecto_form.save()
            # funciones.cambiarNombreDirectorio(nombreDirectorioAnterior,nuevoNombre,request.user)
            messages.success(request, "The project was modified successfully")
        else:
            messages.error(request, "Unable to edit the project")
    else:
        proyecto_form = FormularioCrearProyecto(instance=model_proyecto)
    return render(request, 'GestionProyecto/editar_proyecto.html', {'form': proyecto_form, 'lista': lista, 'user': request.user, 'mproyecto':model_proyecto, 'proyectos_user': proyectos_list, 'proyecto': id_proyecto, 'lista_permisos': permisos}, context_instance=RequestContext(request))

@login_required
def ver_proyecto(request, id_proyecto):
    global model_proyecto
    global proyectos_list
    proyecto_actual = None
    proyecto_actual = get_object_or_404(proyecto, id_proyecto=id_proyecto)
    proyecto_form = FormularioCrearProyecto(instance=proyecto_actual)
    if (model_proyecto != None and model_proyecto.idUsuario != request.user):
        model_proyecto = None
    #model_proyecto = get_object_or_404(proyecto, id_proyecto=id_proyecto)
    #proyecto_form = FormularioCrearProyecto(instance=model_proyecto)
    #proyecto_form.fields['disponibilidad'].widget.attrs['disabled']=True
    #proyecto_form.fields['nombre'].label="Titulo del proyecto"
    proyecto_form.fields['nombre'].widget.attrs['disabled'] = True
    proyecto_form.fields['resumen'].widget.attrs['disabled'] = True
    return render(request, 'GestionProyecto/ver_proyecto.html', {'form': proyecto_form, 'mproyecto':model_proyecto, 'proyectos_user':proyectos_list, 'lista_permisos': permisos}, context_instance=RequestContext(request))

@login_required
def buscador(request):
    global proyectos_list
    global model_proyecto
    if request.method == 'GET':
        ir = ConsumirServicios.IR()
        fraseBusqueda = request.GET.get("busquedaIR")
        data = ir.consultar(fraseBusqueda,str(request.user.username), str(model_proyecto.id_proyecto))
        # print model_proyecto
        # IR.consultar(fraseBusqueda,"","")
        # data = ir.consultar(fraseBusqueda,str(request.user.username),request.session['proyecto'])
        #data = funciones.busqueda(fraseBusqueda)
        #for d in data:
        #    d['path'] = d['path'].replace("/home/vigtech/shared/repository/", "/media/").encode("utf8")
        # print data
        # print fraseBusqueda
        return render(request, "GestionBusqueda/Busqueda_Navegacion.html", {'resultados': data, 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto, 'lista_permisos': permisos})
    else:
        # data was undefined on non-GET requests; render an empty result list instead
        return render(request, "GestionBusqueda/Busqueda_Navegacion.html", {'resultados': [], 'proyectos_user': proyectos_list, 'mproyecto': model_proyecto,'lista_permisos': permisos})

@login_required
def analisisView(request):
    global proyectos_list
    global model_proyecto
    #data = ConsumirServicios.consumir_red(request.user.username, request.session['pr
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and li
mitations # under the License. import mock from keystone import catalog from keystone.common import manager from keystone.tests import unit class TestCreateLegacyDriver(unit.BaseTestCase): @mock.patch('oslo_log.versionutils.report_deprecated_feature') def test_class_is_properly_deprecated(self, mock_reporter): Driver = manager.create_legacy_dri
ver(catalog.CatalogDriverV8) # NOTE(dstanek): I want to subvert the requirement for this # class to implement all of the abstract methods. Driver.__abstractmethods__ = set() impl = Driver() details = { 'as_of': 'Liberty', 'what': 'keystone.catalog.core.Driver', 'in_favor_of': 'keystone.catalog.core.CatalogDriverV8', 'remove_in': mock.ANY, } mock_reporter.assert_called_with(mock.ANY, mock.ANY, details) self.assertEqual('N', mock_reporter.call_args[0][2]['remove_in'][0]) self.assertIsInstance(impl, catalog.CatalogDriverV8)
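
# A minimal sketch of the wrapping pattern exercised by the test above, using
# only the standard library: a factory returns a subclass of the new driver
# class whose constructor reports a deprecation before delegating. The names
# make_legacy_alias/LegacyDriver are illustrative, not keystone APIs.
import warnings

def make_legacy_alias(new_cls, old_path):
    class LegacyDriver(new_cls):
        def __init__(self, *args, **kwargs):
            warnings.warn(
                '%s is deprecated in favor of %s' % (old_path, new_cls.__name__),
                DeprecationWarning, stacklevel=2)
            super(LegacyDriver, self).__init__(*args, **kwargs)
    return LegacyDriver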
me': 'foo', 'email': self.user.email}, follow=True ) assert response.status_code == 200 assert self.user.reload().username == 'foo' alog = ActivityLog.objects.latest('pk') assert alog.action == amo.LOG.ADMIN_USER_EDITED.id assert alog.arguments == [self.user] assert alog.details == {'username': [old_username, 'foo']} @mock.patch.object(UserProfile, '_delete_related_content') def test_can_not_delete_with_users_edit_permission( self, _delete_related_content_mock ): user = user_factory(email='someone@mozilla.com') self.grant_permission(user, 'Users:Edit') assert not user.deleted self.client.login(email=user.email) response = self.client.get(self.delete_url, follow=True) assert response.status_code == 403 response = self.client.post(self.delete_url, {'post': 'yes'}, follow=True) assert response.status_code == 403 user.reload() assert not user.deleted assert user.email assert _delete_related_content_mock.call_count == 0 @mock.patch.object(UserProfile, '_delete_related_content') def test_can_delete_with_admin_advanced_permission( self, _delete_related_content_mock ): user = user_factory(email='someone@mozilla.com') self.grant_permission(user, 'Admin:Advanced') assert not self.user.deleted self.client.login(email=user.email) core.set_user(user) response = self.client.get(self.delete_url, follow=True) assert response.status_code == 200 assert b'Cannot delete user' not in response.content response = self.client.post(self.delete_url, {'post': 'yes'}, follow=True) assert response.status_code == 200 self.user.reload() assert self.user.deleted assert self.user.email assert _delete_related_content_mock.call_count == 1 alog = ActivityLog.objects.latest('pk') assert alog.action == amo.LOG.ADMIN_USER_ANONYMIZED.id assert alog.arguments == [self.user] def test_can_delete_with_related_objects_with_admin_advanced_permission(self): # Add related instances... addon = addon_factory() addon_with_other_authors = addon_factory() AddonUser.objects.create(addon=addon_with_other_authors, user=user_factory()) relations_that_should_be_deleted = [ AddonUser.objects.create(addon=addon_with_other_authors, user=self.user), Rating.objects.create(addon=addon_factory(), rating=5, user=self.user), addon, # Has no other author, should be deleted. collection_factory(author=self.user), ] relations_that_should_survive = [ AbuseReport.objects.create(reporter=self.user), AbuseReport.objects.create(user=self.user), ActivityLog.create(user=self.user, action=amo.LOG.USER_EDITED), ReviewerScore.objects.create(user=self.user, score=42), addon_with_other_authors, # Has other authors, should be kept. # Bit of a weird case, but because the user was the only author of # this add-on, the addonuser relation is kept, and both the add-on # and the user are soft-deleted. This is in contrast with the case # where the user is *not* the only author, in which case the # addonuser relation is deleted, but the add-on is left intact. AddonUser.objects.create(addon=addon, user=self.user), ] # Now test as normal. user = user_factory(email='someone@mozilla.com') self.grant_permission(user, 'Admin:Advanced') assert not self.user.deleted self.client.login(email=user.email)
core.set_user(user) response = self.client.get(self.delete_url, follow=True) assert response.status_code == 200
assert b'Cannot delete user' not in response.content response = self.client.post(self.delete_url, {'post': 'yes'}, follow=True) assert response.status_code == 200 self.user.reload() assert self.user.deleted assert self.user.email alog = ActivityLog.objects.filter(action=amo.LOG.ADMIN_USER_ANONYMIZED.id).get() assert alog.arguments == [self.user] # Test the related instances we created earlier. for obj in relations_that_should_be_deleted: assert not obj.__class__.objects.filter(pk=obj.pk).exists() for obj in relations_that_should_survive: assert obj.__class__.objects.filter(pk=obj.pk).exists() def test_get_actions(self): user_admin = UserAdmin(UserProfile, admin.site) request = RequestFactory().get('/') request.user = AnonymousUser() assert list(user_admin.get_actions(request).keys()) == [] request.user = user_factory() self.grant_permission(request.user, 'Users:Edit') assert list(user_admin.get_actions(request).keys()) == [ 'ban_action', 'reset_api_key_action', 'reset_session_action', ] def test_ban_action(self): another_user = user_factory() a_third_user = user_factory() users = UserProfile.objects.filter(pk__in=(another_user.pk, self.user.pk)) user_admin = UserAdmin(UserProfile, admin.site) request = RequestFactory().get('/') request.user = user_factory() core.set_user(request.user) request._messages = default_messages_storage(request) user_admin.ban_action(request, users) # Both users should be banned. another_user.reload() self.user.reload() assert another_user.deleted assert another_user.email assert self.user.deleted assert self.user.email # The 3rd user should be unaffected. assert not a_third_user.reload().deleted # We should see 2 activity logs for banning. assert ( ActivityLog.objects.filter(action=amo.LOG.ADMIN_USER_BANNED.id).count() == 2 ) def test_ban_button_in_change_view(self): ban_url = reverse('admin:users_userprofile_ban', args=(self.user.pk,)) user = user_factory(email='someone@mozilla.com') self.grant_permission(user, 'Users:Edit') self.client.login(email=user.email) response = self.client.get(self.detail_url, follow=True) assert response.status_code == 200 assert ban_url in response.content.decode('utf-8') def test_reset_api_key_action(self): another_user = user_factory() a_third_user = user_factory() APIKey.objects.create(user=self.user, is_active=True, key='foo') APIKeyConfirmation.objects.create(user=self.user) APIKeyConfirmation.objects.create(user=another_user) APIKey.objects.create(user=a_third_user, is_active=True, key='bar') APIKeyConfirmation.objects.create(user=a_third_user) users = UserProfile.objects.filter(pk__in=(another_user.pk, self.user.pk)) user_admin = UserAdmin(UserProfile, admin.site) request = RequestFactory().get('/') request.user = user_factory() core.set_user(request.user) request._messages = default_messages_storage(request) user_admin.reset_api_key_action(request, users) # APIKeys should have been deactivated, APIKeyConfirmation deleted. assert self.user.api_keys.exists() assert not self.user.api_keys.filter(is_active=True).exists() assert not APIKeyConfirmation.objects.filter(user=self.user).exists() # This user didn't have api keys before, it shouldn't matter. assert not another_user.api_keys.exists() assert not another_user.api_keys.filter(is_active=True).exists() assert not APIKeyConfirmation.objects.filter(user=another_user).exists() # The 3rd user should be unaffected. 
assert a_third_user.api_keys.exists() assert a_third_user.api_keys.filter(is_active=True).exists() assert APIKeyConfirmation.objects.filter(user=a_third_user).exists() # We should s
from intprim.ba
yesian_interaction_primitives import * import intprim.basis import intprim.constants impor
t intprim.examples import intprim.filter import intprim.filter.align import intprim.filter.spatiotemporal import intprim.util
# coding: utf-8 import random from PIL import Image
from PIL import ImageDraw from PIL import ImageFont import sys import os # how many pictures to generate num = 10 if len(sys.argv) > 1: num = int(sys.argv[1])
def genline(text, font, filename):
    ''' generate one line image and its ground-truth text file '''
    w, h = font.getsize(text)
    image = Image.new('RGB', (w + 15, h + 15), 'white')
    brush = ImageDraw.Draw(image)
    brush.text((8, 5), text, font=font, fill=(0, 0, 0))
    image.save(filename + '.jpg')
    # the with-block closes the file; no explicit close() is needed
    with open(filename + '.txt', 'w') as f:
        f.write(text)


if __name__ == '__main__':
    if not os.path.isdir('./lines/'):
        os.mkdir('./lines/')
    for i in range(num):
        fontname = './fonts/simkai.ttf'
        fontsize = 24
        font = ImageFont.truetype(fontname, fontsize)
        # two random 10-digit blocks -> one 20-digit line
        text = str(random.randint(1000000000, 9999999999))
        text = text + str(random.randint(1000000000, 9999999999))
        #text = str(random.randint(1000, 9999))
        filename = './lines/' + str(i + 1)
        genline(text, font, filename)
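
# Note: ImageFont.getsize(), used in genline() above, was deprecated and later
# removed in Pillow 10. A hedged replacement sketch using the bbox API, with a
# fallback for older Pillow releases:
def measure_text(text, font):
    '''return (width, height) of text for the given truetype font'''
    try:
        left, top, right, bottom = font.getbbox(text)
        return right - left, bottom - top
    except AttributeError:
        # older Pillow releases only provide getsize()
        return font.getsize(text)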
gs.DEFINE_float("validation_set_percentage",0.1, "the percentage of training examples that will be used for validation set") tf.flags.DEFINE_string("data_postive_path","./data/rt-polaritydata/rt-polarity.pos", "file path for postive data") tf.flags.DEFINE_string("data_negative_path","./data/rt-polaritydata/rt-polarity.neg", "file path for negative data") #============================================================================== # model hyperparameters #============================================================================== tf.flags.DEFINE_float("learning_rate",0.001,"learning rate(default 0.001)") tf.flags.DEFINE_integer("embedding_size",128,"the size of word embeeding (default 128)") tf.flags.DEFINE_integer("num_filters",128,"the number of filters for each filter size(default 128)") tf.flags.DEFINE_string("filter_sizes","3,4,5","comma-separated filter sizes(default 3,4,5)") tf.flags.DEFINE_float("keep_prob",0.5,"the probability used for dropout(default 0.5)") tf.flags.DEFINE_float("l2_reg_lambda",0.0,"the l2 regularization lambda(default 0)") #============================================================================== # train parameters #============================================================================== tf.flags.DEFINE_integer("batch_size",64,"Batch size (default size 64)") tf.flags.DEFINE_integer("num_epochs",200,"Epoch sizes(default size 200)") tf.flags.DEFINE_integer("evaluate_interval",100,"Evaluate model interval(default 100)") tf.flags.DEFINE_integer("checkpoint_interval",100,"Save Checkpoint Interval(default 100)") tf.flags.DEFINE_integer("num_checkpoints",5,"number of checkpoints to save(default 5)") #============================================================================== # misc parameters #============================================================================== tf.flags.DEFINE_bool("allow_soft_parameters",True,"allow soft device placement(default true)") tf.flags.DEFINE_bool("log_device_placement",False,"Log placement of ops on devices(default false)") FLAGS = tf.flags.FLAGS FLAGS._parse_flags() print("\nParameters:") for attr,value in sorted(FLAGS.__flags.items()): print("%s:%s" % (attr.upper(),value)) print("\n") #============================================================================== # Data Preparation #============================================================================== # Load data print("Loading Data...\n") x_data,y = data_util.load_data_and_labels(FLAGS.data_postive_path,FLAGS.data_negative_path) # construct vocabulary max_sentence_length = max([len(sent.split(" ")) for sent in x_data]) vocab_processor = learn.preprocessing.VocabularyProcessor(max_sentence_length) x = np.array(list(vocab_processor.fit_transform(x_data))) print(max_sentence_length) # shuffle data np.random.seed(10) shuffled_indices = np.random.permutation(np.arange(len(y))) x_shuffled = x[shuffled_indices] y_shuffled = y[shuffled_indices] # split train-test set # have a try with k-fold cross-validation later. validation_set_index = -1 * int(FLAGS.validation_set_percentage * float(len(y))) x_train,x_val = x_shuffled[:validation_set_index],x_shuffled[validation_set_index:] y_train,y_val = y_shuffled[:validation_set_index],y_shuffled[validation_set_index:] print(
"Vocabulary Size: %s" % len(vocab_processor.vocabulary_._mapping)) print("Length of train/validation set: %d , %d ." % (len(y_train),len(y_val))) #=================================================
============================= # Training #============================================================================== with tf.Graph().as_default(): session_config = tf.ConfigProto(allow_soft_placement=FLAGS.allow_soft_parameters, log_device_placement=FLAGS.log_device_placement) sess = tf.Session(config=session_config) with sess.as_default(): cnn = CNN4Text(sequence_length=x_train.shape[1], num_classes=y_train.shape[1], vocab_size=len(vocab_processor.vocabulary_), embedding_size=FLAGS.embedding_size, filter_sizes=list(map(int,FLAGS.filter_sizes.split(","))), num_filters=FLAGS.num_filters, l2_reg_lambda=FLAGS.l2_reg_lambda) # the detail of train procedure global_step = tf.Variable(0,name="global_step",trainable=False) optimizer = tf.train.AdamOptimizer(learning_rate=FLAGS.learning_rate) grads_and_vars = optimizer.compute_gradients(cnn._loss) train_op = optimizer.apply_gradients(grads_and_vars,global_step=global_step) grad_summaries = [] for g,v in grads_and_vars: if g is not None: grad_hist_summary = tf.summary.histogram("%s/grad/hist" % v.name,g) sparsity_summary = tf.summary.scalar("%s/grad/hist" % v.name,tf.nn.zero_fraction(g)) grad_summaries.append(grad_hist_summary) grad_summaries.append(sparsity_summary) grad_summayies_merged = tf.summary.merge(grad_summaries) # output path of summary timestamp = str(int(time.time())) output_path = os.path.abspath(os.path.join(os.path.curdir,"runs",timestamp)) print("Writing into Output Path: %s ..." % output_path) # summary for loss and accuracy loss_summary = tf.summary.scalar("loss",cnn._loss) acc_summary = tf.summary.scalar("accuracy",cnn._accuracy) # train summaries train_summary_op = tf.summary.merge([loss_summary,acc_summary,grad_summayies_merged]) train_summary_path = os.path.join(output_path,"summary","train") train_summary_writer = tf.summary.FileWriter(train_summary_path,sess.graph) #validation summaries validation_summary_op = tf.summary.merge([loss_summary,acc_summary]) validation_summary_path = os.path.join(output_path,"summary","validation") validation_summary_writer = tf.summary.FileWriter(validation_summary_path,sess.graph) checkpoint_path = os.path.abspath(os.path.join(output_path,"checkpoints")) checkpoint_prefix = os.path.join(checkpoint_path,"model") if not os.path.exists(checkpoint_path): os.makedirs(checkpoint_path) saver = tf.train.Saver(tf.global_variables(),max_to_keep=FLAGS.num_checkpoints) # save vocabulary vocab_processor.save(os.path.join(output_path,"vocab")) sess.run(tf.global_variables_initializer()) # a single training step def train_step(x_batch,y_batch,writer=None): ''' a single training step ''' feed_dict = {cnn._input_x:x_batch, cnn._input_y:y_batch, cnn._keep_prob:FLAGS.keep_prob} _,step,summaries,loss,accuracy = sess.run([train_op,global_step,train_summary_op ,cnn._loss,cnn._accuracy],feed_dict) time_str = datetime.datetime.now().isoformat() print("%s: Step: %d,Loss: %.4f,Accuracy: %.4f" % (time_str,step,loss,accuracy)) if writer: writer.add_summary(summaries,step) # a single validation step def validation_step(x_batch,y_batch,writer=None): ''' a single training step ''' feed_dict = {cnn._input_x:x_batch, cnn._input_y:y_batch, cnn._keep_prob:1.0} # for evaluation step,summaries,loss,accuracy = sess.run([global_step,validation_summary_op ,cnn._loss,cnn._accuracy],feed_dict) time_str = datetime.datetime.now().isoformat() print("%s: Step: %d,Loss: %.4f,Accuracy: %.4f" % (time_str,step,loss,accuracy)) if writer: write
""" CMSIS-DAP Interface Firmware Copyright (c) 2009-2013 ARM Limited Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under t
he License. Extract and patch the interface without bootloader """ from options import get_options from paths import get_interface_path, TMP_DIR f
rom utils import gen_binary, is_lpc, split_path from os.path import join if __name__ == '__main__': options = get_options() in_path = get_interface_path(options.interface, options.target, bootloader=False) _, name, _ = split_path(in_path) out_path = join(TMP_DIR, name + '.bin') print '\nELF: %s' % in_path gen_binary(in_path, out_path, is_lpc(options.interface)) print "\nBINARY: %s" % out_path
#Ensure there is an exceptional edge from the following case def f2(): b, d = Base, Derived try: class MyNewClass(b, d): pass except: e2 def f3(): sequence_of_four = a_global try: a, b, c = sequence_of_four except: e3 #Always treat locals as non-raising to keep DB size down. def f4(): if cond: local = 1 try: local except: e4 def f5(): try: a_global except: e5 def f6(): local = a_global try: local() except: e6 #Literals can't raise def f7(): try: 4 except: e7 def f8(): try: a + b except: e8 #OK assignments def f9(): try: a, b = 1, 2 except: e9 def fa()
: seq = a_global try: a = seq except: ea def fb(): a, b, c = a_global try: seq = a, b, c except: eb #Ensure that a.b and c[d] can raise def fc(): a, b = a_global try: return a[b] except:
ec def fd(): a = a_global try: return a.b except: ed def fe(): try: call() except: ee else: ef
# # Copyright (c) 2008-2015 Citrix Systems, Inc. # # Licensed under the Apache License, Version 2.0 (the "License") # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # di
stributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # class nspbr6_args : """ Provides additional arguments required for fetching the nspbr6 resource. """ def __init__(self) : self._detail = False @property def detail(self) : """To get a detailed view. """ try : return self._detail except Exception as e: raise e @detail.setter def detail(self, detail) : """To get a detailed view. """ try : self._detail = detail except Exception as e: raise e
import pytest from plumbum.colorlib.styles import ANSIStyle, Color, AttributeNotFound, ColorNotFound from plumbum.colorlib.names import color_html, FindNearest class TestNearestColor: def test_exact(self): assert FindNearest(0,0,0).all_fast() == 0 for n,color in enumerate(color_html): # Ignoring duplicates if n not in (16, 21, 46, 51, 196, 201, 226, 231, 244): rgb = (int(color[1:3],16), int(color[3:5],16), int(color[5:7],16)) assert FindNearest(*rgb).all_fast() == n def test_nearby(self): assert FindNearest(1,2,2).all_fast() == 0 assert FindNearest(7,7,9).all_fast() == 232 def test_simplecolor(self): assert FindNearest(1,2,4).only_basic() == 0 assert FindNearest(0,255,0).only_basic() == 2 assert FindNearest(100,100,0).only_basic() == 3 assert FindNearest(140,140,140).only_basic() == 7 class TestColorLoad: def test_rgb(self): blue = Color(0,0,255) # Red, Green, Blue assert blue.rgb == (0,0,255) def test_simple_name(self): green = Color.from_simple('green') assert green.number == 2 def test_different_names(self): assert Color('Dark Blue') == Color('Dark_Blue') assert Color('Dark_blue') == Color('Dark_Blue') assert Color('DARKBLUE') == Color('Dark_Blue') assert Color('DarkBlue') == Color('Dark_Blue') assert Color('Dark Green') == Color('Dark_Green') def test_loading_methods(self): assert Color("Yellow") == Color.from_full("Yellow") assert (Color.from_full("yellow").representation !=
Color.from_simple("yellow").representation) class TestANSIColor: @classmethod def setup_class(cls): ANSIStyle.use_color = True def test_ansi(self): assert str(ANSIStyle(fgcolor=Color('reset'))) == '\033[39m' assert str(ANSIStyle(fgcolor=Color.from_full('green'))) == '\033[38;5;2m' a
ssert str(ANSIStyle(fgcolor=Color.from_simple('red'))) == '\033[31m'

class TestNearestColorExhaustive:
    # A distinct class name is required here: reusing TestNearestColor would
    # shadow the class defined above and its tests would never be collected.
    def test_allcolors(self):
        myrange = (0,1,2,5,17,39,48,73,82,140,193,210,240,244,250,254,255)
        for r in myrange:
            for g in myrange:
                for b in myrange:
                    near = FindNearest(r,g,b)
                    assert near.all_slow() == near.all_fast(), 'Tested: {0}, {1}, {2}'.format(r,g,b)
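
# A short, hedged usage sketch of the two entry points under test: Color
# accepts RGB triples (and names), and FindNearest maps a truecolor value to
# the closest index of the 256-color ANSI palette.
def nearest_ansi_demo():
    orange = Color(255, 165, 0)
    index = FindNearest(255, 165, 0).all_fast()
    return orange.rgb, index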
#!/usr/bin/env python # vim: tw=80 ts=4 sw=4 noet from os.path import join, basename, dirname, abspath import _import from wwwclient import browse, scrape HTML = scrape.HTML s = browse.Session("http://www.google.com") f = s.form().fill(q="python web scraping") s.submit(f, action="btnG", method="GET") tree = scrape.HTML.tree(s.page()) nodes = tree.cut(below=3) nodes = nodes.filter(acc
ept=lambda n:n.name.lower() in ("table","p"))
for node in nodes.children:
	print HTML.text(node)
	if node.name == "p":
		link = node.find(withName="a")[0]
		print "-->", link.attribute("href")
		print HTML.links(link)
	else:
		print "---------"

# Google results are not properly closed, so we had to identify patterns where
# a closing tag should be inserted
# close_on = ("td", "a", "img", "br", "a")
# scrape.do(scrape.HTML.iterate, session.last().data(), closeOn=close_on, write=sys.stdout)
# EOF
#! Tests out the CG solver with CPHF Polarizabilities
import time
import numpy as np
import psi4

psi4.set_output_file("output.dat")

# Water
mol = psi4.geometry("""
0 1
O 0.000000000000 0.000000000000 -0.075791843589
H 0.000000000000 -0.866811828967 0.601435779270
H 0.000000000000 0.866811828967 0.601435779270
symmetry c1
""")

psi4.set_options({"basis": "aug-cc-pVDZ",
                  "scf_type": "df",
                  "e_convergence": 1e-8,
                  "save_jk": True,
                  })

scf_e, scf_wfn = psi4.energy("SCF", return_wfn=True)

# Orbitals
Co = scf_wfn.Ca_subset("AO", "OCC")
Cv = scf_wfn.Ca_subset("AO", "VIR")

# Mints object
mints = psi4.core.MintsHelper(scf_wfn.basisset())

# RHS Dipoles
dipoles_xyz = []
for dip in mints.ao_dipole():
    Fia = psi4.core.triplet(Co, dip, Cv, True, False, False)
    Fia.scale(-2.0)
    dipoles_xyz.append(Fia)

# Build up the preconditioner
precon = psi4.core.Matrix(Co.shape[1], Cv.shape[1])
occ = np.array(scf_wfn.epsilon_a_subset("AO", "OCC"))
vir = np.array(scf_wfn.epsilon_a_subset("AO", "VIR"))
precon.np[:] = (-occ.reshape(-1, 1) + vir)

# Build a preconditioner function
def precon_func(matrices, active_mask):
    ret = []
    for act, mat in zip(active_mask, matrices):
        if act:
            p = mat.clone()
            p.apply_denominator(precon)
            ret.append(p)
        else:
            ret.append(False)
    return ret

def wrap_Hx(matrices, active_mask):
    x_vec = [mat for act, mat in zip(active_mask, matrices) if act]
    Hx_vec = scf_wfn.cphf_Hx(x_vec)
    ret = []
    cnt = 0
    for act, mat in zip(active_mask, matrices):
        if act:
            ret.append(Hx_vec[cnt])
            cnt += 1
        else:
            ret.append(False)
    return ret

# Solve
ret, resid = psi4.p4util.solvers.cg_solver(dipoles_xyz, wrap_Hx, precon_func, rcond=1.e-6)

polar = np.empty((3, 3))
for numx in range(3):
    for numf in range(3):
        polar[numx, numf] = -1 * ret[numx].vector_dot(dipoles_xyz[numf])

psi4.core.print_out("\n " + "CPHF Dipole Polarizability:".center(44) + "\n")
tops = ("X", "Y", "Z")
psi4.core.print_out(" %12s %12s %12s\n" % tops)
for n, p in enumerate(tops):
    psi4.core.print_out(" %3
s %12.4f %12.4f %12.4f\n" % (p, polar[n][0], polar[n][1], polar[n][2])) psi4.core.print_out("\n") psi4.compare_values(8.01554, polar[0][0], 3, 'Dipole XX Polarizability') # TEST psi4.compare_values(12.50363, polar[1][1], 3, 'Dipole YY Polarizability') # TEST psi4.compare_values(10.04161, polar[2][2], 3, 'D
ipole ZZ Polarizability') # TEST
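
# The polarizability block above is the contraction alpha_ij = -x_i . F_j of
# the CPHF response vectors with the dipole right-hand sides. A tiny numpy
# sketch of the same contraction on plain arrays (shapes illustrative only):
def contract_polarizability(responses, rhs):
    '''responses, rhs: length-3 sequences (x, y, z) of equally shaped arrays'''
    return np.array([[-np.vdot(x, f) for f in rhs] for x in responses])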
from bs4 import BeautifulSoup class RunParameter_xml: ''' A class for reading runparameters xml file from Illumina sequencing runs :param xml_file: A runparameters xml file ''' def __init__(self, xml_file):
self.xml_file = xml_file self._read_xml() def _read_xml(self): ''' Internal function for reading the xml file using BS4 ''' try: xml_file = self.xml_file with open(xml_file, 'r') as fp: soup = BeautifulSoup(fp, "html5lib") self._soup = soup except Exception as e: raise ValueError( 'Failed
to parse xml file {0}, error {1}'.\
        format(self.xml_file, e))

  def get_nova_workflow_type(self):
    '''
    A method for fetching the NovaSeq workflow type

    :returns: Workflow type string or None
    '''
    try:
      soup = self._soup
      workflowtype = None
      if soup.workflowtype:
        workflowtype = \
          soup.workflowtype.contents[0]
      return workflowtype
    except Exception as e:
      raise ValueError(
              'Failed to get NovaSeq workflow type, error: {0}'.format(e))

  def get_novaseq_flowcell(self):
    '''
    A method for fetching the flowcell id of a NovaSeq Xp run

    :returns: Flowcell id string
    '''
    try:
      soup = self._soup
      flowcell_id = None
      workflowtype = self.get_nova_workflow_type()
      if workflowtype is None or \
         workflowtype != 'NovaSeqXp':
        raise ValueError(
                'Missing NovaSeq workflow type: {0}'.\
                format(workflowtype))
      if soup.rfidsinfo and \
         soup.rfidsinfo.flowcellserialbarcode:
        # read the id from the flowcellserialbarcode tag checked just above
        flowcell_id = \
          soup.rfidsinfo.flowcellserialbarcode.contents[0]
      if flowcell_id is None:
        raise ValueError(
                'Missing NovaSeq flowcell id, file: {0}'.\
                format(self.xml_file))
      return flowcell_id
    except Exception as e:
      raise ValueError(
              'Failed to get NovaSeq flowcell id, error: {0}'.format(e))

  def get_novaseq_flowcell_mode(self):
    '''
    A method for fetching the flowcell mode of a NovaSeq Xp run

    :returns: Flowcell mode string
    '''
    try:
      soup = self._soup
      flowcell_mode = None
      workflowtype = self.get_nova_workflow_type()
      if workflowtype is None or \
         workflowtype != 'NovaSeqXp':
        raise ValueError(
                'Missing NovaSeq workflow type: {0}'.\
                format(workflowtype))
      if soup.rfidsinfo and \
         soup.rfidsinfo.flowcellmode:
        flowcell_mode = \
          soup.rfidsinfo.flowcellmode.contents[0]
      if flowcell_mode is None:
        raise ValueError(
                'Missing NovaSeq flowcell mode, file: {0}'.\
                format(self.xml_file))
      return flowcell_mode
    except Exception as e:
      raise ValueError(
              'Failed to get NovaSeq flowcell mode, error: {0}'.format(e))

  def get_hiseq_flowcell(self):
    '''
    A method for fetching flowcell details for hiseq run

    :returns: Flowcell info or None (for MiSeq, NextSeq or NovaSeq runs)
    '''
    try:
      soup = self._soup
      if soup.flowcell:
        flowcell = soup.flowcell.contents[0]
      else:
        flowcell = None
      return flowcell
    except Exception as e:
      raise ValueError(
              'Failed to get flowcell for hiseq, error: {0}'.\
              format(e))
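
# A minimal usage sketch, assuming a RunParameters.xml file from a sequencing
# run exists at the (placeholder) path below:
def read_flowcell_example(xml_path='/path/to/RunParameters.xml'):
  params = RunParameter_xml(xml_file=xml_path)
  # HiSeq runs expose the flowcell tag directly; NovaSeq Xp runs go through
  # the workflow-type aware helper instead.
  flowcell = params.get_hiseq_flowcell()
  if flowcell is None:
    flowcell = params.get_novaseq_flowcell()
  return flowcell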
# -*- encoding: utf-8 -*-
##############################################################################
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation, either version 3 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, api
from openerp.exceptions import ValidationError


class AccountOwnCheck(models.Model):
    _inherit = 'account.own.check'

    @api.multi
    def cancel_check(self):
        """ What should happen to the check when it is cancelled """
        if any(check.state != 'draft' for check in self):
            raise ValidationError("Only a check in draft state can be cancelled")

        self.next_state('draft_canceled')

    @api.multi
    def revert_canceled_check(self):
        """ What should happen to the check when a cancellation is reverted """
        if any(check.state != 'canceled' for check in self):
            raise ValidationError("This operation only applies to cancelled checks")

        self.cancel_state('canceled')

    @api.multi
    def reject_check(self):
        """ What should happen to the check when it is rejected """
        if any(check.state != 'handed' for check in self):
            raise ValidationError("A check that has not been handed over cannot be rejected")

        self.next_state('handed')

    @api.multi
    def revert_reject(self):
        """ What should happen to the check when a rejection is reverted """
        if any(check.state != 'rejected' for check in self):
            raise ValidationError("This operation only applies to rejected checks")

        self.cancel_state('rejected')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
import datetime
import tweepy
from geopy.geocoders import Nominatim
import json
from secret import *
import boto3
import re
import preprocessor as p
import time

p.set_options(p.OPT.URL, p.OPT.EMOJI)

# Get the service resource.
dynamodb = boto3.resource('dynamodb', region_name='us-west-2')
table = dynamodb.Table('fuck')
geolocator = Nominatim()
epoch = datetime.datetime.utcfromtimestamp(0)

with open('zip2fips.json') as data_file:
    zip2fips = json.load(data_file)

def get_fips(coords):
    location = geolocator.reverse('{:f}, {:f}'.format(coords[0], coords[1]))
    zipcode = None
    fips = None
    if 'address' in location.raw:
        if 'country_code' in location.raw['address']:
            if location.raw['address']['country_code'] == 'us':
                if 'postcode' in location.raw['address']:
                    zipcode = location.raw['address']['postcode']
                else:
                    print("postcode not in location address")
                try:
                    # a dict lookup raises KeyError (not IndexError) on a miss
                    fips = zip2fips[location.raw['address']['postcode']]
                except KeyError:
                    print("FIPS could not be found")
                return fips, zipcode
            else:
                print("Not in the US")
        else:
            print("No Country code is in the address")
    else:
        print("No address")
    # always return a pair so callers can unpack the result
    return None, None


class TwitterStreamListener(tweepy.StreamListener):

    def on_status(self, status):
        try:
            if status.geo != None:
                fips, zipcode = get_fips(status.geo['coordinates'])
                if fips is None:
                    print("Fips is None")
                    raise Exception
                if zipcode is None:
                    print("Zipcode is None")
                    raise Exception
                txt = re.sub('[!@#$]', '', status.text)
                txt = p.clean(txt)
                try:
                    table.update_item(
                        Key={
                            'fips': int(fips)
                        },
                        UpdateExpression='ADD tweet :val1',
                        ExpressionAttributeValues={
                            ':val1': set([txt])
                        }
                    )
                except:
                    print("it crashed")
                    print("FIPS:" + fips)
                    print("TXT:" + txt)
        except Exception as e:
            print(e)

    def on_error(self, status):
        print(status)

if __name__ == "__main__":
    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)
    # for i in range(4):
    #     t = threading.Thread(target=worker)
    #     t.daemon = True  # thread dies when main thread (only non-daemon thread) exits.
    #     t.start()
    stream = tweepy.Stream(auth, TwitterStreamListener())
    while True:
        try:
            stream.filter(locations=[-125.0011, 24.9493, -66.9326, 49.5904])
        except:
            continue
        time.sleep(10)  # the time module has no wait(); pause before reconnecting
    #stream.sample(1)
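
# A hedged sketch isolating the DynamoDB write used in on_status() above: the
# ADD action on a set-valued attribute creates the set when it is absent and
# unions in new members otherwise, so tweets accumulate per FIPS code without
# a read-modify-write cycle.
def add_tweet(table, fips, text):
    table.update_item(
        Key={'fips': int(fips)},
        UpdateExpression='ADD tweet :val1',
        ExpressionAttributeValues={':val1': set([text])},
    )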
        self._index = 0

        if collection is not None:
            self.__class__._coll = collection
            self._coll = collection
        elif self._coll is None:
            if self.__class__._coll is None:
                raise NoCollectionError('Must have a collection in MongoDB!')
            else:
                self._coll = self.__class__._coll

        if season is not None:
            self.get_by_season(season)

    def get_by_season(self, season):
        season_data = seasons.find_one({'season': season})
        self._start_date = season_data['start']
        self._end_date = season_data['end']
        self._asg = season_data['allStarGame']
        data = self._coll.find({'game_date': {'$gte': self._start_date, '$lte': self._end_date}})
        self._data = data
        self.set_data(self._data)

    def set_data(self, data):
        self._games = []
        self._games = sorted([Game(event_id=game['id']) for game in data])
        # for game_json in data:
        #     event_id = game_json['id']
        #     game = Game(event_id=event_id)
        #     self._games.append(game)
        #self._games = sorted(self._games)
        #self._start_date = self._games[0].date
        #self._end_date = self._games[-1].date

    def __str__(self):
        return '{}-{} NBA Season'.format(self.season, self.season + 1)

    def __iter__(self):
        self._index = 0
        return self

    def next(self):
        try:
            game = self.games[self._index]
        except IndexError:
            raise StopIteration
        self._index += 1
        return game

    @property
    def games(self):
        return self._games

    @property
    def start_date(self):
        return self._start_date

    @property
    def end_date(self):
        return self._end_date

    @property
    def season(self):
        return self._season

    def __len__(self):
        # the number of games in the season; taking len() of the integer
        # season year would raise a TypeError
        return len(self._games)

    @property
    def regular_season_games(self):
        return [game for game in self._games if game.game_type == 'Regular Season']

    @property
    def postseason_games(self):
        return [game for game in self._games if game.game_type == 'Postseason']

    def get_all_games_in_range(self, start_date=None, end_date=None):
        if start_date is not None and end_date is None:
            games = [game for game in self.games if start_date <= game.date]
        elif start_date is None and end_date is not None:
            games = [game for game in self.games if game.date <= end_date]
        elif start_date is not None and end_date is not None:
            games = [game for game in self.games if start_date <= game.date <= end_date]
        else:
            games = self.games
        return games

    def get_team_games_in_range(self, team, start_date=None, end_date=None):
        games = [game for game in self.get_all_games_in_range(start_date, end_date) if game.is_away(team) or game.is_home(team)]
        return games

    def get_player_games_in_range(self, player, start_date=None, end_date=None):
        games = [game for game in self.get_all_games_in_range(start_date, end_date) if game.player_in_game(player)]
        return games

    def drtg(self, team, start_date=None, end_date=None):
        games = self.get_team_games_in_range(team, start_date, end_date)
        pts_against = 0
        possessions = 0
        for game in games:
            opponent = game.opponent(team)
            pts_against += game.score(opponent)
            possessions += game.possessions(opponent)
        drtg = 100 * pts_against / possessions
        return drtg

    def ortg(self, team, start_date=None, end_date=None):
        games = self.get_team_games_in_range(team, start_date, end_date)
        pts_scored = 0
        possessions = 0
        for game in games:
            pts_scored += game.score(team)
            possessions += game.possessions(team)
        ortg = 100 * pts_scored / possessions
        return ortg

    def player_ortg(self, player, start_date=None, end_date=None):
        games_played = self.get_player_games_in_range(player, start_date, end_date)
        ast = 0
        fgm = 0
        fga = 0
        fta = 0
        ftm = 0
        tov = 0
        threes = 0
        orb = 0
        pts = 0
        mp = 0
        team_fgm = 0
        team_fga = 0
        team_ast = 0
        team_mp = 0
        team
_ftm = 0 team_fta = 0 team_orb = 0 team_pts = 0 team_3pm = 0 team_tov = 0 opp_dreb = 0 for game in games_played: player_data = game.player_boxscore(player) team = game.player_team(player) opponent = game.opponent(team) team_data = game.team_boxscore(team)['teamStats'] opponent_data = game.team_boxscore(opponent)['teamStats'] ast += player_data['assists'] fgm += player_data['fieldGoals']['made'] fga += player_data['fieldGoals']['attempted'] fta += player_data['freeThrows']['attempted'] ftm += player_data['freeThrows']['made'] tov += player_data['turnovers'] threes += player_data['threePointFieldGoals']['made'] orb += player_data['rebounds']['offensive'] pts += player_data['points'] mp += player_data['totalSecondsPlayed'] / 60.0 team_fgm += team_data['fieldGoals']['made'] team_fga += team_data['fieldGoals']['attempted'] team_ast += team_data['assists'] team_mp += team_data['minutes'] team_ftm += team_data['freeThrows']['made'] team_fta += team_data['freeThrows']['attempted'] team_orb += team_data['rebounds']['offensive'] team_pts += team_data['points'] team_3pm += team_data['threePointFieldGoals']['made'] team_tov += team_data['turnovers']['total'] opp_dreb += opponent_data['rebounds']['defensive'] team_orb_pct = team_orb / (opp_dreb + team_orb) ft_part = (1 - (1 - (ftm / fta))**2) * 0.4 * fta ast_part = 0.5 * (((team_pts - team_ftm) - (pts - ftm)) / (2 * (team_fga - fga))) * ast q_ast = ((mp / (team_mp / 5)) * (1.14 * ((team_ast - ast) / team_fgm))) + ((((team_ast / team_mp) * mp * 5 - ast) / ((team_fgm / team_mp) * mp * 5 - fgm)) * (1 - (mp / (team_mp / 5)))) fg_part = fgm * (1 - 0.5 * ((pts - ftm) / (2 * fga)) * q_ast) team_scoring_poss = team_fgm + (1 - (1 - (team_ftm / team_fta))**2) * team_fta * 0.4 team_play_pct = team_scoring_poss / (team_fga + team_fta * 0.4 + team_tov) team_orb_weight = ((1 - team_orb_pct) * team_play_pct) / ((1 - team_orb_pct) * team_play_pct + team_orb_pct * (1 - team_play_pct)) orb_part = orb * team_orb_weight * team_play_pct scr_poss = (fg_part + ast_part + ft_part) * (1 - (team_orb / team_scoring_poss) * team_orb_weight * team_play_pct) + orb_part fg_x_poss = (fga - fgm) * (1 - 1.07 * team_orb_pct) ft_x_poss = ((1 - (ftm / fta))**2) * 0.4 * fta tot_poss = scr_poss + fg_x_poss + ft_x_poss + tov pprod_fg_part = 2 * (fgm + 0.5 * threes) * (1 - 0.5 * ((pts - ftm) / (2 * fga)) * q_ast) pprod_ast_part = 2 * ((team_fgm - fgm + 0.5 * (team_3pm - threes)) / (team_fgm - fgm)) * 0.5 * (((team_pts - team_ftm) - (pts - ftm)) / (2 * (team_fga - fga))) * ast pprod_orb_part = orb * team_orb_weight * team_play_pct * (team_pts / (team_fgm + (1 - (1 - (team_ftm / team_fta))**2) * 0.4 * team_fta)) pprod = (pprod_fg_part + pprod_ast_part + ftm) * (1 - (team_orb / team_scoring_poss) * team_orb_weight * team_play_pct) + pprod_orb_part ortg = 100 * pprod / tot_poss return ortg def player_drtg(self, player, start_date=None, end_date=None): games_played = self.get_player_games_in_range(player, start_date, end_date) drb = 0 pf = 0 mp = 0 stl = 0 blk = 0 team_mp = 0 team_blk = 0 team_stl = 0 team_drb = 0 team_pf = 0 team_pos = 0 opp_fta
#!/usr/bin/e
nv p
ython3 print('hello hello hello')
#def binh_phuong()
try:
    a=int(raw_input("Enter a number n > 0 \n"))
    while a<=0:
        a=int(raw_input("Please re-enter a number n > 0\n "))
    print "%d" %(a)
    b=pow(a,2)
    c=int(raw_input("Guess the square of your number\n"))
    while c!=b:
        if c<b:
            print"not right yet, go a little higher\n"
            c=int(raw_input())
        else:
            print"too high, come down a little\n"
            c=int(raw_input())
    print "Exactly, the answer is %d" %(c)
except:
    print "Your input was not an Integer"
import unittest import logging from domaincrawl.link_aggregator import LinkAggregator from domaincrawl.link_filters import DomainFilter, is_acceptable_url_scheme from domaincrawl.site_graph import SiteGraph from domaincrawl.util import URLNormalizer, extract_domain_port class LinkAggregatorTest(unittest.TestCase): logging.basicConfig(format='%(asctime)s %(levelname)s:%(message)s', level=logging.DEBUG, datefmt='%m/%d/%Y %I:%M:%S %p') def test_link_dedup(self): base_url = "acme.com:8999" base_domain, port = extract_domain_port(base_url) logger = logging.getLogger() url_norm = URLNormalizer(base_domain, port) normalized_url = url_norm.normalize_with_domain(base_url) logger.debug("Constructed normalized base url : %s"%normalized_url) domain_filter = DomainFilter(base_domain, logger) site_graph = SiteGraph(logger) link_aggregator = LinkAggregator(logger, site_graph, link_mappers=[url_norm.normalize_with_domain], link_filters=[domain_filter.passes, is_acceptable_url_scheme]) valid_links = ["/a/b","/a/b/./","http://acme.com:8002/a","https://acme.com:8002/b?q=asd#frag"] expected_links = ["http://acme.com:8999/a/b","http://acme.com:8002/a","https://acme.com:8002/b"] # This time
, we also specify a referrer page filtered_links = link_aggregator.filter_update_links(valid_links, normalized_url) self.assertListEqual(expected_links,filtered_links) self.assertSetEqual(set(expected_links),link_aggregator._links) # Second invocation should result in deduplication filtered_links = link_aggregator.filter_update_links(valid_links, None) self.assertTrue(len(filtered_links) == 0) self.assertSetEqual(se
t(expected_links),link_aggregator._links) # None of the invalid links should pass invalid_links = ["mailto://user@mail.com","code.acme.com","code.acme.com/b","https://127.122.9.1"] filtered_links = link_aggregator.filter_update_links(invalid_links, None) self.assertTrue(len(filtered_links) == 0) self.assertSetEqual(set(expected_links),link_aggregator._links) # A new valid link should pass new_valid_links = ["http://acme.com:8999/"] filtered_links = link_aggregator.filter_update_links(new_valid_links, None) expected_result = ["http://acme.com:8999"] self.assertListEqual(expected_result,filtered_links) expected_result_set = set(expected_links) expected_result_set.update(set(expected_result)) self.assertSetEqual(expected_result_set,link_aggregator._links) self.assertEqual(len(expected_result_set), site_graph.num_nodes()) for link in expected_result_set: self.assertTrue(site_graph.has_vertex(link)) self.assertEqual(len(expected_links), site_graph.num_edges()) for link in expected_links: self.assertTrue(site_graph.has_edge(normalized_url, link))
from flask import Flask, request, session, g, redirect, url_for, \ abort, flash import db import routes DATABASE = 'test.db' DEBUG = True SECRET_KEY =
'k
ey' USERNAME = 'admin' PASSWORD = 'password' app = Flask(__name__) app.config.from_object(__name__) if __name__ == '__main__': app.run()
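
# The module above only wires configuration together; the db/routes modules it
# imports are not shown, so this is an assumption about their shape, not their
# actual code. A hedged sketch of the usual companion pattern (as in the
# Flaskr tutorial) for opening the configured SQLite database per request; in
# the real module these handlers would sit above the __main__ guard.
import sqlite3

def connect_db():
    return sqlite3.connect(app.config['DATABASE'])

@app.before_request
def before_request():
    g.db = connect_db()

@app.teardown_request
def teardown_request(exception):
    db_conn = getattr(g, 'db', None)
    if db_conn is not None:
        db_conn.close()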
-- the von Mises stress in physical units (Pa) It has a number of additional properties (see __readHeader for full details) """ _raw_row = [('id', int), ('position', float, (3,)), ('grid', int, (3,)), ('pressure', float), ('velocity', float, (3,)), ('stress', float)] _readable_row = np.dtype(_raw_row[2:]) row = np.dtype(_raw_row) _attr
s = {'stable': None, 'voxel_size': None, 'origin': np.array([np.nan, np.nan, np.nan]), 'bb_min': None, 'bb_max': None, 'bb_len': None, 'voxel_count': None} # header = len(_attrs) def __new__(cls, filename): """Create a new instance. Numpy array subclasses use this method instead of __init__ for initialization. """ headerDict = cls._readHeader(filename)
noindex = cls._load(filename, headerDict) index = np.recarray(shape=noindex.shape, dtype=cls.row) for el in cls._raw_row[2:]: key = el[0] index.__setattr__(key, noindex.__getattribute__(key)) continue index.id = np.arange(len(noindex)) try: index.position = cls._computePosition(index.grid, headerDict) except: index.position = np.nan pass obj = index.view(cls) # Set the attributes on the snapshot for headerField in headerDict: setattr(obj, headerField, headerDict[headerField]) continue return obj def __array_finalize__(self, parent): """Numpy special method.""" if parent is None: return for a in self._attrs: setattr(self, a, getattr(parent, a, self._attrs[a])) continue return pass class PositionlessSnapshot(BaseSnapshot): """Base class for the original text snapshots and the XDR equivalent. These lack the data required to compute the positions of grid points. It is supplied through the coords.asc file generated by the old setuptool. """ def computePosition(self, coordsFile): """Given the coordinate file from the segtool, calculate all the lattice positions' coordinates. """ from os.path import exists if exists (coordsFile): from ...coordinates import Transformer trans = Transformer(coordsFile) self.position = 1e-3 * trans.siteToStl(self.grid + self.bb_min) return else: # The coords file is missing! warnings.warn('Missing coordinates file "%s", assuming origin at [0,0,0]' % coordsFile, stacklevel=2) self.position = (self.grid + self.bb_min) * self.voxel_size # + origin, but we'll just assume it's zero here. pass class TextSnapshot(PositionlessSnapshot): """Read a text snapshot. """ nHeaderLines = 6 @classmethod def _readHeader(cls, filename): """Read the header lines, according to: 0- Flag for simulation stability, 0 or 1 1- Voxel size in physical units (units of m) 2- vertex coords of the minimum bounding box with minimum values (x, y and z values) 3- vertex coords of the minimum bounding box with maximum values (x, y and z values) 4- #voxels within the minimum bounding box along the x, y, z axes (3 values) 5- total number of fluid voxels """ f = file(filename) stable = int(f.readline()) voxel_size = float(f.readline()) bb_min = np.array([int(x) for x in f.readline().split()]) bb_max = np.array([int(x) for x in f.readline().split()]) bb_len = np.array([int(x) for x in f.readline().split()]) voxel_count = int(f.readline()) return {'stable': stable, 'voxel_size': voxel_size, 'bb_min': bb_min, 'bb_max': bb_max, 'bb_len': bb_len, 'voxel_count': voxel_count} @classmethod def _load(cls, filename, header): return np.loadtxt(filename, skiprows=cls.nHeaderLines, dtype=cls._readable_row).view(np.recarray) pass class XdrVoxelFormatOneSnapshot(object): @classmethod def _load(cls, filename, header): # Skip past the header, slurp data, create XDR object f = file(filename) f.seek(cls._headerLengthBytes) reader = xdrlib.Unpacker(f.read()) ans = np.recarray((header['voxel_count'],), dtype=cls._readable_row) # Read all the voxels. for i in xrange(header['voxel_count']): ans[i] = ((reader.unpack_int(), reader.unpack_int(), reader.unpack_int()), reader.unpack_float(), (reader.unpack_float(), reader.unpack_float(), reader.unpack_float()), reader.unpack_float()) continue reader.done() return ans pass class XdrSnapshotVersionOne(PositionlessSnapshot, XdrVoxelFormatOneSnapshot): """Read an old-style XDR snapshot. 
""" # int float 3x int 3x int 3x int int _headerLengthBytes = 4 + 8 + 3*4 + 3*4 + 3*4 + 4 @classmethod def _readHeader(cls, filename): """Read the header lines, according to: 0- Flag for simulation stability, 0 or 1 1- Voxel size in physical units (units of m) 2- vertex coords of the minimum bounding box with minimum values (x, y and z values) 3- vertex coords of the minimum bounding box with maximum values (x, y and z values) 4- #voxels within the minimum bounding box along the x, y, z axes (3 values) 5- total number of fluid voxels """ reader = xdrlib.Unpacker(file(filename).read(cls._headerLengthBytes)) header = {} header['stable'] = reader.unpack_int() header['voxel_size'] = reader.unpack_double() header['bb_min'] = np.array((reader.unpack_int(), reader.unpack_int(), reader.unpack_int())) header['bb_max'] = np.array((reader.unpack_int(), reader.unpack_int(), reader.unpack_int())) header['bb_len'] = np.array((reader.unpack_int(), reader.unpack_int(), reader.unpack_int())) header['voxel_count'] = reader.unpack_int(); return header pass class XdrSnapshotVersionTwo(BaseSnapshot, XdrVoxelFormatOneSnapshot): """Read snapshots for the updated format as for August 2011. """ _headerLengthBytes = 80 VersionNumber = 2 @classmethod def _readHeader(cls, filename): """Read the header lines, according to description in Code/io/formats/snapshot.h """ reader = xdrlib.Unpacker(file(filename).read(cls._headerLengthBytes)) header = {} assert reader.unpack_uint() == HemeLbMagicNumber assert reader.unpack_uint() == SnapshotMagicNumber assert reader.unpack_uint() == cls.VersionNumber bodyStart = reader.unpack_uint() assert bodyStart == cls._headerLengthBytes header['stable'] = reader.unpack_int() header['voxel_size'] = reader.unpack_double() header['origin'] = np.array((reader.unpack_double(), reader.unpack_double(), reader.unpack_double())) header['bb_min'] = np.array((reader.unpack_int(), reader.unpack_int(), reader.unpack_int())) header['bb_max'] = np.array((reader.unpack_int(), reader.unpack_int(), reader.unpack_int())) he
: cb_idx = self.tasks.index(task) if stay_resident: if cb_idx not in self.resident_tasks: self.resident_tasks.append(self.current_task) print "task going resident:", task else: print "task keeps staying resident:", task return if len(res): print ">>> Error:", res self.status = self.FAILED self.state_changed() self.callback(self, task, res) if cb_idx != self.current_task: if cb_idx in self.resident_tasks: print "resident task finished:", task self.resident_tasks.remove(cb_idx) if not res: self.state_changed() self.current_task += 1 self.runNext() def retry(self): assert self.status == self.FAILED self.restart() def abort(self): if self.current_task < len(self.tasks): self.tasks[self.current_task].abort() for i in self.resident_tasks: self.tasks[i].abort() def cancel(self): self.abort() def __str__(self): return "Components.Task.Job name=%s #tasks=%s" % (self.name, len(self.tasks)) class Task(object): def __init__(self, job, name): self.name = name self.immediate_preconditions = [ ] self.global_preconditions = [ ] self.postconditions = [ ] self.returncode = None self.initial_input = None self.job = None self.end = 100 self.weighting = 100 self.__progress = 0 self.cmd = None self.cwd = "/tmp" self.args = [ ] self.cmdline = None self.task_progress_changed = None self.output_line = "" job.addTask(self) self.container = None def setCommandline(self, cmd, args): self.cmd = cmd self.args = args def setTool(self, tool): self.cmd = tool self.args = [tool] self.global_preconditions.append(ToolExistsPrecondition()) self.postconditions.append(ReturncodePostcondition()) def setCmdline(self, cmdline): self.cmdline = cmdline def checkPreconditions(self, immediate = False): not_met = [ ] if immediate: preconditions = self.immediate_preconditions else: preconditions = self.global_preconditions for precondition in preconditions: if not precondition.check(self): not_met.append(precondition) return not_met def _run(self): if (self.cmd is None) and (self.cmdline is None): self.finish() return from enigma import eConsoleAppContainer self.container = eConsoleAppContainer() self.container.appClosed.append(self.processFinished) self.container.stdoutAvail.append(self.processStdout) self.container.stderrAvail.append(self.processStderr) if self.cwd is not None: self.container.setCWD(self.cwd) if not self.cmd and self.cmdline: print "execute:", self.container.execute(self.cmdline), self.cmdline else: assert self.cmd is not None assert len(self.args) >= 1 print "execute:", self.container.execute(self.cmd, *self.args), ' '.join(self.args) if self.initial_input: self.writeInput(self.initial_input) def run(self, callback): failed_preconditions = self.checkPreconditions(True) + self.checkPreconditions(False) if failed_preconditions: print "[Task] preconditions failed" callback(self, failed_preconditions) return self.callback = callback try: self.prepare() self._run() except Exception, ex: print "[Task] exception:", ex self.postconditions = [FailedPostcondition(ex)] self.finish() def prepare(self): pass def cleanup(self, failed): pass def processStdout(self, data): self.processOutput(data) def processStderr(self, data): self.processOutput(data) def processOutput(self, data): self.output_line += data while True: i = self.output_line.find('\n') if i == -1: break self.processOutputLine(self.output_line[:i+1]) self.output_line = self.output_line[i+1:] def processOutputLine(self, line): pri
nt "[Task %s]" % self.name, line[:-1] pass def processFinished(self, returncode): self.returncode = returncode self.finish() def abort(self): if self.container: self.container.kill() self.finish(aborted = T
rue)

	def finish(self, aborted = False):
		self.afterRun()
		not_met = [ ]
		if aborted:
			not_met.append(AbortedPostcondition())
		else:
			for postcondition in self.postconditions:
				if not postcondition.check(self):
					not_met.append(postcondition)
		self.cleanup(not_met)
		self.callback(self, not_met)

	def afterRun(self):
		pass

	def writeInput(self, input):
		self.container.write(input)

	def getProgress(self):
		return self.__progress

	def setProgress(self, progress):
		# Clamp progress into [0, self.end] before notifying listeners.
		if progress > self.end:
			progress = self.end
		if progress < 0:
			progress = 0
		self.__progress = progress
		if self.task_progress_changed:
			self.task_progress_changed()

	progress = property(getProgress, setProgress)

	def __str__(self):
		return "Components.Task.Task name=%s" % self.name

class LoggingTask(Task):
	def __init__(self, job, name):
		Task.__init__(self, job, name)
		self.log = []

	def processOutput(self, data):
		print "[%s]" % self.name, data,
		self.log.append(data)

class PythonTask(Task):
	def _run(self):
		from twisted.internet import threads
		from enigma import eTimer
		self.aborted = False
		self.pos = 0
		threads.deferToThread(self.work).addBoth(self.onComplete)
		self.timer = eTimer()
		self.timer.callback.append(self.onTimer)
		self.timer.start(5)

	def work(self):
		# Subclasses must implement the actual work to run in the thread.
		raise NotImplementedError("work")

	def abort(self):
		self.aborted = True
		if self.callback is None:
			self.finish(aborted = True)

	def onTimer(self):
		self.setProgress(self.pos)

	def onComplete(self, result):
		self.postconditions.append(FailedPostcondition(result))
		self.timer.stop()
		del self.timer
		self.finish()

class ConditionTask(Task):
	"""
	Reactor-driven condition variable.
	Wait for something to happen. Call trigger() when something occurs that
	is likely to make check() return True. Raise an exception in check() to
	signal an error.
	The default is to call trigger() once per second; override prepare/cleanup
	to do something else (like waiting for hotplug)...
	"""
	def __init__(self, job, name, timeoutCount=None):
		Task.__init__(self, job, name)
		self.timeoutCount = timeoutCount

	def _run(self):
		self.triggerCount = 0

	def prepare(self):
		from enigma import eTimer
		self.timer = eTimer()
		self.timer.callback.append(self.trigger)
		self.timer.start(1000)

	def cleanup(self, failed):
		if hasattr(self, 'timer'):
			self.timer.stop()
			del self.timer

	def check(self):
		# Override to return True only when the condition triggers.
		return True

	def trigger(self):
		self.triggerCount += 1
		try:
			if (self.timeoutCount is not None) and (self.triggerCount > self.timeoutCount):
				raise Exception("Timeout elapsed, sorry")
			res = self.check()
		except Exception, e:
			self.postconditions.append(FailedPostcondition(e))
			res = True
		if res:
			self.finish()

# The JobManager executes multiple jobs, one after another.
# Later it will also support suspending jobs (and continuing them after reboot etc.).
# It also supports a notification when some error occurred, and possibly a retry.
class JobManager:
	def __init__(self):
		self.active_jobs = [ ]
		self.failed_jobs = [ ]
		self.job_classes = [ ]
		self.in_background = False
		self.visible = False
		self.active_job = None

	# Set onSuccess to popupTaskView to get a visible notification.
	# onFail defaults to notifyFailed, which tells the user that it went south.
def AddJob(self, job, onSuccess=None, onFail=None): job.onSuccess = onSuccess if onFail is None: job.onFail = self.notifyFailed else: job.onFail = onFail self.active_jobs.append(job) self.kick() def kick(self): if self.active_job is None: if self.active_jobs: self.active_job = self.active_jobs.pop(0) self.active_job.start(self.jobDone) def notifyFailed(self, job, task, problems): from Tools import Notifications from Screens.MessageBox import MessageBox if problems[0].RECOVERABLE: Notifications.AddNotificationWithCallback(self.errorCB, MessageBox, _("Error: %s\nRetry?") % (problems[0].getErrorMessage(task))) return True else: Notifications.AddNotification(MessageBox, job.name + "\n" + _("Error") + ': %s' % (problems[0].getErrorMessage(task)), type = MessageBox.TYPE_ERROR ) return False def jobDone(self, job, task, problems): print "job
nt = try_import('neutronclient.v2_0.client') health_monitor_template = ''' { "AWSTemplateFormatVersion" : "2010-09-09", "Description" : "Template to test load balancer resources", "Parameters" : {}, "Resources" : { "monitor": { "Type": "OS::Neutron::HealthMonitor", "Properties": { "type": "HTTP", "delay": 3, "max_retries": 5, "timeout": 10 } } } } ''' pool_template = ''' { "AWSTemplateFormatVersion" : "2010-09-09", "Description" : "Template to test load balancer resources", "Parameters" : {}, "Resources" : { "pool": { "Type": "OS::Neutron::Pool", "Properties": { "protocol": "HTTP", "subnet_id": "sub123", "lb_method": "ROUND_ROBIN", "vip": { "protocol_port": 80 } } } } } ''' member_template = ''' { "AWSTemplateFormatVersion" : "2010-09-09", "Description" : "Template to test load balancer member", "Resources" : { "member": { "Type": "OS::Neutron::PoolMember", "Properties": { "protocol_port": 8080, "pool_id": "pool123", "address": "1.2.3.4" } } } } ''' lb
_template = ''' { "AWSTemplateFormatVersion" : "2010-09-09", "Description" : "Template to test load balancer resources", "Parameters" : {}, "Resources" : { "lb": { "Type": "OS::Neutron::LoadBalancer", "Properties": { "protocol_port": 8080, "pool_id": "pool123", "members": ["1234"] } } } } ''' pool_with_session_persistence_template = ''' { "AWSTemplateFormatVersion" : "2010-09-09", "Description" : "Template t
o test load balancer resources with session persistence",
  "Parameters" : {},
  "Resources" : {
    "pool": {
      "Type": "OS::Neutron::Pool",
      "Properties": {
        "protocol": "HTTP",
        "subnet_id": "sub123",
        "lb_method": "ROUND_ROBIN",
        "vip": {
          "protocol_port": 80,
          "session_persistence": {
            "type": "APP_COOKIE",
            "cookie_name": "cookie"
          }
        }
      }
    }
  }
}
'''


@skipIf(neutronclient is None, 'neutronclient unavailable')
class HealthMonitorTest(HeatTestCase):

    def setUp(self):
        super(HealthMonitorTest, self).setUp()
        self.m.StubOutWithMock(neutronclient.Client, 'create_health_monitor')
        self.m.StubOutWithMock(neutronclient.Client, 'delete_health_monitor')
        self.m.StubOutWithMock(neutronclient.Client, 'show_health_monitor')
        self.m.StubOutWithMock(neutronclient.Client, 'update_health_monitor')
        self.m.StubOutWithMock(clients.OpenStackClients, 'keystone')
        utils.setup_dummy_db()

    def create_health_monitor(self):
        clients.OpenStackClients.keystone().AndReturn(
            fakes.FakeKeystoneClient())
        neutronclient.Client.create_health_monitor({
            'health_monitor': {
                'delay': 3, 'max_retries': 5, 'type': u'HTTP',
                'timeout': 10, 'admin_state_up': True}}
        ).AndReturn({'health_monitor': {'id': '5678'}})

        snippet = template_format.parse(health_monitor_template)
        stack = utils.parse_stack(snippet)
        return loadbalancer.HealthMonitor(
            'monitor', snippet['Resources']['monitor'], stack)

    def test_create(self):
        rsrc = self.create_health_monitor()
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        self.assertEqual((rsrc.CREATE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()

    def test_create_failed(self):
        clients.OpenStackClients.keystone().AndReturn(
            fakes.FakeKeystoneClient())
        neutronclient.Client.create_health_monitor({
            'health_monitor': {
                'delay': 3, 'max_retries': 5, 'type': u'HTTP',
                'timeout': 10, 'admin_state_up': True}}
        ).AndRaise(loadbalancer.NeutronClientException())
        self.m.ReplayAll()

        snippet = template_format.parse(health_monitor_template)
        stack = utils.parse_stack(snippet)
        rsrc = loadbalancer.HealthMonitor(
            'monitor', snippet['Resources']['monitor'], stack)
        error = self.assertRaises(exception.ResourceFailure,
                                  scheduler.TaskRunner(rsrc.create))
        self.assertEqual(
            'NeutronClientException: An unknown exception occurred.',
            str(error))
        self.assertEqual((rsrc.CREATE, rsrc.FAILED), rsrc.state)
        self.m.VerifyAll()

    def test_delete(self):
        neutronclient.Client.delete_health_monitor('5678')
        neutronclient.Client.show_health_monitor('5678').AndRaise(
            loadbalancer.NeutronClientException(status_code=404))

        rsrc = self.create_health_monitor()
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()

    def test_delete_already_gone(self):
        neutronclient.Client.delete_health_monitor('5678').AndRaise(
            loadbalancer.NeutronClientException(status_code=404))

        rsrc = self.create_health_monitor()
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        scheduler.TaskRunner(rsrc.delete)()
        self.assertEqual((rsrc.DELETE, rsrc.COMPLETE), rsrc.state)
        self.m.VerifyAll()

    def test_delete_failed(self):
        neutronclient.Client.delete_health_monitor('5678').AndRaise(
            loadbalancer.NeutronClientException(status_code=400))

        rsrc = self.create_health_monitor()
        self.m.ReplayAll()
        scheduler.TaskRunner(rsrc.create)()
        error = self.assertRaises(exception.ResourceFailure,
                                  scheduler.TaskRunner(rsrc.delete))
        self.assertEqual(
            'NeutronClientException: An unknown exception occurred.',
            str(error))
        self.assertEqual((rsrc.DELETE, rsrc.FAILED), rsrc.state)
        self.m.VerifyAll()

    def test_attribute(self):
        rsrc = 
self.create_health_monitor() neutronclient.Client.show_health_monitor('5678').MultipleTimes( ).AndReturn( {'health_monitor': {'admin_state_up': True, 'delay': 3}}) self.m.ReplayAll() scheduler.TaskRunner(rsrc.create)() self.assertIs(True, rsrc.FnGetAtt('admin_state_up')) self.assertEqual(3, rsrc.FnGetAtt('delay')) self.m.VerifyAll() def test_attribute_failed(self): rsrc = self.create_health_monitor() self.m.ReplayAll() scheduler.TaskRunner(rsrc.create)() error = self.assertRaises(exception.InvalidTemplateAttribute, rsrc.FnGetAtt, 'subnet_id') self.assertEqual( 'The Referenced Attribute (monitor subnet_id) is incorrect.', str(error)) self.m.VerifyAll() def test_update(self): rsrc = self.create_health_monitor() neutronclient.Client.update_health_monitor( '5678', {'health_monitor': {'delay': 10}}) self.m.ReplayAll() scheduler.TaskRunner(rsrc.create)() update_template = copy.deepcopy(rsrc.t) update_template['Properties']['delay'] = 10 scheduler.TaskRunner(rsrc.update, update_template)() self.m.VerifyAll() @skipIf(neutronclient is None, 'neutronclient unavailable') class PoolTest(HeatTestCase): def setUp(self): super(PoolTest, self).setUp() self.m.StubOutWithMock(neutronclient.Client, 'create_pool') self.m.StubOutWithMock(neutronclient.Client, 'delete_pool') self.m.StubOutWithMock(neutronclient.Client, 'show_pool') self.m.StubOutWithMock(neutronclient.Client, 'update_pool') self.m.StubOutWithMock(neutronclient.Client, 'associate_health_monitor') self.m.StubOutWithMock(neutronclient.Client, 'disassociate_health_monitor') self.m.StubOutWithMock(neutronclient.Client, 'create_vip') self.m.StubOutWithMock(neutronclient.Client, 'delete_vip') self.m.StubOutWithMock(neutronc
oken_view calls get_token() indirectly CsrfViewMiddleware().process_view(req, token_view, (), {}) resp = token_view(req) resp2 = CsrfViewMiddleware().process_response(req, resp) csrf_cookie = resp2.cookies.get('myname', False) self.assertNotEqual(csrf_cookie, False) self.assertEqual(csrf_cookie['domain'], '.example.com') self.assertEqual(csrf_cookie['secure'], True) self.assertEqual(csrf_cookie['path'], '/test/') self.assertTrue('Cookie' in res
p2.get('Vary','')) def test_process_response_get_token_not_used(self): """ Check that if get_token() is not called, the view middleware doe
s not add a cookie.
        """
        # This is important to make pages cacheable.  Pages which do call
        # get_token(), assuming they use the token, are not cacheable because
        # the token is specific to the user.
        req = self._get_GET_no_csrf_cookie_request()
        # non_token_view_using_request_processor does not call get_token(), but
        # does use the csrf request processor.  By using this, we are testing
        # that the view processor is properly lazy and doesn't call get_token()
        # until needed.
        CsrfViewMiddleware().process_view(req, non_token_view_using_request_processor, (), {})
        resp = non_token_view_using_request_processor(req)
        resp2 = CsrfViewMiddleware().process_response(req, resp)

        csrf_cookie = resp2.cookies.get(settings.CSRF_COOKIE_NAME, False)
        self.assertEqual(csrf_cookie, False)

    # Check the request processing
    def test_process_request_no_csrf_cookie(self):
        """
        Check that if no CSRF cookie is present, the middleware rejects the
        incoming request.  This will stop login CSRF.
        """
        req = self._get_POST_no_csrf_cookie_request()
        req2 = CsrfViewMiddleware().process_view(req, post_form_view, (), {})
        self.assertEqual(403, req2.status_code)

    def test_process_request_csrf_cookie_no_token(self):
        """
        Check that if a CSRF cookie is present but no token, the middleware
        rejects the incoming request.
        """
        req = self._get_POST_csrf_cookie_request()
        req2 = CsrfViewMiddleware().process_view(req, post_form_view, (), {})
        self.assertEqual(403, req2.status_code)

    def test_process_request_csrf_cookie_and_token(self):
        """
        Check that if both a cookie and a token are present, the middleware
        lets it through.
        """
        req = self._get_POST_request_with_token()
        req2 = CsrfViewMiddleware().process_view(req, post_form_view, (), {})
        self.assertEqual(None, req2)

    def test_process_request_csrf_cookie_no_token_exempt_view(self):
        """
        Check that if a CSRF cookie is present and no token, but the csrf_exempt
        decorator has been applied to the view, the middleware lets it through.
        """
        req = self._get_POST_csrf_cookie_request()
        req2 = CsrfViewMiddleware().process_view(req, csrf_exempt(post_form_view), (), {})
        self.assertEqual(None, req2)

    def test_csrf_token_in_header(self):
        """
        Check that we can pass in the token in a header instead of in the form.
        """
        req = self._get_POST_csrf_cookie_request()
        req.META['HTTP_X_CSRFTOKEN'] = self._csrf_id
        req2 = CsrfViewMiddleware().process_view(req, post_form_view, (), {})
        self.assertEqual(None, req2)

    def test_put_and_delete_rejected(self):
        """
        Tests that HTTP PUT and DELETE methods have protection.
        """
        req = TestingHttpRequest()
        req.method = 'PUT'
        req2 = CsrfViewMiddleware().process_view(req, post_form_view, (), {})
        self.assertEqual(403, req2.status_code)

        req = TestingHttpRequest()
        req.method = 'DELETE'
        req2 = CsrfViewMiddleware().process_view(req, post_form_view, (), {})
        self.assertEqual(403, req2.status_code)

    def test_put_and_delete_allowed(self):
        """
        Tests that HTTP PUT and DELETE methods can get through with
        X-CSRFToken and a cookie.
        """
        req = self._get_GET_csrf_cookie_request()
        req.method = 'PUT'
        req.META['HTTP_X_CSRFTOKEN'] = self._csrf_id
        req2 = CsrfViewMiddleware().process_view(req, post_form_view, (), {})
        self.assertEqual(None, req2)

        req = self._get_GET_csrf_cookie_request()
        req.method = 'DELETE'
        req.META['HTTP_X_CSRFTOKEN'] = self._csrf_id
        req2 = CsrfViewMiddleware().process_view(req, post_form_view, (), {})
        self.assertEqual(None, req2)

    # Tests for the template tag method
    def test_token_node_no_csrf_cookie(self):
        """
        Check that CsrfTokenNode works when no CSRF cookie is set.
        """
        req = self._get_GET_no_csrf_cookie_request()
        resp 
= token_view(req) self.assertEqual(u"", resp.content) def test_token_node_empty_csrf_cookie(self): """ Check that we get a new token if the csrf_cookie is the empty string """ req = self._get_GET_no_csrf_cookie_request() req.COOKIES[settings.CSRF_COOKIE_NAME] = "" CsrfViewMiddleware().process_view(req, token_view, (), {}) resp = token_view(req) self.assertNotEqual(u"", resp.content) def test_token_node_with_csrf_cookie(self): """ Check that CsrfTokenNode works when a CSRF cookie is set """ req = self._get_GET_csrf_cookie_request() CsrfViewMiddleware().process_view(req, token_view, (), {}) resp = token_view(req) self._check_token_present(resp) def test_get_token_for_exempt_view(self): """ Check that get_token still works for a view decorated with 'csrf_exempt'. """ req = self._get_GET_csrf_cookie_request() CsrfViewMiddleware().process_view(req, csrf_exempt(token_view), (), {}) resp = token_view(req) self._check_token_present(resp) def test_get_token_for_requires_csrf_token_view(self): """ Check that get_token works for a view decorated solely with requires_csrf_token """ req = self._get_GET_csrf_cookie_request() resp = requires_csrf_token(token_view)(req) self._check_token_present(resp) def test_token_node_with_new_csrf_cookie(self): """ Check that CsrfTokenNode works when a CSRF cookie is created by the middleware (when one was not already present) """ req = self._get_GET_no_csrf_cookie_request() CsrfViewMiddleware().process_view(req, token_view, (), {}) resp = token_view(req) resp2 = CsrfViewMiddleware().process_response(req, resp) csrf_cookie = resp2.cookies[settings.CSRF_COOKIE_NAME] self._check_token_present(resp, csrf_id=csrf_cookie.value) def test_https_bad_referer(self): """ Test that a POST HTTPS request with a bad referer is rejected """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_REFERER'] = 'https://www.evil.org/somepage' req2 = CsrfViewMiddleware().process_view(req, post_form_view, (), {}) self.assertNotEqual(None, req2) self.assertEqual(403, req2.status_code) def test_https_good_referer(self): """ Test that a POST HTTPS request with a good referer is accepted """ req = self._get_POST_request_with_token() req._is_secure_override = True req.META['HTTP_HOST'] = 'www.example.com' req.META['HTTP_REFERER'] = 'https://www.example.com/somepage' req2 = CsrfViewMiddleware().process_view(req, post_form_view, (), {}) self.assertEqual(None, req2) def test_https_good_referer_2(self): """ Test that a POST HTTPS request with a good referer is accepted where the referer contains no trailing slash """ # See ticket #15617 req = self._get_POST_request_with_token()
the_count = [1, 2, 3, 4, 5] fruits = ['apple', 'oranges', 'pears', 'apricots',] change = [1, 'pennies', 2,
          'dimes', 3, 'quarters',]

# This first kind of for-loop goes through a list
for number in the_count:
    print("This is count %d" % number)

# same as above
for fruit in fruits:
    print("A fruit of type: %s" % fruit)

# also we can go through mixed lists too
# notice we have to use %r since we don't know what's in it
for i in change:
    print("I got %r" % i)

# we can also build lists, first start with an empty one
elements = []

# then use the range function to do 0 to 5 counts
for i in range(0, 6):
    print("Adding %d to the list." % i)
    # append is a function that lists understand
    elements.append(i)

# now we can print them out too
for i in elements:
    print("Element was: %d" % i)
from pydub import AudioSegment


class AudioMerger:
    # Spoken cue clips, expected as voices/<tag>.wav
    voice_tags = ["one", "two", "three", "four", "five", "ten",
                  "RUN", "relax", "completed"]

    def __init__(self, music):
        self.music = music
        self.additionalGain = 8  # boost applied to each voice clip, in dB
        self.voices = {}
        for voice in self.voice_tags:
            sound = AudioSegment.from_file('voices/' + voice + '.wav')
            sound += self.additionalGain
            self.voices[voice] = sound

    def addCountdown(self, startTime, isRun=True):
        # Overlay "one".."five" at 1..5 seconds before startTime (so playback
        # counts down five-four-three-two-one), "ten" at 10 seconds before,
        # then "RUN" or "relax" at startTime. overlay() positions are in
        # milliseconds; startTime is in seconds.
        for i in range(1, 6):
            voice = self.voices[self.voice_tags[i - 1]]
            self.music = self.music.overlay(voice, position=(startTime - i) * 1000)
        self.music = self.music.overlay(self.voices["ten"], position=(startTime - 10) * 1000)
        voice = self.voices["RUN" if isRun else "relax"]
        self.music = self.music.overlay(voice, position=startTime * 1000)

    def addCompleted(self, startTimeSec):
        self.music = self.music.overlay(self.voices["completed"], position=startTimeSec * 1000)

    def exportMusic(self, fname):
        self.music.export(fname + ".mp3", format="mp3")
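# Hedged usage sketch, not part of the original class: the input file name and
# the cue times below are invented for illustration.
if __name__ == '__main__':
    music = AudioSegment.from_file('music/background.mp3')  # hypothetical input
    merger = AudioMerger(music)
    merger.addCountdown(60, isRun=True)    # count down into a run at t=60 s
    merger.addCountdown(120, isRun=False)  # count down into a rest at t=120 s
    merger.addCompleted(180)               # "completed" cue at t=180 s
    merger.exportMusic('workout')          # writes workout.mp3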
# -*- coding: utf-8 -*- # Generated by Dja
ngo 1.9.7 on 2016-0
7-03 16:13 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('books', '0003_book_owner'), ] operations = [ migrations.RenameModel( old_name='Book', new_name='BookItem', ), ]
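# Illustrative note, not part of the generated file: RenameModel is a
# reversible operation, so this migration can be applied and rolled back with
# the standard management commands:
#
#   python manage.py migrate books          # apply: Book -> BookItem
#   python manage.py migrate books 0003     # reverse: back to 0003_book_owner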
# Copyright 2012 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from tempest_lib.commo
n.utils import data_utils from testtools import matchers from tempest.api.compute import base from tempest import config from tempest import test CONF = config.CONF class V
olumesGetTestJSON(base.BaseV2ComputeTest): @classmethod def skip_checks(cls): super(VolumesGetTestJSON, cls).skip_checks() if not CONF.service_available.cinder: skip_msg = ("%s skipped as Cinder is not available" % cls.__name__) raise cls.skipException(skip_msg) @classmethod def setup_clients(cls): super(VolumesGetTestJSON, cls).setup_clients() cls.client = cls.volumes_extensions_client @test.idempotent_id('f10f25eb-9775-4d9d-9cbe-1cf54dae9d5f') def test_volume_create_get_delete(self): # CREATE, GET, DELETE Volume volume = None v_name = data_utils.rand_name('Volume') metadata = {'Type': 'work'} # Create volume volume = self.client.create_volume(display_name=v_name, metadata=metadata) self.addCleanup(self.delete_volume, volume['id']) self.assertIn('id', volume) self.assertIn('displayName', volume) self.assertEqual(volume['displayName'], v_name, "The created volume name is not equal " "to the requested name") self.assertTrue(volume['id'] is not None, "Field volume id is empty or not found.") # Wait for Volume status to become ACTIVE self.client.wait_for_volume_status(volume['id'], 'available') # GET Volume fetched_volume = self.client.show_volume(volume['id']) # Verification of details of fetched Volume self.assertEqual(v_name, fetched_volume['displayName'], 'The fetched Volume is different ' 'from the created Volume') self.assertEqual(volume['id'], fetched_volume['id'], 'The fetched Volume is different ' 'from the created Volume') self.assertThat(fetched_volume['metadata'].items(), matchers.ContainsAll(metadata.items()), 'The fetched Volume metadata misses data ' 'from the created Volume')
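    # Hedged follow-on sketch, not part of the upstream test class: a similar
    # flow could verify that a deleted volume stops being listed. list_volumes()
    # is assumed to exist on the same volumes_extensions_client; treat this as
    # an illustration, not upstream code.
    #
    # def test_deleted_volume_not_listed(self):
    #     volume = self.client.create_volume(
    #         display_name=data_utils.rand_name('Volume'))
    #     self.client.wait_for_volume_status(volume['id'], 'available')
    #     self.delete_volume(volume['id'])
    #     listed_ids = [v['id'] for v in self.client.list_volumes()]
    #     self.assertNotIn(volume['id'], listed_ids)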
= map(str, line.split("=")) os_username = os_username.rstrip('\n') elif "OS_PASSWORD" in line: trash, os_password = map(str, line.split("=")) os_password = os_password.rstrip('\n') elif "OS_URL" in line: trash, os_url = map(str, line.split("=")) os_url = os_url.rstrip('\n') elif "OS_TOKEN" in line: trash, os_token = map(str, line.split("=")) os_token = os_token.rstrip('\n') d = {} d['username'] = os_username d['password'] = os_password d['auth_url'] = os_auth_url d['tenant_name'] = os_tenant_name d['token'] = os_token d['url'] = os_url logger.info(_("VirtualElephant::VMware::BDE - Loaded VIO credentials - %s") % d) # Using BDE API and vSphere API return the MAC address # for the virtual machines created by BDE. bde_server = self.properties.get(self.BDE_ENDPOINT) vcm_server = self.properties.get(self.VCM_SERVER) admin_user = self.properties.get(self.USERNAME) admin_pass = self.properties.get(self.PASSWORD) cluster_name = self.properties.get(self.CLUSTER_NAME) network_id = self.properties.get(self.NETWORK) security_group = self.properties.get(self.SECURITY_GROUP) prefix = 'https://' port = ':8443' logger.info(_("VirtualElephant::VMware::BDE - Creating NSX ports for network %s") % network_id) # Get the node names for the cluster from BDE curr = self._open_connection() header = {'content-type': 'application/json'} api_call = '/serengeti/api/cluster/' + cluster_name url = prefix + bde_server + port + api_call r = curr.get(url, headers=header, verify=False) raw_json = json.loads(r.text) cluster_data = raw_json["nodeGroups"] # Open connect to the vSphere API si = SmartConnect(host=vcm_server, user=admin_user, pwd=admin_pass, port=443) search_index = si.content.searchIndex root_folder = si.content.rootFolder for ng in cluster_data: nodes = ng["instances"] for node in nodes: logger.info(_("VirtualElephant::VMware::BDE - Creating NSX port for %s") % node.get("name")) vm_name = node.get("name") vm_moId = node.get("moId") port_name = vm_name + "-port0" # moId is not in format we need to match (x,y,z) = vm_moId.split(":") vm_moId = "'vim." + y + ":" + z + "'" # Go through each DC one at a time, in case there are multiple in vCenter for dc in root_folder.childEntity: content = si.content objView = content.viewManager.CreateContainerView(dc, [vim.VirtualMachine], True) vm_list = objView.view objView.Destroy() for instance in vm_list: # convert object to string so we can search i = str(instance.summary.vm) if vm_moId in i: # Matched the VM in BDE and vCenter logger.info(_("VirtualElephant::VMware::BDE - Match found for BDE node %s") % instance) for device in instance.config.hardware.device: if isinstance(device, vim.vm.device.VirtualEthernetCard): mac_address = str(device.macAddress) logger.info(_("VirtualElephant::VMware::BDE - Found MAC address %s") % mac_address) # If the node is already trying to get an IP address, # then a powercycle is required. #logger.info(_("VirtualElephant::VMware::BDE - Powercycling the node %s") % node.get("name")) #if instance.runtime.powerState == vim.VirtualMachinePowerState.poweredOn: # task = instance.PowerOff() # while task.info.state not in [vim.TaskInfo.State.success,
# vim.TaskInfo.State.error]: # logger.info(_("VirtualElephant::VMware::BDE - Waiting for node power off %s") % nod
e.get("name")) # time.sleep(5) # task = instance.PowerOn() # while task.info.state not in [vim.TaskInfo.State.success, # vim.TaskInfo.State.error]: # logger.info(_("VirtualElephant::VMware::BDE - Waiting for node power on %s") % node.get("name")) # time.sleep(5) # Create a new port through Neutron neutron = client.Client('2.0', username=os_username, password=os_password, auth_url=os_auth_url, tenant_name=os_tenant_name, endpoint_url=os_url, token=os_token) port_info = { "port": { "admin_state_up": True, "device_id": vm_name, "name": port_name, "mac_address": mac_address, "network_id": network_id } } logger.info(_("VirtualElephant::VMware::BDE - Neutron port string %s") % port_info) response = neutron.create_port(body=port_info) logger.info(_("VirtualElephant::VMware::BDE - NSX port creation response - %s") % response) return def handle_create(self): # REST API call to create a new VMware BDE cluster bde_server = self.properties.get(self.BDE_ENDPOINT) vcm_server = self.properties.get(self.VCM_SERVER) bde_user = self.properties.get(self.USERNAME) bde_pass = self.properties.get(self.PASSWORD) distro = self.properties.get(self.CLUSTER_TYPE) clusterName = self.properties.get(self.CLUSTER_NAME) network = self.properties.get(self.NETWORK) rp = self.properties.get(self.CLUSTER_RP) prefix = 'https://' port = ':8443' # hack because of Heat sends call before NSX network is created/assigned #time.sleep(60) # determine actual NSX portgroup created # hack - regex in Python is not a strength mob_string = '/mob/?moid=datacenter-2' curl_cmd = 'curl -k -u ' + bde_user + ':' + bde_pass + ' ' + prefix + vcm_server + mob_string grep_cmd = " | grep -oP '(?<=\(vxw).*(?=" + network + "\))' | grep -oE '[^\(]+$'" awk_cmd = " | awk '{print $0 \"" + network + "\"}'" full_cmd = curl_cmd + grep_cmd + awk_cmd p = subprocess.Popen(full_cmd, stdout=subprocess.PIPE, shell=True) (net_uid, err) = p.communicate() # Check to see if network_id is as we expect it if 'vxw' in net_uid: network_id = net_uid else: network_id = "vxw" + net_uid network_id = network_id.rstrip('\n') # Authenticate in a requests.session to the BDE server curr = self._open_connection() # Should check to see if network already exists as available network # This logs a big fat error message in /opt/serengeti/logs/serengeti.log # when the network doesn't exist. header = {'content-type': 'application/json'} api_call = '/serengeti/api/network/' + network url = prefix + bde_server + port + api_call r = curr.get(url, headers=header, verify=False) # Add new