prefix
stringlengths
0
918k
middle
stringlengths
0
812k
suffix
stringlengths
0
962k
modules import pygame from pygame.locals import * #see if we can load more than standard BMP if not pygame.image.get_extended(): raise SystemExit("Sorry, extended image module r
equired") #game constants MAX_SHOTS = 2 #most player bullets onscreen ALIEN_ODDS = 22 #chances a new alien appears BOMB_ODDS = 60 #chances a new bomb will drop ALIEN_RELOAD
= 12 #frames between new aliens SCREENRECT = Rect(0, 0, 640, 480) SCORE = 0 main_dir = os.path.split(os.path.abspath(__file__))[0] def load_image(file): "loads an image, prepares it for play" file = os.path.join(main_dir, 'data', file) try: surface = pygame.image.load(file) except pygame.error: raise SystemExit('Could not load image "%s" %s'%(file, pygame.get_error())) return surface.convert() def load_images(*files): imgs = [] for file in files: imgs.append(load_image(file)) return imgs class dummysound: def play(self): pass def load_sound(file): if not pygame.mixer: return dummysound() file = os.path.join(main_dir, 'data', file) try: sound = pygame.mixer.Sound(file) return sound except pygame.error: print ('Warning, unable to load, %s' % file) return dummysound() # each type of game object gets an init and an # update function. the update function is called # once per frame, and it is when each object should # change it's current position and state. the Player # object actually gets a "move" function instead of # update, since it is passed extra information about # the keyboard class Player(pygame.sprite.Sprite): speed = 10 bounce = 24 gun_offset = -11 images = [] def __init__(self): pygame.sprite.Sprite.__init__(self, self.containers) self.image = self.images[0] self.rect = self.image.get_rect(midbottom=SCREENRECT.midbottom) self.reloading = 0 self.origtop = self.rect.top self.facing = -1 def move(self, direction): if direction: self.facing = direction self.rect.move_ip(direction*self.speed, 0) self.rect = self.rect.clamp(SCREENRECT) if direction < 0: self.image = self.images[0] elif direction > 0: self.image = self.images[1] self.rect.top = self.origtop - (self.rect.left//self.bounce%2) def gunpos(self): pos = self.facing*self.gun_offset + self.rect.centerx return pos, self.rect.top class Alien(pygame.sprite.Sprite): speed = 13 animcycle = 12 images = [] def __init__(self): pygame.sprite.Sprite.__init__(self, self.containers) self.image = self.images[0] self.rect = self.image.get_rect() self.facing = random.choice((-1,1)) * Alien.speed self.frame = 0 if self.facing < 0: self.rect.right = SCREENRECT.right def update(self): self.rect.move_ip(self.facing, 0) if not SCREENRECT.contains(self.rect): self.facing = -self.facing; self.rect.top = self.rect.bottom + 1 self.rect = self.rect.clamp(SCREENRECT) self.frame = self.frame + 1 self.image = self.images[self.frame//self.animcycle%3] class Explosion(pygame.sprite.Sprite): defaultlife = 12 animcycle = 3 images = [] def __init__(self, actor): pygame.sprite.Sprite.__init__(self, self.containers) self.image = self.images[0] self.rect = self.image.get_rect(center=actor.rect.center) self.life = self.defaultlife def update(self): self.life = self.life - 1 self.image = self.images[self.life//self.animcycle%2] if self.life <= 0: self.kill() class Shot(pygame.sprite.Sprite): speed = -11 images = [] def __init__(self, pos): pygame.sprite.Sprite.__init__(self, self.containers) self.image = self.images[0] self.rect = self.image.get_rect(midbottom=pos) def update(self): self.rect.move_ip(0, self.speed) if self.rect.top <= 0: self.kill() class Bomb(pygame.sprite.Sprite): speed = 9 images = [] def __init__(self, alien): pygame.sprite.Sprite.__init__(self, self.containers) self.image = self.images[0] self.rect = self.image.get_rect(midbottom= alien.rect.move(0,5).midbottom) def update(self): self.rect.move_ip(0, self.speed) if self.rect.bottom >= 470: Explosion(self) self.kill() class Score(pygame.sprite.Sprite): def __init__(self): pygame.sprite.Sprite.__init__(self) self.font = pygame.font.Font(None, 20) self.font.set_italic(1) self.color = Color('white') self.lastscore = -1 self.update() self.rect = self.image.get_rect().move(10, 450) def update(self): if SCORE != self.lastscore: self.lastscore = SCORE msg = "Score: %d" % SCORE self.image = self.font.render(msg, 0, self.color) def main(winstyle = 0): # Initialize pygame pygame.init() if pygame.mixer and not pygame.mixer.get_init(): print ('Warning, no sound') pygame.mixer = None # Set the display mode winstyle = 0 # |FULLSCREEN bestdepth = pygame.display.mode_ok(SCREENRECT.size, winstyle, 32) screen = pygame.display.set_mode(SCREENRECT.size, winstyle, bestdepth) #Load images, assign to sprite classes #(do this before the classes are used, after screen setup) img = load_image('hearts.jpg') Player.images = [img, pygame.transform.flip(img, 1, 0)] img = load_image('explosion1.gif') Explosion.images = [img, pygame.transform.flip(img, 1, 1)] Alien.images = load_images('alien1.gif', 'alien2.gif', 'alien3.gif') Bomb.images = [load_image('bomb.gif')] Shot.images = [load_image('shot.gif')] #decorate the game window icon = pygame.transform.scale(Alien.images[0], (32, 32)) pygame.display.set_icon(icon) pygame.display.set_caption('Pygame Aliens') pygame.mouse.set_visible(0) #create the background, tile the bgd image bgdtile = load_image('table.jpg') background = pygame.Surface(SCREENRECT.size) for x in range(0, SCREENRECT.width, bgdtile.get_width()): background.blit(bgdtile, (x, 0)) screen.blit(background, (0,0)) pygame.display.flip() #load the sound effects boom_sound = load_sound('boom.wav') shoot_sound = load_sound('car_door.wav') if pygame.mixer: music = os.path.join(main_dir, 'data', 'house_lo.wav') pygame.mixer.music.load(music) pygame.mixer.music.play(-1) # Initialize Game Groups aliens = pygame.sprite.Group() shots = pygame.sprite.Group() bombs = pygame.sprite.Group() all = pygame.sprite.RenderUpdates() lastalien = pygame.sprite.GroupSingle() #assign default groups to each sprite class Player.containers = all Alien.containers = aliens, all, lastalien Shot.containers = shots, all Bomb.containers = bombs, all Explosion.containers = all Score.containers = all #Create Some Starting Values global score alienreload = ALIEN_RELOAD kills = 0 clock = pygame.time.Clock() #initialize our starting sprites global SCORE player = Player() Alien() #note, this 'lives' because it goes into a sprite group if pygame.font: all.add(Score()) while player.alive(): #get input for event in pygame.event.get(): if event.type == QUIT or \ (event.type == KEYDOWN and event.key == K_ESCAPE): return keystate = pygame.key.get_pressed() # clear/erase the last drawn sprites all.clear(screen, background) #update all the sprites all.update() #handle pl
turned: always type: str sample: 'CREATE INDEX CONCURRENTLY foo_idx ON test_table USING BTREE (id)' storage_params: description: Index storage parameters. returned: always type: list sample: [ "fillfactor=90" ] valid: description: Index validity. returned: always type: bool sample: true ''' import traceback PSYCOPG2_IMP_ERR = None try: import psycopg2 HAS_PSYCOPG2 = True except ImportError: HAS_PSYCOPG2 = False PSYCOPG2_IMP_ERR = traceback.format_exc() from ansible.module_utils.basic import AnsibleModule, missing_required_lib from ansible.module_utils.database import SQLParseError from ansible.module_utils.postgres import postgres_common_argument_spec from ansible.module_utils._text import to_native from ansible.module_utils.six import iteritems VALID_IDX_TYPES = ('BTREE', 'HASH', 'GIST', 'SPGIST', 'GIN', 'BRIN') # =========================================== # PostgreSQL module specific support methods. # class Index(object): def __init__(self, module, cursor, schema, name): self.name = name if schema: self.schema = schema else: self.schema = 'public' self.module = module self.cursor = cursor self.info = { 'name': self.name, 'state': 'absent', 'schema': '', 'tblname': '', 'tblspace': '', 'valid': True, 'storage_params': [], } self.exists = False self.__exists_in_db() self.executed_query = '' def get_info(self): """ Getter to refresh and return table info """ self.__exists_in_db() return self.info def __exists_in_db(self): """ Check index and collect info """ query = ("SELECT i.schemaname, i.tablename, i.tablespace, " "pi.indisvalid, c.reloptions " "FROM pg_catalog.pg_indexes AS i " "JOIN pg_catalog.pg_class AS c " "ON i.indexname = c.relname " "JOIN pg_catalog.pg_index AS pi " "ON c.oid = pi.indexrelid " "WHERE i.indexname = '%s'" % self.name) res = self.__exec_sql(query) if res: self.exists = True self.info = dict( name=self.name, state='present', schema=res[0][0], tblname=res[0][1], tblspace=res[0][2] if res[0][2] else '', valid=res[0][3], storage_params=res[0][4] if res[0][4] else [], ) return True else: self.exists = False return False def create(self, tblname, idxtype, columns, cond, tblspace, storage_params, concurrent=True): """ Create PostgreSQL index. """ # To change existing index we should write # 'postgresql_alter_table' standalone module. if self.exists: return False changed = False if idxtype is None: idxtype = "BTREE" query = 'CREATE INDEX' if concurrent: query += ' CONCURRENTLY' query += ' %s' % self.name if self.schema: query += ' ON %s.%s ' % (self.schema, tblname) else: query += 'public.%s ' % tblname query += 'USING %s (%s)' % (idxtype, columns) if storage_params: query += ' WITH (%s)' % storage_params if tblspace: query += ' TABLESPACE %s' % tblspace if cond: query += ' WHERE %s' % cond self.executed_query = query if self.__exec_sql(query, ddl=True): return True return False def drop(self, schema, cascade=False, concurrent=True): """ Drop PostgreSQL index. """ changed = False if not self.exists: return False query = 'DROP INDEX' if concurrent: query += ' CONCURRENTLY' if not schema: query += ' public.%s' % self.name else: query += ' %s.%s' % (schema, self.name) if cascade: query += ' CASCADE' self.executed_query = query if self.__exec_sql(query, ddl=True): return True return False def __exec_sql(self, query, ddl=False): try: self.cursor.execute(query) if not ddl: res = self.cursor.fetchall() return res return True except SQLParseError as e: self.module.fail_json(msg=to_native(e)) except Exception as e: self.module.fail_json(msg="Cannot execute SQL '%s': %s" % (query, to_native(e))) return False # =========================================== # Module execution. # def main(): argument_spec = postgres_common_argument_spec() argument_spec.update( idxname=dict(type='str', required=True, aliases=['name']), db=dict(type='str', aliases=['login_db']), state=dict(type='str', default='present', choices=['absent', 'present']), concurrent=dict(type='bool', default=True), table=dict(type='str'), idxtype=dict(type='str', aliases=['type']), columns=dict(type='list', aliases=['column']), cond=dict(type='str'), session_role=dict(type='str'), tablespace=dict(type='str'), storage_params=dict(type='list'), cascade=dict(type='bool', default=False), schema=dict(type='str'), ) module = An
sibleModule( ar
gument_spec=argument_spec, supports_check_mode=True, ) if not HAS_PSYCOPG2: module.fail_json(msg=missing_required_lib('psycopg2'), exception=PSYCOPG2_IMP_ERR) idxname = module.params["idxname"] state = module.params["state"] concurrent = module.params["concurrent"] table = module.params["table"] idxtype = module.params["idxtype"] columns = module.params["columns"] cond = module.params["cond"] sslrootcert = module.params["ca_cert"] session_role = module.params["session_role"] tablespace = module.params["tablespace"] storage_params = module.params["storage_params"] cascade = module.params["cascade"] schema = module.params["schema"] if concurrent and cascade: module.fail_json(msg="Cuncurrent mode and cascade parameters are mutually exclusive") if state == 'present': if not table: module.fail_json(msg="Table must be specified") if not columns: module.fail_json(msg="At least one column must be specified") else: if table or columns or cond or idxtype or tablespace: module.fail_json(msg="Index %s is going to be removed, so it does not " "make sense to pass a table name, columns, conditions, " "index type, or tablespace" % idxname) if cascade and state != 'absent': module.fail_json(msg="cascade parameter used only with state=absent") # To use defaults values, keyword arguments must be absent, so # check which values are empty and don't include in the **kw # dictionary params_map = { "login_host": "host", "login_user": "user", "login_password": "password", "port": "port", "db": "database", "ssl_mode": "sslmode", "ca_cert": "sslrootcert" } kw = dict((params_map[k], v) for (k, v) in iteritems(module.params) if k in params_map and v != "" and v is not None) # If a login_unix_socket is specified, incorporate it here. is_localhost = "host" not in kw or kw["host"] is None or kw["host"] == "localhost" if is_localhost and module.params["login_unix_socket"] != "": kw["host"] = module.params["login_unix_socket"] if psycopg2.__version__ < '2.4.3' and sslrootcert is not None: module.fail_json(msg='psycopg2 must be at least 2.4.3 in order to user the ca_cert parameter') try: db_connection = psycopg2.connect(**kw)
#!/usr/bin/env python import os from skimage._build import cython base_path = os.path.abspath(os.path.dirname(__file__)) def configuration(parent_package='', top_path=None): from numpy.distutils.misc_util import Configuration, get_numpy_include_dirs config = Configuration('morphology', parent_package, top_path) config.add_data_dir('tests') cython(['ccomp.pyx'], working_path=base_path) cython(['cmorph.pyx'], working_path=base_path) cython(['_watershed.pyx'], working_path=base_path) cython(['_skeletonize_cy.pyx'], working_path=base_path) cython(['_pnpoly.pyx'], working_path=base_path) cython(['_convex_hull.pyx'], working_path=base_path) cython(['_greyreconstruct.pyx'], working_path=base_path) config.add_extension('ccomp', sources=['ccomp.c'], include_dirs=[get_numpy_include_dirs()]) config.add_extension('cmorph', sources=['cmorph.c'], include_dirs=[get_numpy_include_dirs()]) config.add_extension('_watershed', sources=['_watershed.c'], include_dirs=[get_numpy_include_dirs()]) config.add_extension('_skeletonize_cy', sources=['_skeletonize_cy.c'], include_dirs=[get_numpy_include_dirs()]) config.add_extension('_pnpoly', sources=['_pnpoly.c'], include_dirs=[get_numpy_include_dirs(), '../_shared']) config.add_extension('_convex_hull', sources=['_convex_hull.c'], include_dirs=[get_numpy_include_dirs()]) config.add_extension('_greyreconstruct', sources=['_greyreconstruct.c'], include_dirs=[get_numpy_include_dirs()]) return config if __name__ == '__main__': from numpy.distutils.core imp
ort setup setup(maintai
ner='scikit-image Developers', author='Damian Eads', maintainer_email='scikit-image@googlegroups.com', description='Morphology Wrapper', url='https://github.com/scikit-image/scikit-image', license='SciPy License (BSD Style)', **(configuration(top_path='').todict()) )
#!/usr/bin/env python #coding:utf-8 # Author: mozman --<mozman@gmx.at> # Purpose: test svg element # Created: 25.09.2010 # Copyr
ight (C) 2010, Manfred Moitzi # License: MIT License import sys import unittest from svgwrite.container import SVG, Symbol class TestSVG(unittest.TestCase): def test_constructor(self): svg = SVG(insert=(10,20), size=(100,200)) self.assertTrue(isinstance(svg, Symbol)) self.assertEqual(svg.tostring(), '<svg height="200" width="100" x="10" y="20"><defs /></svg>') def test_add_svg_as_subelement(self): svg = SVG(id='svg')
subsvg = SVG(id='subsvg') svg.add(subsvg) self.assertEqual(svg.tostring(), '<svg id="svg"><defs /><svg id="subsvg"><defs /></svg></svg>') if __name__=='__main__': unittest.main()
from __fu
ture__ import absolute_import from .dev import Dev # noqa try: from .production import Prod
uction # noqa except ImportError: pass
#!/u
sr/bin/env python # -*- coding: utf-8 -*- from .randomproxy im
port ProxyDownloaderMiddleware
from flask import Flask import config from db import db_from_config from address_deduper.views import init_views from address_normalizer.deduping.near_duplicates import * def create_app(env, **kw): app = Flask(__name__) specified_config = kw.g
et('config') if specified_config: __import__('address_normalizer.' + specified_config) config.current_env = env conf = config.valid_configs.get(env) if not conf: sys.exit('Invalid config, choices are [%s]' % ','.join(valid_configs.keys())) app.config.from_object(conf) app.url_map.strict_slashes = False db = db_from_config(app.config) AddressNearDupe.configure(db, geohash_precision=app.config['GEOHASH_PRECISION'])
init_views(app) return app
import base64 import pathlib import pickle import textwrap from docutils.nodes import FixedTextElement, General, SkipNode from uqbar.book.extensions import Extension from uqbar.strings import normalize from supriya.ext import websafe_audio from supriya.io import Player class RenderExtension(Extension): template = normalize( """ <audio controls src="{file_path}"> Your browser does not support the <code>audio</code> element. </audio> """ ) class render_block(General, FixedTextElement): pass @classmethod def setup_console(cls, console, monkeypatch): monkeypatch.setattr( Player, "__call__", lambda self: console.push_proxy(cls(self.renderable, self.render_kwargs)), ) @classmethod def setup_sphinx(cls, app): app.add_node( cls.render_block, html=[cls.visit_block_html, None], latex=[cls.visit_block_latex, None], text=[cls.visit_block_text, cls.depart_block_text], ) def __init__(self, renderable, render_kwargs): self.renderable = pickle.loads(pickle.dumps(renderable)) self.render_kwargs = pickle.loads(pickle.dumps(render_kwargs)) def to_docutils(self): code = "\n".join( textwrap.wrap( base64.b64encode( pickle.dumps((self.renderable, self.render_kwargs))
).decode() ) ) node = self.render_block(code, code) return [node] @classmethod def render(cls, node, output_path): output_path.mkdir(exist_ok=True) renderable, render_kwargs = pickle.loads( base64.b64decode("".join(node[0].split())) ) return web
safe_audio( renderable.__render__(render_directory_path=output_path, **render_kwargs) ) @staticmethod def visit_block_html(self, node): absolute_file_path = RenderExtension.render( node, pathlib.Path(self.builder.outdir) / "_images" ) relative_file_path = ( pathlib.Path(self.builder.imgpath) / absolute_file_path.name ) result = RenderExtension.template.format(file_path=relative_file_path) self.body.append(result) raise SkipNode
__doc__ = """Random number array generators for numarray. This package was ported to numarray from Numeric's RandomA
rray and provides functions to generat
e numarray of random numbers. """ from RandomArray2 import *
# -*- coding: utf-8 -*- # Generated by Django 1.11 on 2019-01-29 16:08 from __
future__ import unicode_literals import django.contrib.postgres.fields from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('dashboard', '0013_auto_20181002_1939'), ] operations = [ migrations.CreateModel( name='Product', fields=[ ('id', models.AutoField(auto_created=
True, primary_key=True, serialize=False, verbose_name='ID')), ('value', django.contrib.postgres.fields.ArrayField(base_field=models.FloatField(), size=None)), ('key', models.CharField(help_text='Metric Key', max_length=30)), ('mjd', models.FloatField(help_text='MJD', null=True)), ('job', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='product_job', to='dashboard.Job')), ], ), ]
#20140125 Cui.Yingyun #Convert C style code to Char style import sys file = sys.stdin if len(sys.argv) > 1: file = open(s
ys.argv[1]) else: print "Input argument which specify shader program" sys.exit(0); lines = file.readlines() print '\"\\' for line in lines[:-1]: print line.rstrip() + '\\n\\' print lines[-1].rstrip(
) + '\\n\"' file.close()
#************************************************************************ # Codelet Tuning Infrastructure # Copyright (C) 2010-2015 Intel Corporation, CEA, GENCI, and UVSQ # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. #************************************************************************ # Authors: Franck Talbart, Mathieu Bordet, Nicolas Petit from cti_hapi import database_manager, database from cti_hapi.main import HapiPlugin, hapi_command import sys class CategoryPlugin(HapiPlugin): @hapi_command("list") def list_cmd(self, params): """ Returns a list of category Args: self: class of the plugin params: working parameters Returns: Nothing """ db = database.Database() result = database_manager.search({'NAME':["plugin_uid"], 'TYPE':"=", 'VAL':str(self
.plugin_uid)}, db, "category", fields=["entry_info.entry_uid", "category.name"]
) for r in result: print r[0] + ":" + r[1] #--------------------------------------------------------------------------- # By pass the authentification system # Needed by the doc plugin def check_passwd(self): return True #--------------------------------------------------------------------------- if __name__ == "__main__": p = CategoryPlugin() exit(p.main(sys.argv))
response.title = settings.title response.subtitle = settings.subtitle re
sponse.meta.author = '%(author)s <%(author_email)s>' % settings response.meta.keywords = settings.keywords response.meta.description = settings.description response.menu = [ (T('Index'),URL('default','index')==URL(),URL('default','index'),[]), (T('Video'),URL('default','video')==URL(),URL('default','video'),[]),
(T('Info'), False, "http://www.oarstack.com/2015/04/oarstack-analysis/", []), ] response.google_analytics_id="UA-52135133-2"
def vault(args): parser = argparse.ArgumentPar
ser() subparsers = parser.add_subparsers() if __name__ == "__main_
_": vault(sys.argv)
# -*- coding: utf-8 -*- # # phaxio-python documentation build configuration file, created by # sphinx-quickstart on Sun Jan 8 20:17:15 2017. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # import os import sys sys.path.insert(0, os.path.abspath('../../')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. # # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.ifconfig', # 'sphinx.ext.githubpages', # 'sphinx.ext.autosectionlabel', 'sphinx.ext.autosummary' ] autosummary_generate = True autodoc_default_flags = ['members', 'undoc-members'] # skips dccumenting to_dict and to_str in model types def skip_member(app, what, name, obj, skip, options): if name in ['to_dict', 'to_str']: return True return skip # skips all docstrings in model types, but leave the :rtype: tags so we have type information and links def remove_module_docstring(app, what, name, obj, options, lines): if name.startswith("phaxio.swagger_client"): lines[:] = [x for x in lines if 'rtype' in x] def setup(app): app.connect('autodoc-skip-member', skip_member) app.connect("autodoc-process-docstring", remove_module_docstring) # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The master toctree document. master_doc = 'index' # General information about the project. project = u'phaxio-python' copyright = u'2017, Ari Polsky' author = u'Ari Polsky' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = u'.2' # The full version, including alpha/beta/rc tags. release = u'.2' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This patterns also effect to html_static_path and html_extra_path exclude_patterns = [] # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'alabaster' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # # html_theme_options = {} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # -- Options for HTMLHelp output ------------------------------------------ # Output file base name for HTML help builder. htmlhelp_basename = 'phax
io-pythondoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # # 'papersiz
e': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # # 'preamble': '', # Latex figure (float) alignment # # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'phaxio-python.tex', u'phaxio-python Documentation', u'Ari Polsky', 'manual'), ] # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'phaxio-python', u'phaxio-python Documentation', [author], 1) ] # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'phaxio-python', u'phaxio-python Documentation', author, 'phaxio-python', 'One line description of project.', 'Miscellaneous'), ] # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = {'https://docs.python.org/': None}
# Copyright 2013 dotCloud inc. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import base64 import json import logging import os import six from .. import errors INDEX_NAME = 'docker.io' INDEX_URL = 'https://{0}/v1/'.format(INDEX_NAME) DOCKER_CONFIG_FILENAME = os.path.join('.docker', 'config.json') LEGACY_DOCKER_CONFIG_FILENAME = '.dockercfg' log = logging.getLogger(__name__) def resolve_repository_name(repo_name): if '://' in repo_name: raise errors.InvalidRepository( 'Repository name cannot contain a scheme ({0})'.format(repo_name) ) index_name, remote_name = split_repo_name(repo_name) if index_name[0] == '-' or index_name[-1] == '-': raise errors.InvalidRepository( 'Invalid index name ({0}). Cannot begin or end with a' ' hyphen.'.format(index_name) ) return resolve_index_name(index_name), remote_name def resolve_index_name(index_name): index_name = convert_to_hostname(index_name) if index_name == 'index.'+INDEX_NAME: index_name = INDEX_NAME return index_name def split_repo_name(repo_name): parts = repo_name.split('/', 1) if len(parts) == 1 or ( '.' not in parts[0] and ':' not in parts[0] and parts[0] != 'localhost' ): # This is a docker index repo (ex: username/foobar or ubuntu) return INDEX_NAME, repo_name return tuple(parts) def resolve_authconfig(authconfig, registry=None): """ Returns the authentication data from the given auth configuration for a specific registry. As with the Docker client, legacy entries in the config with full URLs are stripped down to hostnames before checking for a match. Returns None if no match was found. """ # Default to the public index server registry = resolve_index_name(registry) if registry else INDEX_NAME log.debug("Looking for auth entry for {0}".format(repr(registry))) if registry in authconfig: log.debug("Found {0}".format(repr(registry))) return authconfig[registry] for key, config in six.iteritems(authconfig): if resolve_index_name(key) == registry: log.debug("Found {0}".format(repr(key))) return config log.debug("No entry found") return None def convert_to_hostname(url): return url.replace('http://', '').replace('https://', '').split('/', 1)[0] def decode_auth(auth): if isinstance(auth, six.string_types): auth = auth.encode('ascii') s = base64.b64decode(auth) login, pwd = s.split(b':', 1) return login.decode('utf8'), pwd.decode('utf8') def encode_header(auth): auth_json = json.dumps(auth).encode('ascii') return base64.urlsafe_b64encode(auth_json) def parse_auth(entries): """ Parses authentication entries Args: entries: Dict of authentication entries. Returns: Authentication registry. """ conf = {} for registry, entry in six.iteritems(entries): username, password = decode_auth(entry['auth']) log.debug( 'Found entry (registry={0}, username={1})' .format(repr(registry), repr(username)) ) conf[registry] = { 'username': username, 'password': password, 'email': entry.get('email'), 'serveraddress': registry, } return conf def find_config_file(config_path=None): environment_path = os.path.join( os.environ.get('DOCKER_CONFIG'), os.path.basename(DOCKER_CONFIG_FILENAME) ) if os.environ.get('DOCKER_CONFIG') else None paths = [ config_path, # 1 environment_path, # 2 os.path.join(os.path.expanduser('~'), DOCKER_CONFIG_FILENAME), # 3 os.path.join( os.path.expanduser('~'), LEGACY_DOCKER_CONFIG_FILENAME ) # 4 ] for path in paths: if path and os.path.exists(path): return path return None def load_config(config_path=None): """ Loads authentication data from a Docker configuration file in the given root directory or if config_path is passed use given path. Lookup priority: explicit config_path parameter > DOCKER_CONFIG environment variable > ~/.docker/config.json > ~/.dockercfg """ config_file = find_config_file(config_path) if not config_file: log.debug("File doesn't exist") return {} try: with open(config_file) as f: data = json.load(f) if data.get('auths'): log.debug("Found 'auths' section") return parse_auth(data['auths']) else: log.debug("Couldn't find 'auths' section") f.seek(0) return parse_auth(json.load(f)) except (IOError,
KeyError, Va
lueError) as e: # Likely missing new Docker config file or it's in an # unknown format, continue to attempt to read old location # and format. log.debug(e) log.debug("Attempting to parse legacy auth file format") try: data = [] with open(config_file) as f: for line in f.readlines(): data.append(line.strip().split(' = ')[1]) if len(data) < 2: # Not enough data raise errors.InvalidConfigFile( 'Invalid or empty configuration file!' ) username, password = decode_auth(data[0]) return { INDEX_NAME: { 'username': username, 'password': password, 'email': data[1], 'serveraddress': INDEX_URL, } } except Exception as e: log.debug(e) pass log.debug("All parsing attempts failed - returning empty config") return {}
def foo(a, b=None)
: pas
s foo("a", "b") foo("a") foo("a", b="b") foo("a", None)
edFloatField, EncryptedEmailField, EncryptedBooleanField, ) from keyczar import keyczar, readers # Test class that encapsulates some Keyczar functions. # Requirements are to implement __init__, decrypt(), encrypt() class TestCrypter(object): def __init__(self, keyname, *args, **kwargs): self.keydata = readers.FileReader(keyname) self.crypter = keyczar.Crypter(self.keydata) def encrypt(self, cleartext): return self.crypter.Encrypt(cleartext) def decrypt(self, ciphertext): return self.crypter.Decrypt(ciphertext) class TestModel(models.Model): char = EncryptedCharField(max_length=255, null=True, blank=True) prefix_char = EncryptedCharField(max_length=255, prefix='ENCRYPTED:::', blank=True) decrypt_only = EncryptedCharField(max_length=255, decrypt_only=True, blank=True) short_char = EncryptedCharField( max_length=50, null=True, enforce_max_length=True, blank=True) text = EncryptedTextField(null=True, blank=True) datetime = EncryptedDateTimeField(null=True, blank=True) integer = EncryptedIntegerField(null=True, blank=True) date = EncryptedDateField(null=True, blank=True) floating = EncryptedFloatField(null=True, blank=True) email = EncryptedEmailField(null=True, blank=True) boolean = EncryptedBooleanField(default=False, blank=True) char_custom_crypter = EncryptedCharField( max_length=255, null=True,crypter_klass=TestCrypter, blank=True) class FieldTest(TestCase): IS_POSTGRES = settings.DATABASES['default']['ENGINE'] == 'django.db.backends.postgresql_psycopg2' def get_db_value(self, field, model_id): cursor = connection.cursor() cursor.execute( 'select {0} ' 'from encrypted_fields_testmodel ' 'where id = {1};'.format(field, model_id) ) return cursor.fetchone()[0] def test_char_field_encrypted_custom(self): plaintext = 'Oh hi, test reader!' model = TestModel() model.char_custom_crypter = plaintext model.save() ciphertext = self.get_db_value('char_custom_crypter', model.id) self.assertNotEqual(plaintext, ciphertext) self.assertTrue('test' not in ciphertext) fresh_model = TestModel.objects.get(id=model.id) self.assertEqual(fresh_model.char_custom_crypter, plaintext) def test_prefix_char_field_encrypted(self): plaintext = 'Oh hi, test reader!' model = TestModel() model.prefix_char = plaintext model.save() ciphertext = self.get_db_value('prefix_char', model.id) self.assertNotEqual(plaintext, ciphertext) self.assertTrue('test' not in ciphertext) self.assertTrue(ciphertext.startswith('ENCRYPTED:::')) def test_decrypt_only_field(self): known_plaintext = 'Oh hi, test reader!' known_ciphertext = ( 'ADQA_82aYN2v_PzXcNPZprS-Ak_xbPmHj8TRuj8sU74ydIJeWtgpKK' 'Irmvw9ZnZCRpXRfZ6blOaBWhjsw62nNu7vQXWJXMCdmw' ) model = TestModel() model.decrypt_only = known_ciphertext model.save() plaintext = self.get_db_value('decrypt_only', model.id) self.assertEquals(plaintext, known_plaintext) def test_decrypt_only_plaintext(self): known_plaintext = 'I am so plain and ordinary' model = TestModel() model.decrypt_only = known_plaintext model.save() plaintext = self.get_db_value('decrypt_only', model.id) self.assertEquals(plaintext, known_plaintext) def test_char_field_encrypted(self): plaintext = 'Oh hi, test reader!' model = TestModel() model.char = plaintext model.save() ciphertext = self.get_db_value('char', model.id) self.assertNotEqual(plaintext, ciphertext) self.assertTrue('test' not in ciphertext) fresh_model = TestModel.objects.get(id=model.id) self.assertEqual(fresh_model.char, plaintext) def tes
t_unicode_encrypted(self): plaintext = u'Oh hi, test reader! 🐱' model = TestModel() model.char = plaintext m
odel.save() ciphertext = self.get_db_value('char', model.id) self.assertNotEqual(plaintext, ciphertext) self.assertTrue('test' not in ciphertext) fresh_model = TestModel.objects.get(id=model.id) self.assertEqual(fresh_model.char, plaintext) def test_short_char_field_encrypted(self): """ Test the max_length validation of an encrypted char field """ plaintext = 'Oh hi, test reader!' model = TestModel() model.short_char = plaintext self.assertRaises(ValueError, model.save) def test_text_field_encrypted(self): plaintext = 'Oh hi, test reader!' * 10 model = TestModel() model.text = plaintext model.save() ciphertext = self.get_db_value('text', model.id) self.assertNotEqual(plaintext, ciphertext) self.assertTrue('test' not in ciphertext) fresh_model = TestModel.objects.get(id=model.id) self.assertEqual(fresh_model.text, plaintext) def test_datetime_field_encrypted(self): plaintext = timezone.now() model = TestModel() model.datetime = plaintext model.save() ciphertext = self.get_db_value('datetime', model.id) # Django's normal date serialization format self.assertTrue(re.search('^\d\d\d\d-\d\d-\d\d', ciphertext) is None) fresh_model = TestModel.objects.get(id=model.id) self.assertEqual(fresh_model.datetime, plaintext) def test_integer_field_encrypted(self): plaintext = 42 model = TestModel() model.integer = plaintext model.save() ciphertext = self.get_db_value('integer', model.id) self.assertNotEqual(plaintext, ciphertext) self.assertNotEqual(plaintext, str(ciphertext)) fresh_model = TestModel.objects.get(id=model.id) self.assertEqual(fresh_model.integer, plaintext) def test_date_field_encrypted(self): plaindate = timezone.now().date() model = TestModel() model.date = plaindate model.save() ciphertext = self.get_db_value('date', model.id) fresh_model = TestModel.objects.get(id=model.id) self.assertNotEqual(ciphertext, plaindate.isoformat()) self.assertEqual(fresh_model.date, plaindate) def test_float_field_encrypted(self): plaintext = 42.44 model = TestModel() model.floating = plaintext model.save() ciphertext = self.get_db_value('floating', model.id) self.assertNotEqual(plaintext, ciphertext) self.assertNotEqual(plaintext, str(ciphertext)) fresh_model = TestModel.objects.get(id=model.id) self.assertEqual(fresh_model.floating, plaintext) def test_email_field_encrypted(self): plaintext = 'aron.jones@gmail.com' # my email address, btw model = TestModel() model.email = plaintext model.save() ciphertext = self.get_db_value('email', model.id) self.assertNotEqual(plaintext, ciphertext) self.assertTrue('aron' not in ciphertext) fresh_model = TestModel.objects.get(id=model.id) self.assertEqual(fresh_model.email, plaintext) def test_boolean_field_encrypted(self): plaintext = True model = TestModel() model.boolean = plaintext model.save() ciphertext = self.get_db_value('boolean', model.id) self.assertNotEqual(plaintext, ciphertext) self.assertNotEqual(True, ciphertext) self.assertNotEqual('True', ciphertext) self.assertNotEqual('true', ciphertext) self.assertNotEqual('1', ciphertext) self.assertNotEqual(1, ciphertext) self.assertTrue(not isinstance(ciphertext, bool)) fresh_model = TestModel.objects.get(id=model.id) self.assertEqual(fresh_model.boolean, plaintext) @unittest.skipIf(django.VERSION < (1, 7), "Issue exists in django 1.7+") @unittest.skipIf(not IS_POSTGRES, "Issue exists
# -*- coding: UTF-8 -*- ''' series9 scraper for Exodus forks. Nov 9 2018 - Checked Updated and refactored by someone. Originally created by others. ''' import re,traceback,urllib,urlparse from resources.lib.modules import cleantitle from resources.lib.modules import client from resources.lib.modules import directstream from resources.lib.modules import log_utils class source: def __init__(self): self.priority = 1 self.language = ['en'] self.domains = ['seriesonline.io','series9.io','gomovies.pet'] self.base_link = 'https://series9.co/' self.search_link = '/movie/search/%s' def matchAlias(self, title, aliases): try: for alias in aliases: if cleantitle.get(title) == cleantitle.get(alias['title']): return True except: return False def movie(self, imdb, title, localtitle, aliases, year): try: aliases.append({'country': 'us', 'title': title}) url = {'imdb': imdb, 'title': title, 'year': year, 'aliases': aliases} url = urllib.urlencode(url) return url except: failure = traceback.format_exc() log_utils.log('Series9 - Exception: \n' + str(failure)) return def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year): try: aliases.append({'country': 'us', 'title': tvshowtitle}) url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'year': year, 'aliases': aliases} url = urllib.urlencode(url) return url except: failure = traceback.format_exc() log_utils.log('Series9 - Exception: \n' + str(failure)) return def episode(self, url, imdb, tvdb, title, premiered, season, episode): try: if url == None: return url = urlparse.parse_qs(url) url = dict([(i, url[i][0]) if url[i] else (i, '') for i in url]) url['title'],
url['premiered'], url['season'], url['episode'] = title, premiered, season, episode url = urllib.urlencode(url) return url except: failure = traceback.format_exc() log_utils.log('Series9 - Exception: \n' + str(failure))
return def searchShow(self, title, season, aliases, headers): try: title = cleantitle.normalize(title) search = '%s Season %01d' % (title, int(season)) url = urlparse.urljoin(self.base_link, self.search_link % cleantitle.geturl(search)) r = client.request(url, headers=headers, timeout='15') r = client.parseDOM(r, 'div', attrs={'class': 'ml-item'}) r = zip(client.parseDOM(r, 'a', ret='href'), client.parseDOM(r, 'a', ret='title')) r = [(i[0], i[1], re.findall('(.*?)\s+-\s+Season\s+(\d)', i[1])) for i in r] r = [(i[0], i[1], i[2][0]) for i in r if len(i[2]) > 0] url = [i[0] for i in r if self.matchAlias(i[2][0], aliases) and i[2][1] == season][0] url = urlparse.urljoin(self.base_link, '%s/watching.html' % url) return url except: failure = traceback.format_exc() log_utils.log('Series9 - Exception: \n' + str(failure)) return def searchMovie(self, title, year, aliases, headers): try: title = cleantitle.normalize(title) url = urlparse.urljoin(self.base_link, self.search_link % cleantitle.geturl(title)) r = client.request(url, headers=headers, timeout='15') r = client.parseDOM(r, 'div', attrs={'class': 'ml-item'}) r = zip(client.parseDOM(r, 'a', ret='href'), client.parseDOM(r, 'a', ret='title')) results = [(i[0], i[1], re.findall('\((\d{4})', i[1])) for i in r] try: r = [(i[0], i[1], i[2][0]) for i in results if len(i[2]) > 0] url = [i[0] for i in r if self.matchAlias(i[1], aliases) and (year == i[2])][0] except: url = None pass if (url == None): url = [i[0] for i in results if self.matchAlias(i[1], aliases)][0] url = urlparse.urljoin(self.base_link, '%s/watching.html' % url) return url except: failure = traceback.format_exc() log_utils.log('Series9 - Exception: \n' + str(failure)) return def sources(self, url, hostDict, hostprDict): try: sources = [] if url == None: return sources data = urlparse.parse_qs(url) data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data]) aliases = eval(data['aliases']) headers = {} if 'tvshowtitle' in data: ep = data['episode'] url = '%s/film/%s-season-%01d/watching.html?ep=%s' % (self.base_link, cleantitle.geturl(data['tvshowtitle']), int(data['season']), ep) r = client.request(url, headers=headers, timeout='10', output='geturl') if url == None: url = self.searchShow(data['tvshowtitle'], data['season'], aliases, headers) else: url = self.searchMovie(data['title'], data['year'], aliases, headers) if url == None: raise Exception() r = client.request(url, headers=headers, timeout='10') r = client.parseDOM(r, 'div', attrs={'class': 'les-content'}) if 'tvshowtitle' in data: ep = data['episode'] links = client.parseDOM(r, 'a', attrs={'episode-data': ep}, ret='player-data') else: links = client.parseDOM(r, 'a', ret='player-data') for link in links: if '123movieshd' in link or 'seriesonline' in link: r = client.request(link, headers=headers, timeout='10') r = re.findall('(https:.*?redirector.*?)[\'\"]', r) for i in r: try: sources.append({'source': 'gvideo', 'quality': directstream.googletag(i)[0]['quality'], 'language': 'en', 'url': i, 'direct': True, 'debridonly': False}) except: pass else: try: host = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(link.strip().lower()).netloc)[0] if not host in hostDict: raise Exception() host = client.replaceHTMLCodes(host) host = host.encode('utf-8') sources.append({'source': host, 'quality': 'SD', 'language': 'en', 'url': link, 'direct': False, 'debridonly': False}) except: pass return sources except: failure = traceback.format_exc() log_utils.log('Series9 - Exception: \n' + str(failure)) return sources def resolve(self, url): if "google" in url: return directstream.googlepass(url) else: return url
#!/usr/bin/env python # # Copyright (c) 2011, Willow Garage, Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the Willow Garage, Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # import ecto import ecto_test import sys def test_nodelay(): plasm = ecto.Plasm() ping = ecto_test.Ping("Ping") metrics = ecto_test.Metrics("Metrics", queue_size=10) plasm.connect(ping[:] >> metrics[:]) sched = ecto.Scheduler(plasm) sched.execute(niter=10000) print "Hz:", metrics.outputs.hz, " Latency in seconds: %f" % metrics.outputs.latency_seconds # these are kinda loose assert metrics.outputs.hz > 5000 assert metrics.outputs.latency_seconds < 0.0001 def test_20hz(): plasm = ecto.Plasm() ping = ecto_test.Ping("Ping") throttle = ecto_test.Throttle("Throttle", rate=20) metrics = ecto_test.Metrics("Metrics", queue_size=10) plasm.connect(ping[:] >> throttle[:], throttle[:] >> metrics[:]) sched = ecto.Scheduler(plasm) sched.execute(niter=100) print "Hz:", metrics.outputs.hz, " Latency in seconds: %f" % metrics.outputs.latency_seconds # these are kinda loose assert 19 < metrics.outputs.hz < 21 assert 0.04 < metrics.outputs.latency_seconds < 0.06 def makeplasm(n_nodes): plasm = ecto.Plasm() ping = ecto_test.Ping("Ping") throttle = ecto_test.Sleep("Sleep_0", seconds=1.0/n_nodes) plasm.connect(ping[:] >> throttle[:]) for j in range(n_nodes-1): # one has already been added throttle_next = ecto_test.Sleep("Sleep_%u" % (j+1), seconds=1.0/n_nodes) plasm.connect(throttle, "out", throttle_next, "in") throttle = throttle_next metrics = ecto_test.Metrics("Metrics", queue_size=4) plasm.connect(throttle[:] >
> met
rics[:]) # o = open('graph.dot', 'w') # print >>o, plasm.viz() # o.close() # print "\n", plasm.viz(), "\n" return (plasm, metrics) def test_st(niter, n_nodes): (plasm, metrics) = makeplasm(n_nodes) #sched = ecto.Scheduler(plasm) #sched.execute(niter) sched = ecto.Scheduler(plasm) sched.execute(niter) print "Hz:", metrics.outputs.hz, " Latency in seconds:", metrics.outputs.latency_seconds assert 0.95 < metrics.outputs.hz < 1.05 assert 0.95 < metrics.outputs.latency_seconds < 1.05 # # It is hard to test the middle cases, i.e. if you have one thread # per node, things should run at n_nodes hz and 1 second latency but # if there are less than that, things are somewhere in the middle. # Also your latency tends to be worse as you have to wait for the # graph to "fill up" # def test_tp(niter, n_nodes): (plasm, metrics) = makeplasm(n_nodes) sched = ecto.Scheduler(plasm) sched.execute(niter=niter) print "Hz:", metrics.outputs.hz, " Latency in seconds:", metrics.outputs.latency_seconds assert n_nodes * 0.95 < metrics.outputs.hz < n_nodes * 1.05 assert 0.9 < metrics.outputs.latency_seconds < 1.1 test_nodelay() test_20hz() test_st(5, 5) test_st(5, 12) test_tp(20, 15) test_tp(20, 10) test_tp(20, 5)
# -*- coding: utf-8 -*- # This file is a part of MediaDrop (http://www.mediadrop.net), # Copyright 2009-2014 MediaDrop contributors # For the exact contribution history, see the git revision log. # The source code in this file is dual licensed under the MIT license or # the GPLv3 or (at your option) any later version. # See LICENSE.txt in the main project directory, for more information. from mediadrop.lib.auth
.query_result_proxy import StaticQuery from mediadrop.lib.test.pythonic_testcase import *
class StaticQueryTest(PythonicTestCase): def setUp(self): self.query = StaticQuery([1, 2, 3, 4, 5]) def test_can_return_all_items(self): assert_equals([1, 2, 3, 4, 5], self.query.all()) def test_can_return_all_items_with_iteration(self): assert_equals([1, 2, 3, 4, 5], list(self.query)) def test_can_use_offset(self): assert_equals([3, 4, 5], self.query.offset(2).all()) def test_can_build_static_query(self): assert_equals([1, 2], list(self.query.limit(2))) def test_knows_number_of_items(self): all_items = self.query.offset(1).all() assert_length(4, all_items) assert_equals(4, self.query.count()) assert_equals(4, len(self.query)) def test_supports_slicing(self): assert_equals([3, 4, 5], self.query[2:]) assert_equals(3, self.query.offset(1)[2]) def test_can_return_first_item(self): assert_equals(1, self.query.first()) list(self.query) # consume all other items assert_none(self.query.first()) import unittest def suite(): suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(StaticQueryTest)) return suite if __name__ == '__main__': unittest.main(defaultTest='suite')
#!/us
r/bin/python35 def add(x, y): return x + y def dec(x, y): return x - y def div(x, y): if y == 0: return 0 return x / y def mult(x, y): return x * y if __name__ ==
'__main__': print('Module: Calc')
# -*- coding: utf-8 -*- # Generated by Django 1.11 on 2017-11-01 20:02 from __future__ import unicode_literals f
rom django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ ('phone_numbers', '0001_initial'), ('sims', '0001_initial'), ] operations = [ migrations.AddField( model_name='phonenumber', name='related_sim', field=models.Forei
gnKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='phone_numbers', to='sims.Sim'), ), ]
# -*- coding: utf-8 -*- """ ********** Exceptions ********** Base exceptions and errors for NetworkX. """ __author__ = """Aric Hagberg (hagberg@lanl.gov)\nPieter Swart (swart@lanl.gov)\nDan Schult(dschult@colgate.edu)\nLoïc Séguin-C. <loicseguin@gmail.com>""" # Copyright (C) 2004-2016 by # Aric Hagberg <hagberg@lanl.gov> # Dan Schult <dschult@colgate.edu> # Pieter Swart <swart@lanl.gov> # All rights reserved. # BSD license. # # Exception handling # the root of all Exceptions class NetworkXException(Exceptio
n): """Base class for exceptions in NetworkX.""" class NetworkXError(NetworkXException): """Exception for a serious error in NetworkX""" class NetworkXPointlessConcept(NetworkXException): """Harary, F. and Read, R. "Is the Null Graph a Pointless Concept?" In Graphs and Combinatorics Conference, Geor
ge Washington University. New York: Springer-Verlag, 1973. """ class NetworkXAlgorithmError(NetworkXException): """Exception for unexpected termination of algorithms.""" class NetworkXUnfeasible(NetworkXAlgorithmError): """Exception raised by algorithms trying to solve a problem instance that has no feasible solution.""" class NetworkXNoPath(NetworkXUnfeasible): """Exception for algorithms that should return a path when running on graphs where such a path does not exist.""" class NetworkXNoCycle(NetworkXUnfeasible): """Exception for algorithms that should return a cycle when running on graphs where such a cycle does not exist.""" class NetworkXUnbounded(NetworkXAlgorithmError): """Exception raised by algorithms trying to solve a maximization or a minimization problem instance that is unbounded.""" class NetworkXNotImplemented(NetworkXException): """Exception raised by algorithms not implemented for a type of graph."""
na
me = "hello_world" version = "1.0.0" authors = [ "ajohns" ] description = \ """ Python-based hello world example package. """ tools = [ "hello" ] requires = [ "python" ] uuid = "examples.hello_world_py" build_command = 'python {root}/build.py {install}' def commands(): env.PYTHONPATH.append("{root}/python") env.PATH.append("{root}/bin")
from netfields import InetAddressField, CidrAddressField from django.db import models from django.core.exceptions import ValidationError from django.utils.translation import ugettext_lazy as _ from django.conf import settings from nodeshot.core.base.models import BaseAccessLevel from ..managers import NetAccessLevelManager from choices import IP_PROTOCOLS class Ip(BaseAccessLevel): """ IP Address Model """ interface = models.ForeignKey('net.Interface', verbose_name=_('interface')) address = InetAddressField(verbose_name=_('ip address'), unique=True, db_index=True) protocol = models.CharField(_('IP Protocol Version'), max_length=4, choices=IP_PROTOCOLS, default=IP_PROTOCOLS[0][0], blank=True) netmask = CidrAddressField(_('netmask (CIDR, eg: 10.40.0.0/24)'), blank=True, null=True) objects = NetAccessLevelManager() class Meta: app_label = 'net' permissions = (('can_view_ip', 'Can view ip'),) verbose_name = _('ip address') verbose_name_plural = _('ip addresses') def __uni
code__(self): return '%s: %s' % (self.protocol, self.address) def clean(self, *args, **kwargs): """ TODO """ # netaddr.IPAddress('10.40.2.1') in netaddr.IPNetwork('10.40.0.0/24') pass def save(self, *args, **kwargs): """ Determines ip protocol version automatically. Stores address in interface shortcuts for convenience. """ self.protocol = 'ipv%d' % self.addre
ss.version # save super(Ip, self).save(*args, **kwargs) # TODO: do we really need this? # save shortcut on interfaces #ip_cached_list = self.interface.ip_addresses ## if not present in interface shorctus add it to the list #if str(self.address) not in ip_cached_list: # # recalculate cached_ip_list # recalculated_ip_cached_list = [] # for ip in self.interface.ip_set.all(): # recalculated_ip_cached_list.append(str(ip.address)) # # rebuild string in format "<ip_1>, <ip_2>" # self.interface.data['ip_addresses'] = recalculated_ip_cached_list # self.interface.save() @property def owner(self): return self.interface.owner if 'grappelli' in settings.INSTALLED_APPS: @staticmethod def autocomplete_search_fields(): return ('address__icontains',)
def ternary(cond1, result1, result2): if cond1: return result1 e
lse:
return result2
.7]], ... evidence=[('X', 0)], ... evidence_card=2) >>> z_trans_cpd = TabularCPD(('Z', 1), 2, [[0.4, 0.7], ... [0.6, 0.3]], ... evidence=[('Z', 0)], ... evidence_card=2) >>> dbnet.add_cpds(z_start_cpd, z_trans_cpd, x_i_cpd, y_i_cpd) >>> dbnet.initialize_initial_state() >>> dbn_inf = DBNInference(dbnet) >>> dbn_inf.start_junction_tree.nodes() [(('X', 0), ('Z', 0)), (('X', 0), ('Y', 0))] >>> dbn_inf.one_and_half_junction_tree.nodes() [(('Z', 1), ('Z', 0)), (('Y', 1), ('X', 1)), (('Z', 1), ('X', 1))] References: ---------- [1] Dynamic Bayesian Networks: Representation, Inference and Learning by Kevin Patrick Murphy http://www.cs.ubc.ca/~murphyk/Thesis/thesis.pdf Public Methods: -------------- forward_inference backward_inference query """ super(DBNInference, self).__init__(model) self.interface_nodes_0 = model.get_interface_nodes(time_slice=0) self.interface_nodes_1 = model.get_interface_nodes(time_slice=1) start_markov_model = self.start_bayesian_model.to_markov_model() one_and_half_markov_model = self.one_and_half_model.to_markov_model() combinations_slice_0 = tee(combinations(self.interface_nodes_0, 2), 2) combinations_slice_1 = combinations(self.interface_nodes_1, 2) start_markov_model.add_edges_from(combinations_slice_0[0]) one_and_half_markov_model.add_edges_from(chain(combinations_slice_0[1], combinations_slice_1)) self.one_and_half_junction_tree = one_and_half_markov_model.to_junction_tree() self.start_junction_tree = start_markov_model.to_junction_tree() self.start_interface_clique = self._get_clique(self.start_junction_tree, self.interface_nodes_0) self.in_clique = self._get_clique(self.one_and_half_junction_tree, self.interface_nodes_0) self.out_clique = self._get_clique(self.one_and_half_junction_tree, self.interface_nodes_1) def _shift_nodes(self, nodes, time_slice): """ Shifting the nodes to a certain required timeslice. Parameters: ---------- nodes: list, array-like List of node names. nodes that are to be shifted to some other time slice. time_slice: int time slice where to shift the nodes. """ return [(node[0], time_slice) for node in nodes] def _get_clique(self, junction_tree, nodes): """ Extracting the cliques from the junction tree which are a subset of the given nodes. Parameters: ---------- junction_tree: Junction tree from which the nodes are to be extracted. nodes: iterable container A container of nodes (list, dict, set, etc.). """ return [clique for clique in junction_tree.nodes() if set(nodes).issubset(clique)][0] def _get_evidence(self, evidence_dict, time_slice, shift): """ Getting the evidence belonging to a particular timeslice. Parameters: ---------- evidence: dict a dict key, value pair as {var: state_of_var_observed} None if no evidence time: int the evidence corresponding to the time slice shift: int shifting the evidence corresponding to the given time slice. """ if evidence_dict: return {(node[0], shift): evidence_dict[node] for node in evidence_dict if node[1] == time_slice} def _marginalize_factor(self, nodes, factor): """ Marginalizing the factor selectively for a set of variables. Parameters: ---------- nodes: list, array-like A container of nodes (list, dict, set, etc.). factor: factor factor which is to be marginalized. """ marginalizing_nodes = list(set(factor.scope()).difference(nodes)) return factor.marginalize(marginalizing_nodes, inplace=False) def _update_belief(self, belief_prop, clique, clique_potential, message=None): """ Method for updating the belief. Parameters: ---------- belief_prop: Belief Propagation Belief Propagation which needs to be updated. in_clique: clique The factor which needs to be updated corresponding to the input clique. out_clique_potential: factor Multiplying factor which will be multiplied to the factor corresponding to the clique. """ old_factor = belief_prop.junction_tree.get_factors(clique) belief_prop.junction_tree.remove_factors(old_factor) if message: if message.scope() and clique_potential.scope(): new_factor = old_factor * message new_factor = new_factor / clique_potential else: new_factor = old_factor else: new_factor = old_factor * clique_potential belief_prop.junction_tree.add_factors(new_factor) belief_prop.calibrate() def _get_factor(self, belief_prop, evidence): """ Extracts the required factor from the junction tree. Parameters: ---------- belief_prop: Belief Propagation Belief Propagation which needs to be updated. evidence: dict a dict key, value pair as {var: state_of_var_observed} """ final_factor = factor_product(*belief_prop.junction_tree.get_factors()) if evidence: for var in evidence: if var in final_factor.scope(): final_factor.reduce([(var, evidence[var])]) return final_factor def _shift_factor(self, factor, shift): """ Shifting the factor to a certain required time slice. Parameters: ---------- factor: Factor The factor which needs to be shifted. shift: int The new timeslice to which the factor should belong to. """ new_scope = self._shift_nodes(factor.scope(), shift) return Factor(new_scope, factor.cardinality, factor.values) def forward_inference(self, variables, evidence=None, args=None): """ Forward inference method using belief propagation.
Parameters: ---------- variables: list list of variables for which y
ou want to compute the probability evidence: dict a dict key, value pair as {var: state_of_var_observed} None if no evidence Examples: -------- >>> from pgmpy.factors import TabularCPD >>> from pgmpy.models import DynamicBayesianNetwork as DBN >>> from pgmpy.inference import DBNInference >>> dbnet = DBN() >>> dbnet.add_edges_from([(('Z', 0), ('X', 0)), (('X', 0), ('Y', 0)), ... (('Z', 0), ('Z', 1))]) >>> z_start_cpd = TabularCPD(('Z', 0), 2, [[0.5, 0.5]]) >>> x_i_cpd = TabularCPD(('X', 0), 2, [[0.6, 0.9], ... [0.4, 0.1]], ... evidence=[('Z', 0)], ... evidence_card=2) >>> y_i_cpd = TabularCPD(('Y', 0), 2, [[0.2, 0.3], ... [0.8, 0.7]], ... evidence=[('X', 0)], ... evidence_card=2) >>> z_trans_cpd = TabularCPD(('Z', 1), 2, [[0.4, 0.7], ... [0.6, 0.3]], ... evidence=[('Z', 0)], ... evidence_card=2) >>> dbnet.add_cpds(z_start_cpd, z_trans_cpd, x_i_cpd, y_i_cpd) >>> dbnet.initialize_initial_state() >>> dbn_inf = DBNInference(dbnet) >>> dbn_inf.forward_inference([('X', 2)], {('Y', 0):1, ('Y', 1):0, ('Y'
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import six from apscheduler.triggers.cron import CronTrigger from st2common.exceptions.apivalidation import ValueValidationException from st2common.constants.triggers import SYSTEM_TRIGGER_TYPES from st2common.constants.triggers import CRON_TIMER_TRIGGER_REF from st2common.util import schema as util_schema import st2common.operators as criteria_operators __all__ = [ 'validate_criteria', 'validate_trigger_parameters' ] allowed_operators = criteria_operators.get_allowed_operators() def validate_criteria(criteria): if not isinstance(criteria, dict): raise ValueValidationException('Criteria should be a dict.') for key, value in six.iteritems(criteria): operator = value.get('type', None) if operator is None: raise ValueValidationException('Operator not specified for field: ' + key) if operator not in allowed_operators: raise ValueValidationException('For field: ' + key + ', operator ' + operator + ' not in list of allowed operators: ' + str(allowed_operators.keys())) pattern = value.get('pattern', None) if pattern is None: raise ValueValidationException('For field: ' + key + ', no pattern specified ' + 'for operator ' + operator) def validate_trigger_parameters(trigger_type_ref, parameters): """ This function validates parameters for system triggers (e.g. webhook and timers). Note: Eventually we should also validate parameters for user defined triggers which correctly specify JSON schema for the parameters. :param trigger_type_ref: Reference of a trigger type. :type trigger_type_ref: ``str`` :param parameters: Trigger p
arameters. :type parameters: ``dict`` """ if not trigger_type_ref:
return None if trigger_type_ref not in SYSTEM_TRIGGER_TYPES: # Not a system trigger, skip validation for now return None parameters_schema = SYSTEM_TRIGGER_TYPES[trigger_type_ref]['parameters_schema'] cleaned = util_schema.validate(instance=parameters, schema=parameters_schema, cls=util_schema.CustomValidator, use_default=True, allow_default_none=True) # Additional validation for CronTimer trigger # TODO: If we need to add more checks like this we should consider abstracting this out. if trigger_type_ref == CRON_TIMER_TRIGGER_REF: # Validate that the user provided parameters are valid. This is required since JSON schema # allows arbitrary strings, but not any arbitrary string is a valid CronTrigger argument # Note: Constructor throws ValueError on invalid parameters CronTrigger(**parameters) return cleaned
#!/usr/bin/env python """ Extract minor planet orbital elements and discovery dates to json. Orbital elements are extracted from the file MPCORB.DAT: https://minorplanetcenter.net/iau/MPCORB/MPCORB.DAT Discovery dates are extracted from the file NumberedMPs.txt: https://minorplanetcenter.net/iau/lists/NumberedMPs.txt Usage: ====== ./data_to_json.py [-h] [-c] [N] Parse orbital and discovery data to json. positional arguments: N maximum number of results optional arguments: -h, --help show this help message and exit -c, --compact output as compact json format TODO: ===== - Get range between discovery dates - Create an API (python server) """ OUTPUT_FILE = 'catalog.json' MPCORB_FILE = 'MPCORB.DAT' NUMMPS_FILE = 'NumberedMPs.txt' import os, sys, json, argparse from time import time from datetime import datetime from itertools import izip from operator import itemgetter # Change working directory to the module path os.chdir(os.path.dirname(os.path.realpath(__file__))) # Datetime to Julian date def dt2jd(dt):
dt = dt - datetime(2000, 1, 1) return dt.days + (dt.seconds + dt.microseconds / 1000000) / 86400 + 2451544.5 # Packed date to Datetime def pd2dt(pd): y = int(str(int(pd[0], 36)) + pd[1:3]) m = int(pd[3], 36) d = int(pd[4], 36) retur
n datetime(y, m, d) # Packed to Julian date def pd2jd(pd): dt = pd2dt(pd) return dt2jd(dt) def main(argv): # Parse argumanets parser = argparse.ArgumentParser(description='Parse orbital and discovery data to json.') parser.add_argument('amount', metavar='N', type=int, nargs='?', help='maximum number of results') parser.add_argument('-c', '--compact', action='store_true', dest='compact', help='output as compact json format') args = parser.parse_args() print 'Extracting MPC discovery dates and orbital elements ...' start_time = time() # Extract the discovery dates from NumberedMPs.txt # For a description of the format see https://minorplanetcenter.net/iau/lists/NumberedMPs000001.html mpcs_disc = {} for line in open(NUMMPS_FILE): nr = int(line[1:7].strip().replace('(', '')) # Extract the discovery date (YYYY MM DD) and convert it to Julian date date = datetime.strptime(line[41:51], '%Y %m %d') mpcs_disc[nr] = dt2jd(date) """ Extract the orbital elements from MPCORB.DAT For a description of the format see https://minorplanetcenter.net/iau/info/MPOrbitFormat.html The following columns are extracted: epoch = Date for which the information is valid (packed date) a = Semi-major axis (AU) e = Orbital eccentricity (0..1) i = Inclination to the ecliptic (degrees) W = Longitude of ascending node (degrees) w = Argument of perihelion (degrees) M = Mean anomaly (degrees) n = Mean daily motion (degrees per day) """ mpcs = [] count = 0 for line in open(MPCORB_FILE): nr = line[167:173].strip().replace('(', '') if not nr: continue nr = int(nr) # Skip if discovery date is missing if nr not in mpcs_disc: print 'Skipping MPC #%d (no discovery date found)' % (nr) continue # Extract the orbital elements _, _, _, epoch, M, w, W, i, e, n, a, _ = line.split(None, 11) mpc = (mpcs_disc[nr], pd2jd(epoch), float(a), float(e), float(i), float(W), float(w), float(M), float(n)) mpcs.append(mpc) # Maximum requested reached? count += 1 if count == args.amount: break # Sort by discovery date mpcs.sort(key=itemgetter(0)) if args.compact: output = mpcs else: keys = ['disc', 'epoch', 'a', 'e', 'i', 'W', 'w', 'M', 'n'] output = [dict(izip(keys, mpc)) for mpc in mpcs] with open(OUTPUT_FILE, 'w') as outfile: json.dump(output, outfile) # json.dump(output, outfile, indent=2, separators=(',', ':')) print 'Finished extracting %d MPCs in %s seconds.' % (len(mpcs), time()-start_time) if __name__ == '__main__': main(sys.argv[1:])
# -*-coding:Utf-8 -* # Copyright (c) 2013 LE GOFF Vincent # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # * Neither the name of the copyright holder nor the names of its contributors # may be used to endorse or promote products derived from this software # without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT # OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. """Fichier contenant la volonté RelacherGouvernail""" import re from secondaires.navigation.equipage.ordres.relacher_gouvernail import \ RelacherGouvernail as OrdreRelacherGouvernail from secondaires.navigation.equipage.ordres.long_deplacer import LongDeplacer from secondaires.navigation.equipage.volonte import Volonte class RelacherGouvernail(Volonte): """Classe représentant une volonté. Cette volonté demande simplement au matelot qui tient le gouvernail de le relâcher. Comme la plupart des volontés, le matelot est encouragé à retourner dans sa salle d'affectation après coup. """ cle = "relacher_gouvernail" ordre_court = re.compile(r"^rg$", re.I) ordre_long = re.compile(r"^relacher\s+gouvernail?$", re.I) def choisir_matelots(self, exception=None): """Retourne le matelot le plus apte à accomplir la volonté.""" navire = self.navire equipage = navire.equipage gouvernail = self.navire.gouvernail if gouvernail is None or gouvernail.tenu is None: return None personnage = gouvernail.tenu matelot = equipage.get_matelot_depuis_personnage(personnage) return matelot def executer(self, matelot): """Exécute la volonté.""" if matelot is None: self.terminer() return navire = self.navire ordres = [] matelot.invalider_ordres("virer") relacher =
OrdreRelacherGouvernail(matelot, navire) ordres.append(relacher)
ordres.append(self.revenir_affectation(matelot)) self.ajouter_ordres(matelot, ordres) def crier_ordres(self, personnage): """On fait crier l'ordre au personnage.""" msg = "{} s'écrie : relâchez la barre !".format( personnage.distinction_audible) self.navire.envoyer(msg) @classmethod def extraire_arguments(cls, navire): """Extrait les arguments de la volonté.""" return ()
from trifle.server.views.frontend import frontend f
rom trifle.server.views.api import api from trifle.server.views.monitor impor
t monitor from trifle.server.views.configure import configure
""" Mail.ru OAuth2 backend, docs at: http://psa.matiasaguirre.net/docs/backends/mailru.html """ from hashlib import md5 from social.p3 import unquote from social.backends.oauth import BaseOAuth2 class MailruOAuth2(BaseOAuth2): """Mail.ru authentication backend""" name = 'mailru-oauth2' ID_KEY = 'uid' AUTHORIZATION_URL = 'https://connect.mail.ru/oauth/authorize' ACCESS_TOKEN_URL = 'https://connect.mail.ru/oauth/token' ACCESS_TOKEN_METHOD = 'POST' EXTRA_DATA = [('refresh_token', 'refresh_token'), ('expires_in', 'expires')] def get_user_details(self, response): """Return user details from Mail.ru request""" fullname, first_name, last_name = self.get_user_names( first_name=unquote(response['first_name']), last_name=unquote(response['last_name']) ) return {'username': unquote(response['nick']), 'email': unquote(response['email']),
'fullname': fullname, 'first_name': first_name, 'last_name': last_name} def user_data(self, access_token, *args, **kwargs): """Return user data from Mail.ru REST API""" key, secret = self.get_key_and_secret() data = {'method': 'users.getInfo',
'session_key': access_token, 'app_id': key, 'secure': '1'} param_list = sorted(list(item + '=' + data[item] for item in data)) data['sig'] = md5( (''.join(param_list) + secret).encode('utf-8') ).hexdigest() return self.get_json('http://www.appsmail.ru/platform/api', params=data)[0]
# Halley provider definition # Mandatory imports from .Provider import Provider from rfeed import * # Optional imports import requests, mimetypes, logging logging.basicConfig(level=logging.INFO) from bs4 import BeautifulSoup as bs # Halley provider class inherit from the Provider one defined in providers/Provider.py file # # Inherited attributes: # - output_format # - tz # - language # - feed_base_url # - docs_base_url # - specs_base_url # - options # # Inherited methods: # - dt # # Specific methods to customize: # - opts: using options from csv file properly # - urls: extract single item urls from index page # - item: extract and structure data from single item page # # WARNING: class name is also the value of provider column in elenco_albi.csv # class Halley(Provider): # Mandatory attributes input_format = "DD/MM/YYYY" # Optional attributes # ... # Transform and prepare options from CSV row (options column) def opts(self, opt): self.options["base_url"] = "http://halleyweb.com/%s/mc/" % opt return self # Mandatory for chaining # Scrape index page and return single item urls def urls(self): index_page_url = self.options["base_url"] + "mc_gridev_messi_datigrid.php" index_page_response = requests.get(index_page_url) # Manage exceptions and return consistent values if index_page_response.status_code != 200: logging.warning("Index page %s unavailable!" % index_page_url) return [] # Parsing with BeautifulSoup index_page_soup = bs(index_page_response.content,"lxml") logging.info("Scraping %s:" % index_page_url) # Very simple scraping of single item urls for row in index_page_soup.findAll("row"): single_page_id = row['id'].strip() single_page_url = self.options["base_url"] + "mc_gridev_dettaglio.php?id_pubbl=%s" % single_page_id yield single_page_url # Scrape a single item page from its url and return structured data as Item() instance (from rfeed) def item(self,single_page_url): single_page_response = requests.get(single_page_url) if single_page_response.status_code != 200 or "non può essere visualizzato" in single_page_response.text: print("Single page %s unavailable!" % single_page_url) return None # None items are dropped in final feed single_page_soup = bs(single_page_response.content,"lxml") logging.debug("- Scraping %s" % single_page_url) ### MAIN SCRAPING LOGIC ### contents = [] for cell in single_page_soup.select("td"): if cell.findAll('a'): contents.append([]) for a in cell.findAll('a'): if a.get('onlick'): href = a['onclick'].replace("window.open('","").replace("');","") elif a.get('href'): href = a['href'] else: href = "" contents[-1].append({ "content": self.clean_string(a.text), "href": self.options["base_url"] + self.clean_string(href) }) else: contents.append(self.clean_string(cell.text).strip(':')) document = dict([tuple(contents[i:i+2]) for i in range(0,len(contents),2)]) document["Documento"] = document["Documento"] if "Documento" in document and isinstance(document["Documento"],list) else [] document["Allegati"] = document["Allegati"] if "Allegati" in document and isinstance(document["Allegati"],list) else [] ### END SCRAPING LOGIC ### # Return scraping data as an Item() instance retur
n Item( title = document["Oggetto Atto"], link = single_page_url, description = document["Oggetto Atto"], pubDate = self.format_datetime(document.get("Data Atto") or document.get("Data Inizio Pubblicazione")), guid = Guid(single_page_url), categories = [ c for c in [ Category( domain = self.specs_base_url + "#" +
"item-category-uid", category = "%s/%s" % (document["Anno di Pubblicazione"], document["Numero Pubblicazione"]) ), Category( domain = self.specs_base_url + "#" + "item-category-type", category = document["Tipo Atto"] ) if document.get("Tipo Atto") else None, Category( domain = self.specs_base_url + "#" + "item-category-pubStart", category = self.format_datetime(document.get("Data Inizio Pubblicazione") or document.get("Data Atto")) ), Category( domain = self.specs_base_url + "#" + "item-category-pubEnd", category = self.format_datetime(document["Data Fine Pubblicazione"]) ) if document.get("Data Fine Pubblicazione") else None, Category( domain = self.specs_base_url + "#" + "item-category-unit", category = document["Mittente"] ) if document.get("Mittente") else None ] if c is not None ], enclosure = [ Enclosure( url = enclosure["href"], length = 3000, type = mimetypes.guess_type(enclosure["content"])[0] or "application/octet-stream" ) for enclosure in document["Documento"] + document["Allegati"] ] ) # Simple and generic wrapper around item() method if a list of urls is passed # Unavailable items are filtered out def items(self, single_page_urls): for single_page_url in single_page_urls: try: item = self.item(single_page_url) except Exception as e: logging.warning("Error scraping page %s: %s" % ( single_page_url , e )) continue if item: yield item # Public method called by scraper.py def scrape(self): return self.items(self.urls())
# coding: utf-8 """ Copyright 2015 SmartBear Software Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ from pprint import pformat from six import iteritems class V1PersistentVolumeClaimList(object): """ NOTE: This class is auto generated by the swagger code
generator program. Do not edit the class manually. """ def __init__(self): """ Swagger model :param dict swaggerTypes: The key is attribute name and the value is attribute type. :param dict attributeMa
p: The key is attribute name and the value is json key in definition. """ self.swagger_types = { 'kind': 'str', 'api_version': 'str', 'metadata': 'V1ListMeta', 'items': 'list[V1PersistentVolumeClaim]' } self.attribute_map = { 'kind': 'kind', 'api_version': 'apiVersion', 'metadata': 'metadata', 'items': 'items' } self._kind = None self._api_version = None self._metadata = None self._items = None @property def kind(self): """ Gets the kind of this V1PersistentVolumeClaimList. kind of object, in CamelCase; cannot be updated; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#types-kinds :return: The kind of this V1PersistentVolumeClaimList. :rtype: str """ return self._kind @kind.setter def kind(self, kind): """ Sets the kind of this V1PersistentVolumeClaimList. kind of object, in CamelCase; cannot be updated; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#types-kinds :param kind: The kind of this V1PersistentVolumeClaimList. :type: str """ self._kind = kind @property def api_version(self): """ Gets the api_version of this V1PersistentVolumeClaimList. version of the schema the object should have; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#resources :return: The api_version of this V1PersistentVolumeClaimList. :rtype: str """ return self._api_version @api_version.setter def api_version(self, api_version): """ Sets the api_version of this V1PersistentVolumeClaimList. version of the schema the object should have; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#resources :param api_version: The api_version of this V1PersistentVolumeClaimList. :type: str """ self._api_version = api_version @property def metadata(self): """ Gets the metadata of this V1PersistentVolumeClaimList. standard list metadata; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#types-kinds :return: The metadata of this V1PersistentVolumeClaimList. :rtype: V1ListMeta """ return self._metadata @metadata.setter def metadata(self, metadata): """ Sets the metadata of this V1PersistentVolumeClaimList. standard list metadata; see http://releases.k8s.io/v1.0.4/docs/api-conventions.md#types-kinds :param metadata: The metadata of this V1PersistentVolumeClaimList. :type: V1ListMeta """ self._metadata = metadata @property def items(self): """ Gets the items of this V1PersistentVolumeClaimList. a list of persistent volume claims; see http://releases.k8s.io/v1.0.4/docs/persistent-volumes.md#persistentvolumeclaims :return: The items of this V1PersistentVolumeClaimList. :rtype: list[V1PersistentVolumeClaim] """ return self._items @items.setter def items(self, items): """ Sets the items of this V1PersistentVolumeClaimList. a list of persistent volume claims; see http://releases.k8s.io/v1.0.4/docs/persistent-volumes.md#persistentvolumeclaims :param items: The items of this V1PersistentVolumeClaimList. :type: list[V1PersistentVolumeClaim] """ self._items = items def to_dict(self): """ Return model properties dict """ result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() else: result[attr] = value return result def to_str(self): """ Return model properties str """ return pformat(self.to_dict()) def __repr__(self): """ For `print` and `pprint` """ return self.to_str()
# -*- coding: utf-8 -*- # # Copyright (C) Pootle contributors. # # This file is a part of the Pootle project. It is distributed under the GPL3 # or later license. See the LICENSE file for a copy of the license and the # AUTHORS file for copyright and authorship information. import os os.environ['DJANGO_SETTINGS_MODULE'] = 'pootle.settings' from django.core.management.base import CommandError from pootle_format.models import Format from pootle_project.models import Project from . import PootleCommand class Command(PootleCommand): help = "Manage Store formats." def add_arguments(self, parser): super(Command, self).add_arguments(parser) parser.add_argument( 'filetype', action='store', help="File type to set") parser.add_argument( '--from-filetype', action='store', help="Only convert Stores of this file type") parser.add_argument( '--matching', action='store', help="Glob match Store path excluding extension") def get_projects(self): if not self.projects: return Project.objects.all() return
Project.objects.filter(code__in=self.projects) def get_filetype(self, name): try: return Format.objects.get(name=name)
except Format.DoesNotExist: raise CommandError("Unrecognized filetype '%s'" % name) def handle_all(self, **options): filetype = self.get_filetype(options["filetype"]) from_filetype = ( options["from_filetype"] and self.get_filetype(options["from_filetype"]) or None) for project in self.get_projects(): # add the filetype to project, and convert the stores project.filetype_tool.add_filetype(filetype) project.filetype_tool.set_filetypes( filetype, from_filetype=from_filetype, matching=options["matching"])
#!/usr/bin/env python """ MaTiSSe.py, Markdown To Impressive Scientific Slides """ from __future__ import print_function import argparse import os import sys from matisse_config import MatisseConfig from presentation import Presentation __appname__ = "MaTiSSe.py" __description__ = "MaTiSSe.py, Markdown To Impressive Scientific Slides" __long_description__ = "MaTiSSe.py, Markdown To Impressive Scientific Slides. It is a very simple and stupid-to-use (KISS) presentation maker based on simple markdown syntax producing high quality first-class html/css presentation with great support for scientific contents." __version__ = "1.2.0" __author__ = "Stefano Zaghi" __author_email__ = "stefano.zaghi@gmail.com" __license__ = "GNU General Public License v3 (GPLv3)" __url__ = "https://github.com/szaghi/MaTiSSe" __sample__ = r""" --- theme: - backround: black --- # Part 1 ## Section 1 ### Subsection 1 #### Slide 1 ##### A H5 heading Lorem ipsum dolor sit amet... ##### Math $$ x=\frac{-b\pm\sqrt{b^2-4ac}}{2a} $$ $note $content{Just a note enviroment} $endnote """ def main(): """Main function.""" cliparser = argparse.ArgumentParser(prog=__appname__, description='MaTiSSe.py, Markdown To Impressive Scientific Slides') cliparser.add_argument('-v', '--version', action='version', help='Show version', version='%(prog)s ' + __version__) cliparser.add_argument('-i', '--input', required=False, action='store', default=None, help='Input file name of markdown source to be parsed') cliparser.add_argument('-o', '--output', required=False, action='store', default=None, help='Output directory name containing the presentation files') cliparser.add_argument('-t', '--theme', required=False, action='store', default=None, help='Select a builtin theme for initializing a new sample presentation') cliparser.add_argument('-hs', '--highlight-style', required=False, action='store', default='github.css', help='Select the highlight.js style (default github.css); select "disable" to disable highligth.js', metavar='STYLE.CSS') cliparser.add_argument('-s', '--sample', required=False, action='store', default=None, help='Generate a new sample presentation as skeleton of your one') cliparser.add_argument('--toc-at-chap-beginning', required=False, action='store', default=None, help='Insert Table of Contents at each chapter beginning (default no): to activate indicate the TOC depth', metavar='TOC-DEPTH') cliparser.add_argument('--toc-at-sec-beginning', required=False, action='store', default=None, help='Insert Table of Contents at each section beginning (default no): to activate indicate the TOC depth', metavar='TOC-DEPTH') cliparser.add_argument('--toc-at-subsec-beginning', required=False, action='store', default=None, help='Insert Table of Contents at each subsection beginning (default no): to activate indicate the TOC depth', metavar='TOC-DEPTH') cliparser.add_argument('--print-preamble', required=False, action='store_true', default=None, help='Print the preamble data as parsed from source') cliparser.add_argument('--print-css', required=False, action='store_true', default=None, help='Print the css as parsed from source (if done)')
cliparser.add_argument('--print-options', required=False, action='store_true', default=None, help='Print the available options for each presentation element') cliparser.add_
argument('--print-highlight-styles', required=False, action='store_true', default=None, help='Print the available highlight.js style (default github.css)') cliparser.add_argument('--print-themes', required=False, action='store_true', default=None, help='Print the list of the builtin themes') cliparser.add_argument('--verbose', required=False, action='store_true', default=False, help='More verbose printing messages (default no)') cliparser.add_argument('--online-MathJax', required=False, action='store_true', default=None, help='Use online rendering of LaTeX equations by means of online MathJax service; default use offline, local copy of MathJax engine') cliargs = cliparser.parse_args() config = MatisseConfig(cliargs=cliargs) if cliargs.input: if not os.path.exists(cliargs.input): sys.stderr.write('Error: input file "' + cliargs.input + '" not found!') sys.exit(1) else: with open(cliargs.input, 'r') as mdf: source = mdf.read() presentation = Presentation() if config.verbose: print('Parsing source ' + cliargs.input) presentation.parse(config=config, source=source) if cliargs.output: output = cliargs.output else: output = os.path.splitext(os.path.basename(cliargs.input))[0] output = os.path.normpath(output) config.make_output_tree(output=output) presentation.save(config=config, output=output) if __name__ == '__main__': main()
ve(*args, **kwargs) def is_english(self): if self.language == ENGLISH: return True return False def is_spanish(self): if self.language == SPANISH: return True return False def is_vietnamese(self): if self.language == VIETNAMESE: return True return False def is_chinese(self): if self.language == CHINESE: return True return False def is_tagalog(self): if self.language == TAGALOG: return True return False def get_kind_of_ballot_item(self): if positive_value_exists(self.contest_office_we_vote_id): return OFFICE elif positive_value_exists(self.candidate_campaign_we_vote_id): return CANDIDATE elif positive_value_exists(self.politician_we_vote_id): return POLITICIAN elif positive_value_exists(self.contest_measure_we_vote_id): return MEASURE return None def get_ballot_item_we_vote_id(self): if positive_value_exists(self.contest_office_we_vote_id): return self.contest_office_we_vote_id elif positive_value_exists(self.candidate_campaign_we_vote_id): return self.candidate_campaign_we_vote_id elif positive_value_exists(self.politician_we_vote_id): return self.politician_we_vote_id elif positive_value_exists(self.contest_measure_we_vote_id): return self.contest_measure_we_vote_id return None def more_info_credit_text(self): if self.more_info_credit == BALLOTPEDIA: return "Courtesy of Ballotpedia.org" if self.more_info_credit == WIKIPEDIA: return "Courtesy of Wikipedia.org" return "" class QuickInfoManager(models.Manager): def __unicode__(self): return "QuickInfoManager" def fetch_we_vote_id_from_local_id(self, quick_info_id): if positive_value_exists(quick_info_id): results = self.retrieve_quick_info_from_id(quick_info_id) if results['quick_info_found']: quick_info = results['quick_info'] return quick_info.we_vote_id else: return None else: return None def retrieve_contest_office_quick_info(self, contest_office_we_vote_id): quick_info_id = 0 quick_info_we_vote_id = None candidate_we_vote_id = None politician_we_vote_id = None contest_measure_we_vote_id = None quick_info_manager = QuickInfoManager() return quick_info_manager.retrieve_quick_info( quick_info_id, quick_info_we_vote_id, contest_office_we_vote_id, candidate_we_vote_id, politician_we_vote_id, contest_measure_we_vote_id ) def retrieve_candidate_quick_info(self, candidate_we_vote_id): quick_info_id = 0 quick_info_we_vote_id = None politician_we_vote_id = None contest_measure_we_vote_id = None contest_office_we_vote_id = None quick_info_manager = QuickInfoManager() return quick_info_manager.retrieve_quick_info( quick_info_id, quick_info_we_vote_id, contest_office_we_vote_id, candidate_we_vote_id, politician_we_vote_id, contest_measure_we_vote_id ) def retrieve_contest_measure_quick_info(self, contest_measure_we_vote_id): quick_info_id = 0 quick_info_we_vote_id = None candidate_we_vote_id = None politician_we_vote_id = None contest_office_we_vote_id = None quick_info_manager = QuickInfoManager() return quick_info_manager.retrieve_quick_info( quick_info_id, quick_info_we_vote_id, contest_office_we_vote_id, candidate_we_vote_id, politician_we_vote_id, contest_measure_we_vote_id ) def retrieve_quick_info_from_id(self, quick_info_id): quick_info_we_vote_id = None candidate_we_vote_id = None politician_we_vote_id = None contest_office_we_vote_id = None contest_measure_we_vote_id = None quick_info_manager = QuickInfoManager() return quick_info_manager.retrieve_quick_info( quick_info_id, quick_info_we_vote_id, contest_office_we_vote_id, candidate_we_vote_id, politician_we_vote_id, contest_measure_we_vote_id ) def retrieve_quick_info_from_we_vote_id(self, quick_info_we_vote_id): quick_info_id = 0 candidate_we_vote_id = None politician_we_vote_id = None contest_office_we_vote_id = None contest_measure_we_vote_id = None quick_info_manager = QuickInfoManager() return quick_info_manager.retrieve_quick_info( quick_info_id, quick_info_we_vote_id, contest_office_we_vote_id, candidate_we_vote_id, politician_we_vote_id, contest_measure_we_vote_id ) def retrieve_quick_info(self, quick_info_id, quick_info_we_vote_id=None, contest_office_we_vote_id=None, candidate_we_vote_id=None, politician_we_vote_id=None, contest_measure_we_vote_id=None): error_result = False exception_does_not_exist = False exception_multiple_object_returned = False quick_info_on_stage = QuickInfo() success = False try: if positive_value_exists(quick_info_id): status = "RETRIEVE_QUICK_INFO_FOUND_WITH_QUICK_INFO_ID" quick_info_on_stage = QuickInfo.objects.get(id=quick_info_id) quick_info_id = quick_info_on_stage.id success = True elif positive_value_exists(quick_info_we_vote_id): status = "RETRIEVE_QUICK_INFO_FOUND_WITH_WE_VOTE_ID" quick_info_on_stage = QuickInfo.objects.get(we_vote_id=quick_info_we_vote_id) quick_info_id = quick_info_on_stage.id success = True elif positive_value_exists(contest_office_we_
vote_id): status = "RETRIEVE_QUICK_INFO_FOUND_WITH_OFFICE_WE_VOTE_ID" quick_info_on_stage = QuickInfo.objects.get( contest_office_we_vote_id=contest_office_we_vote_id) quick_info_id = quick_info_on_stage.id success = True elif positive_value_exists(candidate_we_vote_id): sta
tus = "RETRIEVE_QUICK_INFO_FOUND_WITH_CANDIDATE_WE_VOTE_ID" quick_info_on_stage = QuickInfo.objects.get( candidate_campaign_we_vote_id=candidate_we_vote_id) quick_info_id = quick_info_on_stage.id success = True elif positive_value_exists(politician_we_vote_id): status = "RETRIEVE_QUICK_INFO_FOUND_WITH_POLITICIAN_WE_VOTE_ID" quick_info_on_stage = QuickInfo.objects.get( politician_we_vote_id=politician_we_vote_id) quick_info_id = quick_info_on_stage.id success = True elif positive_value_exists(contest_measure_we_vote_id): status = "RETRIEVE_QUICK_INFO_FOUND_WITH_MEASURE_WE_VOTE_ID" quick_info_on_stage = QuickInfo.objects.get( contest_measure_we_vote_id=contest_measure_we_vote_id) quick_info_id = quick_info_on_stage.id success = True else: status = "RETRIEVE_QUICK_INFO_INSUFFICIENT_VARIABLES" except QuickInfo.MultipleObjectsReturned as e: handle_record_found_more_than_one_exception(e, logger=logger) error_result = True exception_multiple_object_returned = True success = False status = "RETRIEVE_QUICK_INFO_MULTIPLE_FOUND" except QuickInfo.DoesNotExist: error_result = False exception_does_not_e
# -*- coding: utf-8 -*- # # This file is part of PyBuilder # # Copyright 2011-2020 PyBuilder Team # # Licensed under the Apache License, Version 2.0 (the "License"); # y
ou may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limit
ations under the License. from unittest import TestCase from pybuilder.graph_utils import Graph class GraphUtilsTests(TestCase): def test_should_find_trivial_cycle_in_graph_when_there_is_one(self): graph_with_trivial_cycle = Graph({"a": "a"}) self.assertIsNotNone(graph_with_trivial_cycle.assert_no_trivial_cycles_present()) def test_should_find_trivial_cycle_in_graph_when_there_are_two(self): graph_with_trivial_cycles = Graph({"a": "a", "b": "b"}) self.assertIsNotNone(graph_with_trivial_cycles.assert_no_trivial_cycles_present()) def test_should_find_trivial_cycle_in_graph_when_searching_for_cycles(self): graph_with_trivial_cycle = Graph({"a": "a"}) self.assertIsNotNone(graph_with_trivial_cycle.assert_no_cycles_present()) def test_should_not_find_trivial_cycles_in_graph_when_there_are_none(self): graph_without_trivial_cycle = Graph({"a": "b", "b": "c", "d": "e"}) graph_without_trivial_cycle.assert_no_trivial_cycles_present() def test_should_not_find_cycles_in_graph_when_there_are_none(self): graph_without_cycle = Graph({"a": "b", "b": "c", "d": "e"}) graph_without_cycle.assert_no_cycles_present() def test_should_find_simple_nontrivial_cycle_in_graph_when_there_is_one(self): graph_with_simple_cycle = Graph({"a": "b", "b": "a"}) self.assertIsNotNone(graph_with_simple_cycle.assert_no_cycles_present()) def test_should_find_long_nontrivial_cycle_in_graph_when_there_is_one(self): graph_with_long_cycle = Graph({"a": "b", "b": "c", "c": "d", "d": "b"}) self.assertIsNotNone(graph_with_long_cycle.assert_no_cycles_present()) def test_should_find_long_nontrivial_cycle_in_graph_when_there_are_two(self): graph_with_long_cycle = Graph({"a": "b", "b": "c", "c": "a", "d": "e", "e": "f", "f": "d"}) self.assertIsNotNone(graph_with_long_cycle.assert_no_cycles_present())
#!/usr/bin/env python from __future__ import division import math import optparse import sys #------------------------------------------------------------------------------- # Illumicone simulator, based on code from: https://github.com/zestyping/openpixelcontrol NUM_STRINGS = 48 PIXELS_PER_STRING = 100 SCALE = 7 # You can also think of this as the length of the strands.
Use 7 for cone_med, 10 for cone_lg. print "scale: " + str(SCALE) PIXEL_DISTANCE = SCALE / PIXELS_PER_STRING print "\npixel distance: " + s
tr(PIXEL_DISTANCE) HEIGHT = math.sqrt(SCALE * SCALE / 2) print "\ncone height: " + str(HEIGHT) MIN_RADIUS = HEIGHT * .1 # i.e. the radius of the little circle on top print "\nmin radius: " + str(MIN_RADIUS) #------------------------------------------------------------------------------- result = ['['] theta = 0 for s in range(NUM_STRINGS): theta = 2 * math.pi * s / NUM_STRINGS for p in range(PIXELS_PER_STRING): z = HEIGHT - PIXEL_DISTANCE * p radius = PIXEL_DISTANCE * p + MIN_RADIUS x = math.cos(theta) * radius y = math.sin(theta) * radius result.append(' {"point": [%.4f, %.4f, %.4f]},' % (x, y, z)) # trim off last comma result[-1] = result[-1][:-1] result.append(']') print '\n'.join(result)
# coding=utf-8 from __future__ import unicode_literals from collections import OrderedDict from .. import BaseProvider localized = True class Provider(BaseProvider): all_colors = OrderedDict(( ("AliceBlue", "#F0F8FF"), ("AntiqueWhite", "#FAEBD7"), ("Aqua", "#00FFFF"), ("Aquamarine", "#7FFFD4"), ("Azure", "#F0FFFF"), ("Beige", "#F5F5DC"), ("Bisque", "#FFE4C4"), ("Black", "#000000"), ("BlanchedAlmond", "#FFEBCD"), ("Blue", "#0000FF"), ("BlueViolet", "#8A2BE2"), ("Brown", "#A52A2A"), ("BurlyWood", "#DEB887"), ("CadetBlue", "#5F9EA0"), ("Chartreuse", "#7FFF00"), ("Chocolate", "#D2691E"), ("Coral", "#FF7F50"), ("CornflowerBlue", "#6495ED"), ("Cornsilk", "#FFF8DC"), ("Crimson", "#DC143C"), ("Cyan", "#00FFFF"), ("DarkBlue", "#00008B"), ("DarkCyan", "#008B8B"), ("DarkGoldenRod", "#B8860B"), ("DarkGray", "#A9A9A9"), ("DarkGreen", "#006400"), ("DarkKhaki", "#BDB76B"), ("DarkMagenta", "#8B008B"), ("DarkOliveGreen", "#556B2F"), ("DarkOrange", "#FF8C00"), ("DarkOrchid", "#9932CC"), ("DarkRed", "#8B0000"), ("DarkSalmon", "#E9967A"), ("DarkSeaGreen", "#8FBC8F"), ("DarkSlateBlue", "#483D8B"), ("DarkSlateGray", "#2F4F4F"), ("DarkTurquoise", "#00CED1"), ("DarkViolet", "#9400D3"), ("DeepPink", "#FF1493"), ("DeepSkyBlue", "#00BFFF"), ("DimGray", "#696969"), ("DodgerBlue", "#1E90FF"), ("FireBrick", "#B22222"), ("FloralWhite", "#FFFAF0"), ("ForestGreen", "#228B22"), ("Fuchsia", "#FF00FF"), ("Gainsboro", "#DCDCDC"), ("GhostWhite", "#F8F8FF"), ("Gold", "#FFD700"), ("GoldenRod", "#DAA520"), ("Gray", "#808080"), ("Green", "#008000"), ("GreenYellow", "#ADFF2F"), ("HoneyDew", "#F0FFF0"), ("HotPink", "#FF69B4"), ("IndianRed", "#CD5C5C"), ("Indigo", "#4B0082"), ("Ivory", "#FFFFF0"), ("Khaki", "#F0E68C"), ("Lavender", "#E6E6FA"), ("LavenderBlush", "#FFF0F5"), ("LawnGreen", "#7CFC00"), ("LemonChiffon", "#FFFACD"), ("LightBlue", "#ADD8E6"), ("LightCoral", "#F08080"), ("LightCyan", "#E0FFFF"), ("LightGoldenRodYellow", "#FAFAD2"), ("LightGray", "#D3D3D3"), ("LightGreen", "#90EE90"), ("LightPink", "#FFB6C1"), ("LightSalmon", "#FFA07A"), ("LightSeaGreen", "#20B2AA"), ("LightSkyBlue", "#87CEFA"), ("LightSlateGray", "#778899"), ("LightSteelBlue", "#B0C4DE"), ("LightYellow", "#FFFFE0"), ("Lime", "#00FF00"), ("LimeGreen", "#32CD32"), ("Linen", "#FAF0E6"), ("Magenta", "#FF00FF"), ("Maroon", "#800000"), ("MediumAquaMarine", "#66CDAA"), ("MediumBlue", "#0000CD"), ("MediumOrchid", "#BA55D3"), ("MediumPurple", "#9370DB"), ("MediumSeaGreen", "#3CB371"), ("MediumSlateBlue", "#7B68EE"), ("MediumSpringGreen", "#00FA9A"), ("MediumTurquoise", "#48D1CC"), ("MediumVioletRed", "#C71585"), ("MidnightBlue", "#191970"), ("MintCream", "#F5FFFA"), ("MistyRose", "#FFE4E1"), ("Moccasin", "#FFE4B5"), ("NavajoWhite", "#FFDEAD"), ("Navy", "#000080"), ("OldLace", "#FDF5E6"), ("Olive", "#808000"), ("OliveDrab", "#6B8E23"), ("Orange", "#FFA500"), ("OrangeRed", "#FF4500"), ("Orchid", "#DA70D6"),
("PaleGoldenRod", "#EEE8AA"), ("PaleGreen", "#98FB98"), ("PaleTurquoise", "#AFEEEE"), ("PaleVioletRed", "#DB7093"), ("PapayaWhip", "#FFEFD5"), ("PeachPuff", "#FFDAB9"), ("Peru", "#CD853F"), ("Pink", "#FFC0CB"), ("Plum", "#DDA0DD"), ("PowderBlue", "#B0E0E6"), ("Purple", "#800080"), ("Red", "#
FF0000"), ("RosyBrown", "#BC8F8F"), ("RoyalBlue", "#4169E1"), ("SaddleBrown", "#8B4513"), ("Salmon", "#FA8072"), ("SandyBrown", "#F4A460"), ("SeaGreen", "#2E8B57"), ("SeaShell", "#FFF5EE"), ("Sienna", "#A0522D"), ("Silver", "#C0C0C0"), ("SkyBlue", "#87CEEB"), ("SlateBlue", "#6A5ACD"), ("SlateGray", "#708090"), ("Snow", "#FFFAFA"), ("SpringGreen", "#00FF7F"), ("SteelBlue", "#4682B4"), ("Tan", "#D2B48C"), ("Teal", "#008080"), ("Thistle", "#D8BFD8"), ("Tomato", "#FF6347"), ("Turquoise", "#40E0D0"), ("Violet", "#EE82EE"), ("Wheat", "#F5DEB3"), ("White", "#FFFFFF"), ("WhiteSmoke", "#F5F5F5"), ("Yellow", "#FFFF00"), ("YellowGreen", "#9ACD32"), )) safe_colors = ( 'black', 'maroon', 'green', 'navy', 'olive', 'purple', 'teal', 'lime', 'blue', 'silver', 'gray', 'yellow', 'fuchsia', 'aqua', 'white', ) def color_name(self): return self.random_element(self.all_colors.keys()) def safe_color_name(self): return self.random_element(self.safe_colors) def hex_color(self): return "#{0}".format( ("%x" % self.random_int( 1, 16777215)).ljust( 6, '0')) def safe_hex_color(self): color = ("%x" % self.random_int(0, 255)).ljust(3, '0') return "#{0}{0}{1}{1}{2}{2}".format(*color) def rgb_color(self): return ','.join(map(str, (self.random_int(0, 255) for _ in range(3)))) def rgb_css_color(self): return 'rgb(%s)' % ','.join( map(str, (self.random_int(0, 255) for _ in range(3))))
#-*- coding: utf-8 -*- import traceback from libcommon import ut
ils from libcommon import commonlib from libcommon.logger import stsdebug #---------------------------------- # 获取san服务是否运行 #---------------------------------- def get_san_status(): san_status = False try: retcode,proc = utils.cust_popen([commonlib.ISCSI_SCST,'status']) result = proc.stdout.read() if utils.list_match(result,['iSCSI-SCST target is running','iscsi-scstd .* is running']): san_status = True
except: stsdebug.write(stsdebug.get_line(),"stssan",traceback.print_exc()) return san_status
import numpy as np import torch import torch.nn as nn import torch.nn.functional as F def norm_col_init(weights, std=1.0): x = torch.randn(weights.size()) x *= std / torch.sqrt((x**2).sum(1, keepdim=True)) return x def weights_init(m): classname = m.__class__.__name__ if classname.find('Conv') != -1: weight_shape = list(m.weight.data.size()) fan_in = np.prod(weight_shape[1:4]) fan_out = np.prod(weight_shape[2:4]) * weight_
shape[0] w_bound = np.sqrt(6. / (fan_in + fan_out)) m.weight.data.uniform_(-w_bound, w_bound) m.bias.data.fill_(0) elif classname.find('Linear') != -1: weight_shape = list(m.weight.data.size()) fan_in = weight_shape[1] fan_out = weight_shape[0] w_bound = np.sqrt(6. / (fan_in + fan_out)) m.weight.data.uniform_(-w_bound, w_bound)
m.bias.data.fill_(0) class A3Clstm(torch.nn.Module): def __init__(self, num_inputs, action_space): super(A3Clstm, self).__init__() # convolutional neural networks self.conv1 = nn.Conv2d(num_inputs, 32, 5, stride=1, padding=2) self.maxp1 = nn.MaxPool2d(2, 2) self.conv2 = nn.Conv2d(32, 32, 5, stride=1, padding=1) self.maxp2 = nn.MaxPool2d(2, 2) self.conv3 = nn.Conv2d(32, 64, 4, stride=1, padding=1) self.maxp3 = nn.MaxPool2d(2, 2) self.conv4 = nn.Conv2d(64, 64, 3, stride=1, padding=1) self.maxp4 = nn.MaxPool2d(2, 2) # LSTM Cells self.lstm = nn.LSTMCell(1024, 512) num_outputs = action_space.n # The critic layer self.critic_linear = nn.Linear(512, 1) # The actor layer self.actor_linear = nn.Linear(512, num_outputs) self.apply(weights_init) self.actor_linear.weight.data = norm_col_init( self.actor_linear.weight.data, 0.01) self.actor_linear.bias.data.fill_(0) self.critic_linear.weight.data = norm_col_init( self.critic_linear.weight.data, 1.0) self.critic_linear.bias.data.fill_(0) self.lstm.bias_ih.data.fill_(0) self.lstm.bias_hh.data.fill_(0) self.train() # forward propagation def forward(self, inputs): inputs, (hx, cx) = inputs x = F.relu(self.maxp1(self.conv1(inputs))) x = F.relu(self.maxp2(self.conv2(x))) x = F.relu(self.maxp3(self.conv3(x))) x = F.relu(self.maxp4(self.conv4(x))) x = x.view(x.size(0), -1) hx, cx = self.lstm(x, (hx, cx)) x = hx return self.critic_linear(x), self.actor_linear(x), (hx, cx)
# The given constraint must evaluate to a boolean expression. #=============================================================================== return True def eval_attr13(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True def eval_attr14(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True def eval_attr15(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True def eval_attr16(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True def eval_attr17(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True def eval_attr18(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True def eval_attr19(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the curre
nt node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return True def eval_attr123(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return attr_value == "source" def eval_attr124(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. # You can access any attribute x of this node by: this['x']. # If the constraint relies on attribute values from other nodes, # use the LHS/NAC constraint instead. # The given constraint must evaluate to a boolean expression. #=============================================================================== return attr_value == "instance" def eval_attr125(self, attr_value, this): #=============================================================================== # This code is executed when evaluating if a node shall be matched by this rule. # You can access the value of the current node's attribute value by: attr_value. #
# # Copyright (c) SAS Institute Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # U
nless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # filters = ('test', ('%(testdir)s/',
))
from django.conf import settings from django.conf.urls import include, url from django.views.generic import TemplateView from django.contrib import admin admin.autodiscover() from booth.views impor
t HomepageView urlpatterns = [ url(r'^admin/', include(admin.site.urls)), url(r'^accounts/', include('allauth.urls')), url(r'^', include('booth.urls', namespace='booth')), url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')), url("^$", HomepageView.as_view(), name="homepage") ] if settings.DEBUG: import debug_toolbar urlpatterns.append( ur
l(r'^__debug__/', include(debug_toolbar.urls)), )
#! /usr/bin/env python # -*- coding: utf-8 -*-
# vim:fenc=utf-8 # # Copyright © 2017 Malcolm Ramsay <malramsay64@gmail.com> # # Distributed under terms of the MIT license. """Test function from the generation of figures.""" import math import gsd.hoomd from hypothesis import given from
hypothesis.strategies import floats from statdyn.analysis.order import compute_voronoi_neighs from statdyn.figures import colour from statdyn.figures.configuration import plot, snapshot2data @given(floats(min_value=-math.pi, max_value=math.pi)) def test_colour_orientation(orientation): """Ensure hex values being returned by colour_orientation.""" int(colour.colour_orientation(orientation)[1:], 16) def test_plot(): with gsd.hoomd.open('test/data/trajectory-13.50-3.00.gsd') as trj: plot(trj[0], repeat=True, offset=True) def test_snapshot2data(): with gsd.hoomd.open('test/data/trajectory-13.50-3.00.gsd') as trj: snapshot2data(trj[0]) def test_order(): with gsd.hoomd.open('test/data/trajectory-13.50-3.00.gsd') as trj: order_list = compute_voronoi_neighs(trj[0].configuration.box, trj[0].particles.position) plot(trj[0], repeat=True, offset=True, order_list=order_list)
.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.Page']"}), 'publisher_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'db_index': 'True'}), 'reverse_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 'blank': 'True'}), 'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}), 'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}), 'soft_root': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), 'template': ( 'django.db.models.fields.CharField', [], {'max_length': '100'}), 'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}) }, 'cms.pagemoderator': { 'Meta': {'object_name': 'PageModerator'}, 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'moderate_children': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'moderate_descendants': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'moderate_page': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']"}), 'user': ('django.db.models.fields.
related.ForeignKey', [], {'to': "orm['auth.User']"}) }, 'cms.pagemoderatorstate': { 'Meta': {'ordering': "('page', 'action', '-created')",
'object_name': 'PageModeratorState'}, 'action': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'message': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '1000', 'blank': 'True'}), 'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']"}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}) }, 'cms.pagepermission': { 'Meta': {'object_name': 'PagePermission'}, 'can_add': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_change': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_change_advanced_settings': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'can_change_permissions': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'can_delete': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_moderate': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_move_page': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_publish': ( 'django.db.models.fields.BooleanField', [], {'default': 'True'}), 'can_view': ( 'django.db.models.fields.BooleanField', [], {'default': 'False'}), 'grant_on': ( 'django.db.models.fields.IntegerField', [], {'default': '5'}), 'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']", 'null': 'True', 'blank': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}) }, 'cms.pageuser': { 'Meta': {'object_name': 'PageUser', '_ormbases': ['auth.User']}, 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_users'", 'to': "orm['auth.User']"}), 'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'}) }, 'cms.pageusergroup': { 'Meta': {'object_name': 'PageUserGroup', '_ormbases': ['auth.Group']}, 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_usergroups'", 'to': "orm['auth.User']"}), 'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}) }, 'cms.placeholder': { 'Meta': {'object_name': 'Placeholder'}, 'default_width': ( 'django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}) }, 'cms.title': { 'Meta': {'unique_together': "(('language', 'page'),)", 'object_name': 'Title'}, 'application_urls': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '200', 'null': 'True', 'blank': 'True'}), 'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'has_url_overwrite': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), 'id': ( 'django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}), 'menu_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'meta_description': ('django.db.models.fields.TextField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'meta_keywords': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'page': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'title_set'", 'to': "orm['cms.Page']"}), 'page_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), 'redirect': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. ### # Standard library from __future__ import print_function import threading import hashlib import socket import time import sys import re # Third-party modules from PyQt4 import QtCore, QtGui # Local modules import connection import window # FIXME: internationalize _ = lambda x:x refreshingTree = threading.Lock() class ConfigurationTreeRefresh: def __init__(self, eventsManager, window): if not refreshingTree.acquire(False): return self._eventsManager = eventsManager parentItem = QtGui.QStandardItemModel() window.connect(parentItem, QtCore.SIGNAL('itemClicked()'), window.configurationItemActivated) window.configurationTree.setModel(parentItem) self.items = {'supybot': parentItem} hash_ = eventsManager.sendCommand('config search ""') eventsManager.hook(hash_, self.slot) def slot(self, reply): """Slot called when a childs list is got.""" childs = reply.split(', ') for child in childs: if '\x02' in child: hash_ = self._eventsManager.sendCommand('more') self._eventsManager.hook(hash_, self.slot) break elif ' ' in child: refreshingTree.release() break splitted = child.split('.') parent, name = '.'.join(splitted[0:-1]), splitted[-1] item = QtGui.QStandardItem(name) item.name = QtCore.QString(child) self.items[parent].appendRow(item) self.items[child] = item class Connection(QtGui.QTabWidget, connection.Ui_connection): """Represents the connection dialog.""" def __init__(self, parent=None): QtGui.QWidget.__init__(self, parent) self.setupUi(self) def accept(self): """Signal called when the button 'accept' is clicked.""" self.state.text = _('Connecting...') if not self._connect(): self.state.text = _('Connection failed.') return self.state.text = _('Connected. Loading GUI...') window = Window(self._eventsManager) window.show() window.commandEdit.setFocus() self._eventsManager.callbackConnectionClosed = window.connectionClosed self._eventsManager.defaultCallback = window.replyReceived self.hide() def _connect(self): """Connects to the server, using the filled fields in the GUI. Return wheter or not the connection succeed. Note that a successful connection with a failed authentication is interpreted as successful. """ server = str(self.editServer.text()).split(':') username = str(self.editUsername.text()) password = str(self.editPassword.text()) assert len(server) == 2 assert re.match('[0-9]+', server[1]) assert ' ' not in username assert ' ' not in password sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) server[1] = int(server[1]) try: sock.connect(tuple(server)) except socket.error: return False sock.settimeout(0.01) self._eventsManager = EventsManager(sock) self._eventsManager.sendCommand('identify %s %s' % (username, password)) return True def reject(self): """Signal called when the button 'close' is clicked.""" exit() class Window(QtGui.QTabWidget, window.Ui_window): """Represents the main window.""" def __init__(self, eventsManager, parent=None): QtGui.QWidget.__init__(self, parent) self._eventsManager = eventsManager self.setupUi(self) self.connect(self.commandEdit, QtCore.SIGNAL('returnPressed()'), self.commandSendHandler) self.connect(self.commandSend, QtCore.SIGNAL('clicked()'), self.commandSendHandler) self.connect(self.refreshConfigurationTree, QtCore.SIGNAL('clicked()'), self._refreshConfigurationTree) def commandSendHandler(self): """Slot called when the user clicks 'Send' or presses 'Enter' in the raw commands tab.""" command = self.commandEdit.text() self.commandEdit.clear() try: # No hooking, because the callback would be the default callback self._eventsManager.sendCommand(command) s = _('<-- ') + command except socket.error: s = _('(not sent) <-- ') + command self.commandsHistory.appendPlainText(s) def replyReceived(self, reply): """Called by the events manager when a reply to a raw command is received.""" self.commandsHistory.appendPlainText(_('--> ') + reply.decode('utf8')) def connectionClosed(self): """Called by the events manager when a special message has to be displayed.""" self.commandsHistory.appendPlainText(_('* connection closed *')) self.commandEdit.readOnly = True self._eventsManager.stop() def _refreshConfigurationTree(self): """Slot called when the user clicks 'Refresh' under the configuration tree.""" ConfigurationTreeRefresh(self._eventsManager, self) def configurationItemActivated(self, item): print(repr(item)) class EventsManager(QtCore.QObject): """This class handles all incoming messages, and call the associated callback (using hook() method)""" def __init__(self, sock): self._sock = sock self.defaultCallback = lambda x:x self._currentLine = '' self._hooks = {} # FIXME: should be cleared every minute self._timerGetReplies = QtCore.QTimer() self.connect(self._timerGetReplies, QtCore.SIGNAL('timeout()'), self._getReplies); self._timerGetReplies.start(100) self._timerCleanHooks = QtCore.QTimer() self.connect(self._timerCleanHooks, QtCore.SIGNAL('timeout()'), self._cleanHooks); self._timerCleanHooks.start(100) def _getReplies(self): """Called by the QTimer; fetches the messages and calls the hooks.""" currentLine = self._currentLine self.currentLine = '' if not '\n' in currentLine: try: data = self._sock.recv(65536) if not data: # Frontend closed connection self.callbackConnectionClosed() return currentLine += data except socket.timeout: return if '\n' in currentLine: splitted = currentLine.split('\n') nextLines = '\n'.join(splitted[1:-1]) splitted = splitted[0].split(': ') hash_, reply = splitted[0], ': '.join(splitted[1:]) if hash_ in self._hooks: self._hooks[hash_][0](reply) else: self.defaultCallback(reply) else: nextLines = currentLine self._currentLine = nextLines def hook(self, hash_, callback, lifeTime=60): """Attach a callback to a hash: everytime a reply with this hash is received, the callback is called.""" self._hooks[hash_] = (callback, time.time() + lifeTime) def unhook(self, hash_): """Undo hook().""" return self._hooks.pop(hash_) def _cleanHooks(self): for hash_, data in self._hooks.items(): if data[1] < time.time(): self._hooks.pop(hash_) def sendCommand(self, command): """Get a command, s
end it, and returns a unique hash, used to identify replies to this command.""" hash_ = hashlib.sha1(str(time.time()) + command).hexdigest() command = '%s: %s\n' % (hash_, unicode(command).encode('utf8
', 'replace')) self._sock.send(command) return hash_ def stop(self): """Stops the loop.""" self._timer.stop() if __name__ == "__main__": app = QtGui.QApplication(sys.argv) connection = Connection()
""" # Pytess Pure Python tessellation of points into polygons, including Delauney/Thiessin, and Voronoi polygons. Built as a convenient user interface for Bill Simons/Carson Farmer python port of Steven Fortune C++ version of a Delauney triangulator. ## Platforms Tested on Python version 2.x. ## Dependencies Pure
Python, no dependencies. ## Installing it Pytess is installed with pip from the commandline: pip install pytess ## Usage To triangulate a set of points, simply do: import pytess points = [(1,1), (5,5), (3,5), (8,1)] triangles = pytess.triangulate(points) And for voronoi diagrams: import pytess points = [(1,1), (5,5), (3,5),
(8,1)] voronoipolys = pytess.voronoi(points) ## More Information: - [Home Page](http://github.com/karimbahgat/Pytess) - [API Documentation](http://pythonhosted.org/Pytess) ## License: This code is free to share, use, reuse, and modify according to the MIT license, see license.txt ## Credits: I just made it more convenient to use for end-users and uploaded it to PyPi. The real credit goes to Bill Simons/Carson Farmer and Steven Fortune for implementing the algorithm in the first place. Karim Bahgat (2015) """ __version__ = "0.1.0" from .main import *
def
load_text_file(text_file: str) -> str: wit
h open(text_file, 'r') as f: return f.read()
# -*- coding: utf-8 -*- from qiniu import config from qiniu import http class PersistentFop(object): """持久化处理类 该类用于主动触发异步持久化操作,具体规格参考: http://developer.qiniu.com/docs/v6/api/reference/fop/pfop/pfop.html Attributes: auth: 账号管理密钥对,Auth对象 bucket: 操作资源所在空间 pipeline: 多媒体处理队列,详见 https://portal.qiniu.com/mps/pipeline notify_url: 持久化处理结果通知URL """ def __init__(self, auth, bucket, pipeline=None, notify_url=None): """初始化持久化处理类""" self.auth = auth self.bucket = bucket self.pipeline = pipeline self.notify_url = notify_url def execute(self, key, fops, force=None): """执行持久化处理: Args: key: 待处理的源文件 fops: 处理详细操作,规格详见 http://developer.qiniu.com/docs/v6/api/reference/fop/ force: 强制执行持久化处理开关 Returns: 一个dict变量,返回持久化处理的persistentId,类似{"persistentId": 5476bedf7823de4068253bae}; 一个ResponseInfo对象 """ ops = ';'.join(fops) data = {'bucket': self.bucket, 'key': key, 'fops': ops} if self.pipeline:
data['pipeline'] = self.pipeline if self.notify_url: data['notifyURL'] = self.notify_url if force == 1: data['force'] = 1 url = 'http://{0}/pfop'.format(config.get_default('default_api_host')) return http._post_with_auth(url, data, self.auth)
################################################################################ # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ################################################################################ import array import unittest from pyflink import keyword from pyflink.ml.api.param import ParamInfo, TypeConverters, Params from pyflink.ml.lib.param.colname import HasSelectedCols, HasOutputCol class ParamsTest(unittest.TestCase): def test_default_behavior(self): params = Params() not_optinal = ParamInfo("a", "", is_optional=False) with self.assertRaises(ValueError): params.get(not_optinal) # get optional without default param optional_without_default = ParamInfo("a", "") with self.assertRaises(ValueError): params.get(optional_without_default) def test_get_optional_param(self): param_info = ParamInfo( "key", "", has_default_value=True, default_value=None, type_converter=TypeConverters.to_string) params = Params() self.assertIsNone(params.get(param_info)) val = "3" params.set(param_info, val) self.assertEqual(val, params.get(param_info)) params.set(param_info, None) self.assertIsNone(params.get(param_info)) def test_remove_contains_size_clear_is_empty(self): param_info = ParamInfo( "key", "", has_default_value=True, default_value=None, type_converter=TypeConverters.to_string) params = Params() self.assertEqual(params.size(), 0) self.assertTrue(params.is_empty()) val = "3" params.set(param_info, val) self.assertEqual(params.size(), 1) self.assertFalse(params.is_empty()) params_json = params.to_json() params_new = Params.from_json(params_json) self.assertEqual(params.get(param_info), val) self.assertEqual(params_new.get(param_info), val) params.clear() self.assertEqual(params.size(), 0) self.assertTrue(params.is_empty()) def test_to_from_json(self): import jsonpickle param_info = ParamInfo( "key", "", has_default_value=True, default_value=None, type_converter=TypeConverters.to_string) param_info_new = jsonpickle.decode(jsonpickle.encode(param_info)) self.assertEqual(param_info_new, param_info) params = Params() val = "3" params.set(param_info, val) params_new = Params.from_json(params.to_json()) self.assertEqual(params_new.get(param_info), val) class ParamTypeConversionTests(unittest.TestCase): """ Test that param type conversion happens. """ def test_list(self): l = [0, 1] for lst_like in [l, range(2), tuple(l), array.array('l', l)]: converted = TypeConverters.to_list(lst_like) self.assertEqual(type(converted), list) self.assertListEqual(converted, l) def test_list_float_or_list_int(self): l = [0, 1] for lst_like in [l, range(2), tuple(l), array.array('l', l)]: converted1 = TypeConverters.to_list_float(lst_like) converted2 = TypeConverters.to_list_int(lst_like) self.assertEqual(type(converted1), list) self.assertEqual(type(converted2), list) self.assertListEqual(converted1, l) self.assertListEqual(converted2, l) def test_list_string(self): l = ["aa", "bb"] for lst
_like in [l, tuple(l)]: converted = TypeConverters.to_list_string(lst_like) self.assertEqual(type(converted), list) self.assertListEqual(converted, l) def test_float(self): data = 1.45 converted = TypeConverters.to_float(data) self.assertEqual(type(converted), float) self.assertEqual(converted, data) def tes
t_int(self): data = 1234567890 converted = TypeConverters.to_int(data) self.assertEqual(type(converted), int) self.assertEqual(converted, data) def test_string(self): data = "1234567890" converted = TypeConverters.to_string(data) self.assertEqual(type(converted), str) self.assertEqual(converted, data) def test_boolean(self): data = True converted = TypeConverters.to_boolean(data) self.assertEqual(type(converted), bool) self.assertEqual(converted, data) class MockVectorAssembler(HasSelectedCols, HasOutputCol): @keyword def __init__(self, *, selected_cols=None, output_col=None): self._params = Params() kwargs = self._input_kwargs self._set(**kwargs) def get_params(self): return self._params class TestWithParams(unittest.TestCase): def test_set_params_with_keyword_arguments(self): assembler = MockVectorAssembler(selected_cols=["a", "b"], output_col="features") params = assembler.get_params() self.assertEqual(params.size(), 2) self.assertEqual(assembler.get(HasSelectedCols.selected_cols), ["a", "b"]) self.assertEqual(assembler.get(HasOutputCol.output_col), "features") def test_set_params_with_builder_mode(self): assembler = MockVectorAssembler()\ .set_selected_cols(["a", "b"])\ .set_output_col("features") params = assembler.get_params() self.assertEqual(params.size(), 2) self.assertEqual(assembler.get(HasSelectedCols.selected_cols), ["a", "b"]) self.assertEqual(assembler.get(HasOutputCol.output_col), "features")
# Copyright 2021 The Google Earth Engine Community Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/L
ICENSE-2.0 # # Unless
required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # [START earthengine__apidocs__ee_dictionary_aside] # A dictionary (e.g. results of ee.Image.reduceRegion of an S2 image). dic = { 'B1': 182, 'B2': 219, 'B3': 443 } def print_dic(dic): """Prints the dictionary.""" print('ee.Dictionary from client-side dictionary:', dic.getInfo()) # Print a message when constructing the ee.Dictionary. ee_dic = ee.Dictionary(dic).aside(print_dic) # [END earthengine__apidocs__ee_dictionary_aside]
written permission. ## ## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" ## AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, ## THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR ## PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR ## CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, ## EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, ## PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; ## OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, ## WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR ## OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ## ADVISED OF THE POSSIBILITY OF SUCH DAMAGE." ## ############################################################################### from __future__ import division from itertools import izip import operator from vistrails.core.data_structures.bijectivedict import Bidict from vistrails.core.modules.utils import create_port_spec_string, parse_port_spec_string from vistrails.core.system import get_vistrails_basic_pkg_id, \ get_module_registry from vistrails.core.utils import enum, VistrailsInternalError from vistrails.core.vistrail.port_spec_item import PortSpecItem from vistrails.db.domain import DBPortSpec, IdScope from ast import literal_eval import unittest import copy PortEndPoint = enum('PortEndPoint', ['Invalid', 'Source', 'Destination']) ################################################################################ class PortSpec(DBPortSpec): port_type_map = Bidict([('input', 'destination'), ('output', 'source'), ('invalid', 'invalid')]) end_point_map = Bidict([('source', PortEndPoint.Source), ('destination', PortEndPoint.Destination), ('invalid', PortEndPoint.Invalid)]) ########################################################################## # Constructors and copy def __init__(self, *args, **kwargs): signature = None if 'signature' in kwargs: signature = kwargs['signature'] del kwargs['signature'] sigstring = None if 'sigstring' in kwargs: sigstring = kwargs['sigstring'] del kwargs['sigstring'] defaults = None if 'defaults' in kwargs: defaults = kwargs['defaults'] del kwargs['defaults'] labels = None if 'labels' in kwargs: labels = kwargs['labels'] del kwargs['labels'] values = None if 'values' in kwargs: values = kwargs['values'] del kwargs['values'] entry_types = None if 'entry_types' in kwargs: entry_types = kwargs['entry_types'] del kwargs['entry_types'] if 'items' in kwargs and 'portSpecItems' not in kwargs: kwargs['portSpecItems'] = kwargs['items'] del kwargs['items'] if 'optional' not in kwargs: kwargs['optional'] = 0 # False elif not isinstance(kwargs['optional'], (int, long)): if isinstance(kwargs['optional'], bool): if kwargs['optional']: kwargs['optional'] = 1 else: kwargs['optional'] = 0 else: raise VistrailsInternalError("Cannot parse 'optional' kw " "-- must be an int or bool") if 'min_conns' not in kwargs: kwargs['min_conns'] = 0 elif kwargs['optional'] == 1 and kwargs['min_conns'] > 0: raise VistrailsInternalError("A mandatory port cannot be set " "to optional") if 'max_conns' not in kwargs: kwargs['max_conns'] = -1 if kwargs['min_conns'] >= 0 and kwargs['max_conns'] >= 0 and \ kwargs['min_conns'] > kwargs['max_conns']: raise VistrailsInternalError("Minimum number of connections " "cannot be greater than maximum " "number of connections") if 'sort_key' not in kwargs: kwargs['sort_key'] = -1 if 'depth' not in kwargs: kwargs['depth'] = 0 if 'id' not in kwargs: kwargs['id'] = -1 if 'tooltip' in kwargs: self._tooltip = kwargs['tooltip'] del kwargs['tooltip'] else: self._tooltip = None if 'docstring' in kwargs: self._docstring = kwargs['docstring'] del kwa
rgs['docstring'] else: self._docstring = None if 'shape' in kwargs: self._shape = kwargs['shape'] del kwargs['shape'] else: self._shape = None DBPortSpec.__init__(self, *args, **kwargs) if sum(1 for container in (self.por
t_spec_items, signature, sigstring) if container) > 1: raise ValueError("Please specify only one of portSpecItems," " signature, or sigstring kwargs.") self.create_spec_items(self.port_spec_items, signature, sigstring, defaults, labels, values, entry_types) self._short_sigstring = None # if signature is not None: # self.create_entries(signature) # if not self.sigstring and self._entries is not None: # # create sigstring from entries # self.create_sigstring_and_descriptors() # DAKOOP: removed this---we will check in module_registry and pipeline # validation, this way, we can let errors go all the way up # elif self._entries is None and self.sigstring: # # create entries from sigstring # self.create_entries_and_descriptors() # else: # raise VistrailsInternalError("Need to specify signature or " # "sigstring to create PortSpec") # if self._entries is not None and self._tooltip is None: # self.create_tooltip() self.is_valid = True def __copy__(self): return PortSpec.do_copy(self) def do_copy(self, new_ids=False, id_scope=None, id_remap=None): cp = DBPortSpec.do_copy(self, new_ids, id_scope, id_remap) cp._short_sigstring = self._short_sigstring cp._tooltip = self._tooltip cp._shape = self._shape cp._docstring = self._docstring cp.is_valid = self.is_valid cp.__class__ = PortSpec # if cp._entries is not None: # cp.create_tooltip() return cp @staticmethod def convert(_port_spec): if _port_spec.__class__ == PortSpec: return _port_spec.__class__ = PortSpec for _port_spec_item in _port_spec.db_portSpecItems: PortSpecItem.convert(_port_spec_item) _port_spec._short_sigstring = None _port_spec._tooltip = None _port_spec._shape = None _port_spec._docstring = None _port_spec.is_valid = True _port_spec.port_spec_items.sort(key=operator.attrgetter('db_pos')) @staticmethod def from_sigstring(sigstring): """from_sig(sigstring: string) -> PortSpec Returns a portspec from the given sigstring. """ return PortSpec(sigstring=sigstring) ########################################################################## # Properties id = DBPortSpec.db_id name = DBPortSpec.db_name type = DBPortSpec.db_type optional = DBPortSpec.db_optional sort_key = DBPortSpec.db_sort_key min_conns = DBPortSpec.db_min_conns max_conns = DBPortSpec.db_max_conns _depth = DBPortSpec.db_depth port_spec_items = DBPortSpec.db_portSpecItems items = DBPortSpec.db_portSpecItems def _get_sigstring(self): return create_port_spec_string([i.spec_tuple for i in
========================= # Copyright (C) 2010 Diego Duclos # # This file is part of pyfa. # # pyfa is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # pyfa is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with pyfa. If not, see <http://www.gnu.org/licenses/>. # ============================================================================= # noinspection PyPackageRequirements import wx import gui.globalEvents as GE import gui.mainFrame from gui.bitmap_loader import BitmapLoader from gui.pyfa_gauge import PyGauge from gui.statsView import StatsView from gui.utils import fonts from gui.utils.numberFormatter import formatAmount class ResistancesViewFull(StatsView): name = "resistancesViewFull" def __init__(self, parent): StatsView.__init__(self) self.parent = parent self._cachedValues = [] self.showEffective = True self.activeFit = None self.mainFrame = gui.mainFrame.MainFrame.getInstance() self.mainFrame.Bind(GE.EFFECTIVE_HP_TOGGLED, self.ehpSwitch) def getHeaderText(self, fit): return "Resistances" def getTextExtentW(self, text): width, height = self.parent.GetTextExtent(text) return width def populatePanel(self, contentPanel, headerPanel): contentSizer = contentPanel.GetSizer() self.panel = contentPanel self.headerPanel = headerPanel # Custom header EHP headerContentSizer = self.headerPanel.Parent.GetHeaderContentSizer() self.stEff = wx.StaticText(headerPanel, wx.ID_ANY, "( Effective HP: ") headerContentSizer.Add(self.stEff) headerPanel.GetParent().AddToggleItem(self.stEff) self.labelEhp = wx.StaticText(headerPanel, wx.ID_ANY, "0") headerContentSizer.Add(self.labelEhp, 0) headerPanel.GetParent().AddToggleItem(self.labelEhp) stCls = wx.StaticText(headerPanel, wx.ID_ANY, " )") headerPanel.GetParent().AddToggleItem(stCls) headerContentSizer.Add(stCls) # headerContentSizer.Add(wx.StaticLine(headerPanel, wx.ID_ANY), 1, wx.ALIGN_CENTER) # Display table col = 0 row = 0 sizerResistances = wx.GridBagSizer() contentSizer.Add(sizerResistances, 0, wx.EXPAND, 0) # Add an empty label, then the rest. sizerResistances.Add(wx.StaticText(contentPanel, wx.ID_ANY), wx.GBPosition(row, col), wx.GBSpan(1, 1)) col += 1 toolTipText = {"em": "Electromagnetic resistance", "thermal": "Thermal resistance", "kinetic": "Kinetic resistance", "explosive": "Explosive resistance"} for damageType in ("em", "thermal", "kinetic", "explosive"): bitmap = BitmapLoader.getStaticBitmap("%s_big" % damageType, contentPanel, "gui") tooltip = wx.ToolTip(toolTipText[damageType]) bitmap.SetToolTip(tooltip) sizerResistances.Add(bitmap, wx.GBPosition(row, col), wx.GBSpan(1, 1), wx.ALIGN_CENTER) col += 1 self.stEHPs = wx.Button(contentPanel, style=wx.BU_EXACTFIT, label="EHP") self.stEHPs.SetToolTip(wx.ToolTip("Click to toggle between effective HP and raw HP")) self.stEHPs.Bind(wx.EVT_BUTTON, self.toggleEHP) for i in range(4): sizerResistances.AddGrowableCol(i + 1) sizerResistances.Add(self.stEHPs, wx.GBPosition(row, col), wx.GBSpan(1, 1), wx.ALIGN_CENTER) col = 0 row += 1 gaugeColours = (((38, 133, 198), (52, 86, 98)), ((198, 38, 38), (83, 65, 67)), ((163, 163, 163), (74, 90, 93)), ((198, 133, 38), (81, 83, 67))) toolTipText = {"shield": "Shield resistance", "armor": "Armor resistance", "hull": "Hull resistance", "damagePattern": "Incoming damage pattern"} for tankType in ("shield", "armor", "hull", "separator", "damagePattern"): if tankType != "separator": bitmap = BitmapLoader.getStaticBitmap("%s_big" % tankType, contentPanel, "gui") tooltip = wx.ToolTip(toolTipText[tankType]) bitmap.SetToolTip(tooltip) sizerResistances.Add(bitmap, wx.GBPosition(row, col), wx.GBSpan(1, 1), wx.ALIGN_CENTER) col += 1 else: sizerResistances.Add(wx.StaticLine(contentPanel, wx.ID_ANY), wx.GBPosition(row, col), wx.GBSpan(1, 6), wx.EXPAND | wx.ALIGN_CENTER) row += 1 col = 0 continue currGColour = 0 font = wx.Font(fonts.NORMAL, wx.SWISS, wx.NORMAL, wx.NORMAL, False) for damageType in ("em", "thermal", "kinetic", "explosive"): box = wx.BoxSizer(wx.HORIZONTAL) sizerResistances.Add(box, wx.GBPosition(row, col), wx.GBSpan(1, 1), wx.ALIGN_CENTER) # Fancy gauges addon pgColour = gaugeColours[currGColour] fc = pgColour[0] bc = pgColour[1] currGColour += 1 lbl = PyGauge(contentPanel, font, 100) lbl.SetMinSize((48, 16)) lbl.SetBackgroundColour(wx.Colour(bc[0], bc[1], bc[2])) lbl.SetBarColour(wx.Colour(fc[0], fc[1], fc[2])) lbl.SetBarGradient() lbl.SetFractionDigits(1) setattr(self, "gaugeResistance%s%s" % (tankType.capitalize(), damageType.capitalize()), lbl) box.Add(lbl, 0, wx.ALIGN_CENTER) col += 1 box = wx.BoxSizer(wx.VERTICAL) box.SetMinSize(wx.Size(self.getTextExtentW("WWWWk"), -1)) lbl = wx.StaticText(contentPanel, wx.ID_ANY, "0" if tankType != "dama
gePattern" else "") box.Add(lbl, 0, wx.ALIGN_CENTER) setattr(self, "labelResistance%sEhp
" % tankType.capitalize(), lbl) sizerResistances.Add(box, wx.GBPosition(row, col), wx.GBSpan(1, 1), wx.ALIGN_CENTER) row += 1 col = 0 self.stEHPs.SetToolTip(wx.ToolTip("Click to toggle between effective HP and raw HP")) def toggleEHP(self, event): wx.PostEvent(self.mainFrame, GE.EffectiveHpToggled(effective=self.stEHPs.GetLabel() == "HP")) def ehpSwitch(self, event): event.Skip() self.showEffective = event.effective fitID = self.mainFrame.getActiveFit() wx.PostEvent(self.mainFrame, GE.FitChanged(fitIDs=(fitID,))) def refreshPanel(self, fit): # If we did anything intresting, we'd update our labels to reflect the new fit's stats here if fit is None and not self.showEffective: self.showEffective = True wx.PostEvent(self.mainFrame, GE.EffectiveHpToggled(effective=True)) return self.stEHPs.SetLabel("EHP" if self.showEffective else "HP") self.activeFit = fit.ID if fit is not None else None for tankType in ("shield", "armor", "hull"): for damageType in ("em", "thermal", "kinetic", "explosive"): if fit is not None: resonanceType = tankType if tankType != "hull" else "" resonance = "%s%sDamageResonance" % (resonanceType, damageType.capitalize()) resonance = resonance[0].lower() + resonance[1:] resonance = (1 - fit.ship.getModifiedItemAttr(resonance)) * 100 else: resonance = 0 lbl = getattr(self, "gaugeResistance%s%s" % (tankType.capitalize(), damageType.capitalize())) lbl.SetValue(resonance) ehp = (fit.ehp if self.showEffective else fit.hp) if fit is not None else None total = 0
""" Benchmark functions for fftpack.pseudo_diffs module """ from numpy import arange, sin, cos, pi, exp, tanh, sign from .common import Benchmark, safe_import with safe_import(): from scipy.fftpack import diff, fft, ifft, tilbert, hilbert, shift, fftfreq def direct_diff(x, k=1, period=None): fx = fft(x) n = len(fx) if period is None: period = 2*pi w = fftfreq(n)*2j*pi/period*n if k < 0: w = 1 / w**k w[0] = 0.0 else: w = w**k if n > 2000: w[250:n-250] = 0.0 return ifft(w*fx).real def direct_tilbert(x, h=1, period=None): fx = fft(x) n = len(fx
) if period is None: period = 2*pi w = fftfreq(n)*h*2*pi/period*n w[0] = 1 w = 1j/tanh(w) w[0] = 0j return ifft(w*fx) def direct_hilbert(x): fx = fft(x) n = len(fx) w = fftfreq(n)*n w = 1j*sign(w) return ifft(w*fx) def
direct_shift(x, a, period=None): n = len(x) if period is None: k = fftfreq(n)*1j*n else: k = fftfreq(n)*2j*pi/period*n return ifft(fft(x)*exp(k*a)).real class Bench(Benchmark): params = [ [100, 256, 512, 1000, 1024, 2048, 2048*2, 2048*4], ['fft', 'direct'], ] param_names = ['size', 'type'] def setup(self, size, type): size = int(size) x = arange(size)*2*pi/size a = 1 self.a = a if size < 2000: self.f = sin(x)*cos(4*x)+exp(sin(3*x)) self.sf = sin(x+a)*cos(4*(x+a))+exp(sin(3*(x+a))) else: self.f = sin(x)*cos(4*x) self.sf = sin(x+a)*cos(4*(x+a)) def time_diff(self, size, soltype): if soltype == 'fft': diff(self.f, 3) else: direct_diff(self.f, 3) def time_tilbert(self, size, soltype): if soltype == 'fft': tilbert(self.f, 1) else: direct_tilbert(self.f, 1) def time_hilbert(self, size, soltype): if soltype == 'fft': hilbert(self.f) else: direct_hilbert(self.f) def time_shift(self, size, soltype): if soltype == 'fft': shift(self.f, self.a) else: direct_shift(self.f, self.a)
import unittest from src.data_structures.mockdata import MockData class TestMockData (unittest.TestCase): def setUp(self): self.data = MockData() def tes
t_random_data(self): data = MockData()
a_set = data.get_random_elements(10) self.assertTrue(len(a_set) == 10, "the data should have 10 elements!") if __name__ == '__main__': unittest.main()
#%% # Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== from tensorflow.examples.tutorials.mnist import input_data mnist = input_data.read_data_sets("MNIS
T_data/", one_hot=True) print(mnist.tra
in.images.shape, mnist.train.labels.shape) print(mnist.test.images.shape, mnist.test.labels.shape) print(mnist.validation.images.shape, mnist.validation.labels.shape) import tensorflow as tf sess = tf.InteractiveSession() x = tf.placeholder(tf.float32, [None, 784]) W = tf.Variable(tf.zeros([784, 10])) b = tf.Variable(tf.zeros([10])) y = tf.nn.softmax(tf.matmul(x, W) + b) y_ = tf.placeholder(tf.float32, [None, 10]) cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), reduction_indices=[1])) train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy) tf.global_variables_initializer().run() for i in range(1000): batch_xs, batch_ys = mnist.train.next_batch(100) train_step.run({x: batch_xs, y_: batch_ys}) correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(y_, 1)) accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32)) print(accuracy.eval({x: mnist.test.images, y_: mnist.test.labels}))
try: import openeye # These can only be imported if openeye tools are available
from smarty.atomtyper import * from smarty.sampler import * from smarty.utils import * from smarty.sampler_smirky import * ex
cept Exception as e: print(e) print('Warning: Cannot import openeye toolkit; not all functionality will be available.') from smarty.score_utils import *
##################################################################### # # core.py # # Copyright (c) 2015, Eran Egozy # # Released under the MIT License (http://opensource.org/licenses/MIT) # ##################################################################### import kivy from kivy.app import App from kivy.core.window import Window from kivy.uix.widget import Widget from kivy.clock import Clock import traceback class BaseWidget(Widget): """Has some common core functionality we want in all our apps - handling key up/down, closing the app, and update on every frame. The subclass of BaseWidget can optionally define these methods, which will get called if defined: def on_key_down(self, keycode, modifiers): def on_key_up(self, keycode): def on_close(self): def on_update(self): """ def __init__(self, **kwargs): super(BaseWidget, self).__init__(**kwargs) if hasattr(self.__class__, 'on_init'): Clock.schedule_once(self._init, 0) # keyboard up / down messages self.down_keys = [] kb = Window.request_keyboard(target=self, callback=None) if hasattr(self.__class__, 'on_key_down'): kb.bind(on_key_down=self._key_down) if hasattr(self.__class__, 'on_key_up'): kb.bind(on_key_up=self._key_up) # get called when app is about to shut down if hasattr(self.__class__, 'on_close'): Window.bind(on_close=self._close) # create a clock to poll us every frame if hasattr(self.__class__, 'on_update')
: Clock.schedule_interval(self._update, 0) def _key_down(self, keyboard, keycode, text, modifiers): if not keycode[1] in self.down_keys: self.down_keys.append(keycode[1]) self.on_key_down(keycode, modifiers)
def _key_up(self, keyboard, keycode): if keycode[1] in self.down_keys: self.down_keys.remove(keycode[1]) self.on_key_up(keycode) def _close(self, *args): self.on_close() def _update(self, dt): self.on_update() g_terminate_funcs = [] def register_terminate_func(f) : global g_terminate_funcs g_terminate_funcs.append(f) def run(widget): """Pass in a widget, and this will automatically run it. Will also run termination functions (g_terminate_funcs) at the end of the run, even if it was caused by a program crash """ class MainApp(App): def build(self): return widget() try: MainApp().run() except: traceback.print_exc() global g_terminate_funcs for t in g_terminate_funcs: t()
# -*- coding: utf-8 -*- import logging if __name__ == '__main__': logging.basicConfig() _log = logging.getLogger(__name__) import pyxb.binding.generate import pyxb.binding.datatypes as xsd import pyxb.utils.domutils from xml.dom import Node import os.path schema_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../schemas/test-ctd-simple.xsd')) code = pyxb.binding.generate.GeneratePython(schema_location=schema_path) rv = compile(code, 'test', 'exec') eval(rv) from pyxb.exceptions_ import * import unittest class TestCTDSimple (unittest.TestCase): def testClause4 (self): self.assertTrue(clause4._IsSimpleTypeContent()) self.assertTrue(clause4._TypeDefinition == xsd.string) self.assertEqual(None, clause4._TypeDefinition._CF_length.value()) def testClause3 (self): self.assertTrue(clause3._IsSimpleTypeContent()) self.assertTrue(issubclass(clause3, clause4)) self.assertTrue(clause3._TypeDefinition == xsd.string) def testClause2 (self): self.assertTrue(clause2._IsSimpleTypeContent()) self.assertTrue(issubclass(clause2, ctype)) self.assertTrue(issubclass(clause2._TypeDefinition, xsd.string)) self.assertEqual(6, clause2._TypeDefinition._CF_length.value()) def testClause1_1 (self): self.assertTrue(clause1_1._IsSimpleTypeContent()) self.assertTrue(issubclass(clause1_1, clause4)) self.assertTrue(issubclass(clause1_1._TypeD
efinition, xsd.string)) self.assertEqual(2, clause1_1._TypeDefinition._CF_minLength.value()) self.assertEqual(4, clause1_1._TypeDefinition._CF_maxLength.value()) def testClause1_2 (self): self.assertTrue(clause1_2._IsSimpleTypeContent()) self.assertTrue(issubclass(
clause1_2, clause4)) self.assertTrue(issubclass(clause1_2._TypeDefinition, xsd.string)) self.assertEqual(6, clause1_2._TypeDefinition._CF_length.value()) if __name__ == '__main__': unittest.main()
#!/usr/bin/python # Copyright 2017-present Open Networking Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. DOCUMENTATION = ''' --- module: maas_item short_description: Manage MAAS Clusters Interfaces options: maas: description: - URL of MAAS server default: http://localhost/MAAS/api/1.0/ key: description: - MAAS API key required: yes name: description: - name of the item required: yes state: description: - possible states for this item choices: ['present', 'absent', 'query'] default: present requirements: [ipaddress, requests_oauthlib, maasclient] author: David Bainbridge ''' EXAMPLES = ''' examples: maas_item: maas: http://my.maas.server.com/MAAS/api/1.0/ key: 'xBvr9dx5k7S52myufC:fqBXV7hJgXegNZDw9c:K8hsmL47XjAppfQy2pDVW7G49p6PELgp' name: MyItem state: present maas_item: maas: http://my.maas.server.com/MAAS/api/1.0/ key: 'xBvr9dx5k7S52myufC:fqBXV7hJgXegNZDw9c:K8hsmL47XjAppfQy2pDVW7G49p6PELgp' name: MyDeadItem state: absent ''' import sys import json import ipaddress import requests from maasclient.auth import MaasAuth from maasclient import MaasClient # For some reason the maasclient doesn't provide a put method. So # we will add it here def put(client, url, params=None): return requests.put(url=client.auth.api_url + url, auth=client._oauth(), data=params) # Attempt to interpret the given value as a JSON object, if that fails # just return it as a string def string_or_object(val): try: return json.loads(val) except: return val # Return a copy of the given dictionary with any `null` valued entries # removed def remove_null(d_in): d = d_in.copy() to_remove = [] for k in d.keys(): if d[k] == None: to_remove.append(k) for k in to_remove: del d[k] return d # Deterine if two dictionaries are different def different(have, want): have_keys = have.keys() for key in want.keys(): if (key in have_keys and want[key] != have[key]) or key not in have_keys: return True return False # Get an item from MAAS using its name, if not found return None def get_item(maas, name): res = maas.get('/items/%s/' % name) if res.ok: return json.loads(res.text) return None # Create an item based on the value given def create_item(maas, item): merged = item.copy() # merged['op'] = 'new' res = maas.post('/items/', merged) if res.ok: return { 'error': False, 'status': get_item(maas, merged['name']) } return { 'error': True, 'status': string_or_object(res.text) } # Delete an item based on the name def delete_item(maas, name): res = maas.delete('/items/%s/' % name) if res.ok: return { 'error': False } return { 'error': True, 'status': string_or_object(res.text) } def update_item(maas, have, want): merged = have.copy() merged.update(want) res = put(maas, '/items/%s/' % merged['name'], merged) if res.ok: return { 'error': False, 'status': get_item(maas, merged['name']) } return { 'error': True, 'status': string_or_object(res.text) } def main(): module = AnsibleModule( argument_spec = dict( maas=dict(default='http://localhost/MAAS/api/1.0/'), key=dict(required=True), name=dict(required=True), state=dict(default='present', choices=['present', 'absent', 'query']) ), supports_check_mode = False ) maas = module.params['maas'] key = module.params['key'] state = module.params['state'] # Construct a sparsely populate desired state desired = remove_null({ 'name': module.params['name'], }) # Authenticate into MAAS auth = MaasAuth(maas, key) maas = MaasClient(auth) # Attempt to get the item from MAAS item = get_item(maas, desired['name']) # Actions if the item does not currently exist if not item: if state == 'query': # If this is a query, returne it is not found module.exit_json(changed=False, found=False) elif state == 'present': # If this should be present, then attempt to create it
res = create_item(maas, desired) if res['error']: module.fail_json(msg=res['status']) else: module.exit_json(changed=True, item=res['status']) else: # If this should be absent, then we are done and in the desired state module.exit_json(changed=False) # Done with items does not exists actions return # Actions if the item does
exist if state == 'query': # If this is a query, return the item module.exit_json(changed=False, found=True, item=item) elif state == 'present': # If we want this to exists check to see if this is different and # needs updated if different(item, desired): res = update_item(maas, item, desired) if res['error']: module.fail_json(msg=res['status']) else: module.exit_json(changed=True, item=res['status']) else: # No differences, to nothing to change module.exit_json(changed=False, item=item) else: # If we don't want this item, then delete it res = delete_item(maas, item['name']) if res['error']: module.fail_json(msg=res['status']) else: module.exit_json(changed=True, item=item) # this is magic, see lib/ansible/module_common.py #<<INCLUDE_ANSIBLE_MODULE_COMMON>> if __name__ == '__main__': main()
from .utils import do, do_ex, trace from .version import meta from os.path import abspath, realpath FILES_COMMAND = 'git ls-files' DEFAULT_DESCRIBE = 'git describe --dirty --tags --long --match *.*' def parse(root, describe_command=DEFAULT_DESCRIBE): real_root, _, ret = do_ex('git rev-parse --show-toplevel', root) if ret: return trace('real root', real_root) if abspath(realpath(real_root)) != abspath(realpath(root)): return rev_node, _, ret = do_ex('git rev-parse --verify --quiet HEAD',
root) if ret: return meta('0.0') rev_node = rev_node[:7] out, err, ret = do_ex(describe_command, root) if '-' not in out and '
.' not in out: revs = do('git rev-list HEAD', root) count = revs.count('\n') if ret: out = rev_node return meta('0.0', distance=count + 1, node=out) if ret: return dirty = out.endswith('-dirty') if dirty: out = out.rsplit('-', 1)[0] tag, number, node = out.rsplit('-', 2) number = int(number) if number: return meta(tag, distance=number, node=node, dirty=dirty) else: return meta(tag, dirty=dirty, node=node)
y: if conn: conn.close() # close in case of errors return result def _file_url(filepath, url): """Return URL for the given `filepath` in the DAV collection `url`.""" basename = os.path.basename(filepath) return urlparse.urljoin(url.rstrip('/') + '/', basename) def _dav_put(filepath, url, login, progress=None): """Upload `filepath` to given `url` (referring to a WebDAV collection).""" fileurl = _file_url(filepath, url) sys.stdout.write(" Uploading %s: " % os.path.basename(filepath)) sys.stdout.flush() size = os.path.getsize(filepath) with closing(open(filepath, 'r')) as handle: if progress: handle = dputhelper.FileWithProgress(handle, ptype=progress, progressf=sys.stdout, size=size) trace("HTTP PUT to URL: %s" % fileurl) try: conn = _url_connection(fileurl, "PUT") try: conn.putheader("Authorization", 'Basic %s' % login.encode('base64').replace('\n', '').strip()) conn.putheader("Content-Length", str(size)) conn.endheaders() conn.debuglevel = 0 while True: data = handle.read(CHUNK_SIZE) if not data: break conn.send(data) conn.debuglevel = int(trace.debug) resp = conn.getresponse() if 200 <= resp.status <= 299: print " done." #elif res.status == 401 and not auth_headers: #print "need authentication." #auth_headers = AuthHandlerHackAround(url, res.msg, pwman).get_auth_headers() elif resp.status == 401: print " unauthorized." raise urllib2.URLError("Upload failed as unauthorized (%s)," " maybe wrong username or password?" % resp.reason) else: print " failed." raise urllib2.URLError("Unexpected HTTP status %d %s" % (resp.status, resp.reason)) resp.read() # eat response body finally: conn.close() except httplib.HTTPException, exc: raise urllib2.URLError(exc) def _check_url(url, allowed, mindepth=0): """Check if HTTP GET `url` returns a status code in `allowed`.""" if mindepth: scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) path = '/'.join(path.split('/')[:mindepth+1]).rstrip('/') + '/' url = urlparse.urlunparse((scheme, netloc, path, params, query, fragment)) trace("Checking URL '%(url)s'", url=url) try: # TODO: Check requests need to use login credentials with closing(urllib2.urlopen(url)) as handle: handle.read() code = handle.code if code not in allowed: raise urllib2.HTTPError(url, code, "Unallowed HTTP status %d (%s)" % (code, handle.msg), handle.headers, None) except urllib2.HTTPError, exc: code = exc.code if code not in allowed: raise trace("Code %(code)d OK for URL '%(url)s'", url=url, code=code) def _get_host_argument(fqdn): """ We have to jump through several hoops to get to our config section, which in turn is the only place where the host argument is available. """ import __main__ as dput # if only we would get passed our config section... config = dput.config # pylint: disable=no-member result = "" for section in config.sections(): if (config.has_option(section, "fqdn") and config.get(section, "fqdn") == fqdn and config.has_option(section, section)): result = config.get(section, section) return result def _get_config_data(fqdn): """Get configuration section for the chosen host, and CLI host parameters.""" # Without the patch applied, fall back to ugly hacks if not upload.extended_info: try: caller = sys._getframe(2) # pylint: disable=protected-access except AttributeError: pass # somehow not CPython else: config = caller.f_globals.get("config") host = caller.f_locals.get("host") del caller if config and host: upload.extended_info = dict(config=config, host=host) if upload.extended_info: host_config = dict(upload.extended_info["config"].items(upload.extended_info["host"])) host_argument = host_config.get(upload.extended_info["host"], "") else: host_config = {} host_argument = _get_host_argument(fqdn) log("WARN: Extended host configuration not available!") # Parse "host:key=val;..." argument from command line into a dict cli_params = dict(cgi.parse_qsl(host_argument.replace(',', ';'), keep_blank_values=True)) return host_config, cli_params def upload(fqdn, login, incoming, files_to_upload, # pylint: disable=too-many-arguments debug, dummy, progress=None): """Upload the files via WebDAV.""" assert sys.version_info >= (2, 5), "Your snake is a rotting corpse (Python 2.5+ required)" trace.debug = bool(debug) try: host_config, cli_params = _get_config_data(fqdn) login = _resolve_credentials(fqdn, login) # Handle .changes file changes_file = [i for i in files_to_upload if i.endswith(".changes")] if not changes_file: log("WARN: No changes file found in %(n)d files to upload", n=len(files_to_upload)) changes_file = None else: if len(changes_file) > 1: log("WARN: More than one changes file found in %(n)d files to upload," " taking the 1st:\n %(changes)s", n=len(files_to_upload), changes="\n ".join(changes_file)) changes_file = changes_file[0] # Prepare for uploading incoming, repo_params = _resolve_incoming(fqdn, login, incoming, changes=changes_file, cli_params=cli_params, repo_mappings=host_config.get("repo_mappings", "")) log("INFO: Destination base URL is\n %(url)s", url=urllib2.quote(incoming, safe=":/~;#")) repo_params.update(cli_params) mindepth = int(repo_params.get("mindepth", "0"), 10) overwrit
e = int(repo_params.get("overwrite", "0"), 10) # TODO: Add ability to enter missing password via terminal # auth_handler = PromptingPasswordMgr(login) # Special handling for integration test code if "integration-test" in cli_params: import pprint print "upload arguments = ", pprint.pprint(dict((k, v) for k, v in locals().iteritems() if k in ( "fqdn", "login", "incoming", "files_to_upload", "debug", "dummy", "pro
gress"))) print "host config = ", pprint.pprint(host_config) print "host arguments = ", pprint.pprint(cli_params) else: # TODO: "bintray" REST API support # POST /packages/:subject/:repo # POST /packages/:subject/:repo/:package/versions # Check if .changes file already exists if not overwrite and changes_file: try: _check_url(_file_url(changes_file, incoming), [404]) except urllib2.HTTPError, exc: raise dputhelper.DputUploadFatalException("Overwriting existing changes at '%s' not allowed: %s" % ( _file_url(changes_file, incoming), exc)) # Check for existence of target path with minimal depth if mindepth: try: _check_url(incoming, range(200, 300), mindepth=mindepth) except urllib2.HTTPError, exc: raise dputhelper.DputUploadFatalException("Required repository path '%s' doesn't exist: %s" % ( exc.filename, exc)) # Upload the files in the given order f
#-*- coding: utf-8 -*- # This file based on MIT licensed code at: https://github.com/imwilsonxu/fbone from functools import wraps from
flask import abort from flask.ext.login import current_user def admin_required(f): @wraps(f) def decorated_function(*args, **kwargs): if not current_user.is_admin(): ab
ort(403) return f(*args, **kwargs) return decorated_function
# coding=utf-8 # Copyright (C) Duncan Macleod (2015) # # This file is part of the GW DetChar python package. # # GW DetChar is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either vers
ion 3 of the License, or # (at your option) any later version. # # GW DetChar is distributed in the hope that it will be useful, #
but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with GW DetChar. If not, see <http://www.gnu.org/licenses/>. """Methods and utilties for performing Omega pipline scans See Chatterji 2005 [thesis] for details on the Q-pipeline. """ __author__ = 'Duncan Macleod <duncan.macleod@ligo.org>' __credits__ = 'Alex Urban <alexander.urban@ligo.org>' # -- imports ------------------------------------------------------------------ # import pyomega utils from .core import *
from setuptools import setup, find_packages from codecs import open from os import path here = path.abspath(path.dirname(__file__)) with open(path.join(here, 'DESCRIPTION.md'), encoding='utf-8') as f: long_description = f.read() setup( name='jestocke-mangopaysdk', version='3.0.6', description='A client library written in python to work with mangopay v2 api', long_description='This SDK is a client library for interacting with the Mangopay API.', url='https://github.com/Mangopay/mangopay2-python-sdk', author='Mangopay (www.mangopay.com)', author_email='support@mangopay.com',
license='MIT', classifiers=[ 'Intended Audience :: Developers', 'Topic :: Software Development :: Build Tools', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2', 'Programming La
nguage :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', ], keywords='mangopay api development emoney sdk', packages=find_packages(exclude=['contrib', 'docs', 'tests*']), install_requires=['requests', 'simplejson', 'blinker', 'six' ], extras_require={ 'dev': ['responses', 'nose', 'coverage', 'httplib2', 'pyopenssl', 'ndg-httpsclient', 'pyasn1', 'exam'], 'test': ['responses', 'nose', 'coverage', 'httplib2', 'pyopenssl', 'ndg-httpsclient', 'pyasn1', 'exam'], }, entry_points={ 'console_scripts': [ 'sample=sample:main', ], }, )
#!/usr/bin/env python from __future__ import print_function import os import subprocess syst_names = ['nominal','lightdown','lightup','ckv','calibneg','calibpos','calibshape'] flists = [syst_name+'.txt' for syst_name in syst_names] def process_dataset(ds_idx): flist = flists[ds_idx] fns = [] with open(flist, 'r') as inf: for line in inf: fns.append(line.rstrip('\n')) # create output directory out_base = '/pnfs/nova/scratch/users/
slin' out_path = os.path.join(out_base,'numuccinc','evt_matched_cafs',syst_names[ds_idx]) if not os.path.exists(out_path): os.makedirs(out_path) for fn in fns: bashout = subprocess.check_output('samweb locate-file {}'.format(fn), shell=True) bashout = bashout.split('\n') for line in bashout: if 'dcache' in line: location = line.split(':')[1] cmd = 'rsync -v {} {}'.
format(os.path.join(location, fn), out_path) os.system(cmd) if __name__ == '__main__': for i in range(len(syst_names)): process_dataset(i)
# -*- coding: utf-8 -*- # # HnTool rules - php # Copyright (C) 2009-2010 Candido Vieira <cvieira.br@gmail.com> # # This program is free
software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Softwar
e Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA # import os import ConfigParser import HnTool.modules.util from HnTool.modules.rule import Rule as MasterRule class Rule(MasterRule): def __init__(self, options): MasterRule.__init__(self, options) self.short_name="php" self.long_name="Checks security problems on php config file" self.type="config" self.required_files = ['/etc/php5/apache2/php.ini', '/etc/php5/cli/php.ini', '/etc/php.ini'] def requires(self): return self.required_files def analyze(self, options): check_results = self.check_results conf_files = self.required_files for php_conf in conf_files: if os.path.isfile(php_conf): config = ConfigParser.ConfigParser() try: config.read(php_conf) except ConfigParser.ParsingError, (errno, strerror): check_results['info'].append('Could not parse %s: %s' % (php_conf, strerror)) continue if not config.has_section('PHP'): check_results['info'].append('%s is not a PHP config file' % (php_conf)) continue if config.has_option('PHP', 'register_globals'): rg = config.get('PHP', 'register_globals').lower() if rg == 'on': check_results['medium'].append('Register globals is on (%s)' % (php_conf)) elif rg == 'off': check_results['ok'].append('Register globals is off (%s)' % (php_conf)) else: check_results['info'].append('Unknown value for register globals (%s)' % (php_conf)) else: check_results['info'].append('Register globals not found (%s)' % (php_conf)) if config.has_option('PHP', 'safe_mode'): sm = config.get('PHP', 'safe_mode').lower() if sm == 'on': check_results['low'].append('Safe mode is on (fake security) (%s)' % (php_conf)) elif sm == 'off': check_results['info'].append('Safe mode is off (%s)' % (php_conf)) else: check_results['info'].append('Unknown value for safe mode (%s)' % (php_conf)) else: check_results['info'].append('Safe mode not found (%s)' % (php_conf)) if config.has_option('PHP', 'display_errors'): de = config.get('PHP', 'display_errors').lower() if de == 'on': check_results['medium'].append('Display errors is on (stdout) (%s)' % (php_conf)) elif de == 'off': check_results['ok'].append('Display errors is off (%s)' % (php_conf)) elif de == 'stderr': check_results['info'].append('Display errors set to stderr (%s)' % (php_conf)) else: check_results['info'].append('Unknown value for display errors (%s)' % (php_conf)) else: check_results['info'].append('Display errors not found (%s)' % (php_conf)) if config.has_option('PHP', 'expose_php'): ep = config.get('PHP', 'expose_php').lower() if ep == 'on': check_results['low'].append('Expose PHP is on (%s)' % (php_conf)) elif ep == 'off': check_results['ok'].append('Expose PHP is off (%s)' % (php_conf)) else: check_results['info'].append('Unknown value for expose PHP (%s)' % (php_conf)) else: check_results['info'].append('Expose PHP not found (%s)' % (php_conf)) return check_results
_branch: description: - Clone only the history leading to the tip of the specified I(branch). type: bool default: 'no' version_added: '2.11' track_submodules: description: - If C(yes), submodules will track the latest commit on their master branch (or other branch specified in .gitmodules). If C(no), submodules will be kept at the revision specified by the main project. This is equivalent to specifying the --remote flag to git submodule update. type: bool default: 'no' version_added: "1.8" verify_commit: description: - If C(yes), when cloning or checking out a I(version) verify the signature of a GPG signed commit. This requires git version>=2.1.0 to be installed. The commit MUST be signed and the public key MUST be present in the GPG keyring. type: bool default: 'no' version_added: "2.0" archive: description: - Specify archive file path with extension. If specified, creates an archive file of the specified format containing the tree structure for the source tree. Allowed archive formats ["zip", "tar.gz", "tar", "tgz"]. - This will clone and perform git archive from local directory as not all git servers support git archive. type: path version_added: "2.4" archive_prefix: description: - Specify a prefix to add to each file path in archive. Requires I(archive) to be specified. version_added: "2.10" type: str separate_git_dir: description: - The path to place the cloned repository. If specified, Git repository can be separated from working tree. type: path version_added: "2.7" gpg_whitelist: description: - A list of trusted GPG fingerprints to compare to the fingerprint of the GPG-signed commit. - Only used when I(verify_commit=yes). - Use of this feature requires Git 2.6+ due to its reliance on git's C(--raw) flag to C(verify-commit) and C(verify-tag). type: list elements: str default: [] version_added: "2.9" requirements: - git>=1.7.1 (the command line tool) notes: - "If the task seems to be hanging, first verify remote host is in C(known_hosts). SSH will prompt user to authorize the first contact with a remote host. To avoid this prompt, one solut
ion is to use the option accept_hostkey. Another solution is to add the remote host public key in C(/etc/ssh/ssh_known_hosts) before calling the git module, with the following command: ssh-keyscan -H remote_host.com >> /etc/ssh/s
sh_known_hosts." - Supports C(check_mode). ''' EXAMPLES = ''' - name: Git checkout ansible.builtin.git: repo: 'https://foosball.example.org/path/to/repo.git' dest: /srv/checkout version: release-0.22 - name: Read-write git checkout from github ansible.builtin.git: repo: git@github.com:mylogin/hello.git dest: /home/mylogin/hello - name: Just ensuring the repo checkout exists ansible.builtin.git: repo: 'https://foosball.example.org/path/to/repo.git' dest: /srv/checkout update: no - name: Just get information about the repository whether or not it has already been cloned locally ansible.builtin.git: repo: 'https://foosball.example.org/path/to/repo.git' dest: /srv/checkout clone: no update: no - name: Checkout a github repo and use refspec to fetch all pull requests ansible.builtin.git: repo: https://github.com/ansible/ansible-examples.git dest: /src/ansible-examples refspec: '+refs/pull/*:refs/heads/*' - name: Create git archive from repo ansible.builtin.git: repo: https://github.com/ansible/ansible-examples.git dest: /src/ansible-examples archive: /tmp/ansible-examples.zip - name: Clone a repo with separate git directory ansible.builtin.git: repo: https://github.com/ansible/ansible-examples.git dest: /src/ansible-examples separate_git_dir: /src/ansible-examples.git - name: Example clone of a single branch ansible.builtin.git: single_branch: yes branch: master - name: Avoid hanging when http(s) password is missing ansible.builtin.git: repo: https://github.com/ansible/could-be-a-private-repo dest: /src/from-private-repo environment: GIT_TERMINAL_PROMPT: 0 # reports "terminal prompts disabled" on missing password # or GIT_ASKPASS: /bin/true # for git before version 2.3.0, reports "Authentication failed" on missing password ''' RETURN = ''' after: description: Last commit revision of the repository retrieved during the update. returned: success type: str sample: 4c020102a9cd6fe908c9a4a326a38f972f63a903 before: description: Commit revision before the repository was updated, "null" for new repository. returned: success type: str sample: 67c04ebe40a003bda0efb34eacfb93b0cafdf628 remote_url_changed: description: Contains True or False whether or not the remote URL was changed. returned: success type: bool sample: True warnings: description: List of warnings if requested features were not available due to a too old git version. returned: error type: str sample: git version is too old to fully support the depth argument. Falling back to full checkouts. git_dir_now: description: Contains the new path of .git directory if it is changed. returned: success type: str sample: /path/to/new/git/dir git_dir_before: description: Contains the original path of .git directory if it is changed. returned: success type: str sample: /path/to/old/git/dir ''' import filecmp import os import re import shlex import stat import sys import shutil import tempfile from distutils.version import LooseVersion from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.six import b, string_types from ansible.module_utils._text import to_native, to_text from ansible.module_utils.common.process import get_bin_path def relocate_repo(module, result, repo_dir, old_repo_dir, worktree_dir): if os.path.exists(repo_dir): module.fail_json(msg='Separate-git-dir path %s already exists.' % repo_dir) if worktree_dir: dot_git_file_path = os.path.join(worktree_dir, '.git') try: shutil.move(old_repo_dir, repo_dir) with open(dot_git_file_path, 'w') as dot_git_file: dot_git_file.write('gitdir: %s' % repo_dir) result['git_dir_before'] = old_repo_dir result['git_dir_now'] = repo_dir except (IOError, OSError) as err: # if we already moved the .git dir, roll it back if os.path.exists(repo_dir): shutil.move(repo_dir, old_repo_dir) module.fail_json(msg=u'Unable to move git dir. %s' % to_text(err)) def head_splitter(headfile, remote, module=None, fail_on_error=False): '''Extract the head reference''' # https://github.com/ansible/ansible-modules-core/pull/907 res = None if os.path.exists(headfile): rawdata = None try: f = open(headfile, 'r') rawdata = f.readline() f.close() except Exception: if fail_on_error and module: module.fail_json(msg="Unable to read %s" % headfile) if rawdata: try: rawdata = rawdata.replace('refs/remotes/%s' % remote, '', 1) refparts = rawdata.split(' ') newref = refparts[-1] nrefparts = newref.split('/', 2) res = nrefparts[-1].rstrip('\n') except Exception: if fail_on_error and module: module.fail_json(msg="Unable to split head from '%s'" % rawdata) return res def unfrackgitpath(path): if path is None: return None # copied from ansible.utils.path return
#!/usr/bin/env python # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # from __future__ import print_function import optparse from proton import Url from proton.handlers import MessagingHandler from proton.reactor import Container from proton_extension import Capabilities class Recv(MessagingHandler): def __init__(self, url, id, subscription, count): super(Recv, self).__init__() self.url = Url(url) self.id = id self.subscription = subscription self.expected = count self.received = 0 def on_start(self, event): # shared, container-scoped id, non-durable event.container.container_id = self.id event.container.create_receiver(self.url, name=self.subscription, options=[Capabilities('shared')]) def on_message(self, event): if self.expected == 0 or self.received < self.expected: print(event.message.body) self.received += 1 if self.received == self.expected: event.receiver.close() event.connection.close() parser = optparse.OptionParser(usage="usage: %prog [options]") parser.add_option("-a", "--address", default="localhost:5672/examples", help="addre
ss from which messages are received (default %default)") parser.add_option("-m", "--messages", type="int", default=100, help="number of messages to r
eceive; 0 receives indefinitely (default %default)") parser.add_option("-i", "--id", default="client-d", help="client's connection identifier (default %default)") parser.add_option("-s", "--subscription", default="subscription-d", help="client's subscription identifier (default %default)") opts, args = parser.parse_args() try: Container(Recv(opts.address, opts.id, opts.subscription, opts.messages)).run() except KeyboardInterrupt: pass
# coding=utf-8 # Copyright 2014 Pants project c
ontributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) from pants.build_graph.build_graph import sort_targets from pants.task.console_task import ConsoleTask class SortTargets(ConsoleTask): """Topologically sort the targets."
"" @classmethod def register_options(cls, register): super(SortTargets, cls).register_options(register) register('--reverse', action='store_true', default=False, help='Sort least-dependent to most-dependent.') def console_output(self, targets): sorted_targets = sort_targets(targets) # sort_targets already returns targets in reverse topologically sorted order. if not self.get_options().reverse: sorted_targets = reversed(sorted_targets) for target in sorted_targets: if target in self.context.target_roots: yield target.address.reference()
import numpy as np from pyraf import iraf from pyraf.iraf import kepler ''' Useful functions for Kepler light curve processing Use this with the program 'makelc.py' Originally by Jean McKeever Edited and improved by Meredith Rawls ''' # calculate orbital phase # times must be a list of observation times in the same units as BJD0 # it returns 'phases': orbital phases from 0 to 1 # it also returns 'phasedoubles': twice as long as 'phases' and now from 0 to 2 def phasecalc(times, period=100, BJD0=2454833): phases = [] cycles = [] for i in range(0, len(times)): fracP = (times[i] - BJD0) / period if fracP < 0: phases.append(fracP % 1) cycles.append(int(fracP)) else: phases.append(fracP % 1) cycles.append(int(fracP) + 1) #print(fracP, phases[i]) return phases # remove long-term trends # uses a simple 3rd-order polynomial by default # operates on one array at a time (e.g., after all quarters have been combined) def long_detrend(t, flux, order=3): model = np.polyfit(t, flux, order) fit = np.zeros(len(t)) # apply the model coefficients to create the fit for i in range(0, order+1): fit += model[i]*np.power(t, (order-i)) #flux = flux/fit*1e6 - 1e6 # put it in ppm >:( flux = flux/fit*np.median(flux) # don't put it in ppm, because ppm is annoying return t, flux # Delete any observation that has one or more NaN values. # Assumes there are six parallel arrays... use dummy arrays if you don't have 6 # columns of interest to operate on (sorry). # Operates on one quarter at a time def nan_delete(time, flux, ferr, other1, other2, other3): a = [] a = [time, flux, ferr, other1, other2, other3] atrans = np.transpose(a) newatrans = [] newa = [] for row in atrans: # only save rows that DON'T contain a NaN value if np.isnan(row).any() != True: newatrans.append(row) newa = np.transpose(newatrans) newtime = newa[0] newflux = newa[1] newferr = newa[2] newother1 = newa[3] newother2 = newa[4] newother3 = newa[5] return newtime, newflux, newferr, newother1, newother2, newother3 # Put data from different quarters on the same AVERAGE level # operates on a list of arrays (multiple quarters) all at once # DON'T USE THIS ONE # def normalize_qtr_avg(flux): # sumflux = 0 # npts = 0 # for arr in flux: # sumflux += np.nansum(arr) # npts += len(arr[arr>0]) # avgflux = sumflux/npts # overall average for all quarters # for arr in flux: # avg_arr = np.mean(arr[arr>0]) # average for an individual quarter # arr += avgflux - avg_arr # return flux # Put data from different quarters on the sa
me MEDIAN level # operates on a list of arrays (multiple quarters) all at once def normalize_qtr_med(flux): sumflux = 0 npts = 0 for arr in flux: sumflux += np.nansum(arr) npts += len(arr) avgflux = sumflux/npts # overall average for all quarters for arr in flux: med_arr = np.median(arr) # median for an individual quarter arr += avgflux - med_arr return flux # Line up the gaps within each quarter # operates on a list of arrays (multiple quarters) all at once def lineup_qtr_gaps(time, flux,
maskstart, maskend): diffs = np.zeros(len(time) - 1) for i in range(0,len(time) - 1): # loop through quarters # calculate differences between flux points at quarter start/end start = 0 end = -1 for idx, mask in enumerate(maskstart): while (time[i][end] > maskstart[idx] and time[i][end] < maskend[idx]): #print('end', end, time[i][end], maskstart[idx], maskend[idx]) end -= 1 while (time[i+1][start] > maskstart[idx] and time[i+1][start] < maskend[idx]): #print('start', start, time[i+1][start], maskstart[idx], maskend[idx]) start += 1 diffs[i] = (flux[i][end] - flux[i+1][start]) # maxi will find the point with the largest change in flux maxi = lambda z: np.where(max(abs(z)) == abs(z))[0][0] cntr = 0 # counter max_val = max(abs(diffs)) while max_val > 100: #original value here was 100 # this is the index of the largest change in flux, so it needs adjusting ind = maxi(diffs) # this is the actual change in flux associated with that index diff = diffs[ind] # adjust the flux at this spot and its neighbor so they meet flux[ind] = flux[ind] - diff/2.0 flux[ind+1] = flux[ind+1] + diff/2.0 diffs = np.zeros(len(time) - 1) for i in range(0, len(time) - 1): # calculate differences between flux points at quarter start/end, again start = 0 end = -1 for idx, mask in enumerate(maskstart): while time[i][end] > maskstart[idx] and time[i][end] < maskend[idx]: #print('end', end, time[i][end], maskstart[idx], maskend[idx]) end -= 1 while time[i+1][start] > maskstart[idx] and time[i+1][start] < maskend[idx]: #print('start', start, time[i+1][start], maskstart[idx], maskend[idx]) start += 1 diffs[i] = (flux[i][end] - flux[i+1][start]) cntr += 1 # count how many times this while-loop happens max_val = max(abs(diffs)) # print(max_val, cntr) return time, flux # performs detrending with cotrending basis vectors (cbvs) # lcin and lcout must both be FITS filenames def kepcotrend(lcin, lcout, cbvfile, maskfile=''): iraf.kepcotrend(infile=lcin, outfile=lcout, cbvfile=cbvfile, vectors='1 2', method='simplex', fitpower=1, iterate='yes', sigmaclip=2.0, maskfile=maskfile, scinterp='None', plot='no', clobber='yes', verbose='no') return
overrides from NEP-18.""" import collections import functools import os import textwrap from numpy.core._multiarray_umath import ( add_docstring, implement_array_function, _get_implementing_args) from numpy.compat._inspect import getargspec ARRAY_FUNCTION_ENABLED = bool( int(os.environ.get('NUMPY_EXPERIMENTAL_ARRAY_FUNCTION', 1))) array_function_like_doc = ( """like : array_like Reference object to allow the creation of arrays which are not NumPy arrays. If an array-like passed in as ``like`` supports the ``__array_function__`` protocol, the result will be defined by it. In this case, it ensures the creation of an array object compatible with that passed in via this argument. .. note:: The ``like`` keyword is an experimental feature pending on acceptance of :ref:`NEP 35 <NEP35>`.""" ) def set_array_function_like_doc(public_api): if public_api.__doc__ is not None: public_api.__doc__ = public_api.__doc__.replace( "${ARRAY_FUNCTION_LIKE}", array_function_like_doc, ) return public_api add_docstring( implement_array_function, """ Implement a function with checks for __array_function__ overrides. All arguments are required, and can only be passed by position. Parameters ---------- implementation : function Function that implements the operation on NumPy array without overrides when called like ``implementation(*args, **kwargs)``. public_api : function Function exposed by NumPy's public API originally called like ``public_api(*args, **kwargs)`` on which arguments are now being checked. relevant_args : iterable Iterable of arguments to check for __array_function__ methods. args : tuple Arbitrary positional arguments originally passed into ``public_api``. kwargs : dict Arbitrary keyword arguments originally passed into ``public_api``. Returns ------- Result from calling ``implementation()`` or an ``__array_function__`` method, as appropriate. Raises ------ TypeError : if no implementation is found. """) # exposed for testing purposes; used internally by implement_array_function add_docstring( _get_implementing_args, """ Collect arguments on which to call __array_function__. Parameters ---------- relevant_args : iterable of array-like Iterable of possibly array-like arguments to check for __array_function__ methods. Returns ------- Sequence of arguments with __array_function__ methods, in the order in which they should be called. """) ArgSpec = collections.namedtuple('ArgSpec', 'args varargs keywords defaults') def verify_matching_signatures(implementation, dispatcher): """Verify that a dispatcher function has the right signature.""" implementation_spec = ArgSpec(*getargspec(implementation)) dispatcher_spec = ArgSpec(*getargspec(dispatcher)) if (implementation_spec.args != dispatcher_spec.args or implementation_spec.varargs != dispatcher_spec.varargs or implementation_spec.keywords != dispatcher_spec.keywords or (bool(implementation_spec.defaults) != bool(dispatcher_spec.defaults)) or (implementation_spec.defaults is not None and len(implementation_spec.defaults) != len(dispatcher_spec.defaults))): raise RuntimeError('implementation and dispatcher for %s have ' 'different function signatures' % implementation) if implementation_spec.defaults is not None: if dispatcher_spec.defaults != (None,) * len(dispatcher_spec.defaults): raise RuntimeError('dispatcher functions can only use None for ' 'default argument values') def set_module(module): """Decorator for overriding __module__ on a function or class. Example usage:: @set_module('numpy') def example(): pass assert example.__module__ == 'numpy' """ def decorator(func): if module is not None: func.__module__ = module return func return decorator # Call textwrap.dedent here instead of in the function so as to avoid # calling dedent multiple times on the same text _wrapped_func_source = textwrap.dedent(""" @functools.wraps(implementation) def {name}(*args, **kwargs): relevant_args = dispatcher(*args, **kwargs) return implement_array_function( implementation, {name}, relevant_args, args, kwargs) """) def array_function_dispatch(dispatcher, module=None, verify=True, docs_from_dispatcher=False): """Decorator for adding dispatch with the __array_function__ protocol. See NEP-18 for example usage. Parameters ---------- dispatcher : callable Function that when called like ``dispatcher(*args, **kwargs)`` with arguments from the NumPy function call returns an iterable of array-like arguments to check for ``__array_function__``. module : str, optional __module__ attribute to set on new function, e.g., ``module='numpy'``. By default, module is copied from the decorated function. verify : bool, optional If True, verify the that the signature of the dispatcher and decorated function signatures match exactly: all required and optional arguments should appear in order with the same names, but the default values for all optional arguments should be ``None``. Only disable verification if the dispatcher's signature needs to deviate for some particular reason, e.g., because the function has a signature like ``func(*args, **kwargs)``. docs_from_dispatcher : bool, optional If True, copy docs from the dispatcher function onto the dispatched function, rather than from the implementation. This is useful for functions defined in C, which otherwise don't have docstrings. Returns ------- Function suitable for decorating the implementation of a NumPy function. """ if not ARRAY_FUNCTION_ENABLED: def decorator(implementation): if docs_from_dispatcher: add_docstring(implementation, dispatcher.__doc__) if module is not None: implementation.__module__ = module return implementation return decorator
def decorator(implementation): if verify:
verify_matching_signatures(implementation, dispatcher) if docs_from_dispatcher: add_docstring(implementation, dispatcher.__doc__) # Equivalently, we could define this function directly instead of using # exec. This version has the advantage of giving the helper function a # more interpettable name. Otherwise, the original function does not # show up at all in many cases, e.g., if it's written in C or if the # dispatcher gets an invalid keyword argument. source = _wrapped_func_source.format(name=implementation.__name__) source_object = compile( source, filename='<__array_function__ internals>', mode='exec') scope = { 'implementation': implementation, 'dispatcher': dispatcher, 'functools': functools, 'implement_array_function': implement_array_function, } exec(source_object, scope) public_api = scope[implementation.__name__] if module is not None: public_api.__module__ = module public_api._implementation = implementation return public_api return decorator def array_function_from_dispatcher( implementation, module=None, verify=True, docs_from_dispatcher=True): """Like array_function_dispatcher, but with function arguments flipped.""" def decorator(dispatcher): return array_function_dispatch( dispatcher, module, verify=verify, docs_from_dispatcher=docs_
""" Support to interact with a Music Player Daemon. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/media_player.mpd/ """ import logging import socket import voluptuous as vol from homeassistant.components.media_player import ( MEDIA_TYPE_MUSIC, SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, PLATFORM_SCHEMA, SUPPORT_PREVIOUS_TRACK, SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_SET, SUPPORT_PLAY_MEDIA, MEDIA_TYPE_PLAYLIST, MediaPlayerDevice) from homeassistant.const import ( STATE_OFF, STATE_PAUSED, STATE_PLAYING, CONF_PORT, CONF_PASSWORD, CONF_HOST) import homeassistant.helpers.config_validation as cv REQUIREMENTS = ['python-mpd2==0.5.5'] _LOGGER = logging.getLogger(__name__) CONF_LOCATION = 'location' DEFAULT_LOCATION = 'MPD' DEFAULT_PORT = 6600 SUPPORT_MPD = SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_TURN_OFF | \ SUPPORT_TURN_ON | SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | \ SUPPORT_PLAY_MEDIA PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): cv.string, vol.Optional(CONF_LOCATION, default=DEFAULT_LOCATION): cv.string, vol.Optional(CONF_PASSWORD): cv.string, vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, }) # pylint: disable=unused-argument def setup_platform(hass, config, add_devices, discovery_info=None): """Setup the MPD platform.""" daemon = config.get(CONF_HOST) port = config.get(CONF_PORT) location = config.get(CONF_LOCATION) password = config.get(CONF_PASSWORD) import mpd # pylint: disable=no-member try: mpd_client = mpd.MPDClient() mpd_client.connect(daemon, port) if password is not None: mpd_client.password(password) mpd_client.close() mpd_client.disconnect() except socket.error: _LOGGER.error("Unable to connect to MPD") return False except mpd.CommandError as error: if "incorrect password" in str(error): _LOGGER.error("MPD reported incorrect password") return False else: raise add_devices([MpdDevice(daemon, port, location, password)]) class MpdDevice(MediaPlayerDevice): """Representation of a MPD server.""" # pylint: disable=no-member def __init__(self, server, port, location, password): """Initialize the MPD device.""" import mpd self.server = server self.port = port self._name = location self.password = password self.status = None self.currentsong = None self.client = mpd.MPDClient() self.client.timeout = 10 self.client.idletimeout = None self.update() def update(self): """Get the latest data and update the state.""" import mpd try: self.status = self.client.status() self.currentsong = self.client.currentsong() except (mpd.ConnectionError, BrokenPipeError, ValueError): # Cleanly disconnect in case connection is not in valid state try: self.client.disconnect() except mpd.ConnectionError: pass self.client.connect(self.server, self.port) if self.password is not None: self.client.password(self.password) self.status = self.client.status() self.currentsong = self.client.currentsong() @property def name(self): """Return the name of the device.""" return self._name @property def state(self): """Return the media state.""" if self.status['state'] == 'play': return STATE_PLAYING elif self.status['state'] == 'pause': return STATE_PAUSED else: return STATE_OFF @property def media_content_id(self): """Content ID of current playing media.""" return self.currentsong['id'] @property def media_content_type(self): """Content type of current playing media.""" return MEDIA_TYPE_MUSIC @property def media_duration(self): """Duration of current playing media in seconds.""" # Time does not exist for streams return self.currentsong.get('time') @property def media_title(self): """Title of current playing media.""" name = self.currentsong.get('name', None) title = self.currentsong.get('title', None) if name is None and title is None: return "None" elif name is None: return title elif title is None: return name else: return '{}: {}'.format(name, title) @property def media_artist(self): """Artist of current playing media (Music track only).""" return self.currentsong.get('artist') @property def media_album_name(self): """Album of current playing media (Music track only).""" return self.currentsong.get('album') @property def volume_level(self): """Return the volume level.""" return int(self.status['volume'])/100 @property def supported_media_commands(self): """Flag of media commands that are supported.""" return SUPPORT_MPD def turn_off(self): """Service to send the MPD the command to stop playing.""" self.client.stop() def turn_on(self): """Service to send the MPD the command to start playing.""" self.client.play() def set_volume_level(self, volume): """Set volume of media player.""" self.client.setvol(int(volume * 100)) def volume_up(self): """Service to send the MPD the command for volume up.""" current_volume = int(self.status['volume']) if current_volume <= 100: self.client.setvol(current_volume + 5) def volume_down(self): """Service to send the MPD the command for volume down.""" current_volume = int(self.status['volume']) if current_
volume >= 0: self.cli
ent.setvol(current_volume - 5) def media_play(self): """Service to send the MPD the command for play/pause.""" self.client.pause(0) def media_pause(self): """Service to send the MPD the command for play/pause.""" self.client.pause(1) def media_next_track(self): """Service to send the MPD the command for next track.""" self.client.next() def media_previous_track(self): """Service to send the MPD the command for previous track.""" self.client.previous() def play_media(self, media_type, media_id, **kwargs): """Send the media player the command for playing a playlist.""" _LOGGER.info(str.format("Playing playlist: {0}", media_id)) if media_type == MEDIA_TYPE_PLAYLIST: self.client.clear() self.client.load(media_id) self.client.play() else: _LOGGER.error(str.format("Invalid media type. Expected: {0}", MEDIA_TYPE_PLAYLIST))
lf.assertIn(addr, ['127.0.0.1', '::1']) self.assertTrue(port, 80) handler.send(True) while True: buf = sock.recv(65535) if buf.endswith(b'\r\n\r\n'): break sock.sendall(b'HTTP/1.1 200 OK\r\n' b'Server: SocksTestServer\r\n' b'Content-Length: 0\r\n' b'\r\n') sock.close() self._start_server(request_handler) proxy_url = "socks5://%s:%s" % (self.host, self.port) pm = socks.SOCKSProxyManager(proxy_url) response = pm.request('GET', 'http://localhost') self.assertEqual(response.status, 200) self.assertEqual(response.data, b'') self.assertEqual(response.headers['Server'], 'SocksTestServer') def test_correct_header_line(self): def request_handler(listener): sock = listener.accept()[0] handler = handle_socks5_negotiation(sock, negotiate=False) addr, port = next(handler) self.assertEqual(addr, b'example.com') self.assertTrue(port, 80) handler.send(True) buf = b'' while True: buf += sock.recv(65535) if buf.endswith(b'\r\n\r\n'): break self.assertTrue(buf.startswith(b'GET / HTTP/1.1')) self.assertTrue(b'Host: example.com' in buf) sock.sendall(b'HTTP/1.1 200 OK\r\n' b'Server: SocksTestServer\r\n' b'Content-Length: 0\r\n' b'\r\n') sock.close() self._start_server(request_handler) proxy_url = "socks5h://%s:%s" % (self.host, self.port) pm = socks.SOCKSProxyManager(proxy_url) self.addCleanup(pm.clear) response = pm.request('GET', 'http://example.com') self.assertEqual(response.status, 200) def test_connection_timeouts(self): event = threading.Event() def request_handler(listener): event.wait() self._start_server(request_handler) proxy_url = "socks5h://%s:%s" % (self.host, self.port) pm = socks.SOCKSProxyManager(proxy_url) self.addCleanup(pm.clear) self.assertRaises( ConnectTimeoutError, pm.request, 'GET', 'http://example.com', timeout=0.001, retries=False ) event.set() def test_connection_failure(self): event = threading.Event() def request_handler(listener): listener.close() event.set() self._start_server(request_handler) proxy_url = "socks5h://%s:%s" % (self.host, self.port) pm = socks.SOCKSProxyManager(proxy_url) self.addCleanup(pm.clear) event.wait() self.assertRaises( NewConnectionError, pm.request, 'GET', 'http://example.com', retries=False ) def test_proxy_rejection(self): evt = threading.Event() def request_handler(listener): sock = listener.accept()[0] handler = handle_socks5_negotiation(sock, negotiate=False) addr, port = next(handler) handler.send(False) evt.wait() sock.close() self._start_server(request_handler) proxy_url = "socks5h://%s:%s" % (self.host, self.port) pm = socks.SOCKSProxyManager(proxy_url) self.addCleanup(pm.clear) self.assertRaises( NewConnectionError, pm.request, 'GET', 'http://example.com', retries=False ) evt.set() def test_socks_with_password(self): def request_handler(listener): sock = listener.accept()[0] handler = handle_socks5_negotiation( sock, negotiate=True, username=b'user', password=b'pass' ) addr, port = next(handler) self.assertEqual(addr, '16.17.18.19') self.assertTrue(port, 80) handler.send(True) while True: buf = sock.recv(65535) if buf.endswith(b'\r\n\r\n'): break sock.sendall(b'HTTP/1.1 200 OK\r\n' b'Server: SocksTestServer\r\n' b'Content-Length: 0\r\n' b'\r\n') sock.close() self._start_server(request_handler) proxy_url = "socks5://%s:%s" % (self.host, self.port) pm = socks.SOCKSProxyManager(proxy_url, username='user', password='pass') self.addCleanup(pm.clear) response = pm.request('GET', 'http://16.17.18.19') self.assertEqual(response.status, 200) self.assertEqual(response.data, b'') self.assertEqual(response.headers['Server'], 'SocksTestServer') def test_socks_with_invalid_password(self): def request_handler(listener): sock = listener.accept()[0] handler = handle_socks5_negotiation( sock, negotiate=True, username=b'user', password=b'pass' )
next(handler) self._start_server(request_handler) proxy_url = "socks5h://%s:%s" % (self.host, self.port) pm = socks.SOCKSProxyManager(proxy_url, username='user', password='badpass') self.addCleanup(pm.clear) try: pm.request('GET', 'http://example.com', retries=False) except NewConnectionError as e: self.assertTr
ue("SOCKS5 authentication failed" in str(e)) else: self.fail("Did not raise") def test_source_address_works(self): expected_port = _get_free_port(self.host) def request_handler(listener): sock = listener.accept()[0] self.assertEqual(sock.getpeername()[0], '127.0.0.1') self.assertEqual(sock.getpeername()[1], expected_port) handler = handle_socks5_negotiation(sock, negotiate=False) addr, port = next(handler) self.assertEqual(addr, '16.17.18.19') self.assertTrue(port, 80) handler.send(True) while True: buf = sock.recv(65535) if buf.endswith(b'\r\n\r\n'): break sock.sendall(b'HTTP/1.1 200 OK\r\n' b'Server: SocksTestServer\r\n' b'Content-Length: 0\r\n' b'\r\n') sock.close() self._start_server(request_handler) proxy_url = "socks5://%s:%s" % (self.host, self.port) pm = socks.SOCKSProxyManager( proxy_url, source_address=('127.0.0.1', expected_port) ) self.addCleanup(pm.clear) response = pm.request('GET', 'http://16.17.18.19') self.assertEqual(response.status, 200) class TestSOCKS4Proxy(IPV4SocketDummyServerTestCase): """ Test the SOCKS proxy in SOCKS4 mode. Has relatively fewer tests than the SOCKS5 case, mostly because once the negotiation is done the two cases behave identically. """ def test_basic_request(self): def request_handler(listener): sock = listener.accept()[0] handler = handle_socks4_negotiation(sock) addr, port = next(handler) self.assertEqual(addr, '16.17.18.19') self.assertTrue(port, 80) handler.send(True) while True: buf = sock.recv(65535) if buf.endswith(b'\r\n\r\n'): break sock.sendall(b'HTTP/1.1 200 OK\r\n' b'Server: SocksTestServer\r\n' b'Content-Length: 0\r\n' b'\r\n') sock.close() self._start_server(request_handler) proxy_url = "socks4://%s:%s" % (self.host, self.port) pm = socks.SOCKSProxyManager(proxy_url) self.addCleanup(pm.clear) response = pm.request('GET', 'http://16.17.18.19') self.assertEqual(r
ermissions = ignore_permissions self.user = user or frappe.session.user self.update = update self.user_settings_fields = copy.deepcopy(self.fields) if user_settings: self.user_settings = json.loads(user_settings) if query: result = self.run_custom_query(query) else: result = self.build_and_run() if with_comment_count and not as_list and self.doctype: self.add_comment_count(result) if save_user_settings: self.save_user_settings_fields = save_user_settings_fields self.update_user_settings() return result def build_and_run(self): args = self.prepare_args() args.limit = self.add_limit() if args.conditions: args.conditions = "where " + args.conditions if self.distinct: args.fields = 'distinct ' + args.fields query = """select %(fields)s from %(tables)s %(conditions)s %(group_by)s %(order_by)s %(limit)s""" % args return frappe.db.sql(query, as_dict=not self.as_list, debug=self.debug, update=self.update) def prepare_args(self): self.parse_args() self.sanitize_fields() self.extract_tables() self.set_optional_columns() self.build_conditions() args = frappe._dict() if self.with_childnames: for t in self.tables: if t != "`tab" + self.doctype + "`": self.fields.append(t + ".name as '%s:name'" % t[4:-1]) # query dict args.tables = self.tables[0] # left join parent, child tables for child in self.tables[1:]: args.tables += " {join} {child} on ({child}.parent = {main}.name)".format(join=self.join, child=child, main=self.tables[0]) if self.grouped_or_conditions: self.conditions.append("({0})".format(" or ".join(self.grouped_or_conditions))) args.conditions = ' and '.join(self.conditions) if self.or_conditions: args.conditions += (' or ' if args.conditions else "") + \ ' or '.join(self.or_conditions) self.set_field_tables() args.fields = ', '.join(self.fields) self.set_order_by(args) self.validate_order_by_and_group_by(args.order_by) args.order_by = args.order_by and (" order by " + args.order_by) or "" self.validate_order_by_and_group_by(self.group_by) args.group_by = self.group_by and (" group by " + self.group_by) or "" return args def parse_args(self): """Convert fields and filters from strings to list, dicts""" if isinstance(self.fields, string_types): if self.fields == "*": self.fields = ["*"] else: try: self.fields = json.loads(self.fields) except ValueError: self.fields = [f.strip() for f in self.fields.split(",")] for filter_name in ["filters", "or_filters"]: filters = getattr(self, filter_name) if isinstance(filters, string_types): filters = json.loads(filters) if isinstance(filters, dict): fdict = filters filters = [] for key, value in iteritems(fdict): filters.append(make_filter_tuple(self.doctype, key, value)) setattr(self, filter_name, filters) def sanitize_fields(self): ''' regex : ^.*[,();].* purpose : The regex will look for malicious patterns like `,`, '(', ')', ';' in each field which may leads to sql injection. example : field = "`DocType`.`issingle`, version()" As field contains `,` and mysql function `version()`, with the help of regex the system will filter out this field. ''' regex = re.compile('^.*[,();].*') blacklisted_keywords = ['select', 'create', 'insert', 'delete', 'drop', 'update', 'case'] blacklisted_functions = ['concat', 'concat_ws', 'if', 'ifnull', 'nullif', 'coalesce', 'connection_id', 'current_user', 'database', 'last_insert_id', 'session_user', 'system_user', 'user', 'version'] def _raise_exception(): frappe.throw(_('Cannot use sub-query or function in fields'), frappe.DataError) for field in self.fields: if regex.match(field): if any(keyword in field.lower() for keyword in blacklisted_keywords): _raise_exception() if any("{0}(".format(keyword) in field.lower() \ for keyword in blacklisted_functions): _raise_exception() def extract_tables(self): """extract tables from fields""" self.tables = ['`tab' + self.doctype + '`'] # add tables from fields if self.fields: for f in self.fields: if ( not ("tab" in f and "." in f) ) or ("locate(" in f) or ("count(" in f): continue table_name = f.split('.')[0] if table_name.lower().startswith('group_concat('): table_name = table_name[13:] if table_name.lower().startswith('ifnull('): table_name = table_name[7:] if not table_name[0]=='`': table_name = '`' + table_name + '`' if not table_name in self.tables: self.append_table(table_name) def append_table(self, table_name): self.tables.append(table_name) doctype = table_name[4:-1] if (not self.flags.ignore_permissions) and (not frappe.has_per
mission(doctype)): frappe.flags.error_message = _('Insufficient Permission for {0}').format(frappe.bold(doctype)) raise frappe.PermissionError(doctype) def set_field_tables(self): '''If there are more than one table, the fieldname must not
be ambigous. If the fieldname is not explicitly mentioned, set the default table''' if len(self.tables) > 1: for i, f in enumerate(self.fields): if '.' not in f: self.fields[i] = '{0}.{1}'.format(self.tables[0], f) def set_optional_columns(self): """Removes optional columns like `_user_tags`, `_comments` etc. if not in table""" columns = frappe.db.get_table_columns(self.doctype) # remove from fields to_remove = [] for fld in self.fields: for f in optional_fields: if f in fld and not f in columns: to_remove.append(fld) for fld in to_remove: del self.fields[self.fields.index(fld)] # remove from filters to_remove = [] for each in self.filters: if isinstance(each, string_types): each = [each] for element in each: if element in optional_fields and element not in columns: to_remove.append(each) for each in to_remove: if isinstance(self.filters, dict): del self.filters[each] else: self.filters.remove(each) def build_conditions(self): self.conditions = [] self.grouped_or_conditions = [] self.build_filter_conditions(self.filters, self.conditions) self.build_filter_conditions(self.or_filters, self.grouped_or_conditions) # match conditions if not self.flags.ignore_permissions: match_conditions = self.build_match_conditions() if match_conditions: self.conditions.append("(" + match_conditions + ")") def build_filter_conditions(self, filters, conditions, ignore_permissions=None): """build conditions from user filters""" if ignore_permissions is not None: self.flags.ignore_permissions = ignore_permissions if isinstance(filters, dict): filters = [filters] for f in filters: if isinstance(f, string_types): conditions.append(f) else: conditions.append(self.prepare_filter_condition(f)) def prepare_filter_condition(self, f): """Returns a filter condition in the format: ifnull(`tabDocType`.`fieldname`, fallback) operator "value" """ f = get_filter(self.doctype, f) tname = ('`tab' + f.doctype + '`') if not tname in self.tables: self.append_table(tname) if 'ifnull(' in f.fieldname: column_name = f.fieldname else: column_name = '{tname}.{fname}'.format(tname=tname, fname=f.fieldname) can_be_null = True # prepare in condition if f.operator.lower() in ('in', 'not in'): values = f.value or '' if not isinstance(values, (list, tuple)): values = values.split(",") fallback = "''" value = (frappe.db.escape((v or '').strip(), percent=False) for v in values) value = '("{0}")'.format('", "'.join(value)) else: df = frappe.get_meta(f.doctype).get("fields", {"fieldname": f.fieldname}) df = df[0] if df else None if df and df.fieldtype in ("Check", "Float", "Int", "Currency", "Percent"): can_be_null = False if f.operator.lower() == 'between' and \ (f.fieldname in ('creation', 'modified') or (df and (df.fieldtype=="Date" or df.fieldtype=="Datetime"))): value = get_between_date_filter(f.value, df) fallback = "'0000-00-00 00:00:00'" elif df and df.fieldtype=="Date":
########################################################################### # # This program is part of Zenoss Core, an open source monitoring platform. # Copyright (C) 2008, Zenoss Inc. # # This program is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License version 2 as published by # the Free Software Foundation. # # For complete information please visit: http://www.zenoss.com/oss/ # ########################################################################### ################################ # These variables are overwritten by Zenoss when the ZenPack is exported # or saved. Do not modify them directly here. NAME = 'ZenPacks.community.powerware' VERSION = '1.0' AUTHOR = 'Alexander Vorobiyov' LICENSE = '' NAMESPACE_PACKAGES = ['ZenPacks', 'ZenPacks.community'] PACKAGES = ['ZenPacks', 'ZenPacks.community', 'ZenPacks.community.powerware'] INSTALL_REQUIRES = [] COMPAT_ZENOSS_VERS = '>=2.2' PREV_ZENPACK_NAME = '' # STOP_REPLACEMENTS ################################ # Zenoss will not overwrite any changes you make below here. from setuptools import setup, find_packages setup( # This ZenPack metadata should usually be edited with the Zenoss # ZenPack edit page. Whenever the edit page is submitted it will # overwrite the values below (the ones it knows about) with new values. name = NAME, version = VERSION, author = AUTH
OR, license = LICENSE, # This is the version spec which indicates what versions of Zenoss # this ZenPack is compatible with compatZenossVers = COMPAT_ZENOSS_VERS, # previousZenPackName is a facility for telling Zenoss that the name # of this ZenPack has changed. If no ZenPack with the current name is # installed then a zenpack of this na
me if installed will be upgraded. prevZenPackName = PREV_ZENPACK_NAME, # Indicate to setuptools which namespace packages the zenpack # participates in namespace_packages = NAMESPACE_PACKAGES, # Tell setuptools what packages this zenpack provides. packages = find_packages(), # Tell setuptools to figure out for itself which files to include # in the binary egg when it is built. include_package_data = True, # Tell setuptools what non-python files should also be included # with the binary egg. package_data = { '': ['*.txt'], '':['../COPYRIGHT.txt','../LICENSE.txt'], NAME: ['objects/*','skins/*/*','services/*', 'reports/*/*', 'modeler/*/*', 'daemons/*', 'lib/*', 'libexec/*'], }, # Indicate dependencies on other python modules or ZenPacks. This line # is modified by zenoss when the ZenPack edit page is submitted. Zenoss # tries to put add/delete the names it manages at the beginning of this # list, so any manual additions should be added to the end. Things will # go poorly if this line is broken into multiple lines or modified to # dramatically. install_requires = INSTALL_REQUIRES, # Every ZenPack egg must define exactly one zenoss.zenpacks entry point # of this form. entry_points = { 'zenoss.zenpacks': '%s = %s' % (NAME, NAME), }, # All ZenPack eggs must be installed in unzipped form. zip_safe = False, )
s PivocramConnetcTest(base.TestCase): def setUp(self): self.connect = pivocram.Connect('PIVOTAL_TEST_TOKEN') def test_should_have_the_pivotal_api_url(self): self.connect.PIVOTAL_URL.should.be.equal('https://www.pivotaltracker.com/services/v5') def test_should_have_header_with_token(self): self.connect.headers.should.be.equal({'X-TrackerToken': 'PIVOTAL_TEST_TOKEN'}) def test_should_have_projects_url_for_list(self): self.connect.projects_url().should.be.equal('https://www.pivotaltracker.com/services/v5/projects') def test_should_have_projects_url_for_item(self): self.connect.projects_url(123).should.be.equal('https://www.pivotaltracker.com/services/v5/projects/123') def test_should_have_account_member_url(self): self.connect.account_member_url(123, 333).should.be.equal('https://www.pivotaltracker.com/services/v5/accounts/123/memberships/333') def test_should_have_iterations_url(self): self.connect.iterations_url(123, 1).should.be.equal('https://www.pivotaltracker.com/services/v5/projects/123/iterations/1') def test_should_have_project_story_url(self): self.connect.project_story_url(123, 1234).should.be.equal('https://www.pivotaltracker.com/services/v5/projects/123/stories/1234') def test_should_have_project_story_tasks_url(self): self.connect.project_story_tasks_url(123, 1234).should.be.equal('https://www.pivotaltracker.com/services/v5/projects/123/stories/1234/tasks') def test_should_have_project_story_task_url(self): self.connect.project_story_task_url(123, 1234, 12345).should.be.equal('https://www.pivotaltracker.com/services/v5/projects/123/stories/1234/tasks/12345') @base.TestCase.mock.patch('app.pivocram.requests') def test_should_make_get(self, req_mock): response = self.mock.MagicMock() response.json.return_value = 'req-response' req_mock.get.return_value = response self.connect.get('url').should.be.equal('req-response') req_mock.get.assert_called_with('url', headers={'X-TrackerToken': 'PIVOTAL_TEST_TOKEN'}) @base.TestCase.mock.patch('app.pivocram.requests') def test_should_make_put(self, req_mock): response = self.mock.MagicMock() response.json.return_value = 'req-response' req_mock.put.return_value = response self.connect.put('url', {'data': 'value'}).should.be.equal('req-response') req_mock.put.assert_called_with('url', {'data': 'value'}, headers={'X-TrackerToken': 'PIVOTAL_TEST_TOKEN'}) def test_should_get_projects_list(self): self.connect.get = self.mock.MagicMock(return_value='req-response') self.connect.projects_url = self.mock.MagicMock(return_value='url-projects') self.connect.get_projects().should.be.equal('req-response') self.connect.get.assert_called_with('url-projects') def test_should_get_project(self): self.connect.get = self.mock.MagicMock(return_value='req-response') self.connect.projects_url = self.mock.MagicMock(return_value='url-projects') self.connect.get_project(123).should.be.equal('req-response') self.connect.get.assert_called_with('url-projects') self.connect.projects_url.assert_called_with(123) def test_should_get_project_member(self): self.connect.get = self.mock.MagicMock(return_value='req-response') self.connect.account_member_url = self.mock.MagicMock(return_value='url-project-member') self.connect.get_account_member(123, 333).should.be.equal('req-response') self.connect.get.assert_called_with('url-project-member') self.connect.account_member_url.assert_called_with(123, 333) def test_should_get_project_story_tasks(self): self.connect.get = self.mock.MagicMock(return_value='req-response') self.connect.project_story_tasks_url = self.mock.MagicMock(return_value='url-tasks') self.connect.get_project_story_tasks(123, 1234).should.be.equal('req-response') self.connect.get.assert_called_with('url-tasks') self.connect.project_story_tasks_url.assert_called_with(123, 1234) def test_should_get_iteration_stories(self): self.connect.get = self.mock.MagicMock(return_value='req-response') self.connect.iterations_url = self.mock.MagicMock(return_value='url-iterations') self.connect.get_current_iteration(123, 1).should.be.equal('req-response') self.connect.get.assert_called_with('url-iterations') self.connect.iterations_url.assert_called_with(123, 1) de
f test_should_update_story(self): self.connect.put = self.mock.MagicMock(return_value='req-response') self.connect.project_story_url = self.mock.MagicMock(return_value='url-stories') self.connect.update_story(123, 1234, {'data': 'value'}).should.be.equal('req-response') self.connect.put.assert_called_with('url-stories', {'data': 'value'}) self
.connect.project_story_url.assert_called_with(123, 1234) def test_should_update_story_task(self): self.connect.put = self.mock.MagicMock(return_value='req-response') self.connect.project_story_task_url = self.mock.MagicMock(return_value='url-stories') self.connect.update_story_task(123, 1234, 12345, {'data': 'value'}).should.be.equal('req-response') self.connect.put.assert_called_with('url-stories', {'data': 'value'}) self.connect.project_story_task_url.assert_called_with(123, 1234, 12345) class PivocramClientTest(base.TestCase): project_mock = {"current_iteration_number": 1} def setUp(self): user = self.mock.MagicMock() user.pivotal_token = 'PIVOTAL_TEST_TOKEN' self.client = pivocram.Client(user, project_id='PROJECT-ID') def test_should_have_connect_attribute(self): self.assertTrue(isinstance(self.client.connect, pivocram.Connect)) def test_should_be_create_with_project_id(self): self.client.project_id.should.be.equal('PROJECT-ID') def test_should_have_property_list_stories(self): self.client._current_iteration = 'CURRENT' self.client.current_iteration.should.be.equal('CURRENT') def test_should_have_method_to_get_story(self): self.client.get_story('STORY-ID').should.be.equal(None) def test_should_have_method_to_list_story_tasks(self): self.client.connect = self.mock.MagicMock() self.client.connect.get_project_story_tasks.return_value = [1, 2, 3] self.client.get_story_tasks('STORY-ID').should.be.equal([1, 2, 3]) self.client.connect.get_project_story_tasks.assert_called_with('PROJECT-ID', 'STORY-ID') def test_should_have_method_to_get_story_task(self): self.client.get_story_task('STORY-ID', 'TASKS-ID').should.be.equal(None) def test_should_get_projects(self): self.client.connect = self.mock.MagicMock() self.client.connect.get_projects.return_value = [1, 2, 3] self.client.get_projects().should.be.equal([1, 2, 3]) def test_should_get_empty_if_no_projects(self): self.client.connect = self.mock.MagicMock() self.client.connect.get_projects.return_value = [] self.client.get_projects().should.be.equal([]) def test_should_set_current_iteration(self): self.client.connect = self.mock.MagicMock() self.client.connect.get_project.return_value = self.project_mock self.client._current_iteration_number = None self.client.current_iteration_number.should.be.equal(1) self.client.connect.get_project.assert_called_with('PROJECT-ID') def test_should_get_current_stories(self): self.client.connect = self.mock.MagicMock() self.client.connect.get_current_iteration.return_value = {'stories': [1, 2, 3]} self.client.current_iteration.should.be.equal({'stories': [1, 2, 3]}) def test_should_update_story(self): self.client.connect = self.mock.MagicMock() self.client.connect.update_story.return_value = {'id': 1234} self.client.update_story(1234, {'data': 'value'}).should.be.equal({'id': 1234}) def test_should_complete_story_
""" Function
s and classes dealing with commands.
"""
# THIS FILE IS AUTO-GENERATED. DO NOT EDIT from verta._swagger.base_type import BaseType class ModeldbFindHydratedProjectsByTeam(BaseType): def __init__(self, find_projects=None, org_id=None, name=None, id=None): required = { "find_projects": False, "org_id": False,
"name": False, "id": False, } self.find_projects = find_projects self.org_id = org_id self.name = name self.id = id for k, v in required.items(): if self[k] is None and v: raise ValueError('attribute {} is required'.format(k)) @staticmethod def from_json(d): from .ModeldbFindProjects import ModeldbFindProjects tmp = d.get('find_
projects', None) if tmp is not None: d['find_projects'] = ModeldbFindProjects.from_json(tmp) tmp = d.get('org_id', None) if tmp is not None: d['org_id'] = tmp tmp = d.get('name', None) if tmp is not None: d['name'] = tmp tmp = d.get('id', None) if tmp is not None: d['id'] = tmp return ModeldbFindHydratedProjectsByTeam(**d)
# -*- coding: utf-8 -*- # darwinvpnlauncher.py # Copyright (C) 2013 LEAP # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """ Darwin VPN launcher implementation. """ import commands import getpass import logging import os import sys from leap.bitmask.services.eip.vpnlauncher import VPNLauncher from leap.bitmask.services.eip.vpnlauncher import VPNLauncherException from leap.bitmask.util import get_path_prefix logger = logging.getLogger(__name__) class EIPNoTunKextLoaded(VPNLauncherException): pass class DarwinVPNLauncher(VPNLauncher): """ VPN launcher for the Darwin Platform """ COCOASUDO = "cocoasudo" # XXX need the good old magic translate for these strings # (look for magic in 0.2.0 release) SUDO_MSG = ("Bitmask needs administrative privileges to run " "Encrypted Internet.") INSTALL_MSG = ("\"Bitmask needs administrative privileges to install " "missing scripts and fix permissions.\"") # Hardcode the installation path for OSX for security, openvpn is # run as root INSTALL_PATH = "/Applications/Bitmask.app/" INSTALL_PATH_ESCAPED = os.path.realpath(os.getcwd() + "/../../") OPENVPN_BIN = 'openvpn.leap' OPENVPN_PATH = "%s/Contents/Resources/openvpn" % (INSTALL_PATH,) OPENVPN_PATH_ESCAPED = "%s/Contents/Resources/openvpn" % ( INSTALL_PATH_ESCAPED,) OPENVPN_BIN_PATH = "%s/Contents/Resources/%s" % (INSTALL_PATH, OPENVPN_BIN) UP_SCRIPT = "%s/client.up.sh" % (OPENVPN_PATH,) DOWN_SCRIPT = "%s/client.down.sh" % (OPENVPN_PATH,) OPENVPN_DOWN_PLUGIN = '%s/openvpn-down-root.so' % (OPENVPN_PATH,) UPDOWN_FILES = (UP_SCRIPT, DOWN_SCRIPT, OPENVPN_DOWN_PLUGIN) OTHER_FILES = [] @classmethod def cmd_for_missing_scripts(kls, frompath): """ Returns a command that can copy the missing scripts. :rtype: str """ to = kls.OPENVPN_PATH_ESCAPED cmd = "#!/bin/sh\n" cmd += "mkdir -p {0}\n".format(to) cmd += "cp '{0}'/* {1}\n".format(frompath, to) cmd += "chmod 744 {0}/*".format(to) return cmd @classmethod
def is_kext_loaded(kls): """ Checks if the needed kext is loaded before launching openvpn. :returns: True if kext is loaded, False otherwise. :rtype: bool """ return bool(commands.getoutput('kextstat | grep "leap.tun"')) @classm
ethod def _get_icon_path(kls): """ Returns the absolute path to the app icon. :rtype: str """ resources_path = os.path.abspath( os.path.join(os.getcwd(), "../../Contents/Resources")) return os.path.join(resources_path, "bitmask.tiff") @classmethod def get_cocoasudo_ovpn_cmd(kls): """ Returns a string with the cocoasudo command needed to run openvpn as admin with a nice password prompt. The actual command needs to be appended. :rtype: (str, list) """ # TODO add translation support for this sudo_msg = ("Bitmask needs administrative privileges to run " "Encrypted Internet.") iconpath = kls._get_icon_path() has_icon = os.path.isfile(iconpath) args = ["--icon=%s" % iconpath] if has_icon else [] args.append("--prompt=%s" % (sudo_msg,)) return kls.COCOASUDO, args @classmethod def get_cocoasudo_installmissing_cmd(kls): """ Returns a string with the cocoasudo command needed to install missing files as admin with a nice password prompt. The actual command needs to be appended. :rtype: (str, list) """ # TODO add translation support for this install_msg = ('"Bitmask needs administrative privileges to install ' 'missing scripts and fix permissions."') iconpath = kls._get_icon_path() has_icon = os.path.isfile(iconpath) args = ["--icon=%s" % iconpath] if has_icon else [] args.append("--prompt=%s" % (install_msg,)) return kls.COCOASUDO, args @classmethod def get_vpn_command(kls, eipconfig, providerconfig, socket_host, socket_port="unix", openvpn_verb=1): """ Returns the OSX implementation for the vpn launching command. Might raise: EIPNoTunKextLoaded, OpenVPNNotFoundException, VPNLauncherException. :param eipconfig: eip configuration object :type eipconfig: EIPConfig :param providerconfig: provider specific configuration :type providerconfig: ProviderConfig :param socket_host: either socket path (unix) or socket IP :type socket_host: str :param socket_port: either string "unix" if it's a unix socket, or port otherwise :type socket_port: str :param openvpn_verb: the openvpn verbosity wanted :type openvpn_verb: int :return: A VPN command ready to be launched. :rtype: list """ if not kls.is_kext_loaded(): raise EIPNoTunKextLoaded # we use `super` in order to send the class to use command = super(DarwinVPNLauncher, kls).get_vpn_command( eipconfig, providerconfig, socket_host, socket_port, openvpn_verb) cocoa, cargs = kls.get_cocoasudo_ovpn_cmd() cargs.extend(command) command = cargs command.insert(0, cocoa) command.extend(['--setenv', "LEAPUSER", getpass.getuser()]) return command @classmethod def get_vpn_env(kls): """ Returns a dictionary with the custom env for the platform. This is mainly used for setting LD_LIBRARY_PATH to the correct path when distributing a standalone client :rtype: dict """ ld_library_path = os.path.join(get_path_prefix(), "..", "lib") ld_library_path.encode(sys.getfilesystemencoding()) return { "DYLD_LIBRARY_PATH": ld_library_path }
# coding: utf-8 """ MINDBODY Public API No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501 OpenAPI spec version: v6 Generated by: https://github.com/swagger-api/swagger-codegen.git """ from __future__ import absolute_import import unittest import swagger_client from swagger_client.api.client_api import ClientApi # noqa: E501 from swagger_client.rest import ApiException class TestClientApi(unittest.TestCase): """ClientApi unit test stubs""" def setUp(self): self.api = swagger_client.api.client_api.ClientApi() # noqa: E501 def tearDown(self): pass def test_client_add_arrival(self): """Test case for client_add_arrival Add an arrival for a client. # noqa: E501 """ pass def test_client_add_client(self): """Test case for client_add_client Add a client to a site. # noqa: E501 """ pass def test_client_add_contact_log(self): """Test case for client_add_contact_log Add a contact log to a client's account. # noqa: E501 """ pass def test_client_get_active_client_memberships(self): """Test case for client_get_active_client_memberships Get a client's active memberships. # noqa: E501 """ pass def test_client_get_client_account_balances(self): """Test case for client_get_client_account_balances Get account balance information for one or more client(s). # noqa: E501 """ pass def test_client_get_client_contracts(self): """Test case for client_get_client_contracts Get contracts that a client has purchased. # noqa: E501 """ pass def test_client_get_client_formula_notes(self): """Test case for client_get_client_formula_notes Get a client's formula notes. # noqa: E501 """ pass def test_client_get_client_indexes(self): """Test case for client_get_client_indexes Get a site's configured client indexes and client index values. # noqa: E501 """ pass def test_client_get_client_purchases(self): """Test case for client_get_client_purchases Get a client's purchase history. # noqa: E501 """ pass def test_client_get_client_referral_types(self): """Test case for client_get_client_referral_types Get a site's configured client referral types. # noqa: E501 """ pass def test_client_get_client_services(self): """Test case for client_get_client_services Get pricing options that a client has purchased. # noqa: E501 """ pass def test_client_get_client_visits(self): """Test case for client_get_client_visits Get a client's visit history. # noqa: E501 """ pass def test_client_get_clients(self): """Test case for client_get_clients Get clients. # noqa: E501 """ pass def test_client_get_contact_logs(self): """Test case for client_get_contact_logs
Get contact logs on a client's account. # noqa: E501 """ pass def test_client_get_cross_regional_client_associations(self): """Test case for client_get_cross_regional_client_associations Get a client's cross regional si
te associations. # noqa: E501 """ pass def test_client_get_custom_client_fields(self): """Test case for client_get_custom_client_fields Get a site's configured custom client fields. # noqa: E501 """ pass def test_client_get_required_client_fields(self): """Test case for client_get_required_client_fields Get client required fields for a site. # noqa: E501 """ pass def test_client_send_password_reset_email(self): """Test case for client_send_password_reset_email Send a password reset email to a client. # noqa: E501 """ pass def test_client_update_client(self): """Test case for client_update_client Update a client at a site. # noqa: E501 """ pass def test_client_update_client_service(self): """Test case for client_update_client_service Update a client's purchase pricing option. # noqa: E501 """ pass def test_client_update_client_visit(self): """Test case for client_update_client_visit Update a client's visit. # noqa: E501 """ pass def test_client_update_contact_log(self): """Test case for client_update_contact_log Update a contact log on a client's account. # noqa: E501 """ pass def test_client_upload_client_document(self): """Test case for client_upload_client_document Upload a document to a client's profile. # noqa: E501 """ pass def test_client_upload_client_photo(self): """Test case for client_upload_client_photo Upload a profile photo to a client's profile. # noqa: E501 """ pass if __name__ == '__main__': unittest.main()
import sys from services.housing import HouseTemplate from engine.resources.scene import Point3D def setup(housingTemplates): houseTemplate = HouseTemplate("object/tangible/deed/player_house_deed/shared_corellia_house_large_st
yle_02_deed.iff", "object/buildi
ng/player/shared_player_house_generic_large_style_02.iff", 5) houseTemplate.addBuildingSign("object/tangible/sign/player/shared_house_address.iff", Point3D(float(-13.4), float(3), float(9.1))) houseTemplate.addPlaceablePlanet("corellia") houseTemplate.addPlaceablePlanet("talus") houseTemplate.setDefaultItemLimit(500) houseTemplate.setBaseMaintenanceRate(26) housingTemplates.put(houseTemplate.getDeedTemplate(), houseTemplate) return
or full net: ('[label]', '[netdef]', 'fullnet') ('soumith1', '3i128-96c11', 'layer'), ('soumith2', '64i64-128c9', 'layer'), ('soumith3', '128i32-128c9', 'layer'), ('soumith4', '128i16-128c7', 'layer'), ('soumith5', '384i13-384c3', 'layer'), ('maddison-convolve', '128i19-128c3', 'layer'), ('maddison-fc', '128i19-361n', 'layer'), ('mnist-c1', '1i28-8c5', 'layer'), ('mnist-c2', '8i14-16c5', 'layer'), ('mnist-fc', '16i7-150n', 'layer'), ('mnist-full', '1i24-8c5z-relu-mp2-16c5z-relu-mp3-150n-tanh-10n', 'fullnet'), ('mnist-full-factorized', '1i24-8c5z-relu-mp2-16c5z-relu-mp3-150n-tanh-10n', 'fullnet'), ('maddison-full', '8i19-12*128c3z-relu-361n', 'fullnet'), ('maddison-full-factorized', '8i19-12*(128c3z-relu)-361n', 'fullnet') ] def write_results( label, net_string, layer, benchmark_type, direction, time_ms ): global cmd_line results_dict = {} results_dict['label'] = label results_dict['type'] = benchmark_type results_dict['format'] = 'v0.4' results_dict['direction'] = direction results_dict['net_string'] = net_string if layer is not None: results_dict['layer_string'] = layer.asString() results_dict['time_ms'] = str(time_ms) results_dict['cmd_line'] = cmd_line f = open('results.txt', 'a') json.dump(results_dict, f) f.write( '\n' ) f.close() def time_layer(num_epochs, label, batch_size, net_string): print('building network...') input_string, layer_string = net_string.split('-') input_planes, input_size = map(lambda x: int(x), input_string.split('i')) cl = PyDeepCL.DeepCL() net = PyDeepCL.NeuralNet( cl, input_planes, input_size ) net.addLayer( PyDeepCL.ForceBackpropMaker() ) # this forces the next layer to backwar
d gradients to # this layer print( net.asString() ) if 'c' in layer_string: num_filters, filter_size = map(lambda x: int(x), layer_string.split('c')) net.addLayer( PyDeepCL.ConvolutionalMaker().numFilters(num_filters) .filterSize(
filter_size).biased() ) elif 'n' in layer_string: num_neurons = int(layer_string.split('n')[0]) net.addLayer( PyDeepCL.FullyConnectedMaker().numPlanes(num_neurons).imageSize(1).biased() ) else: raise Exception('layer_string {layer_string} not recognized'.format( layer_string=layer_string)) print( net.asString() ) net.addLayer( PyDeepCL.FullyConnectedMaker().numPlanes(1).imageSize(1) ) net.addLayer( PyDeepCL.SoftMaxMaker() ) print( net.asString() ) images = np.zeros((batch_size, input_planes, input_size, input_size), dtype=np.float32) images[:] = np.random.uniform(-0.5, 0.5, images.shape) labels = np.zeros((batch_size,), dtype=np.int32) print('warming up...') #try: net.setBatchSize(batch_size) # warm up forward for i in range(9): last = time.time() net.forward( images ) now = time.time() print(' warm up forward all-layer time', ( now - last ) * 1000, 'ms' ) last = now net.backwardFromLabels(labels) now = time.time() print(' warm up backward all-layer time', (now - last) * 1000, 'ms' ) last = now layer = net.getLayer(2) print('running forward prop timings:') for i in range(num_epochs): layer.forward() now = time.time() print('forward layer total time', ( now - last) * 1000, 'ms' ) print('forward layer average time', ( now - last ) * 1000 / float(num_epochs), 'ms' ) # forward_time_per_layer_ms = ( now - last ) / float(num_epochs) * 1000 # writeResults( label + ', ' + net_string + ', ' + layer.asString() + ', forward=' + str( ( now - last ) / float(num_epochs) * 1000 ) + 'ms' ) write_results( label=label, net_string=net_string, layer=layer, direction='forward', benchmark_type='layer', time_ms=( now - last ) / float(num_epochs) * 1000 ) print('warm up backwards again') layer.backward() layer.backward() print('warm up backwards done. start timings:') now = time.time() last = now for i in range(num_epochs): layer.backward() now = time.time() print('backward layer total time', (now - last)*1000, 'ms' ) print('backward layer average time', ( now - last ) * 1000 / float(num_epochs), 'ms' ) # writeResults( label + ', ' + net_string + ', ' + layer.asString() + ', backward=' + str( ( now - last ) / float(num_epochs) * 1000 ) + 'ms' ) write_results( label=label, net_string=net_string, layer=layer, direction='backward', benchmark_type='layer', time_ms=( now - last ) / float(num_epochs) * 1000 ) last = now def time_fullnet(num_epochs, label, batch_size, net_string): print('building network...') split_net_string = net_string.split('-') input_string = split_net_string[0] netdef = '-'.join(split_net_string[1:]) input_planes, input_size = map(lambda x: int(x), input_string.split('i')) cl = PyDeepCL.DeepCL() net = PyDeepCL.NeuralNet(cl, input_planes, input_size ) PyDeepCL.NetdefToNet.createNetFromNetdef(net, netdef) print( net.asString() ) images = np.zeros((batch_size, input_planes, input_size, input_size), dtype=np.float32) images[:] = np.random.uniform(-0.5, 0.5, images.shape) labels = np.zeros((batch_size,), dtype=np.int32) print('warming up...') #try: net.setBatchSize(batch_size) # warm up forward for i in range(8): last = time.time() net.forward( images ) now = time.time() print(' warm up forward all-layer time', (now - last)*1000.0, 'ms') last = now print('warming up backward:') last = time.time() net.backwardFromLabels(labels) now = time.time() print(' warm up backward time', (now - last) * 1000, 'ms' ) last = now net.backwardFromLabels(labels) now = time.time() print(' warm up backward time', (now - last) * 1000, 'ms' ) total_forward = 0 total_backward = 0 last = time.time() num_epochs = 0 while total_forward < 1000 or total_backward < 0: # make sure collect suffiicnet timing # for epoch in range(num_epochs): print('epoch {epoch}'.format(epoch=num_epochs+1)) print('run forward for real...') # last = time.time() net.forward(images) now = time.time() diff = now - last forward_ms = diff * 1000.0 total_forward += forward_ms print('forward time: {forward_ms}ms'.format( forward_ms=forward_ms)) last = now print('backward for real:') # last = time.time() net.backwardFromLabels(labels) now = time.time() diff = now - last backward_ms = diff * 1000.0 total_backward += backward_ms print('backward time: {backward_ms}ms'.format( backward_ms=backward_ms)) last = now num_epochs += 1 print('num_epochs: {num_epochs}'.format(num_epochs=num_epochs)) average_forward = total_forward / num_epochs average_backward = total_backward / num_epochs print('average forward time: {forward_ms}ms'.format( forward_ms=average_forward)) print('average backward time: {backward_ms}ms'.format( backward_ms=average_backward)) write_results( label=label, net_string=net_string, layer=None, benchmark_type='fullnet', direction='forward', time_ms=average_forward ) write_results( label=label, net_string=net_string, layer=None, benchmark_type='fullnet', direction='backward', time_ms=average_backward ) def time_run(fn): times = [] fn() # warm-up call, outputPlanest timed for _ in range(repeat): start = time.time() for _ in range(number): fn() times.append((time.time() - start) / number) return min(times) #def parse_custom_config(s): # # parses a custom configuration string of the format: # # AxB-CcD where A: input channels, B: input size, # # C: output channels, D: kernel size, E: batchsize # run = {'batch_size': 128 } # defs = {'i': ['input_planes', 'inp
# -*- coding: utf-8 -*- u""" .. module:: organizations """ from django.contrib import messages from django.contrib.auth.decorators import login_required from django.core.urlresolvers import reverse from django.shortcuts import get_object_or_404 from django.shortcuts import redirect from django.shortcuts import render from django.utils.text import slugify from django.views.generic import View from apps.volontulo.forms import VolounteerToOrganizationContactForm from apps.volontulo.lib.email import send_mail from apps.volontulo.models import Offer from apps.volontulo.models import Organization from apps.volontulo.models import UserProfile from apps.volontulo.utils import correct_slug def organizations_list(request): u"""View responsible for listing all organizations. :param request: WSGIRequest instance """ organizations = Organization.objects.all() return render( request, "organizations/list.html", {'organizations': organizations}, ) class OrganizationsCreate(View): u"""Class view supporting creation of new organization.""" @staticmethod @login_required def get(request): u"""Method responsible for rendering form for new organization.""" return render( request, "organizations/organization_form.html", {'organization': Organization()} ) @staticmethod @login_required def post(request): u"""Method responsible for saving new organization.""" if not ( request.POST.get('name') and request.POST.get('address') and request.POST.get('description') ): messages.error( request, u"Należy wypełnić wszystkie pola formularza." ) return render( request, "organizations/organization_form.html", {'organization': Organization()} ) organization = Organization( name=request.POST.get('name'), address=request.POST.get('address'), description=request.POST.get('description'), ) organization.save() request.user.userprofile.organizations.add(organization) messages.success( request, u"Organizacja została dodana." ) return redirect( 'organization_view', slug=slugify(organization.name), id_=organization.id, ) @correct_slug(Organization, 'organization_form', 'name') @login_required def organization_form(request, slug, id_): # pylint: disable=unused-argument u"""View responsible for editing organization. Edition will only work, if logged user has been registered as organization. """ org = Organization.objects.get(pk=id_) users = [profile.user.email for profile in org.userprofiles.all()] if ( request.user.is_authenticated() and request.user.email not in users ): messages.error( request, u'Nie masz uprawnień do edycji tej organizacji.' ) return redirect( reverse( 'organization_view', args=[slugify(org.name), org.id] ) ) if not ( request.user.is_authenticated() and UserProfile.objects.get(user=request.user).organizations ): return redirect('homepage') if request.method == 'POST': if ( request.POST.get('name') and request.POST.get('address') and request.POST.get('description') ): org.name = request.POST.get('name') org.address = request.POST.get('address') org.description = request.POST.get('description') org.save() messages.success( request, u'Oferta została dodana/zmieniona.' ) return redirect( reverse( 'organization_view
', args=[slugify(org.name), org.id] ) ) else: message
s.error( request, u"Należy wypełnić wszystkie pola formularza." ) return render( request, "organizations/organization_form.html", {'organization': org}, ) @correct_slug(Organization, 'organization_view', 'name') def organization_view(request, slug, id_): # pylint: disable=unused-argument u"""View responsible for viewing organization.""" org = get_object_or_404(Organization, id=id_) offers = Offer.objects.filter(organization_id=id_) allow_contact = True allow_edit = False allow_offer_create = False if ( request.user.is_authenticated() and request.user.userprofile in org.userprofiles.all() ): allow_contact = False allow_edit = True allow_offer_create = True if request.method == 'POST': form = VolounteerToOrganizationContactForm(request.POST) if form.is_valid(): # send email to first organization user (I assume it's main user) profile = Organization.objects.get(id=id_).userprofiles.all()[0] send_mail( request, 'volunteer_to_organisation', [ profile.user.email, request.POST.get('email'), ], {k: v for k, v in request.POST.items()}, ) messages.success(request, u'Email został wysłany.') else: messages.error( request, u"Formularz zawiera nieprawidłowe dane: {}".format(form.errors) ) return render( request, "organizations/organization_view.html", { 'organization': org, 'contact_form': form, 'offers': offers, 'allow_contact': allow_contact, 'allow_edit': allow_edit, 'allow_offer_create': allow_offer_create, }, ) return render( request, "organizations/organization_view.html", { 'organization': org, 'contact_form': VolounteerToOrganizationContactForm(), 'offers': offers, 'allow_contact': allow_contact, 'allow_edit': allow_edit, 'allow_offer_create': allow_offer_create, } )
class Post(object): def _
_init__(self, **kwargs): self.__dict__.update(kwargs) def __iter__(self): return iter(self.__d
ict__)
from flask.ext.restplus import Namespace from app.api.tickets import ORDER, TICKET from app.helpers.ticketing import TicketingManager from app.api.helpers.helpers import ( requires_auth, can_access, replace_event_id) from app.api.helpers.utils import POST_RESPONSES from app.api.helpers.utils import Resource from app.api.helpers import custom_fields as fields api = Namespace('attendees', de
scription='Attendees', path='/') ATTENDEE = api.model('TicketHolder', { 'id': fields.Integer(), 'firstname': fields.String(), 'lastname': fields.S
tring(), 'email': fields.Email(), 'checked_in': fields.Boolean(), 'order': fields.Nested(ORDER, allow_null=False), 'ticket': fields.Nested(TICKET, allow_null=False) }) @api.route('/events/<string:event_id>/attendees/') class AttendeesList(Resource): @requires_auth @replace_event_id @can_access @api.doc('check_in_toggle', responses=POST_RESPONSES) @api.marshal_list_with(ATTENDEE) def get(self, event_id): """Get attendees of the event""" return TicketingManager.get_attendees(event_id) @api.route('/events/<string:event_id>/attendees/check_in_toggle/<holder_identifier>') class AttendeeCheckInToggle(Resource): @requires_auth @replace_event_id @can_access @api.doc('check_in_toggle', responses=POST_RESPONSES) @api.marshal_with(ATTENDEE) def post(self, event_id, holder_identifier): """Toggle and Attendee's Checked in State""" holder = TicketingManager.attendee_check_in_out(event_id, holder_identifier) return holder, 200 @api.route('/events/<string:event_id>/attendees/check_in_toggle/<holder_identifier>/check_in') class AttendeeCheckIn(Resource): @requires_auth @replace_event_id @can_access @api.doc('check_in_toggle', responses=POST_RESPONSES) @api.marshal_with(ATTENDEE) def post(self, event_id, holder_identifier): """Check in attendee""" holder = TicketingManager.attendee_check_in_out(event_id, holder_identifier, True) return holder, 200 @api.route('/events/<string:event_id>/attendees/check_in_toggle/<holder_identifier>/check_out') class AttendeeCheckOut(Resource): @requires_auth @replace_event_id @can_access @api.doc('check_in_toggle', responses=POST_RESPONSES) @api.marshal_with(ATTENDEE) def post(self, event_id, holder_identifier): """Check out attendee""" holder = TicketingManager.attendee_check_in_out(event_id, holder_identifier, False) return holder, 200
#!/usr/bin/env python ''' Component to handle data storage and search of all commands run ''' from framework.dependency_management.dependency_resolver import BaseComponent from framework.dependency_management.interfaces import CommandRegisterInterface from framework.lib.general import cprint from framework.db import models from framework.db.target_manager import target_required class CommandRegister(BaseComponent, CommandRegisterInterface): COMPONENT_NAME = "command_register" def __init__(self):
self.register_in_service_locator() self.config = self.get_component("config") self.db = self.get_component("db") self.plugin_output = None self.target = None def init(self): self.target = self.get_component("target") self.plugin_output = self.get_component("plugin_output") def AddCommand(self, Command): self.db.session.merge(models.Command( start_time=Command['Start'], end_time=Command['End'],
success=Command['Success'], target_id=Command['Target'], plugin_key=Command['PluginKey'], modified_command=Command['ModifiedCommand'].strip(), original_command=Command['OriginalCommand'].strip() )) self.db.session.commit() def DeleteCommand(self, Command): command_obj = self.db.session.query(models.Command).get(Command) self.db.session.delete(command_obj) self.db.session.commit() @target_required def CommandAlreadyRegistered(self, original_command, target_id=None): register_entry = self.db.session.query(models.Command).get(original_command) if register_entry: # If the command was completed and the plugin output to which it # is referring exists if register_entry.success: if self.plugin_output.PluginOutputExists(register_entry.plugin_key, register_entry.target_id): return self.target.GetTargetURLForID(register_entry.target_id) else: self.DeleteCommand(original_command) return None else: # Command failed self.DeleteCommand(original_command) return self.target.GetTargetURLForID(register_entry.target_id) return None
import numpy as np import keras from keras.datasets import mnist from keras.models import Model, Sequential from keras.layers import Input, Dense, Conv2D, MaxPooling2D, UpSampling2D from keras.layers import Flatten, Reshape from keras import regularizers from plotly import offline as py import plotly.graph_objs as go from plotly import tools py.init_notebook_mode() # Loads the training and test data sets (ignoring class labels) (x_train, _), (x_test, _) = mnist.load_data() # Scales the training and test data to range between 0 and 1. max_value = float(x_train.max()) x_train = x_train.astype('float32') / max_value x_test = x_test.astype('float32') / max_value # Reshape x_train = x_train.reshape((len(x_train), np.prod(x_train.shape[1:]))) x_test = x_test.reshape((len(x_test), np.prod(x_test.shape[1:]))) # x_train.shape # Autoencoder input_dim = x_train.shape[1] encoding_dim = 32 compression_factor = float(input_dim) / encoding_dim print("Compression factor: %s" % compression_factor) autoencoder = Sequential() autoencoder.add( Dense(encoding_dim, input_shape=(input_dim,), activation='relu') ) autoencoder.add( Dense(input_dim, activation='sigmoid') ) auto
encoder.summary() input_img = Input(shape=(input_dim,)) encoder_layer = autoencoder.layers[0] encoder = Model(input_img, encoder_layer(input_img)) encoder.summary() autoencoder.compile(optimizer='adam', loss='binary_crossentropy') autoencoder.fit(x_t
rain, x_train, epochs=50, batch_size=256, shuffle=True, validation_data=(x_test, x_test)) num_images = 10 np.random.seed(42) random_test_images = np.random.randint(x_test.shape[0], size=num_images) encoded_imgs = encoder.predict(x_test) decoded_imgs = autoencoder.predict(x_test) encoded_imgs[0] decoded_imgs[0] fig = tools.make_subplots(rows=1, cols=3, print_grid=False) t1 = go.Heatmap(z=x_test[random_test_images[0]].reshape(28, 28), showscale=False) fig.append_trace(t1, 1, 1) # fig.append_trace(trace2, 1, 2) # fig.append_trace(trace3, 1, 3) for i in map(str,range(1, 4)): y = 'yaxis'+ i x = 'xaxis' + i fig['layout'][y].update(autorange='reversed', showticklabels=False, ticks='', scaleanchor = 'x') fig['layout'][x].update(showticklabels=False, ticks='') fig['layout'].update(height=600) py.iplot(fig)
# OCF 1.0 doesn't define unique groups, they ar
e defined since OCF 1.1. Pcs # transforms OCF 1.0 agents to OCF 1.1 structure and therefore needs to create # a group name for OCF 1.0 unique at
trs. The name is: {this_prefix}{attr_name} DEFAULT_UNIQUE_GROUP_PREFIX = "_pcs_unique_group_"
# Copyright 2021, Kay Hayen, mailto:kay.hayen@gmail.com # # Python tests originally created or extracted from other peoples work. The # parts were too small to be protected. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the Li
cense is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ Using absolute import, do from module imports. """
from __future__ import absolute_import, print_function from foobar import util from . import local # pylint: disable=unused-import class Foobar(object): def __init__(self): print(util.someFunction())
import email.utils from email.mime.multipart import MIMEMultipart from email.mime.text import MIMEText class Mailer(threading.Thread): """ Mailer :desc: Class that sends an email Exte
nds Thread """ def __init__(self): """
__init__ :desc: Constructor function that calls parent """ Thread.__init__(self) def run(self, stri, dictio): """ run :desc : Function that does the heavy lifting :params : The string to be mailed and a dict containing config options necessary for the mail to be delivered. """ dictionary = dictio msg = MIMEMultipart("alternative") #get it from the queue? stripped = stri.strip() part1 = MIMEText(stripped, "plain") msg['Subject'] = dictionary['email_subject'] #http://pymotw.com/2/smtplib/ msg['To'] = email.utils.formataddr(('Recipient', dictionary['email_to'])) msg['From'] = email.utils.formataddr((dictionary['email_from'], dictionary['email_from'])) msg.attach(part1) if dictionary['smtp'] == True: # no auth if dictionary['auth'] == False: s = smtplib.SMTP() s.connect(host=str(dictionary['smtp_host']), port=dictionary['smtp_port']) try: send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string()) except Exception as ex: template = "An exception of type {0} occured. Arguments:\n{1!r}" message = template.format(type(ex).__name__, ex.args) journal.send("systemd-denotify: "+message) finally: s.quit() del s # auth elif dictionary['auth'] == True: s = smtplib.SMTP() s.connect(host=str(dictionary['smtp_host']), port=dictionary['smtp_port']) s.login(str(dictionary['auth_user']), str(dictionary['auth_password'])) try: send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string().strip()) except Exception as ex: template = "An exception of type {0} occured. Arguments:\n{1!r}" message = template.format(type(ex).__name__, ex.args) journal.send("systemd-denotify: "+message) finally: s.quit() del s else: pass #smtps if dictionary['smtps'] == True: # no auth ? if dictionary['auth'] == False: try: if len(dictionary['smtps_cert']) > 0 and len(dictionary['smtps_key']) > 0: s = smtplib.SMTP_SSL(host=str(dictionary['smtps_host']), port=dictionary['smtps_port'], keyfile=dictionary['smtps_key'], certfile=dictionary['smtps_cert']) s.ehlo_or_helo_if_needed() send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string()) else: s = smtplib.SMTP_SSL(host=str(dictionary['smtps_host']), port=dictionary['smtps_port']) s.ehlo_or_helo_if_needed() send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string()) except Exception as ex: template = "An exception of type {0} occured. Arguments:\n{1!r}" message = template.format(type(ex).__name__, ex.args) journal.send("systemd-denotify: "+message) finally: s.quit() del s # auth elif dictionary['auth'] == True: try: #check whether it is a real file and pem encoded if len(dictionary['smtps_cert']) > 0 and len(dictionary['smtps_key']) > 0: s = smtplib.SMTP_SSL(host=str(dictionary['smtps_host']), port=dictionary['smtps_port'], keyfile=dictionary['smtps_key'], certfile=dictionary['smtps_cert']) s.ehlo_or_helo_if_needed() s.login(dictionary['auth_user'], dictionary['auth_password']) send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string()) else: s = smtplib.SMTP_SSL(host=str(dictionary['smtps_host']), port=dictionary['smtps_port']) s.ehlo_or_helo_if_needed() s.login(dictionary['auth_user'], dictionary['auth_password']) send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string()) except Exception as ex: template = "An exception of type {0} occured. Arguments:\n{1!r}" message = template.format(type(ex).__name__, ex.args) journal.send("systemd-denotify: "+message) finally: s.quit() del s else: pass #starttls if dictionary['starttls'] == True: # no auth if dictionary['auth'] == False: try: s = smtplib.SMTP() s.connect(host=str(dictionary['starttls_host']), port=dictionary['starttls_port']) s.ehlo() #http://pymotw.com/2/smtplib/ if s.has_extn("STARTTLS"): #check whether it is a real file and pem encoded if len(dictionary['starttls_cert']) > 0 and len(dictionary['starttls_key']) > 0: s.starttls(keyfile=dictionary['starttls_key'], certfile=dictionary['starttls_cert']) s.ehlo() send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string()) else: s.starttls() s.ehlo() send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string()) except Exception as ex: template = "An exception of type {0} occured. Arguments:\n{1!r}" message = template.format(type(ex).__name__, ex.args) journal.send("systemd-denotify: "+message) finally: s.quit() del s # auth elif dictionary['auth'] == True: try: s = smtplib.SMTP() s.connect(host=str(dictionary['starttls_host']), port=dictionary['starttls_port']) #http://pymotw.com/2/smtplib/ s.ehlo() if s.has_extn("STARTTLS"): #check whether it is a real file and pem encoded if len(dictionary['starttls_cert']) >0 and len(dictionary['starttls_key'])>0: s.starttls(keyfile=dictionary['starttls_key'], certfile=dictionary['starttls_cert']) s.ehlo() s.login(str(dictionary['auth_user']).strip(), str(dictionary['auth_password'])) send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string()) else: s.starttls() s.ehlo() s.login(str(dictionary['auth_user']).strip(), str(dictionary['auth_password'])) send = s.sendmail(str(dictionary['email_from']), [str(dictionary['email_to'])], msg.as_string()) except Exception as ex: template = "An exception of type {0} occured. Arguments:\n{1!r}" message = templ
from ert_gui.ide.keywords.definitions import ProperNameFormatArgument from ecl.test import ExtendedTestCase class ProperNameFormatArgumentTest(ExtendedTest
Case): def test_proper_name_format_argument(self): argument = ProperNameFormatArgument() self.assertTrue(argument.validate("NAME%d"))
self.assertTrue(argument.validate("__NA%dME__")) self.assertTrue(argument.validate("<NAME>%d")) self.assertTrue(argument.validate("%d-NAME-")) self.assertFalse(argument.validate("-%dNA ME-")) self.assertFalse(argument.validate("NAME*%d"))
er = HeaderParser()
if not header_lines: header_lines = [
'##fileformat=VCFv4.2', '##FILTER=<ID=LowQual,Description="Low quality">', '##INFO=<ID=MQ,Number=1,Type=Float,Description="RMS Mapping Quality">', '##INFO=<ID=CNT,Number=A,Type=Integer,Description="Number of times '\ 'this allele was found in external db">', '##contig=<ID=1,length=249250621,assembly=b37>', '##INFO=<ID=DP_HIST,Number=R,Type=String,Description="Histogram for '\ 'DP; Mids: 2.5|7.5|12.5|17.5|22.5|27.5|32.5|37.5|42.5|47.5|52.5|57.5|'\ '62.5|67.5|72.5|77.5|82.5|87.5|92.5|97.5">', '##FORMAT=<ID=AD,Number=.,Type=Integer,Description="Allelic depths for'\ ' the ref and alt alleles in the order listed">', '##INFO=<ID=CSQ,Number=.,Type=String,Description="Consequence type as'\ ' predicted by VEP. Format: Allele|Gene|Feature">' '##FORMAT=<ID=DP,Number=1,Type=Integer,Description="Read Depth">', '##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">', '##FORMAT=<ID=GQ,Number=1,Type=String,Description="GenotypeQuality">' '##reference=file:///human_g1k_v37.fasta', '#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\tFORMAT\tfather\tmother\tproband' ] for line in header_lines: if line.startswith('##'): header_parser.parse_meta_data(line) elif line.startswith('#'): header_parser.parse_header_line(line) return header_parser def test_simple_split(): """ Test how split genotypes behave when a simple split """ header_parser = get_header() variant_line = "3\t947379\t.\tA\tT,C\t100\tPASS\tMQ=1;CNT=5,8;"\ "DP_HIST=12,43,22\tGT:GQ:AD:DP\t1/1:60:0,7,0:12\t0/2:60:7,0,10:17"\ "\t1/2:60:0,7,8:16" variant = variant = format_variant( line = variant_line, header_parser=header_parser, check_info=True ) splitted_variants = [] for variant in split_variants(variant, header_parser): splitted_variants.append(variant) assert len(splitted_variants) == 2 first_variant = splitted_variants[0] second_variant = splitted_variants[1] # Test if the splitted variants still have the same reference assert first_variant['REF'] == 'A' assert second_variant['REF'] == 'A' # Test if the alternative was splitted properly assert first_variant['ALT'] == 'T' assert second_variant['ALT'] == 'C' # Test if simple ino field is handled correct assert first_variant['info_dict']['MQ'] == ['1'] assert second_variant['info_dict']['MQ'] == ['1'] # Test if info field with Number='A' is handled correct assert first_variant['info_dict']['CNT'] == ['5'] assert second_variant['info_dict']['CNT'] == ['8'] # Test if info field with Number='R' is handled correct assert first_variant['info_dict']['DP_HIST'] == ['12', '43'] assert second_variant['info_dict']['DP_HIST'] == ['12', '22'] # Test if the genortypes are on the correct format assert first_variant['father'] == "1/1:60:0,7:12" assert second_variant['father'] == "0/0:60:0,0:12" assert first_variant['mother'] == "0/0:60:7,0:17" assert second_variant['mother'] == "0/1:60:7,10:17" assert first_variant['proband'] == "0/1:60:0,7:16" assert second_variant['proband'] == "0/1:60:0,8:16" def test_split_minimal(): """ Test to split a vcf line without genotypes """ header_lines = [ '##fileformat=VCFv4.2', '##FILTER=<ID=LowQual,Description="Low quality">', '##INFO=<ID=MQ,Number=1,Type=Float,Description="RMS Mapping Quality">', '##contig=<ID=1,length=249250621,assembly=b37>', '#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO' ] header_parser = get_header(header_lines) variant_line = "3\t947379\t.\tA\tT,C\t100\tPASS\tMQ=1" variant = format_variant( line = variant_line, header_parser=header_parser, check_info=True ) splitted_variants = [] for variant in split_variants(variant, header_parser): splitted_variants.append(variant) assert len(splitted_variants) == 2 def test_csq_split(): """ Test works when splitting CSQ fields """ header_parser = get_header() variant_line = "3\t947379\t.\tA\tT,C\t100\tPASS\tCSQ=T|148398|NM_152486.2,"\ "C|148398|NM_152486.2\tGT:GQ:AD:DP\t1/1:60:0,7,0:12\t0/2:60:7,0,10:17"\ "\t1/2:60:0,7,8:16" variant = format_variant( line = variant_line, header_parser=header_parser, check_info=True ) splitted_variants = [] for variant in split_variants(variant, header_parser): splitted_variants.append(variant) assert len(splitted_variants) == 2 first_variant = splitted_variants[0] second_variant = splitted_variants[1] assert first_variant['info_dict']['CSQ'] == ['T|148398|NM_152486.2'] assert second_variant['info_dict']['CSQ'] == ['C|148398|NM_152486.2'] assert list(first_variant['vep_info'].keys()) == ['T'] assert list(second_variant['vep_info'].keys()) == ['C'] assert first_variant['vep_info']['T'] == [{ 'Allele':'T', 'Gene':'148398', 'Feature':'NM_152486.2' }] def test_csq_split_missing_allele(): """ Test works when splitting CSQ fields where one allele is missing """ header_parser = get_header() variant_line = "3\t947379\t.\tA\tT,C\t100\tPASS\tCSQ=T|148398|NM_152486.2"\ "\tGT:GQ:AD:DP\t1/1:60:0,7,0:12\t0/2:60:7,0,10:17"\ "\t1/2:60:0,7,8:16" variant = format_variant( line = variant_line, header_parser=header_parser, check_info=True ) splitted_variants = [] for variant in split_variants(variant, header_parser): splitted_variants.append(variant) assert len(splitted_variants) == 2 first_variant = splitted_variants[0] second_variant = splitted_variants[1] assert first_variant['info_dict']['CSQ'] == ['T|148398|NM_152486.2'] with pytest.raises(KeyError): assert second_variant['info_dict']['CSQ'] == [''] assert list(first_variant['vep_info'].keys()) == ['T'] assert list(second_variant['vep_info'].keys()) == ['C'] assert second_variant['vep_info']['C'] == [] def test_wrong_number_of_A_entrys(): """ Test how split genotypes when wrong number of entrys """ header_parser = get_header() # CNT should have two entrys since Number=A variant_line = "3\t947379\t.\tA\tT,C\t100\tPASS\tMQ=1;CNT=5;"\ "DP_HIST=12,43,22\tGT:GQ:AD:DP\t1/1:60:0,7,0:12\t0/2:60:7,0,10:17"\ "\t1/2:60:0,7,8:16" variant = format_variant( line = variant_line, header_parser=header_parser, check_info=False ) splitted_variants = [] for variant in split_variants(variant, header_parser): splitted_variants.append(variant) assert len(splitted_variants) == 2 first_variant = splitted_variants[0] second_variant = splitted_variants[1] #Vcf-parser should use the first annotation for both alleles assert first_variant['info_dict']['CNT'] == ['5'] assert second_variant['info_dict']['CNT'] == ['5'] def test_wrong_number_of_R_entrys(): """ Test how split genotypes when wrong number of entrys """ header_parser = get_header() # CNT should have two entrys since Number=A variant_line = "3\t947379\t.\tA\tT,C\t100\tPASS\tMQ=1;CNT=5,8;"\ "DP_HIST=12,43\tGT:GQ:AD:DP\t1/1:60:0,7,0:12\t0/2:60:7,0,10:17"\ "\t1/2:60:0,7,8:16" #But then we need to skip the info check variant = format_variant( line = variant_line, header_parser=header_parser, check_info=False ) splitted_variants = [] for variant in split_variants(variant, header_parser): split
import urwid from mitmproxy import http from mitmproxy.tools.console import common, searchable from mitmproxy.utils import human from mitmproxy.utils import strutils def maybe_timestamp(base, attr): if base is not None and getattr(base, attr): return human.format_timestamp_with_milli(getattr(base, attr)) else: return "active" def flowdetails(state, flow: http.HTTPFlow): text = [] sc = flow.server_conn cc = flow.client_conn req = flow.request resp = flow.response metadata = flow.metadata if metadata is not None and len(metadata) > 0: parts = [(str(k), repr(v)) for k, v in metadata.items()] text.append(urwid.Text([("head", "Metadat
a:")])) text.extend(common.format_keyvals(parts, indent=4)) if sc is not None and sc.ip_address: text.append(urwid.Text([("head", "Server Connection:")])) parts = [ ("Address", human.format_address(sc.address)), ]
if sc.ip_address: parts.append(("Resolved Address", human.format_address(sc.ip_address))) if resp: parts.append(("HTTP Version", resp.http_version)) if sc.alpn_proto_negotiated: parts.append(("ALPN", sc.alpn_proto_negotiated)) text.extend( common.format_keyvals(parts, indent=4) ) c = sc.cert if c: text.append(urwid.Text([("head", "Server Certificate:")])) parts = [ ("Type", "%s, %s bits" % c.keyinfo), ("SHA1 digest", c.digest("sha1")), ("Valid to", str(c.notafter)), ("Valid from", str(c.notbefore)), ("Serial", str(c.serial)), ( "Subject", urwid.BoxAdapter( urwid.ListBox( common.format_keyvals( c.subject, key_format="highlight" ) ), len(c.subject) ) ), ( "Issuer", urwid.BoxAdapter( urwid.ListBox( common.format_keyvals( c.issuer, key_format="highlight" ) ), len(c.issuer) ) ) ] if c.altnames: parts.append( ( "Alt names", ", ".join(strutils.bytes_to_escaped_str(x) for x in c.altnames) ) ) text.extend( common.format_keyvals(parts, indent=4) ) if cc is not None: text.append(urwid.Text([("head", "Client Connection:")])) parts = [ ("Address", "{}:{}".format(cc.address[0], cc.address[1])), ] if req: parts.append(("HTTP Version", req.http_version)) if cc.tls_version: parts.append(("TLS Version", cc.tls_version)) if cc.sni: parts.append(("Server Name Indication", cc.sni)) if cc.cipher_name: parts.append(("Cipher Name", cc.cipher_name)) if cc.alpn_proto_negotiated: parts.append(("ALPN", cc.alpn_proto_negotiated)) text.extend( common.format_keyvals(parts, indent=4) ) parts = [] if cc is not None and cc.timestamp_start: parts.append( ( "Client conn. established", maybe_timestamp(cc, "timestamp_start") ) ) if cc.tls_established: parts.append( ( "Client conn. TLS handshake", maybe_timestamp(cc, "timestamp_tls_setup") ) ) if sc is not None and sc.timestamp_start: parts.append( ( "Server conn. initiated", maybe_timestamp(sc, "timestamp_start") ) ) parts.append( ( "Server conn. TCP handshake", maybe_timestamp(sc, "timestamp_tcp_setup") ) ) if sc.tls_established: parts.append( ( "Server conn. TLS handshake", maybe_timestamp(sc, "timestamp_tls_setup") ) ) if req is not None and req.timestamp_start: parts.append( ( "First request byte", maybe_timestamp(req, "timestamp_start") ) ) parts.append( ( "Request complete", maybe_timestamp(req, "timestamp_end") ) ) if resp is not None and resp.timestamp_start: parts.append( ( "First response byte", maybe_timestamp(resp, "timestamp_start") ) ) parts.append( ( "Response complete", maybe_timestamp(resp, "timestamp_end") ) ) if parts: # sort operations by timestamp parts = sorted(parts, key=lambda p: p[1]) text.append(urwid.Text([("head", "Timing:")])) text.extend(common.format_keyvals(parts, indent=4)) return searchable.Searchable(text)
import numpy as np from get_params import get_params import os import pickle """ Returns mAP for each query. """ def relnotrel( fileGT, id_q, rankingShots ): '''Takes ground truth file (fileGT), query name (id_q) and ranking (rankingShots) in order to create a vector of 1's and 0's to compute Average Precision Returns: a list of 1 and 0 for the rankingShots and the number of relevant samples in the ground truth file for the given query. ''' a = np.loadtxt( fileGT, dtype='string' ) # Extract shots for the query t_shot = a[ (a[:,0]==id_q) ] # Extract relevant shots for the query t_shot_rel = t_shot[ t_shot[:,3] == '1' ] t_shot_notrel = t_shot[ t_shot[:,3] == '0' ] # Total Number of relevant shots in the ground truth nRelTot = np.shape( t_shot_rel )[0] labelRankingShot = np.zeros((1, len(rankingShots))) i = 0 for shotRanking in rankingShots: if shotRanking in t_shot_rel: labelRankingShot[0, i ] = 1 i +=1 return labelRankingShot, nRelTot def AveragePrecision( relist,nRelTot): '''Takes a list of 1 and 0 a
nd the number of relevant samples and computes the average precision''' accu = 0 numRel = 0 for k in range(min(len(relist),1000)): if relist[k] == 1: numRel = numRel + 1 acc
u += float( numRel )/ float(k+1) return (accu/nRelTot) def rerank(ranking,baseline): new_ranking = [] for shot in ranking: if shot in baseline: new_ranking.append(shot) return new_ranking if __name__ == '__main__': params = get_params() if params['year'] == '2014': queries = range(9099,9129) else: queries = range(9069,9099) errors = [] for query in queries: if query not in (9100,9113,9117): params['query_name'] = str(query) RANKING_FILE = os.path.join(params['root'],'7_rankings',params['net'],params['database'] + params['year'],params['distance_type'],params['query_name'] + '.rank') if params['year'] == '2014': GROUND_TRUTH_FILE = os.path.join(params['root'],'8_groundtruth','src','ins.search.qrels.tv14') else: GROUND_TRUTH_FILE = os.path.join(params['root'],'8_groundtruth','src','ins.search.qrels.tv13') #print RANKING_FILE if os.path.isfile(RANKING_FILE): baseline_file = os.path.join(params['root'],'2_baseline', 'dcu_caffenet',params['query_name'] + '.rank') #print baseline_file f = open(RANKING_FILE) ranking = pickle.load(f) frames = pickle.load(f) regions = pickle.load(f) distances = pickle.load(f) unsorted_distances = pickle.load(f) if params['database'] =='gt_imgs': baseline_ranking = pickle.load(open(baseline_file,'rb')) baseline_ranking = baseline_ranking[0:1000] ranking = rerank(ranking,baseline_ranking) f.close() labels, num_relevant = relnotrel(GROUND_TRUTH_FILE, params['query_name'], ranking) ranking = np.reshape(ranking,(np.shape(ranking)[0],1)) distances = np.reshape(distances,(np.shape(distances)[0],1)) save_file = np.hstack((ranking,np.hstack((distances,regions)))) np.shape(save_file) save_txt_file = os.path.join(params['root'],'9_other','score_txt',params['query_name'] + '.txt') np.savetxt(save_txt_file,save_file,delimiter='\t', fmt="%s") ap = AveragePrecision(np.squeeze(labels),num_relevant) print ap else: errors.append(query) print "Done" print errors
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from typing import Any, Optional, TYPE_CHECKING from azure.core import AsyncPipelineClient from azure.profiles import KnownProfiles, ProfileDefinition from azure.profiles.multiapiclient import MultiApiClientMixin from msrest import Deserializer, Serializer from ._configuration import FormRecognizerClientConfiguration from ._operations_mixin import FormRecognizerClientOperationsMixin if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from azure.core.credentials import TokenCredential from azure.core.credentials_async import AsyncTokenCredential class _SDKClient(object): def __init__(self, *args, **kwargs): """This is a fake class to support current implemetation of MultiApiClientMixin." Will be removed in final version of multiapi azure-core based client """ pass class FormRecognizerClient(FormRecognizerClientOperationsMixin, MultiApiClientMixin, _SDKClient): """Extracts information from forms and images into structured data. This ready contains multiple API versions, to help you deal with all of the Azure clouds (Azure Stack, Azure Government, Azure China, etc.). By default, it uses the latest API version available on public Azure. For production, you should stick to a particular api-version and/or profile. The profile sets a mapping between an operation group and its API version. The api-version parameter sets the default API version if the operation group is not described in the profile. :param credential: Credential needed for the client to connect to Azure. :type credential: ~azure.core.credentials_async.AsyncTokenCredential :param endpoint: Supported Cognitive Services endpoints (protocol and hostname, for example: https://westus2.api.cognitive.microsoft.com). :type endpoint: str :param api_version: API version to use if no profile is provided, or if missing in profile. :type api_version: str :param profile: A profile definition, from KnownProfiles to dict. :type profile: azure.profiles.KnownProfiles :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. """ DEFAULT_API_VERSION = '2.1' _PROFILE_TAG = "azure.ai.formrecognizer.FormRecognizerClient" LATEST_PROFILE = ProfileDefinition({ _PROFILE_TAG: { None: DEFAULT_API_VERSION, 'authorize_copy_document_model': '2022-01-30-preview', 'begin_analyze_document': '2022-01-30-preview', 'begin_build_document_model': '2022-01-30-preview', 'begin_compose_document_model': '2022-01-30-preview', 'begin_copy_document_model_to': '2022-01-30-preview', 'delete_model': '2022-01-30-preview', 'get_analyze_document_result': '2022-01-30-preview', 'get_info': '2022-01-30-preview', 'get_model': '2022-01-30-preview', 'get_models': '2022-01-30-preview', 'get_operation': '2022-01-30-preview', 'get_operations': '2022-01-30-preview', 'train_custom_model_async': '2.0', }}, _PROFILE_TAG + " latest" ) def __init__( self, credential: "AsyncTokenCredential", endpoint: str, api_version: Optional[str] = None, profile: KnownProfiles = KnownProfiles.default, **kwargs # type: Any ) -> None: if api_version == '2022-01-30-preview': base_url = '{endpoint}/formrecognizer' elif api_version == '2.0': base_url = '{endpoint}/formrecognizer/v2.0'
elif api_version == '2.1': base_url = '{endpoint}/formrecognizer/v2.1' else: raise ValueError("API version {} is not available".format(api_version)) self._config = FormRecognizerClientConfiguration(credential, endpoint, **kwargs) self._client = AsyncPipelineClient(base_url=base_url, config=self._config, **kwargs) super(FormRecognizerClient, self).__init__( api_version=api_vers
ion, profile=profile ) @classmethod def _models_dict(cls, api_version): return {k: v for k, v in cls.models(api_version).__dict__.items() if isinstance(v, type)} @classmethod def models(cls, api_version=DEFAULT_API_VERSION): """Module depends on the API version: * 2022-01-30-preview: :mod:`v2022_01_30_preview.models<azure.ai.formrecognizer.v2022_01_30_preview.models>` * 2.0: :mod:`v2_0.models<azure.ai.formrecognizer.v2_0.models>` * 2.1: :mod:`v2_1.models<azure.ai.formrecognizer.v2_1.models>` """ if api_version == '2022-01-30-preview': from ..v2022_01_30_preview import models return models elif api_version == '2.0': from ..v2_0 import models return models elif api_version == '2.1': from ..v2_1 import models return models raise ValueError("API version {} is not available".format(api_version)) async def close(self): await self._client.close() async def __aenter__(self): await self._client.__aenter__() return self async def __aexit__(self, *exc_details): await self._client.__aexit__(*exc_details)
#!/usr/bin/env python # BEGIN_COPYRIGHT # # Copyright 2009-2018 CRS4. # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy # of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # END_COPYRIGHT import logging logging.basicConfig() LOGGER = logging.getLogger("MapOnly") LOGGER.setLevel(logging.INFO) import pydoop.mapreduce.api as api import pydoop.mapreduce.pipes as pipes import pydoop.hdfs as hdfs class Mapper(api.Mapper): def __init__(self, context): self.name = hdfs.path.basename(context.input_split.filename) def map(self, context): context.emit((self.name, context.key), context.value.upper()) class Writer(api.RecordWriter): def __init__(self, context): super(Writer, self).__init__(context) self.logger = LOGGER.getChild("Writer") jc = context.job_conf outfn = context.get_default_work_file() self.logger.info("writing to %s", outfn) hdfs_user = jc.get("pydoop.hdfs.user", None) self.sep = jc.get("mapreduce.output.textoutputformat.separator", "\t") self.file = hdfs.open(outfn, "wt", user=hdfs_user)
def close(self): self.file.close() s
elf.file.fs.close() def emit(self, key, value): self.file.write("%r%s%s%s" % (key, self.sep, value, "\n")) def __main__(): pipes.run_task(pipes.Factory( mapper_class=Mapper, record_writer_class=Writer, )) if __name__ == "__main__": __main__()