prefix stringlengths 0 918k | middle stringlengths 0 812k | suffix stringlengths 0 962k |
|---|---|---|
import logging
class MockManager(object):
""" Mock manager for filter unit testing. """
def get_child_logger(self, name):
return logging.getLogger('test').getChild(name)
def generic_filter_test(test):
"""Decorator used for creating a generic filter test.
Requires the argument to be a function that assigns the following
attributes when called:
filt = dict used to generate the filter,
event_key = key for the event value | s,
pass_vals = values that create passing events,
fail_vals = values that create failing events
"""
test(test)
def generic_test(self):
# Create the filter
filt = self.gen_filter(test.filt)
# Test passing
for val in test.pass_vals:
event = self.gen_event({test.event_key: val})
self.assertTrue(
filt.check_event(event),
| "pass_val failed check in {}: \n{} passed {}"
"".format(test.__name__, event, filt))
# Test failing
for val in test.fail_vals:
event = self.gen_event({test.event_key: val})
self.assertFalse(
filt.check_event(event),
"fail_val passed check in {}: \n{} passed {}"
"".format(test.__name__, event, filt))
return generic_test
def full_filter_test(test):
"""Decorator used for creating a full filter test.
Requires the argument to be a function that assigns the following
attributes when called:
filt = dict used to generate the filter,
pass_items = array of dicts that should pass,
fail_items = array of dicts that should fail
"""
test(test)
def full_test(self):
filt = self.gen_filter(test.filt)
for val in test.pass_items:
event = self.gen_event(val)
self.assertTrue(
filt.check_event(event),
"pass_val failed check in {}: \n{} passed {}"
.format(test.__name__, event, filt))
for val in test.fail_items:
event = self.gen_event(val)
self.assertFalse(
filt.check_event(event),
"fail_val passed check in {}: \n{} passed {}"
"".format(test.__name__, event, filt))
return full_test
|
from hearthstone.enums import CardType, GameTag, Rarity
import utils
CARDS = utils.fireplace.cards.db
def test_all_tags_known():
"""
Iterate through the card database and check that all specified GameTags
are known in hearthstone.enums.GameTag
"""
unknown_tags = set()
known_tags = list(GameTag)
known_rarities = list(Rarity)
# Check the db loaded correctly
assert utils.fireplace.cards.db
for card in CARDS.values():
for tag in card.tags:
# We have fake tags in fireplace.enums which are always negative
if tag not in known_t | ags and tag > 0:
unknown_tags.add(tag)
# Test rarities as well (cf. TB_BlingBrawl_Blade1e in 10956...)
assert card.rarity in known_rarities
assert not unknown_tags
def test_play_scripts():
for card in CARDS.values():
if card.scripts.activate:
assert card.type == CardType.HERO_POWER
elif card.scripts.play:
assert card.type not in (CardT | ype.HERO, CardType.HERO_POWER, CardType.ENCHANTMENT)
def test_card_docstrings():
for card in CARDS.values():
c = utils.fireplace.utils.get_script_definition(card.id)
name = c.__doc__
if name is not None:
if name.endswith(")"):
continue
assert name == card.name
|
from SloppyCell.ReactionNetworks import *
import Nets
traj2a = Dynamics.integrate(Nets.fig2a, [0, 3*60])
traj2b = Dynamics.integrate(Nets.fig2b, [0, 3*60])
traj2c = Dynamics.integrate(Nets.fig2c, [0, 3*60])
traj2d = Dynamics.integrate(Nets.fig2d, [0, 3*60])
traj2e = Dynamics.integrate(Nets.fig2e, [0, 3*60])
Plotting.figure(2)
for traj in [traj2a, traj2b, traj2c, traj2d, traj2e]:
percent = 100*traj.get_var_traj('BCatenin')/traj.get_var_val('BCatenin', 0)
Plotting.plot(traj.get_times()/ | 60., percent, '-k')
Plotting.axis([0, 3, 0, 105])
traj6a = Dynamics.integrate(Nets.fig6a, [0, 16*60])
traj6b = Dynamics.integrate(Nets.fig6b, [0, 16*60])
traj6c = Dynamics.integrate(Nets.fig6c, [0, 16*60])
Plot | ting.figure(6, (5, 10))
Plotting.subplot(2,1,1)
Plotting.plot(traj6a.get_times()/60., traj6a.get_var_traj('BCatenin'), '-k')
Plotting.plot(traj6b.get_times()/60., traj6b.get_var_traj('BCatenin'), '-r')
Plotting.plot(traj6c.get_times()/60., traj6c.get_var_traj('BCatenin'), '-g')
Plotting.axis([-1, 16, 34, 72])
Plotting.subplot(2,1,2)
Plotting.plot(traj6a.get_times()/60., 1000*traj6a.get_var_traj('Axin'), '-k')
Plotting.plot(traj6b.get_times()/60., 1000*traj6b.get_var_traj('Axin'), '-r')
Plotting.plot(traj6c.get_times()/60., 1000*traj6c.get_var_traj('Axin'), '-g')
Plotting.axis([-1, 16, 0, 22])
|
# -*- coding: utf-8 -*-
# ------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Conector para Google Video basado en flashvideodownloader.org
# http://blog.tvalacarta.info/plugin-xb | mc/pelisalacarta/
# ------------------------------------------------------------
import re
from core import logger
from core import scrapertools
# Re | turns an array of possible video url's from the page_url
def get_video_url(page_url, premium=False, user="", password="", video_password=""):
logger.info("(page_url='%s')" % page_url)
video_urls = []
# Lo extrae a partir de flashvideodownloader.org
if page_url.startswith("http://"):
url = 'http://www.flashvideodownloader.org/download.php?u=' + page_url
else:
url = 'http://www.flashvideodownloader.org/download.php?u=http://video.google.com/videoplay?docid=' + page_url
logger.info("url=" + url)
data = scrapertools.cache_page(url)
# Extrae el vídeo
newpatron = '</script>.*?<a href="(.*?)" title="Click to Download">'
newmatches = re.compile(newpatron, re.DOTALL).findall(data)
if len(newmatches) > 0:
video_urls.append(["[googlevideo]", newmatches[0]])
for video_url in video_urls:
logger.info("%s - %s" % (video_url[0], video_url[1]))
return video_urls
# Encuentra vídeos del servidor en el texto pasado
def find_videos(data):
encontrados = set()
devuelve = []
patronvideos = 'http://video.google.com/googleplayer.swf.*?docid=([0-9]+)'
logger.info("#" + patronvideos + "#")
matches = re.compile(patronvideos, re.DOTALL).findall(data)
for match in matches:
titulo = "[googlevideo]"
if match.count("&") > 0:
primera = match.find("&")
url = match[:primera]
else:
url = match
if url not in encontrados:
logger.info(" url=" + url)
devuelve.append([titulo, url, 'googlevideo'])
encontrados.add(url)
else:
logger.info(" url duplicada=" + url)
return devuelve
|
# -*- coding: utf-8 -*-
############################################## | ################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
###### | ######################################################################## |
if response.status_code in [200,201,202]:
responseObject = self.apiClient.pre_deserialize(response.content, 'SaaSposeResponse', response.headers['content-type'])
return responseObject
else:
raise ApiException(response.status_code,response.content)
except Exception:
raise ApiException(response.status_code,response.content)
def GetProjectAssignment(self, name, assignmentUid, **kwargs):
"""Read project assignment.
Args:
name (str): The name of the file. (required)
assignmentUid (int): Assignment Uid (required)
storage (str): The document storage. (optional)
folder (str): The document folder. (optional)
Returns: AssignmentResponse
"""
allParams = dict.fromkeys(['name', 'assignmentUid', 'storage', 'folder'])
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetProjectAssignment" % key)
params[key] = val
for (key, val) in params.iteritems():
if key in allParams:
allParams[key] = val
resourcePath = '/tasks/{name}/assignments/{assignmentUid}/?appSid={appSid}&storage={storage}&folder={folder}'
resourcePath = resourcePath.replace('&','&').replace("/?","?").replace("toFormat={toFormat}","format={format}").replace("{path}","{Path}")
if 'name' in allParams and allParams['name'] is not None:
resourcePath = resourcePath.replace("{" + "name" + "}" , str(allParams['name']))
else:
resourcePath = re.sub("[&?]name.*?(?=&|\\?|$)", "", resourcePath)
|
if 'assignmentUid' in allParams and allParams['assignmentUid'] is not None:
resourcePath = resourcePath.replace("{" + "assignmentUid" + "}" , str(allParams['assignmentUid']))
else:
resourcePath = re.sub("[&?]assignmentUid.*?(?=&|\\?|$)", "", resourceP | ath)
if 'storage' in allParams and allParams['storage'] is not None:
resourcePath = resourcePath.replace("{" + "storage" + "}" , str(allParams['storage']))
else:
resourcePath = re.sub("[&?]storage.*?(?=&|\\?|$)", "", resourcePath)
if 'folder' in allParams and allParams['folder'] is not None:
resourcePath = resourcePath.replace("{" + "folder" + "}" , str(allParams['folder']))
else:
resourcePath = re.sub("[&?]folder.*?(?=&|\\?|$)", "", resourcePath)
method = 'GET'
queryParams = {}
headerParams = {}
formParams = {}
files = { }
bodyParam = None
headerParams['Accept'] = 'application/xml,application/json'
headerParams['Content-Type'] = 'application/json'
postData = (formParams if formParams else bodyParam)
response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams, files=files)
try:
if response.status_code in [200,201,202]:
responseObject = self.apiClient.pre_deserialize(response.content, 'AssignmentResponse', response.headers['content-type'])
return responseObject
else:
raise ApiException(response.status_code,response.content)
except Exception:
raise ApiException(response.status_code,response.content)
def GetProjectAssignments(self, name, **kwargs):
"""Read project assignment items.
Args:
name (str): The name of the file. (required)
storage (str): The document storage. (optional)
folder (str): The document folder. (optional)
Returns: AssignmentItemsResponse
"""
allParams = dict.fromkeys(['name', 'storage', 'folder'])
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method GetProjectAssignments" % key)
params[key] = val
for (key, val) in params.iteritems():
if key in allParams:
allParams[key] = val
resourcePath = '/tasks/{name}/assignments/?appSid={appSid}&storage={storage}&folder={folder}'
resourcePath = resourcePath.replace('&','&').replace("/?","?").replace("toFormat={toFormat}","format={format}").replace("{path}","{Path}")
if 'name' in allParams and allParams['name'] is not None:
resourcePath = resourcePath.replace("{" + "name" + "}" , str(allParams['name']))
else:
resourcePath = re.sub("[&?]name.*?(?=&|\\?|$)", "", resourcePath)
if 'storage' in allParams and allParams['storage'] is not None:
resourcePath = resourcePath.replace("{" + "storage" + "}" , str(allParams['storage']))
else:
resourcePath = re.sub("[&?]storage.*?(?=&|\\?|$)", "", resourcePath)
if 'folder' in allParams and allParams['folder'] is not None:
resourcePath = resourcePath.replace("{" + "folder" + "}" , str(allParams['folder']))
else:
resourcePath = re.sub("[&?]folder.*?(?=&|\\?|$)", "", resourcePath)
method = 'GET'
queryParams = {}
headerParams = {}
formParams = {}
files = { }
bodyParam = None
headerParams['Accept'] = 'application/xml,application/json'
headerParams['Content-Type'] = 'application/json'
postData = (formParams if formParams else bodyParam)
response = self.apiClient.callAPI(resourcePath, method, queryParams, postData, headerParams, files=files)
try:
if response.status_code in [200,201,202]:
responseObject = self.apiClient.pre_deserialize(response.content, 'AssignmentItemsResponse', response.headers['content-type'])
return responseObject
else:
raise ApiException(response.status_code,response.content)
except Exception:
raise ApiException(response.status_code,response.content)
def PostProjectAssignment(self, name, taskUid, resourceUid, **kwargs):
"""Adds a new assignment to a project and returns assignment item in a response.
Args:
name (str): The name of the file. (required)
taskUid (int): The unique id of the task to be assigned. (required)
resourceUid (int): The unique id of the resource to be assigned. (required)
units (float): The units for the new assignment. Default value is 1. (optional)
fileName (str): The name of the project document to save changes to. If this parameter is omitted then the changes will be saved to the source project document. (optional)
storage (str): The document storage. (optional)
folder (str): The document folder. (optional)
Returns: AssignmentItemResponse
"""
allParams = dict.fromkeys(['name', 'taskUid', 'resourceUid', 'units', 'fileName', 'storage', 'folder'])
params = locals()
for (key, val) in params['kwargs'].iteritems():
if key not in allParams:
raise TypeError("Got an unexpected keyword argument '%s' to method PostProjectAssignment" % key)
params[key] = val
for (key, val) in params.iteritems():
if key in allParams:
allParams[key] = val
resourcePath = '/tasks/{name}/assignments/?appSid={appSid}&taskUid={taskUid}&resourceUid={resourceUid}&units={units}&fileName={fileName}&storage={storage}&folder={folder}'
resourcePath = resourcePath.replace('&','&').replace("/?","?").replace("toFormat={toFormat}","format={format}").replace("{path}","{Pa |
__author__ = 'Administrator'
from django.conf.urls import patterns
from django.contrib.auth.views impor | t login, logout_then_login
urlpatterns = patterns('',
(r'^$', 'apps.sims.views.ind | ex_view'),
(r'index/$', 'apps.sims.views.index_view'),
(r'login/$', login, {'template_name': 'sims/login.html'}),
(r'logout/$', logout_then_login),
(r'stuinfo/$', 'apps.sims.views.student_info_list'),
(r'save/$', 'apps.sims.views.save_student'),
(r'delete/$', 'apps.sims.views.delete_student'),
)
|
#!/usr/bin/env python
"""Simulation bootstrapper"""
from formation_flight.formation import handlers as formation_handlers
from formation_fli | ght.aircraft import handlers as aircraft_handlers
from formation_flight.aircraft import generators
from formation_flight.hub import builders
from formation_flight.hub import allocators
from lib import sim, debug, sink
from lib.debug import print_line as p
from formation_flight import | statistics
import config
import os
import numpy as np
config.sink_dir = '%s/sink' % os.path.dirname(__file__)
def init():
sink.init(config.sink_dir)
def execute():
init()
for z in np.linspace(0, 1, 250):
config.Z = z
single_run()
def single_run():
sim.init()
aircraft_handlers.init()
formation_handlers.init()
statistics.init()
# Construct flight list
planes = generators.get_via_stdin()
# Find hubs
config.hubs = builders.build_hubs(planes, config.count_hubs, config.Z)
# Allocate hubs to flights
allocators.allocate(planes, config.hubs)
for flight in planes:
sim.events.append(sim.Event('aircraft-init', flight, 0))
sim.run()
sink.push(statistics.vars)
debug.print_dictionary(statistics.vars)
|
# Copyright 2018 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import threading
from io import StringIO
from unittest.mock import NonCallableMock
from twisted.internet import defer, reactor
from synapse.util.file_consumer import BackgroundFileConsumer
from tests import unittest
class FileConsumerTests(unittest.TestCase):
@defer.inlineCallbacks
def test_pull_consumer(self):
string_file = StringIO()
consumer = BackgroundFileConsumer(string_file, reactor=reactor)
try:
producer = DummyPullProducer()
yield producer.register_with_consumer(consumer)
yield producer.write_and_wait("Foo")
self.ass | ertEqual(string_file.getvalue(), "Foo")
yield producer.write_and_wait("Bar")
self.assertEqual(string_file.getvalue(), "FooBar")
finally:
consumer.unregisterProducer()
yield consumer.wait()
| self.assertTrue(string_file.closed)
@defer.inlineCallbacks
def test_push_consumer(self):
string_file = BlockingStringWrite()
consumer = BackgroundFileConsumer(string_file, reactor=reactor)
try:
producer = NonCallableMock(spec_set=[])
consumer.registerProducer(producer, True)
consumer.write("Foo")
yield string_file.wait_for_n_writes(1)
self.assertEqual(string_file.buffer, "Foo")
consumer.write("Bar")
yield string_file.wait_for_n_writes(2)
self.assertEqual(string_file.buffer, "FooBar")
finally:
consumer.unregisterProducer()
yield consumer.wait()
self.assertTrue(string_file.closed)
@defer.inlineCallbacks
def test_push_producer_feedback(self):
string_file = BlockingStringWrite()
consumer = BackgroundFileConsumer(string_file, reactor=reactor)
try:
producer = NonCallableMock(spec_set=["pauseProducing", "resumeProducing"])
resume_deferred = defer.Deferred()
producer.resumeProducing.side_effect = lambda: resume_deferred.callback(
None
)
consumer.registerProducer(producer, True)
number_writes = 0
with string_file.write_lock:
for _ in range(consumer._PAUSE_ON_QUEUE_SIZE):
consumer.write("Foo")
number_writes += 1
producer.pauseProducing.assert_called_once()
yield string_file.wait_for_n_writes(number_writes)
yield resume_deferred
producer.resumeProducing.assert_called_once()
finally:
consumer.unregisterProducer()
yield consumer.wait()
self.assertTrue(string_file.closed)
class DummyPullProducer:
def __init__(self):
self.consumer = None
self.deferred = defer.Deferred()
def resumeProducing(self):
d = self.deferred
self.deferred = defer.Deferred()
d.callback(None)
def write_and_wait(self, bytes):
d = self.deferred
self.consumer.write(bytes)
return d
def register_with_consumer(self, consumer):
d = self.deferred
self.consumer = consumer
self.consumer.registerProducer(self, False)
return d
class BlockingStringWrite:
def __init__(self):
self.buffer = ""
self.closed = False
self.write_lock = threading.Lock()
self._notify_write_deferred = None
self._number_of_writes = 0
def write(self, bytes):
with self.write_lock:
self.buffer += bytes
self._number_of_writes += 1
reactor.callFromThread(self._notify_write)
def close(self):
self.closed = True
def _notify_write(self):
"Called by write to indicate a write happened"
with self.write_lock:
if not self._notify_write_deferred:
return
d = self._notify_write_deferred
self._notify_write_deferred = None
d.callback(None)
@defer.inlineCallbacks
def wait_for_n_writes(self, n):
"Wait for n writes to have happened"
while True:
with self.write_lock:
if n <= self._number_of_writes:
return
if not self._notify_write_deferred:
self._notify_write_deferred = defer.Deferred()
d = self._notify_write_deferred
yield d
|
__author__ = 'Ting'
from Craw | lWorker.spiders.stackoverflow import StackOverflowSpider
class ServerFaultSpider(StackOverflowSpider):
name = 'ServerFaultSpider'
allowed_domains = ['serverfault.com']
def __init__(self, op=None, **kwargs):
StackOverflowSpider.__init__(self, op, **kwargs)
def get_feed_start_urls(self):
return ['http://serverfault.c | om/questions'] |
import sys
import os
from com.googlecode.fascinator.common import FascinatorHome
sys.path.append(os | .path.join(FascinatorHome.getPath(),"harvest", "workflows"))
from baserules import BaseIndexData
class IndexData(BaseIndexData):
def __activate__(self, context):
BaseIndexData.__activate | __(self,context)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# $Id: test_maximal_independent_set.py 577 2011-03-01 06:07:53Z lleeoo $
"""
Tests for maximal (not maximum) independent sets.
"""
# Copyright (C) 2004-2016 by
# Leo Lopes <leo.lopes@monash.edu>
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
__author__ = """Leo Lopes (leo.lopes@monash.edu)"""
from nose.tools import *
import networkx as nx
import random
class TestMaximalIndependantSet(object):
def setup(self):
se | lf.florentine | = nx.Graph()
self.florentine.add_edge('Acciaiuoli','Medici')
self.florentine.add_edge('Castellani','Peruzzi')
self.florentine.add_edge('Castellani','Strozzi')
self.florentine.add_edge('Castellani','Barbadori')
self.florentine.add_edge('Medici','Barbadori')
self.florentine.add_edge('Medici','Ridolfi')
self.florentine.add_edge('Medici','Tornabuoni')
self.florentine.add_edge('Medici','Albizzi')
self.florentine.add_edge('Medici','Salviati')
self.florentine.add_edge('Salviati','Pazzi')
self.florentine.add_edge('Peruzzi','Strozzi')
self.florentine.add_edge('Peruzzi','Bischeri')
self.florentine.add_edge('Strozzi','Ridolfi')
self.florentine.add_edge('Strozzi','Bischeri')
self.florentine.add_edge('Ridolfi','Tornabuoni')
self.florentine.add_edge('Tornabuoni','Guadagni')
self.florentine.add_edge('Albizzi','Ginori')
self.florentine.add_edge('Albizzi','Guadagni')
self.florentine.add_edge('Bischeri','Guadagni')
self.florentine.add_edge('Guadagni','Lamberteschi')
def test_K5(self):
"""Maximal independent set: K5"""
G = nx.complete_graph(5)
for node in G:
assert_equal(nx.maximal_independent_set(G, [node]), [node])
def test_K55(self):
"""Maximal independent set: K55"""
G = nx.complete_graph(55)
for node in G:
assert_equal(nx.maximal_independent_set(G, [node]), [node])
def test_exception(self):
"""Bad input should raise exception."""
G = self.florentine
assert_raises(nx.NetworkXUnfeasible,
nx.maximal_independent_set, G, ["Smith"])
assert_raises(nx.NetworkXUnfeasible,
nx.maximal_independent_set, G, ["Salviati", "Pazzi"])
def test_florentine_family(self):
G = self.florentine
indep = nx.maximal_independent_set(G, ["Medici", "Bischeri"])
assert_equal(sorted(indep),
sorted(["Medici", "Bischeri", "Castellani", "Pazzi",
"Ginori", "Lamberteschi"]))
def test_bipartite(self):
G = nx.complete_bipartite_graph(12, 34)
indep = nx.maximal_independent_set(G, [4, 5, 9, 10])
assert_equal(sorted(indep), list(range(12)))
def test_random_graphs(self):
"""Generate 50 random graphs of different types and sizes and
make sure that all sets are independent and maximal."""
for i in range(0, 50, 10):
G = nx.random_graphs.erdos_renyi_graph(i*10+1, random.random())
IS = nx.maximal_independent_set(G)
assert_false(list(G.subgraph(IS).edges()))
neighbors_of_MIS = set.union(*(set(G.neighbors(v)) for v in IS))
for v in set(G.nodes()).difference(IS):
assert_true(v in neighbors_of_MIS)
|
"""
Add an excerpt field to the page.
"""
from __future__ import absolute_import, unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from feincms import extensions
class Extension(extensions.Extension):
def handle_model(self):
self.model.add_to_class(
'excerpt',
models.TextField(
_('excerpt') | ,
blank=True,
help_text=_(
'Add a brief excerpt summarizing the content'
' of this page.')))
def handle_modeladmin(self, modeladmin):
modeladmin.add_extension_options(_('Excerpt'), {
'fields': ('excerpt',),
'classes': ('collapse',), |
})
|
# -*- coding: utf-8 -*-
# © <YEAR(S)> <A | UTHOR(S)>
# License AGPL-3.0 or later (http://www.gn | u.org/licenses/agpl.html).
from . import account_payment
|
from RMPY.representations import curve
from RMPY.creators import skinCluster
import pymel.core as pm
from RMPY.core import config
import os
def save_curve(*args):
"""
:param args: the scene objects that will be saved if nothing is provide it it will try to save the selection.
:return:
"""
if args:
scene_curves = args
else:
scene_curves = pm.ls(selection=True)
saved_curves_list = []
for each in scene_curves:
try:
if pm.objExists(each):
curve_node = curve.Curve.by_name(each)
curve_node.save()
saved_curves_list.append(each)
else:
print "the curve {} doesn't exists".format(each)
except RuntimeWarning('{} not saved'.format):
pass
print 'following curves where saved: {}'.format(saved_curves_list)
def load_curves(*args):
"""
| :param args: the scene objects that will be loaded if nothing is provide it it will try to load the selection.
:return:
"""
if args:
scene_curves = args
e | lse:
scene_curves = pm.ls(selection=True)
for each in scene_curves:
try:
if pm.objExists(each):
curve_node = curve.Curve.by_name(each)
curve_node.load()
curve_node.set_repr_dict()
else:
print "the curve {} doesn't exists".format(each)
except RuntimeWarning('{} not loaded'.format):
pass
def save_skin_cluster(*args):
if args:
scene_objects = args
else:
scene_objects = pm.ls(selection=True)
saved_skin_cluster_list = []
for each_node in scene_objects:
try:
skin_cluster01 = skinCluster.SkinCluster.by_node(each_node)
if skin_cluster01:
skin_cluster01.save('{}'.format(each_node))
saved_skin_cluster_list.append(each_node)
else:
print "object {} does'nt have a skincluster".format(each_node)
except RuntimeWarning('{} not saved'.format(each_node)):
pass
print 'following skin in nodes where saved: {}'.format(saved_skin_cluster_list)
def load_skin_cluster(*args):
if args:
scene_objects = args
else:
scene_objects = pm.ls(selection=True)
for each_node in scene_objects:
try:
skin_cluster01 = skinCluster.SkinCluster()
skin_cluster01.load('{}'.format(each_node))
skin_cluster01.apply_weights_dictionary(geometry=pm.ls(each_node)[0])
except RuntimeWarning('{} not loaded'.format(each_node)):
pass
def export_maya_file(**kwargs):
file_name = kwargs.pop('file_name', 'reference_points')
full_path = '{}/mayaFiles'.format(config.file_path)
pm.exportSelected('{}/{}.ma'.format(full_path, file_name))
def import_maya_file(file_name):
full_path = '{}/mayaFiles'.format(config.file_path)
pm.importFile('{}/{}.ma'.format(full_path, file_name))
def import_all_available_maya_files():
for each in available_maya_files():
import_maya_file(each)
def available_maya_files():
full_path = '{}/mayaFiles'.format(config.file_path)
available_files = []
for each in os.listdir(full_path):
if '.ma' in each:
available_files.append(each)
return each
if __name__ == '__main__':
load_skin_cluster()
|
newline character is found
:param str file: the file name (path)
:param FileSize length: the expected length of the file
:return tuple: (line number, line content)
note: the line number is not available for long files
"""
# In order to use the optimized version for long files, the file has to be opened in binary mode
if length == FileSize.LONG:
with open(file, 'rb') as file_handle:
try:
file_handle.seek(-1, os.SEEK_END)
# Skip all empty lines at the end
while file_handle.read(1) in (b'\n', b'\r'):
file_handle.seek(-2, os.SEEK_CUR)
# Go back to the first non-newline character
file_handle.seek(-1, os.SEEK_CUR)
# Go backwards by one byte at a time and check if it is a newline
while file_handle.read(1) != b'\n':
# Check if the last read character was actually the first character in a file
if file_handle.tell() == 1:
file_handle.seek(-1, os.SEEK_CUR)
break
file_handle.seek(-2, os.SEEK_CUR)
# Newline character found, read the whole line
| return 0, file_handle.readline().decode()
except OSError:
# The file | might be empty or somehow broken
return 0, ''
# Otherwise use simple line enumeration until we hit the last one
else:
with open(file, 'r') as file_handle:
last = (0, '')
for line_num, line in enumerate(file_handle):
last = (line_num + 1, line)
return last
def _wait_for_script_compilation(logfile, stap_process):
""" Waits for the script compilation process to finish - either successfully or not.
An exception is raised in case of failed compilation.
:param str logfile: the name (path) of the SystemTap log file
:param Subprocess stap_process: the subprocess object representing the compilation process
"""
# Start a HeartbeatThread that periodically informs the user of the compilation progress
with PeriodicThread(HEARTBEAT_INTERVAL, _heartbeat_stap, [logfile, 'Compilation']):
while True:
# Check the status of the process
status = stap_process.poll()
if status is None:
# The compilation process has not finished yet, take a small break
time.sleep(LOG_WAIT)
elif status == 0:
# The process has successfully finished
return
else:
# The stap process terminated with non-zero code which means failure
WATCH_DOG.debug("SystemTap build process failed with exit code '{}'".format(status))
raise SystemTapScriptCompilationException(logfile, status)
def _wait_for_systemtap_startup(logfile, stap_process):
""" Waits for the SystemTap collection process to startup.
The SystemTap startup may take some time and it is necessary to wait until the process is ready
before launching the profiled command so that the command output is being collected.
:param str logfile: the name (path) of the SystemTap log file
:param Subprocess stap_process: the subprocess object representing the collection process
"""
while True:
# Check the status of the process
# The process should be running in background - if it terminates it means that it has failed
status = stap_process.poll()
if status is None:
# Check the last line of the SystemTap log file if the process is still running
line_no, line = _get_last_line_of(logfile, FileSize.SHORT)
# The log file should contain at least 4 lines from the compilation and another
# 5 lines from the startup
if line_no >= ((2 * STAP_PHASES) - 1) and ' 5: ' in line:
# If the line contains a mention about the 5. phase, consider the process ready
return
# Otherwise wait a bit before the next check
time.sleep(LOG_WAIT)
else:
WATCH_DOG.debug(
"SystemTap collection process failed with exit code '{}'".format(status)
)
raise SystemTapStartupException(logfile)
def _wait_for_systemtap_data(datafile):
""" Waits until the collection process has finished writing the profiling output to the
data file. This can be checked by observing the last line of the data file where the
ending sentinel should be present.
:param str datafile: the name (path) of the data file
"""
# Start the TimeoutThread so that the waiting is not indefinite
WATCH_DOG.info(
'The profiled command has terminated, waiting for the process to finish writing output '
'to the data file.'
)
with TimeoutThread(HARD_TIMEOUT) as timeout:
while not timeout.reached():
with SuppressedExceptions(IndexError):
# Periodically scan the last line of the data file
# The file can be potentially long, use the optimized method to get the last line
last_line = _get_last_line_of(datafile, FileSize.LONG)[1]
if int(last_line.split()[0]) == RecordType.PROCESS_END.value:
WATCH_DOG.info('The data file is fully written.')
return
time.sleep(LOG_WAIT)
# Timeout reached
WATCH_DOG.info(
'Timeout reached while waiting for the collection process to fully write output '
'into the output data file.'
)
def _heartbeat_stap(logfile, phase):
""" The SystemTap heartbeat function that scans the log file and reports the last record.
:param str logfile: the SystemTap log file name (path)
:param str phase: the SystemTap phase (compilation or collection)
"""
# Report log line count and the last record
WATCH_DOG.info("{} status update: 'log lines count' ; 'last log line'".format(phase))
WATCH_DOG.info("'{}' ; '{}'".format(*_get_last_line_of(logfile, FileSize.SHORT)))
def _extract_processes(extract_command):
""" Extracts and sorts the running processes according to the extraction command.
:param str extract_command: the processes extraction command
:return list: a list of (PID, PPID, PGID, CMD) records representing the corresponding
attributes of the extracted processes
"""
procs = []
out = utils.run_safely_external_command(extract_command, False)[0].decode('utf-8')
for line in out.splitlines():
process_record = line.split()
# Skip the optional first header line
if process_record[0] == 'PID':
continue
# Get the (PID, PPID, PGID, CMD) tuples representing the running parent stap processes
pid, ppid, pgid = int(process_record[0]), int(process_record[1]), int(process_record[2])
cmd = ' '.join(process_record[3:])
# Skip self (the extracting process)
if extract_command in cmd:
continue
procs.append((pid, ppid, pgid, cmd))
return procs
def _loaded_stap_kernel_modules(module=None):
    """Extracts the names of all the SystemTap kernel modules - or a specific one -
    that are currently loaded.

    :param str module: the name of the specific module to lookup or None for all of them
    :return list: the list of names of loaded systemtap kernel modules
    """
    # All stap modules share the 'stap_' prefix; otherwise grep the exact name
    pattern = 'stap_' if module is None else module
    lsmod_cmd = 'lsmod | grep {} | awk \'{{print $1}}\''.format(pattern)
    output, _ = utils.run_safely_external_command(lsmod_cmd, False)
    # Deduplicate the module names through a set comprehension
    return list({name for name in output.decode('utf-8').splitlines()})
def _wait_for_resource_release(check_function, function_args):
""" Waits for a resource to be released. The state of the resour |
# -*- coding: utf-8 -*-
from __future__ import unicod | e_literals
from django.apps import AppConfig
class BlogConfig(AppConfig):
    """Application configuration for the ``blog`` Django app."""

    # Dotted path of the application; used by Django's app registry.
    name = 'blog'
# -*- coding: utf-8 -*-
from django.db import migrations, models
class Migration(migrations.Migration):
    """Recreate ``RequestEvent.resources`` as a many-to-many field.

    The field cannot be altered in place, so it is removed and re-added.
    """

    dependencies = [
        ('monitoring', '0002_monitoring_update'),
    ]
    operations = [
        # Drop the old field first...
        migrations.RemoveField(
            model_name='requestevent',
            name='resources',
        ),
        # ...then re-add it as an M2M relation.
        # NOTE(review): null=True has no effect on ManyToManyField per the
        # Django field reference (check W340); kept as-is because applied
        # migration files are historical records and must not be edited.
        migrations.AddField(
            model_name='requestevent',
            name='resources',
            field=models.ManyToManyField(help_text='List of resources affected', to='monitoring.MonitoredResource', null=True, blank=True),
        ),
    ]
|
"""
Code for MT1 review worksheet.
Direct all complaints to Owen Jow (owenjow@berkeley).
"""
# Part 1: Contr | ol
x = (0 and 1 and 2) + (0 or 1 or 2)
((-4 or 0) and 4) / (-2 or (0 and 2))
if x <= 1:
print('hello')
elif x <= 2:
print(' world')
if x <= 3:
print(' my name is inigo montoya')
else:
print(' from the other side')
# Part 2: HOF / Lambdas
def f(v, x):
def g(y, z):
return y(x, z)(z, x)
return g
u = | lambda y, x: y * 4
v = lambda x, y: x * 3 + y
f(u, 1)(lambda x, y: lambda y, x: y * 3 + v(x, y), 2)
# Part 4: Environment Diagrams
def f(x, h):
def g(y, i):
f = i[:]
h = [f, lambda: g(5, h)]
return h
return g(4, h)
x, y = 6, 7
f = f(3, [lambda: x * y])
g = f[-1]()[0][0][0]()
|
import numpy as np
import unittest
import pycuda.driver a | s drv
import pycuda.compiler
import pycuda.autoinit
import pycuda.gp | uarray as gpuarray
import pycuda.cumath as cumath
from pycuda.compiler import SourceModule
__author__ = 'AlistairMcDougall' |
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# | "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
############## | ##################################################################
import os
import shutil
import socket
import subprocess
import sys
import tempfile
import time
import unittest
import grpc
from apache_beam.portability.api.beam_provision_api_pb2 import (ProvisionInfo,
GetProvisionInfoResponse)
from apache_beam.portability.api.beam_provision_api_pb2_grpc import (
ProvisionServiceServicer, add_ProvisionServiceServicer_to_server)
from concurrent import futures
from google.protobuf import json_format
from pyflink.java_gateway import get_gateway
from pyflink.pyflink_gateway_server import on_windows
from pyflink.testing.test_case_utils import PyFlinkTestCase
class PythonBootTests(PyFlinkTestCase):
    """Tests for the pyflink-udf-runner boot script.

    Spins up a throwaway gRPC provision service, points the runner script at
    it via command-line endpoints, and checks the script's exit behavior.
    """

    def setUp(self):
        """Start a one-shot provision server and prepare env vars and paths."""
        provision_info = json_format.Parse('{"retrievalToken": "test_token"}', ProvisionInfo())
        response = GetProvisionInfoResponse(info=provision_info)

        def get_unused_port():
            # Bind to port 0 so the OS assigns a free port, then release it.
            sock = socket.socket()
            sock.bind(('', 0))
            port = sock.getsockname()[1]
            sock.close()
            return port

        class ProvisionService(ProvisionServiceServicer):
            def GetProvisionInfo(self, request, context):
                # Always answer with the canned response built above.
                return response

        def start_test_provision_server():
            server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
            add_ProvisionServiceServicer_to_server(ProvisionService(), server)
            port = get_unused_port()
            server.add_insecure_port('[::]:' + str(port))
            server.start()
            return server, port

        self.provision_server, self.provision_port = start_test_provision_server()
        self.env = dict(os.environ)
        self.env["python"] = sys.executable
        self.env["FLINK_BOOT_TESTING"] = "1"
        self.env["BOOT_LOG_DIR"] = os.path.join(self.env["FLINK_HOME"], "log")
        self.tmp_dir = tempfile.mkdtemp(str(time.time()), dir=self.tempdir)
        # assume that this file is in flink-python source code directory.
        flink_python_source_root = os.path.dirname(
            os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
        runner_script = "pyflink-udf-runner.bat" if on_windows() else \
            "pyflink-udf-runner.sh"
        self.runner_path = os.path.join(
            flink_python_source_root, "bin", runner_script)

    def run_boot_py(self):
        """Invoke the runner script with a full argument set; return its exit code."""
        args = [self.runner_path, "--id", "1",
                "--logging_endpoint", "localhost:0000",
                "--artifact_endpoint", "whatever",
                "--provision_endpoint", "localhost:%d" % self.provision_port,
                "--control_endpoint", "localhost:0000",
                "--semi_persist_dir", self.tmp_dir]
        return subprocess.call(args, env=self.env)

    def test_python_boot(self):
        """The runner should exit cleanly when all required arguments are given."""
        exit_code = self.run_boot_py()
        self.assertTrue(exit_code == 0, "the boot.py exited with non-zero code.")

    @unittest.skipIf(on_windows(), "'subprocess.check_output' in Windows always return empty "
                                   "string, skip this test.")
    def test_param_validation(self):
        """Each missing required argument should yield its specific error message."""
        args = [self.runner_path]
        exit_message = subprocess.check_output(args, env=self.env).decode("utf-8")
        self.assertIn("No id provided.", exit_message)

        args = [self.runner_path, "--id", "1"]
        exit_message = subprocess.check_output(args, env=self.env).decode("utf-8")
        self.assertIn("No logging endpoint provided.", exit_message)

        args = [self.runner_path, "--id", "1",
                "--logging_endpoint", "localhost:0000"]
        exit_message = subprocess.check_output(args, env=self.env).decode("utf-8")
        self.assertIn("No provision endpoint provided.", exit_message)

        args = [self.runner_path, "--id", "1",
                "--logging_endpoint", "localhost:0000",
                "--provision_endpoint", "localhost:%d" % self.provision_port]
        exit_message = subprocess.check_output(args, env=self.env).decode("utf-8")
        self.assertIn("No control endpoint provided.", exit_message)

    def test_set_working_directory(self):
        """The runner must chdir to the directory named by PYTHON_WORKING_DIR."""
        JProcessPythonEnvironmentManager = \
            get_gateway().jvm.org.apache.flink.python.env.ProcessPythonEnvironmentManager

        output_file = os.path.join(self.tmp_dir, "output.txt")
        pyflink_dir = os.path.join(self.tmp_dir, "pyflink")
        os.mkdir(pyflink_dir)
        # just create an empty file
        open(os.path.join(pyflink_dir, "__init__.py"), 'a').close()
        fn_execution_dir = os.path.join(pyflink_dir, "fn_execution")
        os.mkdir(fn_execution_dir)
        open(os.path.join(fn_execution_dir, "__init__.py"), 'a').close()
        with open(os.path.join(fn_execution_dir, "boot.py"), "w") as f:
            # This fake boot.py records its working directory for the assertion.
            f.write("import os\nwith open(r'%s', 'w') as f:\n    f.write(os.getcwd())" %
                    output_file)

        # test if the name of working directory variable of udf runner is consist with
        # ProcessPythonEnvironmentManager.
        self.env[JProcessPythonEnvironmentManager.PYTHON_WORKING_DIR] = self.tmp_dir
        self.env["python"] = sys.executable
        args = [self.runner_path]
        subprocess.check_output(args, env=self.env)
        process_cwd = None
        if os.path.exists(output_file):
            with open(output_file, 'r') as f:
                process_cwd = f.read()

        self.assertEqual(os.path.realpath(self.tmp_dir),
                         process_cwd,
                         "setting working directory variable is not work!")

    def tearDown(self):
        """Stop the provision server and remove the temporary directory."""
        self.provision_server.stop(0)
        # BUGFIX: the previous bare `except:` swallowed every exception,
        # including KeyboardInterrupt/SystemExit. ignore_errors covers the
        # intended "best effort cleanup" case without hiding anything else.
        if self.tmp_dir is not None:
            shutil.rmtree(self.tmp_dir, ignore_errors=True)
|
"""
Gigablast (Web)
@website https://gigablast.com
@provide-api yes (https://gigablast.com/api.html)
@using-api yes
@results XML
@stable yes
@parse url, title, content
"""
import random
from json import loads
from time import time
from lxml.html import fromstring
from searx.url_utils import urlencode
# engine dependent config
categories = ['general']
paging = True
number_of_results = 10
language_support = True
safesearch = True
# search-url
base_url = 'https://gigablast.com/'
search_string = 'search?{query}'\
'&n={number_of_results}'\
'&c=main'\
'&s={offset}'\
'&format=json'\
'&qh=0'\
'&qlang={lang}'\
'&ff={safesearch} | '\
'&rxiec={rxieu}'\
'& | ulse={ulse}'\
'&rand={rxikd}'\
'&dbez={dbez}'
# specific xpath variables
results_xpath = '//response//result'
url_xpath = './/url'
title_xpath = './/title'
content_xpath = './/sum'
supported_languages_url = 'https://gigablast.com/search?&rxikd=1'
# do search-request
def request(query, params):
    """Assemble the Gigablast search URL and store it in ``params['url']``.

    :param query: raw query string
    :param dict params: searx request parameters (pageno, language, safesearch)
    :return dict: the same params dict with 'url' filled in
    """
    offset = (params['pageno'] - 1) * number_of_results

    if params['language'] == 'all':
        language = 'xx'
    else:
        language = params['language'].replace('-', '_').lower()
        # BUGFIX: the separator was just normalized to '_' above, so
        # splitting on '-' was a no-op and full region codes (e.g. 'en_us')
        # were always sent. Split on '_' instead: send the bare language
        # code, keeping the full code only for Chinese variants.
        if language.split('_')[0] != 'zh':
            language = language.split('_')[0]

    if params['safesearch'] >= 1:
        safesearch = 1
    else:
        safesearch = 0

    # rxieu is some kind of hash from the search query, but accepts random atm
    search_path = search_string.format(query=urlencode({'q': query}),
                                       offset=offset,
                                       number_of_results=number_of_results,
                                       rxikd=int(time() * 1000),
                                       rxieu=random.randint(1000000000, 9999999999),
                                       ulse=random.randint(100000000, 999999999),
                                       lang=language,
                                       safesearch=safesearch,
                                       dbez=random.randint(100000000, 999999999))

    params['url'] = base_url + search_path

    return params
# get response from search-request
def response(resp):
    """Map a Gigablast JSON response onto the searx result schema.

    :param resp: HTTP response whose ``text`` holds the JSON payload
    :return list: result dicts with 'url', 'title' and 'content' keys
    """
    payload = loads(resp.text)
    # One searx record per raw hit; 'sum' carries the snippet text.
    return [{'url': hit['url'],
             'title': hit['title'],
             'content': hit['sum']}
            for hit in payload['results']]
# get supported languages from their site
def _fetch_supported_languages(resp):
    """Parse language codes out of Gigablast's language menu page.

    :param resp: HTTP response of the supported_languages_url page
    :return list: language codes, region codes upper-cased as 'xx-YY'
    """
    codes = []
    page = fromstring(resp.text)
    for anchor in page.xpath('//span[@id="menu2"]/a'):
        # The language is embedded in the href after an URL-encoded 'lang:'
        pieces = anchor.xpath('./@href')[0].split('lang%3A')
        if len(pieces) != 2:
            continue
        parts = pieces[1].split('_')
        if len(parts) == 2:
            codes.append(parts[0] + '-' + parts[1].upper())
        else:
            codes.append(parts[0])
    return codes
|
"""
def __init__(self, inbound_nodes=[]):
"""
Node's constructor (runs when the object is instantiated). Sets
properties that all nodes need.
"""
# A list of nodes with edges into this node.
self.inbound_nodes = inbound_nodes
# The eventual value of this node. Set by running
# the forward() method.
self.value = None
# A list of nodes that this node outputs to.
self.outbound_nodes = []
# New property! Keys are the inputs to this node and
# their values are the partials of this node with
# respect to that input.
self.gradients = {}
# Sets this node as an outbound node for all of
# this node's inputs.
for node in inbound_nodes:
node.outbound_nodes.append(self)
    def forward(self):
        """
        Every node that uses this class as a base class will
        need to define its own `forward` method.
        """
        # Subclasses must compute self.value from their inbound nodes.
        raise NotImplementedError
    def backward(self):
        """
        Every node that uses this class as a base class will
        need to define its own `backward` method.
        """
        # Subclasses must fill self.gradients from their outbound nodes.
        raise NotImplementedError
class Input(Node):
    """
    A generic input into the network.

    Holds a value supplied externally (assigned during `topological_sort`)
    instead of computing one from inbound nodes.
    """
    def __init__(self):
        # No inbound nodes: an Input's value comes straight from the
        # feed dict, so only the base-class bookkeeping is needed.
        Node.__init__(self)

    def forward(self):
        # Nothing to compute; self.value was assigned externally.
        pass

    def backward(self):
        # An Input has no inputs of its own, so its single gradient entry
        # (keyed by itself) accumulates whatever flows back from consumers.
        # Weights and biases are Inputs too, hence the summation.
        self.gradients = {self: 0}
        for consumer in self.outbound_nodes:
            self.gradients[self] += consumer.gradients[self]
class Linear(Node):
    """
    Represents a node that performs a linear transform.

    Computes value = X @ W + b in forward(), and the matching matrix-calculus
    partials in backward().
    """
    def __init__(self, X, W, b):
        # The base class (Node) constructor. Weights and bias
        # are treated like inbound nodes.
        Node.__init__(self, [X, W, b])

    def forward(self):
        """
        Performs the math behind a linear transform.
        """
        X = self.inbound_nodes[0].value
        W = self.inbound_nodes[1].value
        b = self.inbound_nodes[2].value
        self.value = np.dot(X, W) + b

    def backward(self):
        """
        Calculates the gradient based on the output values.

        Partials: dC/dX = grad . W^T, dC/dW = X^T . grad,
        dC/db = column-wise sum of grad.
        """
        # Initialize a partial for each of the inbound_nodes.
        self.gradients = {n: np.zeros_like(n.value) for n in self.inbound_nodes}
        # Cycle through the outputs. The gradient will change depending
        # on each output, so the gradients are summed over all outputs.
        for n in self.outbound_nodes:
            # Get the partial of the cost with respect to this node.
            grad_cost = n.gradients[self]
            # Set the partial of the loss with respect to this node's inputs.
            self.gradients[self.inbound_nodes[0]] += np.dot(grad_cost, self.inbound_nodes[1].value.T)
            # Set the partial of the loss with respect to this node's weights.
            self.gradients[self.inbound_nodes[1]] += np.dot(self.inbound_nodes[0].value.T, grad_cost)
            # Set the partial of the loss with respect to this node's bias.
            self.gradients[self.inbound_nodes[2]] += np.sum(grad_cost, axis=0, keepdims=False)
class Sigmoid(Node):
    """
    Represents a node that performs the sigmoid activation function.
    """
    def __init__(self, node):
        # Exactly one inbound node whose output gets squashed.
        Node.__init__(self, [node])

    def _sigmoid(self, x):
        """
        Sigmoid helper, shared by `forward` and `backward`.

        `x`: A numpy array-like object.
        """
        return 1. / (1. + np.exp(-x))

    def forward(self):
        """
        Apply the sigmoid to the inbound value.
        """
        self.value = self._sigmoid(self.inbound_nodes[0].value)

    def backward(self):
        """
        Propagate gradients using d/dx sigmoid(x) = s * (1 - s),
        where s is the cached forward value.
        """
        source = self.inbound_nodes[0]
        self.gradients = {source: np.zeros_like(source.value)}
        # Sum the partial with respect to the input over all the outputs.
        for consumer in self.outbound_nodes:
            upstream = consumer.gradients[self]
            self.gradients[source] += self.value * (1 - self.value) * upstream
class MSE(Node):
    def __init__(self, y, a):
        """
        The mean squared error cost function.
        Should be used as the last node for a network.

        `y`: node holding the targets; `a`: node holding the predictions.
        """
        # Call the base class' constructor.
        Node.__init__(self, [y, a])

    def forward(self):
        """
        Calculates the mean squared error.
        """
        # NOTE: We reshape these to avoid possible matrix/vector broadcast
        # errors.
        #
        # For example, if we subtract an array of shape (3,) from an array of shape
        # (3,1) we get an array of shape(3,3) as the result when we want
        # an array of shape (3,1) instead.
        #
        # Making both arrays (3,1) insures the result is (3,1) and does
        # an elementwise subtraction as expected.
        y = self.inbound_nodes[0].value.reshape(-1, 1)
        a = self.inbound_nodes[1].value.reshape(-1, 1)
        # m is the batch size (number of rows in the target node's value).
        self.m = self.inbound_nodes[0].value.shape[0]
        # Save the computed output for backward.
        self.diff = y - a
        self.value = np.mean(self.diff**2)

    def backward(self):
        """
        Calculates the gradient of the cost.

        d/dy mean((y-a)^2) = 2/m (y-a); d/da flips the sign.
        """
        self.gradients[self.inbound_nodes[0]] = (2 / self.m) * self.diff
        self.gradients[self.inbound_nodes[1]] = (-2 / self.m) * self.diff
def topological_sort(feed_dict):
    """
    Sort the nodes in topological order using Kahn's Algorithm.

    `feed_dict`: A dictionary where the key is a `Input` Node and the value is the respective value feed to that Node.

    Returns a list of sorted nodes.
    """
    input_nodes = list(feed_dict)

    # Discover every reachable node and record its in/out edge sets.
    edges = {}
    pending = list(input_nodes)
    while pending:
        node = pending.pop(0)
        edges.setdefault(node, {'in': set(), 'out': set()})
        for succ in node.outbound_nodes:
            edges.setdefault(succ, {'in': set(), 'out': set()})
            edges[node]['out'].add(succ)
            edges[succ]['in'].add(node)
            pending.append(succ)

    # Repeatedly emit nodes with no remaining incoming edges.
    ordering = []
    ready = set(input_nodes)
    while ready:
        node = ready.pop()
        if isinstance(node, Input):
            # Inputs receive their value straight from the feed dict.
            node.value = feed_dict[node]
        ordering.append(node)
        for succ in node.outbound_nodes:
            edges[node]['out'].remove(succ)
            edges[succ]['in'].remove(node)
            # if no other incoming edges add to the ready set
            if not edges[succ]['in']:
                ready.add(succ)
    return ordering
def forward_and_backward(graph):
    """
    Run a forward pass followed by a backward pass over sorted Nodes.

    Arguments:

        `graph`: The result of calling `topological_sort`.
    """
    # Forward: evaluate nodes in topological order.
    for node in graph:
        node.forward()
    # Backward: visit the same nodes in reverse order.
    for node in reversed(graph):
        node.backward()
def sgd_update(trainables, learning_rate=1e-2):
"""
Updates the value of each trainable with SGD.
Arguments:
`trainables`: A list of `Input` Nodes representing weights/biases.
`learning_rate`: The learning rate.
"""
# TODO: update all the `trainables` with SGD
# You can access and assign the value of a trainable with `value` attribute.
# Example:
# for t in trainables:
# t.value = your implementation here
for t in trainables:
partial = t.gradients[t]
t.value -= le |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django | .db import models, migrations
class Migration(migrations.Migration):
    """Give ``managerprofile.picture`` a default avatar image."""

    dependencies = [
        ('managers', '0011_auto_20150422_2018'),
    ]
    operations = [
        migrations.AlterField(
            model_name='managerprofile',
            name='picture',
            # The b'' literals are Python 2 artifacts; kept verbatim because
            # applied migration files must not be edited.
            field=models.ImageField(default=b'/static/assets/admin/layout/img/avatar.jpg', upload_to=b'profiles'),
            preserve_default=True,
        ),
    ]
|
LookingGlassLogHandler
from bagpipe.bgp.engine.bgp_manager import Manager
from bagpipe.bgp.rest_api import RESTAPI
from bagpipe.bgp.vpn import VPNManager
def findDataplaneDrivers(dpConfigs, bgpConfig, isCleaningUp=False):
    """Instantiate a dataplane driver for each configured VPN type.

    :param dict dpConfigs: per-VPN-type dataplane configuration
    :param dict bgpConfig: global BGP configuration (provides local_address)
    :param bool isCleaningUp: when True, skip the driver init step
    :return dict: mapping of VPN type to its instantiated driver
    """
    drivers = dict()
    # NOTE: plain dict iteration works on both Python 2 and 3
    # (iterkeys() is Python-2-only)
    for vpnType in dpConfigs:
        dpConfig = dpConfigs[vpnType]
        if 'dataplane_driver' not in dpConfig:
            logging.error(
                "no dataplane_driver set for %s (%s)", vpnType, dpConfig)
            # BUGFIX: without this `continue` the next line raised a
            # KeyError right after logging the error
            continue

        driverName = dpConfig["dataplane_driver"]
        logging.debug(
            "Creating dataplane driver for %s, with %s", vpnType, driverName)

        # FIXME: this is a hack, dataplane drivers should have a better way to
        #  access any item in the BGP dataplaneConfig
        if 'dataplane_local_address' not in dpConfig:
            dpConfig['dataplane_local_address'] = bgpConfig['local_address']

        # Try the driver name as-is, then under increasingly specific
        # bagpipe package prefixes; stop at the first successful import.
        for tentativeClassName in (driverName,
                                   'bagpipe.%s' % driverName,
                                   'bagpipe.bgp.%s' % driverName,
                                   'bagpipe.bgp.vpn.%s.%s' % (
                                       vpnType, driverName),
                                   ):
            try:
                if '.' not in tentativeClassName:
                    logging.debug(
                        "Not trying to import '%s'", tentativeClassName)
                    continue

                driverClass = utils.import_class(tentativeClassName)
                try:
                    logging.info("Found driver for %s, initiating...", vpnType)
                    # skip the init step if called for cleanup
                    driver = driverClass(dpConfig, not isCleaningUp)
                    drivers[vpnType] = driver
                    logging.info(
                        "Successfully initiated dataplane driver for %s with"
                        " %s", vpnType, tentativeClassName)
                except ImportError as e:
                    logging.debug(
                        "Could not initiate dataplane driver for %s with"
                        " %s: %s", vpnType, tentativeClassName, e)
                except Exception as e:
                    logging.error(
                        "Found class, but error while instantiating dataplane"
                        " driver for %s with %s: %s", vpnType,
                        tentativeClassName, e)
                    logging.error(traceback.format_exc())
                    break
                break
            except SyntaxError as e:
                logging.error(
                    "Found class, but syntax error while instantiating "
                    "dataplane driver for %s with %s: %s", vpnType,
                    tentativeClassName, e)
                break
            except Exception as e:
                logging.debug(
                    "Could not initiate dataplane driver for %s with %s (%s)",
                    vpnType, tentativeClassName, e)
    return drivers
class BgpDaemon(LookingGlass):
    """Daemon wrapper that wires the BGP manager, VPN manager and REST API.

    The std stream / pidfile attributes follow the python-daemon runner
    convention — presumably consumed by an external daemon runner; confirm
    against the launcher code.
    """

    def __init__(self, catchAllLGLogHandler, **kwargs):
        # Daemon-runner plumbing: detach from std streams and track a pidfile.
        self.stdin_path = '/dev/null'
        self.stdout_path = '/dev/null'
        self.stderr_path = '/dev/null'
        self.pidfile_path = '/var/run/bagpipe-bgp/bagpipe-bgp.pid'
        self.pidfile_timeout = 5

        logging.info("BGP manager configuration : %s", kwargs["bgpConfig"])
        self.bgpConfig = kwargs["bgpConfig"]

        logging.info("BGP dataplane dataplaneDriver configuration : %s",
                     kwargs["dataplaneConfig"])
        self.dataplaneConfig = kwargs["dataplaneConfig"]

        logging.info("BGP API configuration : %s", kwargs["apiConfig"])
        self.apiConfig = kwargs["apiConfig"]

        # Handler that feeds all log records into the looking-glass API.
        self.catchAllLGLogHandler = catchAllLGLogHandler

    def run(self):
        """Create the drivers and managers, then block serving the REST API."""
        logging.info("Starting BGP component...")

        logging.debug("Creating dataplane drivers")
        drivers = findDataplaneDrivers(self.dataplaneConfig, self.bgpConfig)

        # Abort startup if any configured VPN type has no usable driver.
        for vpnType in self.dataplaneConfig.iterkeys():
            if vpnType not in drivers:
                logging.error(
                    "Could not initiate any dataplane driver for %s", vpnType)
                return

        logging.debug("Creating BGP manager")
        self.bgpManager = Manager(self.bgpConfig)

        logging.debug("Creating VPN manager")
        self.vpnManager = VPNManager(self.bgpManager, drivers)

        # BGP component REST API
        logging.debug("Creating REST API")
        bgpapi = RESTAPI(
            self.apiConfig, self, self.vpnManager, self.catchAllLGLogHandler)
        bgpapi.run()

    def stop(self, signum, frame):
        """Signal handler: stop the managers and raise SystemExit."""
        logging.info("Received signal %(signum)r, stopping...", vars())
        self.vpnManager.stop()
        self.bgpManager.stop()
        # would need to stop main thread ?
        logging.info("All threads now stopped...")
        exception = SystemExit("Terminated on signal %(signum)r" % vars())
        raise exception

    def getLookingGlassLocalInfo(self, pathPrefix):
        """Expose the dataplane and BGP configs through the looking glass."""
        return {
            "dataplane": self.dataplaneConfig,
            "bgp": self.bgpConfig
        }
def _loadConfig(configFile):
    """Parse the BGP component INI configuration file.

    :param configFile: path to the INI file; must contain BGP and API
        sections plus DATAPLANE_DRIVER_{IPVPN,EVPN} (or the legacy
        DATAPLANE_DRIVER) sections
    :return: dict with 'bgpConfig', 'dataplaneConfig' and 'apiConfig' keys
    """
    parser = SafeConfigParser()

    # parser.read returns the list of files successfully read
    if (len(parser.read(configFile)) == 0):
        logging.error("Configuration file not found (%s)", configFile)
        exit()

    bgpConfig = parser.items("BGP")

    dataplaneConfig = dict()
    for vpnType in ['ipvpn', 'evpn']:
        try:
            dataplaneConfig[vpnType] = dict(
                parser.items("DATAPLANE_DRIVER_%s" % vpnType.upper()))
        except NoSectionError:
            if vpnType == "ipvpn":  # backward compat for ipvpn
                dataplaneConfig['ipvpn'] = dict(
                    parser.items("DATAPLANE_DRIVER"))
                logging.warning("Config file is obsolete, should have a "
                                "DATAPLANE_DRIVER_IPVPN section instead of"
                                " DATAPLANE_DRIVER")
            else:
                # NOTE(review): only logged, not fatal — the evpn section
                # appears to be optional; confirm against callers.
                logging.error(
                    "Config file should have a DATAPLANE_DRIVER_EVPN section")

    apiConfig = parser.items("API")
    # TODO: add a default API config

    config = {"bgpConfig": dict(bgpConfig),
              "dataplaneConfig": dataplaneConfig,
              "apiConfig": dict(apiConfig)
              }

    return config
def daemon_main():
usage = "usage: %prog [options] (see --help)"
parser = OptionParser(usage)
parser.add_option("--config-file", dest="configFile",
help="Set BGP component configuration file path",
default="/etc/bagpipe-bgp/bgp.conf")
parser.add_option("--log-file", dest="logFile",
help="Set logging configuration file path",
default="/etc/bagpipe-bgp/log.conf")
parser.add_option("--no-daemon", dest="daemon", action="store_false",
help="Do not daemonize", default=True)
(options, _) = parser.parse_args()
action = sys.argv[1]
assert(action == "start" or action == "stop")
if not os.path.isfile(options.logFile):
logging.basicConfig()
print "no logging configuration file at %s" % options.logFile
logging.warning("no logging configuration file at %s", options.logFile)
else:
logging.config.fileConfig(
options.logFile, disable_existing_loggers=False)
if action == "start":
logging.root.name = "Main"
logging.info("Starting...")
else: # stop
logging.root.name = "Stopper"
logging.info("Signal daemon to stop")
catchAllLogHandler = LookingGlassLogHandler()
# we inject this catch all log handler in all configured loggers
for (loggerName, logger) in Logger.manager.loggerDict.iteritems():
if isinstance(logger, Logger):
if (not logger.propagate and logger.parent is not None):
logging.debug("Adding looking glass log handler to logger: %s",
loggerName)
logger.addHandler(catchAllLogHandler)
logging.root.addHandler(catchAllLogHandler)
# logging_tree. |
import os
import logging
import numpy as np
from torch.utils.data import Dataset
import cv2
from PIL import Image
import subprocess
import torchvision.transforms as tfs
np.random.seed(0)
def TransCommon(image):
    """Histogram-equalize then lightly blur a grayscale image."""
    equalized = cv2.equalizeHist(image)
    # 3x3 Gaussian kernel; sigma 0 lets OpenCV derive it from the kernel size.
    return cv2.GaussianBlur(equalized, (3, 3), 0)
def TransAug(image):
    """Apply a small random affine jitter (train-time augmentation).

    Expects a PIL Image (see _load_image); returns the transformed image.
    """
    # NOTE(review): `fillcolor` is the legacy torchvision argument name
    # (renamed to `fill` in newer releases) — confirm against the pinned
    # torchvision version.
    img_aug = tfs.Compose([
        tfs.RandomAffine(degrees=(-15, 15), translate=(0.05, 0.05),
                         scale=(0.95, 1.05), fillcolor=128)
    ])
    image = img_aug(image)
    return image
def GetTransforms(image, target=None, type='common'):
    """Dispatch to the transform pipeline selected by ``type``.

    :param image: input image
    :param target: unsupported for now; must be None
    :param type: transform name, matched case-insensitively so the declared
        default 'common' works ('Common', 'None', 'Aug' also accepted)
    :return: the (possibly transformed) image
    :raises Exception: if target is given or the type is unknown
    """
    # target is not supported yet
    if target is not None:
        raise Exception(
            'Target is not support now ! ')
    # BUGFIX: the original compared type.strip() against 'Common'/'None'/'Aug'
    # only, so the documented default 'common' raised "Unknown". Normalize
    # case before matching.
    transform_type = type.strip().lower()
    if transform_type == 'common':
        return TransCommon(image)
    elif transform_type == 'none':
        return image
    elif transform_type == 'aug':
        return TransAug(image)
    # BUGFIX: the message previously lacked the '{}' placeholder, so the
    # offending type name was never shown.
    raise Exception(
        'Unknown transforms_type : {}'.format(type))
class ImageDataset(Dataset):
    """Image dataset driven by a CSV label file.

    Reads a CSV whose first column is an image path (relative to
    ``data_path``); five header columns (7, 10, 11, 13, 15) name the target
    findings. Images are loaded as grayscale via OpenCV, optionally cached
    in memory, augmented in train mode, and returned as C x H x W float32
    arrays. Assumes a CheXpert-style CSV layout — TODO confirm the column
    indices against the actual label file.
    """

    def __init__(self, data_path, label_path, cfg, mode='train', subsample_size=-1, subsample_seed=1234):
        """
        :param data_path: root directory joined to the CSV's image paths
        :param label_path: CSV label file; first line is the header
        :param cfg: config object (enhance_index, enhance_times,
            cache_bitmap, long_side, pixel_mean, ...)
        :param mode: 'train', 'dev', 'test' or 'heatmap'; selects the
            return shape of __getitem__
        :param subsample_size: must stay -1 (dynamic subsampling disabled)
        :param subsample_seed: unused while subsampling is disabled
        """
        self.cfg = cfg
        self._label_header = None
        self.data_path = data_path
        self._image_paths = []
        self._labels = []
        self._mode = mode
        # Two raw-value -> binary-label maps: index 0 maps '-1.0'
        # (uncertain) to negative, index 1 maps it to positive.
        self.dict = [{'1.0': '1', '': '0', '0.0': '0', '-1.0': '0'},
                     {'1.0': '1', '': '0', '0.0': '0', '-1.0': '1'}, ]
        print(f'ImageDataset constructed with data_path = {self.data_path}')
        with open(label_path) as f:
            header = f.readline().strip('\n').split(',')
            self._label_header = [
                header[7],
                header[10],
                header[11],
                header[13],
                header[15]]
            for line in f:
                labels = []
                fields = line.strip('\n').split(',')
                image_path = os.path.join(self.data_path, os.path.expanduser(fields[0]))
                flg_enhance = False
                for index, value in enumerate(fields[5:]):
                    # Columns at offsets 5 and 8 use the uncertain-as-positive map.
                    if index == 5 or index == 8:
                        labels.append(self.dict[1].get(value))
                        if self.dict[1].get(
                                value) == '1' and \
                                self.cfg.enhance_index.count(index) > 0:
                            flg_enhance = True
                    # Offsets 2, 6 and 10 use the uncertain-as-negative map.
                    elif index == 2 or index == 6 or index == 10:
                        labels.append(self.dict[0].get(value))
                        if self.dict[0].get(
                                value) == '1' and \
                                self.cfg.enhance_index.count(index) > 0:
                            flg_enhance = True
                # labels = ([self.dict.get(n, n) for n in fields[5:]])
                labels = list(map(int, labels))
                self._image_paths.append(image_path)
                self._labels.append(labels)
                # Oversample rows with configured positive findings (train only).
                if flg_enhance and self._mode == 'train':
                    for i in range(self.cfg.enhance_times):
                        self._image_paths.append(image_path)
                        self._labels.append(labels)
        self._num_image = len(self._image_paths)
        # NOTE(2020.04.30) we started using explicit config of data index, so disabling this dynamic subsampling
        # features to avoid confusion.
        assert subsample_size == -1
        # if subsample_size > 0:
        #     if subsample_size > self._num_image:
        #         raise AssertionError(f'subsample_size ({subsample_size}) should be less than {self._num_image}')
        #     rng = np.random.RandomState(seed=subsample_seed)
        #     idx = rng.choice(self._num_image, size=subsample_size, replace=False)
        #     self._image_paths = [self._image_paths[i] for i in idx]
        #     self._labels = [self._labels[i] for i in idx]
        #     self._num_image = len(self._labels)
        if cfg.cache_bitmap:
            self._bitmap_cache = self._build_bitmap_cache()
        else:
            self._bitmap_cache = None

    def __len__(self):
        # Sample count after any train-time oversampling.
        return self._num_image

    def _border_pad(self, image):
        """Pad image bottom/right up to cfg.long_side, per cfg.border_pad mode."""
        h, w, c = image.shape

        if self.cfg.border_pad == 'zero':
            image = np.pad(
                image,
                ((0, self.cfg.long_side - h),
                 (0, self.cfg.long_side - w), (0, 0)),
                mode='constant', constant_values=0.0
            )
        elif self.cfg.border_pad == 'pixel_mean':
            image = np.pad(
                image,
                ((0, self.cfg.long_side - h),
                 (0, self.cfg.long_side - w), (0, 0)),
                mode='constant', constant_values=self.cfg.pixel_mean
            )
        else:
            # cfg.border_pad is passed straight through as a numpy pad mode.
            image = np.pad(
                image,
                ((0, self.cfg.long_side - h),
                 (0, self.cfg.long_side - w), (0, 0)),
                mode=self.cfg.border_pad
            )

        return image

    def _fix_ratio(self, image):
        """Resize so the longer side equals cfg.long_side, then pad square."""
        h, w, c = image.shape

        if h >= w:
            ratio = h * 1.0 / w
            h_ = self.cfg.long_side
            w_ = round(h_ / ratio)
        else:
            ratio = w * 1.0 / h
            w_ = self.cfg.long_side
            h_ = round(w_ / ratio)

        image = cv2.resize(image, dsize=(w_, h_),
                           interpolation=cv2.INTER_LINEAR)
        image = self._border_pad(image)

        return image

    def _build_bitmap_cache(self):
        """Eagerly load every image into memory (used when cfg.cache_bitmap)."""
        print('Pre-loading all images...(might take a while)')
        return [self._load_image(idx) for idx in range(self._num_image)]

    def _load_image(self, idx):
        # Read as grayscale (flag 0) and wrap into a PIL Image for torchvision.
        image = cv2.imread(self._image_paths[idx], 0)
        image = Image.fromarray(image)
        return image

    def __getitem__(self, idx):
        """Return the sample at ``idx``; the tuple layout depends on mode.

        train/dev -> (image, labels); test -> (image, path);
        heatmap -> (image, path, labels).
        """
        if self._bitmap_cache is not None:
            image = self._bitmap_cache[idx]
        else:
            image = self._load_image(idx)

        if self._mode == 'train':
            image = GetTransforms(image, type=self.cfg.use_transforms_type)
        image = np.array(image)
        if self.cfg.use_equalizeHist:
            image = cv2.equalizeHist(image)
        # Replicate the grayscale channel into RGB for 3-channel backbones.
        image = cv2.cvtColor(image, cv2.COLOR_GRAY2RGB).astype(np.float32)

        if self.cfg.fix_ratio:
            image = self._fix_ratio(image)
        else:
            image = cv2.resize(image, dsize=(self.cfg.width, self.cfg.height),
                               interpolation=cv2.INTER_LINEAR)
        if self.cfg.gaussian_blur > 0:
            image = cv2.GaussianBlur(image, (self.cfg.gaussian_blur,
                                             self.cfg.gaussian_blur), 0)
        # normalization
        image -= self.cfg.pixel_mean
        # vgg and resnet do not use pixel_std, densenet and inception use.
        if self.cfg.use_pixel_std:
            image /= self.cfg.pixel_std
        # normal image tensor :  H x W x C
        # torch image tensor :   C X H X W
        image = image.transpose((2, 0, 1))
        labels = np.array(self._labels[idx]).astype(np.float32)

        path = self._image_paths[idx]

        if self._mode == 'train' or self._mode == 'dev':
            return (image, labels)
        elif self._mode == 'test':
            return (image, path)
        elif self._mode == 'heatmap':
            return (image, path, labels)
        else:
            raise Exception('Unknown mode : {}'.format(self._mode))
|
from __future__ import unicode_li | terals |
from django.apps import AppConfig
class PermsConfig(AppConfig):
    """Application configuration for the ``perms`` Django app."""

    # Dotted path of the application; used by Django's app registry.
    name = 'perms'
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from config.template_middleware import TemplateResponse
from gaecookie.decorator import no_csrf
from gaepermission.decorator import login_not_required
from routes.campapel.home import returnIndex
from tekton import router
from tekton.gae.middleware.redirect import RedirectResponse
from gaeforms.ndb.form import ModelForm
from gaegraph.model import Node
from google.appengine.ext import ndb
from tekton.gae.middleware.redirect import RedirectResponse
from routes.campapel.modelo import CamPapel, CamPapelForm
@login_not_required
@no_csrf
def form(_resp):
    """Render the empty CamPapel creation form."""
    context = {'salvar_path': router.to_path(salvar)}
    return TemplateResponse(context, 'campapel/form.html')
@login_not_required
def salvar(**prop):
    """Validate the submitted form data and persist a new CamPapel."""
    submitted_form = CamPapelForm(**prop)
    validation_errors = submitted_form.validate()
    if not validation_errors:
        entity = submitted_form.fill_model()
        entity.put()
        return RedirectResponse(returnIndex())
    # Re-render the form with the validation errors and the raw input.
    context = {'salvar_path': router.to_path(salvar),
               'erros': validation_errors,
               'camPapel': prop}
    return TemplateResponse(context, 'campapel/form.html')
@login_not_required
@no_csrf
def editar_form(camPapel_id):
    """Render the edit form pre-populated with an existing CamPapel."""
    entity_id = int(camPapel_id)
    entity = CamPapel.get_by_id(entity_id)
    # The save path carries the id so the edit POST targets this entity.
    context = {'salvar_path': router.to_path(editar, entity_id), 'camPapel': entity}
    return TemplateResponse(context, template_path='campapel/form.html')
@login_not_required
def editar(camPapel_id, **prop):
    """Validate and persist changes to an existing CamPapel.

    :param camPapel_id: id of the entity being edited
    :param prop: submitted form fields
    """
    camPapel_id = int(camPapel_id)
    camPapel = CamPapel.get_by_id(camPapel_id)
    camPapelF = CamPapelForm(**prop)
    erros = camPapelF.validate()
    if erros:
        # BUGFIX: the error path built the save URL without the entity id
        # (router.to_path(editar)), so resubmitting the corrected form
        # posted to a URL missing the id; include it as editar_form does.
        contexto = {'salvar_path': router.to_path(editar, camPapel_id),
                    'erros': erros,
                    'camPapel': camPapelF}
        return TemplateResponse(contexto, 'campapel/form.html')
    else:
        camPapelF.fill_model(camPapel)
        camPapel.put()
        return RedirectResponse(router.to_path(returnIndex()))
@login_not_required
def deletar(camPapel_id):
    """Delete the CamPapel identified by ``camPapel_id`` and go back home."""
    ndb.Key(CamPapel, int(camPapel_id)).delete()
    return RedirectResponse(router.to_path(returnIndex()))
#!/usr/bin/env vpython3
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import argparse
import logging
import sys
import gold_inexact_matching.base_parameter_optimizer as base_optimizer
import gold_inexact_matching.binary_search_parameter_optimizer\
as binary_optimizer
import gold_inexact_matching.brute_force_parameter_optimizer as brute_optimizer
import gold_inexact_matching.local_minima_parameter_optimizer\
as local_optimizer
from gold_inexact_matching import optimizer_set
# Script to find suitable values for Skia Gold inexact matching.
#
# Inexact matching in Skia Gold has three tunable parameters:
# 1. The max number of differing pixels.
# 2. The max delta for any single pixel.
# 3. The threshold for a Sobel filter.
#
# Ideally, we use the following hierarchy of comparison approaches:
# 1. Exact matching.
# 2. Exact matching after a Sobel filter is applied.
# 3. Fuzzy matching after a Sobel filter is applied.
#
# However, there may be cases where only using a Sobel filter requires masking a
# very large amount of the image compared to Sobel + very conservative fuzzy
# matching.
#
# Even if such cases are not hit, the process of determining good values for the
# parameters is quite tedious since it requires downloading images from Gold and
# manually running multiple calls to `goldctl match`.
#
# This script attempts to remedy both issues by handling all of the trial and
# error and suggesting potential parameter values for the user to choose from.
def CreateArgumentParser():
  """Build the command line parser.

  One subcommand is registered per optimization algorithm (binary_search,
  local_minima, brute_force).  Each optimizer class adds its own arguments
  and stores itself in args.clazz so main() can instantiate it.

  Returns:
    The configured argparse.ArgumentParser.
  """
  parser = argparse.ArgumentParser(
      formatter_class=argparse.ArgumentDefaultsHelpFormatter)
  script_parser = parser.add_argument_group('Script Arguments')
  script_parser.add_argument('-v',
                             '--verbose',
                             dest='verbose_count',
                             default=0,
                             action='count',
                             # BUG FIX: help text was missing its closing
                             # parenthesis.
                             help='Verbose level (multiple times for more)')
  subparsers = parser.add_subparsers(help='Optimization algorithm')
  binary_parser = subparsers.add_parser(
      'binary_search',
      formatter_class=argparse.ArgumentDefaultsHelpFormatter,
      help='Perform a binary search to optimize a single parameter. The best '
      'option if you only want to tune one parameter.')
  binary_parser.set_defaults(
      clazz=binary_optimizer.BinarySearchParameterOptimizer)
  binary_optimizer.BinarySearchParameterOptimizer.AddArguments(binary_parser)
  local_parser = subparsers.add_parser(
      'local_minima',
      formatter_class=argparse.ArgumentDefaultsHelpFormatter,
      help='Perform a BFS to find local minima using weights for each '
      'parameter. Slower than binary searching, but supports an arbitrary '
      'number of parameters.')
  local_parser.set_defaults(clazz=local_optimizer.LocalMinimaParameterOptimizer)
  local_optimizer.LocalMinimaParameterOptimizer.AddArguments(local_parser)
  brute_parser = subparsers.add_parser(
      'brute_force',
      formatter_class=argparse.ArgumentDefaultsHelpFormatter,
      help='Brute force all possible combinations. VERY, VERY slow, but can '
      'potentially find better values than local_minima.')
  brute_parser.set_defaults(clazz=brute_optimizer.BruteForceParameterOptimizer)
  brute_optimizer.BruteForceParameterOptimizer.AddArguments(brute_parser)
  return parser
def SetLoggingVerbosity(args):
  """Set the root logger's level from the parsed -v/--verbose count.

  0 occurrences -> WARNING, 1 -> INFO, 2 or more -> DEBUG.
  """
  level_by_count = {0: logging.WARNING, 1: logging.INFO}
  level = level_by_count.get(args.verbose_count, logging.DEBUG)
  logging.getLogger().setLevel(level)
def main():
  """Parse arguments, configure logging, and run the chosen optimizer."""
  args = CreateArgumentParser().parse_args()
  SetLoggingVerbosity(args)
  # args.clazz is the optimizer class selected by the subcommand.
  optimizer_set.OptimizerSet(args, args.clazz).RunOptimization()
  return 0
if __name__ == '__main__':
  sys.exit(main())
|
# pylint: disable=too-many-lines
"""
Component to interface with cameras.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/camera/
"""
import asyncio
import logging
from aiohttp import web
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA # noqa
from homeassistant.components.http import HomeAssistantView
DOMAIN = 'camera'
DEPENDENCIES = ['http']
SCAN_INTERVAL = 30
ENTITY_ID_FORMAT = DOMAIN + '.{}'
STATE_RECORDING = 'recording'
STATE_STREAMING = 'streaming'
STATE_IDLE = 'idle'
ENTITY_IMAGE_URL = '/api/camera_proxy/{0}?token={1}'
@asyncio.coroutine
def async_setup(hass, config):
    """Setup the camera component.

    Registers the HTTP views that proxy still images and MJPEG streams,
    then sets up all configured camera platforms.
    """
    component = EntityComponent(
        logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL)
    # Both views share the component's entity registry so they can resolve
    # entity_id -> Camera instances at request time.
    hass.http.register_view(CameraImageView(hass, component.entities))
    hass.http.register_view(CameraMjpegStream(hass, component.entities))
    yield from component.async_setup(config)
    return True
class Camera(Entity):
    """The base class for camera entities."""
    def __init__(self):
        """Initialize a camera."""
        # True while an MJPEG stream is being served for this camera.
        self.is_streaming = False
    @property
    def access_token(self):
        """Access token for this camera."""
        # NOTE(review): id(self) is only unique for this object's lifetime
        # and is predictable; it gates the proxy URLs below but is not a
        # cryptographic secret.
        return str(id(self))
    @property
    def should_poll(self):
        """No need to poll cameras."""
        return False
    @property
    def entity_picture(self):
        """Return a link to the camera feed as entity picture."""
        return ENTITY_IMAGE_URL.format(self.entity_id, self.access_token)
    @property
    def is_recording(self):
        """Return true if the device is recording."""
        return False
    @property
    def brand(self):
        """Camera brand."""
        return None
    @property
    def model(self):
        """Camera model."""
        return None
    def camera_image(self):
        """Return bytes of camera image."""
        # Subclasses must override this synchronous image fetch.
        raise NotImplementedError()
    @asyncio.coroutine
    def async_camera_image(self):
        """Return bytes of camera image.
        This method must be run in the event loop.
        """
        # Run the blocking camera_image() in the default executor so the
        # event loop is not blocked while the platform fetches a frame.
        image = yield from self.hass.loop.run_in_executor(
            None, self.camera_image)
        return image
    @asyncio.coroutine
    def handle_async_mjpeg_stream(self, request):
        """Generate an HTTP MJPEG stream from camera images.
        This method must be run in the event loop.
        """
        response = web.StreamResponse()
        response.content_type = ('multipart/x-mixed-replace; '
                                 'boundary=--jpegboundary')
        yield from response.prepare(request)
        def write(img_bytes):
            """Write image to stream."""
            # Each frame is one multipart section: boundary, headers, JPEG.
            response.write(bytes(
                '--jpegboundary\r\n'
                'Content-Type: image/jpeg\r\n'
                'Content-Length: {}\r\n\r\n'.format(
                    len(img_bytes)), 'utf-8') + img_bytes + b'\r\n')
        last_image = None
        try:
            while True:
                img_bytes = yield from self.async_camera_image()
                if not img_bytes:
                    # No frame available: end the stream.
                    break
                # Only emit a frame when the image actually changed.
                if img_bytes is not None and img_bytes != last_image:
                    write(img_bytes)
                    # Chrome seems to always ignore first picture,
                    # print it twice.
                    if last_image is None:
                        write(img_bytes)
                    last_image = img_bytes
                    yield from response.drain()
                # Poll the camera at ~2 fps.
                yield from asyncio.sleep(.5)
        finally:
            yield from response.write_eof()
    @property
    def state(self):
        """Camera state."""
        # Recording takes precedence over streaming.
        if self.is_recording:
            return STATE_RECORDING
        elif self.is_streaming:
            return STATE_STREAMING
        else:
            return STATE_IDLE
    @property
    def state_attributes(self):
        """Camera state attributes."""
        attr = {
            'access_token': self.access_token,
        }
        if self.model:
            attr['model_name'] = self.model
        if self.brand:
            attr['brand'] = self.brand
        return attr
class CameraView(HomeAssistantView):
    """Base CameraView."""
    # Authentication is handled manually in get() so that a valid
    # per-camera access token can substitute for normal API auth.
    requires_auth = False
    def __init__(self, hass, entities):
        """Initialize a basic camera view."""
        super().__init__(hass)
        # Mapping of entity_id -> Camera, shared with the entity component.
        self.entities = entities
    @asyncio.coroutine
    def get(self, request, entity_id):
        """Start a get request.

        Resolves the camera, checks authentication (session or ?token=),
        then delegates to handle().  404 for unknown cameras, 401 when
        unauthenticated.
        """
        camera = self.entities.get(entity_id)
        if camera is None:
            return web.Response(status=404)
        authenticated = (request.authenticated or
                         request.GET.get('token') == camera.access_token)
        if not authenticated:
            return web.Response(status=401)
        response = yield from self.handle(request, camera)
        return response
    @asyncio.coroutine
    def handle(self, request, camera):
        """Handle the camera request."""
        # Subclasses implement the actual image/stream serving.
        raise NotImplementedError()
class CameraImageView(CameraView):
    """Camera view to serve an image."""
    url = "/api/camera_proxy/{entity_id}"
    name = "api:camera:image"
    @asyncio.coroutine
    def handle(self, request, camera):
        """Serve camera image."""
        image = yield from camera.async_camera_image()
        if image is None:
            # The camera could not produce an image.
            return web.Response(status=500)
        return web.Response(body=image)
class CameraMjpegStream(CameraView):
    """Camera View to serve an MJPEG stream."""
    url = "/api/camera_proxy_stream/{entity_id}"
    name = "api:camera:stream"
    @asyncio.coroutine
    def handle(self, request, camera):
        """Serve camera image.

        NOTE(review): streams directly to the request and returns None,
        unlike CameraImageView which returns a Response object -- confirm
        that the caller (CameraView.get) tolerates a None return here.
        """
        yield from camera.handle_async_mjpeg_stream(request)
|
import pytest
from testutils import get_co, get_bytecode
from equip import BytecodeObject, BlockVisitor
from equip.bytecode import MethodDeclaration, TypeDeclaration, ModuleDeclaration
from equip.bytecode.utils import show_bytecode
import equip.utils.log as logutils
from equip.utils.log import logger
logutils.enableLogger(to_file='./equip.log')
from equip.analysis import ControlFlow, BasicBlock
SIMPLE_PROGRAM = """
import random
import sys
a = lambda x, y: x + (y if foo == 'bar' else x)
def some_value(i):
if (i % 2) == 0:
print "even",
elif foobar:
print "whatever"
else:
print "odd",
for n in range(2, 10):
for x in range(2, n):
if n % x == 0:
print n, 'equals', x, '*', n/x
break
print "foobar"
else:
# loop fell through without finding a factor
print n, 'is a prime number'
print "number: %d" % i
return i - 1
def ask_ok(prompt, retries=4, complaint='Yes or no, please!'):
while True:
ok = raw_input(prompt)
if ok in ('y', 'ye', 'yes'):
return True
if ok in ('n', 'no', 'nop', 'nope'):
return False
print False
retries = retries - 1
if retries < 0:
raise IOError('refusenik user')
print "Never reached"
print complaint
if foobar:
print "whatever"
def with_stmt(something):
with open('output.txt', 'w') as f:
f.write('Hi there!')
def exception_tests():
try:
fd = open('something')
except SomeException, ex:
print "SomeException"
except Exception, ex:
print "Last Exception"
finally:
print "Finally"
def while_loop(data, start):
while start < len(data):
print start
start += 1
if start > 10:
return -1
def main():
for i in range(1, random.randint()):
print some_value(i)
print "Call stats:"
items = sys.callstats().items()
items = [(value, key) for key, value in items]
items.sort()
items.reverse()
for va | lue,key in items:
print "%30s: %30s"%(key, value)
def return_Stmts(i):
if i == 1:
return 1
elif i == 2:
return 2
print "This is | something else"
if __name__ == '__main__':
main()
"""
def test_block_visitor():
  """Smoke test: parse SIMPLE_PROGRAM and walk every basic block of every
  control-flow graph with a logging visitor."""
  co_simple = get_co(SIMPLE_PROGRAM)
  assert co_simple is not None
  bytecode_object = BytecodeObject('<string>')
  bytecode_object.parse_code(co_simple)
  class BlockPrinterVisitor(BlockVisitor):
    def __init__(self):
      BlockVisitor.__init__(self)
    def new_control_flow(self):
      # Called once per function/module when its CFG has been built.
      logger.debug("Received new CFG: %s", self.control_flow)
    def visit(self, block):
      # Called for each basic block of the current CFG.
      logger.debug("Visiting block: %s", block)
      logger.debug('\n' + show_bytecode(block.bytecode))
  visitor = BlockPrinterVisitor()
  bytecode_object.accept(visitor)
|
# -*- coding: utf-8 -*-
# Copyright (C) 2010 Wil Mahan <wmahan+fatics@gmail.com>
#
# This file is part of FatICS.
#
# FatICS is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# FatICS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with FatICS. If not, see <http://www.gnu.org/licenses/>.
#
import re
import offer
import game
from command_parser import BadCommandError
from command import ics_command, Command
from game_constants import *
from db import db
class GameMixin(object):
    """Helpers shared by game commands for resolving which game a command
    applies to."""
    def _get_played_game(self, conn):
        # Return the game the user is currently *playing* (gtype PLAYED),
        # or None after telling the user why.
        g = conn.user.session.game
        if not g or g.gtype != game.PLAYED:
            g = None
            conn.write(_("You are not playing a game.\n"))
        return g
    def _game_param(self, param, conn):
        """ Find a game from a command argument, currently being
        played, examined, or observed, prioritized in that order. """
        if param is not None:
            # Explicit argument: look the game up by name or number.
            g = game.from_name_or_number(param, conn)
        else:
            if conn.user.session.game:
                g = conn.user.session.game
            elif conn.user.session.observed:
                g = conn.user.session.observed.primary()
            else:
                conn.write(_("You are not playing, examining, or observing a game.\n"))
                g = None
        return g
@ics_command('abort', 'n')
class Abort(Command, GameMixin):
    def run(self, args, conn):
        """Abort the current game, or offer an abort to the opponent.

        With fewer than 2 half-moves played the game is aborted
        unilaterally; otherwise an abort offer is sent.
        """
        g = self._get_played_game(conn)
        if not g:
            return
        '''if len(conn.user.session.games) > 1:
            conn.write(_('Please use "simabort" for simuls.\n'))
            return'''
        # CLEANUP: removed the redundant "g = conn.user.session.game"
        # re-fetch; _get_played_game() already returned that game.
        if g.variant.pos.ply < 2:
            g.result('Game aborted on move 1 by %s' % conn.user.name, '*')
        else:
            offer.Abort(g, conn.user)
@ics_command('adjourn', '')
class Adjourn(Command, GameMixin):
    def run(self, args, conn):
        """Offer to adjourn the game currently being played."""
        g = self._get_played_game(conn)
        if not g:
            return
        # CLEANUP: removed the redundant "g = conn.user.session.game"
        # re-fetch; _get_played_game() already returned that game.
        #if g.variant.pos.ply < 5:
        offer.Adjourn(g, conn.user)
@ics_command('draw', 'o')
class Draw(Command, GameMixin):
    def run(self, args, conn):
        """Offer a draw in the game being played.  The parameterized form
        (e.g. claiming a draw by number) is not implemented yet."""
        if args[0] is None:
            g = self._get_played_game(conn)
            if not g:
                return
            offer.Draw(g, conn.user)
        else:
            conn.write('TODO: DRAW PARAM\n')
@ics_command('resign', 'o')
class Resign(Command, GameMixin):
    def run(self, args, conn):
        """Resign the game in progress.  Resigning an adjourned game
        against a named player is not implemented yet."""
        if args[0] is not None:
            conn.write('TODO: RESIGN PLAYER\n')
            return
        g = self._get_played_game(conn)
        if g:
            g.resign(conn.user)
@ics_command('eco', 'oo')
class Eco(Command, GameMixin):
    """Look up ECO/NIC opening codes, either for a game or directly in
    the openings database."""
    # ECO codes look like "a00" (optional trailing letter); NIC codes
    # look like "ab.01".
    eco_pat = re.compile(r'[a-z][0-9][0-9][a-z]?')
    nic_pat = re.compile(r'[a-z][a-z]\.[0-9][0-9]')
    def run(self, args, conn):
        """With two args, look up an ECO ('e') or NIC ('n') code in the
        database; otherwise show the opening codes of the selected (or
        current) game."""
        g = None
        if args[1] is not None:
            assert(args[0] is not None)
            rows = []
            if args[0] == 'e':
                if not self.eco_pat.match(args[1]):
                    conn.write(_("You haven't specified a valid ECO code.\n"))
                else:
                    rows = db.look_up_eco(args[1])
            elif args[0] == 'n':
                if not self.nic_pat.match(args[1]):
                    conn.write(_("You haven't specified a valid NIC code.\n"))
                else:
                    rows = db.look_up_nic(args[1])
            else:
                raise BadCommandError()
            for row in rows:
                # Fill in display defaults for missing columns.
                if row['eco'] is None:
                    row['eco'] = 'A00'
                if row['nic'] is None:
                    row['nic'] = '-----'
                if row['long_'] is None:
                    row['long_'] = 'Unknown / not matched'
                assert(row['fen'] is not None)
                conn.write('\n')
                conn.write(' ECO: %s\n' % row['eco'])
                conn.write(' NIC: %s\n' % row['nic'])
                conn.write(' LONG: %s\n' % row['long_'])
                conn.write(' FEN: %s\n' % row['fen'])
        else:
            g = self._game_param(args[0], conn)
            if g:
                # CLEANUP: renamed local "long" -> "long_name" to stop
                # shadowing the builtin; output is unchanged.
                (ply, eco, long_name) = g.get_eco()
                (nicply, nic) = g.get_nic()
                conn.write(_('Eco for game %d (%s vs. %s):\n') % (g.number, g.white_name, g.black_name))
                conn.write(_(' ECO[%3d]: %s\n') % (ply, eco))
                conn.write(_(' NIC[%3d]: %s\n') % (nicply, nic))
                conn.write(_('LONG[%3d]: %s\n') % (ply, long_name))
@ics_command('moves', 'n')
class Moves(Command, GameMixin):
    """Display the move list of a game."""
    def run(self, args, conn):
        """Send the move list of the selected (or current) game."""
        target_game = self._game_param(args[0], conn)
        if target_game:
            target_game.write_moves(conn)
@ics_command('moretime', 'd')
class Moretime(Command, GameMixin):
    """Grant the opponent extra clock time."""
    def run(self, args, conn):
        """Add args[0] seconds (1..36000) to the opponent's clock."""
        g = self._get_played_game(conn)
        if not g:
            return
        secs = args[0]
        if 1 <= secs <= 36000:
            g.moretime(secs, conn.user)
        else:
            conn.write(_('Invalid number of seconds.\n'))
@ics_command('flag', '')
class Flag(Command):
    def run(self, args, conn):
        """Claim a win on time against the opponent."""
        if not conn.user.session.game:
            conn.write(_("You are not playing a game.\n"))
            return
        g = conn.user.session.game
        # NOTE(review): check_flag() presumably ends the game itself when
        # the claim is valid -- confirm in the clock module.
        if not g.clock.check_flag(g, opp(g.get_user_side(conn.user))):
            conn.write(_('Your opponent is not out of time.\n'))
@ics_command('refresh', 'n')
class Refresh(Command, GameMixin):
    def run(self, args, conn):
        """Resend the board of the selected (or current) game."""
        g = self._game_param(args[0], conn)
        if g:
            g.send_board(conn.user, isolated=True)
@ics_command('time', 'n')
class Time(Command, GameMixin):
    def run(self, args, conn):
        """Show the remaining clock time for both players of the selected
        (or current) game."""
        g = self._game_param(args[0], conn)
        if g:
            (white_clock, black_clock) = g.clock.as_str()
            g.send_info_str(conn.user)
            conn.write(_('White Clock : %s\n') % white_clock)
            conn.write(_('Black Clock : %s\n') % black_clock)
@ics_command('ginfo', 'n')
class Ginfo(Command, GameMixin):
    def run(self, args, conn):
        """Print detailed information about the selected (or current) game."""
        g = self._game_param(args[0], conn)
        if g:
            g.ginfo(conn)
# vim: expandtab tabstop=4 softtabstop=4 shiftwidth=4 smarttab autoindent
|
import glob, os, sys
def get_build_info(ipp_static=True, # static build requires static IPP libs
                   ipp_arch=None,
                   ipp_root=None,
                   ):
    """get options to build Python extensions built with Intel IPP

    ipp_static - True to build using static IPP library (requires IPP license)
    ipp_arch - Architecture of IPP to use (None uses default, 'intel64' and 'ia32' are options)
    ipp_root - Root location of the IPP installation (e.g. /opt/intel/compilers_and_libraries/linux/ipp)

    Returns a dict with keys: extra_link_args, ipp_library_dirs,
    ipp_libraries, ipp_define_macros, ipp_include_dirs, ipp_extra_objects.

    NOTE(review): ipp_root is effectively required (os.path.join fails on
    None); callers must pass the IPP install root.
    """
    # Auto-detect the architecture when the caller did not pin one.
    if ipp_arch is None:
        if sys.platform == 'darwin':
            ipp_arch = 'intel64'
        elif sys.platform.startswith('linux'):
            machine = os.uname()[4]
            if machine == 'x86_64':
                ipp_arch = 'intel64'
            elif machine in ['i386', 'i686']:
                ipp_arch = 'ia32'
            else:
                raise ValueError("unexpected linux architecture: %s" % machine)
        elif sys.platform == 'win32':
            ipp_arch = 'ia32'
        else:
            raise NotImplementedError("auto-architecture detection not implemented on this platform")
    vals = {}
    # Linux IPP keeps per-arch libraries in e.g. lib/intel64_lin.
    if sys.platform.startswith('linux'):
        libdirname = 'lib/%s_lin' % ipp_arch
    else:
        libdirname = 'lib'
    incdirname = 'include'
    ipp_define_macros = []
    ipp_extra_link_args = []
    ipp_extra_objects = []
    # Windows needs the quotes escaped for the compiler command line.
    if sys.platform.startswith('win'):
        ipp_define_macros = [('FASTIMAGE_IPP_ARCH', '\\"%s\\"' % ipp_arch)]
    else:
        ipp_define_macros = [('FASTIMAGE_IPP_ARCH', '"%s"' % ipp_arch)]
    if ipp_static:
        ipp_define_macros.append(('FASTIMAGE_STATICIPP', '1'))
    ipp_include_dirs = [os.path.join(ipp_root, incdirname)]
    # like LDFLAGS in sample Makefile.osx
    if ipp_static:
        # Static build: link the .a archives directly as extra objects.
        ipp_library_dirs = []
        ipp_libraries = []
        libdir = os.path.join(ipp_root, libdirname)
        # BUG FIX: the bare tuple in the comprehension ("for lib in 'ippi',
        # 'ipps', ...") is Python 2-only syntax (SyntaxError on Python 3);
        # parenthesizing it keeps the same meaning and works on both.
        ipp_extra_objects = [os.path.join(libdir, 'lib' + lib + '.a')
                             for lib in ('ippi', 'ipps', 'ippcv', 'ippcc', 'ippcore')]
    else:
        ipp_library_dirs = [os.path.join(ipp_root, libdirname)]
        ipp_libraries = ['ippi', 'ipps', 'ippcv', 'ippcc', 'ippcore']
        ipp_extra_objects = []
    vals['extra_link_args'] = ipp_extra_link_args
    vals['ipp_library_dirs'] = ipp_library_dirs
    vals['ipp_libraries'] = ipp_libraries
    vals['ipp_define_macros'] = ipp_define_macros
    vals['ipp_include_dirs'] = ipp_include_dirs
    vals['ipp_extra_objects'] = ipp_extra_objects
    return vals
|
""" unit test for Watchdo | g.py
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# imports
import os
from mock import MagicMock
# sut
from DIRAC.WorkloadManagementSystem.JobWrapper.Watchdog import Watchdog
mock_exeThread = MagicMock()
mock_spObject = MagicMock()
def test_calibrate():
  # Watchdog.calibrate() should succeed for the current process with the
  # mocked execution thread and subprocess objects.
  pid = os.getpid()
  wd = Watchdog(pid, mock_exeThread, mock_spObject, 5000)
  res = wd.calibrate()
  assert res['OK'] is True
def test__performChecks():
  # A calibrated Watchdog should pass its periodic checks.
  pid = os.getpid()
  wd = Watchdog(pid, mock_exeThread, mock_spObject, 5000)
  res = wd.calibrate()
  assert res['OK'] is True
  res = wd._performChecks()
  assert res['OK'] is True
def test__performChecksFull():
  # Checks should also pass with the optional CPU and memory limit
  # testing enabled.
  pid = os.getpid()
  wd = Watchdog(pid, mock_exeThread, mock_spObject, 5000)
  wd.testCPULimit = 1
  wd.testMemoryLimit = 1
  res = wd.calibrate()
  assert res['OK'] is True
  res = wd._performChecks()
  assert res['OK'] is True
|
7 2005/04/30 07:20:27 snakeru Exp $
"""Simplexml module provides xmpppy library with all needed tools to handle XML nodes and XML streams.
I'm personally using it in many other separate projects. It is designed to be as standalone as possible."""
import xml.parsers.expat
def XMLescape(txt):
    """Returns provided string with symbols & < > " replaced by their respective XML entities."""
    # FORM FEED (0x0C) and ESC (0x1B) are stripped as well since they are
    # not valid XML characters.  '&' must be replaced first so entities
    # produced by later replacements are not escaped twice.
    replacements = (
        ("&", "&amp;"),
        ("<", "&lt;"),
        (">", "&gt;"),
        ('"', "&quot;"),
        (u'\x0C', ""),
        (u'\x1B', ""),
    )
    for raw, entity in replacements:
        txt = txt.replace(raw, entity)
    return txt
ENCODING='utf-8'
def ustr(what):
    """Converts object "what" to unicode string using it's own __str__ method if accessible or unicode method otherwise."""
    # NOTE(review): Python 2 only -- relies on the builtin name "unicode"
    # and the module-level ENCODING constant for byte-string decoding.
    if isinstance(what, unicode): return what
    # Prefer the object's own __str__; fall back to str() for objects
    # without one (e.g. some old-style instances).
    try: r=what.__str__()
    except AttributeError: r=str(what)
    # Decode byte strings using the module-wide ENCODING (utf-8).
    if not isinstance(r, unicode): return unicode(r,ENCODING)
    return r
class Node(object):
""" Node class describes syntax of separate XML Node. It have a constructor that permits node creation
from set of "namespace name", attributes and payload of text strings and other nodes.
It does not natively support building node from text string and uses NodeBuilder class for that purpose.
After creation node can be mangled in many ways so it can be completely changed.
Also node can be serialised into string in one of two modes: default (where the textual representation
of node describes it exactly) and "fancy" - with whitespace added to make indentation and thus make
result m | ore readable by human.
Node class have attribute FORCE_NODE_RECREATION that is defaults to False thus enabling fast node
replication from the some other node. The drawback of the fast way is that new node shares some
info with the "original" node that is changing the one node may influence the | other. Though it is
rarely needed (in xmpppy it is never needed at all since I'm usually never using original node after
replication (and using replication only to move upwards on the classes tree).
"""
FORCE_NODE_RECREATION=0
def __init__(self, tag=None, attrs={}, payload=[], parent=None, nsp=None, node_built=False, node=None):
""" Takes "tag" argument as the name of node (prepended by namespace, if needed and separated from it
by a space), attrs dictionary as the set of arguments, payload list as the set of textual strings
and child nodes that this node carries within itself and "parent" argument that is another node
that this one will be the child of. Also the __init__ can be provided with "node" argument that is
either a text string containing exactly one node or another Node instance to begin with. If both
"node" and other arguments is provided then the node initially created as replica of "node"
provided and then modified to be compliant with other arguments."""
if node:
if self.FORCE_NODE_RECREATION and isinstance(node, Node):
node=str(node)
if not isinstance(node, Node):
node=NodeBuilder(node,self)
node_built = True
else:
self.name,self.namespace,self.attrs,self.data,self.kids,self.parent,self.nsd = node.name,node.namespace,{},[],[],node.parent,{}
for key in node.attrs.keys(): self.attrs[key]=node.attrs[key]
for data in node.data: self.data.append(data)
for kid in node.kids: self.kids.append(kid)
for k,v in node.nsd.items(): self.nsd[k] = v
else: self.name,self.namespace,self.attrs,self.data,self.kids,self.parent,self.nsd = 'tag','',{},[],[],None,{}
if parent:
self.parent = parent
self.nsp_cache = {}
if nsp:
for k,v in nsp.items(): self.nsp_cache[k] = v
for attr,val in attrs.items():
if attr == 'xmlns':
self.nsd[u''] = val
elif attr.startswith('xmlns:'):
self.nsd[attr[6:]] = val
self.attrs[attr]=attrs[attr]
if tag:
if node_built:
pfx,self.name = (['']+tag.split(':'))[-2:]
self.namespace = self.lookup_nsp(pfx)
else:
if ' ' in tag:
self.namespace,self.name = tag.split()
else:
self.name = tag
if isinstance(payload, basestring): payload=[payload]
for i in payload:
if isinstance(i, Node): self.addChild(node=i)
else: self.data.append(ustr(i))
def lookup_nsp(self,pfx=''):
ns = self.nsd.get(pfx,None)
if ns is None:
ns = self.nsp_cache.get(pfx,None)
if ns is None:
if self.parent:
ns = self.parent.lookup_nsp(pfx)
self.nsp_cache[pfx] = ns
else:
return 'http://www.gajim.org/xmlns/undeclared'
return ns
def __str__(self,fancy=0):
""" Method used to dump node into textual representation.
if "fancy" argument is set to True produces indented output for readability."""
s = (fancy-1) * 2 * ' ' + "<" + self.name
if self.namespace:
if not self.parent or self.parent.namespace!=self.namespace:
if 'xmlns' not in self.attrs:
s = s + ' xmlns="%s"'%self.namespace
for key in self.attrs.keys():
val = ustr(self.attrs[key])
s = s + ' %s="%s"' % ( key, XMLescape(val) )
s = s + ">"
cnt = 0
if self.kids:
if fancy: s = s + "\n"
for a in self.kids:
if not fancy and (len(self.data)-1)>=cnt: s=s+XMLescape(self.data[cnt])
elif (len(self.data)-1)>=cnt: s=s+XMLescape(self.data[cnt].strip())
s = s + a.__str__(fancy and fancy+1)
cnt=cnt+1
if not fancy and (len(self.data)-1) >= cnt: s = s + XMLescape(self.data[cnt])
elif (len(self.data)-1) >= cnt: s = s + XMLescape(self.data[cnt].strip())
if not self.kids and s.endswith('>'):
s=s[:-1]+' />'
if fancy: s = s + "\n"
else:
if fancy and not self.data: s = s + (fancy-1) * 2 * ' '
s = s + "</" + self.name + ">"
if fancy: s = s + "\n"
return s
def addChild(self, name=None, attrs={}, payload=[], namespace=None, node=None):
""" If "node" argument is provided, adds it as child node. Else creates new node from
the other arguments' values and adds it as well."""
if node:
newnode=node
node.parent = self
else: newnode=Node(tag=name, parent=self, attrs=attrs, payload=payload)
if namespace:
newnode.setNamespace(namespace)
self.kids.append(newnode)
return newnode
def addData(self, data):
""" Adds some CDATA to node. """
self.data.append(ustr(data))
def clearData(self):
""" Removes all CDATA from the node. """
self.data=[]
def delAttr(self, key):
""" Deletes an attribute "key" """
del self.attrs[key]
def delChild(self, node, attrs={}):
""" Deletes the "node" from the node's childs list, if "node" is an instance.
Else deletes the first node that have specified name and (optionally) attributes. """
if not isinstance(node, Node): node=self.getTag(node,attrs)
self.kids.remove(node)
return node
def getAttrs(self):
""" Returns all node's attributes as dictionary. """
return self.attrs
def getAttr(self, key):
""" Returns value of specified attribute. """
try: return self.attrs[key]
except: return None
def getChildren(self):
""" Returns all node's child nodes as list. """
return self.kids
def getData(self):
""" Returns all node CDATA as string (concatenated). """
return ''.join(self.data)
def getName(self):
""" Returns the name of node """
return self.name
def getNamespace(self):
""" Returns the namespace of node """
return self.namespace
def getParent(self):
""" Returns the parent of node (if present). """
return self.parent
def getPayload(self):
""" Return the payload of node i.e. list of child nodes and CDATA entries.
F.e. for "<node>text1<nodea/><nodeb/> text2</node>" will be returned list:
['text1', <nodea instance>, <nodeb instance>, ' text2']. """
ret=[]
for i in range(len(self.kids)+len(self.data)+1):
try:
if self.data[i]: ret.append(self.data[i])
except IndexError: pass
try: ret.append(self.kids[i])
except IndexError: pass
return ret
def getTag(self, name, attrs={}, namespace=None):
""" Filters all child nodes using specified arguments as filter.
Returns the first found or None if not found. """
return self.getTags(name, attrs, namespace, one=1)
def getTagAttr(self,tag,attr):
""" Returns attribute value of the child with specified name (or None if no such attribute)."""
try: return self.getTag(tag).attrs[attr]
excep |
from django.urls import path
from tickets import views
urlpatterns = [
path('', views.last_event, name='last_event'),
path('event/<str:ev>/', views.event, name='event'),
path('event/<str:ev>/<str:space>/<str:session>/register/', views.register, name='register'),
pa | th('ticket/<str:order>/payment/', views.payment, name='payment'),
path('ticket/<str:order>/thanks/', views.thanks, name='thanks'),
path('ticket/confirm/', views.confirm, name='confirm'),
path('ticket/confirm/paypal/', views.confirm_paypal, name='confirm_paypal'),
path('ticket/<str:order>/confirm/stripe/', views.confirm_stripe, name='confirm_stripe'),
path('ticket/template/<int:id>/preview/', views.template_preview, name='template_previe | w'),
path('ticket/email-confirm/<int:id>/preview/', views.email_confirm_preview, name='email_confirm_preview'),
path('<str:ev>/', views.multipurchase, name='multipurchase'),
path('seats/<int:session>/<int:layout>/', views.ajax_layout, name='ajax_layout'),
path('seats/view/<int:map>/', views.seats, name='seats'),
path('seats/auto/', views.autoseats, name='autoseats'),
path('seats/bystr/', views.seats_by_str, name='seats_by_str'),
]
|
)),
pnet.PoolingLayer(shape=(4, 4), strides=(4, 4)),
]
elif 0:
layers += [
pnet.PartsLayer(1000, (5, 5), settings=dict(outer_frame=1,
em_seed=training_seed,
threshold=2,
samples_per_image=40,
max_samples=200000,
train_limit=100000,
min_prob=0.0005,
)),
pnet.PoolingLayer(shape=(4, 4), strides=(4, 4)),
]
elif 0:
layers += [
pnet.BinaryTreePartsLayer(maxdepth, (5, 5), settings=dict(outer_frame=1,
em_seed=training_seed,
threshold=2,
samples_per_image=40,
max_samples=200000,
train_limit=10000,
min_prob=0.005,
#keypoint_suppress_radius=1,
min_samples_per_part=50,
split_criterion='IG',
split_entropy=0.2,
min_information_gain=0.01,
)),
pnet.PoolingLayer(shape=(4, 4), strides=(4, 4)),
]
elif 0:
layers += [
pnet.BinaryTreePartsLayer(maxdepth, (5, 5), settings=dict(outer_frame=1,
em_seed=training_seed,
threshold=2,
samples_per_image=40,
max_samples=200000,
train_limit=10000,
min_prob=0.0005,
#keypoint_suppress_radius=1,
min_samples_per_part=50,
split_criterion=split_criterion,
split_entropy=split_entropy,
min_information_gain=split_entropy,
)),
pnet.PoolingLayer(shape=(4, 4), strides=(4, 4)),
]
[
pnet.BinaryTreePartsLayer(10, (1, 1), settings=dict(outer_frame=0,
em_seed=training_seed+1,
threshold=1,
samples_per_image=200,
max_samples=1000000,
train_limit=10000,
#min_information_gain=0.05,
split_entropy=0.05,
min_prob=0.0005
)),
pnet.PoolingLayer(shape=(1, 1), strides=(1, 1)),
]
layers += [
pnet.MixtureClassificationLayer(n_components=1, min_prob=1e-5),
#pnet.SVMClassificationLayer(C=None),
]
net = pnet.PartsNet(layers)
TRAIN_SAMPLES = 10000
#TRAIN_SAMPLES = 1200
print(training_seed)
digits = range(10)
#ims = ag.io.load_mnist('training', selection=slice(0 + 3000 * training_seed, TRAIN_SAMPLES + 3000 * training_seed), return_labels=False)
ims = mnist_data['training_image'][0 + 1000 * training_seed : TRAIN_SAMPLES + 1000 * training_seed]
ims_label = mnist_data['training_label'][0 + 1000 * training_seed : TRAIN_SAMPLES + 1000 * training_seed]
validation_ims = mnist_data['training_image'][10000:12000]
validation_label = mnist_data['training_label'][10000:12000]
#print(net.sizes(X[[0]]))
print(ims.shape)
start0 = time.time()
net.train(ims)
end0 = time.time()
N = 1000
sup_ims = []
sup_labels = []
# Load supervised training data
for d in digits:
if N is None:
ims0 = ims[ims_label == d]
else:
#ims0 = ag.io.load_mnist('training', [d], selection=slice(N*training_seed, N*(1+training_seed)), return_labels=False)
ims0 = ims[ims_label == d]
sup_ims.append(ims0)
sup_labels.append(d * np.ones(len(ims0), dtype=np.int64))
sup_ims = np.concatenate(sup_ims, axis=0)
sup_labels = np.concatenate(sup_labels, axis=0)
#print('labels', np.bincount(sup_labels, minlength=10))
start1 = time.time()
net.train(sup_ims, sup_labels)
end1 = time.time()
#print("Now testing...")
### Test ######################################################################
corrects = 0
total = 0
test_ims, test_labels = mnist_data['test_image'], mnist_data['test_label']
test_ims = validation_ims
test_labels = validation_label
# TEMP
if 0:
test_ims = test_ims[:1000]
test_labels = test_labels[:1000]
#with gv.Timer("Split to batches"):
ims_batches = np.array_split(test_ims, 200)
labels_batch | es = np.array_split(test_labels, 200)
def format_error_rate(pr):
return "{:.2f}%".format(100*(1-pr))
#import gv
#with g | v.Timer('Testing'):
start2 = time.time()
args = (tup+(net,) for tup in itr.izip(ims_batches, labels_batches))
for i, res in enumerate(pnet.parallel.starmap(test, args)):
corrects += res.sum()
total += res.size
pr = corrects / total
end2 = time.time()
error_rate = 1.0 - pr
num_parts = 0#net.layers[1].num_parts
error_rates.append(error_rate)
print(training_seed, 'error rate', error_rate * 100, 'num parts', num_parts)#, 'num parts 2', net.layers[3].num_parts)
unsup_training_times.append(end0 - start0)
sup_training_times.append(end1 - start1)
testing_times.append(end2 - start2)
#print('times', end0-start0, end1-start1, end2-start2)
all_num_parts.append(num_parts)
#vz.section('MNIST')
#gv.img.save_image(vz.generate_filename(), test_ims[0])
#gv.img.save_image(vz.generate_filename(), test_ims[1])
#gv.img.save_image(vz.generate_filename(), test_ims[2])
# Vz
#net.infoplot(vz)
#vz.flush()
net.save('tmp{}.npy'.format(training_seed))
print(r"{ppl} & {depth} & {num_parts} & {unsup_time:.1f} & {test_time:.1f} & ${rate:.2f} \pm {std:.2f}$ \\".format(
ppl=2,
depth=maxdepth,
num_parts=r'${:.0f} \pm {:.0f}$'.format(np.mean(all_num_parts), np.std(all_num_parts)),
unsup_time=np.median(unsup_training_times) / 60,
#sup_time=np.median(sup_training_times),
test_time=np.median(testing_times) / 60,
rate=100*np.mean(error_rates),
std=100*np.std(error_rates)))
print(r"{ppl} {depth} {num_parts} {unsup_time} {test_time} {rate} {std}".format(
ppl=2,
|
# Copyright 2015 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import eventlet
import mock
from nova import exception
from hyperv.nova import constants
from hyperv.nova import eventhandler
from hyperv.nova import utilsfactory
from hyperv.tests.unit import test_base
class EventHandlerTestCase(test_base.HyperVBaseTestCase):
    """Unit tests for eventhandler.InstanceEventHandler.

    All collaborators (vmutils, WMI listener, serial console ops) are
    mocked; the tests drive the handler's private methods directly.
    """
    # Fake config values injected through self.flags() in setUp.
    _FAKE_POLLING_INTERVAL = 3
    _FAKE_EVENT_CHECK_TIMEFRAME = 15
    @mock.patch.object(utilsfactory, 'get_vmutils')
    def setUp(self, mock_get_vmutils):
        super(EventHandlerTestCase, self).setUp()
        self._state_change_callback = mock.Mock()
        self._running_state_callback = mock.Mock()
        self.flags(
            power_state_check_timeframe=self._FAKE_EVENT_CHECK_TIMEFRAME,
            group='hyperv')
        self.flags(
            power_state_event_polling_interval=self._FAKE_POLLING_INTERVAL,
            group='hyperv')
        self._event_handler = eventhandler.InstanceEventHandler(
            self._state_change_callback)
        self._event_handler._serial_console_ops = mock.Mock()
    @mock.patch.object(eventhandler, 'wmi', create=True)
    @mock.patch.object(eventhandler.InstanceEventHandler, '_dispatch_event')
    @mock.patch.object(eventlet, 'sleep')
    def _test_poll_events(self, mock_sleep, mock_dispatch,
                          mock_wmi, event_found=True):
        """Exercise _poll_events for both the event and the timeout path."""
        fake_listener = mock.Mock()
        mock_wmi.x_wmi_timed_out = Exception
        # First call either returns an event or raises the WMI timeout;
        # the second call raises KeyboardInterrupt to break the loop.
        fake_listener.side_effect = (mock.sentinel.event if event_found
                                     else mock_wmi.x_wmi_timed_out,
                                     KeyboardInterrupt)
        self._event_handler._listener = fake_listener
        # This is supposed to run as a daemon, so we'll just cause an exception
        # in order to be able to test the method.
        self.assertRaises(KeyboardInterrupt,
                          self._event_handler._poll_events)
        if event_found:
            mock_dispatch.assert_called_once_with(mock.sentinel.event)
        else:
            mock_sleep.assert_called_once_with(self._FAKE_POLLING_INTERVAL)
    def test_poll_having_events(self):
        # Test case in which events were found in the checked interval
        self._test_poll_events()
    def test_poll_no_event_found(self):
        self._test_poll_events(event_found=False)
    @mock.patch.object(eventhandler.InstanceEventHandler,
                       '_get_instance_uuid')
    @mock.patch.object(eventhandler.InstanceEventHandler, '_emit_event')
    def _test_dispatch_event(self, mock_emit_event, mock_get_uuid,
                             missing_uuid=False):
        """Check that _dispatch_event emits only when a UUID is resolved."""
        mock_get_uuid.return_value = (
            mock.sentinel.instance_uuid if not missing_uuid else None)
        self._event_handler._vmutils.get_vm_power_state.return_value = (
            mock.sentinel.power_state)
        event = mock.Mock()
        event.ElementName = mock.sentinel.instance_name
        event.EnabledState = mock.sentinel.enabled_state
        self._event_handler._dispatch_event(event)
        if not missing_uuid:
            mock_emit_event.assert_called_once_with(
                mock.sentinel.instance_name,
                mock.sentinel.instance_uuid,
                mock.sentinel.power_state)
        else:
            # No UUID means the event is dropped silently.
            self.assertFalse(mock_emit_event.called)
    def test_dispatch_event_new_final_state(self):
        self._test_dispatch_event()
    def test_dispatch_event_missing_uuid(self):
        self._test_dispatch_event(missing_uuid=True)
    @mock.patch.object(eventhandler.InstanceEventHandler, '_get_virt_event')
    @mock.patch.object(eventlet, 'spawn_n')
    def test_emit_event(self, mock_spawn, mock_get_event):
        # Emitting must spawn both the state-change callback and the
        # serial console worker handler as green threads.
        self._event_handler._emit_event(mock.sentinel.instance_name,
                                        mock.sentinel.instance_uuid,
                                        mock.sentinel.instance_state)
        virt_event = mock_get_event.return_value
        mock_spawn.assert_has_calls(
            [mock.call(self._state_change_callback, virt_event),
             mock.call(self._event_handler._handle_serial_console_workers,
                       mock.sentinel.instance_name,
                       mock.sentinel.instance_state)])
    def test_handle_serial_console_instance_running(self):
        self._event_handler._handle_serial_console_workers(
            mock.sentinel.instance_name,
            constants.HYPERV_VM_STATE_ENABLED)
        serialops = self._event_handler._serial_console_ops
        serialops.start_console_handler.assert_called_once_with(
            mock.sentinel.instance_name)
    def test_handle_serial_console_instance_stopped(self):
        self._event_handler._handle_serial_console_workers(
            mock.sentinel.instance_name,
            constants.HYPERV_VM_STATE_DISABLED)
        serialops = self._event_handler._serial_console_ops
        serialops.stop_console_handler.assert_called_once_with(
            mock.sentinel.instance_name)
    def _test_get_instance_uuid(self, instance_found=True,
                                missing_uuid=False):
        """Exercise _get_instance_uuid for found/deleted/no-notes cases."""
        if instance_found:
            # Single-item tuple: one successful lookup result.
            side_effect = (mock.sentinel.instance_uuid
                           if not missing_uuid else None, )
        else:
            # Exception class: mock raises NotFound on lookup.
            side_effect = exception.NotFound
        mock_get_uuid = self._event_handler._vmutils.get_instance_uuid
        mock_get_uuid.side_effect = side_effect
        instance_uuid = self._event_handler._get_instance_uuid(
            mock.sentinel.instance_name)
        expected_uuid = (mock.sentinel.instance_uuid
                         if instance_found and not missing_uuid else None)
        self.assertEqual(expected_uuid, instance_uuid)
    def test_get_nova_created_instance_uuid(self):
        self._test_get_instance_uuid()
    def test_get_deleted_instance_uuid(self):
        self._test_get_instance_uuid(instance_found=False)
    def test_get_instance_uuid_missing_notes(self):
        self._test_get_instance_uuid(missing_uuid=True)
    @mock.patch('nova.virt.event.LifecycleEvent')
    def test_get_virt_event(self, mock_lifecycle_event):
        instance_state = constants.HYPERV_VM_STATE_ENABLED
        expected_transition = self._event_handler._TRANSITION_MAP[
            instance_state]
        virt_event = self._event_handler._get_virt_event(
            mock.sentinel.instance_uuid, instance_state)
        self.assertEqual(mock_lifecycle_event.return_value,
                         virt_event)
        mock_lifecycle_event.assert_called_once_with(
            uuid=mock.sentinel.instance_uuid,
            transition=expected_transition)
|
#!/usr/bin/python
"""
Copyright 2010 Paul Willworth <ioscode@gmail.com>
This file is part of Galaxy Harvester.
Galaxy Harvester is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Galaxy Harvester is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with Galaxy Harvester. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import cgi
import Cookie
import hashlib
import MySQLdb
import dbSession
import dbShared
# Get current url
try:
url = os.environ['SCRIPT_NAME']
except KeyError:
url = ''
form = cgi.FieldStorage()
# Get Cookies
errorstr = ''
cookies = Cookie.SimpleCookie()
try:
cookies.load(os.environ['HTTP_COOKIE'])
except KeyError:
errorstr = 'no cookies\n'
if errorstr == '':
try:
currentUser = cookies['userID'].value
except KeyError:
currentUser = ''
try:
loginResult = cookies['loginAttempt'].value
except KeyError:
loginResult = 'success'
try:
sid = cookies[ | 'gh_sid'].value
except KeyError:
sid = form.getfirst('gh_sid', '')
else:
currentUser = ''
loginResult = 'success'
sid = form.getfirst('gh_sid', | '')
userpass = form.getfirst("userpass")
# escape input to prevent sql injection
sid = dbShared.dbInsertSafe(sid)
userpass = dbShared.dbInsertSafe(userpass)
# Get a session
logged_state = 0
linkappend = ''
sess = dbSession.getSession(sid, 2592000)
if (sess != ''):
logged_state = 1
currentUser = sess
linkappend = 'gh_sid=' + sid
# Check for errors
errstr='';
if (len(userpass) < 6):
errstr = errstr + "Your password must be at least 6 characters. \r\n"
if (logged_state == 0):
errstr = errstr + "You must be logged in to update your password. \r\n"
if (errstr != ''):
result = "Your Password could not be updated because of the following errors:\r\n" + errstr
else:
crypt_pass = hashlib.sha1(currentUser + userpass).hexdigest()
conn = dbShared.ghConn()
cursor = conn.cursor()
cursor.execute("UPDATE tUsers SET userPassword='" + crypt_pass + "' WHERE userID='" + currentUser + "';")
cursor.close()
conn.close()
result = "Password Updated"
print "Content-Type: text/html\n"
print result
|
import numpy as np
import scipy.io as sio
import pylab as pl
import itertools as it
import io
def ret_tru(inval):
    """Default predicate: accept every line.

    Bug fix: the original returned the undefined name ``true`` (a NameError
    at call time); Python's boolean literal is ``True``.
    """
    return True


def get_next_ind(in_file, sep_ln="&", predicate=ret_tru):
    """Collect stripped lines from *in_file* up to the separator line.

    :param in_file: iterable of lines (an open file or any line iterator);
        consumed up to and including the first separator line
    :param sep_ln: stripped line that terminates the scan (not returned)
    :param predicate: filter applied to each stripped line; only lines for
        which it returns a truthy value are kept
    :return: list of accepted lines read before the separator (or EOF)
    """
    lvals = []
    for line in in_file:
        line = line.strip()
        if line == sep_ln:
            break
        if predicate(line):
            lvals.append(line)
    return lvals
def split_list(inl, predicate):
    """Partition *inl* into sublists, splitting on items matching *predicate*.

    Separator items themselves are dropped and empty groups are skipped.
    A trailing group not terminated by a separator is discarded, matching
    the historical behaviour of this helper.
    """
    groups = []
    pending = []
    for item in inl:
        if predicate(item):
            if pending:
                groups.append(pending)
                pending = []
        else:
            pending.append(item)
    return groups
# Parse "data_out.out" section by section and export each ablation study
# as a MATLAB .mat file.  Python 2 script (print statements).
dat_file = open("data_out.out")
#do processing for each value in the output
# Keep only the labelled lines of the next section (terminated by "&").
ltest = get_next_ind(dat_file, "&", lambda x:(x.startswith("Func") or x.startswith("Ablation") or x.startswith("Time") or x.startswith("&&")))
while(len(ltest) != 0):
    # Break the section into simulation groups separated by "&&" markers.
    ltest = split_list(ltest, lambda x:x == "&&")
    if(len(ltest) == 0):
        break
    fncstack = []
    timelist = ""
    abl_val=[]
    #create numpy arrays
    for simval in ltest:
        if len(simval)==1:
            # Single-line group: the ablation values record; strip the
            # leading label up to the first digit and parse the numbers.
            abl_val = np.fromstring(''.join(it.dropwhile(lambda x : not x.isdigit(), simval[0])), sep=' ')
            continue
        # Two-line group: time values followed by function values.
        timestr = simval[0]
        fncstr = simval[1]
        timestr = it.dropwhile(lambda x: not x.isdigit(), timestr)
        timestr = ''.join(timestr)
        timelist = timelist + " " + timestr
        fncstr = it.dropwhile(lambda x: not x.isdigit(), fncstr)
        fncstr = ''.join(fncstr)
        fncstack.append(np.fromstring(fncstr, sep=' '))
    print len(fncstack)
    # Rows = samples, columns = simulations.
    neur_mat = np.vstack(fncstack).transpose();
    time_vec = np.fromstring(timelist, sep=' ')
    #write to .mat file
    #create file name
    # File name encodes the ablation values, e.g. "abl_study_1_2_3".
    fbase = "abl_study_"
    for abl in abl_val:
        fbase = fbase + str(int(abl)) + "_"
    fbase = fbase[:-1]
    print fbase
    sio.savemat(fbase, {'abl_vals':np.array(abl_val), 'neur_series':neur_mat, 'time_vec':time_vec})
    # Advance to the next section; an empty result ends the loop.
    ltest = get_next_ind(dat_file, "&", lambda x:(x.startswith("Ablation") or x.startswith("Func") or x.startswith("Time") or x.startswith("&&")))
|
# This Source Code Form is subject to the terms of the Mozilla Public
# | License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (c) 2015 Etherios, Inc. All rights reserved.
# Etherio | s, Inc. is a Division of Digi International.
|
from bears.c_languages.CPPLintBear import CPPLintBear
from tests.LocalBearTestHelper impo | rt verify_local_bear
# Minimal translation unit used as the lint fixture for all three tests.
test_file = """
int main() {
return 0;
}
"""
# With default settings cpplint rejects the fixture (the 'legal' category
# is ignored in the next test, which makes the same file pass -- so the
# default failure presumably comes from that category).
CPPLintBearTest = verify_local_bear(CPPLintBear,
                                    valid_files=(),
                                    invalid_files=(test_file,),
                                    tempfile_kwargs={'suffix': '.cpp'})
# Ignoring the 'legal' category makes the fixture lint-clean.
CPPLintBearIgnoreConfigTest = verify_local_bear(
    CPPLintBear,
    valid_files=(test_file,),
    invalid_files=(),
    settings={'cpplint_ignore': 'legal'},
    tempfile_kwargs={'suffix': '.cpp'})
# A very small max_line_length makes the fixture fail again even with the
# 'legal' category ignored.
CPPLintBearLineLengthConfigTest = verify_local_bear(
    CPPLintBear,
    valid_files=(),
    invalid_files=(test_file,),
    settings={'cpplint_ignore': 'legal',
              'max_line_length': '13'},
    tempfile_kwargs={'suffix': '.cpp'})
|
onError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
    def begin_delete(
        self,
        resource_group_name, # type: str
        virtual_network_name, # type: str
        virtual_network_peering_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Deletes the specified virtual network peering.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_name: The name of the virtual network.
        :type virtual_network_name: str
        :param virtual_network_peering_name: The name of the virtual network peering.
        :type virtual_network_peering_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
        Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            # Fresh operation: issue the initial DELETE.  The passthrough
            # ``cls`` keeps the raw pipeline response so the poller can
            # drive the long-running operation itself.
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                virtual_network_name=virtual_network_name,
                virtual_network_peering_name=virtual_network_peering_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # These kwargs were only meaningful for the initial request and
        # must not leak into the polling method below.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # The operation returns no body; only a caller-supplied ``cls``
            # can turn the final response into a value.
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
            'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation from its saved state.
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
virtual_network_peering_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualNetworkPeering"
"""Gets the specified virtual network peering.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param virtual_network_peering_name: The name of the virtual network peering.
:type virtual_network_peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualNetworkPeering, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_09_01.models.VirtualNetworkPeering
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
accept = "application/json, text/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serial | ize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(u | rl, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.htt |
# -*- coding: utf-8 -*-
#
# ELLIPTIc documentation build configuration file, created by
# sphinx-quickstart on Sat Mar 25 15:56:12 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
# Make the project package importable so sphinx.ext.autodoc can find it.
sys.path.insert(0, os.path.abspath('../../'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
              'sphinx.ext.intersphinx',
              'sphinx.ext.napoleon',
              'sphinx.ext.viewcode',
              'sphinx.ext.mathjax',
              'sphinx_autodoc_typehints']
napoleon_include_init_with_doc = False
napoleon_include_special_with_doc = True
# No modules need to be mocked out during the documentation build.
autodoc_mock_imports = []
# Warn about every missing cross-reference target.
nitpicky = True
# Cross-link to the Python and anytree documentation.
intersphinx_mapping = {
    'python': ('https://docs.python.org/3', None),
    'anytree': ('http://anytree.readthedocs.io/en/latest/', None)
}
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'elliptic'
copyright = u'2018, Universidade Federal de Pernambuco'
author = u'Guilherme Caminha'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'1.0.1'
# The full version, including alpha/beta/rc tags.
release = u'1.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'ellipticdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'elliptic.tex', u'ELLIPTIc Documentation',
     u'Guilherme Caminha', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'elliptic', u'ELLIPTIc Documentation',
     [author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'elliptic', u'ELLIPTIc Documentation',
     author, 'elliptic', 'The Extensible LIbrary for Physical simulaTIons.',
     'Miscellaneous'),
]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 30 09:53:39 2018
@author: mayank
"""
from forms import SignupForm
from flask import Flask, request, render_template
from flask_login import LoginManager, login_user, login_required, logout_user
app = Flask(__name__)
# Key used to sign the session cookie.
# NOTE(review): a secret hard-coded in source control is unsafe for
# production -- load it from the environment instead.
app.secret_key = 'gMALVWEuxBSxQ44bomDOsWniejrPbhDV'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///db/database.sqlite'
login_manager = LoginManager()
login_manager.init_app(app)
@app.route('/')
def index():
    """Landing page: a plain-text greeting."""
    greeting = "Welcome to Home Page"
    return greeting
@app.route('/signup', methods=['GET', 'POST'])
def signup():
    """Show the signup form on GET; create and log in a new user on POST."""
    form = SignupForm()
    if request.method == 'GET':
        return render_template('signup.html', form=form)
    elif request.method == 'POST':
        if not form.validate_on_submit():
            return "Form didn't validate"
        # Reject duplicate registrations keyed on email address.
        if User.query.filter_by(email=form.email.data).first():
            return "!!! Email address already exists !!!"
        # NOTE(review): the password is handed to the User model as-is --
        # confirm hashing happens inside models.User.
        account = User(form.email.data, form.password.data)
        db.session.add(account)
        db.session.flush()
        db.session.commit()
        login_user(account)
        return "User created!!!"
@login_manager.user_loader
def load_user(email):
    """Flask-Login callback: look up the user record for a session's email."""
    account = User.query.filter_by(email=email).first()
    return account
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Show the login form on GET; authenticate the user on POST."""
    form = SignupForm()
    if request.method == 'GET':
        return render_template('login.html', form=form)
    elif request.method == 'POST' and form.validate_on_submit():
        account = User.query.filter_by(email=form.email.data).first()
        # NOTE(review): plain-text password comparison -- confirm whether
        # the User model is meant to store hashed passwords.
        if account and account.password == form.password.data:
            login_user(account)
            return "User logged in"
        return "<h1>Wrong username or password</h1>"
    return "form not validated or invalid request"
@app.route("/logout")
@login_required
def logout():
    """End the current authenticated session and confirm."""
    logout_user()
    return "Logged out"
@app.route('/protected')
@login_required
def protected():
    """Example endpoint reachable only with an authenticated session."""
    return "protected area"
def init_db():
    """Bind the SQLAlchemy instance to the app and create missing tables."""
    db.init_app(app)
    db.app = app
    db.create_all()
if __name__ == '__main__':
    # Imported at run time rather than at module top; presumably deferred
    # to avoid a circular import with the models module -- verify.
    from models import db, User
    init_db()
    app.run(port=5000, host='localhost')
|
#!/usr/bin/env python
# Copyright (C) 2017 DearBytes B.V. - All Rights Reserved
import os
from datetime import datetime
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import ForeignKey
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy import create_engine
from sqlalchemy.orm import relationship
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
# SQLite database file located in the current working directory.
DATABASE_PATH = os.path.join(os.getcwd(), 'integrity.db')
engine = create_engine('sqlite:///' + DATABASE_PATH)
Base = declarative_base()
Session = sessionmaker(bind=engine)
# Single module-wide session shared by all models below.
session = Session()
class Model(object):
    """Mixin adding serialization and session helpers to every mapped model."""

    def to_anonymous_object(self):
        """
        Convert the current model properties to an anonymous object
        This is to prevent the data from not being able to be accessed outside of the main thread it was created in
        :return: Object containing all keys and values that the current model does
        :rtype: object
        """
        return type('', (object,), self.to_dict())()

    def to_dict(self):
        """
        Convert the current model properties to a dict
        :return: Dict containing all keys and values that the current model does
        :rtype: dict
        """
        return dict(((key, getattr(self, key)) for key in self.__mapper__.columns.keys()))

    def values(self):
        """
        Get all values in the current row as a list
        :return: List containing all values that the current model does
        :rtype: list
        """
        return list(((getattr(self, key)) for key in self.keys()))

    @classmethod
    def keys(cls):
        """
        Get all keys in the current row as a list
        :return: List containing all keys that the current model does
        :rtype: list
        """
        return cls.__mapper__.columns.keys()

    def delete(self):
        """
        Delete the current row
        :return:
        """
        session.delete(self)

    def __iter__(self):
        # Yield [column, value] pairs for columns that have been populated
        # on this instance.
        values = vars(self)
        for attr in self.__mapper__.columns.keys():
            if attr in values:
                yield [attr, values[attr]]

    @classmethod
    def as_list(cls):
        """
        Get all results as a list
        :return: List of all rows of this model
        """
        # Bug fix: ``list(cls)`` tried to iterate the class object itself,
        # which raises TypeError; fetch all rows through the session instead.
        return cls.query().all()

    @classmethod
    def query(cls):
        """
        Get a new reference to query
        :return: Query object bound to this model
        """
        return session.query(cls)
class Server(Model, Base):
    """A monitored server; owns a collection of file checksums (backref)."""
    __tablename__ = "servers"
    id = Column(Integer, primary_key=True)
    name = Column(String, nullable=False, unique=True)

    @classmethod
    def get(cls, name):
        """
        Get the first instance of a server by name
        :param name: Name of the server
        :type name: str
        :return: Server if found, else None
        :rtype: models.Server
        """
        return session.query(cls).filter(cls.name == name).one_or_none()

    @classmethod
    def exists(cls, name):
        """
        Check if the server exists in the database
        :param name: Name of the server
        :return: Returns true if the server exists
        :rtype: bool
        """
        return cls.get(name=name) is not None

    @classmethod
    def create(cls, name):
        """
        Create a new server and add it to the session
        :param name: Name of the server
        :return: Instance of the server
        :rtype: models.Server
        """
        instance = cls(name=name)
        session.add(instance)
        return instance

    def get_related_checksum(self, path, checksum):
        """
        Get a related checksum by certain criteria
        :param checksum: Checksum of the file
        :param path: Path to the file
        :type checksum: str
        :type path: str
        :return: Returns a checksum if one is found, otherwise None
        """
        return next(
            (entry for entry in self.checksums
             if entry.path == path and entry.checksum == checksum),
            None)
class Checksum(Model, Base):
    """Checksum of a single file path on one server."""
    __tablename__ = "checksums"
    id = Column(Integer, primary_key=True)
    path = Column(String, nullable=False)
    checksum = Column(String(128), nullable=False)
    server = relationship(Server, backref="checksums")
    server_id = Column(Integer, ForeignKey("servers.id"), index=True, nullable=False)

    @classmethod
    def create(cls, path, checksum, server):
        """
        Create a new record and return it
        :param path: Path to the file
        :param checksum: File checksum
        :param server: Related server
        :type path: str
        :type checksum: str
        :type server: models.Server
        :return: Returns the record that was just added
        """
        row = cls(path=path, checksum=checksum, server=server)
        session.add(row)
        return row
class Event(Model, Base):
    """Integrity event recorded against a file checksum."""
    # Event type constants.
    FILE_ADDED = 1
    FILE_REMOVED = 2
    FILE_MODIFIED = 3
    __tablename__ = "events"
    id = Column(Integer, primary_key=True)
    event = Column(Integer, nullable=False)
    description = Column(String, nullable=False)
    timestamp = Column(DateTime, nullable=False)
    checksum = relationship(Checksum)
    checksum_id = Column(Integer, ForeignKey("checksums.id"), index=True, nullable=False)

    @classmethod
    def create(cls, event, description, checksum):
        """
        Create a new event and store it in the database
        :param event: What type of event was it (constant)
        :param description: Description of the event
        :param checksum: What checksum was it related to
        :type event: int
        :type description: str
        :type checksum: models.Checksum
        :return: Returns the instance of the event
        """
        # NOTE(review): datetime.now() is naive local time -- confirm
        # whether UTC is expected here.
        row = cls(event=event, description=description,
                  checksum=checksum, timestamp=datetime.now())
        session.add(row)
        return row
def create_database():
    """
    Create the database schema: any tables that do not yet exist.
    Existing tables and their data are left untouched (create_all never
    drops anything, contrary to what the old docstring claimed; the old
    docstring also opened with a stray fourth quote character).
    :return: None
    """
    Base.metadata.create_all(engine)
def database_exists():
    """
    Tell whether the SQLite database file is present on disk
    :return: True if the database exists
    :rtype: bool
    """
    present = os.path.exists(DATABASE_PATH)
    return present
|
= 'cloudwatch_' + region
default_section_name = 'cloudwatch'
self.enabled = self.config.getboolean(
self.section_name, 'enabled', False, default_section_name)
self.workers = int(self.config.get(
self.section_name, 'workers', 1, default_section_name))
self.has_suffix_for_single_stat = self.config.getboolean(
self.section_name, 'single_stat_has_suffix', True,
default_section_name)
self.default_delay_minutes = int(self.config.get(
self.section_name, 'first_run_start_minutes', 5,
default_section_name))
self.namespace = self.config.get(
self.section_name, 'namespace', 'aws', default_section_name)
self.ec2_tag_keys = self.config.getlist(
self.section_name, 'ec2_tag_keys', [], default_section_name)
self.metric_config_path = self.config.get(
self.section_name, 'metric_config_path', DEFAULT_METRIC_CONFIG_FILE,
default_section_name)
self.start_time = self.config.getdate(
self.section_name, 'start_time', None, default_section_name)
self.end_time = self.config.getdate(
self.section_name, 'end_time', None, default_section_name)
self.last_run_time = self.config.getdate(
self.section_name, 'last_run_time', None, default_section_name)
self.update_start_end_times()
self.namespaces = set()
| self.metrics_config = None
def update_start_end_times(self):
"""
Updates sta | rt/end times after last_run_time set
"""
utcnow = (datetime.datetime.utcnow()
.replace(microsecond=0, tzinfo=dateutil.tz.tzutc()))
delta = datetime.timedelta(minutes=self.default_delay_minutes)
if self.last_run_time:
if not self.start_time or self.last_run_time > self.start_time:
self.start_time = self.last_run_time - delta
self.end_time = utcnow
elif not self.start_time:
self.start_time = utcnow - delta
self.end_time = utcnow
def set_last_run_time(self, run_time):
"""
Sets the last run time to the run_time argument.
Arguments:
run_time - the time when this script last executed successfully (end)
"""
if utils.CANCEL_WORKERS_EVENT.is_set():
return
utcnow = (datetime.datetime.utcnow()
.replace(microsecond=0, tzinfo=dateutil.tz.tzutc()))
if not run_time:
run_time = utcnow
self.config.set(
self.section_name, 'last_run_time', run_time.isoformat())
self.config.save()
self.last_run_time = run_time
def validate(self):
"""
Validates configuration
"""
if not self.metric_config_path:
raise ValueError('options.metric_config_path is required')
if not os.path.exists(self.metric_config_path):
raise ValueError('ERROR: Configuration file (%s) does not exist' %
(self.metric_config_path))
    def load_metric_config(self):
        """
        Loads the metric configuration from the configuration file.
        Populates self.metrics_config and the set of namespaces named by
        the configuration.  Idempotent: does nothing if already loaded.
        """
        if self.metrics_config:
            return
        with open(self.metric_config_path, 'r') as conffd:
            config = json.load(conffd)
        # The JSON file must have a top-level 'metrics' object.
        if 'metrics' not in config:
            raise ValueError('ERROR: Configuration file (%s) is not valid' %
                             (self.metric_config_path))
        self.metrics_config = config['metrics']
        # Collect every non-empty 'namespace' declared by the metric entries.
        for _, config in self.metrics_config.iteritems():
            if 'namespace' in config and config['namespace']:
                self.namespaces.add(config['namespace'])
    #pylint: disable=unsupported-membership-test
    #pylint: disable=unsubscriptable-object
    def get_metric_config(self, namespace, metric_name):
        """
        Given a namespace and metric, get the configuration.
        Arguments:
        namespace - the namespace
        metric_name - the metric's name
        Returns:
        the configuration for this namespace and metric
        (None when no configured pattern matches)
        """
        self.load_metric_config()
        current_match = None
        # Configuration keys are regex patterns matched against the
        # lowercase "namespace.metric" form (slashes become dots).
        metric = namespace.replace('/', '.').lower() + '.' + metric_name.lower()
        for name, config in self.metrics_config.iteritems():
            if re.match(name, metric, re.IGNORECASE):
                # Among multiple matches, keep the one with the highest
                # 'priority' value (first match wins when no priority).
                if current_match is None or \
                   ('priority' in current_match and \
                    current_match['priority'] < config['priority']):
                    current_match = config
        return current_match
#pylint: disable=too-many-instance-attributes
class AwsMetricsConfiguration(AwsBaseMetricsConfiguration):
    """
    Configuration file for this command
    """

    def __init__(self, config_file_path):
        super(AwsMetricsConfiguration, self).__init__(
            config_file_path=config_file_path)
        # One CloudWatch configuration object per configured region.
        self.cloudwatch = {}
        for region_name in self.regions:
            self.cloudwatch[region_name] = AwsCloudwatchConfiguration(
                self, region_name)

    def get_region_config(self, region):
        """
        Gets the configuration for cloudwatch for the given region
        Arguments:
        region - the name of the region
        Returns None when the region is not configured.
        """
        return self.cloudwatch.get(region)

    def validate(self):
        """
        Checks that all required configuration items are set
        Throws:
        ValueError when a configuration item is missing a value
        """
        if (not self.aws_access_key_id or
                not self.aws_secret_access_key or
                not self.regions):
            raise ValueError('AWS access key ID, secret access key, '
                             'and regions are required')
        # Also validate every per-region CloudWatch configuration.
        for _, cloudwatch in self.cloudwatch.iteritems():
            cloudwatch.validate()
class AwsCloudwatchMetricsCommand(AwsBaseMetricsCommand):
"""
Wavefront command for retrieving metrics from AWS cloudwatch.
"""
    def __init__(self, **kwargs):
        # Forward all options to the base command; metrics_config is
        # populated later, after the configuration file is parsed.
        super(AwsCloudwatchMetricsCommand, self).__init__(**kwargs)
        self.metrics_config = None
    def _parse_args(self, arg):
        """
        Parses the arguments passed into this command.
        Arguments:
        arg - the argparse parser object returned from parser.parse_args()
        """
        self.config = AwsMetricsConfiguration(arg.config_file_path)
        self.config.validate()
        # A logging section in the config file is optional: ignore a
        # missing section rather than failing the command at startup.
        try:
            logging.config.fileConfig(arg.config_file_path)
        except ConfigParser.NoSectionError:
            pass
#pylint: disable=no-self-use
def get_help_text(self):
"""
Returns help text for --help of this wavefront command
"""
return "Pull metrics from AWS CloudWatch and push them into Wavefront"
    def _execute(self):
        """
        Execute this command
        """
        # Run the shared AWS setup first, then the CloudWatch-specific work.
        super(AwsCloudwatchMetricsCommand, self)._execute()
        self._process_cloudwatch()
#pylint: disable=too-many-locals
#pylint: disable=too-many-branches
#pylint: disable=too-many-statements
def _process_list_metrics_response(self, metrics, sub_account, region):
"""
This function is called by _process_cloudwatch_region() after calling
list_metrics() API.
Loops over all metrics and call GetMetricStatistics() on each that are
included by the configuration.
Arguments:
metrics - the array of metrics returned from ListMetrics() ('Metrics')
sub_account - the AwsSubAccount object representing the top level
"""
cloudwatch_config = self.config.get_region_config(region)
start = cloudwatch_config.start_time
end = cloudwatch_config.end_time
session = sub_account.get_session(region, False)
cloudwatch = session.client('cloudwatch')
account_id = sub_account.get_account_id()
for metric in metrics:
if utils.CANCEL_WORKERS_EVENT.is_set():
break
top = |
"""SCons.Tool.SCCS.py
Tool-specific initialization for SCCS.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Tool/SCCS.py 5023 2010/06/14 22:05:46 scons"
import SCons.Action
import SCons.Builder
import SCons.Util
def generate(env):
    """Add a Builder factory function and construction variables for
    SCCS to an Environment."""

    def SCCSFactory(env=env):
        """ """
        import SCons.Warnings as W
        W.warn(W.DeprecatedSourceCodeWarning, """The SCCS() factory is deprecated and there is no replacement.""")
        builder_action = SCons.Action.Action('$SCCSCOM', '$SCCSCOMSTR')
        return SCons.Builder.Builder(action=builder_action, env=env)

    env.SCCS = SCCSFactory
    # Default construction variables for driving `sccs get`.
    env['SCCS'] = 'sccs'
    env['SCCSFLAGS'] = SCons.Util.CLVar('')
    env['SCCSGETFLAGS'] = SCons.Util.CLVar('')
    env['SCCSCOM'] = '$SCCS $SCCSFLAGS get $SCCSGETFLAGS $TARGET'
def exists(env):
    # The tool is usable when SCons can find an `sccs` executable.
    return env.Detect('sccs')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# Sphinx extension modules enabled for this documentation build.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.todo',
    'sphinx.ext.coverage',
    'sphinx.ext.viewcode',
    'sphinx.ext.githubpages',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'agile-analytics'
copyright = u'2016, Chris Heisel'
author = u'Chris Heisel'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u'0.1'
# The full version, including alpha/beta/rc tags.
release = u'0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = u'agile-analytics v0.1'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string i | s equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# | Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'agile-analyticsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'agile-analytics.tex', u'agile-analytics Documentation',
u'Chris Heisel', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
#
# latex_appendices = []
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'agile-analytics', u'agile-analytics Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target na |
__author__ = "Laura Martinez Sanchez"
__license__ = "GPL"
__version__ = "1.0"
__email__ = "lmartisa@gmail.com"
from osgeo import gdal, gdalnumeric, ogr, osr
import numpy as np
from PIL import Image, ImageDraw
from collections import defaultdict
import pickle
import time
from texture_common import *
#Uses a gdal geomatrix (gdal.GetGeoTransform()) to calculate the pixel location of a geospatial coordinate
#Uses a gdal geomatrix (gdal.GetGeoTransform()) to calculate the pixel location of a geospatial coordinate
def world2Pixel(geoMatrix, x, y):
    """Translate the geospatial coordinate (x, y) into (pixel, line)
    raster indices using a GDAL geotransform tuple:
    (originX, pixelWidth, rotX, originY, rotY, pixelHeight)."""
    originX = geoMatrix[0]
    originY = geoMatrix[3]
    pixelWidth = geoMatrix[1]
    pixelHeight = geoMatrix[5]
    # NOTE: the rotation terms (geoMatrix[2]/geoMatrix[4]) are ignored,
    # i.e. a north-up image is assumed.
    col = int((x - originX) / pixelWidth)
    row = int((y - originY) / pixelHeight)
    return (col, row)
#Converts a Python Imaging Library array to a gdalnumeric image.
def imageToArray(i):
'''
Converts a Python Imaging Library (PIL) array to a gdalnumeric image.
'''
a = gdalnumeric.fromstring(i.tobytes(), 'b')
a.shape = i.im.size[1], i.im.size[0]
return a
def ReadClipArray(lrY, ulY, lrX, ulX, img):
    """Read the pixel window [ulX:lrX, ulY:lrY] from every band of *img*
    into a (bands, rows, cols) numpy array — only the pixels needed for
    the clip are read from disk."""
    rows = lrY - ulY
    cols = lrX - ulX
    clip = np.empty((img.RasterCount, rows, cols))
    # GDAL band indices are 1-based; the output array is 0-based.
    for band_idx in range(img.RasterCount):
        band_data = img.GetRasterBand(band_idx + 1).ReadAsArray(
            ulX, ulY, cols, rows)
        clip[band_idx] = band_data
    return clip
#Does the clip of the shape
def ObtainPixelsfromShape(field, rasterPath, shapePath, INX, *args):
    """Clip the raster with every polygon of the shapefile and collect
    the pixel values per class label.

    field - shapefile attribute holding the class label (e.g. 'zona')
    rasterPath - path to the raster image
    shapePath - path to the ESRI shapefile with the training polygons
    INX - if True, additional (texture) layers are used
    args - optional path to the texture layers (used with INX)
    Returns (clipdic, count): dict mapping str(label) -> list of
    (n_pixels, n_bands) arrays, and the total pixel count.
    """
    # field='zona'
    # open dataset, also load as a gdal image to get geotransform
    # INX can be false. If True, uses additional layers.
    print "Starting clip...."
    start = time.time()
    if args:
        texture_train_Path = args[0]
        print texture_train_Path
        img, textArrayShp = createTextureArray(texture_train_Path, rasterPath)
    else:
        #print"Indexes = False"
        img = gdal.Open(rasterPath)
    geoTrans = img.GetGeoTransform()
    geoTransaux = img.GetGeoTransform()
    proj = img.GetProjection()
    #open shapefile
    driver = ogr.GetDriverByName("ESRI Shapefile")
    dataSource = driver.Open(shapePath, 0)
    layer = dataSource.GetLayer()
    clipdic = defaultdict(list)
    count = 0
    #Convert the layer extent to image pixel coordinates, we read only de pixels needed
    for feature in layer:
        minX, maxX, minY, maxY = feature.GetGeometryRef().GetEnvelope()
        geoTrans = img.GetGeoTransform()
        ulX, ulY = world2Pixel(geoTrans, minX, maxY)
        lrX, lrY = world2Pixel(geoTrans, maxX, minY)
        #print ulX,lrX,ulY,lrY
        # Calculate the pixel size of the new image
        pxWidth = int(lrX - ulX)
        pxHeight = int(lrY - ulY)
        clip = ReadClipArray(lrY, ulY, lrX, ulX, img)
        #EDIT: create pixel offset to pass to new image Projection info
        xoffset = ulX
        yoffset = ulY
        #print "Xoffset, Yoffset = ( %d, %d )" % ( xoffset, yoffset )
        # Create a new geomatrix for the image
        geoTrans = list(geoTrans)
        geoTrans[0] = minX
        geoTrans[3] = maxY
        # Map points to pixels for drawing the boundary on a blank 8-bit, black and white, mask image.
        points = []
        pixels = []
        geom = feature.GetGeometryRef()
        pts = geom.GetGeometryRef(0)
        # Outer ring vertices -> world coords -> pixel coords of the clip.
        [points.append((pts.GetX(p), pts.GetY(p))) for p in range(pts.GetPointCount())]
        [pixels.append(world2Pixel(geoTrans, p[0], p[1])) for p in points]
        rasterPoly = Image.new("L", (pxWidth, pxHeight), 1)
        rasterize = ImageDraw.Draw(rasterPoly)
        rasterize.polygon(pixels, 0)
        mask = imageToArray(rasterPoly)
        #SHow the clips of the features
        # plt.imshow(mask)
        # plt.show()
        # Clip the image using the mask into a dict
        # Pixels outside the polygon become NaN and are dropped below.
        temp = gdalnumeric.choose(mask, (clip, np.nan))
        # #SHow the clips of the image
        # plt.imshow(temp[4])
        # plt.show()
        temp = np.concatenate(temp.T)
        temp = temp[~np.isnan(temp[:, 0])] #NaN
        #print temp.shape
        clipdic[str(feature.GetField(field))].append(temp)
        count += temp.shape[0]
    end = time.time()
    print "Time clipshape:"
    print (end - start)
    print "count", count
    return clipdic, count
##########################################################################
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2007, 2010 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with translate; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""This module represents Persian language.
For more information, see U{http://en.wikipedia.org/wiki/Persian_language}
"""
from translate.lang import common
import re
def guillemets(text):
    """Convert balanced quotation pairs in *text* to French guillemets
    («...»), as used in Persian."""

    def _to_guillemets(match):
        leading = match.group(1)
        # Don't touch XML tag attributes like <a href="something">:
        # there the character before the opening quote is '='.
        if leading == u"=":
            return match.group(0)
        return u"%s«%s»" % (leading, match.group(2))

    # Only rewrite double quotes when they pair up evenly; an odd count
    # means conversion would probably be unsafe.
    if text.count(u'"') % 2 == 0:
        text = re.sub('(.|^)"([^"]+)"', _to_guillemets, text)

    single = text.count(u"'")
    if single:
        if single == text.count(u'`'):
            # TeX-style `...' quoting.
            text = re.sub("(.|^)`([^']+)'", _to_guillemets, text)
        elif single % 2 == 0:
            text = re.sub("(.|^)'([^']+)'", _to_guillemets, text)

    # Curly double quotes are always converted.
    return re.sub(u'(.|^)“([^”]+)”', _to_guillemets, text)
class fa(common.Common):
    """This class represents Persian."""

    # Persian uses the Arabic comma followed by a space as list separator.
    listseperator = u"، "

    # Western punctuation mapped to the Perso-Arabic equivalents.
    puncdict = {
        u",": u"،",
        u";": u"؛",
        u"?": u"؟",
        #This causes problems with variables, so commented out for now:
        #u"%": u"٪",
    }

    # Capitalisation checks don't apply to the Perso-Arabic script.
    ignoretests = ["startcaps", "simplecaps"]
    #TODO: check persian numerics
    #TODO: zwj and zwnj?

    def punctranslate(cls, text):
        """Implement "French" quotation marks."""
        text = super(cls, cls).punctranslate(text)
        return guillemets(text)
    # Pre-decorator classmethod registration (kept for historical style).
    punctranslate = classmethod(punctranslate)
|
'
"defaults to 'trybot' directory at top level of your "
'repo-managed checkout.'))
parser.add_remote_option('--chrome_rev', default=None, type='string',
action='callback', dest='chrome_rev',
callback=_CheckChromeRevOption,
help=('Revision of Chrome to use, of type [%s]'
% '|'.join(constants.VALID_CHROME_REVISIONS)))
parser.add_remote_option('--profile', default=None, type='string',
action='store', dest='profile',
help='Name of profile to sub-specify board variant.')
#
# Patch selection options.
#
group = CustomGroup(
parser,
'Patch Options')
group.add_remote_option('-g', '--gerrit-patches', action='extend',
default=[], type='string',
metavar="'Id1 *int_Id2...IdN'",
help=('Space-separated list of short-form Gerrit '
"Change-Id's or change numbers to patch. "
"Please prepend '*' to internal Change-Id's"))
group.add_remote_option('-G', '--rietveld-patches', action='extend',
default=[], type='string',
metavar="'id1[:subdir1]...idN[:subdirN]'",
help=('Space-separated list of short-form Rietveld '
'issue numbers to patch. If no subdir is '
'specified, the src directory is used.'))
group.add_option('-p', '--local-patches', action='extend', default=[],
metavar="'<project1>[:<branch1>]...<projectN>[:<branchN>]'",
help=('Space-separated list of project branches with '
'patches to apply. Projects are specified by name. '
'If no branch is specified the current branch of the '
'project will be used.'))
parser.add_option_group(group)
#
# Remote trybot options.
#
group = CustomGroup(
parser,
'Remote Trybot Options (--remote)')
group.add_remote_option('--hwtest', dest='hwtest', action='store_true',
default=False,
help='Run the HWTest stage (tests on real hardware)')
group.add_option('--remote-description', default=None,
help=('Attach an optional description to a --remote run '
'to make it easier to identify the results when it '
'finishes'))
group.add_option('--slaves', action='extend', default=[],
help=('Specify specific remote tryslaves to run on (e.g. '
'build149-m2); if the bot is busy, it will be queued'))
group.add_remote_option('--channel', dest='channels', action='extend',
default=[],
help=('Specify a channel for a payloads trybot. Can '
'be specified multiple times. No valid for '
'non-payloads configs.'))
group.add_option('--test-tryjob', action='store_true',
default=False,
help=('Submit a tryjob to the test repository. Will not '
'show up on the production trybot waterfall.'))
parser.add_option_group(group)
#
# Branch creation options.
#
group = CustomGroup(
parser,
'Branch Creation Options (used with branch-util)')
group.add_remote_option('--branch-name',
help='The branch to create or delete.')
group.add_remote_option('--delete-branch', default=False, action='store_true',
help='Delete the branch specified in --branch-name.')
group.add_remote_option('--rename-to', type='string',
help='Rename a branch to the specified name.')
group.add_remote_option('--force-create', default=False, action='store_true',
help='Overwrites an existing branch.')
parser.add_option_group(group)
#
# Advanced options.
#
group = CustomGroup(
parser,
'Advanced Options',
'Caution: use these options at your own risk.')
group.add_remote_option('--bootstrap-args', action='append', default=[],
help=('Args passed directly to the bootstrap re-exec '
'to skip verification by the bootstrap code'))
group.add_remote_option('--buildbot', dest='buildbot', action='store_true',
default=False, help='This is running on a buildbot')
group.add_remote_option('--buildnumber', help='build number', type='int',
default=0)
group.add_option('--chrome_root', default=None, type='path',
action='callback', callback=_CheckChromeRootOption,
dest='chrome_root', help='Local checkout of Chrome to use.')
group.add_remote_option('--chrome_version', default=None, type='string',
action='callback', dest='chrome_version',
callback=_CheckChromeVersionOption,
help=('Used with SPEC logic to force a particular '
'SVN revision of chrome rather than the '
'latest.'))
group.add_remote_option('--clobber', action='store_true', dest='clobber',
default=False,
help='Clears an old checkout before syncing')
group.add_remote_option('--latest-toolchain', action='store_true',
default=False,
help='Use the latest toolchain.')
parser.add_option('--log_dir', dest='log_dir', type='path',
help=('Directory where logs are stored.'))
group.add_remote_option('--maxarchives', dest='max_archive_builds',
default=3, type='int',
| help='Change the local sav | ed build count limit.')
parser.add_remote_option('--manifest-repo-url',
help=('Overrides the default manifest repo url.'))
group.add_remote_option('--compilecheck', action='store_true', default=False,
help='Only verify compilation and unit tests.')
group.add_remote_option('--noarchive', action='store_false', dest='archive',
default=True, help="Don't run archive stage.")
group.add_remote_option('--nobootstrap', action='store_false',
dest='bootstrap', default=True,
help=("Don't checkout and run from a standalone "
'chromite repo.'))
group.add_remote_option('--nobuild', action='store_false', dest='build',
default=True,
help="Don't actually build (for cbuildbot dev)")
group.add_remote_option('--noclean', action='store_false', dest='clean',
default=True, help="Don't clean the buildroot")
group.add_remote_option('--nocgroups', action='store_false', dest='cgroups',
default=True,
help='Disable cbuildbots usage of cgroups.')
group.add_remote_option('--nochromesdk', action='store_false',
dest='chrome_sdk', default=True,
help=("Don't run the ChromeSDK stage which builds "
'Chrome outside of the chroot.'))
group.add_remote_option('--noprebuilts', action='store_false',
dest='prebuilts', default=True,
help="Don't upload prebuilts.")
group.add_remote_option('--nopatch', action='store_false',
dest='postsync_patch', default=True,
help=("Don't run PatchChanges stage. This does not "
'disable patching in of chromite patches '
'during BootstrapStage.'))
group.add_remote_option('--nopaygen |
from xcrawler.compatibility.string_converter.compatible_string_c | onverter import Compatib | leStringConverter
class StringConverterPython3(CompatibleStringConverter):
    """A Python 3 compatible class for converting a string to a specified type.
    """

    def convert_to_string(self, string):
        # Delegate to the shared helper; presumably on Python 3 a str is
        # already unicode, so only non-str input needs normalizing.
        return self.try_convert_to_unicode_string(string)

    def list_convert_to_string(self, list_strings):
        # Normalize every entry independently, preserving order.
        converted = [self.try_convert_to_unicode_string(entry)
                     for entry in list_strings]
        return converted
|
u | pperLimit = 1000
oddCounter = 3
oddList = []
n = 0
while upperLimit >= oddCounter:
oddList.append(oddCounter)
oddCounter += 2
while oddList(n) < (upperLimit - 1):
if o | ddList(n) %
print(oddList) |
# coding=utf-8
import heapq
import unittest
"""295. Find Median from Data Stream
https://leetcode.com/problems/find-median-from-data-stream/description/
Median is the middle value in an ordered integer list. If the size of the list
is even, there is no middle value. So the median is the mean of the two middle
value.
For example,
`[2,3,4]`, the median is `3`
`[2,3]`, the median is `(2 + 3) / 2 = 2.5`
Design a data structure that supports the following two operations:
* void addNum(int num) - Add a integer number from the data stream to the data structure.
* double findMedian() - Return the median of all elements so far.
**Example:**
addNum(1)
addNum(2)
findMedian() -> 1.5
addNum(3)
findMedian() -> 2
Similar Questions:
Sliding Window Median (sliding-window-median)
"""
class MedianFinder(object):
    """Maintain a running median with two heaps.

    `small` holds the lower half of the numbers as a max-heap (values are
    stored negated, since heapq is a min-heap); `large` holds the upper
    half as a min-heap.  The heaps are kept balanced so that either both
    halves are equal in size or `small` has exactly one extra element.
    Both operations meet the problem's requirements: addNum is O(log n),
    findMedian is O(1).
    """

    def __init__(self):
        """
        initialize your data structure here.
        """
        self.small = []  # negated lower half (max-heap)
        self.large = []  # upper half (min-heap)

    def addNum(self, num):
        """
        :type num: int
        :rtype: void
        """
        # Push onto the lower half, then move its maximum to the upper
        # half so that every element of `large` >= every element of `small`.
        heapq.heappush(self.small, -num)
        heapq.heappush(self.large, -heapq.heappop(self.small))
        # Rebalance: `small` may equal `large` in size or exceed it by one.
        if len(self.small) < len(self.large):
            heapq.heappush(self.small, -heapq.heappop(self.large))

    def findMedian(self):
        """
        :rtype: float
        """
        if len(self.small) > len(self.large):
            # Odd count: the median is the max of the lower half.
            return float(-self.small[0])
        # Even count: average the two middle values.
        return (-self.small[0] + self.large[0]) / 2.0
# Your MedianFinder object will be instantiated and called as such:
# obj = MedianFinder()
# obj.addNum(num)
# param_2 = obj.findMedian()
def test(self):
    # Leftover scaffold from the LeetCode template; intentionally empty.
    pass
if __name__ == "__main__":
unittest.main()
|
"""A very simple logger that tries to be concurrency-safe."""
import os, sys
import time
import traceback
import subprocess
import select
# Destination file shared by all logging helpers in this module.
LOG_FILE = '/var/log/nodemanager.func'
# basically define 3 levels
LOG_NONE=0
LOG_NODE=1
LOG_VERBOSE=2
# default is to log a reasonable amount of stuff for when running on operational nodes
LOG_LEVEL=1
def set_level(level):
    """Set the module-wide log level; *level* must be one of the LOG_*
    constants."""
    global LOG_LEVEL
    assert level in (LOG_NONE, LOG_NODE, LOG_VERBOSE)
    LOG_LEVEL = level
def verbose(msg):
    """Log *msg* at the verbose level, tagged with a '(v) ' prefix."""
    log('(v) '+msg,LOG_VERBOSE)
def log(msg,level=LOG_NODE):
    """Write <msg> to the log file if level >= current log level (default LOG_NODE)."""
    if (level > LOG_LEVEL):
        return
    try:
        # Append-only open with mode 0600 (owner read/write); opening and
        # closing per message keeps concurrent writers reasonably safe.
        fd = os.open(LOG_FILE, os.O_WRONLY | os.O_CREAT | os.O_APPEND, 0600)
        if not msg.endswith('\n'): msg += '\n'
        os.write(fd, '%s: %s' % (time.asctime(time.gmtime()), msg))
        os.close(fd)
    except OSError:
        # Can't write the log file: fall back to stderr.
        sys.stderr.write(msg)
        sys.stderr.flush()
def log_exc(msg="",name=None):
    """Log the traceback resulting from an exception, optionally tagged
    with a caller *name*."""
    tb = traceback.format_exc()
    if name:
        log("%s: EXCEPTION caught <%s> \n %s" %(name, msg, tb))
    else:
        log("EXCEPTION caught <%s> \n %s" %(msg, tb))
#################### child processes
# avoid waiting until the process returns;
# that makes debugging of hanging children hard
class Buffer:
    """Accumulate characters from a child's output and emit one log line
    per newline, prefixed with *message*."""

    def __init__(self, message='log_call: '):
        self.buffer = ''
        self.message = message

    def add(self, c):
        self.buffer += c
        if c == '\n':
            self.flush()

    def flush(self):
        # Only emit non-empty buffers so blank lines are never logged.
        if self.buffer:
            log(self.message + self.buffer)
            self.buffer = ''
# time out in seconds - avoid hanging subprocesses - default is 5 minutes
default_timeout_minutes=5

# returns a bool that is True when everything goes fine and the retcod is 0
def log_call(command,timeout=default_timeout_minutes*60,poll=1):
    """Run *command* (an argv list), mirroring its output into the log.

    Output is read one byte at a time inside a select() poll loop so a
    hanging child can still be observed; the child is terminated once
    *timeout* seconds elapse.  Returns True only when the child exits
    with status 0 before the deadline.
    """
    message=" ".join(command)
    log("log_call: running command %s" % message)
    verbose("log_call: timeout=%r s" % timeout)
    verbose("log_call: poll=%r s" % poll)
    # Absolute deadline after which the child gets terminated.
    trigger=time.time()+timeout
    result = False
    try:
        child = subprocess.Popen(command, bufsize=1,
                                 stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True)
        buffer = Buffer()
        while True:
            # see if anything can be read within the poll interval
            (r,w,x)=select.select([child.stdout],[],[],poll)
            if r: buffer.add(child.stdout.read(1))
            # is process over ?
            returncode=child.poll()
            # yes
            if returncode != None:
                buffer.flush()
                # child is done and return 0
                if returncode == 0:
                    log("log_call:end command (%s) completed" % message)
                    result=True
                    break
                # child has failed
                else:
                    log("log_call:end command (%s) returned with code %d" %(message,returncode))
                    break
            # no : still within timeout ?
            if time.time() >= trigger:
                buffer.flush()
                child.terminate()
                log("log_call:end terminating command (%s) - exceeded timeout %d s"%(message,timeout))
                break
    except: log_exc("failed to run command %s" % message)
    return result
|
import numpy as np
from displays.letters import ALPHABET
class Writer:
    """Produce scrolling text for the LED display, frame by frame"""

    def __init__(self):
        self.font = ALPHABET
        # One blank column inserted between consecutive glyphs.
        self.spacer = np.zeros([8, 1], dtype=int)
        self.phrase = None

    def make_phrase(self, phrase):
        """Convert a string into a long numpy array with spacing"""
        # ALPHABET currently has no capitals, so fold to lower case first.
        glyphs = [np.hstack([self.font[ch], self.spacer])
                  for ch in phrase.lower()]
        self.phrase = np.hstack(glyphs)

    def generate_frames(self):
        """Produce single 8*8 frames scrolling across phrase"""
        _, width = np.shape(self.phrase)
        for offset in range(width - 8):
            yield self.phrase[:, offset:offset + 8]

    def write(self, phrase):
        """Easily get frames for a phrase"""
        self.make_phrase(phrase)
        for window in self.generate_frames():
            yield window
|
"""Handle nice names"""
import base64
from | pysyte.oss import platforms
def nice(data):
    """Encode the utf-8 bytes of *data* as base64 (returns bytes)."""
    raw = data.encode('utf-8')
    return base64.b64encode(raw)
def name(data):
    """Decode base64 *data* back to the original utf-8 string."""
    raw = base64.b64decode(data)
    return raw.decode('utf-8')
def chmod(data, *_):
    """Put the decoded name on the clipboard; extra positional args are ignored."""
    decoded = name(data)
    platforms.put_clipboard_data(decoded)
    return ''
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for contrib.seq2seq.python.seq2seq.beam_search_ops."""
# pylint: disable=unused-import,g-bad-import-order
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: enable=unused-import
import itertools
import numpy as np
from tensorflow.contrib.seq2seq.python.ops import beam_search_ops
from tensorflow.python.framework import ops
from tensorflow.python.platform import test
def _transpose_batch_time(x):
return np.transpose(x, [1, 0, 2]).astype(np.int32)
class GatherTreeTest(test.TestCase):
  """Tests for beam_search_ops.gather_tree (beam-search path reconstruction)."""

  def testGatherTreeOne(self):
    """Happy path: beams are rewritten by backtracking through parent_ids."""
    # (max_time = 4, batch_size = 1, beams = 3)
    end_token = 10
    step_ids = _transpose_batch_time(
        [[[1, 2, 3], [4, 5, 6], [7, 8, 9], [-1, -1, -1]]])
    parent_ids = _transpose_batch_time(
        [[[0, 0, 0], [0, 1, 1], [2, 1, 2], [-1, -1, -1]]])
    max_sequence_lengths = [3]
    # Steps past max_sequence_lengths[0] == 3 are filled with end_token.
    expected_result = _transpose_batch_time([[[2, 2, 2], [6, 5, 6], [7, 8, 9],
                                              [10, 10, 10]]])
    beams = beam_search_ops.gather_tree(
        step_ids=step_ids,
        parent_ids=parent_ids,
        max_sequence_lengths=max_sequence_lengths,
        end_token=end_token)
    with self.session(use_gpu=True):
      self.assertAllEqual(expected_result, beams.eval())

  def testBadParentValuesOnCPU(self):
    """A negative parent id must raise an op error on the CPU kernel."""
    # (batch_size = 1, max_time = 4, beams = 3)
    # bad parent in beam 1 time 1
    end_token = 10
    step_ids = _transpose_batch_time(
        [[[1, 2, 3], [4, 5, 6], [7, 8, 9], [-1, -1, -1]]])
    parent_ids = _transpose_batch_time(
        [[[0, 0, 0], [0, -1, 1], [2, 1, 2], [-1, -1, -1]]])
    max_sequence_lengths = [3]
    with ops.device("/cpu:0"):
      beams = beam_search_ops.gather_tree(
          step_ids=step_ids,
          parent_ids=parent_ids,
          max_sequence_lengths=max_sequence_lengths,
          end_token=end_token)
    with self.cached_session():
      with self.assertRaisesOpError(
          r"parent id -1 at \(batch, time, beam\) == \(0, 0, 1\)"):
        _ = beams.eval()

  def testBadParentValuesOnGPU(self):
    """On GPU a bad parent id does not raise; it surfaces as -1 in the output."""
    # Only want to run this test on CUDA devices, as gather_tree is not
    # registered for SYCL devices.
    if not test.is_gpu_available(cuda_only=True):
      return
    # (max_time = 4, batch_size = 1, beams = 3)
    # bad parent in beam 1 time 1; appears as a negative index at time 0
    end_token = 10
    step_ids = _transpose_batch_time(
        [[[1, 2, 3], [4, 5, 6], [7, 8, 9], [-1, -1, -1]]])
    parent_ids = _transpose_batch_time(
        [[[0, 0, 0], [0, -1, 1], [2, 1, 2], [-1, -1, -1]]])
    max_sequence_lengths = [3]
    expected_result = _transpose_batch_time([[[2, -1, 2], [6, 5, 6], [7, 8, 9],
                                              [10, 10, 10]]])
    with ops.device("/device:GPU:0"):
      beams = beam_search_ops.gather_tree(
          step_ids=step_ids,
          parent_ids=parent_ids,
          max_sequence_lengths=max_sequence_lengths,
          end_token=end_token)
    with self.session(use_gpu=True):
      self.assertAllEqual(expected_result, beams.eval())

  def testGatherTreeBatch(self):
    """Random batch: end_token padding past each length; -1 only after end_token."""
    batch_size = 10
    beam_width = 15
    max_time = 8
    # NOTE(review): some lengths exceed max_time — presumably the op clips
    # them to max_time internally; confirm against the kernel.
    max_sequence_lengths = [0, 1, 2, 4, 7, 8, 9, 10, 11, 0]
    end_token = 5
    with self.session(use_gpu=True):
      step_ids = np.random.randint(
          0, high=end_token + 1, size=(max_time, batch_size, beam_width))
      parent_ids = np.random.randint(
          0, high=beam_width - 1, size=(max_time, batch_size, beam_width))
      beams = beam_search_ops.gather_tree(
          step_ids=step_ids.astype(np.int32),
          parent_ids=parent_ids.astype(np.int32),
          max_sequence_lengths=max_sequence_lengths,
          end_token=end_token)
      self.assertEqual((max_time, batch_size, beam_width), beams.shape)
      beams_value = beams.eval()
      for b in range(batch_size):
        # Past max_sequence_lengths[b], we emit all end tokens.
        b_value = beams_value[max_sequence_lengths[b]:, b, :]
        self.assertAllClose(b_value, end_token * np.ones_like(b_value))
      for batch, beam in itertools.product(
          range(batch_size), range(beam_width)):
        v = np.squeeze(beams_value[:, batch, beam])
        if end_token in v:
          found_bad = np.where(v == -1)[0]
          self.assertEqual(0, len(found_bad))
          found = np.where(v == end_token)[0]
          found = found[0]  # First occurrence of end_token.
          # If an end_token is found, everything before it should be a
          # valid id and everything after it should be -1.
          if found > 0:
            self.assertAllEqual(
                v[:found - 1] >= 0, np.ones_like(v[:found - 1], dtype=bool))
            self.assertAllClose(v[found + 1:],
                                end_token * np.ones_like(v[found + 1:]))
# Allow running this test file directly.
if __name__ == "__main__":
  test.main()
|
from distutils.core import setup
# Importing py2exe registers the 'console' setup keyword used below,
# enabling a Windows .exe build of the script.
import py2exe
setup(console=['DTR2Sync.py'])
|
import unittest
from music_app.post import Post
class TestPost(unittest.TestCase):
    """Unit tests for the Post class."""

    def test_object_creation(self):
        """Every constructor argument must land on the matching attribute."""
        expected = {
            'title': 'Promises',
            'artist': 'Dreamers',
            'genre': 'rock',
            'year': '2014',
            'score': 8,
            'thumbnail': 'http://example.com',
            'timestamp': 146666666.66,
            'url': 'https://www.youtube.com',
        }
        p = Post('Promises', 'Dreamers', 'rock', '2014', 8,
                 'http://example.com', 146666666.66, 'https://www.youtube.com')
        for attribute, value in expected.items():
            self.assertEqual(getattr(p, attribute), value)
|
from __ | future__ import unicode_literals
try:
from django.urls import re_path
except ImportError:
from django.conf.urls import url as re_path
from django.shortcuts import render
from django.views.decorators.http import require_GET
from paypal.standard.pdt.views import process_pdt
@require_GET
def pdt(request, template="pdt/pdt.html", context=None):
    """Standard implementation of a view that processes PDT and then renders a template.

    For more advanced uses, create your own view and call process_pdt.
    """
    pdt_obj, failed = process_pdt(request)
    # Reuse the caller-supplied context dict when given (it is mutated in place).
    context = context or {}
    context["failed"] = failed
    context["pdt_obj"] = pdt_obj
    return render(request, template, context)
# Routing for the bundled sample view: mounts the PDT endpoint at /pdt/.
urlpatterns = [
    re_path(r'^pdt/$', pdt),
]
|
import ast
from python_minifier.rename.binding import NameBinding
from python_minifier.rename.name_generator import name_filter
from python_minifier.rename.util import is_namespace
def all_bindings(node):
    """
    All bindings in a module

    Walks the tree depth-first, yielding (namespace, binding) pairs for
    every namespace node encountered.

    :param node: The module to get bindings in
    :type node: :class:`ast.AST`
    :rtype: Iterable[ast.AST, Binding]
    """

    if is_namespace(node):
        yield from ((node, binding) for binding in node.bindings)

    for child in ast.iter_child_nodes(node):
        yield from all_bindings(child)
def sorted_bindings(module):
    """
    All bindings in a module, sorted by descending number of references

    :param module: The module to get bindings in
    :type module: :class:`ast.AST`
    :rtype: Iterable[ast.AST, Binding]
    """

    def reference_count(pair):
        # pair is (namespace, binding); order by how often the binding is used
        return len(pair[1].references)

    return sorted(all_bindings(module), key=reference_count, reverse=True)
def reservation_scope(namespace, binding):
    """
    Get the namespaces that are in the binding's reservation scope

    Returns the namespace nodes the binding name must be resolvable in:
    the local namespace plus every namespace on the path from each
    reference up to (but not including beyond) the local namespace.

    :param namespace: The local namespace of a binding
    :type namespace: :class:`ast.AST`
    :param binding: The binding to get the reservation scope for
    :type binding: Binding
    :rtype: set[ast.AST]
    """

    scopes = {namespace}

    for reference in binding.references:
        current = reference
        while current is not namespace:
            scopes.add(current.namespace)
            current = current.namespace

    return scopes
def add_assigned(node):
    """
    Add the assigned_names attribute to namespace nodes in a module

    :param node: The module to add the assigned_names attribute to
    :type node: :class:`ast.Module`
    """

    if is_namespace(node):
        # fresh, empty reservation set for this namespace
        node.assigned_names = set()

    for subtree in ast.iter_child_nodes(node):
        add_assigned(subtree)
def reserve_name(name, reservation_scope):
    """
    Mark *name* as taken in every namespace of a reservation scope

    :param str name: The name to reserve
    :param reservation_scope:
    :type reservation_scope: Iterable[:class:`ast.AST`]
    """

    for scope in reservation_scope:
        scope.assigned_names.add(name)
class UniqueNameAssigner(object):
    """
    Assign new names to renamed bindings

    Every binding receives its own unique name, with no reuse across
    sibling or nested namespaces.
    """

    def __init__(self):
        self.name_generator = name_filter()
        self.names = []

    def available_name(self):
        """Draw the next unused candidate name."""
        return next(self.name_generator)

    def __call__(self, module):
        assert isinstance(module, ast.Module)

        for _namespace, binding in sorted_bindings(module):
            if binding.allow_rename:
                binding.new_name = self.available_name()

        return module
class NameAssigner(object):
    """
    Assign new names to renamed bindings

    This assigner creates a name 'reservation scope' containing each namespace a binding is referenced in, including
    transitive namespaces. Bindings are then assigned the first available name that has no references in their
    reservation scope. This means names will be reused in sibling namespaces, and shadowed where possible in child
    namespaces.

    Bindings are assigned names in order of most references, with names assigned shortest first.
    """

    def __init__(self, name_generator=None):
        # self.names caches every candidate drawn from the generator so the
        # short names are retried for each new binding before a longer one
        # is generated.
        self.name_generator = name_generator if name_generator is not None else name_filter()
        self.names = []

    def iter_names(self):
        # Replay the already-generated candidates first, then lazily extend
        # the cache from the generator.
        for name in self.names:
            yield name

        while True:
            name = next(self.name_generator)
            self.names.append(name)
            yield name

    def available_name(self, reservation_scope, prefix=''):
        """
        Search for the first name that is not in reservation scope
        """
        for name in self.iter_names():
            if self.is_available(prefix + name, reservation_scope):
                return prefix + name

    def is_available(self, name, reservation_scope):
        """
        Is a name unreserved in a reservation scope

        :param str name: the name to check availability of
        :param reservation_scope: The scope to check
        :type reservation_scope: Iterable[:class:`ast.AST`]
        :rtype: bool
        """
        for namespace in reservation_scope:
            if name in namespace.assigned_names:
                return False
        return True

    def __call__(self, module, prefix_globals, reserved_globals=None):
        assert isinstance(module, ast.Module)

        add_assigned(module)

        # First pass: names explicitly reserved by bindings are pinned in
        # their whole reservation scope before anything is assigned.
        for namespace, binding in all_bindings(module):
            if binding.reserved is not None:
                scope = reservation_scope(namespace, binding)
                reserve_name(binding.reserved, scope)

        if reserved_globals is not None:
            for name in reserved_globals:
                module.assigned_names.add(name)

        # Second pass: most-referenced bindings first, so they receive the
        # shortest available names.
        for namespace, binding in sorted_bindings(module):
            scope = reservation_scope(namespace, binding)

            if binding.allow_rename:
                if isinstance(namespace, ast.Module) and prefix_globals:
                    name = self.available_name(scope, prefix='_')
                else:
                    name = self.available_name(scope)

                def should_rename():
                    # Closure over the current `binding`, `name` and `scope`.
                    if binding.should_rename(name):
                        return True

                    # It's no longer efficient to do this rename

                    if isinstance(binding, NameBinding):
                        # Check that the original name is still available
                        if binding.reserved == binding.name:
                            # We already reserved it (this is probably an arg)
                            return False

                        if not self.is_available(binding.name, scope):
                            # The original name has already been assigned to another binding,
                            # so we need to rename this anyway.
                            return True

                    return False

                if should_rename():
                    binding.rename(name)
                else:
                    # Any existing name will become reserved
                    binding.disallow_rename()

            if binding.name is not None:
                reserve_name(binding.name, scope)

        return module
def rename(module, prefix_globals=False, preserved_globals=None):
    """Rename all bindings in *module* in place using the default NameAssigner."""
    assigner = NameAssigner()
    assigner(module, prefix_globals, preserved_globals)
|
labels for each output.
`loss_` : float
The current loss computed with the loss function.
`label_binarizer_` : LabelBinarizer
A LabelBinarizer object trained on the training set.
`coefs_` : list, length n_layers - 1
The ith element in the list represents the weight matrix corresponding
to layer i.
`intercepts_` : list, length n_layers - 1
The ith element in the list represents the bias vector corresponding to
layer i + 1.
n_iter_ : int,
The number of iterations the algorithm has ran.
n_layers_ : int
Number of layers.
`n_outputs_` : int
Number of outputs.
`out_activation_` : string
Name of the output activation function.
Notes
-----
MLPClassifier trains iteratively since at each time step
the partial derivatives of the loss function with respect to the model
parameters are computed to update the parameters.
It can also have a regularization term added to the loss function
that shrinks model parameters to prevent overfitting.
This implementation works with data represented as dense and sparse numpy
arrays of floating point values.
References
----------
Hinton, Geoffrey E.
"Connectionist learning procedures." Artificial intelligence 40.1
(1989): 185-234.
Glorot, Xavier, and Yoshua Bengio. "Understanding the difficulty of
training deep feedforward neural networks." International Conference
on Artificial Intelligence and Statistics. 2010.
He, Kaiming, et al. "Delving deep into rectifiers: Surpassing human-level
performance on imagenet classification." arXiv preprint
arXiv:1502.01852 (2015).
Kingma, Diederik, and Jimmy Ba. "Adam: A method for stochastic
optimization." arXiv preprint arXiv:1412.6980 (2014).
"""
def __init__(self, hidden_layer_sizes=(100,), activation="relu",
algorithm='adam', alpha=0.0001,
batch_size=200, learning_rate="constant",
learning_rate_init=0.001, power_t=0.5, max_iter=200,
shuffle=True, random_state=None, tol=1e-4,
verbose=False, warm_start=False, momentum=0.9,
nesterovs_momentum=True, early_stopping=False,
validation_fraction=0.1, beta_1=0.9, beta_2=0.999,
epsilon=1e-8):
sup = super(MLPClassifier, self)
sup.__init__(hidden_layer_sizes=hidden_layer_sizes,
activation=activation, algorithm=algorithm, alpha=alpha,
batch_size=batch_size, learning_rate=learning_rate,
learning_rate_init=learning_rate_init, power_t=power_t,
max_iter=max_iter, loss='log_loss', shuffle=shuffle,
random_state=random_state, tol=tol, verbose=verbose,
warm_start=warm_start, momentum=momentum,
nesterovs_momentum=nesterovs_momentum,
early_stopping=early_stopping,
validation_fraction=validation_fraction,
beta_1=beta_1, beta_2=beta_2, epsilon=epsilon)
self.label_binarizer_ = LabelBinarizer()
    def _validate_input(self, X, y, incremental):
        """Validate X/y, (re)fit the label binarizer and one-hot encode y.

        On a fresh fit the observed classes define ``self.classes_``; on an
        incremental fit the new labels must be a subset of the known classes.
        """
        X, y = check_X_y(X, y, accept_sparse=['csr', 'csc', 'coo'],
                         multi_output=True)
        if y.ndim == 2 and y.shape[1] == 1:
            # Flatten (n_samples, 1) column vectors to 1-D with a warning.
            y = column_or_1d(y, warn=True)
        self.label_binarizer_.fit(y)

        if not hasattr(self, 'classes_') or not incremental:
            self.classes_ = self.label_binarizer_.classes_
        else:
            # Incremental path: reject labels outside the known class set.
            classes = self.label_binarizer_.classes_
            if not np.all(np.in1d(classes, self.classes_)):
                raise ValueError("`y` has classes not in `self.classes_`."
                                 " `self.classes_` has %s. 'y' has %s." %
                                 (self.classes_, classes))

        y = self.label_binarizer_.transform(y)
        return X, y
def decision_function(self, X):
"""Decision function of the mlp model
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
The input data.
Returns
-------
y : array-like, shape (n_samples,) or (n_samples, n_classes)
The values of decision function for each class in the model.
"""
check_is_fitted(self, "coefs_")
y_scores = self._decision_scores(X)
if self.n_outputs_ == 1:
return y_scores.ravel()
else:
return y_scores
def predict(self, X):
"""Predict using the multi-layer perceptron classifier
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
The input data.
Returns
-------
y : array-like, shape (n_samples,) or (n_samples, n_classes)
The predicted classes.
"""
check_is_fitted(self, "coefs_")
y_scores = self.decision_function(X)
y_scores = ACTIVATIONS[self.out_activation_](y_scores)
return self.label_binarizer_.inverse_transform(y_scores)
@property
def partial_fit(self):
"""Fit the model to data matrix X and target y.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
The input data.
y : array-like, shape (n_samples,)
The target values.
classes : array, shape (n_classes)
Classes across all calls to partial_fit.
Can be obtained via `np.unique(y_all)`, where y_all is the
target vector of the entire dataset.
This argument is required for the first call to partial_fit
and can be omitted in the subsequent calls.
Note that y doesn't need to contain all labels in `classes`.
Returns
-------
self : returns a trained MLP model.
"""
if self.algorithm not in _STOCHASTIC_ALGOS:
raise AttributeError("partial_fit is only available for stochastic"
"optimization algorithms. %s is not"
" stochastic" % self.algorithm)
return self._partial_fit
    def _partial_fit(self, X, y, classes=None):
        # Record/validate the full class set on the first call, then delegate
        # the actual incremental update to the base implementation.
        _check_partial_fit_first_call(self, classes)
        super(MLPClassifier, self)._partial_fit(X, y)
        return self
def predict_log_proba(self, X):
"""Return the log of probability estimates.
Parameters
----------
X : array-like, shape (n_samples, n_features)
The input data.
Returns
-------
log_y_prob : array-like, shape (n_samples, n_classes)
The predicted log-probability of the sample for each class
in the model, where classes are ordered as they are in
`self.classes_`. Equivalent to log(predict_proba(X))
"""
y_prob = self.predict_proba(X)
return np.log(y_prob, out=y_prob)
def predict_proba(self, X):
"""Probability estimates.
Parameters
----------
X : {array-like, sparse ma | trix}, shape (n_samples, n_features)
| The input data.
Returns
-------
y_prob : array-like, shape (n_samples, n_classes)
The predicted probability of the sample for each class in the
model, where classes are ordered as they are in `self.classes_`.
"""
y_scores = self.decision_function(X)
if y_scores.ndim == 1:
y_scores = logistic(y_scores)
return np.vstack([1 - y_scores, y_scores]).T
else:
return softmax(y_scores)
class MLPRegressor(BaseMultilayerPerceptron, RegressorMixin):
"""Multi-layer Perceptron regressor.
This algorithm optimizes the squared-loss using l-bfgs or gradient descent.
Parameters
----------
hidden_layer_sizes : tuple, length = n_layers - 2, default (100,)
The ith element represents the number of neurons |
# -*-coding:Utf-8 -*
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le contexte éditeur EdtStats"""
from primaires.interpreteur.editeur import Editeur
from primaires.format.fonctions import contient
class EdtStats(Editeur):

    """Editor context 'stats'.

    This context allows editing the stats of a race.

    """

    def __init__(self, pere, objet=None, attribut=None):
        """Editor constructor."""
        Editeur.__init__(self, pere, objet, attribut)

    def accueil(self):
        """Welcome message: usage help plus a table of the editable stats."""
        msg = \
            "Entrez le |ent|nom|ff| de la stat, un signe |ent|/|ff| " \
            "et la valeur pour modifier une stat.\nExemple : |cmd|force / " \
            "45|ff|\n\nEntrez |ent|/|ff| pour revenir à la fenêtre parente\n\n"
        stats = self.objet
        # ASCII table: name column (20 chars) and value column (6 chars)
        msg += "+-" + "-" * 20 + "-+-" + "-" * 6 + "-+\n"
        msg += "| " + "Nom".ljust(20) + " | " + "Valeur".ljust(6) + " |\n"
        msg += "| " + " ".ljust(20) + " | " + " ".ljust(6) + " |"
        for stat in stats:
            # only stats without a 'max' are editable in this context
            if not stat.max:
                msg += "\n| |ent|" + stat.nom.ljust(20) + "|ff| | "
                msg += str(stat.defaut).rjust(6) + " |"

        return msg

    def interpreter(self, msg):
        """Interpret the message; expected form: '<stat name> / <value>'."""
        try:
            nom_stat, valeur = msg.split(" / ")
        except ValueError:
            self.pere << "|err|Syntaxe invalide.|ff|"
        else:
            # Look for the named stat (skipping non-editable ones with a max)
            stat = None
            for t_stat in self.objet:
                if not t_stat.max and contient(t_stat.nom, nom_stat):
                    stat = t_stat
                    break

            if not stat:
                self.pere << "|err|Cette stat est introuvable.|ff|"
            else:
                # Conversion and range check against the stat's margins
                try:
                    valeur = int(valeur)
                    assert valeur > 0
                    assert valeur >= stat.marge_min
                    assert valeur <= stat.marge_max
                except (ValueError, AssertionError):
                    self.pere << "|err|Valeur invalide.|ff|"
                else:
                    stat.defaut = valeur
                    stat.courante = valeur
                    self.actualiser()
|
# -*- coding: utf-8 -*-
"""
Created on Wed Feb 17 00:00:05 2016
@author: rstreet
"""
import logging
from os import path, remove
from sys import exit
from astropy.time import Time
from datetime import datetime
import glob
def start_day_log( config, log_name, version=None ):
    """Function to initialize a new log file.  The naming convention for the
    file is [log_name]_[UTC_date].log.  A new file is automatically created
    if none for the current UTC day already exist, otherwise output is appended
    to an existing file.
    This function also configures the log file to provide timestamps for
    all entries.

    Parameters:
        config    dictionary    Script configuration including parameters
                        log_directory  Directory path
                        log_root_name  Name of the log file
        log_name  string        Name applied to the logger Object
                                (not to be confused with the log_root_name)
        version   string        Optional software version string to record

    Returns:
        log       open logger object
    """

    log_file = get_log_path( config, config['log_root_name'] )

    # To capture the logging stream from the whole script, create
    # a log instance together with a console handler.
    # Set formatting as appropriate.
    log = logging.getLogger( log_name )

    # Only attach a handler the first time this logger is requested,
    # otherwise repeated calls would duplicate every log line.
    if len(log.handlers) == 0:
        log.setLevel( logging.INFO )
        file_handler = logging.FileHandler( log_file )
        file_handler.setLevel( logging.INFO )

        formatter = logging.Formatter( fmt='%(asctime)s %(message)s', \
                                    datefmt='%Y-%m-%dT%H:%M:%S' )
        file_handler.setFormatter( formatter )

        log.addHandler( file_handler )

    log.info( '\n------------------------------------------------------\n')
    # was `version != None` -- use identity comparison for None
    if version is not None:
        log.info('Software version: '+version+'\n')
    return log
def get_log_path( config, log_root_name ):
    """Function to determine the path and name of the log file, giving it
    a date-stamp in UTC.

    Parameters:
        config        dictionary    Script configuration including parameters
                            log_dir        Directory path
                            log_root_name  Name of the log file
    Returns:
        log_file      string        Path/log_name string
    """

    # ISO timestamp looks like 'YYYY-MM-DD hh:mm:ss'; keep the date part only
    date_stamp = Time.now().iso.split()[0]
    file_name = log_root_name + '_' + date_stamp + '.log'
    return path.join( config['log_dir'], file_name )
def end_day_log( log ):
    """Function to cleanly shutdown logging functions with last timestamped
    entry.

    Parameters:
        log     logger Object

    Returns:
        None
    """

    log.info( 'Processing complete\n' )
    logging.shutdown()
|
from thrift import Thrift
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from thrift.server import TServer
import threading
from wpwithin.WPWithinCallback import Client
from wpwithin.WPWithinCallback import Processor
class CallbackHandler:
    """Default handler for service-delivery callbacks from the WPWithin core.

    Each callback just prints the event details; errors are caught and
    printed so a bad callback cannot kill the server thread.
    (Python 2 source: uses print statements.)
    """

    def __init__(self):
        # placeholder for event bookkeeping; not written to by the callbacks
        self.log = {}

    def beginServiceDelivery(self, serviceId, serviceDeliveryToken, unitsToSupply):
        """Print the details of an onBeginServiceDelivery event."""
        try:
            print "event from core - onBeginServiceDelivery()"
            print "ServiceID: {0}\n".format(serviceId)
            print "UnitsToSupply: {0}\n".format(unitsToSupply)
            print "SDT.Key: {0}\n".format(serviceDeliveryToken.key)
            print "SDT.Expiry: {0}\n".format(serviceDeliveryToken.expiry)
            print "SDT.Issued: {0}\n".format(serviceDeliveryToken.issued)
            print "SDT.Signature: {0}\n".format(serviceDeliveryToken.signature)
            print "SDT.RefundOnExpiry: {0}\n".format(serviceDeliveryToken.refundOnExpiry)
        except Exception as e:
            # best-effort logging only; never propagate
            print "doBeginServiceDelivery failed: " + str(e)

    def endServiceDelivery(self, serviceId, serviceDeliveryToken, unitsReceived):
        """Print the details of an onEndServiceDelivery event."""
        try:
            print "event from core - onEndServiceDelivery()"
            print "ServiceID: {0}\n".format(serviceId)
            print "UnitsReceived: {0}\n".format(unitsReceived)
            print "SDT.Key: {0}\n".format(serviceDeliveryToken.key)
            print "SDT.Expiry: {0}\n".format(serviceDeliveryToken.expiry)
            print "SDT.Issued: {0}\n".format(serviceDeliveryToken.issued)
            print "SDT.Signature: {0}\n".format(serviceDeliveryToken.signature)
            print "SDT.RefundOnExpiry: {0}\n".format(serviceDeliveryToken.refundOnExpiry)
        except Exception as e:
            print "doEndServiceDelivery failed: " + str(e)
class EventServer:
server = None
def startServer(self, server):
print "##### STARTING WRAPPER SERVER to receive callbacks #####"
print "##### SERVER: " + str(server)
server.serve()
def stop():
if server != None:
server.setShouldStop(True)
def __init__(self, listenerHandler, hostname, port):
try:
if(listenerHandler == None):
print "Using build-in handler"
theListenerToUse = CallbackHandler()
else:
print "Using custom handler"
theListenerToUse = listenerHandler
processor = Processor(theListenerToUse)
transport = TSocket.TServerSocket(host=hostname, port=port)
tfactory = TTransport.TBufferedTransportFactory()
pfactory = TBinaryProtocol.TBinaryProtocolFactory()
#self.server = TServer.TThreadedServer(processor, transport, tfactory, pfactory)
self.server = TServer.TSimpleServer(processor, transport, tfactory, pfactory)
print "Serving the Wrapper listener, port: " + str(port)
thread = threading.Thread(target=self.startServer, args=([self.server]))
thread.daemon = True # Daemonize thread
thread.start() # Start the execution
print "##### SERVER: " + str(self.server)
print "##### SERVER: SHOULD HAVE STARTED"
print "Should have started Wrapper listener"
except Exception as e:
print "Event server setup failed: " + str(e)
|
# -*- coding: utf-8 -*-
# Copyright 2016-2017 Nate Bogdanowicz
import datetime

# Distribution/package metadata consumed by setup.py and the package itself.
__distname__ = "Instrumental-lib"
__version__ = "0.6"

__author__ = "Nate Bogdanowicz"
__email__ = "natezb@gmail.com"
__url__ = 'https://github.com/mabuchilab/Instrumental'
__license__ = "GPLv3"
# Copyright range extends to the current year at import time.
__copyright__ = "Copyright 2013-{}, {}".format(datetime.date.today().year, __author__)
|
from __future__ import absolute_import
import hashlib
import jwt
from six.moves.urllib.parse import quote
from sentry.shared_integrations.exceptions import ApiError
def percent_encode(val):
    """Percent-encode *val* for use in an Atlassian query-string hash.

    '~' is left literal and '/' is forced to '%2F', matching Atlassian's
    canonical encoding.  See https://en.wikipedia.org/wiki/Percent-encoding
    """
    return quote(val.encode("utf8", errors="replace")).replace("%7E", "~").replace("/", "%2F")


def get_query_hash(uri, method, query_params=None):
    """Compute the Atlassian Connect query-string hash (qsh) for a request.

    see
    https://developer.atlassian.com/static/connect/docs/latest/concepts/understanding-jwt.html#qsh
    """
    uri = uri.rstrip("/")
    method = method.upper()
    if query_params is None:
        query_params = {}
    sorted_query = []
    for k, v in sorted(query_params.items()):
        # don't include jwt query param
        if k != "jwt":
            if isinstance(v, list):
                # Bug fix: lists have no .join() method — the original
                # `[...].join(",")` raised AttributeError for any
                # multi-valued parameter.  Join the encoded values with ','.
                param_val = ",".join(percent_encode(val) for val in v)
            else:
                param_val = percent_encode(v)
            sorted_query.append("%s=%s" % (percent_encode(k), param_val))

    query_string = "%s&%s&%s" % (method, uri, "&".join(sorted_query))
    return hashlib.sha256(query_string.encode("utf8")).hexdigest()
def get_jira_auth_from_request(request):
    """Authenticate an incoming Jira Connect request and return its JiraTenant.

    Implements the Atlassian Connect JWT flow: extract the token, read the
    unverified 'iss' claim to locate the tenant, verify the signature with
    the tenant's shared secret, then compare the query-string hash (qsh).

    Raises ApiError when the token is missing or the qsh does not match.
    """
    # https://developer.atlassian.com/static/connect/docs/latest/concepts/authentication.html
    # Extract the JWT token from the request's jwt query
    # parameter or the authorization header.
    token = request.GET.get("jwt")
    if token is None:
        raise ApiError("No token parameter")
    # Decode the JWT token, without verification. This gives
    # you a header JSON object, a claims JSON object, and a signature.
    # NOTE(review): `verify=False` is the pre-2.x PyJWT API — confirm the
    # pinned PyJWT version still accepts this signature.
    decoded = jwt.decode(token, verify=False)
    # Extract the issuer ('iss') claim from the decoded, unverified
    # claims object. This is the clientKey for the tenant - an identifier
    # for the Atlassian application making the call
    issuer = decoded["iss"]
    # Look up the sharedSecret for the clientKey, as stored
    # by the add-on during the installation handshake
    from sentry_plugins.jira_ac.models import JiraTenant

    jira_auth = JiraTenant.objects.get(client_key=issuer)
    # Verify the signature with the sharedSecret and
    # the algorithm specified in the header's alg field.
    decoded_verified = jwt.decode(token, jira_auth.secret)
    # Verify the query has not been tampered by Creating a Query Hash
    # and comparing it against the qsh claim on the verified token.

    # TODO: probably shouldn't need to hardcode get... for post maybe
    # the secret should just be a hidden field in the form ?
    qsh = get_query_hash(request.path, "GET", request.GET)
    # qsh = get_query_hash(request.path, request.method, request.GET)
    if qsh != decoded_verified["qsh"]:
        raise ApiError("Query hash mismatch")

    return jira_auth
|
nt()
order.m_deltaNeutralOrderType = ("NONE" if (receivedInt == 0) else "MKT")
else:
# version 12 and up
order.m_deltaNeutralOrderType = self.readStr()
order.m_deltaNeutralAuxPrice = self.readDoubleMax()
if version | >= 27 and not Util.StringIsEmpty(order.m_deltaNeutralOrderType):
order.m_deltaNeutralConId = self.readInt()
order.m_deltaNeutralSettlingFirm = self.readStr()
order.m_deltaNeutralClearingAccount = self.readStr()
order.m_deltaNeutralClearingIntent = self.readStr()
| order.m_continuousUpdate = self.readInt()
if self.m_parent.serverVersion() == 26:
order.m_stockRangeLower = self.readDouble()
order.m_stockRangeUpper = self.readDouble()
order.m_referencePriceType = self.readInt()
if version >= 13:
order.m_trailStopPrice = self.readDoubleMax()
if version >= 30:
order.m_trailingPercent = self.readDoubleMax()
if version >= 14:
order.m_basisPoints = self.readDoubleMax()
order.m_basisPointsType = self.readIntMax()
contract.m_comboLegsDescrip = self.readStr()
if version >= 29:
comboLegsCount = self.readInt()
if comboLegsCount > 0:
contract.m_comboLegs = []
i = 0
while i < comboLegsCount:
conId = self.readInt()
ratio = self.readInt()
action = self.readStr()
exchange = self.readStr()
openClose = self.readInt()
shortSaleSlot = self.readInt()
designatedLocation = self.readStr()
exemptCode = self.readInt()
comboLeg = ComboLeg(conId, ratio, action, exchange, openClose, shortSaleSlot, designatedLocation, exemptCode)
contract.m_comboLegs.append(comboLeg)
i += 1
orderComboLegsCount = self.readInt()
if orderComboLegsCount > 0:
order.m_orderComboLegs = []
i = 0
while i < orderComboLegsCount:
price = self.readDoubleMax()
orderComboLeg = OrderComboLeg(price)
order.m_orderComboLegs.append(orderComboLeg)
i += 1
if version >= 26:
smartComboRoutingParamsCount = self.readInt()
if smartComboRoutingParamsCount > 0:
order.m_smartComboRoutingParams = []
i = 0
while i < smartComboRoutingParamsCount:
tagValue = TagValue()
tagValue.m_tag = self.readStr()
tagValue.m_value = self.readStr()
order.m_smartComboRoutingParams.append(tagValue)
i += 1
if version >= 15:
if version >= 20:
order.m_scaleInitLevelSize = self.readIntMax()
order.m_scaleSubsLevelSize = self.readIntMax()
else:
# int notSuppScaleNumComponents =
self.readIntMax()
order.m_scaleInitLevelSize = self.readIntMax()
order.m_scalePriceIncrement = self.readDoubleMax()
if version >= 28 and order.m_scalePriceIncrement > 0.0 and order.m_scalePriceIncrement != Double.MAX_VALUE:
order.m_scalePriceAdjustValue = self.readDoubleMax()
order.m_scalePriceAdjustInterval = self.readIntMax()
order.m_scaleProfitOffset = self.readDoubleMax()
order.m_scaleAutoReset = self.readBoolFromInt()
order.m_scaleInitPosition = self.readIntMax()
order.m_scaleInitFillQty = self.readIntMax()
order.m_scaleRandomPercent = self.readBoolFromInt()
if version >= 24:
order.m_hedgeType = self.readStr()
if not Util.StringIsEmpty(order.m_hedgeType):
order.m_hedgeParam = self.readStr()
if version >= 25:
order.m_optOutSmartRouting = self.readBoolFromInt()
if version >= 19:
order.m_clearingAccount = self.readStr()
order.m_clearingIntent = self.readStr()
if version >= 22:
order.m_notHeld = self.readBoolFromInt()
if version >= 20:
if self.readBoolFromInt():
underComp = UnderComp()
underComp.m_conId = self.readInt()
underComp.m_delta = self.readDouble()
underComp.m_price = self.readDouble()
contract.m_underComp = underComp
if version >= 21:
order.m_algoStrategy = self.readStr()
if not Util.StringIsEmpty(order.m_algoStrategy):
algoParamsCount = self.readInt()
if algoParamsCount > 0:
order.m_algoParams = []
i = 0
while i < algoParamsCount:
tagValue = TagValue()
tagValue.m_tag = self.readStr()
tagValue.m_value = self.readStr()
order.m_algoParams.append(tagValue)
i += 1
orderState = OrderState()
if version >= 16:
order.m_whatIf = self.readBoolFromInt()
orderState.m_status = self.readStr()
orderState.m_initMargin = self.readStr()
orderState.m_maintMargin = self.readStr()
orderState.m_equityWithLoan = self.readStr()
orderState.m_commission = self.readDoubleMax()
orderState.m_minCommission = self.readDoubleMax()
orderState.m_maxCommission = self.readDoubleMax()
orderState.m_commissionCurrency = self.readStr()
orderState.m_warningText = self.readStr()
self.eWrapper().openOrder(order.m_orderId, contract, order, orderState)
elif msgId == self.NEXT_VALID_ID:
version = self.readInt()
orderId = self.readInt()
self.eWrapper().nextValidId(orderId)
elif msgId == self.SCANNER_DATA:
contract = ContractDetails()
version = self.readInt()
tickerId = self.readInt()
numberOfElements = self.readInt()
ctr = 0
while ctr < numberOfElements:
rank = self.readInt()
if version >= 3:
contract.m_summary.m_conId = self.readInt()
contract.m_summary.m_symbol = self.readStr()
contract.m_summary.m_secType = self.readStr()
contract.m_summary.m_expiry = self.readStr()
contract.m_summary.m_strike = self.readDouble()
contract.m_summary.m_right = self.readStr()
contract.m_summary.m_exchange = self.readStr()
contract.m_summary.m_currency = self.readStr()
contract.m_summary.m_localSymbol = self.readStr()
contract.m_marketName = self.readStr()
contract.m_tradingClass = self.readStr()
distance = self.readStr()
benchmark = self.readStr()
projection = self.readStr()
legsStr = None
if version >= 2:
legsStr = self.readStr()
self.eWrapper().scannerData(tickerId, rank, contract, distance, benchmark, projection, legsStr)
ctr += 1
self.eWrapper().scannerDataEnd(tickerId)
elif msgId == self.CONT |
#!/usr/bin/env python
from | setuptools import setup
# Packaging metadata for the `features` distribution (install via setuptools).
setup(name='features',
      version='0.1',
      description='A collection of feature extraction/selection algorithms',
      author='Charanpal Dhanjal',
      author_email='charanpal@gmail.com',
      url='https://github.com/charanpald/features',
      install_requires=['numpy>=1.5.0', 'scipy>=0.7.1'],
      platforms=["OS Independent"],
      packages=['features', 'kernel', "features.test", "kernel.test"],
      )
|
#The MIT License (MIT)
#Copyright (c) 2015-2016 mh4x0f P0cL4bs Team
#Permission is hereby granted, free of charge, to any person obtaining a copy of
#this software and associated documentation files (the "Software"), to deal in
#the Software without restriction, including without limitation the rights to
#use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
#the Software, and to permit persons to whom the Software is furnished to do so,
#subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
#FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
#COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
#IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
#CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from PyQt4.QtGui import *
from re import search
from os import system,geteuid,getuid,popen
from Core.Settings import frm_Settings
from Modules.utils import Refactor,set_monitor_mode
from subprocess import Popen,PIPE
from scapy.all import *
class frm_Probe(QMainWindow):
    """Top-level window hosting the probe-request monitor widget."""
    def __init__(self, parent=None):
        super(frm_Probe, self).__init__(parent)
        # all of the UI lives in frm_PMonitor; this window just wraps it
        self.form_widget = frm_PMonitor(self)
        self.setCentralWidget(self.form_widget)
        self.setWindowIcon(QIcon('rsc/icon.ico'))
class frm_PMonitor(QWidget):
    """Widget that sniffs 802.11 probe requests and lists sender/SSID."""
    def __init__(self, parent=None):
        super(frm_PMonitor, self).__init__(parent)
        self.Main = QVBoxLayout()
        self.setWindowTitle("Probe Request wifi Monitor")
        self.setWindowIcon(QIcon('rsc/icon.ico'))
        self.config = frm_Settings()
        # monitor-mode interface name persisted in the XML settings
        self.interface = str(self.config.xmlSettings("interface", "monitor_mode", None, False))
        self.loadtheme(self.config.XmlThemeSelected())
        self.setupGUI()
    def loadtheme(self, theme):
        # apply the Qt stylesheet selected in the settings
        sshFile = ("Core/%s.qss" % (theme))
        with open(sshFile, "r") as fh:
            self.setStyleSheet(fh.read())
    def setupGUI(self):
        """Build the form: adapter selector, result list, time choice, scan button."""
        self.form0 = QFormLayout()
        self.list_probe = QListWidget()
        self.list_probe.setFixedHeight(300)
        self.btn_scan = QPushButton("Scan")
        self.btn_scan.clicked.connect(self.Pro_request)
        self.btn_scan.setIcon(QIcon("rsc/network.png"))
        self.get_placa = QComboBox(self)
        n = Refactor.get_interfaces()['all']
        for i, j in enumerate(n):
            # only offer wireless adapters (wlanX)
            if search("wlan", j):
                self.get_placa.addItem(n[i])
        self.time_scan = QComboBox(self)
        self.time_scan.addItems(["10s", "20s", "30s"])
        self.form0.addRow("Network Adapter: ", self.get_placa)
        self.form0.addRow(self.list_probe)
        self.form0.addRow("Time Scan: ", self.time_scan)
        self.form1 = QFormLayout()
        self.form1.addRow(self.btn_scan)
        self.Main.addLayout(self.form0)
        self.Main.addLayout(self.form1)
        self.setLayout(self.Main)
    def Pro_request(self):
        """Start sniffing probe requests on a monitor-mode interface."""
        # NOTE: the "time" choice actually maps to a packet-count budget
        # for scapy's sniff(count=...), not seconds
        self.time_control = None
        if self.time_scan.currentText() == "10s": self.time_control = 300
        elif self.time_scan.currentText() == "20s": self.time_control = 400
        elif self.time_scan.currentText() == "30s": self.time_control = 600
        if self.get_placa.currentText() == "":
            QMessageBox.information(self, "Network Adapter", 'Network Adapter Not found try again.')
            return
        # reuse an interface that is already in monitor mode, if any
        out = popen('iwconfig').readlines()
        for i in out:
            if search('Mode:Monitor', i):
                self.interface = i.split()[0]
                sniff(iface=self.interface, prn=self.sniff_probe, count=self.time_control)
                return
        # otherwise switch the selected adapter into monitor mode first
        set_monitor_mode(self.get_placa.currentText()).setEnable()
        sniff(iface=self.interface, prn=self.sniff_probe, count=self.time_control)
    def sniff_probe(self, p):
        # scapy callback: record source MAC and requested SSID per packet
        if (p.haslayer(Dot11ProbeReq)):
            mac_address = (p.addr2)
            ssid = p[Dot11Elt].info
            ssid = ssid.decode('utf-8', 'ignore')
            if ssid == "": ssid = "null"
            self.list_probe.addItem("[:] Probe Request from %s for SSID '%s'" % (mac_address, ssid))
t self.replacement_db_product.end_of_sale_date or \
self.replacement_db_product.current_lifecycle_states == [Product.NO_EOL_ANNOUNCEMENT_STR]:
# product is not EoL and therefore valid
return True
else:
return False
else:
return True
return False
def get_valid_replacement_product(self):
    """Return the replacement product for this migration source, or None
    when the replacement is not (or no longer) valid."""
    if not self.is_valid_replacement():
        return None
    return self.replacement_db_product
def clean(self):
    """Reject a duplicate (product, migration_source) combination.

    Required to get a proper validation working within the Django admin
    (unique together with the required FK values).
    """
    if self.product_id is None or self.migration_source_id is None:
        return
    # validate that this combination does not already exist (issue when
    # using django admin)
    duplicates = ProductMigrationOption.objects.exclude(pk=self.pk).filter(
        product__id=self.product_id,
        migration_source__id=self.migration_source_id
    )
    if duplicates.count() > 0:
        # one of the fields must be changed, therefore view the error on
        # both attributes
        msg = "Product Migration Option with this Product ID and Migration Source already exists"
        raise ValidationError({"product_id": msg, "migration_source": msg})
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
    """Run full model validation before every save."""
    self.full_clean()
    super().save(
        force_insert=force_insert,
        force_update=force_update,
        using=using,
        update_fields=update_fields,
    )
def __str__(self):
    """Human-readable label based on the replaced product's ID."""
    label = "replacement option for %s" % self.product.product_id
    return label
class Meta:
    """Model options for ProductMigrationOption."""
    # only a single migration option is allowed per migration source
    unique_together = ["product", "migration_source"]
    ordering = ["-migration_source__preference"]  # first element is from the source that is most preferred
    verbose_name = "Product Migration Option"
    verbose_name_plural = "Product Migration Options"
class ProductList(models.Model):
    """Named, vendor-scoped list of Product IDs maintained as free text.

    The raw ``string_product_list`` is normalized on save (split on
    line breaks/semicolons, de-duplicated, sorted) and a SHA-256 hash
    over name/list/vendor is stored for cheap change detection.
    """
    name = models.CharField(
        max_length=2048,
        unique=True,
        help_text="unique name for the product list",
        verbose_name="Product List Name"
    )
    vendor = models.ForeignKey(
        Vendor,
        help_text="vendor for the product list (only products from a single vendor can be used within a product list)",
        verbose_name="Vendor",
        on_delete=models.CASCADE,
        # auto-discovery based on the list entries is implemented as part of the save function
        # required only for data migration
        null=True,
        blank=False
    )
    string_product_list = models.TextField(
        help_text="Product IDs separated by word wrap or semicolon",
        verbose_name="Unstructured Product IDs separated by line break"
    )
    description = models.TextField(
        max_length=4096,
        blank=True,
        null=False,
        verbose_name="Description",
        help_text="short description what's part of this Product List (markdown and/or HTML)"
    )
    version_note = models.TextField(
        max_length=16384,
        blank=True,
        null=False,
        verbose_name="Version note",
        help_text="some version information for the product list (markdown and/or HTML)"
    )
    update_date = models.DateField(
        auto_now=True
    )
    update_user = models.ForeignKey(
        User,
        related_name='product_lists',
        on_delete=models.CASCADE
    )
    # SHA-256 over "name:string_product_list:vendor_id", set in save()
    hash = models.CharField(
        max_length=64,
        null=False,
        blank=True,
        default=""
    )

    def get_string_product_list_as_list(self):
        """Return the product IDs as a sorted list of stripped strings."""
        result = []
        for line in self.string_product_list.splitlines():
            result += line.split(";")
        return sorted([e.strip() for e in result])

    def get_product_list_objects(self):
        """Resolve the product ID strings to Product rows of this vendor."""
        q_filter = Q()
        for product_id in self.get_string_product_list_as_list():
            q_filter.add(Q(product_id=product_id, vendor_id=self.vendor_id), Q.OR)
        return Product.objects.filter(q_filter).prefetch_related("vendor", "product_group")

    def full_clean(self, exclude=None, validate_unique=True):
        """Validate the product list string together with the selected vendor."""
        result = super().full_clean(exclude, validate_unique)
        # validation between fields
        self.__discover_vendor_based_on_products()
        if self.vendor is not None:
            validate_product_list_string(self.string_product_list, self.vendor.id)
        else:
            raise ValidationError("vendor not set")
        return result

    def save(self, **kwargs):
        """Normalize the list, compute the hash and persist."""
        self.__discover_vendor_based_on_products()
        self.full_clean()
        # normalize value in database, remove duplicates and sort the list
        values = self.get_string_product_list_as_list()
        self.string_product_list = "\n".join(sorted(list(set(values))))
        # calculate hash value for Product check linking on the queries
        s = "%s:%s:%s" % (self.name, self.string_product_list, self.vendor_id)
        self.hash = hashlib.sha256(s.encode()).hexdigest()
        super(ProductList, self).save(**kwargs)

    def __discover_vendor_based_on_products(self):
        # discover the vendor based on the products (if not set; used
        # primarily for data migration)
        if self.vendor is None:
            product_id_list = self.get_string_product_list_as_list()
            if len(product_id_list) > 0:
                # BUG FIX: .first() returns None when no product matches,
                # which previously raised AttributeError here; leaving the
                # vendor unset lets full_clean() report "vendor not set".
                product = Product.objects.filter(product_id=product_id_list[-1]).first()
                if product is not None:
                    self.vendor = product.vendor

    def __str__(self):
        return self.name

    class Meta:
        verbose_name = "Product List"
        verbose_name_plural = "Product Lists"
        ordering = ('name',)
class UserProfileManager(models.Manager):
    """Manager that supports natural-key (username) lookups for serialization."""
    def get_by_natural_key(self, username):
        user = User.objects.get(username=username)
        return self.get(user=user)
class UserProfile(models.Model):
    """Per-user settings, attached 1:1 to the Django auth user."""
    objects = UserProfileManager()
    user = models.OneToOneField(
        User,
        related_name='profile',
        on_delete=models.CASCADE,
        unique=True
    )
    # vendor pre-selected in all vendor specific views
    preferred_vendor = models.ForeignKey(
        Vendor,
        blank=False,
        null=False,
        default=1,
        verbose_name="preferred vendor",
        help_text="vendor that is selected by default in all vendor specific views",
        on_delete=models.SET_DEFAULT
    )
    regex_search = models.BooleanField(
        default=False,
        verbose_name="use regular expressions in search fields",
        help_text="Use regular expression in any search field (fallback to simple search if no valid "
                  "regular expression is used)"
    )
    choose_migration_source = models.BooleanField(
        default=False,
        verbose_name="choose Migration Source in Product Check",
        help_text="specify the Migration Source for a Product Check (don't use the preferred migration path)"
    )

    def natural_key(self):
        # serialize by username instead of the numeric primary key
        return self.user.username

    def __str__(self):
        return "User Profile for %s" % self.user.username
class ProductCheckInputChunks(models.Model):
    """chunks for the input product IDs field in the product check"""
    # position of this chunk within the full input (reassembled in order)
    sequence = models.PositiveIntegerField()
    input_product_ids_chunk = models.CharField(
        max_length=65536,
        null=False,
        blank=True
    )
    product_check = models.ForeignKey(
        "ProductCheck",
        on_delete=models.CASCADE
    )

    class Meta:
        # chunks are concatenated in sequence order
        ordering = ['sequence']
class ProductCheck(models.Model):
    """A named product check; input IDs are stored via ProductCheckInputChunks."""
    name = models.CharField(
        verbose_name="Name",
        help_text="Name to identify the Product Check",
        max_length=256
    )
    migration_source = models.ForeignKey(
        ProductMigrationSource,
        verbose_name="migration source",
        help_text="migration source to identify the replacement options, if not selected the preferred migration path "
                  "is used",
        null=True,
        blank=True,
        on_delete=models.CASCADE
    )
|
# ----------------------------------------------------------------------------
# Copyright (c) 2016-2021, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
from qiime2.plugin import SemanticType
from ..plugin_setup import plugin
from . import AlphaDiversityDirectoryFormat
SampleData = SemanticType('SampleData', field_names='type')
AlphaDiversity = SemanticType('AlphaDiversity',
variant_of=SampleData.field['type | '])
plugin.register_semantic_types(SampleData, AlphaDiversity)
plugin.register_semantic_type_to_format(
SampleData[AlphaDiversity],
artifact_format=AlphaD | iversityDirectoryFormat
)
|
le:
self.client.force_login(self.admin_user)
response = self.client.post(reverse('oppia:upload'),
{'course_file': course_file})
self.assertEqual(200, response.status_code)
course_log = CoursePublishingLog.objects.latest('log_date')
self.assertEqual("no_module_xml", course_log.action)
@pytest.mark.xfail(reason="works on local but not on github workflows")
def test_corrupt_course(self):
    """A corrupt course zip returns 200 and is logged as "invalid_zip"."""
    with open(self.corrupt_course_zip, 'rb') as course_file:
        self.client.force_login(self.admin_user)
        response = self.client.post(reverse('oppia:upload'),
                                    {'course_file': course_file})
    self.assertEqual(200, response.status_code)
    # BUG FIX: the previous bare `self.assertRaises(BadZipfile)` call was a
    # no-op (without a callable it only returns an unused context manager
    # and asserts nothing), so it was removed; the log entry below is the
    # real assertion of the invalid-zip behaviour.
    course_log = CoursePublishingLog.objects.latest('log_date')
    self.assertEqual("invalid_zip", course_log.action)
@pytest.mark.xfail(reason="works on local but not on github workflows")
def test_no_sub_dir(self):
    """A course zip without the expected sub-directory is logged as invalid_zip."""
    self.client.force_login(self.admin_user)
    with open(self.course_no_sub_dir, 'rb') as course_file:
        response = self.client.post(reverse('oppia:upload'),
                                    {'course_file': course_file})
    self.assertEqual(200, response.status_code)
    log_entry = CoursePublishingLog.objects.latest('log_date')
    self.assertEqual("invalid_zip", log_entry.action)
@pytest.mark.xfail(reason="works on local but not on github workflows")
def test_newer_version_exists(self):
    """Uploading an older course version is refused and logged accordingly."""
    self.client.force_login(self.admin_user)
    with open(self.course_old_version, 'rb') as course_file:
        response = self.client.post(reverse('oppia:upload'),
                                    {'course_file': course_file})
    self.assertEqual(200, response.status_code)
    log_entry = CoursePublishingLog.objects.latest('log_date')
    self.assertEqual("newer_version_exists", log_entry.action)
@pytest.mark.xfail(reason="works on local but not on github workflows")
def test_course_no_activities(self):
    """A course without any activities is rejected and logged."""
    self.client.force_login(self.admin_user)
    with open(self.course_no_activities, 'rb') as course_file:
        response = self.client.post(reverse('oppia:upload'),
                                    {'course_file': course_file})
    self.assertEqual(200, response.status_code)
    log_entry = CoursePublishingLog.objects.latest('log_date')
    self.assertEqual("no_activities", log_entry.action)
@pytest.mark.xfail(reason="works on local but not on github workflows")
def test_course_with_custom_points(self):
    """Uploading a course with custom gamification points creates the
    expected course/media/activity gamification events."""
    # snapshot event counts so the deltas can be asserted after upload
    course_game_events_start = CourseGamificationEvent. \
        objects.all().count()
    media_game_events_start = MediaGamificationEvent. \
        objects.all().count()
    activity_game_events_start = ActivityGamificationEvent. \
        objects.all().count()
    with open(self.course_with_custom_points, 'rb') as course_file:
        self.client.force_login(self.admin_user)
        response = self.client.post(reverse('oppia:upload'),
                                    {'course_file': course_file})
    course = Course.objects.latest('created_date')
    # a successful upload redirects to step 2 of the wizard
    self.assertRedirects(response,
                         reverse('oppia:upload_step2',
                                 args=[course.id]),
                         302,
                         200)
    # fixture ships 10 course, 4 media and 1 activity custom point events
    course_game_events_end = CourseGamificationEvent.objects.all().count()
    self.assertEqual(course_game_events_start+10, course_game_events_end)
    media_game_events_end = MediaGamificationEvent.objects.all().count()
    self.assertEqual(media_game_events_start+4, media_game_events_end)
    activity_game_events_end = ActivityGamificationEvent. \
        objects.all().count()
    self.assertEqual(activity_game_events_start+1,
                     activity_game_events_end)
@pytest.mark.xfail(reason="works on local but not on github workflows")
def test_course_with_custom_points_updated(self):
    """Re-uploading an updated course must replace, not duplicate, the
    custom gamification events (counts stay stable)."""
    with open(self.course_with_custom_points, 'rb') as course_file:
        self.client.force_login(self.admin_user)
        response = self.client.post(reverse('oppia:upload'),
                                    {'course_file': course_file})
    course = Course.objects.latest('created_date')
    self.assertRedirects(response,
                         reverse('oppia:upload_step2',
                                 args=[course.id]),
                         302,
                         200)
    # counts after the first upload -- the second upload must not change them
    course_game_events_start = CourseGamificationEvent. \
        objects.all().count()
    media_game_events_start = MediaGamificationEvent. \
        objects.all().count()
    activity_game_events_start = ActivityGamificationEvent. \
        objects.all().count()
    # reset course version no to avoid issue with newer version being
    # reported in the test
    update_course = Course.objects.get(shortname='ref-1')
    update_course.version = 0
    update_course.save()
    with open(self.course_with_custom_points_updated, 'rb') as course_file:
        self.client.force_login(self.admin_user)
        response = self.client.post(reverse('oppia:upload'),
                                    {'course_file': course_file})
    course = Course.objects.latest('created_date')
    self.assertRedirects(response,
                         reverse('oppia:upload_step2',
                                 args=[course.id]),
                         302,
                         200)
    course_game_events_end = CourseGamificationEvent.objects.all().count()
    self.assertEqual(course_game_events_start, course_game_events_end)
    media_game_events_end = MediaGamificationEvent.objects.all().count()
    self.assertEqual(media_game_events_start, media_game_events_end)
    activity_game_events_end = ActivityGamificationEvent. \
        objects.all().count()
    self.assertEqual(activity_game_events_start, activity_game_events_end)
@pytest.mark.xfail(reason="works on local but not on github workflows")
def test_update_quizprops(self):
self.client.force_login(self.admin_user)
with open(self.course_with_quizprops, 'rb') as course_file:
response = self.client.post(reverse('oppia:upload'), {'course_file': course_file})
course = Course.objects.get(shortname='quizprops_course')
self.assertRedirects(response,
reverse('oppia:upload_step2',
args=[course.id]),
302,
200)
current_quizzes = Activity.objects.filter(section__course=course, type=Activity.QUIZ).values_list('digest', flat=True)
quizzes = Quiz.objects.filter(quizprops__name='digest', quizprops__value__in=current_quizzes)
quiz_questions = Question.objects.filter(quizquestion__quiz__in=quizzes)
quiz_props = QuizProps.objects.filter(quiz__in=quizzes)
question_props = QuestionProps.objects.filter(question__in=quiz_questions)
self.assertEqual(1, quizzes.count())
self.assertEqual(2, quiz_questions.count())
self.assertEqual(8, quiz_props.count())
self.assertEqual(4, question_props.count())
self.assertEqual(QuizProps.objects.filter(name='moodle_quiz_id', quiz=quizzes.first()).first().value, '43504')
# Lower the version so that we can upload a new one regardless of the current date
course.version = 100
course.save()
with open(self.course_with_updated_quizprops, 'rb') as course_file:
response = self.client.post(reverse(' |
ret_value = self._test_is_share_eligible(fake_capacity_info,
fake_volume_size)
self.assertEqual(ret_value, True)
def test_share_volume_above_oversub_ratio(self):
    """A volume pushing usage past the oversubscription ratio is rejected."""
    capacity_info = (4, 4, 7)
    volume_size = 2
    eligible = self._test_is_share_eligible(capacity_info, volume_size)
    self.assertEqual(eligible, False)
def test_share_reserved_above_oversub_ratio(self):
    """A share whose reservations already exceed the ratio is rejected."""
    capacity_info = (4, 4, 10)
    volume_size = 1
    eligible = self._test_is_share_eligible(capacity_info, volume_size)
    self.assertEqual(eligible, False)
def test_parse_options(self):
    """parse_options splits share opts into a flag list and an option dict."""
    opt_list, opt_dict = self._smbfs_driver.parse_options(
        self._FAKE_SHARE_OPTS)
    self.assertEqual(([], self._FAKE_OPTIONS_DICT), (opt_list, opt_dict))
def test_parse_credentials(self):
    """`user=DOMAIN\\name` is rewritten to `username=name` (domain dropped)."""
    smb_options = r'-o user=MyDomain\Administrator,noperm'
    parsed_flags = self._smbfs_driver.parse_credentials(smb_options)
    self.assertEqual('-o username=Administrator,noperm', parsed_flags)
def test_get_volume_path(self):
    """local_path appends the detected image format as the file extension."""
    self._smbfs_driver.get_volume_format = mock.Mock(return_value='vhd')
    self._smbfs_driver._local_volume_dir = mock.Mock(
        return_value=self._FAKE_MNT_POINT)
    result = self._smbfs_driver.local_path(self._FAKE_VOLUME)
    self.assertEqual(self._FAKE_VOLUME_PATH + '.vhd', result)
def test_initialize_connection(self):
    """initialize_connection reports share, format, name and mount base."""
    # stub out all driver helpers the connection info is assembled from
    self._smbfs_driver.get_active_image_from_info = mock.Mock(
        return_value=self._FAKE_VOLUME_NAME)
    self._smbfs_driver._get_mount_point_base = mock.Mock(
        return_value=self._FAKE_MNT_BASE)
    self._smbfs_driver.shares = {self._FAKE_SHARE: self._FAKE_SHARE_OPTS}
    self._smbfs_driver._qemu_img_info = mock.Mock(
        return_value=mock.Mock(file_format='raw'))
    fake_data = {'export': self._FAKE_SHARE,
                 'format': 'raw',
                 'name': self._FAKE_VOLUME_NAME,
                 'options': self._FAKE_SHARE_OPTS}
    expected = {
        'driver_volume_type': 'smbfs',
        'data': fake_data,
        'mount_point_base': self._FAKE_MNT_BASE}
    ret_val = self._smbfs_driver.initialize_connection(
        self._FAKE_VOLUME, None)
    self.assertEqual(expected, ret_val)
def _test_extend_volume(self, extend_failed=False, image_format='raw'):
    """Template for the extend-volume tests.

    Raw images are resized in place; VHD/VHDX images are converted to a
    temporary raw file, resized, then converted back.  When the resulting
    file size does not match, ExtendVolumeError is expected.
    NOTE(review): contextlib.nested is Python 2 only -- this module
    presumably still targets py2; confirm before porting.
    """
    drv = self._smbfs_driver
    drv.local_path = mock.Mock(
        return_value=self._FAKE_VOLUME_PATH)
    drv._check_extend_volume_support = mock.Mock(
        return_value=True)
    # size check result drives the failure path
    drv._is_file_size_equal = mock.Mock(
        return_value=not extend_failed)
    drv._qemu_img_info = mock.Mock(
        return_value=mock.Mock(file_format=image_format))
    with contextlib.nested(
            mock.patch.object(image_utils, 'resize_image'),
            mock.patch.object(image_utils, 'convert_image')) as (
            fake_resize, fake_convert):
        if extend_failed:
            self.assertRaises(exception.ExtendVolumeError,
                              drv._extend_volume,
                              self._FAKE_VOLUME, mock.sentinel.new_size)
        else:
            drv._extend_volume(
                self._FAKE_VOLUME,
                mock.sentinel.new_size)
            if image_format in (drv._DISK_FORMAT_VHDX,
                                drv._DISK_FORMAT_VHD_LEGACY):
                # VHD/VHDX path: resize happens via a temporary raw copy
                fake_tmp_path = self._FAKE_VOLUME_PATH + '.tmp'
                fake_convert.assert_any_call(self._FAKE_VOLUME_PATH,
                                             fake_tmp_path, 'raw')
                fake_resize.assert_called_once_with(
                    fake_tmp_path, mock.sentinel.new_size)
                fake_convert.assert_any_call(fake_tmp_path,
                                             self._FAKE_VOLUME_PATH,
                                             image_format)
            else:
                fake_resize.assert_called_once_with(
                    self._FAKE_VOLUME_PATH, mock.sentinel.new_size)
def test_extend_volume(self):
    # happy path: raw image, resized in place
    self._test_extend_volume()
def test_extend_volume_failed(self):
    # resulting size mismatch must raise ExtendVolumeError
    self._test_extend_volume(extend_failed=True)
def test_extend_vhd_volume(self):
    # 'vpc' (legacy VHD) takes the convert-resize-convert path
    self._test_extend_volume(image_format='vpc')
def _test_check_extend_support(self, has_snapshots=False,
                               is_eligible=True):
    """Template for _check_extend_volume_support tests.

    Volumes with snapshots cannot be extended (InvalidVolume); shares
    without enough eligible space raise ExtendVolumeError; otherwise the
    share eligibility check must be consulted exactly once.
    """
    self._smbfs_driver.local_path = mock.Mock(
        return_value=self._FAKE_VOLUME_PATH)
    # a differing active file means the volume has snapshots
    if has_snapshots:
        active_file_path = self._FAKE_SNAPSHOT_PATH
    else:
        active_file_path = self._FAKE_VOLUME_PATH
    self._smbfs_driver.get_active_image_from_info = mock.Mock(
        return_value=active_file_path)
    self._smbfs_driver._is_share_eligible = mock.Mock(
        return_value=is_eligible)
    if has_snapshots:
        self.assertRaises(exception.InvalidVolume,
                          self._smbfs_driver._check_extend_volume_support,
                          self._FAKE_VOLUME, 2)
    elif not is_eligible:
        self.assertRaises(exception.ExtendVolumeError,
                          self._smbfs_driver._check_extend_volume_support,
                          self._FAKE_VOLUME, 2)
    else:
        self._smbfs_driver._check_extend_volume_support(
            self._FAKE_VOLUME, 2)
        self._smbfs_driver._is_share_eligible.assert_called_once_with(
            self._FAKE_SHARE, 1)
def test_check_extend_support(self):
    # happy path: no snapshots, share eligible
    self._test_check_extend_support()
def test_check_extend_volume_with_snapshots(self):
    # snapshots present -> InvalidVolume expected
    self._test_check_extend_support(has_snapshots=True)
def test_check_extend_volume_uneligible_share(self):
    # share lacks space -> ExtendVolumeError expected
    self._test_check_extend_support(is_eligible=False)
def test_create_volume_from_in_use_snapshot(self):
    """Creating a volume from an attached (in-use) snapshot must fail."""
    snapshot = {'status': 'in-use'}
    self.assertRaises(
        exception.InvalidSnapshot,
        self._smbfs_driver.create_volume_from_snapshot,
        self._FAKE_VOLUME, snapshot)
def test_copy_volume_from_snapshot(self):
    """_copy_volume_from_snapshot converts the snapshot's backing file to
    the new volume path and extends it to the requested size."""
    drv = self._smbfs_driver
    # info file maps the snapshot id to its on-share file name
    fake_volume_info = {self._FAKE_SNAPSHOT_ID: 'fake_snapshot_file_name'}
    fake_img_info = mock.MagicMock()
    fake_img_info.backing_file = self._FAKE_VOLUME_NAME
    drv.get_volume_format = mock.Mock(
        return_value='raw')
    drv._local_path_volume_info = mock.Mock(
        return_value=self._FAKE_VOLUME_PATH + '.info')
    drv._local_volume_dir = mock.Mock(
        return_value=self._FAKE_MNT_POINT)
    drv._read_info_file = mock.Mock(
        return_value=fake_volume_info)
    drv._qemu_img_info = mock.Mock(
        return_value=fake_img_info)
    # destination path deliberately differs from the source path
    drv.local_path = mock.Mock(
        return_value=self._FAKE_VOLUME_PATH[:-1])
    drv._extend_volume = mock.Mock()
    drv._set_rw_permissions_for_all = mock.Mock()
    with mock.patch.object(image_utils, 'convert_image') as (
            fake_convert_image):
        drv._copy_volume_from_snapshot(
            self._FAKE_SNAPSHOT, self._FAKE_VOLUME,
            self._FAKE_VOLUME['size'])
        drv._extend_volume.assert_called_once_with(
            self._FAKE_VOLUME, self._FAKE_VOLUME['size'])
        fake_convert_image.assert_called_once_with(
            self._FAKE_VOLUME_PATH, self._FAKE_VOLUME_PATH[:-1], 'raw')
def test_ensure_mounted(self):
self._smbfs_driver.shares = {self._FAKE_SHARE: self._FAKE_SHARE_OPTS}
self._smbfs_driver._ensure_share_mounted(self._FAKE_SHARE)
self._smbf |
# -*- coding: utf-8 -*-
import pandas as pd
import sys
from builtins import str as text
from utils import find_zipcode, str2date
header | _mapping = {
'origin': 'ORIGIN',
'company_name': 'LABO',
'lastname_firstname': 'BENEF_PS_QUALITE_NOM_PRENOM',
'address': 'BENEF_PS_ADR',
'job': 'BENEF_PS_QUALIFICATION',
'rpps': 'BENEF_PS_RPPS',
'value': 'DECL_AVANT_MONTANT',
'date': 'DECL_AVANT_DATE',
'kind': 'DECL_AV | ANT_NATURE',
'BENEF_PS_CODEPOSTAL': 'BENEF_PS_CODEPOSTAL'
}
input_filename = sys.argv[1]
output_filename = sys.argv[2]
df = pd.read_csv(input_filename, encoding='utf-8')
df['lastname_firstname'] = df['name'] + ' ' + df['firstname']
df['origin'] = 'Pharmacien'
df['date'] = df['date'].apply(str2date)
df['BENEF_PS_CODEPOSTAL'] = df['address'].apply(find_zipcode)
for origin, target in header_mapping.items():
df[target] = df[origin]
df[target] = df[target].apply(text).apply(lambda s: s.replace(',', '- ').replace('"', ''))
df[list(header_mapping.values())].to_csv(output_filename, index=False, encoding='utf-8')
|
omico',
'cbElencoPrezzi',
'cbComputoMetrico',
'cbCostiManodopera',
'cbQuadroEconomico',
)
def loadExportSettings(oDoc):
    """Return the export settings for *oDoc*.

    Settings stored in the document win; fall back to the global LeenO
    configuration when the document block is missing or empty.
    """
    cfg = LeenoConfig.Config()
    stored = DocUtils.loadDataBlock(oDoc, 'ImpostazioniExport')
    if stored is not None and len(stored) != 0:
        return stored
    return cfg.readBlock('ImpostazioniExport', True)
def storeExportSettings(oDoc, es):
    """Persist export settings *es* both in the document and globally."""
    cfg = LeenoConfig.Config()
    DocUtils.storeDataBlock(oDoc, 'ImpostazioniExport', es)
    cfg.writeBlock('ImpostazioniExport', es, True)
def prepareCover(oDoc, nDoc, docSubst):
    '''
    Prepare the cover page, if one is configured.

    Copies the configured cover sheet (and its page style / print areas)
    into the nDoc document and fills in its placeholder data.
    Returns True if a cover was found and copied, False otherwise.

    docSubst is a dictionary with additional variable replacements,
    mostly used for [PAGINE], [OGGETTO] and [NUMERO_DOCUMENTO]
    which are document dependent data.
    '''
    # load print settings and look for cover
    data, covers = LeenoSettings.loadPrintSettings(oDoc)
    fileCopertine = data.get('fileCopertine')
    copertina = data.get('copertina')
    # no covers file / cover name configured -> nothing to do
    if fileCopertine is None or copertina is None:
        return False
    if fileCopertine == '' or copertina == '':
        return False
    # open the covers document; bail out if it can't be loaded
    cDoc = DocUtils.loadDocument(fileCopertine)
    if cDoc is None:
        return False
    if not copertina in cDoc.Sheets:
        # configured sheet is missing from the covers file
        cDoc.close(False)
        del cDoc
        return False
    # we need to copy page style too
    sheet = cDoc.Sheets[copertina]
    pageStyle = sheet.PageStyle
    if pageStyle is not None and pageStyle != '':
        print("PAGE HAS STYLE")
        pageStyles = cDoc.StyleFamilies.getByName('PageStyles')
        style = pageStyles.getByName(pageStyle)
        SheetUtils.copyPageStyle(nDoc, style)
    # cover is OK, copy to new document (appended as last sheet)
    pos = nDoc.Sheets.Count
    nDoc.Sheets.importSheet(cDoc, copertina, pos)
    # if page has a print area, copy it too...
    nDoc.Sheets[pos].PageStyle = sheet.PageStyle
    if len(sheet.PrintAreas) > 0:
        print("PAGE HAS PRINT AREA")
        nDoc.Sheets[pos].PrintAreas = sheet.PrintAreas
    # replaces all placeholders with settings ones; docSubst entries
    # override the per-document template values
    settings = LeenoSettings.loadPageReplacements(oDoc)
    for key, val in docSubst.items():
        settings[key] = val
    SheetUtils.replaceText(nDoc.Sheets[pos], settings)
    # close cover document and return
    cDoc.close(False)
    del cDoc
    return True
def prepareHeaderFooter(oDoc, docSubst):
    """Build the header/footer strings for a PDF export.

    Loads the header/footer templates from the print settings, then replaces
    every placeholder with its value from the page-replacement templates,
    overridden by the document-specific docSubst entries.

    [PAGINA]/[PAGINE] are intentionally left untouched here: pagination is
    resolved later (the page count is only known after a first export pass).

    Returns a dict mapping the six header/footer slots
    ('intSx', 'intCenter', 'intDx', 'ppSx', 'ppCenter', 'ppDx')
    to their substituted text.
    """
    res = {}
    # load print settings, we need header and footer data
    printSettings, dummy = LeenoSettings.loadPrintSettings(oDoc)
    # load replacement templates; docSubst overrides the defaults
    replDict = LeenoSettings.loadPageReplacements(oDoc)
    replDict.update(docSubst)
    # replace placeholders
    for psKey in ('intSx', 'intCenter', 'intDx', 'ppSx', 'ppCenter', 'ppDx'):
        if psKey in printSettings:
            psVal = printSettings[psKey]
            for replKey, replVal in replDict.items():
                # pagination needs some extra steps, done elsewhere
                if replKey in ('[PAGINA]', '[PAGINE]'):
                    continue
                # str.replace already substitutes every occurrence; the old
                # `while replKey in psVal` loop was redundant and could spin
                # forever if replVal itself contained replKey.
                psVal = psVal.replace(replKey, replVal)
            res[psKey] = psVal
    return res
def PdfDialog():
    '''
    Build (but do not show) the PDF-export dialog.

    Layout: a PDF icon on the left; a column of "table number" edit boxes;
    a column of checkboxes selecting which documents to export; below them
    a destination-folder picker and Ok/Cancel buttons.
    '''
    # vertical size of the checkboxes == button size
    #dummy, hItems = Dialogs.getButtonSize('', Icon="Icons-24x24/settings.png")
    nWidth, hItems = Dialogs.getEditBox('aa')
    # width reserved for the big PDF icon
    imgW = Dialogs.getBigIconSize()[0] * 2
    return Dialogs.Dialog(Title='Esportazione documenti PDF', Horz=False, CanClose=True, Items=[
        Dialogs.HSizer(Items=[
            Dialogs.VSizer(Items=[
                Dialogs.Spacer(),
                Dialogs.ImageControl(Image='Icons-Big/pdf.png', MinWidth=imgW),
                Dialogs.Spacer(),
            ]),
            Dialogs.VSizer(Items=[
                Dialogs.FixedText(Text='Tavola'),
                Dialogs.Spacer(),
                Dialogs.Edit(Id='npElencoPrezzi', Align=1, FixedHeight=hItems, FixedWidth=nWidth),
                Dialogs.Spacer(),
                Dialogs.Edit(Id='npComputoMetrico', Align=1, FixedHeight=hItems, FixedWidth=nWidth),
                Dialogs.Spacer(),
                Dialogs.Edit(Id='npCostiManodopera', Align=1, FixedHeight=hItems, FixedWidth=nWidth),
                Dialogs.Spacer(),
                Dialogs.Edit(Id='npQuadroEconomico', Align=1, FixedHeight=hItems, FixedWidth=nWidth),
            ]),
            Dialogs.Spacer(),
            Dialogs.VSizer(Items=[
                Dialogs.FixedText(Text='Oggetto'),
                Dialogs.Spacer(),
                Dialogs.CheckBox(Id="cbElencoPrezzi", Label="Elenco prezzi", FixedHeight=hItems),
                Dialogs.Spacer(),
                Dialogs.CheckBox(Id="cbComputoMetrico", Label="Computo metrico", FixedHeight=hItems),
                Dialogs.Spacer(),
                Dialogs.CheckBox(Id="cbCostiManodopera", Label="Costi manodopera", FixedHeight=hItems),
                Dialogs.Spacer(),
                Dialogs.CheckBox(Id="cbQuadroEconomico", Label="Quadro economico", FixedHeight=hItems),
            ]),
            Dialogs.Spacer(),
        ]),
        Dialogs.Spacer(),
        Dialogs.Spacer(),
        Dialogs.FixedText(Text='Cartella di destinazione:'),
        Dialogs.Spacer(),
        Dialogs.PathControl(Id="pathEdit"),
        Dialogs.Spacer(),
        Dialogs.HSizer(Items=[
            Dialogs.Spacer(),
            Dialogs.Button(Label='Ok', MinWidth=Dialogs.MINBTNWIDTH, Icon='Icons-24x24/ok.png', RetVal=1),
            Dialogs.Spacer(),
            Dialogs.Button(Label='Annulla', MinWidth=Dialogs.MINBTNWIDTH, Icon='Icons-24x24/cancel.png', RetVal=-1),
            Dialogs.Spacer()
        ])
    ])
def PdfElencoPrezzi(destFolder, nTavola):
    """Export the 'Elenco Prezzi' sheet of the current document to PDF.

    destFolder: destination folder for the generated file.
    nTavola:    table number used as filename prefix and as the
                [NUMERO_DOCUMENTO] replacement; None or '' means no prefix.
    """
    oDoc = LeenoUtils.getDocument()
    ep = oDoc.Sheets.getByName('Elenco Prezzi')
    # Build the output name; prefix it with the table number when given.
    # FIX: the old guard compared str(nTavola) against None, which is never
    # true, so a None table number leaked the literal 'None-' into the name.
    baseName = ''
    if nTavola is not None and str(nTavola) != '':
        baseName = str(nTavola) + '-'
    destPath = os.path.join(destFolder, baseName + 'ElencoPrezzi.pdf')
    print(f"Export to '{destPath}' file")
    selection = [ep, ]
    docSubst = {
        '[OGGETTO]':'Elenco Prezzi',
        '[NUMERO_DOCUMENTO]': str(nTavola),
    }
    headerFooter = prepareHeaderFooter(oDoc, docSubst)
    SheetUtils.pdfExport(oDoc, selection, destPath, headerFooter, lambda oDoc, nDoc: prepareCover(oDoc, nDoc, docSubst))
    # There is no way to know the page count in advance, so we must create
    # the PDF once, count its pages, then create it again with [PAGINE]
    # filled in. Wonders of LibreOffice...
    nPages = LeenoUtils.countPdfPages(destPath)
    docSubst['[PAGINE]'] = nPages
    SheetUtils.pdfExport(oDoc, selection, destPath, headerFooter, lambda oDoc, nDoc: prepareCover(oDoc, nDoc, docSubst))
def PdfComputoMetrico(destFolder, nTavola):
    """Export the 'COMPUTO' sheet of the current document to PDF.

    destFolder: destination folder for the generated file.
    nTavola:    table number used as filename prefix and as the
                [NUMERO_DOCUMENTO] replacement; None or '' means no prefix.
    """
    oDoc = LeenoUtils.getDocument()
    ep = oDoc.Sheets.getByName('COMPUTO')
    # Build the output name; prefix it with the table number when given.
    # FIX: the old guard compared str(nTavola) against None, which is never
    # true, so a None table number leaked the literal 'None-' into the name.
    baseName = ''
    if nTavola is not None and str(nTavola) != '':
        baseName = str(nTavola) + '-'
    destPath = os.path.join(destFolder, baseName + 'ComputoMetrico.pdf')
    print(f"Export to '{destPath}' file")
    selection = [ep, ]
    docSubst = {
        '[OGGETTO]':'Computo Metrico',
        '[NUMERO_DOCUMENTO]': str(nTavola),
    }
    headerFooter = prepareHeaderFooter(oDoc, docSubst)
    SheetUtils.pdfExport(oDoc, selection, destPath, headerFooter, lambda oDoc, nDoc: prepareCover(oDoc, nDoc, docSubst))
    # There is no way to know the page count in advance, so we must create
    # the PDF once, count its pages, then create it again with [PAGINE]
    # filled in. Wonders of LibreOffice...
    nPages = LeenoUtils.countPdfPages(destPath)
    docSubst['[PAGINE]'] = nPages
    SheetUtils.pdfExport(oDoc, selection, destPath, headerFooter, lambda oDoc, nDoc: prepareCover(oDoc, nDoc, docSubst))
def MENU_Pdf():
oDoc = LeenoUtils.getDocument()
es = loadExportSettings(oDoc)
dlg = PdfDialog()
dlg.setData(es)
# se premuto "annulla" non fa nulla
if dlg.r |
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2014-2016 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
"""
Module exports
:class:`ZhaoEtAl2006AscSWISS05`,
:class:`ZhaoEtAl2006AscSWISS03`,
:class:`ZhaoEtAl2006AscSWISS08`.
"""
from __future__ import division
import numpy as np
from openquake.hazardlib.gsim.base import CoeffsTable
from openquake.hazardlib import const
from openquake.hazardlib.imt import PGA, SA
from openquake.hazardlib.gsim.zhao_2006 import ZhaoEtAl2006Asc
from openquake.hazardlib.gsim.zhao_2006_swiss_coeffs import (
COEFFS_FS_ROCK_SWISS05,
COEFFS_FS_ROCK_SWISS03,
COEFFS_FS_ROCK_SWISS08
)
from openquake.hazardlib.gsim.utils_swiss_gmpe import _apply_adjustments
class ZhaoEtAl2006AscSWISS05(ZhaoEtAl2006Asc):
    """
    This class extends :class:ZhaoEtAl2006Asc,
    adjusted to be used for the Swiss Hazard Model [2014].
    This GMPE is valid for a fixed value of vs30=700m/s
    #. kappa value
    K-adjustments corresponding to model 01 - as prepared by Ben Edwards
    K-value for PGA were not provided but inferred from SA[0.01s]
    the model applies to a fixed value of vs30=700m/s to match the
    reference vs30=1100m/s
    #. small-magnitude correction
    #. single station sigma - inter-event magnitude/distance adjustment
    Disclaimer: these equations are modified to be used for the
    Swiss Seismic Hazard Model [2014].
    The hazard modeller is solely responsible for the use of this GMPE
    in a different tectonic context.
    Model implemented by laurentiu.danciu@gmail.com
    """
    # Supported standard deviation type is only total, but reported as a
    # combination of mean and magnitude/distance single station sigma
    DEFINED_FOR_STANDARD_DEVIATION_TYPES = set([const.StdDev.TOTAL])
    # Only PGA and SA are supported by the Swiss adjustment tables
    DEFINED_FOR_INTENSITY_MEASURE_TYPES = set([
        PGA,
        SA
    ])
    def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
        """
        See :meth:`superclass method
        <.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
        for spec of input and result values.
        """
        # Model is only valid at vs30 = 700 m/s: override the site values.
        # NOTE(review): this mutates the caller's ``sites`` object in place.
        sites.vs30 = 700 * np.ones(len(sites.vs30))
        mean, stddevs = super(ZhaoEtAl2006AscSWISS05, self).\
            get_mean_and_stddevs(sites, rup, dists, imt, stddev_types)
        # 'tauC' selects the tau column of COEFFS_ASC used by the Swiss
        # single-station-sigma adjustment below
        tau_ss = 'tauC'
        log_phi_ss = 1.00
        C = ZhaoEtAl2006AscSWISS05.COEFFS_ASC
        mean, stddevs = _apply_adjustments(
            C, self.COEFFS_FS_ROCK[imt], tau_ss,
            mean, stddevs, sites, rup, dists.rrup, imt, stddev_types,
            log_phi_ss)
        return mean, stddevs
    # Per-IMT rock amplification coefficients for the kappa model 05
    COEFFS_FS_ROCK = COEFFS_FS_ROCK_SWISS05
    #: Original Coefficient table
    COEFFS_ASC = CoeffsTable(sa_damping=5, table="""\
    IMT a b c d e FR CH C1 C2 C3 C4 sigma QC WC tauC
    pga 1.101 -0.00564 0.0055 1.080 0.01412 0.251 0.293 1.111 1.344 1.355 1.420 0.604 0.0 0.0 0.303
    0.05 1.076 -0.00671 0.0075 1.060 0.01463 0.251 0.939 1.684 1.793 1.747 1.814 0.640 0.0 0.0 0.326
    0.10 1.118 -0.00787 0.0090 1.083 0.01423 0.240 1.499 2.061 2.135 2.031 2.082 0.694 0.0 0.0 0.342
    0.15 1.134 -0.00722 0.0100 1.053 0.01509 0.251 1.462 1.916 2.168 2.052 2.113 0.702 0.0 0.0 0.331
    0.20 1.147 -0.00659 0.0120 1.014 0.01462 0.260 1.280 1.669 2.085 2.001 2.030 0.692 0.0 0.0 0.312
    0.25 1.149 -0.00590 0.0140 0.966 0.01459 0.269 1.121 1.468 1.942 1.941 1.937 0.682 0.0 0.0 0.298
    0.30 1.163 -0.00520 0.0150 0.934 0.01458 0.259 0.852 1.172 1.683 1.808 1.770 0.670 0.0 0.0 0.300
    0.40 1.200 -0.00422 0.0100 0.959 0.01257 0.248 0.365 0.655 1.127 1.482 1.397 0.659 0.0 0.0 0.346
    0.50 1.250 -0.00338 0.0060 1.008 0.01114 0.247 -0.207 0.071 0.515 0.934 0.955 0.653 -0.0126 0.0116 0.338
    0.60 1.293 -0.00282 0.0030 1.088 0.01019 0.233 -0.705 -0.429 -0.003 0.394 0.559 0.653 -0.0329 0.0202 0.349
    0.70 1.336 -0.00258 0.0025 1.084 0.00979 0.220 -1.144 -0.866 -0.449 -0.111 0.188 0.652 -0.0501 0.0274 0.351
    0.80 1.386 -0.00242 0.0022 1.088 0.00944 0.232 -1.609 -1.325 -0.928 -0.620 -0.246 0.647 -0.0650 0.0336 0.356
    0.90 1.433 -0.00232 0.0020 1.109 0.00972 0.220 -2.023 -1.732 -1.349 -1.066 -0.643 0.653 -0.0781 0.0391 0.348
    1.00 1.479 -0.00220 0.0020 1.115 0.01005 0.211 -2.451 -2.152 -1.776 -1.523 -1.084 0.657 -0.0899 0.0440 0.338
    1.25 1.551 -0.00207 0.0020 1.083 0.01003 0.251 -3.243 -2.923 -2.542 -2.327 -1.936 0.660 -0.1148 0.0545 0.313
    1.50 1.621 -0.00224 0.0020 1.091 0.00928 0.248 -3.888 -3.548 -3.169 -2.979 -2.661 0.664 -0.1351 0.0630 0.306
    2.00 1.694 -0.00201 0.0025 1.055 0.00833 0.263 -4.783 -4.410 -4.039 -3.871 -3.640 0.669 -0.1672 0.0764 0.283
    2.50 1.748 -0.00187 0.0028 1.052 0.00776 0.262 -5.444 -5.049 -4.698 -4.496 -4.341 0.671 -0.1921 0.0869 0.287
    3.00 1.759 -0.00147 0.0032 1.025 0.00644 0.307 -5.839 -5.431 -5.089 -4.893 -4.758 0.667 -0.2124 0.0954 0.278
    4.00 1.826 -0.00195 0.0040 1.044 0.00590 0.353 -6.598 -6.181 -5.882 -5.698 -5.588 0.647 -0.2445 0.1088 0.273
    5.00 1.825 -0.00237 0.0050 1.065 0.00510 0.248 -6.752 -6.347 -6.051 -5.873 -5.798 0.643 -0.2694 0.1193 0.275
    """)
class ZhaoEtAl2006AscSWISS03(ZhaoEtAl2006AscSWISS05):
    """
    Variant of :class:ZhaoEtAl2006AscSWISS05 that only swaps in the
    kappa-model-03 rock amplification coefficient table; everything else
    (adjustment strategy, validity range) is inherited unchanged.
    """
    COEFFS_FS_ROCK = COEFFS_FS_ROCK_SWISS03
class ZhaoEtAl2006AscSWISS08(ZhaoEtAl2006AscSWISS05):
    """
    Variant of :class:ZhaoEtAl2006AscSWISS05 for the Swiss Hazard Model
    [2014] that only swaps in the kappa-model-08 rock amplification
    coefficient table; the adjustment strategy is inherited unchanged.
    """
    COEFFS_FS_ROCK = COEFFS_FS_ROCK_SWISS08
|
from asposewords import Settings
from com.aspose.words import Document
from com.aspose.words import BreakType
from com.aspose.words import DocumentBuilder
from com.aspose.words import StyleIdentifier
class UpdateFields:
    # Demo class (Jython / Aspose.Words): constructing it builds a document
    # with TOC/PAGE/NUMPAGES/DATE fields, updates them and saves the result.
    def __init__(self):
        """Build the demo document, update its fields and save it."""
        dataDir = Settings.dataDir + 'quickstart/'
        # Demonstrates how to insert fields and update them using Aspose.Words.
        # First create a blank document.
        doc = Document()
        # Use the document builder to insert some content and fields.
        builder = DocumentBuilder(doc)
        # Insert a table of contents at the beginning of the document.
        builder.insertTableOfContents("\\o \"1-3\" \\h \\z \\u")
        builder.writeln()
        # Insert some other fields.
        builder.write("Page: ")
        builder.insertField("PAGE")
        builder.write(" of ")
        builder.insertField("NUMPAGES")
        builder.writeln()
        builder.write("Date: ")
        builder.insertField("DATE")
        # Start the actual document content on the second page.
        builder.insertBreak(BreakType.SECTION_BREAK_NEW_PAGE)
        # Build a document with complex structure by applying different heading styles thus creating TOC entries.
        builder.getParagraphFormat().setStyleIdentifier(StyleIdentifier.HEADING_1)
        builder.writeln("Heading 1")
        builder.getParagraphFormat().setStyleIdentifier(StyleIdentifier.HEADING_2)
        builder.writeln("Heading 1.1")
        builder.writeln("Heading 1.2")
        builder.getParagraphFormat().setStyleIdentifier(StyleIdentifier.HEADING_1)
        builder.writeln("Heading 2")
        builder.writeln("Heading 3")
        # Move to the next page.
        builder.insertBreak(BreakType.PAGE_BREAK)
        builder.getParagraphFormat().setStyleIdentifier(StyleIdentifier.HEADING_2)
        builder.writeln("Heading 3.1")
        builder.getParagraphFormat().setStyleIdentifier(StyleIdentifier.HEADING_3)
        builder.writeln("Heading 3.1.1")
        builder.writeln("Heading 3.1.2")
        builder.writeln("Heading 3.1.3")
        builder.getParagraphFormat().setStyleIdentifier(StyleIdentifier.HEADING_2)
        builder.writeln("Heading 3.2")
        builder.writeln("Heading 3.3")
        print "Updating all fields in the document."
        # Call the method below to update the TOC.
        doc.updateFields()
        doc.save(dataDir + "Document Field Update Out.docx")
        print "Fields updated in the document successfully."
# Script entry point: run the field-update demo when executed directly.
if __name__ == '__main__':
    UpdateFields()
# -*- coding: utf-8 -*-
import unittest
from getkey.platforms import PlatformTest
def readchar_fn_factory(stream):
    """Return a fake readchar-style function that replays *stream*.

    Each call yields the next item of the stream in order; once the
    stream is exhausted, further calls raise IndexError.
    """
    buffered = list(stream)

    def read_one(blocking=False):
        # Pop from the front so items come back in original order.
        return buffered.pop(0)

    return read_one
class TestGetkey(unittest.TestCase):
    """Exercise getkey() against scripted input streams."""

    def _assert_first_key(self, stream, expected):
        # Feed the scripted stream to the test platform and verify the
        # first key it decodes.
        self.assertEqual(expected, PlatformTest(stream).getkey())

    def test_basic_character(self):
        self._assert_first_key('a', 'a')

    def test_string_instead_of_char(self):
        # Only the first character of a longer stream is returned.
        self._assert_first_key('abcde', 'a')

    def test_special_combo_character(self):
        self._assert_first_key('\x1b\x01foo', '\x1b\x01')

    def test_special_key(self):
        # ESC [ A (cursor up) must come back as one key.
        self._assert_first_key('\x1b\x5b\x41foo', '\x1b\x5b\x41')

    def test_special_key_combo(self):
        self._assert_first_key('\x1b\x5b\x33\x5efoo', '\x1b\x5b\x33\x5e')

    def test_unicode_character(self):
        self._assert_first_key(u'Ángel', u'Á')
|
from django.db import models
class Empresa(models.Model):
    # Company that students can rate: name, city and business sector.
    nombre = models.CharField(max_length=100)
    ciudad = models.CharField(max_length=50)
    sector = models.CharField(max_length=200)
    def __str__(self):
        # Shown in the admin and anywhere the object is rendered as text.
        return self.nombre
class Calificacion(models.Model):
    # Score (calificacion) given by a student (alumno) to a company.
    alumno = models.CharField(max_length=100)
    calificacion = models.IntegerField(default=0)
    # on_delete made explicit: CASCADE was the implicit default before
    # Django 2.0 and the argument is mandatory from 2.0 onwards.
    empresa = models.ForeignKey(Empresa, on_delete=models.CASCADE)
    def __str__(self):
        # Shown in the admin and anywhere the object is rendered as text.
        return self.alumno
|
from seleniumhelpers import SeleniumTestCase |
from seleniumhelpers import get_default_timeout
from seleniumhelpers import get_se | tting_with_envfallback
|
# Copyright 2016 Conchylicultor. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""
"""
import tensorflow as tf
import deepmusic.tfutils as tfutils
import deepmusic.songstruct as music
# TODO: Some class from the encoder and decoder are really similar. Could they be merged ?
class DecoderNetwork:
    """ Abstract base for modules predicting a keyboard configuration at step t.

    Subclasses implement get_cell(); weights must be created inside build()
    so they land in the right tf scope.
    """
    def __init__(self, args):
        """
        Args:
            args: parameters of the model
        """
        self.args = args

    def build(self):
        """ Create the model weights (no-op in the base class).
        """
        pass

    def init_state(self):
        """ Return the initial cell state (None: no recurrent state).
        """
        return None

    def get_cell(self, prev_keyboard, prev_state_enco):
        """ Predict the next keyboard state.
        Args:
            prev_keyboard (?): the previous keyboard configuration
            prev_state_enco (?): the encoder output state
        Return:
            Tuple: the predicted keyboard configuration and last decoder state
        Raises:
            NotImplementedError: always; subclasses must override.
        """
        raise NotImplementedError('Abstract class')
class Rnn(DecoderNetwork):
    """ Predict a keyboard configuration at step t
    Use a RNN to predict the next configuration
    """
    @staticmethod
    def get_module_id():
        # Identifier used to select this module from the command line/config.
        return 'rnn'
    def __init__(self, args):
        """
        Args:
            args: parameters of the model
        """
        super().__init__(args)
        self.rnn_cell = None
        self.project_key = None # Fct which projects the decoder output into a single key space
    def build(self):
        """ Initialize the weights of the model
        """
        self.rnn_cell = tfutils.get_rnn_cell(self.args, "deco_cell")
        # One scalar output per key: projection [hidden_size] -> [1].
        self.project_key = tfutils.single_layer_perceptron([self.args.hidden_size, 1],
                                                           'project_key')
    def init_state(self):
        """ Return the initial cell state
        """
        return self.rnn_cell.zero_state(batch_size=self.args.batch_size, dtype=tf.float32)
    def get_cell(self, prev_keyboard, prev_state_enco):
        """ a RNN decoder
        See parent class for arguments details
        """
        axis = 1  # The first dimension is the batch, we split the keys
        assert prev_keyboard.get_shape()[axis].value == music.NB_NOTES
        # NOTE: pre-TF-1.0 argument order tf.split(axis, num_splits, value);
        # one decoder input per note.
        inputs = tf.split(axis, music.NB_NOTES, prev_keyboard)
        outputs, final_state = tf.nn.seq2seq.rnn_decoder(
            decoder_inputs=inputs,
            initial_state=prev_state_enco,
            cell=self.rnn_cell
            # TODO: Which loop function (should use prediction) ? : Should take the previous generated input/ground truth (as the global model loop_fct). Need to add a new bool placeholder
        )
        # Is it better to do the projection before or after the packing ?
        next_keys = []
        for output in outputs:
            next_keys.append(self.project_key(output))
        # Re-assemble the per-key scalars into one keyboard tensor
        # (pre-TF-1.0 order: tf.concat(axis, values)).
        next_keyboard = tf.concat(axis, next_keys)
        return next_keyboard, final_state
class Perceptron(DecoderNetwork):
    """ Minimal decoder: one hidden layer between keyboard states.
    Proof of concept for the architecture; keeps no recurrent state.
    """
    @staticmethod
    def get_module_id():
        return 'perceptron'

    def __init__(self, args):
        """
        Args:
            args: parameters of the model
        """
        super().__init__(args)
        # Both projections are created lazily in build().
        self.project_hidden = None    # keyboard space -> hidden space
        self.project_keyboard = None  # hidden space -> keyboard space

    def build(self):
        """ Create the two perceptron layers.
        """
        self.project_hidden = tfutils.single_layer_perceptron(
            [music.NB_NOTES, self.args.hidden_size], 'project_hidden')
        self.project_keyboard = tfutils.single_layer_perceptron(
            [self.args.hidden_size, music.NB_NOTES], 'project_keyboard')

    def get_cell(self, prev_keyboard, prev_state_enco):
        """ One hidden-layer forward pass.
        The encoder state is passed through unchanged.
        See parent class for arguments details
        """
        hidden = self.project_hidden(prev_keyboard)
        # No sigmoid here: the loss function applies the activation.
        return self.project_keyboard(hidden), prev_state_enco
class Lstm(DecoderNetwork):
    """ Multi-layer Lstm. Just a wrapper around the official tf
    """
    @staticmethod
    def get_module_id():
        return 'lstm'
    def __init__(self, args, *module_args):
        """
        Args:
            args: parameters of the model
            *module_args: extra module arguments (currently unused)
        """
        super().__init__(args)  # also stores args on self
        self.rnn_cell = None
        # Fct which projects the decoder output into the output space.
        # FIX: this attribute was previously initialised under the wrong name
        # (project_keyboard) while build() and get_cell() use project_output.
        self.project_output = None
    def build(self):
        """ Initialize the weights of the model
        """
        # TODO: Control over the the Cell using module arguments instead of global arguments (hidden_size and num_layer) !!
        # RNN network
        rnn_cell = tf.nn.rnn_cell.BasicLSTMCell(self.args.hidden_size, state_is_tuple=True)  # Or GRUCell, LSTMCell(args.hidden_size)
        if not self.args.test:  # TODO: Should use a placeholder instead
            rnn_cell = tf.nn.rnn_cell.DropoutWrapper(rnn_cell, input_keep_prob=1.0, output_keep_prob=0.9)  # TODO: Custom values
        rnn_cell = tf.nn.rnn_cell.MultiRNNCell([rnn_cell] * self.args.num_layers, state_is_tuple=True)
        self.rnn_cell = rnn_cell
        # For projecting on the keyboard space
        self.project_output = tfutils.single_layer_perceptron([self.args.hidden_size, 12 + 1],  # TODO: HACK: Input/output space hardcoded !!!
                                                              'project_output')  # Should we do the activation sigmoid here ?
    def init_state(self):
        """ Return the initial cell state
        """
        return self.rnn_cell.zero_state(batch_size=self.args.batch_size, dtype=tf.float32)
    def get_cell(self, prev_input, prev_states):
        """ Run one LSTM step and project the output.
        """
        # NOTE(review): only prev_states[1] is fed to the cell — confirm the
        # caller packs the state as (something, rnn_state).
        next_output, next_state = self.rnn_cell(prev_input, prev_states[1])
        next_output = self.project_output(next_output)
        # No activation function here: SoftMax is computed by the loss function
        return next_output, next_state
|
#!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test various fingerprinting protections.
If an stale block more than a month old or its header are requested by a peer | ,
the node should pretend that it does not have it to avoid fingerprinting.
"""
import threading
import time
from test_framework.blocktools import (create_block, create_coinbase)
from test_framework.mininode import (
CInv,
NodeConnCB,
msg_headers,
msg_block,
msg_getdata,
msg_getheaders,
| network_thread_start,
wait_until,
)
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
p2p_port)
class P2PFingerprintTest(BitcoinTestFramework):
    """Check that stale blocks/headers older than a month are withheld."""
    def set_test_params(self):
        # Single node on a fresh regtest chain.
        self.setup_clean_chain = True
        self.num_nodes = 1
    # Build a chain of blocks on top of given one
    def build_chain(self, nblocks, prev_hash, prev_height, prev_median_time):
        """Return nblocks solved blocks chained on top of prev_hash."""
        blocks = []
        for _ in range(nblocks):
            coinbase = create_coinbase(prev_height + 1)
            # advance one second past the previous median so timestamps are valid
            block_time = prev_median_time + 1
            block = create_block(int(prev_hash, 16), coinbase, block_time)
            block.solve()
            blocks.append(block)
            prev_hash = block.hash
            prev_height += 1
            prev_median_time = block_time
        return blocks
    # Send a getdata request for a given block hash
    def send_block_request(self, block_hash, node):
        msg = msg_getdata()
        msg.inv.append(CInv(2, block_hash))  # 2 == "Block"
        node.send_message(msg)
    # Send a getheaders request for a given single block hash
    def send_header_request(self, block_hash, node):
        msg = msg_getheaders()
        msg.hashstop = block_hash
        node.send_message(msg)
    # Check whether last block received from node has a given hash
    def last_block_equals(self, expected_hash, node):
        block_msg = node.last_message.get("block")
        return block_msg and block_msg.block.rehash() == expected_hash
    # Check whether last block header received from node has a given hash
    def last_header_equals(self, expected_hash, node):
        headers_msg = node.last_message.get("headers")
        return (headers_msg and
                headers_msg.headers and
                headers_msg.headers[0].rehash() == expected_hash)
    # Checks that stale blocks timestamped more than a month ago are not served
    # by the node while recent stale blocks and old active chain blocks are.
    # This does not currently test that stale blocks timestamped within the
    # last month but that have over a month's worth of work are also withheld.
    def run_test(self):
        node0 = self.nodes[0].add_p2p_connection(NodeConnCB())
        network_thread_start()
        node0.wait_for_verack()
        # Set node time to 60 days ago
        self.nodes[0].setmocktime(int(time.time()) - 60 * 24 * 60 * 60)
        # Generating a chain of 10 blocks
        block_hashes = self.nodes[0].generate(nblocks=10)
        # Create longer chain starting 2 blocks before current tip
        height = len(block_hashes) - 2
        block_hash = block_hashes[height - 1]
        block_time = self.nodes[0].getblockheader(block_hash)["mediantime"] + 1
        new_blocks = self.build_chain(5, block_hash, height, block_time)
        # Force reorg to a longer chain
        node0.send_message(msg_headers(new_blocks))
        node0.wait_for_getdata()
        for block in new_blocks:
            node0.send_and_ping(msg_block(block))
        # Check that reorg succeeded
        assert_equal(self.nodes[0].getblockcount(), 13)
        stale_hash = int(block_hashes[-1], 16)
        # Check that getdata request for stale block succeeds
        self.send_block_request(stale_hash, node0)
        test_function = lambda: self.last_block_equals(stale_hash, node0)
        wait_until(test_function, timeout=3)
        # Check that getheader request for stale block header succeeds
        self.send_header_request(stale_hash, node0)
        test_function = lambda: self.last_header_equals(stale_hash, node0)
        wait_until(test_function, timeout=3)
        # Longest chain is extended so stale is much older than chain tip
        self.nodes[0].setmocktime(0)
        tip = self.nodes[0].generate(nblocks=1)[0]
        assert_equal(self.nodes[0].getblockcount(), 14)
        # Send getdata & getheaders to refresh last received getheader message
        block_hash = int(tip, 16)
        self.send_block_request(block_hash, node0)
        self.send_header_request(block_hash, node0)
        node0.sync_with_ping()
        # Request for very old stale block should now fail
        self.send_block_request(stale_hash, node0)
        time.sleep(3)
        assert not self.last_block_equals(stale_hash, node0)
        # Request for very old stale block header should now fail
        self.send_header_request(stale_hash, node0)
        time.sleep(3)
        assert not self.last_header_equals(stale_hash, node0)
        # Verify we can fetch very old blocks and headers on the active chain
        block_hash = int(block_hashes[2], 16)
        self.send_block_request(block_hash, node0)
        self.send_header_request(block_hash, node0)
        node0.sync_with_ping()
        self.send_block_request(block_hash, node0)
        test_function = lambda: self.last_block_equals(block_hash, node0)
        wait_until(test_function, timeout=3)
        self.send_header_request(block_hash, node0)
        test_function = lambda: self.last_header_equals(block_hash, node0)
        wait_until(test_function, timeout=3)
# Standard functional-test entry point.
if __name__ == '__main__':
    P2PFingerprintTest().main()
|
pady=2, sticky=W)
#
cal.transient(siWin)
cal.grab_set()
siWin.wait_window(cal)
print( '\nmonoCal(): done!')
#
## Power Up - operations to sequence initialization of hardware/software
#
def PowerUp():
    '''Load "settings" and calibrate SPEX.

    Start-up sequence: read default settings, scan for the serial port to
    the RetroSPEX controller (or fall back to OFFLINE mode), initialise
    the controller, read monochromator positions, run the monochromator
    calibration, then open the day file.
    '''
    global serOutReady
    #
    readSettings() # load the Default settings for the spectrometer
    #
    # establish serial connection to RetroSPEX controller")
    # or, set to 'offline' mode to look at files, etc.")
    #
    portScan() # search for serialPort to spectrometer
    #
    if portName != 'OFFLINE':
        #TODO flash LED repeatedly to indicate readyness
        print("TODO flash LED repeatedly to indicate readyness")
        #
        #TODO log "run time" (bulb life? - i.e. need start time)
        #
        #TODO if connected: Initialize RetroSPEX controller settings
        #TODO i.e. HV levels (0 volts initially), 'G'ain setting, etc.
        print("TODO: if connected: Initialize RetroSPEX controller settings")
        RetroSPEXinit()
        #
        print("TODO: if connected: Monochrometers by 10nm (anti-backlash)")
        readPositions()
        #TODO Move Monochrometers by -10nm/+10nm (anti-backlash)
        #
        # perform monochrometer calibration (verification)
        monoCal()
        #TODO ( => always move POS dir (or sept NEG val+10 and then POS 10)
        #TODO ( => real time display values initialize)
        #
    checkDayFile()
    #
    return
#
## Power Down - operations to sequence shutdown of hardware/software
#
def PowerDown():
    '''Shutdown sequence: reset controller state before exit.

    No-op when running in OFFLINE mode.
    '''
    #
    if portName != 'OFFLINE':
        #
        #TODO stop scan if one is in process
        print("TODO: scan if one is in process")
        #
        #TODO if connected: Initialize RetroSPEX controller settings
        #TODO i.e. HV levels (0 volts initially), 'G'ain setting, etc.
        print("TODO: if connected: Initialize RetroSPEX controller settings")
        RetroSPEXinit()
        #
        #TODO log "run time" (bulb life? - i.e. need start time)
        #
        #TODO log data such as monochrometer position on shutdown
        print("TODO: log data such as monochrometer position on shutdown")
        #
    return
#====================================
## Scan Control Frame
# Left-hand column that holds all the control-button frames.
#
#-------
controlsFrame = Frame(siWin, bg = TANBG, borderwidth=0)
controlsFrame.grid(row=0,column=0, sticky=N)
#
#-------
scfScanControlFrame = LabelFrame(controlsFrame,text='Control',
                                 bg = TANBG, borderwidth=4)
scfScanControlFrame.grid(row=0,column=0, sticky=N)
## Scan; START/STOP - Spectrometer scan control
#
scanStopIcon = PhotoImage(file='icons/icon_scanSTOP.gif')
scanStartIcon = PhotoImage(file='icons/icon_scanSTART.gif')
runOn = False # default == OFF
#
def toggleScan():
    '''Scan Start/Stop - Spectrometer scan control.

    Toggles the global runOn flag and swaps the button icon; starting a
    scan is only allowed when scanSanityCheck() passes.
    '''
    global runOn
    if runOn: # then STOP the scan !!
        if jjltest:
            print('STOPPING NOT IMPLEMENTED YET ;-)')
        runOn = False
        runScfB00['image'] = scanStartIcon
    else: # START up a scan
        # perform sanity checks before starting scan
        sane = scanSanityCheck( warn = True )
        if jjltest:
            print('STARTING A SCAN NOT IMPLEMENTED YET ;-)')
            sane = False
        if sane:
            runOn = True
            runScfB00['image'] = scanStopIcon
    return
#
#------- Scan start/stop button (icon swaps with the scan state).
runScfB00 = Button(scfScanControlFrame,image=scanStartIcon
                   ,borderwidth = 0,activebackground=ACTIVB
                   ,bg = TANBG, command = toggleScan )
runScfB00.grid(column=0,row=0, padx=2)
## HV - On/Off - High Voltage (red: safety concern)
#
hvOffIcon = PhotoImage(file='icons/icon_hvOff.gif')
hvOnIcon = PhotoImage(file='icons/icon_hvOn.gif')
hvOn = False # default == OFF
#
def toggleHV():
    '''HV - On/Off - High Voltage (red: safety concern).

    Flips the global hvOn flag (updating the button icon via
    toggleBtnVar), then drives both the EM and REF supplies to the
    new state.
    '''
    global hvOn
    hvOn = toggleBtnVar(hvOn, hvScfB01, hvOffIcon, hvOnIcon)
    # Apply the new state to both high-voltage supplies.
    commands = (cmdHVonEM, cmdHVonREF) if hvOn else (cmdHVoffEM, cmdHVoffREF)
    for cmd in commands:
        cmd()
    return
#
#------- High-voltage on/off button.
hvScfB01 = Button(scfScanControlFrame, image = hvOffIcon
                  ,activebackground=ACTIVB
                  ,borderwidth = 0, bg = TANBG, command = toggleHV)
hvScfB01.grid(column=0,row=1)
#====================================
## Ref. Data Frame -- Load previous Scan Data for Reference or Settings recall
#
#-------
filesFrame = LabelFrame(controlsFrame,text='Ref. Data',
                        bg = TANBG, borderwidth=4)
filesFrame.grid(row=1,column=0, padx=2, sticky=NW)
#
# LOAD experimental settings from disk
dataLoadIcon = PhotoImage(file='icons/icon_dataLOAD.gif')
#
#-------
fileFileDataLoad = Button(filesFrame, image=dataLoadIcon
                          , bg = TANBG, activebackground=ACTIVB
                          ,command = dataFileLOAD
                          ,borderwidth = 0, font=monoFont14 )
fileFileDataLoad.grid(row=0, column=0, sticky=NW)
#
# Recall monochromator settings from a previous data file.
dataMgetIcon = PhotoImage(file='icons/icon_dataMGET.gif')
#
#-------
fileSettingsGet = Button(filesFrame, image=dataMgetIcon, bg = TANBG
                         ,command = dataFileMGET,activebackground=ACTIVB
                         ,borderwidth = 0, font=monoFont14 )
fileSettingsGet.grid(row=1, column=0,sticky=NW)
#====================================
## Macro Files Frame
#
#-------
macroFrame = LabelFrame(controlsFrame,text='Macro Files',
                        bg = TANBG, borderwidth=4)
macroFrame.grid(row=2,column=0, padx=2, sticky=NW)
#
# LOAD scan settings from disk
macroLoadIcon = PhotoImage(file='icons/icon_macroLOAD.gif')
#
#------- NOTE(review): no command bound yet — load/edit are placeholders.
macroFileLoad = Button(macroFrame, image=macroLoadIcon, bg = TANBG
                       ,borderwidth = 0
                       ,activebackground=ACTIVB, font=monoFont14 )
macroFileLoad.grid(row=0, column=0,sticky=NW)
#
#
macroEditIcon = PhotoImage(file='icons/icon_macroEDIT.gif')
#
#-------
macroFileEdit = Button(macroFrame, image=macroEditIcon, bg = TANBG
                       , borderwidth = 0
                       ,activebackground=ACTIVB, font=monoFont14 )
macroFileEdit.grid(row=1, column=0,sticky=NW)

#====================================
## Settings Frame
#
#-------
settingsFrame = LabelFrame(controlsFrame,text='Settings',
                           bg = TANBG, borderwidth=4)
settingsFrame.grid(row=12,column=0, sticky=S)
#
#
settingsIcon = PhotoImage(file='icons/icon_settings.gif')
#
#-------
settingsBtn = Button(settingsFrame, image=settingsIcon, bg = TANBG
                     ,borderwidth = 0, command = editSettings
                     ,activebackground=ACTIVB, font=monoFont14 )
settingsBtn.grid()
#====================================
## Quit Frame
#
def quitCommand():
    """Power down the attached equipment, then destroy the main Tk window."""
    #
    # Shutdown equipment
    #
    PowerDown()
    #
    siWin.destroy()
#-------
# Quit button frame: quitBtn triggers quitCommand (power-down + close).
quitFrame = LabelFrame(controlsFrame,text='Quit',
                     bg = TANBG, borderwidth=4)
quitFrame.grid(row=13,column=0, sticky=S)
#
#
quitIcon = PhotoImage(file='icons/icon_quit.gif')
#
#-------
quitBtn = Button(quitFrame, image=quitIcon, bg = TANBG, borderwidth = 0
            ,command = quitCommand
            ,activebackground=ACTIVB, font=monoFont14 )
quitBtn.grid()
#====================================
## Experiment Frame -- Window to right of Control frame
#
# Nested frames laying out the experiment settings area.
#-------
efFrame = Frame(siWin, bg = TANBG, borderwidth=0)
efFrame.grid(row=0,column=1,sticky=NW)
#====================================
## Experiment Settings Frame
#
#-------
esfFrame = Frame(efFrame, bg = TANBG, borderwidth=0)
esfFrame.grid(row=0,column=0,sticky=NW)
#====================================
## Spectrometer / Specimen Box Frame
#
#-------
ssbFrame = Frame(esfFrame, bg = TANBG, borderwidth=0)
ssbFrame.grid(row=0,column=0,sticky=EW)
#====================================
## Spectrometer Settings Frame
#
#-------
ssfFrame = LabelFrame(ssbFrame,text='Spectrometer Settings',
                     bg = TANBG, borderwidth=4)
ssfFrame.grid(row=0,column=0,sticky=NW)
#====================================
## Spectrometer EX Frame - EXcitation
#
# EX scan
#
#-------
sEXfFrame = Frame(ssfFrame, bg = TANBG)
sEXfFrame.grid(row=0,column=0 |
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import datetime
import errno
import json
import os
import shutil
import tempfile
import uuid
import numpy
import six
from gnocchi import incoming
from gnocchi import utils
class FileStorage(incoming.IncomingDriver):
    """Incoming-measure driver that stages measures as plain files.

    New measures are written atomically (temp file + rename) into one
    directory per metric; metric directories are grouped into "sack"
    directories so that processing can be sharded.
    """

    def __init__(self, conf):
        super(FileStorage, self).__init__(conf)
        self.basepath = conf.file_basepath
        # Scratch directory used for atomic writes and the config file.
        self.basepath_tmp = os.path.join(self.basepath, 'tmp')

    def __str__(self):
        return "%s: %s" % (self.__class__.__name__, str(self.basepath))

    def upgrade(self, num_sacks):
        """Create the on-disk layout on top of the base-class upgrade."""
        super(FileStorage, self).upgrade(num_sacks)
        utils.ensure_paths([self.basepath_tmp])

    def get_storage_sacks(self):
        """Return the configured number of sacks, or None if not set up yet."""
        try:
            with open(os.path.join(self.basepath_tmp, self.CFG_PREFIX),
                      'r') as f:
                return json.load(f)[self.CFG_SACKS]
        except IOError as e:
            # No config file yet: storage has not been initialized.
            if e.errno == errno.ENOENT:
                return
            raise

    def set_storage_settings(self, num_sacks):
        """Persist the sack count and create the sack directories."""
        data = {self.CFG_SACKS: num_sacks}
        with open(os.path.join(self.basepath_tmp, self.CFG_PREFIX), 'w') as f:
            json.dump(data, f)
        utils.ensure_paths([self._sack_path(i)
                            for i in six.moves.range(self.NUM_SACKS)])

    def remove_sack_group(self, num_sacks):
        """Delete the directories of a previous group of num_sacks sacks."""
        prefix = self.get_sack_prefix(num_sacks)
        for i in six.moves.xrange(num_sacks):
            shutil.rmtree(os.path.join(self.basepath, prefix % i))

    def _sack_path(self, sack):
        return os.path.join(self.basepath, self.get_sack_name(sack))

    def _measure_path(self, sack, metric_id):
        return os.path.join(self._sack_path(sack), six.text_type(metric_id))

    def _build_measure_path(self, metric_id, random_id=None):
        """Return the measure directory for a metric, or a file inside it.

        If random_id is True, a fresh unique (uuid + timestamp) filename is
        generated; otherwise random_id, when given, is used as the filename.
        """
        sack = self.sack_for_metric(metric_id)
        path = self._measure_path(sack, metric_id)
        if random_id:
            if random_id is True:
                now = datetime.datetime.utcnow().strftime("_%Y%m%d_%H:%M:%S")
                random_id = six.text_type(uuid.uuid4()) + now
            return os.path.join(path, random_id)
        return path

    def _store_new_measures(self, metric, data):
        """Atomically store a new serialized measure blob for metric."""
        # Write to a temp file on the same filesystem so rename() is atomic.
        tmpfile = tempfile.NamedTemporaryFile(
            prefix='gnocchi', dir=self.basepath_tmp,
            delete=False)
        tmpfile.write(data)
        tmpfile.close()
        path = self._build_measure_path(metric.id, True)
        while True:
            try:
                os.rename(tmpfile.name, path)
                break
            except OSError as e:
                if e.errno != errno.ENOENT:
                    raise
            # The metric directory does not exist yet: create it, retry.
            try:
                os.mkdir(self._build_measure_path(metric.id))
            except OSError as e:
                # NOTE(jd) It's possible that another process created the
                # path just before us! In this case, good for us, let's do
                # nothing then! (see bug #1475684)
                if e.errno != errno.EEXIST:
                    raise

    def _build_report(self, details):
        """Return (metric count, measure count, per-metric details or None)."""
        report_vars = {'metrics': 0, 'measures': 0, 'metric_details': {}}
        if details:
            def build_metric_report(metric, sack):
                report_vars['metric_details'][metric] = len(
                    self._list_measures_container_for_metric_id_str(sack,
                                                                    metric))
        else:
            def build_metric_report(metric, sack):
                report_vars['metrics'] += 1
                report_vars['measures'] += len(
                    self._list_measures_container_for_metric_id_str(sack,
                                                                    metric))
        for i in six.moves.range(self.NUM_SACKS):
            for metric in self.list_metric_with_measures_to_process(i):
                build_metric_report(metric, i)
        return (report_vars['metrics'] or
                len(report_vars['metric_details'].keys()),
                report_vars['measures'] or
                sum(report_vars['metric_details'].values()),
                report_vars['metric_details'] if details else None)

    def list_metric_with_measures_to_process(self, sack):
        """Return the set of metric ids with pending measures in a sack."""
        return set(self._list_target(self._sack_path(sack)))

    def _list_measures_container_for_metric_id_str(self, sack, metric_id):
        return self._list_target(self._measure_path(sack, metric_id))

    def _list_measures_container_for_metric_id(self, metric_id):
        return self._list_target(self._build_measure_path(metric_id))

    @staticmethod
    def _list_target(target):
        """List a directory, treating a missing directory as empty."""
        try:
            return os.listdir(target)
        except OSError as e:
            # Some other process treated this one, then do nothing
            if e.errno == errno.ENOENT:
                return []
            raise

    def _delete_measures_files_for_metric_id(self, metric_id, files):
        """Remove processed measure files, then the metric dir if empty."""
        for f in files:
            try:
                os.unlink(self._build_measure_path(metric_id, f))
            except OSError as e:
                # Another process deleted it in the meantime, no prob'
                if e.errno != errno.ENOENT:
                    raise
        try:
            os.rmdir(self._build_measure_path(metric_id))
        except OSError as e:
            # ENOENT: ok, it has been removed at almost the same time
            # by another process
            # ENOTEMPTY: ok, someone pushed measure in the meantime,
            # we'll delete the measures and directory later
            # EEXIST: some systems use this instead of ENOTEMPTY
            if e.errno not in (errno.ENOENT, errno.ENOTEMPTY, errno.EEXIST):
                raise

    def delete_unprocessed_measures_for_metric_id(self, metric_id):
        files = self._list_measures_container_for_metric_id(metric_id)
        self._delete_measures_files_for_metric_id(metric_id, files)

    def has_unprocessed(self, metric):
        return os.path.isdir(self._build_measure_path(metric.id))

    @contextlib.contextmanager
    def process_measure_for_metric(self, metric):
        """Yield all pending measures for metric; on clean exit the consumed
        files are deleted (an exception in the caller keeps them)."""
        files = self._list_measures_container_for_metric_id(metric.id)
        measures = self._make_measures_array()
        for f in files:
            abspath = self._build_measure_path(metric.id, f)
            with open(abspath, "rb") as e:
                measures = numpy.append(
                    measures, self._unserialize_measures(f, e.read()))
        yield measures
        self._delete_measures_files_for_metric_id(metric.id, files)
|
"""This demo program solves the incompressible Navier-Stokes equations
on an L-shaped domain using Chorin's splitting method."""
# Copyright (C) 2010-2011 Anders Logg
#
# This file is part of DOLFIN.
#
# DOLFIN is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DOLFIN is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DOLFIN. If not, see <http://www.gnu.org/licenses/>.
#
# Modified by Mikael Mortensen 2011
#
# First added: 2010-08-30
# Last changed: 2011-06-30
#
# SC14 Paraview's Catalyst tutorial
#
# Step 1 : initialization
#
# [SC14-Catalyst] we need a python environment that enables import of both Dolfin and ParaView
# [SC14-Catalyst] we need a python environment that enables import of both Dolfin and ParaView
# NOTE: this is a Python 2 script (execfile, print statements).
execfile("simulation-env.py")
# [SC14-Catalyst] import paraview, vtk and paraview's simple API
import sys
import paraview
import paraview.vtk as vtk
import paraview.simple as pvsimple
# [SC14-Catalyst] check for command line arguments
if len(sys.argv) != 3:
    print "command is 'python",sys.argv[0],"<script name> <number of time steps>'"
    sys.exit(1)
# [SC14-Catalyst] initialize and read input parameters
paraview.options.batch = True
paraview.options.symmetric = True
# [SC14-Catalyst] import user co-processing script
import vtkPVCatalystPython
import os
scriptpath, scriptname = os.path.split(sys.argv[1])
sys.path.append(scriptpath)
# Strip a trailing ".py" so the script can be imported as a module.
if scriptname.endswith(".py"):
    print 'script name is ', scriptname
    scriptname = scriptname[0:len(scriptname)-3]
try:
    cpscript = __import__(scriptname)
except:
    print sys.exc_info()
    print 'Cannot find ', scriptname, ' -- no coprocessing will be performed.'
    sys.exit(1)
# Begin demo
from dolfin import *
# Print log messages only from the root process in parallel
parameters["std_out_all_processes"] = False;
# Load mesh from file
mesh = Mesh(DOLFIN_EXAMPLE_DATA_DIR+"/lshape.xml.gz")
# Define function spaces (P2-P1 Taylor-Hood pair)
V = VectorFunctionSpace(mesh, "Lagrange", 2)
Q = FunctionSpace(mesh, "Lagrange", 1)
# Define trial and test functions
u = TrialFunction(V)
p = TrialFunction(Q)
v = TestFunction(V)
q = TestFunction(Q)
# Set parameter values
dt = 0.01
T = 3
nu = 0.01
# Define time-dependent pressure boundary condition
p_in = Expression("sin(3.0*t)", t=0.0)
# Define boundary conditions. The condition strings are compiled C++
# expressions ('|' there is C++ bitwise-or over the boolean subterms).
noslip  = DirichletBC(V, (0, 0),
                      "on_boundary && \
                       (x[0] < DOLFIN_EPS | x[1] < DOLFIN_EPS | \
                       (x[0] > 0.5 - DOLFIN_EPS && x[1] > 0.5 - DOLFIN_EPS))")
inflow  = DirichletBC(Q, p_in, "x[1] > 1.0 - DOLFIN_EPS")
outflow = DirichletBC(Q, 0, "x[0] > 1.0 - DOLFIN_EPS")
bcu = [noslip]
bcp = [inflow, outflow]
# Create functions
u0 = Function(V)
u1 = Function(V)
p1 = Function(Q)
# Define coefficients
k = Constant(dt)
f = Constant((0, 0))
# Tentative velocity step
F1 = (1/k)*inner(u - u0, v)*dx + inner(grad(u0)*u0, v)*dx + \
     nu*inner(grad(u), grad(v))*dx - inner(f, v)*dx
a1 = lhs(F1)
L1 = rhs(F1)
# Pressure update
a2 = inner(grad(p), grad(q))*dx
L2 = -(1/k)*div(u1)*q*dx
# Velocity update
a3 = inner(u, v)*dx
L3 = inner(u1, v)*dx - k*inner(grad(p1), v)*dx
# Assemble matrices
A1 = assemble(a1)
A2 = assemble(a2)
A3 = assemble(a3)
# Use amg preconditioner if available
prec = "amg" if has_krylov_solver_preconditioner("amg") else "default"
# Create files for storing solution
ufile = File("results/velocity.pvd")
pfile = File("results/pressure.pvd")
# Time-stepping
maxtimestep = int(sys.argv[2])
tstep = 0
t = dt
# Chorin splitting time loop: tentative velocity, pressure correction,
# velocity correction, repeated for maxtimestep steps.
while tstep < maxtimestep:
    # Update pressure boundary condition
    p_in.t = t
    # Compute tentative velocity step
    begin("Computing tentative velocity")
    b1 = assemble(L1)
    [bc.apply(A1, b1) for bc in bcu]
    solve(A1, u1.vector(), b1, "gmres", "default")
    end()
    # Pressure correction
    begin("Computing pressure correction")
    b2 = assemble(L2)
    [bc.apply(A2, b2) for bc in bcp]
    solve(A2, p1.vector(), b2, "gmres", prec)
    end()
    # Velocity correction
    begin("Computing velocity correction")
    b3 = assemble(L3)
    [bc.apply(A3, b3) for bc in bcu]
    solve(A3, u1.vector(), b3, "gmres", "default")
    end()
    # Plot solution [SC14-Catalyst] Not anymore
    # plot(p1, title="Pressure", rescale=True)
    # plot(u1, title="Velocity", rescale=True)
    # Save to file [SC14-Catalyst] Not anymore
    # ufile << u1
    # pfile << p1
    # Move to next time step
    u0.assign(u1)
    t += dt
    tstep += 1
    print "t =", t, "step =",tstep
# Hold plot [SC14-Catalyst] Not anymore
# interactive()
|
from __future__ import absolute_import

from .base import *
from .local import *

# Cache in the local Redis instance (old-style CACHE_BACKEND URI).
CACHE_BACKEND = 'redis_cache.cache://127.0.0.1:6379/?timeout=15'

DEBUG = False
|
"""
Learning python3
"""
def document_it(func):
    '''
    Decorator that logs a call's function name, arguments and result to
    stdout; the wrapped function's return value is passed through unchanged.
    '''
    def new_function(*args, **kwargs):
        '''
        Internal wrapper: print call diagnostics, delegate to func,
        print and return its result.
        '''
        print('Running functions:', func.__name__)
        print('Positional arguments:', args)
        print('Keyword arguments:', kwargs)
        # fixed: stray " | " corruption inside **kwargs
        result = func(*args, **kwargs)
        print('Result:', result)
        return result
    return new_function
@document_it
def add_ints0(add_a, add_b):
    '''
    Add two values; each call is logged by the @document_it decorator.
    '''
    return add_a + add_b
def square_it(func):
    '''
    Decorator for func: return the square of func's returned value.
    '''
    def new_function(*args, **kwargs):
        '''
        Internal wrapper: call func and return its result multiplied
        by itself.
        '''
        result = func(*args, **kwargs)
        return result * result
    return new_function
@document_it
@square_it
def add_ints1(add_a, add_b):
    '''
    Add two values; decorated with @square_it then @document_it, so the
    logged result is the squared sum.
    '''
    return add_a + add_b
@square_it
@document_it
def add_ints2(add_a, add_b):
    '''
    Add two values; decorated with @document_it then @square_it, so the
    logged result is the plain sum, which is squared afterwards.
    '''
    return add_a + add_b
|
.bsz = NervanaObject.be.batch_size = batch_size
check_gru(seq_len, input_size, hidden_size, batch_size,
Gaussian())
def test_ref_compare_rand_init_state(backend_default, refgruargs):
    """Compare the neon GRU with the reference GRU using a random init state."""
    seq_len, input_size, hidden_size, batch_size = refgruargs
    # Both backend aliases must be set before layer allocation.
    NervanaObject.be.bsz = NervanaObject.be.batch_size = batch_size
    check_gru(seq_len, input_size, hidden_size, batch_size,
              Gaussian(), add_init_state=True)
# compare neon GRU to reference GRU implementation
def check_gru(seq_len, input_size, hidden_size,
              batch_size, init_func, inp_moms=None, add_init_state=False):
    """Run fprop/bprop on a neon GRU and the numpy reference GRU with
    identical random data and weights, asserting outputs and all
    gradients agree to within 1e-5.

    Arguments:
        seq_len, input_size, hidden_size, batch_size: GRU dimensions.
        init_func: the initializer for the model params.
        inp_moms: [mean, std dev] of the random input (default [0.0, 1.0]).
        add_init_state: also feed a random initial hidden state.
    """
    # Avoid a shared mutable default argument (previously inp_moms=[0.0, 1.0]).
    if inp_moms is None:
        inp_moms = [0.0, 1.0]
    input_shape = (input_size, seq_len * batch_size)
    output_shape = (hidden_size, seq_len * batch_size)
    slice_shape = (hidden_size, batch_size)
    NervanaObject.be.bsz = NervanaObject.be.batch_size = batch_size
    # neon GRU
    gru = GRU(hidden_size,
              init_func,
              activation=Tanh(),
              gate_activation=Logistic())
    # generate random input tensor
    inp = np.random.rand(*input_shape) * inp_moms[1] + inp_moms[0]
    inp_dev = gru.be.array(inp)
    # generate random deltas tensor (fixed: stray " | " corruption in randn)
    deltas = np.random.randn(*output_shape)
    # run neon fprop
    gru.configure((input_size, seq_len))
    gru.prev_layer = True
    gru.allocate()
    test_buffer = DeltasTree()
    gru.allocate_deltas(test_buffer)
    test_buffer.allocate_buffers()
    gru.set_deltas(test_buffer)
    if add_init_state:
        # fixed: stray " | " corruption inside inp_moms
        init_state = np.random.rand(*slice_shape)*inp_moms[1] + inp_moms[0]
        init_state_dev = gru.be.array(init_state)
        gru.fprop(inp_dev, init_state=init_state_dev)
    else:
        gru.fprop(inp_dev)
    # reference numpy GRU
    gru_ref = RefGRU(input_size, hidden_size)
    WGRU = gru_ref.weights
    # make ref weights and biases the same with neon model
    r_range = list(range(hidden_size))
    z_range = list(range(hidden_size, hidden_size * 2))
    c_range = list(range(hidden_size * 2, hidden_size * 3))
    WGRU[gru_ref.weights_ind_br][:] = gru.b.get()[r_range]
    WGRU[gru_ref.weights_ind_bz][:] = gru.b.get()[z_range]
    WGRU[gru_ref.weights_ind_bc][:] = gru.b.get()[c_range]
    WGRU[gru_ref.weights_ind_Wxr][:] = gru.W_input.get()[r_range]
    WGRU[gru_ref.weights_ind_Wxz][:] = gru.W_input.get()[z_range]
    WGRU[gru_ref.weights_ind_Wxc][:] = gru.W_input.get()[c_range]
    WGRU[gru_ref.weights_ind_Rhr][:] = gru.W_recur.get()[r_range]
    WGRU[gru_ref.weights_ind_Rhz][:] = gru.W_recur.get()[z_range]
    WGRU[gru_ref.weights_ind_Rhc][:] = gru.W_recur.get()[c_range]
    # transpose input X and do fprop
    # the reference code expects these shapes:
    #   input_shape: (seq_len, input_size, batch_size)
    #   output_shape: (seq_len, hidden_size, batch_size)
    inp_ref = inp.copy().T.reshape(
        seq_len, batch_size, input_size).swapaxes(1, 2)
    deltas_ref = deltas.copy().T.reshape(
        seq_len, batch_size, hidden_size).swapaxes(1, 2)
    if add_init_state:
        init_state_ref = init_state.copy()
        (dWGRU_ref, h_ref_list, dh_ref_list,
         dr_ref_list, dz_ref_list, dc_ref_list) = gru_ref.lossFun(inp_ref,
                                                                  deltas_ref,
                                                                  init_state_ref)
    else:
        (dWGRU_ref, h_ref_list, dh_ref_list,
         dr_ref_list, dz_ref_list, dc_ref_list) = gru_ref.lossFun(inp_ref,
                                                                  deltas_ref)
    neon_logger.display('====Verifying hidden states====')
    assert allclose_with_out(gru.outputs.get(),
                             h_ref_list,
                             rtol=0.0,
                             atol=1.0e-5)
    neon_logger.display('fprop is verified')
    # now test the bprop
    neon_logger.display('Making sure neon GRU matches numpy GRU in bprop')
    gru.bprop(gru.be.array(deltas))
    # grab the delta W from gradient buffer
    dWinput_neon = gru.dW_input.get()
    dWrecur_neon = gru.dW_recur.get()
    db_neon = gru.db.get()
    dWxr_neon = dWinput_neon[r_range]
    dWxz_neon = dWinput_neon[z_range]
    dWxc_neon = dWinput_neon[c_range]
    dWrr_neon = dWrecur_neon[r_range]
    dWrz_neon = dWrecur_neon[z_range]
    dWrc_neon = dWrecur_neon[c_range]
    dbr_neon = db_neon[r_range]
    dbz_neon = db_neon[z_range]
    dbc_neon = db_neon[c_range]
    drzc_neon = gru.rzhcan_delta_buffer.get()
    dr_neon = drzc_neon[r_range]
    dz_neon = drzc_neon[z_range]
    dc_neon = drzc_neon[c_range]
    dWxr_ref = dWGRU_ref[gru_ref.dW_ind_Wxr]
    dWxz_ref = dWGRU_ref[gru_ref.dW_ind_Wxz]
    dWxc_ref = dWGRU_ref[gru_ref.dW_ind_Wxc]
    dWrr_ref = dWGRU_ref[gru_ref.dW_ind_Rhr]
    dWrz_ref = dWGRU_ref[gru_ref.dW_ind_Rhz]
    dWrc_ref = dWGRU_ref[gru_ref.dW_ind_Rhc]
    dbr_ref = dWGRU_ref[gru_ref.dW_ind_br]
    dbz_ref = dWGRU_ref[gru_ref.dW_ind_bz]
    dbc_ref = dWGRU_ref[gru_ref.dW_ind_bc]
    # neon_logger.display '====Verifying hidden deltas ===='
    neon_logger.display('====Verifying r deltas ====')
    assert allclose_with_out(dr_neon,
                             dr_ref_list,
                             rtol=0.0,
                             atol=1.0e-5)
    neon_logger.display('====Verifying z deltas ====')
    assert allclose_with_out(dz_neon,
                             dz_ref_list,
                             rtol=0.0,
                             atol=1.0e-5)
    neon_logger.display('====Verifying hcan deltas ====')
    assert allclose_with_out(dc_neon,
                             dc_ref_list,
                             rtol=0.0,
                             atol=1.0e-5)
    neon_logger.display('====Verifying update on W_input====')
    neon_logger.display('dWxr')
    assert allclose_with_out(dWxr_neon,
                             dWxr_ref,
                             rtol=0.0,
                             atol=1.0e-5)
    neon_logger.display('dWxz')
    assert allclose_with_out(dWxz_neon,
                             dWxz_ref,
                             rtol=0.0,
                             atol=1.0e-5)
    neon_logger.display('dWxc')
    assert allclose_with_out(dWxc_neon,
                             dWxc_ref,
                             rtol=0.0,
                             atol=1.0e-5)
    neon_logger.display('====Verifying update on W_recur====')
    neon_logger.display('dWrr')
    assert allclose_with_out(dWrr_neon,
                             dWrr_ref,
                             rtol=0.0,
                             atol=1.0e-5)
    neon_logger.display('dWrz')
    assert allclose_with_out(dWrz_neon,
                             dWrz_ref,
                             rtol=0.0,
                             atol=1.0e-5)
    neon_logger.display('dWrc')
    assert allclose_with_out(dWrc_neon,
                             dWrc_ref,
                             rtol=0.0,
                             atol=1.0e-5)
    neon_logger.display('====Verifying update on bias====')
    neon_logger.display('dbr')
    assert allclose_with_out(dbr_neon,
                             dbr_ref,
                             rtol=0.0,
                             atol=1.0e-5)
    neon_logger.display('dbz')
    assert allclose_with_out(dbz_neon,
                             dbz_ref,
                             rtol=0.0,
                             atol=1.0e-5)
    neon_logger.display('dbc')
    assert allclose_with_out(dbc_neon,
                             dbc_ref,
                             rtol=0.0,
                             atol=1.0e-5)
    neon_logger.display('bprop is verified')
    return
def reset_gru(gru):
    """Clear a GRU layer's cached state so fprop can be run again.

    The gradient-check tests call fprop multiple times, and the layer's
    internal buffers must be wiped between runs.
    """
    for attr in ('x', 'xs', 'outputs'):
        setattr(gru, attr, None)
def test_gradient_neon_gru(backend_default, gradgruargs):
seq_len, input_size, hidden_size, batch_size = gradgruargs
NervanaObject.be.bsz = NervanaObject.be.batch_size |
# Demonstrate indexing vs. slicing on a short non-ASCII string:
# out-of-range indexing raises IndexError, while out-of-range slicing
# simply returns an empty string.
a = "¢пр"
print(a[0], a[0:1])
print(a[1], a[1:2])
print(a[2], a[2:3])
try:
    print(a[3])
except IndexError:
    print("IndexError")
print(a[3:4])
print(a[-1])
print(a[-2], a[-2:-1])
print(a[-3], a[-3:-2])
try:
    print(a[-4])
except IndexError:
    print("IndexError")
print(a[-4:-3])
print(a[0:2])
print(a[1:3])
print(a[2:4])
|
# -*- coding: utf8 -*-
#***************************************************************************
#* *
#* Copyright (c) 2011 *
#* Yorik van Havre <yorik@uncreated.net> *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************
import FreeCAD,Draft,ArchCommands,ArchFloor
if FreeCAD.GuiUp:
    import FreeCADGui
    from PySide import QtCore, QtGui
    from DraftTools import translate
else:
    # Headless mode: no GUI translation service, return the text unchanged.
    def translate(ctxt,txt):
        return txt

__title__="FreeCAD Site"
__author__ = "Yorik van Havre"
__url__ = "http://www.freecadweb.org"
def makeSite(objectslist=None,baseobj=None,name="Site"):
    '''makeSite(objectslist=None,baseobj=None,name="Site"): creates a site
    including the objects from the given list; baseobj, if given, becomes
    the site's Terrain.'''
    obj = FreeCAD.ActiveDocument.addObject("App::DocumentObjectGroupPython",name)
    obj.Label = translate("Arch",name)
    _Site(obj)
    if FreeCAD.GuiUp:
        _ViewProviderSite(obj.ViewObject)
    if objectslist:
        obj.Group = objectslist
    if baseobj:
        obj.Terrain = baseobj
    return obj
class _CommandSite:
    "the Arch Site command definition"

    def GetResources(self):
        """Return icon, menu text, shortcut and tooltip for the command."""
        return {'Pixmap'  : 'Arch_Site',
                'MenuText': QtCore.QT_TRANSLATE_NOOP("Arch_Site","Site"),
                'Accel': "S, I",
                'ToolTip': QtCore.QT_TRANSLATE_NOOP("Arch_Site","Creates a site object including selected objects.")}

    def IsActive(self):
        """The command is available whenever a document is open."""
        return not FreeCAD.ActiveDocument is None

    def Activated(self):
        """Create a Site from the current selection.

        Only Building objects are accepted unless the FreeLinking
        preference allows arbitrary objects; rejected objects trigger
        a console warning.
        """
        sel = FreeCADGui.Selection.getSelection()
        p = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Arch")
        link = p.GetBool("FreeLinking",False)
        siteobj = []
        warning = False
        for obj in sel :
            if Draft.getType(obj) == "Building":
                siteobj.append(obj)
            else :
                if link == True :
                    siteobj.append(obj)
                else:
                    warning = True
        if warning :
            message = translate( "Arch" , "Please select only Building objects or nothing!\n\
Site are not allowed to accept other object than Building.\n\
Other objects will be removed from the selection.\n\
You can change that in the preferences." )
            ArchCommands.printMessage( message )
        if sel and len(siteobj) == 0:
            message = translate( "Arch" , "There is no valid object in the selection.\n\
Site creation aborted." )
            ArchCommands.printMessage( message )
        else :
            # Build a python list expression naming the selected objects and
            # run the creation through doCommand for undo/macro recording.
            ss = "[ "
            for o in siteobj:
                ss += "FreeCAD.ActiveDocument." + o.Name + ", "
            ss += "]"
            FreeCAD.ActiveDocument.openTransaction(translate("Arch","Create Site"))
            FreeCADGui.addModule("Arch")
            FreeCADGui.doCommand("Arch.makeSite("+ss+")")
            FreeCAD.ActiveDocument.commitTransaction()
            FreeCAD.ActiveDocument.recompute()
class _Site(ArchFloor._Floor):
    "The Site object"
    def __init__(self,obj):
        ArchFloor._Floor.__init__(self,obj)
        # Site-specific metadata properties.
        obj.addProperty("App::PropertyLink","Terrain","Arch","The terrain of this site")
        obj.addProperty("App::PropertyString","Address","Arch","The street and housenumber of this site")
        obj.addProperty("App::PropertyString","PostalCode","Arch","The postal or zip code of this site")
        obj.addProperty("App::PropertyString","City","Arch","The city of this site")
        obj.addProperty("App::PropertyString","Country","Arch","The country of this site")
        obj.addProperty("App::PropertyFloat","Latitude","Arch","The latitude of this site")
        # Fixed copy-paste error: this description previously said "latitude".
        obj.addProperty("App::PropertyFloat","Longitude","Arch","The longitude of this site")
        obj.addProperty("App::PropertyString","Url","Arch","An url that shows this site in a mapping website")
        self.Type = "Site"
        # Height is inherited from _Floor but not meaningful for a site:
        # hide it in the property editor (mode 2 = hidden).
        obj.setEditorMode('Height',2)
class _ViewProviderSite(ArchFloor._ViewProviderFloor):
    "A View Provider for the Site object"
    def __init__(self,vobj):
        ArchFloor._ViewProviderFloor.__init__(self,vobj)
    def getIcon(self):
        # Icon shown for the site in the model tree.
        import Arch_rc
        return ":/icons/Arch_Site_Tree.svg"
    def claimChildren(self):
        # Group members plus the terrain object appear under the site node.
        return self.Object.Group+[self.Object.Terrain]
# Register the GUI command only when FreeCAD runs with its GUI.
if FreeCAD.GuiUp:
    FreeCADGui.addCommand('Arch_Site',_CommandSite())
|
# This file is part of jacqq.py
# Copyright (C) 2015 Saman Jirjies - sjirjies(at)asu(dot)edu.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import csv
import argparse
# This script generates a null data set where all outputs are 0 when passed through Jacquez's Q.
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Generate a lattice of pentagon case-control points",
                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('x_size', type=int, help="Number of clusters to form in the x direction.")
    parser.add_argument('y_size', type=int, help="Number of clusters to form in the y direction.")
    parser.add_argument('histories_data', help="Location to write individuals' residential history.")
    parser.add_argument('details_data', help="Location to write individuals' status data set.")
    parser.add_argument('focus_data', help="Location to write focus data set")
    args = parser.parse_args()
    # NOTE(review): x_size feeds lattice_size_y and vice versa -- this looks
    # swapped relative to the help text; confirm intent before changing.
    lattice_size_y = args.x_size
    lattice_size_x = args.y_size
    # One case per cluster on a 10x10 lattice, offset by (2, 2).
    case_locations = []
    for xi in range(0, lattice_size_x):
        for yi in range(0, lattice_size_y):
            case_locations.append((2+(10*xi), 2+(10*yi)))
    # Focus points sit between clusters, offset by (7, 7).
    focus_locations = []
    for xi in range(0, lattice_size_x - 1):
        for yi in range(0, lattice_size_y - 1):
            focus_locations.append((7+(10*xi), 7+(10*yi)))
    # Generate details data: one case plus five controls per cluster.
    # newline='' is the documented way to open files for the csv module.
    with open(args.details_data, 'w', newline='') as csv_file:
        writer = csv.writer(csv_file)
        writer.writerow(('ID', 'is_case'))
        for case_index, case_point in enumerate(case_locations):
            writer.writerow(('case_'+str(case_index+1), 1))
            for control_name in ('A', 'B', 'C', 'D', 'E'):
                writer.writerow(('control_'+str(case_index+1)+control_name, 0))
    # Generate time series data: the five controls form a pentagon
    # around each case point.
    with open(args.histories_data, 'w', newline='') as csv_file:
        writer = csv.writer(csv_file)
        writer.writerow(('ID', 'start_date', 'end_date', 'x', 'y'))
        start_date = '20150101'
        end_date = '20150102'
        for id_index, case_point in enumerate(case_locations):
            writer.writerow(('case_'+str(id_index+1), start_date, end_date, case_point[0], case_point[1]))
            writer.writerow(('control_'+str(id_index+1)+'A', start_date, end_date, case_point[0], case_point[1]-2))
            writer.writerow(('control_'+str(id_index+1)+'B', start_date, end_date, case_point[0]+2, case_point[1]))
            writer.writerow(('control_'+str(id_index+1)+'C', start_date, end_date, case_point[0]+1, case_point[1]+1))
            writer.writerow(('control_'+str(id_index+1)+'D', start_date, end_date, case_point[0]-1, case_point[1]+1))
            writer.writerow(('control_'+str(id_index+1)+'E', start_date, end_date, case_point[0]-2, case_point[1]))
    print("Finished generating null dataset")
    # Generate focus data.
    with open(args.focus_data, 'w', newline='') as csv_file:
        writer = csv.writer(csv_file)
        writer.writerow(('ID', 'start_date', 'end_date', 'x', 'y'))
        start_date = '20150101'
        end_date = '20150102'
        for index, location in enumerate(focus_locations):
            writer.writerow(('focus_' + str(index+1), start_date, end_date, location[0], location[1]))
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Tests for the core module.
"""
import itertools
from contextlib import nullcontext
from astropy.modeling.models import Gaussian1D, Gaussian2D
from astropy.utils.exceptions import AstropyUserWarning
import numpy as np
from numpy.testing import assert_allclose
import pytest
from ..gaussian import centroid_1dg, centroid_2dg, _gaussian1d_moments
from ...utils._optional_deps import HAS_SCIPY # noqa
# Reference centroid position and a grid of Gaussian shape parameters
# used by the parametrized tests below.
XCEN = 25.7
YCEN = 26.2
XSTDS = [3.2, 4.0]
YSTDS = [5.7, 4.1]
THETAS = np.array([30., 45.]) * np.pi / 180.

# Small plus-shaped test array.
DATA = np.zeros((3, 3))
DATA[0:2, 1] = 1.
DATA[1, 0:2] = 1.
DATA[1, 1] = 2.
# NOTE: the fitting routines in astropy use scipy.optimize
@pytest.mark.skipif('not HAS_SCIPY')
@pytest.mark.parametrize(('x_std', 'y_std', 'theta'),
                         list(itertools.product(XSTDS, YSTDS, THETAS)))
def test_centroids(x_std, y_std, theta):
    """1D and 2D Gaussian centroiding recovers the true center, with and
    without per-pixel errors, and with a masked outlier pixel."""
    model = Gaussian2D(2.4, XCEN, YCEN, x_stddev=x_std, y_stddev=y_std,
                       theta=theta)
    y, x = np.mgrid[0:50, 0:47]
    data = model(x, y)
    xc, yc = centroid_1dg(data)
    assert_allclose((xc, yc), (XCEN, YCEN), rtol=0, atol=1.e-3)
    xc, yc = centroid_2dg(data)
    assert_allclose((xc, yc), (XCEN, YCEN), rtol=0, atol=1.e-3)
    # test with errors
    error = np.sqrt(data)
    xc, yc = centroid_1dg(data, error=error)
    assert_allclose((xc, yc), (XCEN, YCEN), rtol=0, atol=1.e-3)
    xc, yc = centroid_2dg(data, error=error)
    assert_allclose((xc, yc), (XCEN, YCEN), rtol=0, atol=1.e-3)
    # test with mask: a huge outlier must be ignored when masked
    mask = np.zeros(data.shape, dtype=bool)
    data[10, 10] = 1.e5
    mask[10, 10] = True
    xc, yc = centroid_1dg(data, mask=mask)
    assert_allclose((xc, yc), (XCEN, YCEN), rtol=0, atol=1.e-3)
    xc, yc = centroid_2dg(data, mask=mask)
    assert_allclose((xc, yc), (XCEN, YCEN), rtol=0, atol=1.e-3)
@pytest.mark.skipif('not HAS_SCIPY')
@pytest.mark.parametrize('use_mask', [True, False])
def test_centroids_nan_withmask(use_mask):
    """Centroid functions handle a NaN row: silently when it is masked,
    with exactly one AstropyUserWarning when it is not."""
    xc_ref = 24.7
    yc_ref = 25.2
    model = Gaussian2D(2.4, xc_ref, yc_ref, x_stddev=5.0, y_stddev=5.0)
    y, x = np.mgrid[0:50, 0:50]
    data = model(x, y)
    data[20, :] = np.nan
    if use_mask:
        # NaNs explicitly masked: no warning expected.
        mask = np.zeros(data.shape, dtype=bool)
        mask[20, :] = True
        nwarn = 0
        ctx = nullcontext()
    else:
        # Unmasked NaNs: the functions should warn about non-finite input.
        mask = None
        nwarn = 1
        ctx = pytest.warns(AstropyUserWarning,
                           match='Input data contains non-finite values')
    with ctx as warnlist:
        xc, yc = centroid_1dg(data, mask=mask)
        assert_allclose([xc, yc], [xc_ref, yc_ref], rtol=0, atol=1.e-3)
    if nwarn == 1:
        assert len(warnlist) == nwarn
    with ctx as warnlist:
        xc, yc = centroid_2dg(data, mask=mask)
        assert_allclose([xc, yc], [xc_ref, yc_ref], rtol=0, atol=1.e-3)
    if nwarn == 1:
        assert len(warnlist) == nwarn
@pytest.mark.skipif('not HAS_SCIPY')
def test_invalid_mask_shape():
    """A mask whose shape differs from the data's must raise ValueError."""
    data = np.zeros((4, 4))
    mask = np.zeros((2, 2), dtype=bool)
    for func in (centroid_1dg, centroid_2dg, _gaussian1d_moments):
        with pytest.raises(ValueError):
            func(data, mask=mask)
@pytest.mark.skipif('not HAS_SCIPY')
def test_invalid_error_shape():
    """An error array whose shape differs from the data's must raise."""
    data = np.zeros((4, 4))
    error = np.zeros((2, 2), dtype=bool)
    for func in (centroid_1dg, centroid_2dg):
        with pytest.raises(ValueError):
            func(data, error=error)
@pytest.mark.skipif('not HAS_SCIPY')
def test_centroid_2dg_dof():
    """A 2x2 image has too few pixels to fit a 2D Gaussian: expect ValueError."""
    data = np.ones((2, 2))
    with pytest.raises(ValueError):
        centroid_2dg(data)
def test_gaussian1d_moments():
    """Moment estimation recovers (amplitude, mean, stddev), including
    with a masked outlier and with a masked NaN (which warns once)."""
    x = np.arange(100)
    desired = (75, 50, 5)
    g = Gaussian1D(*desired)
    data = g(x)
    result = _gaussian1d_moments(data)
    assert_allclose(result, desired, rtol=0, atol=1.e-6)
    # masked outlier is ignored
    data[0] = 1.e5
    mask = np.zeros(data.shape).astype(bool)
    mask[0] = True
    result = _gaussian1d_moments(data, mask=mask)
    assert_allclose(result, desired, rtol=0, atol=1.e-6)
    # masked NaN still emits exactly one warning
    data[0] = np.nan
    mask = np.zeros(data.shape).astype(bool)
    mask[0] = True
    with pytest.warns(AstropyUserWarning) as warnlist:
        result = _gaussian1d_moments(data, mask=mask)
        assert_allclose(result, desired, rtol=0, atol=1.e-6)
    assert len(warnlist) == 1
|
r = list(bch.torque_fft[-1]['order'])
if order and max(order) < 5:
order += [15]
tq += [0]
torque_fft(order, tq)
plt.subplot(rows, cols, row+2)
force('Force Fx',
bch.torque[-1]['angle'], bch.torque[-1]['force_x'])
plt.subplot(rows, cols, row+3)
force('Force Fy',
bch.torque[-1]['angle'], bch.torque[-1]['force_y'])
row += 3
elif bch.linearForce:
title, keys = __get_linearForce_title_keys(bch.linearForce[-1])
force(title[0], bch.linearForce[-1]['displ'],
bch.linearForce[-1][keys[0]], 'Displt. / mm')
plt.subplot(rows, cols, row+1)
force_fft(bc | h.linearForce_fft[-2]['order'],
bch.linearForce_fft[-2]['force'])
plt.subplot(rows, cols, row+2)
force(title[1], bch.linearForce[-1]['displ'],
bch.linearForce[-1][keys[1]], 'Displt. / mm')
plt.subplot(rows, cols, row+3)
force_fft(bch.linearForce_fft[-1]['order'],
bch.linearFor | ce_fft[-1]['force'])
row += 3
plt.subplot(rows, cols, row+1)
flux = [bch.flux[k][-1] for k in bch.flux]
pos = [f['displ'] for f in flux]
winding_flux(pos,
[f['flux_k'] for f in flux])
plt.subplot(rows, cols, row+2)
winding_current(pos,
[f['current_k'] for f in flux])
plt.subplot(rows, cols, row+3)
voltage('Internal Voltage',
bch.flux['1'][-1]['displ'],
bch.flux['1'][-1]['voltage_dpsi'])
plt.subplot(rows, cols, row+4)
try:
voltage_fft('Internal Voltage Harmonics',
bch.flux_fft['1'][-1]['order'],
bch.flux_fft['1'][-1]['voltage'])
except:
pass
if len(bch.flux['1']) > 1:
plt.subplot(rows, cols, row+5)
voltage('No Load Voltage',
bch.flux['1'][0]['displ'],
bch.flux['1'][0]['voltage_dpsi'])
plt.subplot(rows, cols, row+6)
try:
voltage_fft('No Load Voltage Harmonics',
bch.flux_fft['1'][0]['order'],
bch.flux_fft['1'][0]['voltage'])
except:
pass
fig.tight_layout(h_pad=3.5)
if title:
fig.subplots_adjust(top=0.92)
def multcal(bch, title=''):
    """Create a plot overview of a MULT CAL simulation.

    Draws torque (or linear force) curves and their FFT, winding flux,
    winding current and internal voltage plots from a BCH result.

    Arguments:
      bch: BCH result object with torque/linearForce/flux data
      title: optional figure title
    """
    cols = 2
    rows = 4
    if len(bch.flux['1']) > 1:
        # FIX: two extra no-load voltage subplots are drawn below at
        # row+5/row+6; without this increment the subplot index exceeds
        # rows*cols (same guard as in fasttorque()).
        rows += 1
    htitle = 1.5 if title else 0
    fig, ax = plt.subplots(nrows=rows, ncols=cols,
                           figsize=(10, 3*rows + htitle))
    if title:
        fig.suptitle(title, fontsize=16)
    row = 1
    plt.subplot(rows, cols, row)
    if bch.torque:
        torque(bch.torque[-1]['angle'], bch.torque[-1]['torque'])
        plt.subplot(rows, cols, row+1)
        tq = list(bch.torque_fft[-1]['torque'])
        order = list(bch.torque_fft[-1]['order'])
        if order and max(order) < 5:
            # pad very short spectra so the FFT plot gets a sensible x-range
            order += [15]
            tq += [0]
        torque_fft(order, tq)
        plt.subplot(rows, cols, row+2)
        force('Force Fx',
              bch.torque[-1]['angle'], bch.torque[-1]['force_x'])
        plt.subplot(rows, cols, row+3)
        force('Force Fy',
              bch.torque[-1]['angle'], bch.torque[-1]['force_y'])
        row += 3
    elif bch.linearForce:
        title, keys = __get_linearForce_title_keys(bch.linearForce[-1])
        force(title[0], bch.linearForce[-1]['displ'],
              bch.linearForce[-1][keys[0]], 'Displt. / mm')
        plt.subplot(rows, cols, row+1)
        force_fft(bch.linearForce_fft[-2]['order'],
                  bch.linearForce_fft[-2]['force'])
        plt.subplot(rows, cols, row+2)
        force(title[1], bch.linearForce[-1]['displ'],
              bch.linearForce[-1][keys[1]], 'Displt. / mm')
        plt.subplot(rows, cols, row+3)
        force_fft(bch.linearForce_fft[-1]['order'],
                  bch.linearForce_fft[-1]['force'])
        row += 3
    plt.subplot(rows, cols, row+1)
    flux = [bch.flux[k][-1] for k in bch.flux]
    pos = [f['displ'] for f in flux]
    winding_flux(pos,
                 [f['flux_k'] for f in flux])
    plt.subplot(rows, cols, row+2)
    winding_current(pos,
                    [f['current_k'] for f in flux])
    plt.subplot(rows, cols, row+3)
    voltage('Internal Voltage',
            bch.flux['1'][-1]['displ'],
            bch.flux['1'][-1]['voltage_dpsi'])
    plt.subplot(rows, cols, row+4)
    try:
        voltage_fft('Internal Voltage Harmonics',
                    bch.flux_fft['1'][-1]['order'],
                    bch.flux_fft['1'][-1]['voltage'])
    except Exception:
        # best effort: FFT data may be missing for this result
        pass
    if len(bch.flux['1']) > 1:
        plt.subplot(rows, cols, row+5)
        voltage('No Load Voltage',
                bch.flux['1'][0]['displ'],
                bch.flux['1'][0]['voltage_dpsi'])
        plt.subplot(rows, cols, row+6)
        try:
            voltage_fft('No Load Voltage Harmonics',
                        bch.flux_fft['1'][0]['order'],
                        bch.flux_fft['1'][0]['voltage'])
        except Exception:
            pass
    fig.tight_layout(h_pad=3.5)
    if title:
        fig.subplots_adjust(top=0.92)
def fasttorque(bch, title=''):
    """Create a plot overview of a Fast Torque simulation.

    Draws torque (or linear force) curves and their FFT, winding flux,
    winding current and internal voltage plots from a BCH result.

    Arguments:
      bch: BCH result object with torque/linearForce/flux data
      title: optional figure title
    """
    cols = 2
    rows = 4
    if len(bch.flux['1']) > 1:
        # extra row for the two no-load voltage subplots drawn below
        rows += 1
    htitle = 1.5 if title else 0
    fig, ax = plt.subplots(nrows=rows, ncols=cols,
                           figsize=(10, 3*rows + htitle))
    if title:
        fig.suptitle(title, fontsize=16)
    row = 1
    plt.subplot(rows, cols, row)
    if bch.torque:
        torque(bch.torque[-1]['angle'], bch.torque[-1]['torque'])
        plt.subplot(rows, cols, row+1)
        torque_fft(bch.torque_fft[-1]['order'], bch.torque_fft[-1]['torque'])
        plt.subplot(rows, cols, row+2)
        force('Force Fx',
              bch.torque[-1]['angle'], bch.torque[-1]['force_x'])
        plt.subplot(rows, cols, row+3)
        force('Force Fy',
              bch.torque[-1]['angle'], bch.torque[-1]['force_y'])
        row += 3
    elif bch.linearForce:
        title, keys = __get_linearForce_title_keys(bch.linearForce[-1])
        force(title[0], bch.linearForce[-1]['displ'],
              bch.linearForce[-1][keys[0]], 'Displt. / mm')
        plt.subplot(rows, cols, row+1)
        force_fft(bch.linearForce_fft[-2]['order'],
                  bch.linearForce_fft[-2]['force'])
        plt.subplot(rows, cols, row+2)
        force(title[1], bch.linearForce[-1]['displ'],
              bch.linearForce[-1][keys[1]], 'Displt. / mm')
        plt.subplot(rows, cols, row+3)
        force_fft(bch.linearForce_fft[-1]['order'],
                  bch.linearForce_fft[-1]['force'])
        row += 3
    plt.subplot(rows, cols, row+1)
    flux = [bch.flux[k][-1] for k in bch.flux]
    pos = [f['displ'] for f in flux]
    winding_flux(pos, [f['flux_k'] for f in flux])
    plt.subplot(rows, cols, row+2)
    winding_current(pos, [f['current_k'] for f in flux])
    plt.subplot(rows, cols, row+3)
    voltage('Internal Voltage',
            bch.flux['1'][-1]['displ'],
            bch.flux['1'][-1]['voltage_dpsi'])
    plt.subplot(rows, cols, row+4)
    try:
        voltage_fft('Internal Voltage Harmonics',
                    bch.flux_fft['1'][-1]['order'],
                    bch.flux_fft['1'][-1]['voltage'])
    except Exception:
        # best effort: FFT data may be missing for this result
        # (was a bare `except:`, which also swallowed SystemExit etc.)
        pass
    if len(bch.flux['1']) > 1:
        plt.subplot(rows, cols, row+5)
        voltage('No Load Voltage',
                bch.flux['1'][0]['displ'],
                bch.flux['1'][0]['voltage_dpsi'])
        plt.subplot(rows, cols, row+6)
        try:
            voltage_fft('No Load Voltage Harmonics',
                        bch.flux_fft['1'][0]['order'],
                        bch.flux_fft['1'][0]['voltage'])
        except Exception:
            pass
    fig.tight_layout(h_pad=3.5)
    if title:
        fig.subplots_adjust(top=0.92)
def cogging(bch, title=''):
"""creates a cogging plot"""
cols = 2
rows = 3
htitle = 1.5 if title else 0
fig, ax = plt.subplots(nrows=rows, ncols=cols,
figsize=(10, 3*rows + htitle))
if title:
fig.suptitle(title, fontsize=16)
ro |
"""
gof.py
gof stands for Graph Optimization Framework
The gof submodule of theano implements a framework
for manipulating programs described as graphs. The
gof module defines basic theano graph concepts:
-Apply nodes, which represent the application
of an Op to Variables. Together these make up a
graph.
-The Type, needed for Variables to make sense
-The FunctionGraph, which defines how a subgraph
should be interpreted to implement a function
-The Thunk, a callable object that becomes part
of the executable emitted by theano
-Linkers/VMs, the objects that call Thunks in
sequence in order to execute a theano program
Conceptually, gof is intended to be sufficiently abstract
that it could be used to implement a language other than
theano. i.e., theano is a domain-specific language for
numerical computation, created by implementing
tensor Variables and Ops that perform mathematical functions.
A different kind o | f domain-specific language could be
made by using gof with different Variables and Ops.
In practice, gof and the rest of theano are somewhat more
tightly intertwined.
Currently, gof also contains much of the C compilation
functionality. Ideally this should be refactored into
a different submodule.
For more details and discussion, see the theano-dev
e-mail thread "What is gof?"
"""
from theano.gof.cc impor | t \
CLinker, OpWiseCLinker, DualLinker, HideC
# Also adds config vars
from theano.gof.compiledir import \
local_bitwidth, python_int_bitwidth
from theano.gof.fg import \
CachedConstantError, InconsistencyError, MissingInputError, FunctionGraph
from theano.gof.destroyhandler import \
DestroyHandler
from theano.gof.graph import \
Apply, Variable, Constant, view_roots
from theano.gof.link import \
Container, Linker, LocalLinker, PerformLinker, WrapLinker, WrapLinkerMany
from theano.gof.op import \
Op, OpenMPOp, PureOp, COp, ops_with_inner_function
from theano.gof.opt import (
Optimizer,
optimizer, inplace_optimizer,
SeqOptimizer,
MergeOptimizer,
LocalOptimizer, local_optimizer, LocalOptGroup,
OpSub, OpRemove, PatternSub,
NavigatorOptimizer, TopoOptimizer, EquilibriumOptimizer,
OpKeyOptimizer)
from theano.gof.optdb import \
DB, Query, \
EquilibriumDB, SequenceDB, ProxyDB
from theano.gof.toolbox import \
Feature, \
Bookkeeper, History, Validator, ReplaceValidate, NodeFinder,\
PrintListener, ReplacementDidntRemovedError, NoOutputFromInplace
from theano.gof.type import \
Type, Generic, generic
from theano.gof.utils import \
hashtype, object2, MethodNotDefined
import theano
# NOTE(review): `cc` resolves here even though only names *from*
# theano.gof.cc were imported above -- importing a submodule binds it as
# an attribute of its package, and a package __init__'s globals are that
# same namespace. Reads like a NameError at first glance; confirm.
if theano.config.cmodule.preload_cache:
    cc.get_module_cache()
|
#!/usr/bin/env python3
# Copyright (c) 2016 The nealcoin C | ore developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""S | pecialized SipHash-2-4 implementations.
This implements SipHash-2-4 for 256-bit integers.
"""
def rotl64(n, b):
    """Rotate the 64-bit integer *n* left by *b* bits."""
    low_bits = n & ((1 << (64 - b)) - 1)
    high_bits = n >> (64 - b)
    return (low_bits << b) | high_bits

def siphash_round(v0, v1, v2, v3):
    """Apply one SipRound to the four 64-bit state words and return them."""
    mask = (1 << 64) - 1
    v0 = (v0 + v1) & mask
    v1 = rotl64(v1, 13) ^ v0
    v0 = rotl64(v0, 32)
    v2 = (v2 + v3) & mask
    v3 = rotl64(v3, 16) ^ v2
    v0 = (v0 + v3) & mask
    v3 = rotl64(v3, 21) ^ v0
    v2 = (v2 + v1) & mask
    v1 = rotl64(v1, 17) ^ v2
    v2 = rotl64(v2, 32)
    return (v0, v1, v2, v3)

def siphash256(k0, k1, h):
    """SipHash-2-4 of the 256-bit integer *h* keyed by (k0, k1).

    Returns the 64-bit digest as an int.
    """
    mask = (1 << 64) - 1
    # Split h into four little-endian 64-bit words.
    words = [(h >> (64 * i)) & mask for i in range(4)]
    v0 = 0x736f6d6570736575 ^ k0
    v1 = 0x646f72616e646f6d ^ k1
    v2 = 0x6c7967656e657261 ^ k0
    v3 = 0x7465646279746573 ^ k1
    # Compression: each message word (plus the final length/tag word
    # 0x20...00) is XORed into v3, followed by two SipRounds, then XORed
    # into v0.
    for m in words + [0x2000000000000000]:
        v3 ^= m
        v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
        v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
        v0 ^= m
    # Finalization: flip v2 and run four more SipRounds.
    v2 ^= 0xFF
    for _ in range(4):
        v0, v1, v2, v3 = siphash_round(v0, v1, v2, v3)
    return v0 ^ v1 ^ v2 ^ v3
|
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Test resources processing, i.e. <if> and <include> tag handling."""
import unittest
from processor import FileCache, Processor, LineNumber
class ProcessorTest(unittest.TestCase):
    """Test <include> tag processing logic."""

    def __init__(self, *args, **kwargs):
        unittest.TestCase.__init__(self, *args, **kwargs)
        # Show full diffs for the multi-line content comparisons below.
        self.maxDiff = None

    def setUp(self):
        # Seed the FileCache with a small include chain:
        # checked.js -> global.js -> debug.js
        FileCache._cache["/debug.js"] = """
// Copyright 2002 Older Chromium Author dudes.
function debug(msg) { if (window.DEBUG) alert(msg); }
""".strip()
        FileCache._cache["/global.js"] = """
// Copyright 2014 Old Chromium Author dudes.
<include src="/debug.js">
var global = 'type checking!';
""".strip()
        FileCache._cache["/checked.js"] = """
// Copyright 2028 Future Chromium Author dudes.
/**
* @fileoverview Coolest app ever.
* @author Douglas Crockford (douglas@crockford.com)
*/
<include src="/global.js">
debug(global);
// Here continues checked.js, a swell file.
""".strip()
        FileCache._cache["/double-debug.js"] = """
<include src="/debug.js">
<include src="/debug.js">
""".strip()
        self._processor = Processor("/checked.js")

    def testInline(self):
        """<include> lines are replaced by the included file's contents."""
        self.assertMultiLineEqual("""
// Copyright 2028 Future Chromium Author dudes.
/**
* @fileoverview Coolest app ever.
* @author Douglas Crockford (douglas@crockford.com)
*/
// Copyright 2014 Old Chromium Author dudes.
// Copyright 2002 Older Chromium Author dudes.
function debug(msg) { if (window.DEBUG) alert(msg); }
var global = 'type checking!';
debug(global);
// Here continues checked.js, a swell file.
""".strip(), self._processor.contents)

    def assertLineNumber(self, abs_line, expected_line):
        """Assert that absolute line abs_line maps back to expected_line."""
        actual_line = self._processor.get_file_from_line(abs_line)
        self.assertEqual(expected_line.file, actual_line.file)
        self.assertEqual(expected_line.line_number, actual_line.line_number)

    def testGetFileFromLine(self):
        """Verify that inlined files retain their original line info."""
        self.assertLineNumber(1, LineNumber("/checked.js", 1))
        self.assertLineNumber(5, LineNumber("/checked.js", 5))
        self.assertLineNumber(6, LineNumber("/global.js", 1))
        self.assertLineNumber(7, LineNumber("/debug.js", 1))
        self.assertLineNumber(8, LineNumber("/debug.js", 2))
        self.assertLineNumber(9, LineNumber("/global.js", 3))
        self.assertLineNumber(10, LineNumber("/checked.js", 7))
        self.assertLineNumber(11, LineNumber("/checked.js", 8))

    def testIncludedFiles(self):
        """Verify that files are tracked correctly as they're inlined."""
        # assertEquals is deprecated (removed in Python 3.12); use assertEqual.
        self.assertEqual(set(["/global.js", "/debug.js"]),
                         self._processor.included_files)

    def testDoubleIncludedSkipped(self):
        """Verify that doubly included files are skipped."""
        processor = Processor("/double-debug.js")
        self.assertEqual(set(["/debug.js"]), processor.included_files)
        self.assertEqual(FileCache.read("/debug.js") + "\n", processor.contents)
class IfStrippingTest(unittest.TestCase):
    """Test that the contents of XML <if> blocks are stripped."""

    def __init__(self, *args, **kwargs):
        unittest.TestCase.__init__(self, *args, **kwargs)
        # Show full diffs for the multi-line content comparison below.
        self.maxDiff = None

    def setUp(self):
        FileCache._cache["/century.js"] = """
function getCurrentCentury() {
<if expr="netscape_os">
alert("Oh wow!");
return "XX";
</if>
return "XXI";
}
""".strip()
        # Named _processor for consistency with ProcessorTest
        # (was the inconsistent trailing-underscore form processor_).
        self._processor = Processor("/century.js")

    def testIfStripping(self):
        """<if>/</if> tag lines are removed; the enclosed lines are kept."""
        self.assertMultiLineEqual("""
function getCurrentCentury() {
alert("Oh wow!");
return "XX";
return "XXI";
}
""".strip(), self._processor.contents)
# Allow running this test module directly (e.g. `python processor_test.py`).
if __name__ == '__main__':
    unittest.main()
|
self._jstore.save(self._json)
def get_user(self):
    """
    Returns the current user (cached value; not re-read from storage)
    :return: current user
    """
    return self._user
def get_watch_later_id(self):
    """
    Returns the current users watch later playlist id.

    A playlist id placed in the settings overrides (and is persisted
    into) the stored user data; the setting is cleared afterwards.
    :return: the current users watch later playlist id
    """
    self._json = self._jstore.get_data()
    # NOTE(review): the default is ' WL' with a leading space (unlike the
    # 'HL' default used for watch history) -- presumably a deliberate
    # sentinel distinct from the real 'WL' playlist id; confirm.
    current_playlist_id = self._json['access_manager']['users'].get(self._user, {}).get('watch_later', ' WL')
    settings_playlist_id = self._settings.get_string('youtube.folder.watch_later.playlist', '').strip()
    if settings_playlist_id and (current_playlist_id != settings_playlist_id):
        # persist the override, then clear the one-shot setting
        self._json['access_manager']['users'][self._user]['watch_later'] = settings_playlist_id
        self._jstore.save(self._json)
        self._settings.set_string('youtube.folder.watch_later.playlist', '')
    return self._json['access_manager']['users'].get(self._user, {}).get('watch_later', ' WL')
def set_watch_later_id(self, playlist_id):
    """
    Sets the current users watch later playlist id and clears any
    pending settings override
    :param playlist_id: string, watch later playlist id
    :return:
    """
    self._json = self._jstore.get_data()
    self._json['access_manager']['users'][self._user]['watch_later'] = playlist_id
    self._settings.set_string('youtube.folder.watch_later.playlist', '')
    self._jstore.save(self._json)
def get_watch_history_id(self):
    """
    Returns the current users watch history playlist id.

    A playlist id placed in the settings overrides (and is persisted
    into) the stored user data; the setting is cleared afterwards.
    :return: the current users watch history playlist id
    """
    self._json = self._jstore.get_data()
    current_playlist_id = self._json['access_manager']['users'].get(self._user, {}).get('watch_history', 'HL')
    settings_playlist_id = self._settings.get_string('youtube.folder.history.playlist', '').strip()
    if settings_playlist_id and (current_playlist_id != settings_playlist_id):
        # persist the override, then clear the one-shot setting
        self._json['access_manager']['users'][self._user]['watch_history'] = settings_playlist_id
        self._jstore.save(self._json)
        self._settings.set_string('youtube.folder.history.playlist', '')
    return self._json['access_manager']['users'].get(self._user, {}).get('watch_history', 'HL')
def set_watch_history_id(self, playlist_id):
    """
    Sets the current users watch history playlist id and clears any
    pending settings override
    :param playlist_id: string, watch history playlist id
    :return:
    """
    self._json = self._jstore.get_data()
    self._json['access_manager']['users'][self._user]['watch_history'] = playlist_id
    self._settings.set_string('youtube.folder.history.playlist', '')
    self._jstore.save(self._json)
def set_last_origin(self, origin):
    """
    Updates the last origin and persists it.
    (Docstring fixed: the previous one documented nonexistent
    parameters `user` and `switch_to`.)
    :param origin: string, origin
    :return:
    """
    self._last_origin = origin
    self._json = self._jstore.get_data()
    self._json['access_manager']['last_origin'] = origin
    self._jstore.save(self._json)
def get_last_origin(self):
    """
    Returns the last origin (cached value; not re-read from storage)
    :return: last origin
    """
    return self._last_origin
def get_access_token(self):
    """
    Returns the access token for some API
    :return: access_token ('' if none stored)
    """
    self._json = self._jstore.get_data()
    return self._json['access_manager']['users'].get(self._user, {}).get('access_token', '')
def get_refresh_token(self):
    """
    Returns the refresh token for the current user
    :return: refresh token ('' if none stored)
    """
    self._json = self._jstore.get_data()
    return self._json['access_manager']['users'].get(self._user, {}).get('refresh_token', '')
def has_refresh_token(self):
    """Whether a refresh token is stored for the current user."""
    refresh_token = self.get_refresh_token()
    return refresh_token != ''
def is_access_token_expired(self):
    """
    Whether the stored access token of the current user has expired.

    No stored token counts as expired; a token without a stored
    expiration timestamp counts as still valid.
    :return: bool
    """
    self._json = self._jstore.get_data()
    user_data = self._json['access_manager']['users'].get(self._user, {})
    access_token = user_data.get('access_token', '')
    expires = int(user_data.get('token_expires', -1))
    # with no access_token it must be expired
    if not access_token:
        return True
    # in this case no expiration date was set
    if expires == -1:
        return False
    return expires <= int(time.time())
def update_access_token(self, access_token, unix_timestamp=None, refresh_token=None):
    """
    Updates the old access token with the new one and persists it.
    :param access_token: string, new access token
    :param unix_timestamp: expiration time; only stored when given
    :param refresh_token: new refresh token; only stored when given
    :return:
    """
    self._json = self._jstore.get_data()
    self._json['access_manager']['users'][self._user]['access_token'] = access_token
    if unix_timestamp is not None:
        self._json['access_manager']['users'][self._user]['token_expires'] = int(unix_timestamp)
    if refresh_token is not None:
        self._json['access_manager']['users'][self._user]['refresh_token'] = refresh_token
    self._jstore.save(self._json)
def get_new_developer(self, addon_id):
    """
    Returns a fresh, empty developer record.
    :param addon_id: string, addon id (not used by the current implementation)
    :return: a new developer dict
    """
    return {
        'access_token': '',
        'refresh_token': '',
        'token_expires': -1,
        'last_key_hash': ''
    }
def get_developers(self):
    """
    Returns developers
    :return: dict, developers
    """
    # FIX: refresh from storage first, consistent with every other getter
    # in this class; previously this read a possibly stale cached copy of
    # self._json.
    self._json = self._jstore.get_data()
    return self._json['access_manager'].get('developers', {})
def set_developers(self, developers):
    """
    Updates the developers dict and persists it.
    (Docstring fixed: the previous summary said "Updates the users".)
    :param developers: dict, developers
    :return:
    """
    self._json = self._jstore.get_data()
    self._json['access_manager']['developers'] = developers
    self._jstore.save(self._json)
def get_dev_access_token(self, addon_id):
    """
    Returns the access token for some API
    :param addon_id: addon id
    :return: access_token ('' if none stored)
    """
    self._json = self._jstore.get_data()
    return self._json['access_manager']['developers'].get(addon_id, {}).get('access_token', '')
def get_dev_refresh_token(self, addon_id):
    """
    Returns the refresh token for the given developer
    :param addon_id: addon id
    :return: refresh token ('' if none stored)
    """
    self._json = self._jstore.get_data()
    return self._json['access_manager']['developers'].get(addon_id, {}).get('refresh_token', '')
def developer_has_refresh_token(self, addon_id):
    """Whether a refresh token is stored for the given developer addon."""
    refresh_token = self.get_dev_refresh_token(addon_id)
    return refresh_token != ''
def is_dev_access_token_expired(self, addon_id):
    """
    Whether the stored access token of the given developer has expired.

    No stored token counts as expired; a token without a stored
    expiration timestamp counts as still valid.
    :param addon_id: addon id
    :return: bool
    """
    self._json = self._jstore.get_data()
    dev_data = self._json['access_manager']['developers'].get(addon_id, {})
    access_token = dev_data.get('access_token', '')
    expires = int(dev_data.get('token_expires', -1))
    # with no access_token it must be expired
    if not access_token:
        return True
    # in this case no expiration date was set
    if expires == -1:
        return False
    return expires <= int(time.time())
def update_dev_access_token(self, addon_id, access_token, unix_timestamp=None, refresh_token=None):
"""
Updates the old access token with the new one.
:param addon_id:
:param access_token:
:param unix_timestamp:
:param refresh_token:
:return:
"""
self._json = self._jstore.get_data()
self._json['access_manager']['developers'][addon_id]['access_token'] = access_token
if unix_timestamp is not None:
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.