prefix stringlengths 0 918k | middle stringlengths 0 812k | suffix stringlengths 0 962k |
|---|---|---|
# This script calculates how many error reports are in each subdirectory
# and how many error reports are in total.
# Edit in_dir and out_file parameters as you need.
import os

in_dir = "D:/Projects/CrashRpt/valid_reports"
out_file = "stats.txt"


def get_txt_file_count(dirname):
    """Return the number of *.txt files directly inside *dirname*.

    Only the top level of *dirname* is scanned; subdirectories are not
    recursed into (the original code broke out of os.walk after the
    first iteration, which yields *dirname* itself).
    """
    for _root, _dirs, files in os.walk(dirname, True):
        return sum(1 for name in files if name.endswith(".txt"))
    return 0  # dirname missing or unreadable


def main():
    """Collect per-directory report counts and write the stats file."""
    # Maps a report count -> list of directory names having that count.
    multimap = {}
    for root, dirs, _files in os.walk(in_dir):
        for subdir in dirs:
            count = get_txt_file_count(os.path.join(root, subdir))
            multimap.setdefault(count, []).append(subdir)

    total_count = sum(count * len(names) for count, names in multimap.items())
    total_groups = sum(len(names) for names in multimap.values())

    with open(out_file, "w") as f:
        f.write("Total %d reports (100%%) in %d directories\n"
                % (total_count, total_groups))
        n = 1
        for count in sorted(multimap, reverse=True):
            for subdir in multimap[count]:
                # Guard against division by zero when every directory
                # contains zero reports.
                percent = (count / float(total_count) * 100
                           if total_count else 0.0)
                f.write("%d. %d reports (%0.1f%%) in '%s'\n"
                        % (n, count, percent, subdir))
                n += 1


if __name__ == "__main__":
    main()
#!/usr/bin/env python2
# Installs and configures a Koji builder plus the Koji CLI.
# References:
# https://fedoraproject.org/wiki/Koji/ServerHowTo
# https://github.com/sbadakhc/kojak/blob/master/scripts/install/install
import util.cfg as cfg
import util.pkg as pkg
import util.cred as cred
from util.log import log

#
# Setup
#
log.info("General update")
pkg.clean()
pkg.update()

log.info("Install EPEL")
pkg.install("https://dl.fedoraproject.org/pub/epel/6/x86_64/epel-release-6-8.noarch.rpm")

#
# Kojid (Koji Builder)
#
log.info("Install Koji Builder")
pkg.install("koji-builder")

# Base URLs of the Koji web/file/hub services used by the configs below.
koji_url = dict()
koji_url["web"] = "http://koji/koji"
koji_url["top"] = "http://koji/kojifiles"
koji_url["hub"] = "http://koji/kojihub"

log.info("Configure Koji Builder")
with cfg.mod_ini("/etc/kojid/kojid.conf") as i:
    i.kojid.sleeptime = 2
    i.kojid.maxjobs = 20
    i.kojid.server = koji_url["hub"]
    i.kojid.topurl = koji_url["top"]
    # i.kojid.cert is set at runtime
    i.kojid.ca = cred.ca_crt
    i.kojid.serverca = cred.ca_crt
    i.kojid.smtphost = "koji"
    i.kojid.from_addr = "Koji Build System <buildsys@kojibuilder>"

#
# Koji CLI
#
log.info("Configure Koji CLI")
with cfg.mod_ini("/etc/koji.conf") as i:
    i.koji.server = koji_url["hub"]
    i.koji.weburl = koji_url["web"]
    i.koji.topurl = koji_url["top"]
    i.koji.topdir = "/mnt/koji"
    i.koji.cert = cred.user["kojiadmin"].pem
    i.koji.ca = cred.ca_crt
    i.koji.serverca = cred.ca_crt

pkg.clean()
|
from .simple import SIMPLE_ADJACENCIES
from .combo import ComboAdjacency, LearnedComboAdjacency


def construct_adjacency(matrix, **kwargs):
    """Build an adjacency object from *matrix*.

    A list selects a combo adjacency (learned when the 'learned_tradeoff'
    keyword is truthy); any other value is used as a key into
    SIMPLE_ADJACENCIES and the matching factory is invoked with **kwargs.
    """
    if isinstance(matrix, (list,)):
        if kwargs.get('learned_tradeoff', False):
            return LearnedComboAdjacency(adj_list=matrix, **kwargs)
        return ComboAdjacency(adj_list=matrix, **kwargs)
    return SIMPLE_ADJACENCIES[matrix](**kwargs)
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class NameAvailability(Model):
    """Represents a resource name availability.

    :param message: Error Message.
    :type message: str
    :param name_available: Indicates whether the resource name is available.
    :type name_available: bool
    :param reason: Reason for name being unavailable.
    :type reason: str
    """

    # Maps Python attribute names to their wire (JSON) keys and types.
    _attribute_map = {
        'message': {'key': 'message', 'type': 'str'},
        'name_available': {'key': 'nameAvailable', 'type': 'bool'},
        'reason': {'key': 'reason', 'type': 'str'},
    }

    def __init__(self, message=None, name_available=None, reason=None):
        self.message = message
        self.name_available = name_available
        self.reason = reason
|
# Name: Examples for using conditioning number finders for curves and surfaces
# Description: Contains some examples with descriptions of how to use the functions
# Created: 2016-08-18
# Author: Janis Lazovskis
# Navigate to the conditioning directory
# Run Python 2
# Example (curve)
execfile('curves.py')
x = variety()
x0,x1,x2 = sp.var('x0,x1,x2')
x.varlist = [x0,x1,x2]
x.func = x0*x0 + x1*x2 - x1*x0
x.points = [[1,1,0], [2,1,-2]]
cnumcurve(x)
# Non-example (curve)
# Use the above, but instead, put:
x.points = [[1,1,0], [2,1,-2], [0,0,0]]
# Then cnumcurve will return an empty list saying the last point isn't in P^2
cnumcurve(x)
# Non-example (curve)
# Use the above, but instead, put:
x.points = [[1,1,0], [2,1,-2], [1,1,1]]
# Then cnumcurve will return an empty list saying the last point isn't on the curve
cnumcurve(x)
# Example surface
execfile('surfaces.py')
x = variety()
x0,x1,x2,x3 = sp.var('x0,x1,x2,x3')
x.varlist = [x0,x1,x2,x3]
x.func = x0*x1 - x2*x3
x.points = [[1,1,1,1], [0,1,1,0], [0,1,0,1], [2,1,1,2]]
cnumsurface(x)
# Non-example (surface)
execfile('surfaces.py')
x = variety()
x0,x1,x2,x3 = sp.var('x0,x1,x2,x3')
x.varlist = [x0,x1,x2,x3]
x.func = x0*x0*x1 - x2*x3*x3 + x0*x1*x2 +x2*x2*x2
x.points = [[0,1,1,1], [1,0,1,1], [1,0,2,2], [1,1,-1,1]]
# This will raise an error because the curve is not smooth
cnumsurface(x)
|
)
# Call
ntools.assert_raises(SCIONPathPolicyViolated, inst.check_filters, pcb)
def test_property_ranges(self):
inst, pcb = self._setup(reasons="reasons")
ntools.assert_raises(SCIONPathPolicyViolated, inst.check_filters, pcb)
class TestPathPolicyCheckPropertyRanges(object):
    """
    Unit tests for lib.path_store.PathPolicy._check_property_ranges
    """

    def _setup(self, max_bw=20):
        inst = PathPolicy()
        inst.property_ranges = {
            'PeerLinks': [0, 1], 'HopsLength': [0, 1], 'DelayTime': [0, 1],
            'GuaranteedBandwidth': [0, max_bw],
            'AvailableBandwidth': [0, max_bw], 'TotalBandwidth': [0, max_bw]
        }
        pcb = create_mock(["get_n_peer_links", "get_n_hops", "get_timestamp"])
        return inst, pcb

    @patch("lib.path_store.SCIONTime.get_time", new_callable=create_mock)
    def test_success(self, get_time):
        # All sampled values fall inside their configured ranges.
        inst, pcb = self._setup()
        pcb.get_n_peer_links.return_value = 0.5
        pcb.get_n_hops.return_value = 0.5
        pcb.get_timestamp.return_value = 0.5
        # Call
        ntools.eq_(inst._check_property_ranges(pcb), [])

    @patch("lib.path_store.SCIONTime.get_time", new_callable=create_mock)
    def test_failure(self, get_time):
        # Every sampled value violates its range, producing six reasons.
        inst, pcb = self._setup(max_bw=9)
        pcb.get_n_peer_links.return_value = 2
        pcb.get_n_hops.return_value = -1
        pcb.get_timestamp.return_value = -0.1
        # Call
        ntools.eq_(len(inst._check_property_ranges(pcb)), 6)

    @patch("lib.path_store.SCIONTime.get_time", new_callable=create_mock)
    def test_no_checks(self, get_time):
        # Empty range lists disable checking, so no violations are reported.
        inst, pcb = self._setup(max_bw=9)
        for key in inst.property_ranges:
            inst.property_ranges[key] = []
        pcb.get_n_peer_links.return_value = 2
        pcb.get_n_hops.return_value = -1
        pcb.get_timestamp.return_value = -0.1
        # Call
        ntools.eq_(inst._check_property_ranges(pcb), [])
class TestPathPolicyParseDict(object):
"""
Unit tests for lib.path_store.PathPolicy.parse_dict
"""
def test_basic(self):
dict_ = {}
dict_['BestSetSize'] = "best_set_size"
dict_['CandidatesSetSize'] = "candidates_set_size"
dict_['HistoryLimit'] = "history_limit"
dict_['UpdateAfterNumber'] = "update_after_number"
dict_['UpdateAfterTime'] = "update_after_time"
dict_['UnwantedASes'] = "1-11,2-12"
dict_['PropertyRanges'] = {'key1': "1-11", 'key2': "2-12"}
dict_['PropertyWeights'] = "property_weights"
pth_pol2 = PathPolicy()
pth_pol2.parse_dict(dict_)
ntools.eq_(pth_pol2.best_set_size, "best_set_size")
ntools.eq_(pth_pol2.candidates_set_size, "candidates_set_size")
ntools.eq_(pth_pol2.history_limit, "history_limit")
ntools.eq_(pth_pol2.update_after_number, "update_after_number")
ntools.eq_(pth_pol2.update_after_time, "update_after_time")
ntools.eq_(pth_pol2.property_ranges, {'key1': (1, 11), 'key2': (2, 12)})
ntools.eq_(pth_pol2.property_weights, "property_weights")
class TestPathStoreRecordInit(object):
    """
    Unit tests for lib.path_store.PathStoreRecord.__init__
    """

    @patch("lib.path_store.PathStoreRecord.update", autospec=True)
    @patch("lib.path_store.SCIONTime.get_time", new_callable=create_mock)
    def test(self, get_time, update):
        pcb = create_mock(['get_hops_hash', 'get_n_hops', 'get_n_peer_links'],
                          class_=PathSegment)
        get_time.return_value = PathStoreRecord.DEFAULT_OFFSET + 1
        # Call
        inst = PathStoreRecord(pcb)
        # Tests
        ntools.eq_(inst.id, pcb.get_hops_hash.return_value)
        ntools.eq_(inst.peer_links, pcb.get_n_peer_links.return_value)
        ntools.eq_(inst.hops_length, pcb.get_n_hops.return_value)
        ntools.eq_(inst.fidelity, 0)
        ntools.eq_(inst.disjointness, 0)
        # last_sent_time is get_time() minus DEFAULT_OFFSET, hence 1.
        ntools.eq_(inst.last_sent_time, 1)
        ntools.eq_(inst.guaranteed_bandwidth, 0)
        ntools.eq_(inst.available_bandwidth, 0)
        ntools.eq_(inst.total_bandwidth, 0)
        update.assert_called_once_with(inst, pcb)
class TestPathStoreRecordUpdate(object):
    """
    Unit tests for lib.path_store.PathStoreRecord.update
    """

    @patch("lib.path_store.SCIONTime.get_time", new_callable=create_mock)
    @patch("lib.path_store.PathStoreRecord.__init__", autospec=True,
           return_value=None)
    def test(self, init, get_time):
        inst = PathStoreRecord("pcb")
        get_time.return_value = 100
        pcb = create_mock(["copy", "get_hops_hash", "get_timestamp",
                           "get_expiration_time"])
        inst.id = pcb.get_hops_hash.return_value
        pcb.get_timestamp.return_value = 95
        # Call
        inst.update(pcb)
        # Tests
        pcb.copy.assert_called_once_with()
        # delay_time = now (100) - segment timestamp (95).
        ntools.eq_(inst.delay_time, 5)
        ntools.eq_(inst.last_seen_time, 100)
        ntools.eq_(inst.expiration_time, pcb.get_expiration_time.return_value)
class TestPathStoreRecordUpdateFidelity(object):
    """
    Unit tests for lib.path_store.PathStoreRecord.update_fidelity
    """

    @patch("lib.path_store.SCIONTime.get_time", new_callable=create_mock)
    @patch("lib.path_store.PathStoreRecord.__init__", autospec=True,
           return_value=None)
    def test_basic(self, init, time_):
        path_policy = PathPolicy()
        path_policy.property_weights['PeerLinks'] = 10
        path_policy.property_weights['HopsLength'] = 1
        path_policy.property_weights['Disjointness'] = 2
        path_policy.property_weights['LastSentTime'] = 3
        path_policy.property_weights['LastSeenTime'] = 4
        path_policy.property_weights['DelayTime'] = 5
        path_policy.property_weights['ExpirationTime'] = 6
        path_policy.property_weights['GuaranteedBandwidth'] = 7
        path_policy.property_weights['AvailableBandwidth'] = 8
        path_policy.property_weights['TotalBandwidth'] = 9
        pth_str_rec = PathStoreRecord("pcb")
        # Values are chosen so each weighted term contributes a distinct
        # decimal digit to the expected fidelity sum below.
        pth_str_rec.peer_links = 10 ** 5
        pth_str_rec.hops_length = (1 / (10 ** 4))
        pth_str_rec.disjointness = 10 ** 3
        pth_str_rec.last_sent_time = -99
        pth_str_rec.last_seen_time = 10
        pth_str_rec.delay_time = 1
        pth_str_rec.expiration_time = 10 / 9
        pth_str_rec.guaranteed_bandwidth = 10 ** -2
        pth_str_rec.available_bandwidth = 10 ** -3
        pth_str_rec.total_bandwidth = 10 ** -4
        time_.return_value = 1
        pth_str_rec.update_fidelity(path_policy)
        ntools.assert_almost_equal(pth_str_rec.fidelity, 1012345.6789)
class TestPathStoreAddSegment(object):
"""
Unit tests for lib.path_store.PathStore.add_segment
"""
def _setup(self, filter_=True):
inst = PathStore("path_policy")
inst.path_policy = create_mock(["check_filters"])
if not filter_:
inst.path_policy.check_filters.side_effect = SCIONPathPolicyViolated()
pcb = create_mock(["get_hops_hash", "get_timestamp"],
class_=PathSegment)
return inst, pcb
@patch("lib.path_store.PathStore.__init__", autospec=True,
return_value=None)
def test_filters(self, psi):
"""
Try to add a path that does not meet the filter requirements.
"""
inst, pcb = self._setup(filter_=False)
# Call
inst.add_segment(pcb)
# Tests
inst.path_policy.check_filters.assert_called_once_with(pcb)
@patch("lib.path_store.PathStore.__init__", autospec=True,
return_value=None)
def test_already_in_store(self, init):
"""
Try to add a path that is already in the path store.
"""
inst, pcb = self._setup()
candidate = create_mock(['id', 'update'])
candidate.id = pcb.get_hops_hash.return_value
inst.candidates = [candidate]
# Call
inst.add_segment(pcb)
# Tests
candidate.update.assert_called_once_with(pcb)
@patch("lib.path_store.PathStoreRecord", autospec=True)
@patch("lib.path_store.PathStore.__init__", autospec=True,
re |
# -*- coding: utf-8 -*-
# Binary constants for the MDL model file format. Each group below forms an
# independent enumeration, so values repeat across groups.

# Component data types (presumably per-element storage types — confirm
# against the reader/writer).
MDL_BYTE = 1
MDL_UNSIGNED_BYTE = 2
MDL_SHORT = 3
MDL_UNSIGNED_SHORT = 4
MDL_HALF = 5
MDL_INT = 6
MDL_UNSIGNED_INT = 7
MDL_FLOAT = 8
MDL_DOUBLE = 9

# File section / chunk identifiers.
MDL_FILE = 1
MDL_NODE = 2
MDL_MESH = 3
MDL_BONE = 4
MDL_VERTEXARRAY = 5
MDL_INDICEARRAY = 6
MDL_PROPERTIES = 7
MDL_ANIMATIONKEYS = 8
MDL_AABB = 9
MDL_SURFACE = 10
MDL_NEWTONCOLLISIONTREE = 11

# Vertex attribute semantics.
MDL_POSITION = 1
MDL_NORMAL = 2
MDL_TEXTURE_COORD = 3
MDL_COLOR = 4
MDL_TANGENT = 5
MDL_BINORMAL = 6
MDL_BONEINDICE = 7
MDL_BONEWEIGHT = 8

# Primitive topologies.
MDL_POINTS = 1
MDL_LINE_STRIP = 2
MDL_LINE_LOOP = 3
MDL_LINES = 4
MDL_TRIANGLE_STRIP = 5
MDL_TRIANGLE_FAN = 6
MDL_TRIANGLES = 7
MDL_QUAD_STRIP = 8
MDL_QUADS = 9
MDL_POLYGON = 10

# Format version.
MDL_VERSION = 2
import sys, os
class FastaTarget:
    """Index record for a single reference sequence in a .fai file."""

    def __init__(self):
        self.tid = ''      # target (sequence) name
        self.len = ''      # sequence length in bases (int once parsed)
        self.offset = -1   # byte offset of the first base in the fasta file
        self.blen = -1     # bases per fasta line
        self.llen = -1     # bytes per fasta line (including the newline)


class FastaIndex:
    """Parse a samtools .fai index file into a dict of FastaTarget records."""

    def __init__(self, fname=''):
        self.f = open(fname, 'r')
        self.ft = {}
        try:
            # Iterate lazily instead of readlines(): avoids materializing
            # the whole index in memory at once.
            for line in self.f:
                dat = line.split()
                # A valid .fai row has exactly five columns:
                # name, length, offset, bases-per-line, bytes-per-line.
                if len(dat) == 5:
                    tid = dat[0]
                    target = FastaTarget()
                    target.tid = tid
                    target.len = int(dat[1])
                    target.offset = int(dat[2])
                    target.blen = int(dat[3])
                    target.llen = int(dat[4])
                    self.ft[tid] = target
        finally:
            # Guarantee the handle is closed even if a row fails to parse.
            self.f.close()
class Fasta:
    """Random access to substrings of an indexed (faidx'd) fasta file."""

    def __init__(self, fname='/nfs/users/nfs_c/caa/s103/ref/human_b36_male.fa'):
        self.fa = open(fname, 'r')
        self.fai = FastaIndex(fname + '.fai')

    def get(self, tid, pos1based, len):
        """Return *len* bases of target *tid* starting at 1-based *pos1based*.

        The result is a list of single-character strings; newlines in the
        fasta file are skipped while reading. Raises NameError when *tid*
        is not present in the index (historical behaviour, preserved for
        existing callers). NOTE: the parameter named ``len`` shadows the
        builtin; kept for backward compatibility with keyword callers.
        """
        pos = pos1based - 1
        try:
            idx = self.fai.ft[tid]
        except KeyError:
            # Was a Python-2 `print` plus bare NameError('KeyError');
            # fold the target id into the exception message instead.
            raise NameError('KeyError: %s' % tid)
        # Translate the base position to a byte offset: whole fasta lines
        # are llen bytes but hold blen bases ('//' keeps Python 2's integer
        # division semantics on Python 3).
        fpos = idx.offset + (int(pos) // idx.blen) * idx.llen + (int(pos) % idx.blen)
        self.fa.seek(fpos, 0)
        numread = 0
        seq = []
        while numread < len:
            char = self.fa.read(1)
            if char != '\n':
                seq.append(char)
                numread += 1
        return seq
def getChromosomes(faFile=''):
    """Return target names from *faFile*'s .fai index, autosomes/X/Y first.

    Chromosomes '1'..'22', 'X', 'Y' (when present in the index) lead the
    list in that order, followed by all remaining targets in arbitrary
    order. Raises NameError when the .fai index file does not exist.
    """
    faiFile = "%s.fai" % (faFile)
    if not os.path.exists(faiFile):
        raise NameError("Cannot find fai file for %s" % faFile)
    fachr = FastaIndex(faiFile).ft.keys()
    autosomal = ["%d" % c for c in range(1, 23)]
    autosomal.extend(['X', 'Y'])
    chromosomes = [chrom for chrom in autosomal if chrom in fachr]
    # Any non-standard targets are appended afterwards (set difference, so
    # their relative order is unspecified).
    chromosomes.extend(list(set(fachr) - set(autosomal)))
    return chromosomes
|
# encoding: utf-8
# Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved.
#
# This file is part of Navitia,
# the software to build cool stuff with public transport.
#
# Hope you'll enjoy and contribute to this project,
# powered by Canal TP (www.canaltp.fr).
# Help us simplify mobility and open public transport:
# a non ending quest to the responsive locomotion way of traveling!
#
# LICENCE: This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Stay tuned using
# twitter @navitia
# IRC #navitia on freenode
# https://groups.google.com/d/forum/navitia
# www.navitia.io
import logging
import datetime
from sqlalchemy import Table, MetaData, select, create_engine
import sqlalchemy
from sindri.saver.message import persist_message
from sindri.saver.at_perturbation import persist_at_perturbation
from sindri.saver.utils import FunctionalError, TechnicalError
class EdRealtimeSaver(object):
    """
    Class responsible for persisting real-time events into the database.
    """

    def __init__(self, config):
        self.__engine = create_engine(config.ed_connection_string)
        self.meta = MetaData(self.__engine)
        self.message_table = Table('message', self.meta, autoload=True,
                                   schema='realtime')
        self.localized_message_table = Table('localized_message', self.meta,
                                             autoload=True, schema='realtime')
        self.at_perturbation_table = Table('at_perturbation', self.meta,
                                           autoload=True, schema='realtime')

    def persist_message(self, message):
        """Persist a message within a managed transaction."""
        self.__persist(message, persist_message)

    def persist_at_perturbation(self, perturbation):
        """Persist an AT perturbation within a managed transaction."""
        self.__persist(perturbation, persist_at_perturbation)

    def __persist(self, item, callback):
        """
        Wrap database error handling and transaction management around
        *callback*.

        :param item: the object to persist
        :param callback: function in charge of actually persisting the
            object; its signature is (meta, conn, item) where
            meta is a MetaData object,
            conn is the database connection,
            item is the object to persist
        """
        logger = logging.getLogger('sindri')
        conn = None
        try:
            conn = self.__engine.connect()
            transaction = conn.begin()
        except sqlalchemy.exc.SQLAlchemyError as e:
            logger.exception('error durring transaction')
            raise TechnicalError('problem with databases: ' + str(e))
        try:
            callback(self.meta, conn, item)
            transaction.commit()
        except (sqlalchemy.exc.IntegrityError, sqlalchemy.exc.DataError) as e:
            # Data-level failures are the caller's problem: functional error.
            logger.exception('error durring transaction')
            transaction.rollback()
            raise FunctionalError(str(e))
        except sqlalchemy.exc.SQLAlchemyError as e:
            logger.exception('error durring transaction')
            # Only roll back when the connection has not been invalidated.
            if not hasattr(e, 'connection_invalidated') \
                    or not e.connection_invalidated:
                transaction.rollback()
            raise TechnicalError('problem with databases: ' + str(e))
        except:
            # Unknown failure: attempt a best-effort rollback, then re-raise.
            logger.exception('error durring transaction')
            try:
                transaction.rollback()
            except:
                pass
            raise
        finally:
            if conn:
                conn.close()
|
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Remote environment manager for extract-transform-load utilities."""
__author__ = [
'johncox@google.com',
]
import os
import sys
import appengine_config
# Override SERVER_SOFTWARE before doing any App Engine imports so import-time
# detection of dev mode, done against SERVER_SOFTWARE of 'Development*', fails.
# Once imports are done, this environment variable can be reset as needed (for
# tests, etc.). pylint: disable-msg=g-import-not-at-top
SERVER_SOFTWARE = 'Production Emulation'
if appengine_config.PRODUCTION_MODE:
sys.exit('Running etl/tools/remote.py in production is not supported.')
os.environ['SERVER_SOFTWARE'] = SERVER_SOFTWARE
from google.appengine.ext.remote_api import remote_api_stub
from google.appengine.tools import appengine_rpc
from google.appengine.tools import remote_api_shell
# String. Used to detect appspot.com servers.
_APPSPOT_SERVER_SUFFIX = 'appspot.com'
# String. Password used when a password is not necessary.
_BOGUS_PASSWORD = '9p1tra1n1n9'
# String. Infix for google.com application ids.
_GOOGLE_APPLICATION_INFIX = 'google.com'
# String. Prefix App Engine uses application ids in the dev appserver.
_LOCA | L_APPLICATION_ID_PREFIX = 'dev~'
# String. Prefix used to detect if a server is running locally.
_LOCAL_SERVER_PREFIX = 'localhost'
# String. Prefix App Engine uses for application ids in production.
_REMOTE_APPLICATION_ID_PREFIX = 's~'
# String. Email address used unless os.environ['USER_EMAIL'] is set in tests.
_TEST_EMAIL = 'gpionlinetraining'
# String. os.ENVIRON['SERVER_SOFTWARE'] value that indicates we're running under
# the test environment. |
TEST_SERVER_SOFTWARE = 'Test'
class Error(Exception):
    """Base error type for this module."""


class EnvironmentAuthenticationError(Error):
    """Raised when establishing an environment fails due to bad credentials."""
class Environment(object):
    """Sets up the execution environment to use remote_api for RPCs.

    As with any use of remote_api, this has three important caveats:

    1. By going through the Remote API rather than your application's handlers,
       you are bypassing any business logic in those handlers. It is easy in
       this way to accidentally corrupt the system receiving your RPCs.
    2. There is no guarantee that the code running on the system receiving your
       RPCs is the same version as the code running locally. It is easy to have
       version skew that corrupts the destination system.
    3. Execution is markedly slower than running in production.
    """

    def __init__(
            self, application_id, server, path='/_ah/remote_api'):
        """Constructs a new Environment.

        Args:
            application_id: string. The application id of the environment
                (myapp).
            server: string. The full name of the server to connect to
                (myurl.appspot.com).
            path: string. The URL of your app's remote api entry point.
        """
        # NOTE(review): the constructor arguments are ignored and replaced
        # by hard-coded values below — this looks like a local hack and
        # should be confirmed (or reverted to the commented lines) before
        # wider reuse.
        #self._application_id = application_id
        #self._path = path
        #self._server = server
        self._application_id = "gpirecertification"
        self._path = "http://gpirecertification.appspot.com/gpirecert"
        self._server = "gpirecertification.appspot.com/gpirecert"

    @staticmethod
    def _dev_appserver_auth_func():
        """Auth function to run for dev_appserver (bogus password)."""
        # NOTE(review): hard-coded credentials; the original prompted via
        # raw_input. Confirm this is intended outside local testing.
        # return raw_input('Email: '), _BOGUS_PASSWORD
        return "gpionlinetraining", "9p1tra1n1n9"

    @staticmethod
    def _test_auth_func():
        """Auth function to run in tests (bogus username and password)."""
        # return os.environ.get('USER_EMAIL', _TEST_EMAIL), _BOGUS_PASSWORD
        return "gpionlinetraining", "9p1tra1n1n9"

    def _get_auth_func(self):
        """Returns authentication function for the remote API."""
        if os.environ.get('SERVER_SOFTWARE', '').startswith(
                TEST_SERVER_SOFTWARE):
            return self._test_auth_func
        elif self._is_localhost():
            return self._dev_appserver_auth_func
        else:
            return remote_api_shell.auth_func

    def _get_internal_application_id(self):
        """Returns string containing App Engine's internal id representation."""
        prefix = _REMOTE_APPLICATION_ID_PREFIX
        if self._is_localhost():
            prefix = _LOCAL_APPLICATION_ID_PREFIX
        elif not self._is_appspot():
            # Non-appspot production apps are addressed as s~google.com:<id>.
            prefix = '%s%s:' % (prefix, _GOOGLE_APPLICATION_INFIX)
        return prefix + self._application_id

    def _get_secure(self):
        """Returns boolean indicating whether or not to use https."""
        return not self._is_localhost()

    def _is_appspot(self):
        """Returns True iff server is appspot.com."""
        return self._server.endswith(_APPSPOT_SERVER_SUFFIX)

    def _is_localhost(self):
        """Returns True if environment is dev_appserver and False otherwise."""
        return self._server.startswith(_LOCAL_SERVER_PREFIX)

    def establish(self):
        """Establishes the environment for RPC execution."""
        try:
            remote_api_stub.ConfigureRemoteApi(
                self._get_internal_application_id(), self._path,
                self._get_auth_func(), servername=self._server,
                save_cookies=True, secure=self._get_secure(),
                rpc_server_factory=appengine_rpc.HttpRpcServer)
            remote_api_stub.MaybeInvokeAuthentication()
        except AttributeError:
            raise EnvironmentAuthenticationError
|
from __future__ import print_function
from lector import *
from lpSolv import *
import sys
import math
def generarRestriccionesTipo(matriz, tipo):
    """Append the constraint rows of kind *tipo* to *matriz*.

    tipo 0/1: per-bin weight/volume capacity rows; tipo 2: each item is
    assigned to exactly one bin; tipo 3: linearized pairwise weight-balance
    constraints with one slack variable per bin pair.
    Relies on module globals n (bins), i (items), items and caneca.
    """
    # '//' keeps the pairwise-constraint count an integer on Python 3
    # (the original '/' relied on Python 2 integer division).
    numeroRestricciones = (n * (n - 1)) // 2
    if tipo == 0 or tipo == 1:
        for j in range(0, n):
            restriccionCanecaI = []
            for k in range(0, i * n):
                if k >= i * j and k < i * j + i:
                    # Column belongs to bin j: weight/volume coefficient.
                    objPos = k % i
                    if tipo == 0:
                        restriccionCanecaI.append(items[objPos].getPeso())
                    elif tipo == 1:
                        restriccionCanecaI.append(items[objPos].getVolumen())
                else:
                    restriccionCanecaI.append(0)
            if tipo == 0:
                for k in range(0, numeroRestricciones):
                    restriccionCanecaI.append(0)
                restriccionCanecaI.append("<=")
                restriccionCanecaI.append(caneca.getPeso())
            elif tipo == 1:
                for k in range(0, numeroRestricciones):
                    restriccionCanecaI.append(0)
                restriccionCanecaI.append("<=")
                restriccionCanecaI.append(caneca.getVolumen())
            matriz.append(restriccionCanecaI)
    elif tipo == 2:
        # One equality row per item: it must land in exactly one bin.
        for j in range(0, i):
            restriccionCanecaI = []
            for k in range(0, i * n + numeroRestricciones):
                if (k % i) - j == 0 and k < i * n:
                    restriccionCanecaI.append(1)
                else:
                    restriccionCanecaI.append(0)
            restriccionCanecaI.append("=")
            restriccionCanecaI.append(1)
            matriz.append(restriccionCanecaI)
    elif tipo == 3:
        # Two rows per bin pair (u, v): +/- weight difference bounded by
        # slack variable r.
        r = 0
        for u in range(0, n):
            for v in range(u + 1, n):
                mult = 1
                for numEq in range(0, 2):
                    restriccionCanecaI = []
                    for k in range(0, i * n):
                        if k >= u * i and k < u * i + i:
                            restriccionCanecaI.append(mult * items[k % i].getPeso())
                        elif k >= v * i and k < v * i + i:
                            restriccionCanecaI.append(-mult * items[k % i].getPeso())
                        else:
                            restriccionCanecaI.append(0)
                    for k in range(0, numeroRestricciones):
                        if k == r:
                            restriccionCanecaI.append(-1)
                        else:
                            restriccionCanecaI.append(0)
                    restriccionCanecaI.append("<=")
                    restriccionCanecaI.append(0)
                    matriz.append(restriccionCanecaI)
                    mult = mult * -1
                r += 1
def generarFuncObj(matriz):
    """Append the objective row to *matriz*: zeros over the assignment
    variables, ones over the pairwise slack variables.
    Relies on module globals n and i.
    """
    # '//' keeps the slack-variable count an integer on Python 3.
    numeroRestricciones = (n * (n - 1)) // 2
    matriz.append([0] * (i * n) + [1] * numeroRestricciones)
def generarRestricciones():
    """Populate the four module-level constraint matrices."""
    generarRestriccionesTipo(primeraRestriccion, 1)
    generarRestriccionesTipo(segundaRestriccion, 0)
    generarRestriccionesTipo(terceraRestriccion, 2)
    generarRestriccionesTipo(cuartaRestriccion, 3)


def generarFuncionObjetivo():
    """Populate the module-level objective-function matrix."""
    generarFuncObj(funcObj)
def printMatrix(testMatrix):
    """Print *testMatrix* row by row, suffixing each row with its length."""
    for fila in range(len(testMatrix)):
        for col in range(len(testMatrix[fila])):
            print(testMatrix[fila][col], end=" ")
        print(" : ", len(testMatrix[fila]))
    print()
def hallarN():
    """Return a lower bound on the number of bins needed: the larger of
    ceil(total weight / bin weight) and ceil(total volume / bin volume).
    Relies on module globals items and caneca.
    """
    sumaPesos = sum(item.getPeso() for item in items)
    sumaVolumenes = sum(item.getVolumen() for item in items)
    pesos = math.ceil(sumaPesos / float(caneca.getPeso()))
    volumenes = math.ceil(sumaVolumenes / float(caneca.getVolumen()))
    return pesos if pesos >= volumenes else volumenes
# --- Script entry: read the instance, build the LP matrix and solve it. ---
param = (sys.argv[1] if len(sys.argv) > 1 else -1)
o = leerArchivo(param)
caneca = o[0]
items = o[1]
numeroProblema = o[2] + "_2.lp"
n = int(hallarN())
i = len(items)
funcObj = []
primeraRestriccion = []
segundaRestriccion = []
terceraRestriccion = []
cuartaRestriccion = []
matriz = []
# '//' keeps the pairwise-constraint count an integer on Python 3.
numeroRestricciones = (n * (n - 1)) // 2
generarRestricciones()
generarFuncionObjetivo()
# Assemble the full matrix: objective row first, then each constraint group.
for grupo in (funcObj, primeraRestriccion, segundaRestriccion,
              terceraRestriccion, cuartaRestriccion):
    matriz.extend(grupo)
print("Items ", items)
print("Caneca ", caneca)
printMatrix(matriz)
resolverParte2(matriz, n, i, numeroProblema, numeroRestricciones)
|
f not v:
v = Visit(user_id1=self.id, user_id2=u.id)
v.visit_date = datetime.now(tz=None)
if new_v:
DBSession.add(v)
# @classproperty
# def __mapper_args__(cls):
# return dict(
# order_by='mba_users.name',
# polymorphic_identity=camel_case_to_name(cls.__name__)
# )
# id = Column('id', Integer, ForeignKey('mba_users.id'), primary_key=True)
school = Column(String(100))
school_year = Column(Integer())
# real_name = Column(String(20)), real_name is put in superclass ,for global site, real name is needed
birth_date = Column(Date())
identify_type = Column(Integer())
identify = Column(String(30))
home_number = Column(String(20))
# location = Column(String(20)) # location is duplicated with city_name in MbaUser
salary = Column(Integer())
work_years = Column(Integer())
company_phone = Column(String(30))
keyword = Column(String(100))
job_status = Column(String(100))
[AUTH_STATUS_UNAUTH, AUTH_STATUS_AUTHED, AUTH_STATUS_FAIL, AUTH_STATUS_REQ_FOR_AUTH ] = range(4)
auth_info = Column(Integer,default=AUTH_STATUS_UNAUTH) # 0, unauthed, 1 authed, 2 authfail, ( 3 request for auth?)
auth_meetup = Column(Integer,default=AUTH_STATUS_UNAUTH)
auth_friend = Column(Integer,default=AUTH_STATUS_UNAUTH) #
auth_expert = Column(Integer,default=AUTH_STATUS_UNAUTH) #
auth_expert_req = relationship('ExpertAuthReq', uselist=False)
auth_expert_reason = association_proxy('auth_expert_req', 'reason')
@property
def auth_honesty(self):
return [self.auth_info, self.auth_meetup, self.auth_friend].count(self.AUTH_STATUS_AUTHED) >= 2
resume = relationship('Resume', backref='user', uselist=False)
#resumes = relationship('Resume', backref='user')
def __repr__(self): # pragma: no cover
return '<Student %r>' % self.name
@property
def work_info(self):
arrs = [u"小于一年", u"一到三年", u"三到五年", u"五年以上"]
if self.work_years >= 0 and self.work_years < len(arrs):
return arrs[self.work_years]
return arrs[0]
@property
def birth_old(self):
return abs(date.today().year - self.birth_date.year)+1
# Alias kept for code that imports Student directly.
Student = MbaUser

# Symmetric view of accepted friendships (status == 1): every pair appears
# in both (a, b) and (b, a) orientations so the relationship works from
# either side.
friend_union = select([
    friend.c.user_a_id,
    friend.c.user_b_id
]).where(friend.c.status == 1).union(
    select([
        friend.c.user_b_id,
        friend.c.user_a_id,
    ]).where(friend.c.status == 1)
).alias()

MbaUser.all_friends = relationship(
    'MbaUser',
    secondary=friend_union,
    primaryjoin=MbaUser.id == friend_union.c.user_a_id,
    secondaryjoin=MbaUser.id == friend_union.c.user_b_id,
    viewonly=True
)

# Friend requests this user sent (status == 0 — presumably "pending").
my_requests = select([
    friend.c.user_a_id,
    friend.c.user_b_id
]).where(friend.c.status == 0).alias()

MbaUser.my_requests = relationship(
    'MbaUser',
    secondary=my_requests,
    primaryjoin=MbaUser.id == my_requests.c.user_a_id,
    secondaryjoin=MbaUser.id == my_requests.c.user_b_id,
    viewonly=True)

# Friend requests this user received (joins on user_b instead of user_a).
others_requests = select([
    friend.c.user_a_id,
    friend.c.user_b_id,
]).where(friend.c.status == 0).alias()

MbaUser.others_requests = relationship(
    'MbaUser',
    secondary=others_requests,
    primaryjoin=MbaUser.id == others_requests.c.user_b_id,
    secondaryjoin=MbaUser.id == others_requests.c.user_a_id,
    viewonly=True)
class Participate(Base):
    """Association between a user and an activity (Act) they joined."""
    __tablename__ = 'participate'

    user_id = Column(Integer, ForeignKey('mba_users.id'), primary_key=True)
    act_id = Column(Integer, ForeignKey('acts.id'), primary_key=True)
    creation_date = Column(DateTime(), nullable=False, default=datetime.now)
    # After attending the activity the user may rate it.
    rating = Column(Integer())
    user = relationship("MbaUser", backref=backref("partin",
                                                   cascade="all, delete-orphan"))
    meetup = relationship("Act")
class TeacherTag(Base):
    """A teacher/guest tag that can be attached to activities."""
    __tablename__ = 'teacher_tags'

    id = Column(Integer, primary_key=True)
    title = Column(Unicode(100), unique=True, nullable=False)

    def __repr__(self):
        return "<TeacherTag ('%s')>" % self.title

    @property
    def items(self):
        # NOTE(review): relies on a 'content_tags' backref that is not
        # declared in this fragment — confirm where it is defined.
        return [rel.item for rel in self.content_tags]
class TeacherTagToActs(Base):
    """Ordered association object linking TeacherTag rows to Act rows."""
    __tablename__ = 'teacher_tag_to_acts'

    tag_id = Column(Integer, ForeignKey('teacher_tags.id'), primary_key=True)
    content_id = Column(Integer, ForeignKey('acts.id'), primary_key=True)
    teacher_tag = relation(TeacherTag, backref=backref('teacher_tags', cascade='all'))
    # Position of the tag within the activity's ordered tag list.
    position = Column(Integer, nullable=False)
    title = association_proxy('teacher_tag', 'title')

    @classmethod
    def _tag_find_or_create(cls, title):
        """Return an association wrapping an existing or newly created tag."""
        # no_autoflush avoids flushing half-built objects during the lookup.
        with DBSession.no_autoflush:
            tag = DBSession.query(TeacherTag).filter_by(title=title).first()
        if tag is None:
            tag = TeacherTag(title=title)
        return cls(teacher_tag=tag)
# class ActStatus:
# PUBLIC, DRAFT, PRIVATE, CANCEL, DELETED = 0, 1, 2, 3, 4
# # public : seen by anyone
# # priveate: seen by admins
# # draft: seen by self.
# # cancel: meetup is canceled . 由于某些原因 管理员人为的取消活动
# # deleted: meetup is deleted . 如果活动已经有人报名,将不能删除
# # 是否是活动首页推荐、全站首页推荐,全站首页推荐待考虑
# class HeadLine:
# NOT_TOP, MEETUPS_TOP, SITE_TOP = 0, 1, 2
# 活动的类别
class MeetupType(Base):
    """Category of a meetup/activity."""
    # NOTE(review): no __tablename__ is declared although other models
    # reference 'meetup_types' — confirm the declarative base derives the
    # table name automatically.
    id = Column(Integer, primary_key=True)
    title = Column(String(100), nullable=True)
    acts = relationship("Act", backref='meetup_types')
from kotti.views.edit.content import Image
#Image.acts = relationship("Act", backref('images'))
#人数限制、钱钱、地点、嘉宾
# Act means activity
class Act(Document):
id = Column('id', Integer, ForeignKey('documents.id'), primary_key=True)
__acl__ = SITE_ACL
[STATUS_PUBLIC, STATUS_DRAFT, STATUS_PRIVATE, STATUS_CANCEL, STATUS_DELETED] = range(5)
status = Column(Integer(), nullable=False, default=STATUS_PUBLIC)
[PUTONBANNER_NO, PUTONBANNER_MEETUP, PUTONBANNER_HOME] = range(3)
headline = Column(Integer, nullable=False, default=PUTONBANNER_NO)
meetup_type = Column(Integer, ForeignKey('meetup_types.id'))
meetup_type_title = association_proxy('meetup_types', 'title' )
#海报ID
# poster_id = Column(Integer, ForeignKey('images.id'))
# poster = relationship('Image')
# @property
# def poster_img(self):
# # return "/images/%s/image/" % (self.poster.name)
# return self.poster_img_url
poster_img = Column(String(200)) # change 50 to 200 , 2014.10.29 by sunset
# TODO Ignore the city ?
city_id = Column(Integer, ForeignKey('city.id'))
city_name = association_proxy('city'
, 'name'
, creator=City._find_or_create)
# Meetup start time
meetup_start_time = Column(DateTime(timezone=TZ_HK))
# Meetup finish time
meetup_finish_time = Column(DateTime(timezone=TZ_HK))
enroll_finish_time = Column(DateTime(timezone=TZ_HK))
enroll_start_time = Column(DateTime(timezone=TZ_HK))
location = Column(UnicodeText())
#经度
latitude = Column(Float())
longitude = Column(Float())
zoomlevel = Column(Integer())
_teacher_tags = relation(
TeacherTagToActs,
backref=backref('item'),
order_by=[TeacherTagToActs.position],
collection_class=ordering_list("position"),
cascade='all, delete-orphan',
)
teachers = association_proxy(
'_teacher_tags',
'title',
creator=TeacherTagToActs._tag_find_or_create,
)
limit_num = Column(Integer(), default=500)
pay_count = Column(Integer(), default=0)
#TODO for teacher selected
type_info = Document.type_info.copy(
name=u'Act',
title=_(u'Act'),
add_v |
from pyrates.utility import grid_search_annarchy, plot_timeseries
from ANNarchy import Projection, Population, TimedArray | , setup, Network, Monitor, Uniform, Normal, \
EIF_cond_exp_isfa_ista
from pyrates.utility import pyrates_from_annarchy
import matplotlib.pyplot as plt
import numpy as np
# parameters
############
T = 1000.0 # simulation time (ms)
dt = 1e-2 # integration step-size (ms)
Ne = 100 # number of excitatory neurons
Ni = 100 # number of inhibitory neurons
# bounds for the uniformly drawn synaptic weights
c_min = 0.1
c_max = 1.0
# network definition
####################
setup(method='explicit', dt=dt)
# Neuron definition: adaptive exponential integrate-and-fire with
# conductance-based synapses; the equations override ANNarchy's defaults to
# add a multiplicative Normal(0.2, 1.0) noise factor on the offset current.
neuron = EIF_cond_exp_isfa_ista()
neuron.equations = """
I = g_exc * (e_rev_E - v) + g_inh * (e_rev_I - v) + i_offset * Normal(0.2, 1.0)
tau_m * dv/dt = (v_rest - v + delta_T * exp((v-v_thresh)/delta_T)) + tau_m/cm*(I - w) : init=-70.6
tau_w * dw/dt = a * (v - v_rest) / 1000.0 - w
tau_syn_E * dg_exc/dt = - g_exc : exponential
tau_syn_I * dg_inh/dt = - g_inh : exponential
"""
# population setup: first Ne cells treated as excitatory, rest as inhibitory
pop = Population(Ne + Ni, neuron=neuron)
E = pop[:Ne]
I = pop[Ne:]
# projection setup (recurrent E->E and I->I connections left disabled)
C_ei = Projection(pre=E, post=I, target='exc', name='EI')
C_ie = Projection(pre=I, post=E, target='inh', name='IE')
#C_ee = Projection(E, E, 'exc', name='EE')
#C_ii = Projection(I, I, 'inh', name='II')
# sparse random connectivity (10%) with uniform weights
C_ei.connect_fixed_probability(0.1, weights=Uniform(c_min, c_max))
C_ie.connect_fixed_probability(0.1, weights=Uniform(c_min, c_max))
#C_ee.connect_fixed_probability(0.3, weights=Uniform(c_min, c_max))
#C_ii.connect_fixed_probability(0.3, weights=Uniform(c_min, c_max))
# input
#steps = int(T/dt)
#I_e_tmp = 5.0 + np.random.randn(steps, Ne) * 50.0 * np.sqrt(dt) # input current for excitatory neurons
#I_i_tmp = 4.0 + np.random.randn(steps, Ni) * 44.0 * np.sqrt(dt) # input current for inhibitory neurons
#I_e = TimedArray(rates=I_e_tmp, name="E_inp")
#I_i = TimedArray(rates=I_i_tmp, name="I_inp")
#inp_e = Projection(pre=I_e, post=E, target='exc')
#inp_i = Projection(pre=I_i, post=I, target='exc')
#inp_e.connect_one_to_one(1.0)
#inp_i.connect_one_to_one(1.0)
# constant offset currents drive the network instead of the TimedArray input above
E.i_offset = 5.0
I.i_offset = 2.0
# monitoring: record spikes and membrane potential from simulation start
obs_e = Monitor(E, variables=['spike', 'v'], start=True)
obs_i = Monitor(I, variables=['spike', 'v'], start=True)
# simulation
############
# annarchy simulation
net = Network(everything=True)
net.compile()
net.simulate(duration=T)
# conversion to pyrates: population-averaged rates, per-neuron voltages
rate_e = pyrates_from_annarchy(monitors=[net.get(obs_e)], vars=['spike'], pop_average=True)
rate_i = pyrates_from_annarchy(monitors=[net.get(obs_i)], vars=['spike'], pop_average=True)
v_e = pyrates_from_annarchy(monitors=[net.get(obs_e)], vars=['v'], pop_average=False)
v_i = pyrates_from_annarchy(monitors=[net.get(obs_i)], vars=['v'], pop_average=False)
# visualization
###############
plt.plot(rate_e)
plt.plot(rate_i)
plt.figure()
plt.plot(v_e)
plt.figure()
plt.plot(v_i)
plt.show()
|
#python3
'''
'''
import sys
# Read pairs of clock times "h1 m1 s1 h2 m2 s2" until EOF and print the
# elapsed time from the first to the second as "H M S".
while 1:
    try:
        h1, m1, s1, h2, m2, s2 = map(int, input().split())
    except EOFError:
        break
    # BUG FIX: the old borrow-based arithmetic used Python 3 float '/'
    # instead of integer '//'; results were only correct because '%d'
    # happens to truncate floats. Work in whole seconds instead, which is
    # exact. Assumes the second timestamp is not earlier than the first.
    diff = (h2 - h1) * 3600 + (m2 - m1) * 60 + (s2 - s1)
    print('%d' % (diff // 3600), '%d' % (diff % 3600 // 60), '%d' % (diff % 60))
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for SuggestTrials
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 | -m p | ip install google-cloud-aiplatform
# [START aiplatform_generated_aiplatform_v1beta1_VizierService_SuggestTrials_sync]
from google.cloud import aiplatform_v1beta1
def sample_suggest_trials():
    """Request trial suggestions from the Vizier service (sync sample)."""
    # Instantiate the Vizier service client.
    vizier_client = aiplatform_v1beta1.VizierServiceClient()
    # Build the request with placeholder argument values.
    trials_request = aiplatform_v1beta1.SuggestTrialsRequest(
        parent="parent_value",
        suggestion_count=1744,
        client_id="client_id_value",
    )
    # The call returns a long-running operation; block until it finishes.
    lro = vizier_client.suggest_trials(request=trials_request)
    print("Waiting for operation to complete...")
    final_response = lro.result()
    # Show the final response.
    print(final_response)
# [END aiplatform_generated_aiplatform_v1beta1_VizierService_SuggestTrials_sync]
|
from django.short | cuts import render, render_to_response, RequestContext
from .forms import SignUpForm
# Create your views here.
def home(request):
    """Render the signup page; persist the form when a valid POST arrives.

    FIX: ``render_to_response(..., context_instance=RequestContext(...))``
    (and importing RequestContext from django.shortcuts) was deprecated and
    removed in Django 1.10; use the already-imported ``render`` shortcut with
    an explicit context instead of the ``locals()`` anti-pattern.
    """
    form = SignUpForm(request.POST or None)
    if form.is_valid():
        # commit=False returns the unsaved instance in case it needs tweaks.
        save_it = form.save(commit=False)
        save_it.save()
    return render(request, "signup.html", {"form": form})
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-05-12 08:41
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Adds the created_by and office foreign keys to the Case model.

    NOTE(review): marked initial and depending on cases/offices 0001 --
    presumably split out of the cases initial migration to break a circular
    dependency with the user/office apps (the usual Django pattern); confirm.
    """
    initial = True
    dependencies = [
        ('cases', '0001_initial'),
        ('offices', '0001_initial'),
        # The user model is swappable, so depend on whatever AUTH_USER_MODEL is.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.AddField(
            model_name='case',
            name='created_by',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='case',
            name='office',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='offices.Office'),
        ),
    ]
|
tup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report( | result, 1, ['TEST_RULE'])
def test_check_for_faraway_charset_in_headers_not_match(self):
config = "header TEST_RULE eval:check_for_faraway_charset_in_headers()"
email = "Subject: This is a test subject"
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
| self.check_report(result, 0, [])
def test_check_for_faraway_charset_in_headers_with_all_locales(self):
config = ("header TEST_RULE eval:check_for_faraway_charset_in_headers()\n"
"ok_locales all")
email = "Subject: This is a test subject"
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
class TestFunctionalCheckForUniqueSubjectId(tests.util.TestBase):
    """Functional tests for the check_for_unique_subject_id eval rule."""

    _CONFIG = "header TEST_RULE eval:check_for_unique_subject_id()"

    def _run_subject_check(self, email, expected_score, expected_rules):
        # Shared driver: load the rule, scan the message, verify the report.
        self.setup_conf(config=self._CONFIG, pre_config=PRE_CONFIG)
        self.check_report(self.check_pad(email), expected_score, expected_rules)

    def test_check_for_unique_subject_id_starting_with_special_char_match(self):
        self._run_subject_check("Subject: This is a test subject :3ad41d421",
                                1, ['TEST_RULE'])

    def test_check_for_unique_subject_id_in_parenthesis_match(self):
        self._run_subject_check(
            "Subject: This is a test subject (7217vPhZ0-478TLdy5829qicU9-0@26)",
            1, ['TEST_RULE'])

    def test_check_for_unique_subject_id_starting_with_number_sign(self):
        self._run_subject_check("Subject: This is a test subject #30D7",
                                1, ['TEST_RULE'])

    def test_check_for_unique_subject_id_not_match(self):
        self._run_subject_check(
            "Subject: This is a test subject 7217vPhZ0-478TLdy5829qicU9-0@26",
            0, [])
class TestFunctionalCheckIllegalCharsInHeader(tests.util.TestBase):
    """Functional tests for the check_illegal_chars eval rule.

    BUG FIX: four methods below were named ``text_...`` instead of
    ``test_...``, so the test runner never collected or ran them; they are
    renamed so they actually execute.
    """
    def test_check_illegal_chars_in_header_match_ratio_and_count(self):
        config = "header TEST_RULE eval:check_illegal_chars('MyHeader','0.5','2')"
        email = u"MyHeader: ὲὲaa"
        self.setup_conf(config=config, pre_config=PRE_CONFIG)
        result = self.check_pad(email)
        self.check_report(result, 1, ['TEST_RULE'])

    def test_check_illegal_chars_in_header_not_match_ratio_and_count(self):
        config = "header TEST_RULE eval:check_illegal_chars('MyHeader','0.6','2')"
        email = u"MyHeader: ὲὲaaa"
        self.setup_conf(config=config, pre_config=PRE_CONFIG)
        result = self.check_pad(email)
        self.check_report(result, 0, [])

    def test_check_no_illegal_chars_in_header(self):
        config = "header TEST_RULE eval:check_illegal_chars('MyHeader','0.5','1')"
        email = u"MyHeader: aaaa"
        self.setup_conf(config=config, pre_config=PRE_CONFIG)
        result = self.check_pad(email)
        self.check_report(result, 0, [])

    def test_check_illegal_chars_in_header_match_if_ratio_and_count_zero(self):
        config = "header TEST_RULE eval:check_illegal_chars('MyHeader','0','0')"
        email = u"MyHeader: aaaa"
        self.setup_conf(config=config, pre_config=PRE_CONFIG)
        result = self.check_pad(email)
        self.check_report(result, 1, ['TEST_RULE'])

    def test_check_illegal_chars_if_empty_header(self):
        config = "header TEST_RULE eval:check_illegal_chars('MyHeader','0','0')"
        email = u"MyHeader:"
        self.setup_conf(config=config, pre_config=PRE_CONFIG)
        result = self.check_pad(email)
        self.check_report(result, 0, [])

    def test_check_illegal_chars_multiple_subject_exemptions(self):
        # Renamed from text_... so the runner picks it up.
        config = "header TEST_RULE eval:check_illegal_chars('Subject','0.5','3')"
        email = u"Subject: ®¢£aaa"
        self.setup_conf(config=config, pre_config=PRE_CONFIG)
        result = self.check_pad(email)
        self.check_report(result, 1, ['TEST_RULE'])

    def test_check_illegal_chars_single_subject_exemption_registered(self):
        # Renamed from text_... so the runner picks it up.
        config = "header TEST_RULE eval:check_illegal_chars('Subject','0.33','1')"
        email = u"Subject: ®aa"
        self.setup_conf(config=config, pre_config=PRE_CONFIG)
        result = self.check_pad(email)
        self.check_report(result, 0, [])

    def test_check_illegal_chars_single_subject_exemption_cent(self):
        # Renamed from text_... so the runner picks it up.
        config = "header TEST_RULE eval:check_illegal_chars('Subject','0.33','1')"
        email = u"Subject: a¢a"
        self.setup_conf(config=config, pre_config=PRE_CONFIG)
        result = self.check_pad(email)
        self.check_report(result, 0, [])

    def test_check_illegal_chars_single_subject_exemption_pound(self):
        # Renamed from text_... so the runner picks it up.
        config = "header TEST_RULE eval:check_illegal_chars('Subject','0.33','1')"
        email = u"Subject: aa£"
        self.setup_conf(config=config, pre_config=PRE_CONFIG)
        result = self.check_pad(email)
        self.check_report(result, 0, [])

    def test_check_illegal_chars_in_all_headers_with_from_and_subject(self):
        # From/Subject get a higher tolerance, so this stays below threshold.
        config = "header TEST_RULE eval:check_illegal_chars('ALL','0.5','3')"
        email = (u"Subject: a∞a∞a∞\n"
                 u"From: a∞a∞a∞")
        self.setup_conf(config=config, pre_config=PRE_CONFIG)
        result = self.check_pad(email)
        self.check_report(result, 0, [])

    def test_check_illegal_chars_in_all_headers(self):
        config = "header TEST_RULE eval:check_illegal_chars('ALL','0.45','5')"
        email = (u"To: a∞a∞a∞\n"
                 u"Cc: a∞a∞a")
        self.setup_conf(config=config, pre_config=PRE_CONFIG)
        result = self.check_pad(email)
        self.check_report(result, 1, ['TEST_RULE'])
class TestFunctionalCheckForForgedHotmailReceivedHeaders(tests.util.TestBase):
def test_check_for_forget_hotmail_received_headers_match(self):
config = ("header TEST_RULE1 eval:check_for_forged_hotmail_received_headers()\n"
"header TEST_RULE2 eval:check_for_no_hotmail_received_headers()")
email = ("Received: from hotmail.com (example.com [1.2.3.4])\n"
"by example.com\n"
"(envelope-from <example.com.user@something>)\n"
"From: user@hotmail.com")
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 1, ['TEST_RULE1'])
def test_check_for_forget_hotmail_received_headers_no_from_address(self):
config = ("header TEST_RULE1 eval:check_for_forged_hotmail_received_headers()\n"
"header TEST_RULE2 eval:check_for_no_hotmail_received_headers()")
email = ("Received: from mail pickup service by p23.groups.msn.com\n")
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(email)
self.check_report(result, 0, [])
def test_check_for_forget_hotmail_received_headers_with_msn_group_headers(self):
config = ("header TEST_RULE1 eval:check_for_forged_hotmail_received_headers()\n"
"header TEST_RULE2 eval:check_for_no_hotmail_received_headers()\n")
email = ("Received: from mail pickup service by p23.groups.msn.com\n"
"Received: from hotmail.com (example.com [1.2.3.4])\n"
"\tby example.com\n"
"\t(envelope-from <testid123-bounce@groups.msn.com>)\n"
"Message-Id: <testid123-aaa@groups.msn.com>\n"
"To: <testid123@groups.msn.com>\n"
"From: testid123-aaa@groups.msn.com")
self.setup_conf(config=config, pre_config=PRE_CONFIG)
result = self.check_pad(ema |
# Copyright 2006 James Tauber and contributors
# Copyright (C) 2009 Luke Kenneth Casson Leighton <lkcl@lkcl.net>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF | ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyjamas import DOM
from pyjamas import Factory
from pyjamas.ui.CellPanel import CellPanel
from pyjamas.ui import HasHorizontalAlignment
from pyjamas.ui import HasVerticalAlignment
class Vertica | lPanel(CellPanel):
def insert(self, widget, beforeIndex):
widget.removeFromParent()
tr = DOM.createTR()
td = DOM.createTD()
DOM.insertChild(self.getBody(), tr, beforeIndex)
DOM.appendChild(tr, td)
CellPanel.insert(self, widget, td, beforeIndex)
self.setCellHorizontalAlignment(widget, self.horzAlign)
self.setCellVerticalAlignment(widget, self.vertAlign)
def remove(self, widget):
if isinstance(widget, int):
widget = self.getWidget(widget)
if widget.getParent() != self:
return False
td = DOM.getParent(widget.getElement())
tr = DOM.getParent(td)
DOM.removeChild(self.getBody(), tr)
CellPanel.remove(self, widget)
return True
Factory.registerClass('pyjamas.ui.VerticalPanel', 'VerticalPanel', VerticalPanel)
|
for editor in self.editors:
if editor.filename == filename:
current_file_editor = editor
if current_file_editor is None and len(self.editors) > 0:
current_file_editor = self.editors[0]
if current_file_editor is not None:
self._make_editor_current(current_file_editor)
current_file_editor.view.grab_focus()
self.__update_title()
#######################################################
# Implemented by subclasses
#######################################################
def _fill_contents(self, editor):
raise NotImplementedError()
    def _add_editor(self, editor):
        """Track *editor*, add its widget as a notebook page and hook signals."""
        self.editors.append(editor)
        self.nb_widget.add(editor.widget)
        # Back-pointer used later to map notebook pages back to editors.
        editor.widget._notebook_window_editor = editor
        editor.connect('notify::title', self.on_editor_notify_title)
        editor.connect('notify::filename', self.on_editor_notify_filename)
        editor.connect('notify::modified', self.on_editor_notify_modified)
        editor.connect('notify::state', self.on_editor_notify_state)
        # Sync window title, action sensitivity and saved open-file list.
        self._update_editor_title(editor)
        self._update_editor_state(editor)
        self._update_open_files()
    def _close_editor(self, editor):
        """Close *editor*, first letting the user keep unsaved changes."""
        if not editor.confirm_discard():
            return
        if editor == self.current_editor:
            # Either we'll switch page and a new editor will be set, or we have no pages left
            self.current_editor = None
        if editor == self.__initial_editor:
            self.__initial_editor = None
        self.editors.remove(editor)
        # Break the page->editor back-pointer before destroying the editor.
        editor.widget._notebook_window_editor = None
        editor.close()
        self.__update_title()
        self._update_open_files()
        self.update_sensitivity()
    def _update_editor_state(self, editor):
        # An editor state change can enable/disable window-level actions.
        self.update_sensitivity()
def _update_editor_title(self, editor):
if editor == self.current_editor:
self.__update_title()
#######################################################
# Overrides
#######################################################
    def _add_actions(self, action_group):
        """Extend the base window's action set with notebook-level actions."""
        BaseWindow._add_actions(self, action_group)
        # Tuples: (name, stock icon, label, accelerator, tooltip, callback).
        action_group.add_actions([
            ('notebook-properties', gtk.STOCK_PROPERTIES, "Notebook Prop_erties", None, None, self.on_notebook_properties),
            ('new-worksheet', gtk.STOCK_NEW, "_New Worksheet", "<control>n", None, self.on_new_worksheet),
            ('new-library', gtk.STOCK_NEW, "New _Library", "", None, self.on_new_library),
            ('calculate-all', gtk.STOCK_REFRESH, "Calculate _All", "<control><shift>Return", None, self.on_calculate_all),
        ])
def _close_current(self):
if self.current_editor:
self._close_editor(self.current_editor)
def _close_window(self):
if not self._confirm_discard():
return
BaseWindow._close_window(self)
#######################################################
# Utility
#######################################################
def _make_editor_current(self, editor):
self.nb_widget.set_current_page(self.nb_widget.page_num(editor.widget))
def __close_initial_editor(self):
if self.__initial_editor and not self.__initial_editor.filename and not self.__initial_editor.modified:
self._close_editor(self.__initial_editor)
self.__initial_editor = None
def __new_worksheet(self):
editor = WorksheetEditor(self.notebook)
self._add_editor(editor)
self._make_editor_current(editor)
return editor
def __new_library(self):
editor = LibraryEditor(self.notebook)
self._add_editor(editor)
self._make_editor_current(editor)
return editor
def __update_title(self, *args):
if self.current_editor:
title = self.current_editor.title + " - " + os.path.basename(self.notebook.folder) + " - Reinteract"
else:
title = os.path.basename(self.notebook.folder) + " - Reinteract"
self.window.set_title(title)
    def _confirm_discard(self, before_quit=False):
        """Ask about every modified editor; return False if the user cancels."""
        for editor in self.editors:
            if editor.modified:
                # Let the user see what they are (or are not) discarding.
                self.window.present_with_time(gtk.get_current_event_time())
                self._make_editor_current(editor)
                if not editor.confirm_discard(before_quit=before_quit):
                    return False
        return True
def _update_open_files(self):
open_file_paths = []
for child in self.nb_widget.get_children():
file = child._notebook_window_editor.file
if not file:
continue
open_file_paths.append(file.path)
self.state.set_open_files(open_file_paths)
def _update_current_file(self):
file = self.current_editor.file
if file is not None:
self.state.set_current_file(file.path)
else:
self.state.set_current_file(None)
    def _update_size(self, width, height):
        # Persist window dimensions so the next session can restore them.
        self.state.set_size(width, height)
#######################################################
# Callbacks
#######################################################
    def on_notebook_properties(self, action):
        """Show the notebook-properties dialog; save an edited description."""
        builder = WindowBuilder('notebook-properties')
        builder.dialog.set_transient_for(self.window)
        builder.dialog.set_title("%s - Properties" % self.notebook.info.name)
        builder.name_entry.set_text(self.notebook.info.name)
        # The notebook name cannot be edited here; show it greyed out.
        builder.name_entry.set_sensitive(False)
        builder.description_text_view.get_buffer().props.text = self.notebook.info.description
        response = builder.dialog.run()
        if response == gtk.RESPONSE_OK:
            self.notebook.info.description = builder.description_text_view.get_buffer().props.text
        builder.dialog.destroy()
    def on_new_worksheet(self, action):
        # Menu/toolbar callback: create and focus a new worksheet.
        self.__new_worksheet()
    def on_new_library(self, action):
        # Menu/toolbar callback: create and focus a new library.
        self.__new_library()
def on_calculate_all(self, action):
for editor in self.editors:
if editor.needs_calculate:
editor.calculate()
def on_page_switched(self, notebook, _, page_num):
widget = self.nb_widget.get_nth_page(page_num)
for editor in self.editors:
if editor.widget == widget:
self.current_editor = editor
self.__update_title()
self._update_current_file()
self.update_sensitivity()
break
    def on_page_reordered(self, notebook, page, new_page_num):
        # Tab drag-reorder changes the saved open-files order.
        self._update_open_files()
    def on_editor_notify_title(self, editor, *args):
        # Editor title changed; mirror it in the window title if current.
        self._update_editor_title(editor)
    def on_editor_notify_filename(self, editor, *args):
        # A rename/save-as affects both the open-files list and current file.
        self._update_open_files()
        self._update_current_file()
    def on_editor_notify_modified(self, editor, *args):
        # The modified flag only affects actions for the current editor.
        if editor == self.current_editor:
            self.update_sensitivity()
    def on_editor_notify_state(self, editor, *args):
        # Editor state changes drive window-level action sensitivity.
        self._update_editor_state(editor)
    def on_configure_event(self, window, event):
        # Persist window geometry; returning False lets GTK keep handling.
        self._update_size(event.width, event.height)
        return False
#######################################################
# Public API
#######################################################
def confirm_discard(self):
if not self._confirm_discard(before_quit=True):
return False
return True
def open_file(self, file):
filename = os.path.join(self.notebook.folder, file.path)
for editor in self.editors:
if editor.file == file:
self._make_editor_current(editor)
return True
editor = self._load_editor(filename)
if not editor:
return False
self._add_editor(editor)
self._make_editor_current(editor)
self.__close_initial_editor()
return |
tracks_file = os.path.join(self.opensfm_project_path, 'tracks.csv')
reconstruction_file = os.path.join(self.opensfm_project_path, 'reconstruction.json')
if not io.file_exists(tracks_file) or rerun:
self.run('create_tracks')
else:
log.ODM_WARNING('Found a valid OpenSfM tracks file in: %s' % tracks_file)
if not io.file_exists(reconstruction_file) or rerun:
self.run('reconstruct')
else:
log.ODM_WARNING('Found a valid OpenSfM reconstruction file in: %s' % reconstruction_file)
# Check that a reconstruction file has been created
if not self.reconstructed():
log.ODM_ERROR("The program could not process this dataset using the current settings. "
"Check that the images have enough overlap, "
"that there are enough recognizable features "
"and that the images are in focus. "
"You could also try to increase the --min-num-features parameter."
"The program will now exit.")
exit(1)
def setup(self, args, images_path, reconstruction, append_config = [], rerun=False):
"""
Setup a OpenSfM project
"""
if rerun and io.dir_exists(self.opensfm_project_path):
shutil.rmtree(self.opensfm_project_path)
if not io.dir_exists(self.opensfm_project_path):
system.mkdir_p(self.opensfm_project_path)
list_path = os.path.join(self.opensfm_project_path, 'image_list.txt')
if not io.file_exists(list_path) or rerun:
if reconstruction.multi_camera:
photos = get_photos_by_band(reconstruction.multi_camera, args.primary_band)
if len(photos) < 1:
raise Exception("Not enough images in selected band %s" % args.primary_band.lower())
log.ODM_INFO("Reconstruction will use %s images from %s band" % (len(photos), args.primary_band.lower()))
else:
photos = reconstruction.photos
# create file list
has_alt = True
has_gps = False
with open(list_path, 'w') as fout:
for photo in photos:
if not photo.altitude:
has_alt = False
if photo.latitude is not None and photo.longitude is not None:
has_gps = True
fout.write('%s\n' % os.path.join(images_path, photo.filename))
# check for image_groups.txt (split-merge)
image_groups_file = os.path.join(args.project_path, "image_groups.txt")
if io.file_exists(image_groups_file):
log.ODM_INFO("Copied image_groups.txt to OpenSfM directory")
io.copy(image_groups_file, os.path.join(self.opensfm_project_path, "image_groups.txt"))
# check for cameras
if args.cameras:
try:
camera_overrides = camera.get_opensfm_camera_models(args.cameras)
with open(os.path.join(self.opensfm_project_path, "camera_models_overrides.json"), 'w') as f:
f.write(json.dumps(camera_overrides))
log.ODM_INFO("Wrote camera_models_overrides.json to OpenSfM directory")
except Exception as e:
log.ODM_WARNING("Cannot set camera_models_overrides.json: %s" % str(e))
use_bow = args.matcher_type == "bow"
feature_type = "SIFT"
# GPSDOP override if we have GPS accuracy information (such as RTK)
if 'gps_accuracy_is_set' in args:
log.ODM_INFO("Forcing GPS DOP to %s for all images" % args.gps_accuracy)
log.ODM_INFO("Writing exif overrides")
exif_overrides = {}
for p in photos:
if 'gps_accuracy_is_set' in args:
dop = args.gps_accuracy
elif p.get_gps_dop() is not None:
dop = | p.get_gps_dop()
else:
dop = args.gps_accuracy # default value
if p.latitude is not None and p.longitude is not None:
exif_overrides[p.filename] = {
'gps': {
'latitude | ': p.latitude,
'longitude': p.longitude,
'altitude': p.altitude if p.altitude is not None else 0,
'dop': dop,
}
}
with open(os.path.join(self.opensfm_project_path, "exif_overrides.json"), 'w') as f:
f.write(json.dumps(exif_overrides))
# Check image masks
masks = []
for p in photos:
if p.mask is not None:
masks.append((p.filename, os.path.join(images_path, p.mask)))
if masks:
log.ODM_INFO("Found %s image masks" % len(masks))
with open(os.path.join(self.opensfm_project_path, "mask_list.txt"), 'w') as f:
for fname, mask in masks:
f.write("{} {}\n".format(fname, mask))
# Compute feature_process_size
feature_process_size = 2048 # default
if 'resize_to_is_set' in args:
# Legacy
log.ODM_WARNING("Legacy option --resize-to (this might be removed in a future version). Use --feature-quality instead.")
feature_process_size = int(args.resize_to)
else:
feature_quality_scale = {
'ultra': 1,
'high': 0.5,
'medium': 0.25,
'low': 0.125,
'lowest': 0.0675,
}
max_dim = find_largest_photo_dim(photos)
if max_dim > 0:
log.ODM_INFO("Maximum photo dimensions: %spx" % str(max_dim))
feature_process_size = int(max_dim * feature_quality_scale[args.feature_quality])
else:
log.ODM_WARNING("Cannot compute max image dimensions, going with defaults")
depthmap_resolution = get_depthmap_resolution(args, photos)
# create config file for OpenSfM
config = [
"use_exif_size: no",
"flann_algorithm: KDTREE", # more stable, faster than KMEANS
"feature_process_size: %s" % feature_process_size,
"feature_min_frames: %s" % args.min_num_features,
"processes: %s" % args.max_concurrency,
"matching_gps_neighbors: %s" % args.matcher_neighbors,
"matching_gps_distance: %s" % args.matcher_distance,
"depthmap_method: %s" % args.opensfm_depthmap_method,
"depthmap_resolution: %s" % depthmap_resolution,
"depthmap_min_patch_sd: %s" % args.opensfm_depthmap_min_patch_sd,
"depthmap_min_consistent_views: %s" % args.opensfm_depthmap_min_consistent_views,
"optimize_camera_parameters: %s" % ('no' if args.use_fixed_camera_params or args.cameras else 'yes'),
"undistorted_image_format: tif",
"bundle_outlier_filtering_type: AUTO",
"align_orientation_prior: vertical",
"triangulation_type: ROBUST"
]
if args.camera_lens != 'auto':
config.append("camera_projection_type: %s" % args.camera_lens.upper())
if not has_gps:
log.ODM_INFO("No GPS information, using BOW matching")
use_bow = True
feature_type = args.feature_type.upper()
if use_bow:
config.append("matcher_type: WORDS")
# Cannot use SIFT with BOW
if feature_type == "SIFT":
log.ODM_WARNING("Using BOW matching, will use HAHOG feature type, not SIFT")
|
sample_time, notes,
barcode])
if len(survey_ids) > 0:
sql = """INSERT INTO ag.source_barcodes_surveys (survey_id,
barcode)
VALUES (%s, %s)"""
for survey_id in survey_ids:
TRN.add(sql, [survey_id, barcode])
    def deleteSample(self, barcode, ag_login_id):
        """ Removes by either releasing barcode back for relogging or withdraw

        Parameters
        ----------
        barcode : str
            Barcode to delete
        ag_login_id : UUID4
            Login ID for the barcode

        Notes
        -----
        Strictly speaking the ag_login_id isn't needed but it makes it really
        hard to hack the function when you would need to know someone else's
        login id (a GUID) to delete something maliciously.

        If the barcode has never been scanned, assume a mis-log and wipe it so
        barcode can be logged again. If barcode has been scanned, that means we
        have received it and must withdraw it to delete it from the system.
        """
        with TRN:
            # Figure out if we've received the barcode or not
            sql = "SELECT scan_date FROM barcode WHERE barcode = %s"
            TRN.add(sql, [barcode])
            received = TRN.execute_fetchlast()
            if not received:
                # Not received, so we release the barcode back to be relogged.
                # NOTE(review): set_text is built here but never used in this
                # branch -- the UPDATE below only clears status, so the
                # site/date/time/notes fields are NOT wiped. Looks like a
                # latent bug; confirm intended behavior before changing.
                set_text = """site_sampled = NULL,
                              sample_time = NULL, sample_date = NULL,
                              environment_sampled = NULL, notes = NULL"""
                sql = "UPDATE barcode SET status = NULL WHERE barcode = %s"
                TRN.add(sql, [barcode])
            else:
                # Barcode already received, so we withdraw the barcode.
                set_text = "withdrawn = 'Y'"
                sql = """UPDATE ag_kit_barcodes
                         SET {}
                         WHERE barcode IN (
                             SELECT akb.barcode
                             FROM ag_kit_barcodes akb
                             INNER JOIN ag_kit ak USING (ag_kit_id)
                             WHERE ak.ag_login_id = %s
                                AND akb.barcode = %s)""".format(set_text)
                TRN.add(sql, [ag_login_id, barcode])
            # Either way, drop any barcode<->survey associations.
            sql = """DELETE FROM ag.source_barcodes_surveys
                     WHERE barcode = %s"""
            TRN.add(sql, [barcode])
def getHumanParticipants(self, ag_login_id):
# get people from new survey setup
sql = """SELECT DISTINCT participant_name from ag.ag_login_surveys
LEFT JOIN ag.survey_answers USING (survey_id)
JOIN ag.group_questions gq USING (survey_question_id)
JOIN ag.surveys ags USING (survey_group)
WHERE ag_login_id = %s AND ags.survey_id = %s"""
with TRN:
TRN.add(sql, [ag_login_id, 1])
return TRN.execute_fetchflatten()
    def associate_barcode_to_survey_id(self, ag_login_id, participant_name,
                                       barcode, survey_id):
        """Associate a barcode to a survey ID

        Parameters
        ----------
        ag_login_id : str
            A valid AG login ID
        participant_name : str
            The name of a participant associated with the login
        barcode : str
            A valid barcode associated with the login
        survey_id : str
            A valid survey ID

        Raises
        ------
        ValueError
            If the barcode is not part of a kit owned by the login, or is
            already assigned to a participant within the kit
        """
        with TRN:
            # test first if the barcode is already associated to a participant
            sql = """SELECT ag_login_id, participant_name, barcode
                     FROM ag.ag_login_surveys
                     JOIN ag.source_barcodes_surveys USING(survey_id)
                     WHERE ag_login_id=%s
                         AND participant_name=%s
                         AND barcode=%s"""
            TRN.add(sql, [ag_login_id, participant_name, barcode])
            results = TRN.execute_fetchflatten()
            if len(results) == 0:
                # this implies the barcode was unassigned, and this is a new
                # assignment.
                # Let's verify the barcode is associated to the kit and login
                sql = """SELECT 1
                         FROM ag.ag_login
                         JOIN ag.ag_kit USING (ag_login_id)
                         JOIN ag.ag_kit_barcodes USING (ag_kit_id)
                         WHERE ag_login_id=%s
                             AND barcode=%s"""
                TRN.add(sql, [ag_login_id, barcode])
                results = TRN.execute_fetchflatten()
                if len(results) == 0:
                    # the barcode is not part of a kit with the login ID
                    raise ValueError("Unexpected barcode / kit relationship")
                # the barcode should also not already be linked to a
                # participant within the kit
                sql = """SELECT 1
                         FROM ag.ag_login_surveys
                         JOIN ag.source_barcodes_surveys USING(survey_id)
                         WHERE ag_login_id=%s
                             AND barcode=%s"""
                TRN.add(sql, [ag_login_id, barcode])
                results = TRN.execute_fetchflatten()
                if len(results) > 0:
                    # the barcode is already assigned to someone on the kit
                    raise ValueError("Barcode already assigned")
                # Record the (login, survey, participant) association first...
                sql = """INSERT INTO ag_login_surveys
                         (ag_login_id, survey_id, participant_name)
                         VALUES (%s, %s, %s)"""
                TRN.add(sql, [ag_login_id, survey_id, participant_name])
            # ...then link the barcode to the survey in all cases.
            sql = """INSERT INTO ag.source_barcodes_surveys
                     (survey_id, barcode)
                     VALUES (%s, %s)"""
            TRN.add(sql, [survey_id, barcode])
def updateVioscreenStatus(self, survey_id, status):
with TRN:
sql = """UPDATE ag_login_surveys
SET vioscreen_status = %s
WHERE survey_id = %s"""
TRN.add(sql, (status, survey_id))
def get_vioscreen_status(self, survey_id):
"""Retrieves the vioscreen status for a survey_id
Parameters
----------
survey_id : str
The survey to get status for
Returns
-------
int
Vioscreen status
Raises
------
ValueError
survey_id passed is not in the database
"""
with TRN:
sql = """SELECT vioscreen_status
FROM ag.ag_login_surveys
WHERE survey_id = %s"""
TRN.add(sql, [survey_id])
status = TRN.execute_fetchindex()
if not status:
raise ValueError("Survey ID %s not in database" % survey_id)
return status[0][0]
def getAnimalParticipants(self, ag_login_id):
sql = """SELECT DISTINCT participant_name from ag.ag_login_surveys
JOIN ag.survey_answers USING (survey_id)
JOIN ag.group_questions gq USING (survey_question_id)
JOIN ag.surveys ags USING (survey_group)
WHERE ag_login_id = %s AND ags.survey_id = %s"""
with TRN:
TRN.add(sql, [ag_login_id, 2])
return TRN.execute_fetchflatten()
def getParticipantSamples(self, ag_login_id, participant_name):
sql = """SELECT DISTINCT
ag_kit_barcodes.barcode,
ag_kit_barcodes.site_sampled,
ag_kit_barcodes.sample_date,
ag_kit_barcodes.sample_time,
ag_kit_barcodes.notes,
barcodes.barcode.status
FROM ag.ag_login_surveys
JOIN ag.source_barcodes_surveys USING (survey_id)
JOIN ag.ag_kit_barcodes USING (barcode)
JOIN barcodes.barcode USING (barcode)
WHERE ag_login_id |
m Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import platform
from subprocess import Popen, STDOUT
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common import utils
import time
class FirefoxBinary(object):
    """Manages the Firefox executable: locating the binary, launching it
    against a profile, and killing the resulting process."""

    # Shared-library name that is LD_PRELOADed on Linux (see
    # _modify_link_library_path); judging by the name it prevents Firefox
    # from stealing window focus -- TODO confirm.
    NO_FOCUS_LIBRARY_NAME = "x_ignore_nofocus.so"
    def __init__(self, firefox_path=None, log_file=None):
        """
        Creates a new instance of Firefox binary.

        :Args:
         - firefox_path - Path to the Firefox executable. By default, it will be detected from the standard locations.
         - log_file - A file object to redirect the firefox process output to. It can be sys.stdout.
                      Please note that with parallel run the output won't be synchronous.
                      By default, it will be redirected to /dev/null.
        """
        self._start_cmd = firefox_path
        # We used to default to subprocess.PIPE instead of /dev/null, but after
        # a while the pipe would fill up and Firefox would freeze.
        self._log_file = log_file or open(os.devnull, "wb")
        self.command_line = None
        if self._start_cmd is None:
            self._start_cmd = self._get_firefox_start_cmd()
        if not self._start_cmd.strip():
            raise Exception("Failed to find firefox binary. You can set it by specifying the path to 'firefox_binary':\n\nfrom selenium.webdriver.firefox.firefox_binary import FirefoxBinary\n\n" +
                            "binary = FirefoxBinary('/path/to/binary')\ndriver = webdriver.Firefox(firefox_binary=binary)")
        # Rather than modifying the environment of the calling Python process
        # copy it and modify as needed.
        self._firefox_env = os.environ.copy()
        # Keep automated runs quiet and isolated: disable the crash-reporter
        # UI, don't attach to an already-running Firefox, don't restart via
        # the Extension Manager.  NOTE(review): semantics of these MOZ_*
        # variables should be confirmed for the targeted Firefox versions.
        self._firefox_env["MOZ_CRASHREPORTER_DISABLE"] = "1"
        self._firefox_env["MOZ_NO_REMOTE"] = "1"
        self._firefox_env["NO_EM_RESTART"] = "1"
def add_command_line_options(self, *args):
self.command_line = args
    def launch_browser(self, profile):
        """Launches the browser for the given profile name.

        It is assumed the profile already exists.
        """
        self.profile = profile

        # Start Firefox against the profile directory, then block until the
        # WebDriver extension accepts connections (raises on failure).
        self._start_from_profile_path(self.profile.path)
        self._wait_until_connectable()
def kill(self):
"""Kill the browser.
This is useful when the browser is stuck.
"""
if self.process:
self.process.kill()
self.process.wait()
    def _start_from_profile_path(self, path):
        """Launch Firefox twice against *path*: a first ``-silent`` run that
        is waited on (presumably letting Firefox prepare/migrate the profile
        -- TODO confirm), then the real ``-foreground`` instance whose
        process handle is kept for kill()/poll()."""
        self._firefox_env["XRE_PROFILE_PATH"] = path

        if platform.system().lower() == 'linux':
            self._modify_link_library_path()
        command = [self._start_cmd, "-silent"]
        if self.command_line is not None:
            for cli in self.command_line:
                command.append(cli)

        # communicate() blocks until the -silent run has exited.
        Popen(command, stdout=self._log_file, stderr=STDOUT,
              env=self._firefox_env).communicate()
        # Replace "-silent" (index 1) with "-foreground" and start the real
        # browser; this process stays alive and is tracked on self.process.
        command[1] = '-foreground'
        self.process = Popen(
            command, stdout=self._log_file, stderr=STDOUT,
            env=self._firefox_env)
def _wait_until_connectable(self):
"""Blocks until the extension is connectable in the firefox."""
count = 0
while not utils.is_connectable(self.profile.port):
if self.process.poll() is not None:
# Browser has exited
raise WebDriverException("The browser appears to have exited "
"before we could connect. If you specified a log_file in "
"the FirefoxBinary constructor, check it for details.")
if count == 30:
self.kill()
raise WebDriverException("Can't load the profile. Profile "
"Dir: %s If you specified a log_file in the "
"FirefoxBinary constructor, check it for details.")
count += 1
time.sleep(1)
return True
    def _find_exe_in_registry(self):
        """Look up the firefox.exe path in the Windows registry.

        Returns "" when no usable registry entry is found.
        """
        try:
            # Python 2 module name first, then the Python 3 spelling.
            from _winreg import OpenKey, QueryValue, HKEY_LOCAL_MACHINE, HKEY_CURRENT_USER
        except ImportError:
            from winreg import OpenKey, QueryValue, HKEY_LOCAL_MACHINE, HKEY_CURRENT_USER
        import shlex
        keys = (
            r"SOFTWARE\Classes\FirefoxHTML\shell\open\command",
            r"SOFTWARE\Classes\Applications\firefox.exe\shell\open\command"
        )
        command = ""
        for path in keys:
            # For each key path, try HKEY_LOCAL_MACHINE first, then
            # HKEY_CURRENT_USER; break out on the first hit.
            try:
                key = OpenKey(HKEY_LOCAL_MACHINE, path)
                command = QueryValue(key, "")
                break
            except OSError:
                try:
                    key = OpenKey(HKEY_CURRENT_USER, path)
                    command = QueryValue(key, "")
                    break
                except OSError:
                    pass
            else:
                # NOTE(review): this try/else clause looks unreachable --
                # the success path always ``break``s out of the loop, which
                # skips the else.  Probably intended as a for/else; confirm
                # against upstream selenium before changing.
                return ""

        if not command:
            return ""
        # The registry stores a full command line; its first token is the
        # executable path.
        return shlex.split(command)[0]
    def _get_firefox_start_cmd(self):
        """Return the command to start firefox.

        Resolution order depends on the platform: fixed app-bundle path on
        macOS, registry then Program Files on Windows, PATH search for
        "firefox"/"iceweasel" elsewhere. May return "" (macOS path missing
        is not checked) and raises RuntimeError when nothing is found on
        the PATH.
        """
        start_cmd = ""
        if platform.system() == "Darwin":
            start_cmd = ("/Applications/Firefox.app/Contents/MacOS/firefox-bin")
        elif platform.system() == "Windows":
            start_cmd = (self._find_exe_in_registry() or
                         self._default_windows_location())
        elif platform.system() == 'Java' and os._name == 'nt':
            # Jython reports "Java"; os._name distinguishes Windows there.
            start_cmd = self._default_windows_location()
        else:
            # for/else: the else arm runs only when the loop finishes
            # without ``break``, i.e. neither binary name was found.
            for ffname in ["firefox", "iceweasel"]:
                start_cmd = self.which(ffname)
                if start_cmd is not None:
                    break
            else:
                # couldn't find firefox on the system path
                raise RuntimeError("Could not find firefox in your system PATH." +
                                   " Please specify the firefox binary location or install firefox")
        return start_cmd
def _default_windows_location(self):
program_files = [os.getenv("PROGRAMFILES", r"C:\Program Files"),
os.getenv("PROGRAMFILES(X86)", r"C:\Program Files (x86)")]
for path in program_files:
binary_path = os.path.join(path, r"Mozilla Firefox\firefox.exe")
if os.access(binary_path, os.X_OK):
return binary_path
return ""
    def _modify_link_library_path(self):
        """Prepend the extracted no-focus library directories to
        LD_LIBRARY_PATH and LD_PRELOAD the library for the child process."""
        existing_ld_lib_path = os.environ.get('LD_LIBRARY_PATH', '')

        # _extract_and_check returns "<dir>:<dir>:", so the concatenation
        # below is already colon-separated.
        new_ld_lib_path = self._extract_and_check(
            self.profile, self.NO_FOCUS_LIBRARY_NAME, "x86", "amd64")
        new_ld_lib_path += existing_ld_lib_path

        # Only the child Firefox environment is modified, not this process.
        self._firefox_env["LD_LIBRARY_PATH"] = new_ld_lib_path
        self._firefox_env['LD_PRELOAD'] = self.NO_FOCUS_LIBRARY_NAME
def _extract_and_check(self, profile, no_focus_so_name, x86, amd64):
paths = [x86, amd64]
built_path = ""
for path in paths:
library_path = os.path.join(profile.path, path)
os.makedirs(library_path)
import shutil
shutil.copy(os.path.join(os.path.dirname(__file__), path,
self.NO_FOCUS_LIBRARY_NAME),
library_path)
built_path += library_path + ":"
return built_path
def which(self, fname):
"""Returns the fully qualified path by searching Path of the given
name"""
for pe in os.environ['PATH'].split(os.pathsep):
checkname = os.path.join(pe, fname)
if os.access(ch |
import math
class Point(object):
    """A mutable 2-D coordinate on Santa's delivery grid."""

    # Class-level defaults, kept for backward compatibility; __init__
    # always shadows them on the instance.
    X = 0
    Y = 0

    def __init__(self, x, y):
        self.X = x
        self.Y = y

    def getX(self):
        """Return the X coordinate."""
        return self.X

    def getY(self):
        """Return the Y coordinate."""
        return self.Y

    def __str__(self):
        return "Point(%s,%s)" % (self.X, self.Y)

    def __eq__(self, other):
        return (self.X, self.Y) == (other.X, other.Y)

    def move_left(self):
        """Decrement X by one."""
        self.X -= 1

    def move_up(self):
        """Decrement Y by one."""
        self.Y -= 1

    def move_right(self):
        """Increment X by one."""
        self.X += 1

    def move_bottom(self):
        """Increment Y by one."""
        self.Y += 1
|
class Trail(object):
    """Ordered record of every grid position Santa has delivered to."""

    def __init__(self):
        self.trail = []

    def extend(self, point):
        """Append *point* to the trail, logging it for debugging."""
        self.trail.append(point)
        # print(...) with a single argument behaves identically on
        # Python 2 and 3 (the original used the py2-only statement form).
        print("Added : " + str(point))

    def getHousesCountVisitedOnce(self):
        """Return how many distinct houses received at least one gift.

        Replaces the original O(n^2) pairwise scan (which also lacked a
        ``break`` once a duplicate was found) with a set of (X, Y)
        tuples; the returned count is identical.
        """
        seen = set()
        for point in self.trail:
            seen.add((point.X, point.Y))
        return len(seen)
def main():
    """Entry point: replay the moves from data.txt and report the count.

    Each character moves Santa one step: '<' decreases X, '^' decreases Y,
    '>' increases X, 'v' increases Y; any other character is ignored.
    Every position reached (including the origin) is appended to the trail.
    """
    origin = Point(0, 0)  # Where Santa starts his run.
    trail = Trail()       # The gifts delivery trail.
    trail.extend(origin)
    # Dispatch table replaces the four duplicated if/elif branches.
    moves = {
        "<": Point.move_left,
        "^": Point.move_up,
        ">": Point.move_right,
        "v": Point.move_bottom,
    }
    with open('data.txt') as f:
        for c in f.read():
            step = moves.get(c)
            if step is None:
                continue
            # Copy the current position so each trail entry is a distinct
            # Point object (matches the original behaviour).
            origin = Point(origin.X, origin.Y)
            step(origin)
            trail.extend(origin)
    # Single-argument print(...) works on both Python 2 and 3.
    print("Number of houses visited once : " + str(trail.getHousesCountVisitedOnce()))

if __name__ == "__main__":
    main()
|
"""
WSGI config for proxy project.
It exposes the WSGI call | able as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "proxy.settings")
application = get_wsgi_applic | ation()
|
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (c) 2016 Gramps Development Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
Proxy class for the Gramps databases. Caches lookups from handles.
"""
from ..utils.lru import LRU
class CacheProxyDb:
    """
    A Proxy for a database with cached lookups on handles.

    Does not invalidate caches. Should be used only in read-only
    places, and not where caches are altered.
    """
    def __init__(self, database):
        """
        CacheProxy will cache items based on their handle.

        Assumes all handles (regardless of type) are unique.
        Database is called self.db for consistency with other
        proxies.
        """
        self.db = database
        self.clear_cache()

    def __getattr__(self, attr):
        """
        If an attribute isn't found here, use the self.db
        version.
        """
        return getattr(self.db, attr)

    def clear_cache(self, handle=None):
        """
        Clears all caches if handle is None, or the specific entry.
        Raises KeyError when the given handle is not cached.
        """
        if handle:
            del self.cache_handle[handle]
        else:
            self.cache_handle = LRU(100000)

    def _cached(self, handle, lookup):
        """
        Return the object for handle, fetching it with *lookup* on a
        cache miss. Bytes handles are normalized to str so both
        spellings share one cache entry.
        """
        if isinstance(handle, bytes):
            handle = str(handle, "utf-8")
        if handle not in self.cache_handle:
            self.cache_handle[handle] = lookup(handle)
        return self.cache_handle[handle]

    # One thin cached wrapper per object type.  BUG FIX: the original
    # defined get_place_from_handle twice (the second silently replaced
    # the first); the duplicate has been removed.
    def get_person_from_handle(self, handle):
        """Cached person lookup by handle."""
        return self._cached(handle, self.db.get_person_from_handle)

    def get_event_from_handle(self, handle):
        """Cached event lookup by handle."""
        return self._cached(handle, self.db.get_event_from_handle)

    def get_family_from_handle(self, handle):
        """Cached family lookup by handle."""
        return self._cached(handle, self.db.get_family_from_handle)

    def get_repository_from_handle(self, handle):
        """Cached repository lookup by handle."""
        return self._cached(handle, self.db.get_repository_from_handle)

    def get_place_from_handle(self, handle):
        """Cached place lookup by handle."""
        return self._cached(handle, self.db.get_place_from_handle)

    def get_citation_from_handle(self, handle):
        """Cached citation lookup by handle."""
        return self._cached(handle, self.db.get_citation_from_handle)

    def get_source_from_handle(self, handle):
        """Cached source lookup by handle."""
        return self._cached(handle, self.db.get_source_from_handle)

    def get_note_from_handle(self, handle):
        """Cached note lookup by handle."""
        return self._cached(handle, self.db.get_note_from_handle)

    def get_media_from_handle(self, handle):
        """Cached media lookup by handle."""
        return self._cached(handle, self.db.get_media_from_handle)

    def get_tag_from_handle(self, handle):
        """Cached tag lookup by handle."""
        return self._cached(handle, self.db.get_tag_from_handle)
|
'''
Created on Sep 10, 2014
@author: moloyc
'''
import os
import logging
from jinja2 import Environment, PackageLoader
from netaddr import IPNetwork
import util
from model import Pod
from dao import Dao
from writer import DhcpConfWriter
moduleName = 'ztp'
logging.basicConfig()
logger = logging.getLogger(moduleName)
logger.setLevel(logging.DEBUG)
ztpTemplateLocation = os.path.join('conf', 'ztp')
class ZtpServer():
    """Generates ISC dhcpd configuration files used for zero-touch
    provisioning (ZTP) of pod devices."""

    def __init__(self, conf=None, templateEnv=None):
        """
        :param conf: configuration dict; when None or empty the global
            configuration is loaded via util.loadConfig()
        :param templateEnv: optional jinja2 Environment override
        """
        # BUG FIX: ``conf`` previously defaulted to a shared mutable ``{}``.
        if conf is None:
            conf = {}
        if not any(conf):
            self.conf = util.loadConfig()
            logger.setLevel(logging.getLevelName(self.conf['logLevel'][moduleName]))
        else:
            self.conf = conf
        self.dao = Dao(self.conf)

        if templateEnv is None:
            self.templateEnv = Environment(loader=PackageLoader('jnpr.openclos', ztpTemplateLocation))

    def dcpServerReloadConfig(self):
        # NOTE(review): name looks like a typo for "dhcpServerReloadConfig";
        # kept unchanged for backward compatibility.
        #TODO: sudo service isc-dhcp-server force-reload
        # Not needed as of now
        pass

    ''' TODO: for 2.0, Not needed as of now
    def createSingleDhcpConfFile(self):
        pods = self.dao.getAll(Pod)
        if len(pods) > 0:
            confWriter = DhcpConfWriter(self.conf, pods[0], self.dao)
            confWriter.writeSingle(self.generateSingleDhcpConf())
    '''

    def generateSingleDhcpConf(self):
        """Render one dhcpd.conf covering all pods (Ubuntu only).

        Returns None on any other platform.
        """
        if util.isPlatformUbuntu():
            ztp = self.populateDhcpGlobalSettings()
            dhcpTemplate = self.templateEnv.get_template('dhcp.conf.ubuntu')
            return dhcpTemplate.render(ztp=self.populateDhcpDeviceSpecificSettingForAllPods(ztp))

    def createPodSpecificDhcpConfFile(self, podName):
        """Render and write the dhcpd.conf for a single pod."""
        pod = self.dao.getUniqueObjectByName(Pod, podName)
        confWriter = DhcpConfWriter(self.conf, pod, self.dao)
        confWriter.write(self.generatePodSpecificDhcpConf(pod.name))

    def generatePodSpecificDhcpConf(self, podName):
        """Render the dhcpd.conf text for one pod.

        Returns None when the platform is neither Ubuntu nor CentOS.
        """
        ztp = self.populateDhcpGlobalSettings()

        conf = None
        if util.isPlatformUbuntu():
            dhcpTemplate = self.templateEnv.get_template('dhcp.conf.ubuntu')
            ztp = self.populateDhcpDeviceSpecificSetting(podName, ztp)
            conf = dhcpTemplate.render(ztp=ztp)
        elif util.isPlatformCentos():
            dhcpTemplate = self.templateEnv.get_template('dhcp.conf.centos')
            ztp = self.populateDhcpDeviceSpecificSetting(podName, ztp)
            conf = dhcpTemplate.render(ztp=ztp)

        logger.debug('dhcpd.conf\n%s' % (conf))
        return conf

    def populateDhcpGlobalSettings(self):
        """Build the subnet-wide DHCP settings dict from the clos
        definition, deriving route/range defaults from the subnet when
        the optional options are absent or empty."""
        ztp = {}
        ztpGlobalSettings = util.loadClosDefinition()['ztp']
        subnet = ztpGlobalSettings['dhcpSubnet']
        dhcpBlock = IPNetwork(subnet)
        ipList = list(dhcpBlock.iter_hosts())
        ztp['network'] = str(dhcpBlock.network)
        ztp['netmask'] = str(dhcpBlock.netmask)

        # First host is the default route, second/last bound the range,
        # unless explicitly configured.
        ztp['defaultRoute'] = ztpGlobalSettings.get('dhcpOptionRoute')
        if ztp['defaultRoute'] is None or ztp['defaultRoute'] == '':
            ztp['defaultRoute'] = str(ipList[0])

        ztp['rangeStart'] = ztpGlobalSettings.get('dhcpOptionRangeStart')
        if ztp['rangeStart'] is None or ztp['rangeStart'] == '':
            ztp['rangeStart'] = str(ipList[1])

        ztp['rangeEnd'] = ztpGlobalSettings.get('dhcpOptionRangeEnd')
        if ztp['rangeEnd'] is None or ztp['rangeEnd'] == '':
            ztp['rangeEnd'] = str(ipList[-1])

        ztp['broadcast'] = str(dhcpBlock.broadcast)
        ztp['httpServerIp'] = self.conf['httpServer']['ipAddr']
        ztp['imageUrl'] = ztpGlobalSettings.get('junosImage')
        return ztp

    def populateDhcpDeviceSpecificSettingForAllPods(self, ztp=None):
        """Merge the device-specific settings of every pod into ``ztp``.

        BUG FIX: ``ztp`` previously defaulted to a shared mutable ``{}``,
        so device entries accumulated across calls.
        """
        if ztp is None:
            ztp = {}
        pods = self.dao.getAll(Pod)
        for pod in pods:
            ztp = self.populateDhcpDeviceSpecificSetting(pod.name, ztp)
        return ztp

    def populateDhcpDeviceSpecificSetting(self, podName, ztp=None):
        """Append one entry per device of *podName* (name, mac, config
        URL, image, mgmt IP) to ``ztp['devices']``.

        BUG FIX: ``ztp`` previously defaulted to a shared mutable ``{}``.
        """
        if ztp is None:
            ztp = {}
        if ztp.get('devices') is None:
            ztp['devices'] = []

        pod = self.dao.getUniqueObjectByName(Pod, podName)
        for device in pod.devices:
            if device.role == 'spine':
                image = pod.spineJunosImage
            elif device.role == 'leaf':
                image = pod.leafJunosImage
            else:
                # Unknown roles still get a device entry, just no image.
                image = None
                logger.error('Pod: %s, Device: %s with unknown role: %s' % (pod.name, device.name, device.role))

            deviceMgmtIp = str(IPNetwork(device.managementIp).ip)
            ztp['devices'].append({'name': device.name, 'mac': device.macAddress,
                                   'configUrl': 'pods/' + pod.name + '/devices/' + device.name + '/config',
                                   'imageUrl': image, 'mgmtIp': deviceMgmtIp})
        return ztp
if __name__ == '__main__':
    # Ad-hoc manual test: regenerate pod-specific dhcpd.conf files for two
    # known pods using the globally loaded configuration.
    ztpServer = ZtpServer()
    ztpServer.createPodSpecificDhcpConfFile('labLeafSpine')
    ztpServer.createPodSpecificDhcpConfFile('anotherPod')
    #ztpServer.createSingleDhcpConfFile()
|
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import importlib
import os
import sys
import traceback
from inspect import isclass
from typing import List, Optional
def import_all_provider_classes(source_path: str,
                                provider_ids: Optional[List[str]] = None,
                                print_imports: bool = False) -> List[str]:
    """
    Imports all classes in providers packages. This method loads and imports
    all the classes found in providers, so that we can find all the subclasses
    of operators/sensors etc.

    :param source_path: path to look for sources - might be None to look for all packages in all source paths
    :param provider_ids: provider ids that should be loaded; None loads every provider
    :param print_imports: if imported class should also be printed in output
    :return: list of all imported classes
    """
    # Only modules whose path starts with one of these prefixes are loaded.
    if provider_ids:
        prefixed_provider_paths = [source_path + "/airflow/providers/" + provider_id.replace(".", "/")
                                   for provider_id in provider_ids]
    else:
        prefixed_provider_paths = [source_path + "/airflow/providers/"]

    imported_classes = []
    tracebacks = []
    for root, _, files in os.walk(source_path):
        # Skip modules outside the requested providers, and bytecode caches.
        # (generator expression instead of building a throwaway list)
        if all(not root.startswith(prefix_provider_path)
               for prefix_provider_path in prefixed_provider_paths) or root.endswith("__pycache__"):
            continue
        package_name = root[len(source_path) + 1:].replace("/", ".")
        for file in files:
            if file.endswith(".py"):
                module_name = package_name + "." + file[:-3] if file != "__init__.py" else package_name
                if print_imports:
                    print(f"Importing module: {module_name}")
                # noinspection PyBroadException
                try:
                    _module = importlib.import_module(module_name)
                    for attribute_name in dir(_module):
                        class_name = module_name + "." + attribute_name
                        attribute = getattr(_module, attribute_name)
                        if isclass(attribute):
                            if print_imports:
                                print(f"Imported {class_name}")
                            imported_classes.append(class_name)
                except Exception:
                    # Deliberately broad: collect every failure and report
                    # them all at the end instead of stopping at the first.
                    exception_str = traceback.format_exc()
                    tracebacks.append(exception_str)
    if tracebacks:
        print("""
ERROR: There were some import errors
""", file=sys.stderr)
        for trace in tracebacks:
            print("----------------------------------------", file=sys.stderr)
            print(trace, file=sys.stderr)
            print("----------------------------------------", file=sys.stderr)
        sys.exit(1)
    else:
        return imported_classes
if __name__ == '__main__':
    install_source_path = None
    # Pick a sys.path entry that contains an airflow/providers directory
    # (as in the original, the last matching entry wins).
    for python_path_candidate in sys.path:
        providers_path_candidate = os.path.join(python_path_candidate, "airflow", "providers")
        if os.path.isdir(providers_path_candidate):
            install_source_path = python_path_candidate
    if install_source_path is None:
        # BUG FIX: previously fell through with None and crashed with a
        # TypeError inside import_all_provider_classes when airflow was
        # not installed; fail with a clear message instead.
        print("ERROR: no airflow/providers directory found on sys.path", file=sys.stderr)
        sys.exit(1)
    print()
    print(f"Walking all paths in {install_source_path}")
    print()
    import_all_provider_classes(print_imports=True, source_path=install_source_path)
    print()
    print("SUCCESS: All backport packages are importable!")
    print()
|
de': 'fallback_mode',
'fullPath': 'full_path',
'loadBalancingMode': 'load_balancing_mode',
'manualResume': 'manual_resume',
'maxAnswersReturned' | : 'max_answers_returned',
'qosHitRatio': 'qos_hit_ratio',
'qosHops': 'qos_hops',
'qosKilobytesSecond': 'qos_kilobytes_second',
'qosLcs': 'qos_lcs',
'qosPacketRate | ': 'qos_packet_rate',
'qosRtt': 'qos_rtt',
'qosTopology': 'qos_topology',
'qosVsCapacity': 'qos_vs_capacity',
'qosVsScore': 'qos_vs_score',
'verifyMemberAvailability': 'verify_member_availability',
'membersReference': 'members'
}
returnables = [
'alternate_mode', 'dynamic_ratio', 'enabled', 'disabled', 'fallback_mode',
'load_balancing_mode', 'manual_resume', 'max_answers_returned', 'members',
'name', 'partition', 'qos_hit_ratio', 'qos_hops', 'qos_kilobytes_second',
'qos_lcs', 'qos_packet_rate', 'qos_rtt', 'qos_topology', 'qos_vs_capacity',
'qos_vs_score', 'ttl', 'type', 'full_path', 'availability_state',
'enabled_state', 'availability_status'
]
@property
def max_answers_returned(self):
if self._values['max_answers_returned'] is None:
return None
return int(self._values['max_answers_returned'])
    @property
    def members(self):
        """Pool members normalized for module output.

        Returns [] when the payload has no member items. Each item dict is
        mutated in place: internal keywords stripped, enabled/disabled
        coerced to booleans, camelCase keys renamed, numeric fields cast
        to int.
        """
        result = []
        if self._values['members'] is None or 'items' not in self._values['members']:
            return result
        for item in self._values['members']['items']:
            self._remove_internal_keywords(item)
            # Coerce the enabled/disabled markers to real booleans.
            if 'disabled' in item:
                if item['disabled'] in BOOLEANS_TRUE:
                    item['disabled'] = True
                else:
                    item['disabled'] = False
            if 'enabled' in item:
                if item['enabled'] in BOOLEANS_TRUE:
                    item['enabled'] = True
                else:
                    item['enabled'] = False
            # Rename camelCase API keys to snake_case fact names.
            if 'fullPath' in item:
                item['full_path'] = item.pop('fullPath')
            if 'memberOrder' in item:
                item['member_order'] = int(item.pop('memberOrder'))
            # Cast some attributes to integer
            for x in ['order', 'preference', 'ratio', 'service']:
                if x in item:
                    item[x] = int(item[x])
            result.append(item)
        return result
@property
def qos_hit_ratio(self):
if self._values['qos_hit_ratio'] is None:
return None
return int(self._values['qos_hit_ratio'])
@property
def qos_hops(self):
if self._values['qos_hops'] is None:
return None
return int(self._values['qos_hops'])
@property
def qos_kilobytes_second(self):
if self._values['qos_kilobytes_second'] is None:
return None
return int(self._values['qos_kilobytes_second'])
@property
def qos_lcs(self):
if self._values['qos_lcs'] is None:
return None
return int(self._values['qos_lcs'])
@property
def qos_packet_rate(self):
if self._values['qos_packet_rate'] is None:
return None
return int(self._values['qos_packet_rate'])
@property
def qos_rtt(self):
if self._values['qos_rtt'] is None:
return None
return int(self._values['qos_rtt'])
@property
def qos_topology(self):
if self._values['qos_topology'] is None:
return None
return int(self._values['qos_topology'])
@property
def qos_vs_capacity(self):
if self._values['qos_vs_capacity'] is None:
return None
return int(self._values['qos_vs_capacity'])
@property
def qos_vs_score(self):
if self._values['qos_vs_score'] is None:
return None
return int(self._values['qos_vs_score'])
@property
def availability_state(self):
if self._values['stats'] is None:
return None
try:
result = self._values['stats']['status_availabilityState']
return result['description']
except AttributeError:
return None
@property
def enabled_state(self):
if self._values['stats'] is None:
return None
try:
result = self._values['stats']['status_enabledState']
return result['description']
except AttributeError:
return None
@property
def availability_status(self):
# This fact is a combination of the availability_state and enabled_state
#
# The purpose of the fact is to give a higher-level view of the availability
# of the pool, that can be used in playbooks. If you need further detail,
# consider using the following facts together.
#
# - availability_state
# - enabled_state
#
if self.enabled_state == 'enabled':
if self.availability_state == 'offline':
return 'red'
elif self.availability_state == 'available':
return 'green'
elif self.availability_state == 'unknown':
return 'blue'
else:
return 'none'
else:
# disabled
return 'black'
class WideIpParameters(BaseParameters):
    """Normalizes wide-IP attributes for module output."""

    # API attribute name -> module fact name.
    api_map = {
        'fullPath': 'full_path',
        'failureRcode': 'failure_return_code',
        'failureRcodeResponse': 'failure_return_code_response',
        'failureRcodeTtl': 'failure_return_code_ttl',
        'lastResortPool': 'last_resort_pool',
        'minimalResponse': 'minimal_response',
        'persistCidrIpv4': 'persist_cidr_ipv4',
        'persistCidrIpv6': 'persist_cidr_ipv6',
        'poolLbMode': 'pool_lb_mode',
        'ttlPersistence': 'ttl_persistence'
    }

    returnables = [
        'full_path', 'description', 'enabled', 'disabled', 'failure_return_code',
        'failure_return_code_response', 'failure_return_code_ttl', 'last_resort_pool',
        'minimal_response', 'persist_cidr_ipv4', 'persist_cidr_ipv6', 'pool_lb_mode',
        'ttl_persistence', 'pools'
    ]

    @property
    def pools(self):
        """Attached pools with internal keys removed and numeric fields
        cast to int; [] when none are attached."""
        raw_pools = self._values['pools']
        if raw_pools is None:
            return []
        result = []
        for pool in raw_pools:
            # NOTE(review): assumes every pool dict carries a
            # 'nameReference' key -- a missing key would raise KeyError.
            del pool['nameReference']
            for numeric_key in ('order', 'ratio'):
                if numeric_key in pool:
                    pool[numeric_key] = int(pool[numeric_key])
            result.append(pool)
        return result

    @property
    def failure_return_code_ttl(self):
        """``failure_return_code_ttl`` as an int, or None when unset."""
        raw = self._values['failure_return_code_ttl']
        return None if raw is None else int(raw)

    @property
    def persist_cidr_ipv4(self):
        """``persist_cidr_ipv4`` as an int, or None when unset."""
        raw = self._values['persist_cidr_ipv4']
        return None if raw is None else int(raw)

    @property
    def persist_cidr_ipv6(self):
        """``persist_cidr_ipv6`` as an int, or None when unset."""
        raw = self._values['persist_cidr_ipv6']
        return None if raw is None else int(raw)

    @property
    def ttl_persistence(self):
        """``ttl_persistence`` as an int, or None when unset."""
        raw = self._values['ttl_persistence']
        return None if raw is None else int(raw)
class ServerParameters(BaseParameters):
api_map = {
'fullPath': 'full_path',
'exposeRouteDomains': 'expose_route_domains',
'iqAllowPath': 'iq_allow_path',
'iqAllowServiceCheck': 'iq_allow_service_check',
'iqAllowSnmp': 'iq_allow_snmp',
'limitCpuUsage': 'limit_cpu_usage',
'limitCpuUsageStatus': 'limit_cpu_usage_status',
'limitMaxBps': 'limit_max_bps',
'limitMaxBpsStatus': 'limit_max_bps_status',
'limitMaxConnections': 'limit_max_connections',
'limitMaxConnectionsStatus': 'limit_max_connections_status',
'limitMaxPps': 'limit_max_pps',
'limitMaxPpsStatus': 'limit_max_pps_status',
'limitMemAvail': 'limit_mem_available',
'limitMemAvailStatus': 'limit_mem_available_status',
'linkDiscovery': 'link_discovery',
'proberFallback': 'pr |
import tkinter as tk
import tkinter.ttk as ttk
def NewFlight():
    """Open the 'new flight' data-entry page on the main window."""
    # Local import -- presumably avoids a circular import between this
    # module and Controller; confirm before hoisting to the top.
    from Controller import LoadNewFlightPage
    LoadNewFlightPage(mainPage)
def LogBook():
    """Open the logbook page on the main window."""
    # Local import -- presumably avoids a circular import between this
    # module and Controller; confirm before hoisting to the top.
    from Controller import LoadLogBookPage
    LoadLogBookPage(mainPage)
def CreateMainPage():
    """Build and run the main menu window of the logbook application.

    Creates the Tk root (stored in the module-global ``mainPage`` so the
    button callbacks can reach it), lays out the navigation buttons and a
    small summary table, then enters the blocking Tk main loop.
    """
    global mainPage
    mainPage = tk.Tk()
    mainPage.configure(background="midnight blue")
    mainPage.title("RAF Flying Logbook v1")
    """mainPage.geometry('%dx%d+0+0' % (mainPage.winfo_screenwidth(),mainPage.winfo_screenheight()))
    mainPageWidth = mainPage.winfo_screenwidth()
    mainPageHeight = mainPage.winfo_screenheight()"""
    # Header label spanning the full four-column grid.
    FrameTitle = tk.Label(text = "RAF Flight Logbook v1", fg="steel blue", font=("Comic Sans MS", 10),width=50)
    FrameTitle.grid(row=0,column=0,columnspan=4)
    # Empty labels act as vertical spacers between rows.
    TopSpace = tk.Label(text = "", bg="midnight blue")
    TopSpace.grid(row=1,columnspan=4,sticky='ew')
    LogBook_btn = tk.Button(text = "Log Book", fg ="black",command=LogBook)
    LogBook_btn.grid(row=2,column=1,columnspan=2,)
    MidTopSpace = tk.Label(text = "", bg="midnight blue")
    MidTopSpace.grid(row=3,columnspan=4,sticky='ew')
    NewFlight_btn = tk.Button(text = "New Flight", fg ="black",command=NewFlight)
    NewFlight_btn.grid(row=4,column=1,columnspan=2,)
    MidSpace = tk.Label(text = "", bg="midnight blue")
    MidSpace.grid(row=5,columnspan=4,sticky='ew')
    # NOTE(review): the two summary buttons have no command= handler yet.
    QSummary_btn= tk.Button(text = "Quarterly Summary", fg ="black")
    QSummary_btn.grid(row=6,column=1,columnspan=2,)
    BotSpace = tk.Label(text = "", bg="midnight blue")
    BotSpace.grid(row=7,columnspan=4,sticky='ew')
    ASummary_btn= tk.Button(text = "Annual Summary", fg ="black")
    ASummary_btn.grid(row=8,column=1,columnspan=2,)
    TableSpace = tk.Label(text = "", bg="midnight blue")
    TableSpace.grid(row=9,columnspan=4,sticky='ew')
    # Hours-summary table; values are hard-coded samples for now.
    Summary = ttk.Treeview(mainPage,height=4)
    Summary["columns"]=("one")
    Summary.heading("#0", text='Type', anchor='w')
    Summary.column("one", width=40)
    Summary.heading("one", text="Hours")
    Summary.insert("", 0,text="Approaches", values=("3"))
    Summary.insert("", 0,text="IF", values=("2"))
    Summary.insert("", 0,text="Night", values=("1"))
    Summary.insert("", 0,text="Day", values=("0"))
    Summary.grid(row=10,column=0,columnspan=3,sticky='e')
    Summary.columnconfigure(0,weight=0)
    mainPage.mainloop()
"""
Definition of model | s.
"""
from django.db import models
from django import forms;
# Create your models here.
class Artist(models.Model):
    """A musical act and the year it formed."""
    # Trailing semicolons removed -- they are not idiomatic Python.
    name = models.CharField(max_length=50)
    year_formed = models.PositiveIntegerField()
class ArtistForm(forms.ModelForm):
    """ModelForm exposing the editable fields of Artist."""
    class Meta:
        # Trailing semicolons removed -- not idiomatic Python.
        model = Artist
        fields = ['name', 'year_formed']
# NOTE(review): this class is an exact duplicate of the ArtistForm defined
# immediately above; the second definition silently replaces the first.
# One of the two should be deleted.
class ArtistForm(forms.ModelForm):
    class Meta:
        model = Artist;
        fields = ['name', 'year_formed'];
class Album(models.Model):
    """A release belonging to an Artist."""
    # Trailing semicolons removed -- not idiomatic Python.
    name = models.CharField(max_length=50)
    # NOTE(review): Django >= 2.0 requires an explicit on_delete argument
    # on ForeignKey; this call only works on older Django. Confirm the
    # project's Django version before adding on_delete=models.CASCADE.
    artist = models.ForeignKey(Artist)
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Must run after the preceding data migration.
    dependencies = [
        ('flood_mapper', '0003_data_migration_20141201_0218'),
    ]
    operations = [
        # Allow flood-status notes to be empty in both forms (blank) and
        # the database (null).
        migrations.AlterField(
            model_name='floodstatus',
            name='notes',
            field=models.TextField(null=True, blank=True),
            preserve_default=True,
        ),
        # Enforce uniqueness and a 100-char limit on village slugs.
        migrations.AlterField(
            model_name='village',
            name='slug',
            field=models.SlugField(unique=True, max_length=100),
            preserve_default=True,
        ),
    ]
|
volume_name" field in the input example, '
'volume_path points to HDF5 volumes containing int64 '
'label data, and `dataset` is the name of the dataset '
'from which data will be read.')
# Model selection options.
flags.DEFINE_string('model_name', None,
                    'Name of the model to train. Format: '
                    '[<packages>.]<module_name>.<model_class>, if packages is '
                    'missing "ffn.training.models" is used as default.')
flags.DEFINE_string('model_args', None,
                    'JSON string with arguments to be passed to the model '
                    'constructor.')
# Training infra options.
flags.DEFINE_string('train_dir', '/tmp',
                    'Path where checkpoints and other data will be saved.')
flags.DEFINE_string('master', '', 'Network address of the master.')
flags.DEFINE_integer('batch_size', 4, 'Number of images in a batch.')
flags.DEFINE_integer('task', 0, 'Task id of the replica running the training.')
flags.DEFINE_integer('ps_tasks', 0, 'Number of tasks in the ps job.')
flags.DEFINE_integer('max_steps', 10000, 'Number of steps to train for.')
flags.DEFINE_integer('replica_step_delay', 300,
                     'Require the model to reach step number '
                     '<replica_step_delay> * '
                     '<replica_id> before starting training on a given '
                     'replica.')
flags.DEFINE_integer('summary_rate_secs', 120,
                     'How often to save summaries (in seconds).')
# FFN training options.
flags.DEFINE_float('seed_pad', 0.05,
                   'Value to use for the unknown area of the seed.')
flags.DEFINE_float('threshold', 0.9,
                   'Value to be reached or exceeded at the new center of the '
                   'field of view in order for the network to inspect it.')
flags.DEFINE_enum('fov_policy', 'fixed', ['fixed', 'max_pred_moves'],
                  'Policy to determine where to move the field of the '
                  'network. "fixed" tries predefined offsets specified by '
                  '"model.shifts". "max_pred_moves" moves to the voxel with '
                  'maximum mask activation within a plane perpendicular to '
                  'one of the 6 Cartesian directions, offset by +/- '
                  'model.deltas from the current FOV position.')
# TODO(mjanusz): Implement fov_moves > 1 for the 'fixed' policy.
flags.DEFINE_integer('fov_moves', 1,
                     'Number of FOV moves by "model.delta" voxels to execute '
                     'in every dimension. Currently only works with the '
                     '"max_pred_moves" policy.')
flags.DEFINE_boolean('shuffle_moves', True,
                     'Whether to randomize the order of the moves used by the '
                     'network with the "fixed" policy.')
# Input normalization / data augmentation options.
flags.DEFINE_float('image_mean', None,
                   'Mean image intensity to use for input normalization.')
flags.DEFINE_float('image_stddev', None,
                   'Image intensity standard deviation to use for input '
                   'normalization.')
flags.DEFINE_list('image_offset_scale_map', None,
                  'Optional per-volume specification of mean and stddev. '
                  'Every entry in the list is a colon-separated tuple of: '
                  'volume_label, offset, scale.')
flags.DEFINE_list('permutable_axes', ['1', '2'],
                  'List of integers equal to a subset of [0, 1, 2] specifying '
                  'which of the [z, y, x] axes, respectively, may be permuted '
                  'in order to augment the training data.')
flags.DEFINE_list('reflectable_axes', ['0', '1', '2'],
                  'List of integers equal to a subset of [0, 1, 2] specifying '
                  'which of the [z, y, x] axes, respectively, may be reflected '
                  'in order to augment the training data.')
FLAGS = flags.FLAGS
class EvalTracker(object):
"""Tracks eval results over multiple training steps."""
  def __init__(self, eval_shape):
    """Builds the TF ops used to evaluate patches of shape ``eval_shape``.

    Args:
      eval_shape: [z, y, x] shape to which labels/predictions are cropped
        before evaluation.
    """
    # Placeholders are fed one patch at a time: batch size 1, 1 channel.
    self.eval_labels = tf.placeholder(
        tf.float32, [1] + eval_shape + [1], name='eval_labels')
    self.eval_preds = tf.placeholder(
        tf.float32, [1] + eval_shape + [1], name='eval_preds')
    self.eval_loss = tf.reduce_mean(
        tf.nn.sigmoid_cross_entropy_with_logits(
            logits=self.eval_preds, labels=self.eval_labels))
    self.reset()
    # Predictions are logits, so the threshold is logit(0.9) rather than
    # a probability of 0.9.
    self.eval_threshold = logit(0.9)
    self.sess = None
    self._eval_shape = eval_shape
def reset(self):
"""Resets status of the tracker."""
self.loss = 0
self.num_patches = 0
self.tp = 0
self.tn = 0
self.fn = 0
self.fp = 0
self.total_voxels = 0
self.masked_voxels = 0
self.images_xy = deque(maxlen=16)
self.images_xz = deque(maxlen=16)
self.images_yz = deque(maxlen=16)
  def slice_image(self, labels, predicted, weights, slice_axis):
    """Builds a tf.Summary showing a slice of an object mask.
    The object mask slice is shown side by side with the corresponding
    ground truth mask.
    Args:
      labels: ndarray of ground truth data, shape [1, z, y, x, 1]
      predicted: ndarray of predicted data, shape [1, z, y, x, 1]
      weights: ndarray of loss weights, shape [1, z, y, x, 1]
      slice_axis: axis in the middle of which to place the cutting plane
        for which the summary image will be generated, valid values are
        2 ('x'), 1 ('y'), and 0 ('z').
    Returns:
      tf.Summary.Value object with the image.
    """
    zyx = list(labels.shape[1:-1])
    # Select the middle plane along slice_axis; drop batch/channel dims.
    selector = [0, slice(None), slice(None), slice(None), 0]
    selector[slice_axis + 1] = zyx[slice_axis] // 2
    selector = tuple(selector)
    del zyx[slice_axis]
    h, w = zyx
    buf = BytesIO()
    # Scale [0, 1] data to 8-bit greyscale for PNG encoding.
    labels = (labels[selector] * 255).astype(np.uint8)
    predicted = (predicted[selector] * 255).astype(np.uint8)
    weights = (weights[selector] * 255).astype(np.uint8)
    # Lay out [labels | predicted | weights] side by side in one image.
    im = PIL.Image.fromarray(np.concatenate([labels, predicted,
                                             weights], axis=1), 'L')
    im.save(buf, 'PNG')
    axis_names = 'zyx'
    axis_names = axis_names.replace(axis_names[slice_axis], '')
    return tf.Summary.Value(
        tag='final_%s' % axis_names[::-1],
        image=tf.Summary.Image(
            height=h, width=w * 3, colorspace=1,  # greyscale
            encoded_image_string=buf.getvalue()))
  def add_patch(self, labels, predicted, weights,
                coord=None, volname=None, patches=None):
    """Evaluates single-object segmentation quality.

    ``coord``, ``volname`` and ``patches`` are accepted for interface
    compatibility but are not used in this method.
    """
    # Crop/pad everything to the shape the eval ops were built for.
    predicted = mask.crop_and_pad(predicted, (0, 0, 0), self._eval_shape)
    weights = mask.crop_and_pad(weights, (0, 0, 0), self._eval_shape)
    labels = mask.crop_and_pad(labels, (0, 0, 0), self._eval_shape)
    loss, = self.sess.run([self.eval_loss], {self.eval_labels: labels,
                                             self.eval_preds: predicted})
    self.loss += loss
    self.total_voxels += labels.size
    self.masked_voxels += np.sum(weights == 0.0)
    # Confusion-matrix counts; predictions are logits (compared against
    # the logit-space threshold), labels are soft values in [0, 1].
    pred_mask = predicted >= self.eval_threshold
    true_mask = labels > 0.5
    pred_bg = np.logical_not(pred_mask)
    true_bg = np.logical_not(true_mask)
    self.tp += np.sum(pred_mask & true_mask)
    self.fp += np.sum(pred_mask & true_bg)
    self.fn += np.sum(pred_bg & true_mask)
    self.tn += np.sum(pred_bg & true_bg)
    self.num_patches += 1
    # Convert logits to probabilities for the visualization images.
    predicted = expit(predicted)
    self.images_xy.append(self.slice_image(labels, predicted, weights, 0))
    self.images_xz.append(self.slice_image(labels, predicted, weights, 1))
    self.images_yz.append(self.slice_image(labels, predicted, weights, 2))
def get_summaries(self):
"""Gathers tensorflow summaries into single list | ."""
if not self.total_voxels:
return []
precision = self.tp / max(self.tp + self.fp, 1)
recall = self.tp / max(self.tp + self.fn, 1)
for images in self.images_xy, self.images_xz, self.images_yz:
for i, summary in enumerate(images):
summary.tag += '/%d' % i
summaries = (
list(self.images_xy) + list(self.images_xz) + list(self.images_yz) + [
tf.Summary.Value(tag='masked_voxe |
import numpy a | s np
def P_vector(model, nodal_load):
    """Assemble the global load vector.

    :param model: structural model exposing ``nt`` (total number of DOFs),
        ``CON`` (container of valid node labels) and ``DOF`` (mapping from
        node label to global DOF indices).
    :param nodal_load: dict mapping a node label to the load components
        applied at that node, ordered like the node's DOF list.
    :returns: numpy array of length ``model.nt`` with the applied loads.
    :raises KeyError: if a load is applied to a node not in ``model.CON``.
    """
    P = np.zeros(model.nt)
    for node, load in nodal_load.items():
        if node not in model.CON:
            # KeyError (an Exception subclass, so existing handlers still
            # match) and the message now names the offending node.
            raise KeyError(
                'Node {} is not a valid DOF for the applied load!'.format(node))
        for i, dof in enumerate(model.DOF[node]):
            P[dof] = load[i]
    return P
|
#!/usr/bin/env python
"""
Test code for blackjack game. Tests can be run with py.test or nosetests
"""
from __future__ import print_function
from unittest import TestCase
from card_games import blackjack
from card_games.blackjack import BlackJack
print(blackjack.__file__)
class TestRule(TestCase):
    """Basic rule checks for the BlackJack game."""
    def setUp(self):
        # No shared fixtures needed yet.
        pass
    def tearDown(self):
        pass
    def test_init(self):
        game = BlackJack()
        # Both the player and the dealer start with two cards.
        self.assertEqual(len(game.player_hand), 2)
        self.assertEqual(len(game.dealer_hand), 2)
    def test_player_bust(self):
        game = BlackJack()
        # Drawing ten extra cards is guaranteed to push the hand over 21.
        for _ in range(10):
            game.draw_card_player()
        # Two initial cards plus ten drawn cards.
        self.assertEqual(len(game.player_hand), 12)
        self.assertEqual(game.game_result(), 'bust')
|
# coding=utf-8
# -------------------------------- | ------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsof | t (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class AddDataLakeStoreParameters(Model):
    """The parameters used to add a new Data Lake Store account.
    :param suffix: The optional suffix for the Data Lake Store account.
    :type suffix: str
    """
    # msrest serialization map: ``suffix`` travels on the wire as the
    # nested ``properties.suffix`` field.
    _attribute_map = {
        'suffix': {'key': 'properties.suffix', 'type': 'str'},
    }
    def __init__(self, suffix=None):
        super(AddDataLakeStoreParameters, self).__init__()
        self.suffix = suffix
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2019, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from __future__ import print_function
import abc
import torch
import torch.nn as nn
import torch.nn.functional as F
from htmresearch.frameworks.pytorch.duty_cycle_metrics import (
maxEntropy, binaryEntropy
)
from htmresearch.frameworks.pytorch.functions import k_winners, k_winners2d
def getEntropy(m):
  """
  Function used to get the current and max entropies of KWinners modules.
  :param m: any module
  :return: (currentEntropy, maxEntropy); (0.0, 0.0) for non-KWinners modules
  """
  if not isinstance(m, KWinnersBase):
    return 0.0, 0.0
  return m.entropy(), m.maxEntropy()
def getEntropies(m):
  """
  Recursively accumulate the current and max entropies of ``m`` and all of
  its descendant modules.
  :param m: any module
  :return: (currentEntropy, maxEntropy)
  """
  entropy = 0.0
  max_entropy = 0.0
  for child in m.children():
    # BUG FIX: the original reused the name ``m`` for the returned max
    # entropy here, clobbering the module argument; after the first child,
    # getEntropy(m) below received a float and the module's own entropy
    # was silently reported as (0.0, 0.0).
    child_entropy, child_max = getEntropies(child)
    entropy += child_entropy
    max_entropy += child_max
  own_entropy, own_max = getEntropy(m)
  entropy += own_entropy
  max_entropy += own_max
  return entropy, max_entropy
def updateBoostStrength(m):
  """
  Function used to update KWinner modules boost strength after each epoch.
  Call using :meth:`torch.nn.Module.apply` after each epoch if required
  For example: ``m.apply(updateBoostStrength)``
  :param m: KWinner module
  """
  # Only KWinners modules that are currently in training mode decay.
  if isinstance(m, KWinnersBase) and m.training:
    m.boostStrength = m.boostStrength * m.boostStrengthFactor
class KWinnersBase(nn.Module):
  """
  Abstract base class for k-winners layers.

  Holds the shared k-winners hyperparameters (k, inference scaling, boost
  strength and its decay factor, duty-cycle period) plus the boosting and
  entropy bookkeeping used by all concrete KWinners implementations.
  """
  __metaclass__ = abc.ABCMeta
  def __init__(self, n, k, kInferenceFactor=1.0, boostStrength=1.0,
               boostStrengthFactor=1.0, dutyCyclePeriod=1000):
    """
    :param n: number of units
    :type n: int
    :param k: only the top k units keep their activity; the rest are zeroed
    :type k: int
    :param kInferenceFactor: factor by which k is increased during
      inference (training=False)
    :type kInferenceFactor: float
    :param boostStrength: boost strength (0.0 implies no boosting)
    :type boostStrength: float
    :param boostStrengthFactor: boost strength decay factor in [0..1]
    :type boostStrengthFactor: float
    :param dutyCyclePeriod: period used to calculate duty cycles
    :type dutyCyclePeriod: int
    """
    super(KWinnersBase, self).__init__()
    assert (boostStrength >= 0.0)
    self.n = n
    self.k = k
    self.kInferenceFactor = kInferenceFactor
    self.learningIterations = 0
    # Boosting-related parameters.
    self.boostStrength = boostStrength
    self.boostStrengthFactor = boostStrengthFactor
    self.dutyCyclePeriod = dutyCyclePeriod
  def getLearningIterations(self):
    """Return the running count of samples seen during training."""
    return self.learningIterations
  @abc.abstractmethod
  def updateDutyCycle(self, x):
    """
    Update the duty cycle estimates with the new value:
      dutyCycle = (dutyCycle * (period - batchSize) + newValue) / period
    :param x: current activity of each unit
    """
    raise NotImplementedError
  def updateBoostStrength(self):
    """
    Decay the boost strength by ``boostStrengthFactor`` while in
    training mode.
    """
    if self.training:
      self.boostStrength = self.boostStrength * self.boostStrengthFactor
  def entropy(self):
    """
    Returns the current total entropy of this layer
    """
    if self.k >= self.n:
      # Every unit always wins, so there is nothing to measure.
      return 0
    _, entropy = binaryEntropy(self.dutyCycle)
    return entropy
  def maxEntropy(self):
    """
    Returns the maximum total entropy we can expect from this layer
    """
    return maxEntropy(self.n, self.k)
class KWinners(KWinnersBase):
  """
  Applies K-Winner function to the input tensor
  See :class:`htmresearch.frameworks.pytorch.functions.k_winners`
  """
  def __init__(self, n, k, kInferenceFactor=1.0, boostStrength=1.0,
               boostStrengthFactor=1.0, dutyCyclePeriod=1000):
    """
    :param n:
      Number of units
    :type n: int
    :param k:
      The activity of the top k units will be allowed to remain, the rest are set
      to zero
    :type k: int
    :param kInferenceFactor:
      During inference (training=False) we increase k by this factor.
    :type kInferenceFactor: float
    :param boostStrength:
      boost strength (0.0 implies no boosting).
    :type boostStrength: float
    :param boostStrengthFactor:
      Boost strength factor to use [0..1]
    :type boostStrengthFactor: float
    :param dutyCyclePeriod:
      The period used to calculate duty cycles
    :type dutyCyclePeriod: int
    """
    super(KWinners, self).__init__(n=n, k=k,
                                   kInferenceFactor=kInferenceFactor,
                                   boostStrength=boostStrength,
                                   boostStrengthFactor=boostStrengthFactor,
                                   dutyCyclePeriod=dutyCyclePeriod)
    # Registered as a buffer so the duty cycle is saved in state_dict but
    # is not a trainable parameter.
    self.register_buffer("dutyCycle", torch.zeros(self.n))
  def forward(self, x):
    # Apply k-winner algorithm if k < n, otherwise default to standard RELU
    if self.k >= self.n:
      return F.relu(x)
    if self.training:
      k = self.k
    else:
      # During inference allow more winners, scaled by kInferenceFactor
      # (capped at n).
      k = min(int(round(self.k * self.kInferenceFactor)), self.n)
    x = k_winners.apply(x, self.dutyCycle, k, self.boostStrength)
    if self.training:
      self.updateDutyCycle(x)
    return x
  def updateDutyCycle(self, x):
    # Running average of the fraction of batches in which each unit was
    # active, over at most dutyCyclePeriod recent samples.
    batchSize = x.shape[0]
    self.learningIterations += batchSize
    period = min(self.dutyCyclePeriod, self.learningIterations)
    self.dutyCycle.mul_(period - batchSize)
    self.dutyCycle.add_(x.gt(0).sum(dim=0, dtype=torch.float))
    self.dutyCycle.div_(period)
class KWinners2d(KWinnersBase):
"""
Applies K-Winner function to the input tensor
See :class:`htmresearch.frameworks.pytorch.functions.k_winners2d`
"""
def __init__(self, n, k, channels, kInferenceFactor=1.0, boostStrength=1.0,
boostStrengthFactor=1.0, dutyCyclePeriod=1000):
"""
:param n:
Number of units. Usually the output of the max pool or whichever layer
preceding the KWinners2d layer.
:type n: int
:param k:
The activity of the top k units will be allowed to remain, the rest are set
to zero
:type k: int
:param channels:
Number of channels (filters) in the convolutional layer.
:type channels: int
:param kInferenceFactor:
During inference (training=False) we increase k by this factor.
:type kInferenceFactor: float
:param boostStrength:
boost strength (0.0 implies no boosting).
:type boostStrength: float
:param boostStrengthFactor:
Boost strength factor to use [0..1]
:type boostStrengthFactor: float
:param dutyCyclePeriod:
The period used to calculate duty cycles
:type dutyCyclePeriod: int
"""
super(KWinners2d, self).__init__(n=n, k=k,
kInferenceFactor=kInferenceFactor,
boostStrength=boostStrength,
boostStrengthFactor=boostStrengthFactor,
dutyCyclePeriod=dutyCyclePeriod)
self.channels = channels
self.regi |
# Portions Copyright (c) Facebook, Inc. and its affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
# highlight.py - highlight extension implementation file
#
# Copyright 2007-2009 Adam Hupp <adam@hupp.org> and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
#
# The original module was split in an interface and an implementation
# file to defer pygments loading and speedup extension setup.
from __future__ import absolute_import
from edenscm.mercurial import demandimport, encoding, util
demandimport.ignore.extend(["pkgutil", "pkg_resources", "__main__"])
with demandimport.deactivated():
import pygments
import pygments.formatters
import pygments.lexers
import pygments.util
highlight = pygments.highlight
ClassNotFound = pygments.util.ClassNotFound
guess_lexer = pygments.lexers.guess_lexer
guess_lexer_for_filename = pygments.lexers.guess_lexer_for_fil | ename
TextLexer = pygments.lexers.TextLexer
HtmlFormatter = pygments.formatters.HtmlFormatter
SYNTAX_CSS = '\n<link rel="stylesheet" href="{url}highlightcss" ' 'type="text/css" />'
def pygmentize(field, fctx, style, tmpl, guessfilenameonly=False):
    """Syntax-highlight ``field`` of the template cache using Pygments.

    Installs a ``colorize`` template filter fed from the highlighted lines
    of ``fctx``'s data. Binary files, files with no recognizable lexer and
    plain-text files are left untouched. ``guessfilenameonly`` restricts
    lexer detection to the filename (no content-based guessing).
    """
    # append a <link ...> to the syntax highlighting css
    old_header = tmpl.load("header")
    if SYNTAX_CSS not in old_header:
        new_header = old_header + SYNTAX_CSS
        tmpl.cache["header"] = new_header
    text = fctx.data()
    if util.binary(text):
        return
    # str.splitlines() != unicode.splitlines() because "reasons"
    for c in "\x0c\x1c\x1d\x1e":
        if c in text:
            text = text.replace(c, "")
    # Pygments is best used with Unicode strings:
    # <http://pygments.org/docs/unicode/>
    text = text.decode(encoding.encoding, "replace")
    # To get multi-line strings right, we can't format line-by-line
    try:
        lexer = guess_lexer_for_filename(fctx.path(), text[:1024], stripnl=False)
    except (ClassNotFound, ValueError):
        # guess_lexer will return a lexer if *any* lexer matches. There is
        # no way to specify a minimum match score. This can give a high rate of
        # false positives on files with an unknown filename pattern.
        if guessfilenameonly:
            return
        try:
            lexer = guess_lexer(text[:1024], stripnl=False)
        except (ClassNotFound, ValueError):
            # Don't highlight unknown files
            return
    # Don't highlight text files
    if isinstance(lexer, TextLexer):
        return
    formatter = HtmlFormatter(nowrap=True, style=style)
    colorized = highlight(text, lexer, formatter)
    # Each use of the filter in the template consumes exactly one
    # highlighted line from this generator.
    coloriter = (s.encode(encoding.encoding, "replace") for s in colorized.splitlines())
    tmpl.filters["colorize"] = lambda x: next(coloriter)
    oldl = tmpl.cache[field]
    newl = oldl.replace("line|escape", "line|colorize")
    tmpl.cache[field] = newl
|
intang_assets: NUMBER(20,4)
无形资产
r_and_d_costs: NUMBER(20,4)
开发支出
goodwill: NUMBER(20,4)
商誉
long_term_deferred_exp: NUMBER(20,4)
长期待摊费用
deferred_tax_assets: NUMBER(20,4)
递延所得税资产
loans_and_adv_granted: NUMBER(20,4)
发放贷款及垫款
oth_non_cur_assets: NUMBER(20,4)
其他非流动资产
tot_non_cur_assets: NUMBER(20,4)
非流动资产合计
cash_deposits_central_bank: NUMBER(20,4)
现金及存放中央银行款项
asset_dep_oth_banks_fin_inst: NUMBER(20,4)
存放同业和其它金融机构款项
precious_metals: NUMBER(20,4)
贵金属
derivative_fin_assets: NUMBER(20,4)
衍生金融资产
agency_bus_assets: NUMBER(20,4)
代理业务资产
subr_rec: NUMBER(20,4)
应收代位追偿款
rcv_ceded_unearned_prem_rsrv: NUMBER(20,4)
应收分保未到期责任准备金
rcv_ceded_claim_rsrv: NUMBER(20,4)
应收分保未决赔款准备金
rcv_ceded_life_insur_rsrv: NUMBER(20,4)
应收分保寿险责任准备金
rcv_ceded_lt_health_insur_rsrv: NUMBER(20,4)
应收分保长期健康险责任准备金
mrgn_paid: NUMBER(20,4)
存出保证金
insured_pledge_loan: NUMBER(20,4)
保户质押贷款
cap_mrgn_paid: NUMBER(20,4)
存出资本保证金
independent_acct_assets: NUMBER(20,4)
独立账户资产
clients_cap_deposit: NUMBER(20,4)
客户资金存款
clients_rsrv_settle: NUMBER(20,4)
客户备付金
incl_seat_fees_exchange: NUMBER(20,4)
其中:交易席位费
rcv_invest: NUMBER(20,4)
应收款项类投资
tot_assets: NUMBER(20,4)
资产总计
st_borrow: NUMBER(20,4)
短期借款
borrow_central_bank: NUMBER(20,4)
向中央银行借款
deposit_received_ib_deposits: NUMBER(20,4)
吸收存款及同业存放
loans_oth_banks: NUMBER(20,4)
拆入资金
tradable_fin_liab: NUMBER(20,4)
交易性金融负债
notes_payable: NUMBER(20,4 | )
应付票据
acct_payable: NUMBER(20,4)
应付账款
adv_from_cust: NUMBER(20,4)
预收款项
fund_sales_fin_assets_rp: NUMBER(20,4)
卖出回购金融资产款
handling_charges_comm_payable: NUMBER(20,4)
应付手续费及佣金
empl_ben_payable: NUMBER(20,4)
| 应付职工薪酬
taxes_surcharges_payable: NUMBER(20,4)
应交税费
int_payable: NUMBER(20,4)
应付利息
dvd_payable: NUMBER(20,4)
应付股利
oth_payable: NUMBER(20,4)
其他应付款
acc_exp: NUMBER(20,4)
预提费用
deferred_inc: NUMBER(20,4)
递延收益
st_bonds_payable: NUMBER(20,4)
应付短期债券
payable_to_reinsurer: NUMBER(20,4)
应付分保账款
rsrv_insur_cont: NUMBER(20,4)
保险合同准备金
acting_trading_sec: NUMBER(20,4)
代理买卖证券款
acting_uw_sec: NUMBER(20,4)
代理承销证券款
non_cur_liab_due_within_1y: NUMBER(20,4)
一年内到期的非流动负债
oth_cur_liab: NUMBER(20,4)
其他流动负债
tot_cur_liab: NUMBER(20,4)
流动负债合计
lt_borrow: NUMBER(20,4)
长期借款
bonds_payable: NUMBER(20,4)
应付债券
lt_payable: NUMBER(20,4)
长期应付款
specific_item_payable: NUMBER(20,4)
专项应付款
provisions: NUMBER(20,4)
预计负债
deferred_tax_liab: NUMBER(20,4)
递延所得税负债
deferred_inc_non_cur_liab: NUMBER(20,4)
递延收益-非流动负债
oth_non_cur_liab: NUMBER(20,4)
其他非流动负债
tot_non_cur_liab: NUMBER(20,4)
非流动负债合计
liab_dep_oth_banks_fin_inst: NUMBER(20,4)
同业和其它金融机构存放款项
derivative_fin_liab: NUMBER(20,4)
衍生金融负债
cust_bank_dep: NUMBER(20,4)
吸收存款
agency_bus_liab: NUMBER(20,4)
代理业务负债
oth_liab: NUMBER(20,4)
其他负债
prem_received_adv: NUMBER(20,4)
预收保费
deposit_received: NUMBER(20,4)
存入保证金
insured_deposit_invest: NUMBER(20,4)
保户储金及投资款
unearned_prem_rsrv: NUMBER(20,4)
未到期责任准备金
out_loss_rsrv: NUMBER(20,4)
未决赔款准备金
life_insur_rsrv: NUMBER(20,4)
寿险责任准备金
lt_health_insur_v: NUMBER(20,4)
长期健康险责任准备金
independent_acct_liab: NUMBER(20,4)
独立账户负债
incl_pledge_loan: NUMBER(20,4)
其中:质押借款
claims_payable: NUMBER(20,4)
应付赔付款
dvd_payable_insured: NUMBER(20,4)
应付保单红利
tot_liab: NUMBER(20,4)
负债合计
cap_stk: NUMBER(20,4)
股本
cap_rsrv: NUMBER(20,4)
资本公积金
special_rsrv: NUMBER(20,4)
专项储备
surplus_rsrv: NUMBER(20,4)
盈余公积金
undistributed_profit: NUMBER(20,4)
未分配利润
less_tsy_stk: NUMBER(20,4)
减:库存股
prov_nom_risks: NUMBER(20,4)
一般风险准备
cnvd_diff_foreign_curr_stat: NUMBER(20,4)
外币报表折算差额
unconfirmed_invest_loss: NUMBER(20,4)
未确认的投资损失
minority_int: NUMBER(20,4)
少数股东权益
tot_shrhldr_eqy_excl_min_int: NUMBER(20,4)
股东权益合计(不含少数股东权益)
tot_shrhldr_eqy_incl_min_int: NUMBER(20,4)
股东权益合计(含少数股东权益)
tot_liab_shrhldr_eqy: NUMBER(20,4)
负债及股东权益总计
comp_type_code: VARCHAR2(2)
公司类型代码 1非金融类2银行3保险4证券
actual_ann_dt: VARCHAR2(8)
实际公告日期
spe_cur_assets_diff: NUMBER(20,4)
流动资产差额(特殊报表科目)
tot_cur_assets_diff: NUMBER(20,4)
流动资产差额(合计平衡项目)
spe_non_cur_assets_diff: NUMBER(20,4)
非流动资产差额(特殊报表科目)
tot_non_cur_assets_diff: NUMBER(20,4)
非流动资产差额(合计平衡项目)
spe_bal_assets_diff: NUMBER(20,4)
资产差额(特殊报表科目)
tot_bal_assets_diff: NUMBER(20,4)
资产差额(合计平衡项目)
spe_cur_liab_diff: NUMBER(20,4)
流动负债差额(特殊报表科目)
tot_cur_liab_diff: NUMBER(20,4)
流动负债差额(合计平衡项目)
spe_non_cur_liab_diff: NUMBER(20,4)
非流动负债差额(特殊报表科目)
tot_non_cur_liab_diff: NUMBER(20,4)
非流动负债差额(合计平衡项目)
spe_bal_liab_diff: NUMBER(20,4)
负债差额(特殊报表科目)
tot_bal_liab_diff: NUMBER(20,4)
负债差额(合计平衡项目)
spe_bal_shrhldr_eqy_diff: NUMBER(20,4)
股东权益差额(特殊报表科目)
tot_bal_shrhldr_eqy_diff: NUMBER(20,4)
股东权益差额(合计平衡项目)
spe_bal_liab_eqy_diff: NUMBER(20,4)
负债及股东权益差额(特殊报表项目)
tot_bal_liab_eqy_diff: NUMBER(20,4)
负债及股东权益差额(合计平衡项目)
lt_payroll_payable: NUMBER(20,4)
长期应付职工薪酬
other_comp_income: NUMBER(20,4)
其他综合收益
other_equity_tools: NUMBER(20,4)
其他权益工具
other_equity_tools_p_shr: NUMBER(20,4)
其他权益工具:优先股
lending_funds: NUMBER(20,4)
融出资金
accounts_receivable: NUMBER(20,4)
应收款项
st_financing_payable: NUMBER(20,4)
应收短期融资款
payables: NUMBER(20,4)
应付款项
s_info_compcode: VARCHAR2(10)
公司ID
tot_shr: NUMBER(20,4)
期末总资本(单位:股)
hfs_assets: NUMBER(20,4)
持有待售的资产
hfs_sales: NUMBER(20,4)
持有待售的负债
opdate: DATETIME
opdate
opmode: VARCHAR(1)
opmode
"""
__tablename__ = "AShareBalanceSheet"
object_id = Column(VARCHAR2(100), primary_key=True)
s_info_windcode = Column(VARCHAR2(40))
wind_code = Column(VARCHAR2(40))
ann_dt = Column(VARCHAR2(8))
report_period = Column(VARCHAR2(8))
statement_type = Column(VARCHAR2(10))
crncy_code = Column(VARCHAR2(10))
monetary_cap = Column(NUMBER(20,4))
tradable_fin_assets = Column(NUMBER(20,4))
notes_rcv = Column(NUMBER(20,4))
acct_rcv = Column(NUMBER(20,4))
oth_rcv = Column(NUMBER(20,4))
prepay = Column(NUMBER(20,4))
dvd_rcv = Column(NUMBER(20,4))
int_rcv = Column(NUMBER(20,4))
inventories = Column(NUMBER(20,4))
consumptive_bio_assets = Column(NUMBER(20,4))
deferred_exp = Column(NUMBER(20,4))
non_cur_assets_due_within_1y = Column(NUMBER(20,4))
settle_rsrv = Column(NUMBER(20,4))
loans_to_oth_banks = Column(NUMBER(20,4))
prem_rcv = Column(NUMBER(20,4))
rcv_from_reinsurer = Column(NUMBER(20,4))
rcv_from_ceded_insur_cont_rsrv = Column(NUMBER(20,4))
red_monetary_cap_for_sale = Column(NUMBER(20,4))
oth_cur_assets = Column(NUMBER(20,4))
tot_cur_assets = Column(NUMBER(20,4))
fin_assets_avail_for_sale = Column(NUMBER(20,4))
held_to_mty_invest = Column(NUMBER(20,4))
long_term_eqy_invest = Column(NUMBER(20,4))
invest_real_estate = Column(NUMBER(20,4))
|
fro | m .tornadoconnection import TornadoLDAPConnectio | n
|
from docutils import nodes, utils
from docutils.parsers.rst.roles import set_classes
# TODO: I can't figure out how to import this, so it is left unused for now.
def apigen_role(name, rawtext, text, lineno, inliner, options=None, content=None):
    """Link to API Docs page.
    Returns 2 part tuple containing list of nodes to insert into the
    document and a list of system messages. Both are allowed to be
    empty.
    :param name: The role name used in the document.
    :param rawtext: The entire markup snippet, with role.
    :param text: The text marked with the role.
    :param lineno: The line number where rawtext appears in the input.
    :param inliner: The inliner instance that called us.
    :param options: Directive options for customization.
    :param content: The directive content for customization.
    """
    # Mutable-default fix: set_classes() mutates the options dict in place,
    # so a shared default {} would leak state between role invocations.
    if options is None:
        options = {}
    if content is None:
        content = []
    class_name = text.replace('\\', '.')
    # Keep the original semantics: strip the leading dot only when the raw
    # role text itself started with '.'.
    if text[0:1] == '.':
        class_name = class_name[1:]
    if class_name == "":
        msg = inliner.reporter.error(
            'Class name must be a valid fully qualified class name; '
            '"%s" is invalid.' % text, line=lineno)
        prb = inliner.problematic(rawtext, rawtext, msg)
        return [prb], [msg]
    app = inliner.document.settings.env.app
    node = make_link_node(rawtext, app, 'class', class_name, options)
    return [node], []
def make_link_node(rawtext, app, type, slug, options):
    """Create a link to an ApiGen API docs page.
    :param rawtext: Text being replaced with link node.
    :param app: Sphinx application context
    :param type: Item type (class, namespace, etc.)
    :param slug: ID of the thing to link to
    :param options: Options dictionary passed to role func.
    :raises ValueError: if the ``apigen_docs_uri`` config value is unset.
    """
    try:
        base = app.config.apigen_docs_uri
        if not base:
            raise AttributeError
    except AttributeError as err:
        # 'except E, err' is Python-2-only syntax; 'as' works on 2.6+ and 3.x.
        raise ValueError('apigen_docs_uri configuration value is not set (%s)' % str(err))
    # Build API docs link
    slash = '/' if base[-1] != '/' else ''
    ref = base + slash + type + '-' + slug + '.html'
    set_classes(options)
    node = nodes.reference(rawtext, type + ' ' + utils.unescape(slug), refuri=ref,
                           **options)
    return node
def setup(app):
    """Install the plugin.
    Registers the ``apiclass`` role and the ``apigen_docs_uri`` config
    value with Sphinx.
    :param app: Sphinx application context.
    """
    app.info('Initializing Api Class plugin')
    app.add_role('apiclass', apigen_role)
    # Namespace role not wired up yet:
    # app.add_role('apins', apigen_namespace_role)
    app.add_config_value('apigen_docs_uri', None, 'env')
|
"""
Copyright (c) 2016-2017, Kir Chou
https://github.com/note35/sinon/blob/master/LICENSE
A set of functions for handling known error
"""
def _ | _exception_helper(msg, exception=Exception): #pylint: disable=missing-docstring
raise exception(msg)
def mock_type_error(obj): #pylint: disable=missing-docstring
    return __exception_helper("[{}] is an invalid module/class".format(str(obj)))
def prop_type_error(prop): #pylint: disable=missing-docstring
    return __exception_helper(
        "[{}] is an invalid property, it should be a string".format(prop))
def prop_is_func_error | (obj, prop): #pylint: disable=missing-docstring
name = obj.__name__ if hasattr(obj, "__name__") else obj
error_msg = "[{}] is an invalid property, it should be a method in [{}]".format(prop, name)
return __exception_helper(error_msg)
def prop_in_obj_error(obj, prop): #pylint: disable=missing-docstring
    # Message grammar fix: "is not exist in" -> "does not exist in".
    error_msg = "[{}] does not exist in [{}]".format(prop, obj)
    return __exception_helper(error_msg)
def lock_error(obj): #pylint: disable=missing-docstring
    name = obj.__name__ if hasattr(obj, "__name__") else obj
    # Message grammar fix: "have already been" -> "has already been".
    error_msg = "[{}] has already been declared".format(name)
    return __exception_helper(error_msg)
def called_with_empty_error(): #pylint: disable=missing-docstring
    return __exception_helper("There is no argument")
def is_not_spy_error(obj): #pylint: disable=missing-docstring
    return __exception_helper("[{}] is an invalid spy".format(str(obj)))
def matcher_type_error(prop): #pylint: disable=missing-docstring
    return __exception_helper(
        "[{}] is an invalid property, it should be a type".format(prop),
        exception=TypeError)
def matcher_instance_error(prop): #pylint: disable=missing-docstring
    return __exception_helper(
        "[{}] is an invalid property, it should be an instance".format(prop),
        exception=TypeError)
def wrapper_object_not_found_error(): #pylint: disable=missing-docstring
    error_msg = 'Wrapper object cannot be found'
    return __exception_helper(error_msg)
|
import sys, os
import math, numpy as np
import matplotlib.pyplot as pp
import roslib; roslib.load_manifest('sandbox_tapo_darpa_m3')
import hrl_lib.util as ut
import hrl_lib.matplotlib_util as mpu
import scipy.optimize as scp
# Params
area = 0.0
# For one push and pull:
def force_one_push_pull(d):
    """Extract the first sustained rising-force segment from one trial.

    :param d: pickle dict with 'ft' (force), 'adc' and 'adc_bias' entries
    :returns: (adc_deflection, force) numpy arrays for the first rising
        segment longer than 50 samples. The first 30 samples are skipped
        (presumably startup transients -- TODO confirm).
    """
    ft_l = d['ft'][30:]
    adc_l = (d['adc_bias'] - np.array(d['adc'][30:])).tolist()
    f_prev = 0.
    temp_ft_l = []
    temp_adc_l = []
    increasing = True
    count = 1
    for i in range(len(ft_l)):
        f = ft_l[i]
        a = adc_l[i]
        if f>f_prev:
            if increasing:
                temp_ft_l.append(f)
                temp_adc_l.append(a)
            else:
                # Force rising again after a drop; a collected segment
                # longer than 50 samples counts as one completed sweep,
                # and we stop after the first one.
                if len(temp_ft_l) > 50:
                    count +=1
                    if count == 2:
                        break
                increasing = True
        else:
            if increasing:
                if len(temp_ft_l) > 50:
                    f_prev = f
                else:
                    # Segment too short to be a real push; keep collecting.
                    temp_ft_l.append(f)
                    temp_adc_l.append(a)
                increasing = False
        f_prev = f
        #print temp_ft_l
    return np.array(temp_adc_l), np.array(temp_ft_l)
def force_vs_adc(nm, adc, ft, color):
    """Scatter-plot FT_z force against ADC deflection for one dataset."""
    # Use the bare file name (no path, no extension) as the legend label.
    label = nm.split('/')[-1].split('.')[0]
    pp.scatter(adc, ft, marker='o', color=color, label=label, s=50)
    pp.xlabel('ADC bias - ADC')
    pp.ylabel('FT_z')
    pp.legend()
    pp.grid('on')
def residuals(p, y, x, area):
    """Residual between measured forces ``y`` and the cubic-in-x model."""
    c1, c2, c3, c4 = p
    return y - (c1*area*(x)**3 + c2*area*(x)**2 + c3*area*(x) + c4*area)
def fit_the_data(pkl_list, color):
    """Fit the cubic force model and overlay the fits on the current plot.

    Trains on the 4-cm data, refines on the 2-cm data using the 4-cm fit
    as the initial guess, then evaluates on the 1-cm data and prints the
    RMS error. ``pkl_list`` must be ordered [1 cm, 2 cm, 4 cm].
    """
    # Training on 4-cm data
    d = ut.load_pickle(pkl_list[2])
    x,y_meas = force_one_push_pull(d)
    area = math.sqrt(d['contact_area'])
    # Arbitrary initial guess for the four polynomial coefficients.
    coeff_1 = 5.
    coeff_2 = 5.
    coeff_3 = 5.
    coeff_4 = 5.
    p0 = np.array([coeff_1, coeff_2, coeff_3, coeff_4])
    p_lsq = scp.leastsq(residuals, p0, args=(y_meas, x, area))
    print p_lsq[0]
    y_fit = p_lsq[0][0]*area*(x)**3 + p_lsq[0][1]*area*(x)**2 + p_lsq[0][2]*area*(x)**1 + p_lsq[0][3]*area
    pp.plot(x, y_fit, color=color, linewidth = 3.0)
    # Training on 2-cm data with 4-cm result as initial condition
    p0 = p_lsq[0]
    d = ut.load_pickle(pkl_list[1])
    x,y_meas = force_one_push_pull(d)
    area = math.sqrt(d['contact_area'])
    p_lsq = scp.leastsq(residuals, p0, args=(y_meas, x, area))
    print p_lsq[0]
    y_fit = p_lsq[0][0]*area*(x)**3 + p_lsq[0][1]*area*(x)**2 + p_lsq[0][2]*area*(x)**1 + p_lsq[0][3]*area
    pp.plot(x, y_fit, color=color, linewidth = 3.0)
    # Testing on 1-cm
    d = ut.load_pickle(pkl_list[0])
    x,y_meas = force_one_push_pull(d)
    area = math.sqrt(d['contact_area'])
    y_fit = p_lsq[0][0]*area*(x)**3 + p_lsq[0][1]*area*(x)**2 + p_lsq[0][2]*area*(x)**1 + p_lsq[0][3]*area
    pp.plot(x, y_fit, color=color, linewidth = 3.0)
    # Calculate RMS Error
    rmse = math.sqrt(float(np.sum((np.array(y_meas) - np.array(y_fit))**2))/float(np.size(y_meas)))
    print rmse
if __name__ == '__main__':
    mpu.figure()
    pkl_list = ['./1_cm.pkl', './2_cm.pkl', './4_cm.pkl']
    color_list = ['r', 'g', 'b']
    # Scatter the raw force-vs-ADC data for each indentation depth.
    for pkl, c in zip(pkl_list, color_list):
        d = ut.load_pickle(pkl)
        adc, ft = force_one_push_pull(d)
        force_vs_adc(pkl, adc, ft, c)
    pp.xlim((0,1000))
    pp.ylim((-10,80))
    # Overlay the fitted cubic model curves in black.
    fit_the_data(pkl_list, 'k')
    pp.show()
|
from ex25 import *
# Practice script (Python 2): escapes, string formatting, functions, and the
# word-manipulation helpers star-imported from ex25.
print "Let's practice everything."
print 'You\'d need to know \'bout escapes with \\ that do \n newlines and \t tabs.'
# Triple-quoted string with embedded tab/newline escape sequences.
poem = """
\tThe lovely world
with logic so firmly planted
cannot discern \n the needs of love
nor comprehend passion from intuition
and requires an explantion
\n\t\twhere there is none.
"""
print "--------------"
print poem
print "--------------"
# 10 - 2 + 2 - 5 evaluates to 5.
five = 10 - 2 + 2 - 5
print "This should be five: %s" % five
def secret_formula(started):
    """Return (jelly_beans, jars, crates) derived from the starting count."""
    jelly_beans = started * 500
    # Under Python 2, '/' on ints floors, so jars/crates are whole numbers.
    jars = jelly_beans / 1000
    crates = jars / 100
    return jelly_beans, jars, crates
start_point = 10000
beans, jars, crates = secret_formula(start_point)
print "With a starting point of: %d" % start_point
print "We'd have %d beans, %d jars, and %d crates." % (beans, jars, crates)
start_point = start_point / 10
print "We can also do that this way:"
# %-formatting can consume the returned tuple directly.
print "We'd have %d beans, %d jars, and %d crates." % secret_formula(start_point)
# Exercise the ex25 helpers on a sample sentence.
sentence = "All good things come to those who wait."
words = break_words(sentence)
sorted_words = sort_words(words)
print_first_word(words)
print_last_word(words)
print_first_word(sorted_words)
print_last_word(sorted_words)
sorted_words = sort_sentence(sentence)
print sorted_words
print_first_and_last(sentence)
print_first_and_last_sorted(sentence)
|
import numpy as np
import pytest
from sklearn.preprocessing import StandardScaler
from sklearn.utils import check_random_state
import fri
from fri import genLupiData
from fri.parameter_searcher import find_best_model
@pytest.fixture(scope="session")
def randomstate():
    """Session-wide RNG with a fixed seed so runs are reproducible."""
    seed = 1337
    return check_random_state(seed)
@pytest.mark.parametrize("n_weak", [0, 2])
@pytest.mark.parametrize("problem", fri.LUPI_MODELS)
def test_baseline_lupi(problem, n_weak, randomstate):
    """Smoke-test the parameter search on generated LUPI data.

    For every LUPI model type (with and without weakly-relevant features),
    the best model found must score better than chance (> 0.5).
    """
    n_samples = 300
    template = problem.value[0]().get_initmodel_template
    params = problem.value[0]().get_all_parameters()
    X, X_priv, y = genLupiData(
        problem,
        n_strel=1,
        n_weakrel=n_weak,
        n_samples=n_samples,
        n_irrel=1,
        n_repeated=0,
        random_state=randomstate,
    )
    # Standardize both feature blocks independently, then concatenate so the
    # privileged features are the trailing columns (see lupi_features below).
    X = StandardScaler().fit(X).transform(X)
    X_priv = StandardScaler().fit(X_priv).transform(X_priv)
    combined = np.hstack([X, X_priv])
    n_iter = 50  # renamed from 'iter', which shadowed the builtin
    best_model, best_score = find_best_model(
        template,
        params,
        (combined, y),
        randomstate,
        n_iter,
        verbose=1,
        n_jobs=-2,
        lupi_features=X_priv.shape[1],
    )
    assert best_score > 0.5
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class M | igration(migrations.Migration):
dependencies = [
('libreosteoweb', '0021_therapeutsettings_siret'),
]
operations = [
migrations.AddField(
model_name='therapeutsettings',
name='invoice_footer',
field=models.TextField(null=True, verbose_name='Invoice footer', blank=True),
| preserve_default=True,
),
]
|
# Usually you set "language" from the command line for these cases.
# Language for content autogenerated by Sphinx.
language = 'en'
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
# (Requires the sphinx.ext.todo extension to be enabled in 'extensions'.)
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# 'alabaster' is the Sphinx default theme.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = 'tritonschedule v'
# A shorter title for the navigation bar. Default is the same as html_title.
#
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None
# The name of an image file (relative to this directory) to use as a favicon of
# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# NOTE(review): Sphinx warns at build time if '_static' does not exist.
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []
# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}
# If false, no module index is generated.
#
# html_domain_indices = True
# If false, no index is generated.
#
# html_use_index = True
# If true, the index is split into individual pages for each letter.
#
# html_split_index = False
# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
# (Only used when building with the htmlhelp builder.)
htmlhelp_basename = 'tritonscheduledoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',
    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
# NOTE(review): master_doc is defined earlier in conf.py (outside this excerpt).
latex_documents = [
    (master_doc, 'tritonschedule.tex', 'tritonschedule Documentation',
     'tritonschedule', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False
# If true, show page references after internal links.
#
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
#
# latex_show_urls = False
# Documents to append as an appendix to all manua | ls.
#
# latex_appendices = []
# If false, no module index is generated.
#
# latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
# NOTE(review): master_doc and author are defined earlier in conf.py.
man_pages = [
    (master_doc, 'tritonschedule', 'tritonschedule Documentation',
     [author], 1)
]
|
# If true, show URL addresses after external links.
#
# man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'tritonschedule', 'tritonschedule Documentation',
     author, 'tritonschedule', 'One line description of project.',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []
# If false, no module index is generated.
#
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
# NOTE(review): project/author/copyright are defined earlier in conf.py
# (outside this excerpt).
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The basename for the epub file. It defaults to the project name.
# epub_basename = project
# The HTML theme for the epub output. Since the default themes are not
# optimized for small screen space, using the same theme for HTML and epub
# output is usually not wise. This defaults to 'epub', a theme designed to save
# visual space.
#
# epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or 'en' if the language |
# coding:utf-8
#
# The MIT License (MIT)
#
# Copyright (c) 2016-2021 yutiansut/QUANTAXIS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sqlite3
import os
# Module-level singleton, populated lazily by _init()/get_cache().
CACHE = None
def _init(cache_file):
    """Creates a new Cache object and installs it as the module singleton."""
    global CACHE
    CACHE = Cache(cache_file)
def get_cache(config_file=None):
    """Return the global cache object, creating it on first use.

    :param config_file: path handed to Cache() only when the global
        cache has not been initialized yet.
    """
    if CACHE is not None:
        return CACHE
    _init(config_file)
    return CACHE
class Cache():
    """This object is used to interface with the job cache. It uses a SQLite3
    database to store the information.

    :param str cache_file: The path to the cache file. This will be created if
                           it does not already exist.
    """
    def __init__(self, cache_file):
        self.filename = cache_file
        # Create the schema only on first use of this file.
        if not os.path.isfile(self.filename):
            self._create(self.filename)
        self.conn = sqlite3.connect(self.filename)
        self.cur = self.conn.cursor()
        # Enforce the history.hash -> jobs.hash foreign-key relationship.
        self.cur.execute("PRAGMA foreign_keys = ON")
    def __del__(self):
        """Commit the changes and close the connection."""
        # Guard: __init__ may have failed before self.conn was assigned.
        if getattr(self, "conn", None):
            self.conn.commit()
            self.conn.close()
    def _create(self, cache_file):
        """Create the tables needed to store the information."""
        conn = sqlite3.connect(cache_file)
        cur = conn.cursor()
        cur.execute("PRAGMA foreign_keys = ON")
        cur.execute('''
            CREATE TABLE jobs(
            hash TEXT NOT NULL UNIQUE PRIMARY KEY, description TEXT NOT NULL,
            last_run REAL, next_run REAL, last_run_result INTEGER)''')
        cur.execute('''
            CREATE TABLE history(
            hash TEXT, description TEXT, time REAL, result INTEGER,
            FOREIGN KEY(hash) REFERENCES jobs(hash))''')
        conn.commit()
        conn.close()
    def has(self, job):
        """Checks to see whether or not a job exists in the table.

        :param dict job: The job dictionary
        :returns: True if the job exists, False otherwise
        """
        # BUG FIX: cursor.execute() returns the cursor itself, which is always
        # truthy, so the old bool(...) wrapper made this return True for every
        # job. Read the actual count instead.
        self.cur.execute('SELECT count(*) FROM jobs WHERE hash=?', (job["id"],))
        return self.cur.fetchone()[0] > 0
    def get(self, id):
        """Retrieves the job with the selected ID.

        :param str id: The ID of the job
        :returns: The dictionary of the job if found, None otherwise
        """
        self.cur.execute("SELECT * FROM jobs WHERE hash=?", (id,))
        item = self.cur.fetchone()
        if item:
            return dict(zip(
                ("id", "description", "last-run", "next-run", "last-run-result"),
                item))
        return None
    def update(self, job):
        """Update last_run, next_run, and last_run_result for an existing job.

        :param dict job: The job dictionary
        :returns: True
        """
        self.cur.execute('''UPDATE jobs
            SET last_run=?,next_run=?,last_run_result=? WHERE hash=?''', (
            job["last-run"], job["next-run"], job["last-run-result"], job["id"]))
        # Return True as documented (previously returned None implicitly).
        return True
    def add_job(self, job):
        """Adds a new job into the cache.

        :param dict job: The job dictionary
        :returns: True
        """
        self.cur.execute("INSERT INTO jobs VALUES(?,?,?,?,?)", (
            job["id"], job["description"], job["last-run"], job["next-run"], job["last-run-result"]))
        return True
    def add_result(self, job):
        """Adds a job run result to the history table.

        :param dict job: The job dictionary
        :returns: True
        """
        self.cur.execute(
            "INSERT INTO history VALUES(?,?,?,?)",
            (job["id"], job["description"], job["last-run"], job["last-run-result"]))
        return True
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyKiwisolver(PythonPackage):
    """A fast implementation of the Cassowary constraint solver"""
    homepage = "https://github.com/nucleic/kiwi"
    pypi = "kiwisolver/kiwisolver-1.1.0.tar.gz"
    # Known releases with the sha256 checksum of each PyPI sdist.
    version('1.3.2', sha256='fc4453705b81d03568d5b808ad8f09c77c47534f6ac2e72e733f9ca4714aa75c')
    version('1.3.1', sha256='950a199911a8d94683a6b10321f9345d5a3a8433ec58b217ace979e18f16e248')
    version('1.3.0', sha256='14f81644e1f3bf01fbc8b9c990a7889e9bb4400c4d0ff9155aa0bdd19cce24a9')
    version('1.2.0', sha256='247800260cd38160c362d211dcaf4ed0f7816afb5efe56544748b21d6ad6d17f')
    version('1.1.0', sha256='53eaed412477c836e1b9522c19858a8557d6e595077830146182225613b11a75')
    version('1.0.1', sha256='ce3be5d520b4d2c3e5eeb4cd2ef62b9b9ab8ac6b6fedbaa0e39cdb6f50644278')
    # Supported Python range narrows with newer kiwisolver releases.
    depends_on('python@2.7:2.8,3.4:', type=('build', 'run'))
    depends_on('python@3.5:', type=('build', 'run'), when='@1.2.0:')
    depends_on('python@3.6:', type=('build', 'run'), when='@1.3.0:')
    depends_on('python@3.7:', type=('build', 'run'), when='@1.3.2:')
    depends_on('py-setuptools', type='build')
    # cppy is a build-only dependency starting with 1.2.0.
    depends_on('py-cppy@1.1.0:', type='build', when='@1.2.0:')
|
# Lint as: python3
"""A torque based stance controller framework."""
from __future__ import absolute_import
from __future__ import division
#from __future__ import google_type_annotations
from __future__ import print_function

import inspect
import os
import sys

currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(os.path.dirname(currentdir))
os.sys.path.insert(0, parentdir)

from typing import Any, Sequence, Tuple

import numpy as np
import pybullet as p # pytype: disable=import-error
# Import the motion_imitation pieces this controller depends on; bail out
# with installation instructions if they are missing.
try:
  from mpc_controller import gait_generator as gait_generator_lib
  from mpc_controller import leg_controller
except: #pylint: disable=W0702
  print("You need to install motion_imitation")
  print("Either run python3 setup.py install --user in this repo")
  print("or use pip3 install motion_imitation --user")
  # BUG FIX: 'sys' was never imported, so this line raised NameError instead
  # of exiting (fixed by the import block above). Exit nonzero on error.
  sys.exit(1)
try:
  import mpc_osqp as convex_mpc # pytype: disable=import-error
except: #pylint: disable=W0702
  print("You need to install motion_imitation")
  print("Either run python3 setup.py install --user in this repo")
  print("or use pip3 install motion_imitation --user")
  sys.exit(1)
# Number of force components the solver returns per leg (x, y, z).
_FORCE_DIMENSION = 3
# The QP weights in the convex MPC formulation. See the MIT paper for details:
# https://ieeexplore.ieee.org/document/8594448/
# Intuitively, this is the weights of each state dimension when tracking a
# desired CoM trajectory. The full CoM state is represented by
# (roll_pitch_yaw, position, angular_velocity, velocity, gravity_place_holder).
# _MPC_WEIGHTS = (5, 5, 0.2, 0, 0, 10, 0.5, 0.5, 0.2, 0.2, 0.2, 0.1, 0)
# This worked well for in-place stepping in the real robot.
# _MPC_WEIGHTS = (5, 5, 0.2, 0, 0, 10, 0., 0., 0.2, 1., 1., 0., 0)
_MPC_WEIGHTS = (5, 5, 0.2, 0, 0, 10, 0., 0., 1., 1., 1., 0., 0)
# MPC look-ahead: 10 planning steps of 0.025 s each.
_PLANNING_HORIZON_STEPS = 10
_PLANNING_TIMESTEP = 0.025
class TorqueStanceLegController(leg_controller.LegController):
  """A torque based stance leg controller framework.

  Takes in high level parameters like walking speed and turning speed, and
  generates the necessary torques for stance legs.
  """
  def __init__(
      self,
      robot: Any,
      gait_generator: Any,
      state_estimator: Any,
      desired_speed: Tuple[float, float] = (0, 0),
      desired_twisting_speed: float = 0,
      desired_body_height: float = 0.45,
      body_mass: float = 220 / 9.8,
      body_inertia: Tuple[float, float, float, float, float, float, float,
                          float, float] = (0.07335, 0, 0, 0, 0.25068, 0, 0, 0,
                                           0.25447),
      num_legs: int = 4,
      friction_coeffs: Sequence[float] = (0.45, 0.45, 0.45, 0.45),
      qp_solver = convex_mpc.QPOASES
  ):
    """Initializes the class.

    Tracks the desired position/velocity of the robot by computing proper joint
    torques using MPC module.

    Args:
      robot: A robot instance.
      gait_generator: Used to query the locomotion phase and leg states.
      state_estimator: Estimate the robot states (e.g. CoM velocity).
      desired_speed: desired CoM speed in x-y plane.
      desired_twisting_speed: desired CoM rotating speed in z direction.
      desired_body_height: The standing height of the robot.
      body_mass: The total mass of the robot.
      body_inertia: The inertia matrix in the body principle frame. We assume
        the body principle coordinate frame has x-forward and z-up.
      num_legs: The number of legs used for force planning.
      friction_coeffs: The friction coeffs on the contact surfaces.
      qp_solver: QP solver backend for the convex MPC (default qpOASES).
    """
    self._robot = robot
    self._gait_generator = gait_generator
    self._state_estimator = state_estimator
    self.desired_speed = desired_speed
    self.desired_twisting_speed = desired_twisting_speed
    self._desired_body_height = desired_body_height
    self._body_mass = body_mass
    self._num_legs = num_legs
    self._friction_coeffs = np.array(friction_coeffs)
    # The C++ binding takes plain Python lists, not tuples/ndarrays.
    body_inertia_list = list(body_inertia)
    weights_list = list(_MPC_WEIGHTS)
    self._cpp_mpc = convex_mpc.ConvexMpc(
        body_mass,
        body_inertia_list,
        self._num_legs,
        _PLANNING_HORIZON_STEPS,
        _PLANNING_TIMESTEP,
        weights_list,
        1e-5,
        qp_solver
    )
  def reset(self, current_time):
    # No internal state to reset; the argument is unused.
    del current_time
  def update(self, current_time):
    # No per-step bookkeeping needed; the argument is unused.
    del current_time
  def get_action(self):
    """Computes the torque for stance legs."""
    # Target CoM state: hold the standing height, track commanded planar
    # velocity and yaw rate, zero roll/pitch.
    desired_com_position = np.array((0., 0., self._desired_body_height),
                                    dtype=np.float64)
    desired_com_velocity = np.array(
        (self.desired_speed[0], self.desired_speed[1], 0.), dtype=np.float64)
    desired_com_roll_pitch_yaw = np.array((0., 0., 0.), dtype=np.float64)
    desired_com_angular_velocity = np.array(
        (0., 0., self.desired_twisting_speed), dtype=np.float64)
    # A leg is treated as load-bearing when in stance or early contact.
    foot_contact_state = np.array(
        [(leg_state in (gait_generator_lib.LegState.STANCE,
                        gait_generator_lib.LegState.EARLY_CONTACT))
         for leg_state in self._gait_generator.desired_leg_state],
        dtype=np.int32)
    # We use the body yaw aligned world frame for MPC computation.
    com_roll_pitch_yaw = np.array(self._robot.GetBaseRollPitchYaw(),
                                  dtype=np.float64)
    com_roll_pitch_yaw[2] = 0
    #predicted_contact_forces=[0]*self._num_legs*_FORCE_DIMENSION
    # print("Com Vel: {}".format(self._state_estimator.com_velocity_body_frame))
    # print("Com RPY: {}".format(self._robot.GetBaseRollPitchYawRate()))
    # print("Com RPY Rate: {}".format(self._robot.GetBaseRollPitchYawRate()))
    p.submitProfileTiming("predicted_contact_forces")
    predicted_contact_forces = self._cpp_mpc.compute_contact_forces(
        [0], #com_position
        np.asarray(self._state_estimator.com_velocity_body_frame,
                   dtype=np.float64), #com_velocity
        np.array(com_roll_pitch_yaw, dtype=np.float64), #com_roll_pitch_yaw
        # Angular velocity in the yaw aligned world frame is actually different
        # from rpy rate. We use it here as a simple approximation.
        np.asarray(self._robot.GetBaseRollPitchYawRate(),
                   dtype=np.float64), #com_angular_velocity
        foot_contact_state, #foot_contact_states
        np.array(self._robot.GetFootPositionsInBaseFrame().flatten(),
                 dtype=np.float64), #foot_positions_base_frame
        self._friction_coeffs, #foot_friction_coeffs
        desired_com_position, #desired_com_position
        desired_com_velocity, #desired_com_velocity
        desired_com_roll_pitch_yaw, #desired_com_roll_pitch_yaw
        desired_com_angular_velocity #desired_com_angular_velocity
    )
    p.submitProfileTiming()
    # sol = np.array(predicted_contact_forces).reshape((-1, 12))
    # x_dim = np.array([0, 3, 6, 9])
    # y_dim = x_dim + 1
    # z_dim = y_dim + 1
    # print("Y_forces: {}".format(sol[:, y_dim]))
    # Unpack the flat solver output into one 3-vector of force per leg.
    contact_forces = {}
    for i in range(self._num_legs):
      contact_forces[i] = np.array(
          predicted_contact_forces[i * _FORCE_DIMENSION:(i + 1) *
                                   _FORCE_DIMENSION])
    action = {}
    for leg_id, force in contact_forces.items():
      # While "Lose Contact" is useful in simulation, in real environment it's
      # susceptible to sensor noise. Disabling for now.
      # if self._gait_generator.leg_state[
      #     leg_id] == gait_generator_lib.LegState.LOSE_CONTACT:
      #   force = (0, 0, 0)
      motor_torques = self._robot.MapContactForceToJointTorques(leg_id, force)
      for joint_id, torque in motor_torques.items():
        # NOTE(review): appears to be a hybrid motor command with only the
        # torque slot populated -- confirm against the robot API.
        action[joint_id] = (0, 0, 0, 0, torque)
    return action, contact_forces
|
fileObject = open(filepath, 'a+')
fileObject.writelines(transList)
fileObject.flush()
return fileObject
def shelveNetwork(graph = {},filepath = "defaultFilePath"):
    """ Stores via the Graph at the filepath location e.g '/apath/file.xay'
    Preconditions: Takes a graph
    Postconditions: Returns a shelf object with stored graph
    Usage:
    >>> agraph = {'Key1': ['Value1', 'Value2'], 'Key2':['Value3',4]}
    >>> shelveGraph(agraph,'storeagraph')
    {'Key2': ['Value3', 4], 'Key1': ['Value1', 'Value2']}
    >>>
    Algorithms (see pseudocode below)
    #Open a shelfObject
    #read graph into shelfObject; deal with case of emptygraph via get fn
    #return shelfObject
    """
    # NOTE(review): body is unimplemented -- the function currently returns
    # None, not the shelf object promised by the docstring above.
# Test fixtures: small adjacency-list graphs in XAYA format
# (key -> list of target nodes).
#DATASETS FOR FIND COMPONENTS
anetwork = {'a': [1,2,3], 'b' : [4,5,6], 1 : ['d', 'e', 'f']}
nothernetwork = {'a': [1,2,3], 'b' : [4,5,6], 1 : ['d', 'e', 'f'], 4: ['e', 'f', 'g'],
                 'x': [9, 10,11], 11: [12, 13, 14]}
loopnetwork = {'a': [1,2,3], 'b' : [4,5,6], 1 : ['d', 'e', 'f'], 4: ['e', 'f', 'g'],
               12: [13], 13: [12]}
#DATASETS FOR MUTUAL INFORMATION
fullyconnet = {'a':['a','b','c'],
               'b': ['a', 'b', 'c'],
               'c': ['a','b','c']}
noselfcon = {'a':['b','c'],
             'b': ['a', 'c'],
             'c': ['a','b']}
cycle3 = {'a':['b'],
          'b':['c'],
          'c':['a']}
cycle4 = {'a':['b'],
          'b':['c'],
          'c':['d'],
          'd':['a']}
tree = {'a': ['b','c'],
        'b': ['d', 'e'],
        'c': ['f', 'g'],
        'd': ['h', 'i'],
        'e': ['j', 'k']}
def findComponents (graph = {}):
    """
    Given a directed graph (network) findComponents returns a list of isolated
    "Islands in the Network" or components. Within an island, there are paths
    to nodes. Across islands, there is not direct path.
    General Algorithm:
    1. Define the keyset (the parent nodes in a set of directed arcs)
    2. Assign each parent node and its direct children (arcs) to a new component
    3. Iterate through the node-set and combine parent nodes that have paths, including their child arcs
    (These are the candidate components)
    4. Remove candidate components that are subsets of each other.
    5. Create final components from the remaining candidate components
    (and do a check on the results of loops -- note this may be a bug in the path-finding algorithm)
    6. Return the final list of components
    """
    # Define the keyset
    keys = graph.keys()
    keys2 = copy.deepcopy(keys)
    # For each key, assign arcs to a new component.
    compgraph = {}
    compkey = 0
    for key in keys:
        compkey = compkey +1
        compgraph[compkey] = [key] + graph[key]
    # Iterate through keys, and combine pairs of keys with a path between them
    # These are the 'candidate' components
    # NOTE(review): 'key' here is the leftover loop variable from the loop
    # above (always the LAST key); there is no inner loop re-binding it, so
    # only pairs (last_key, dkey) are ever tested. Looks like a missing
    # 'for key in keys' loop -- confirm intent before changing.
    for dkey in keys2:
        if key <> dkey:
            if xayacore.findAllPaths(graph, key, dkey) <> {}:
                compgraph[compkey] = [key] + graph[key] + graph[dkey]
                keys2.remove(key) # remove the key that has been combined
    # Remove candidate components that are simply subsets of each other
    compkeys = compgraph.keys()
    compkeys2 = copy.deepcopy(compkeys)
    for key in compkeys:
        for nextkey in compkeys:
            if key <> nextkey:
                set1 = set(compgraph[key])
                set2 = set(compgraph[nextkey])
                # Drop 'key' when it is a strict subset of 'nextkey'.
                if set1.difference(set2) == set([]) and set2.difference(set1) <> set([]):
                    compkeys2.remove(key)
    # Create Final components
    finalcomp = {}
    finalcompkey = 0
    for key in compkeys2:
        # Check on and remove the output from loops -- same element is repeated so list <> set cardinality
        if len(compgraph[key]) == len(set(compgraph[key])):
            finalcompkey = finalcompkey + 1
            finalcomp[finalcompkey] = compgraph[key]
    return finalcomp
def countArcs(graph = {}):
    """Return the total number of arcs in the graph (sum of all target lists)."""
    return sum(len(targets) for targets in graph.values())
def calcMI(graph= {}):
    '''Calculate the mutual information of a XAYA-format graph's adjacency
    matrix of connections (i.e. does not assume flow values).

    Each arc is weighted equally: P(source,dest) = 1/arcs, P(source) is the
    fraction of arcs leaving a node, P(dest) the fraction entering it.
    Returns 0 for a graph with no arcs (previously raised ZeroDivisionError).
    '''
    sources = graph
    destinations = xayacore.reverseGraph(sources)
    # Reuse countArcs instead of re-counting inline (keeps the logic in sync).
    arcounter = countArcs(sources)
    if arcounter == 0:
        # No arcs: no information, and avoids dividing by zero below.
        return 0
    pSourceDest = 1/float(arcounter)
    sumMI = 0
    for key in sources:
        for arc in sources[key]:
            # P(Source): fraction of all arcs that leave this node.
            pSource = len(sources[key])/float(arcounter)
            # P(Destination): fraction of all arcs that enter this node.
            pDest = len(destinations[arc])/float(arcounter)
            quotient = pSourceDest/(pSource * pDest)
            mi = pSourceDest * math.log(quotient,2)
            sumMI = sumMI + mi
    return sumMI
def createModelBipGraph(network = {}):
    '''
    Given a network, creates a bipartite graph with the same number of arcs,
    and the same degree distribution on keys
    '''
    model = {}
    total_arcs = countArcs(network)
    # Start synthetic keys well above any plausible node number.
    next_key = total_arcs * 10
    for node in network:
        next_key = next_key + 1
        degree = len(network[node])
        # Sample 'degree' arc endpoints without replacement.
        model[next_key] = xayastats.samplePopulationsNoR(degree, total_arcs)
    return model
def test():
if 1 == 2:
print 'false'
else: print 'true'
def createRandomDirectedNetwork(nodes=0, edges=0):
    '''Given a number of nodes and edges, generate a random directed graph
    in XAYA format, where each edge is randomly generated. Arcs from a node,
    back into that node are disallowed in the current version.
    Last modified, Sept, 29, 2009.
    '''
    network = {}
    # Guard: non-positive sizes yield an empty graph.
    if (nodes <=0) or (edges <= 0):
        return network
    # Guard: a complete digraph without self-loops holds nodes*(nodes-1) arcs.
    if edges > nodes * (nodes - 1):
        return network
    # Keep drawing random (node, edge) pairs until enough distinct arcs exist.
    while countNetworkEdges(network) < edges:
        nodevalue = xayastats.diceroll(1,nodes)
        edgevalue = xayastats.diceroll(1,nodes)
        # Reject self-loops; otherwise insert the arc if it is new.
        if nodevalue != edgevalue:
            # 'in' replaces the deprecated dict.has_key() (same behavior).
            if nodevalue not in network:
                network[nodevalue] = [edgevalue]
            elif edgevalue not in network[nodevalue]:
                network[nodevalue].append(edgevalue)
    return network
def countNetworkEdges(network):
    """Return the total number of edges (arcs) in the network.

    NOTE(review): duplicates countArcs() above; kept as a separate function
    for backward compatibility with existing callers.
    """
    return sum(len(targets) for targets in network.values())
def runVirusSimulator(network={}, immuneprob=1, viralprob=1, precision=10, iterations=25, itertables=0):
'''Virus Simulation consists of:
1. Initializing a state table for the network
2. Initializing an initial point of infection on the statetable
3. Running the simulation
3a Immune Phase
3b Viral Immune Phase
3c Viral virulent phase
itertables is a key, which if set to <> return |
#! | /usr/bin/env python
from PyMiniQbt import getVersion, getName, QasmAsyncInterpreter
import sys
def main(arguments):
    """Interpret the QASM file named on the command line and print results.

    arguments: argv-style list; arguments[1] is the path of the QASM source.
    Exits with a non-zero status when no file is given.
    """
    # Idiomatic length check (was: if(not len(arguments) == 2)).
    if len(arguments) != 2:
        print("which file?")
        sys.exit(-1)
    print("Using", getName(), "version:", getVersion())
    with open(arguments[1]) as dat:
        src = dat.read()
    interpreter = QasmAsyncInterpreter()
    interpreter.interpret(src)
    # Drain and report any interpreter errors before dumping results.
    while interpreter.hasErrors():
        print(interpreter.getError())
    print("results:")
    for register in interpreter.getRegisters():
        print(register,":",interpreter.readClassicResult(register).dataToString())
if __name__ == "__main__":
    main(sys.argv)
import logging
from sleekxmpp import ClientXMPP
log = logging.getLogger(__name__)
class HipshareXMPP(ClientXMPP):
    """Minimal SleekXMPP client that bootstraps the session on connect."""
    def __init__(self, jid, password):
        ClientXMPP.__init__(self, jid, password)
        # Run session setup in its own thread so the connect loop isn't blocked.
        self.add_event_handler("session_start", self.session_start, threaded=True)
    def session_start(self, event):
        # Standard XMPP bootstrap: announce presence, then fetch the roster.
        self.send_presence()
        self.get_roster()
class Client(object):
    """Thin wrapper around HipshareXMPP: wires in config-driven plugins and
    broadcasts emitted lines to every configured room."""
    def __init__(self, config):
        self.config = config
        self.xmpp = HipshareXMPP(config.strategy['jid'], config.strategy['password'])
        # Register every plugin the configuration asks for.
        for plugin in config.options['plugins']:
            self.xmpp.register_plugin(plugin)
    def connect(self, *args, **kwargs):
        # Delegate straight to the underlying XMPP client.
        return self.xmpp.connect(*args, **kwargs)
    def disconnect(self, *args, **kwargs):
        return self.xmpp.disconnect(*args, **kwargs)
    def get_plugin(self, plugin):
        return self.xmpp.plugin[plugin]
    def process(self, *args, **kwargs):
        return self.xmpp.process(*args, **kwargs)
    def line_emitter(self, data):
        rooms = self.config.strategy['rooms']
        log.debug("Emitting {} to {}:".format(data, rooms))
        for room in rooms:
            self.xmpp.send_message(mto=room, mbody=data, mtype='groupchat')
|
ging.WARNING)
class TestRepoUtils(object):
    @classmethod
    def setup_class(cls):
        """Create a scratch directory and pick the repo under test: a real
        GitHub repo when TEST_ONLINE is set, otherwise a local file:// repo
        that setup_method initializes per test."""
        cls.temp_path = tempfile.mkdtemp(prefix='test_repo-')
        cls.dest_path = cls.temp_path + '/empty_dest'
        cls.src_path = cls.temp_path + '/empty_src'
        if 'TEST_ONLINE' in os.environ:
            cls.repo_url = 'https://github.com/ceph/empty.git'
            cls.commit = '71245d8e454a06a38a00bff09d8f19607c72e8bf'
        else:
            cls.repo_url = 'file://' + cls.src_path
            # Filled in by setup_method once the local repo has a commit.
            cls.commit = None
    @classmethod
    def teardown_class(cls):
        # Remove the whole scratch tree, including the local source repo.
        shutil.rmtree(cls.temp_path)
def setup_method(self, method):
assert not os.path.exists(self.dest_path)
proc = subprocess.Popen(
('git', 'init', self.src_path),
stdout=subprocess.PIPE,
)
assert proc.wait() == 0
proc = subprocess.Popen | (
('git', 'config', 'user.email', 'test@ceph.com'),
cwd=self.src_path,
stdout=subprocess.PIPE,
)
assert proc.wait() == 0
proc = subprocess.Popen(
('git', 'config', 'user.name', 'Test User'),
cwd=self.src_path,
stdout=subprocess.PIPE,
)
assert proc.wait() == 0
proc = subprocess.Popen(
('git', 'commit', '--allow-empty', '--allow-empty-message',
'--no-edit'),
cwd=self.src_path,
stdout=subprocess.PIPE,
)
assert proc.wait() == 0
if not self.commit:
result = subprocess.check_output(
'git rev-parse HEAD',
shell=True,
cwd=self.src_path,
).split()
assert result
self.commit = result[0].decode()
    def teardown_method(self, method):
        # Clones may legitimately not exist (negative tests), so ignore errors.
        shutil.rmtree(self.dest_path, ignore_errors=True)
    def test_clone_repo_existing_branch(self):
        """Cloning an existing branch at a known commit succeeds."""
        repo_utils.clone_repo(self.repo_url, self.dest_path, 'master', self.commit)
        assert os.path.exists(self.dest_path)
    def test_clone_repo_non_existing_branch(self):
        """Cloning a missing branch raises and leaves no clone behind."""
        with raises(BranchNotFoundError):
            repo_utils.clone_repo(self.repo_url, self.dest_path, 'nobranch', self.commit)
        assert not os.path.exists(self.dest_path)
    def test_fetch_no_repo(self):
        """Fetching in a path that is not a git repo raises OSError."""
        fake_dest_path = self.temp_path + '/not_a_repo'
        assert not os.path.exists(fake_dest_path)
        with raises(OSError):
            repo_utils.fetch(fake_dest_path)
        # Failure must not create the directory as a side effect.
        assert not os.path.exists(fake_dest_path)
    def test_fetch_noop(self):
        """Fetching an already up-to-date clone is a harmless no-op."""
        repo_utils.clone_repo(self.repo_url, self.dest_path, 'master', self.commit)
        repo_utils.fetch(self.dest_path)
        assert os.path.exists(self.dest_path)
    def test_fetch_branch_no_repo(self):
        """Fetching a branch in a non-repo path raises OSError."""
        fake_dest_path = self.temp_path + '/not_a_repo'
        assert not os.path.exists(fake_dest_path)
        with raises(OSError):
            repo_utils.fetch_branch(fake_dest_path, 'master')
        assert not os.path.exists(fake_dest_path)
    def test_fetch_branch_fake_branch(self):
        """Fetching a branch that does not exist raises BranchNotFoundError."""
        repo_utils.clone_repo(self.repo_url, self.dest_path, 'master', self.commit)
        with raises(BranchNotFoundError):
            repo_utils.fetch_branch(self.dest_path, 'nobranch')
    @mark.parametrize('git_str',
                      ["fatal: couldn't find remote ref",
                       "fatal: Couldn't find remote ref"])
    @mock.patch('subprocess.Popen')
    def test_fetch_branch_different_git_versions(self, mock_popen, git_str):
        """
        Newer git versions return a lower case string
        See: https://github.com/git/git/commit/0b9c3afdbfb629363
        """
        branch_name = 'nobranch'
        # Fake a failed `git fetch` whose output carries the version-specific
        # "couldn't find remote ref" message; fetch_branch must recognize both.
        process_mock = mock.Mock()
        attrs = {
            'wait.return_value': 1,
            'stdout.read.return_value': f"{git_str} {branch_name}".encode(),
        }
        process_mock.configure_mock(**attrs)
        mock_popen.return_value = process_mock
        with raises(BranchNotFoundError):
            repo_utils.fetch_branch('', branch_name)
    def test_enforce_existing_branch(self):
        """enforce_repo_state creates the clone for an existing branch."""
        repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
                                      'master')
        assert os.path.exists(self.dest_path)
    def test_enforce_existing_commit(self):
        """enforce_repo_state also succeeds when pinned to a known commit."""
        repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
                                      'master', self.commit)
        assert os.path.exists(self.dest_path)
    def test_enforce_non_existing_branch(self):
        """A missing branch raises and leaves no clone behind."""
        with raises(BranchNotFoundError):
            repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
                                          'blah', self.commit)
        assert not os.path.exists(self.dest_path)
    def test_enforce_non_existing_commit(self):
        """A commit absent from the branch raises and removes the clone."""
        with raises(CommitNotFoundError):
            repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
                                          'master', 'c69e90807d222c1719c45c8c758bf6fac3d985f1')
        assert not os.path.exists(self.dest_path)
    def test_enforce_multiple_calls_same_branch(self):
        """Repeated enforce calls on the same branch/commit are idempotent."""
        repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
                                      'master', self.commit)
        assert os.path.exists(self.dest_path)
        repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
                                      'master', self.commit)
        assert os.path.exists(self.dest_path)
        repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
                                      'master', self.commit)
        assert os.path.exists(self.dest_path)
    def test_enforce_multiple_calls_different_branches(self):
        """Failed enforce calls must clean up and not poison later good ones."""
        with raises(BranchNotFoundError):
            repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
                                          'blah1')
        assert not os.path.exists(self.dest_path)
        repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
                                      'master', self.commit)
        assert os.path.exists(self.dest_path)
        repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
                                      'master', self.commit)
        assert os.path.exists(self.dest_path)
        with raises(BranchNotFoundError):
            repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
                                          'blah2')
        # The failed call must remove the previously good clone too.
        assert not os.path.exists(self.dest_path)
        repo_utils.enforce_repo_state(self.repo_url, self.dest_path,
                                      'master', self.commit)
        assert os.path.exists(self.dest_path)
    def test_enforce_invalid_branch(self):
        """Branch names with whitespace are rejected outright."""
        with raises(ValueError):
            repo_utils.enforce_repo_state(self.repo_url, self.dest_path, 'a b', self.commit)
    def test_simultaneous_access(self):
        """Five parallel enforce calls on one clone must all succeed."""
        count = 5
        with parallel.parallel() as p:
            for i in range(count):
                p.spawn(repo_utils.enforce_repo_state, self.repo_url,
                        self.dest_path, 'master', self.commit)
            for result in p:
                assert result is None
def test_simultaneous_access_different_branches(self):
branches = [('master', self.commit), ('master', self.commit), ('nobranch', 'nocommit'),
('nobranch', 'nocommit'), ('master', self.commit), ('nobranch', 'nocommit')]
with parallel.parallel() as p:
for branch, commit in branches:
if branch == 'master':
p.spawn(repo_utils.enforce_repo_state, self.repo_url,
self.dest_path, branch, commit)
else:
dest_path = self.dest_path + '_' + branch
def func():
repo_utils.enforce_repo_state(
self.repo_url, dest_path,
branch, commit)
p.spawn(
raises,
BranchNotFoundError,
func,
)
for result in p:
|
#!/usr/bin/env python
import os

import django

# Base paths
DJANGO_ROOT = os.path.dirname(os.path.realpath(django.__file__))
SITE_ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))

# Debugging
DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    # ('Your Name', 'your_email@domain.com'),
)

MANAGERS = ADMINS

# SECURITY NOTE(review): the database password is committed to the repo;
# it should be moved to local_settings.py (see the import at the bottom)
# and rotated.
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        'NAME': 'hacktyler_crime',
        'USER': 'hacktyler_crime',
        'PASSWORD': 'qw8ndyHprt',
    }
}

# Localization
TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
USE_I18N = True
USE_L10N = True

# Media
STATIC_ROOT = os.path.join(SITE_ROOT, 'media')
STATIC_URL = '/site_media/'
ADMIN_MEDIA_PREFIX = '/site_media/admin/'

STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    'compressor.finders.CompressorFinder',
)

# Uploads
MEDIA_ROOT = '/tmp/sirens'

# Make this unique, and don't share it with anybody.
# SECURITY NOTE(review): a committed SECRET_KEY should be overridden in
# local_settings.py for real deployments.
SECRET_KEY = '+ei7-2)76sh$$dy^5h4zmkglw#ey1d3f0cj^$r+3zo!wq9j+_*'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    'django.template.loaders.eggs.Loader',
)

TEMPLATE_CONTEXT_PROCESSORS = (
    'django.core.context_processors.media',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
)

ROOT_URLCONF = 'config.urls'

# BUG FIX: the trailing comma is required. Without it the parentheses are
# just grouping and TEMPLATE_DIRS is a plain string, which Django would
# iterate character by character instead of as a tuple of directories.
TEMPLATE_DIRS = (
    os.path.join(SITE_ROOT, 'templates'),
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.humanize',
    'django.contrib.staticfiles',
    'django.contrib.gis',
    'compressor',
    'activecalls',
    'sirens'
)

# Email
# run "python -m smtpd -n -c DebuggingServer localhost:1025" to see outgoing
# messages dumped to the terminal
EMAIL_HOST = 'localhost'
EMAIL_PORT = 1025
DEFAULT_FROM_EMAIL = 'do.not.reply@crime.hacktyler.com'

# Django-compressor
COMPRESS_ENABLED = False

# Caching
CACHE_MIDDLEWARE_KEY_PREFIX = 'hacktyler_crime'
CACHE_MIDDLEWARE_SECONDS = 90 * 60  # 90 minutes

CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
    }
}

# Logging
LOGGING = {
    'version': 1,
    'disable_existing_loggers': True,
    'formatters': {
        'standard': {
            'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
        },
    },
    'handlers': {
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'standard'
        },
        'default': {
            'level': 'INFO',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': '/var/log/sites/hacktyler_crime/hacktyler_crime.log',
            'maxBytes': 1024 * 1024 * 5,  # 5 MB
            'backupCount': 5,
            'formatter': 'standard',
        },
        'request_handler': {
            'level': 'INFO',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': '/var/log/sites/hacktyler_crime/requests.log',
            'maxBytes': 1024 * 1024 * 5,  # 5 MB
            'backupCount': 5,
            'formatter': 'standard',
        },
        'backend_handler': {
            'level': 'DEBUG',
            'class': 'django.utils.log.NullHandler',
        },
    },
    'loggers': {
        '': {
            'handlers': ['default', 'console'],
            'level': 'DEBUG',
            'propagate': True
        },
        'django.request': {
            'handlers': ['request_handler', 'console'],
            'level': 'DEBUG',
            'propagate': False
        },
        'django.db': {
            'handlers': ['backend_handler'],
            'level': 'DEBUG',
            'propagate': False
        },
        # BUG FIX: the next two loggers spelled the key 'propogate', which
        # logging.config.dictConfig silently ignores. They therefore kept the
        # default propagate=True and duplicated output via the root logger.
        'requests.packages.urllib3.connectionpool': {
            'handlers': ['console'],
            'level': 'ERROR',
            'propagate': False
        },
        'geopy': {
            'handlers': ['console'],
            'level': 'INFO',
            'propagate': False
        }
    }
}

# Pusher
PUSHER_APP_ID = '11732'
PUSHER_KEY = 'd20fddb74c58823cd05d'
PUSHER_SECRET = None  # must be in local_settings.py
PUSHER_CHANNEL = 'active-calls-test'

# Mapquest
MAPQUEST_API_KEY = None  # must be in local_settings.py

# App
DEFAULT_HOURS_DISPLAYED = 4

# Allow for local (per-user) override
try:
    from local_settings import *
except ImportError:
    pass
|
# This is an example of popping a packet from the Emotiv class's packet queue
# and printing the gyro x and y values to the console.
from emokit.emotiv import Emotiv
import platform
if platform.system() == "Windows":
import socket # Needed to prevent gevent crashing on Windows. (surfly / gevent issue #459)
import gevent
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
# Module-wide flag: counter() keeps feeding FuncAnimation while this is True.
is_running = True
def evt_main(ring_buf):
headset = Emotiv()
gevent.spawn(headset.setup)
gevent.sleep(0)
pos = 0
try:
while True:
packet = headset.dequeue()
print packet.gyro_x, packet.gyro_y
ring_buf[pos] = packet.gyro_x
if pos % 4 == 0:
yield ring_buf
pos = (pos + 1) % 1024
gevent.sleep(0)
except KeyboardInterrupt:
headset.close()
finally:
is_running = False
headset.close()
# Static x axis (sample index 0..1023) and an all-zero initial trace.
x = np.linspace(0, 1023, 1024)
test_buf = np.zeros(1024)
fig, ax = plt.subplots()
line, = ax.plot(x, test_buf)
# Fix the view: full buffer width, gyro range assumed to fit in +/-100.
plt.axis([0, 1024, -100, 100])
def evt_wrapper():
    """Return a zero-argument callable that builds the gyro frame generator.

    FuncAnimation calls the returned function each time it needs a fresh
    frames iterator over the shared test_buf ring buffer.
    """
    def make_frames():
        return evt_main(test_buf)
    return make_frames
def init():
    # Blank the trace before the first frame: fully-masked data draws nothing.
    line.set_ydata(np.ma.array(x, mask=True))
    return line,
def animate(rb):
    # Called by FuncAnimation with the ring buffer yielded by evt_main;
    # redraw the line with the latest samples.
    print "Animation!"
    print rb
    line.set_ydata(rb)
    return line,
def counter():
    """Yield 0, 1, 2, ... for as long as the headset loop is running."""
    tick = 0
    while is_running:
        yield tick
        tick += 1
# Animate with frames pulled from the headset generator; redraw every 20 ms.
ani = animation.FuncAnimation(fig, animate, evt_wrapper(), init_func=init, interval=20, blit=True)
plt.show()
# gevent.Greenlet.spawn(evt_main, test_buf)
# Keep yielding to gevent after the window closes so greenlets can run.
while True:
    gevent.sleep(0)
|
import pybedtools
a = pybedtools.example_bed | tool('a.bed')
b = pybedtools.example_bedtool('b.bed')
print "cat a.bed\n" + str(a)
print "cat b.bed\n" | + str(b)
print a.intersect(b)
|
#
# Copyright (C) 2013-2014 Emerson Max de Medeiros Silva
#
# This file is part of ippl.
#
# i | ppl is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later v | ersion.
#
# ippl is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ippl. If not, see <http://www.gnu.org/licenses/>.
#
class Chromosome(object):
    """A genetic-algorithm individual: an ordered gene list plus its fitness."""

    def __init__(self):
        super(Chromosome, self).__init__()
        # Fresh individuals start empty and unevaluated (fitness 0.0);
        # each instance gets its own gene list.
        self.genes, self.fitness = [], 0.0
|
"""
Tests for the InstructorService
"""
import json
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from courseware.models import StudentModule
from instructor.services import InstructorService
from instructor.tests.test_tools import msk_from_problem_urlname
from nose.plugins.attrib import attr
from student.models import CourseEnrollment
from student.tests.factories import UserFactory
@attr('shard_1')
class InstructorServiceTests(ModuleStoreTestCase):
    """
    Tests for the InstructorService
    """
    def setUp(self):
        super(InstructorServiceTests, self).setUp()
        self.course = CourseFactory.create()
        self.student = UserFactory()
        CourseEnrollment.enroll(self.student, self.course.id)
        # Two problem keys: the first gets recorded state below
        # (module_to_reset); the second never has state, for negative tests.
        self.problem_location = msk_from_problem_urlname(
            self.course.id,
            'robot-some-problem-urlname'
        )
        self.other_problem_location = msk_from_problem_urlname(
            self.course.id,
            'robot-some-other_problem-urlname'
        )
        self.problem_urlname = unicode(self.problem_location)
        self.other_problem_urlname = unicode(self.other_problem_location)
        self.service = InstructorService()
        # Pre-existing student state (2 attempts) that the service can delete.
        self.module_to_reset = StudentModule.objects.create(
            student=self.student,
            course_id=self.course.id,
            module_state_key=self.problem_location,
            state=json.dumps({'attempts': 2}),
        )
    def test_reset_student_attempts_delete(self):
        """
        Test delete student state.
        """
        # make sure the attempt is there
        self.assertEqual(
            StudentModule.objects.filter(
                student=self.module_to_reset.student,
                course_id=self.course.id,
                module_state_key=self.module_to_reset.module_state_key,
            ).count(),
            1
        )
        self.service.delete_student_attempt(
            self.student.username,
            unicode(self.course.id),
            self.problem_urlname
        )
        # make sure the module has been deleted
        self.assertEqual(
            StudentModule.objects.filter(
                student=self.module_to_reset.student,
                course_id=self.course.id,
                module_state_key=self.module_to_reset.module_state_key,
            ).count(),
            0
        )
    def test_reset_bad_content_id(self):
        """
        Negative test of trying to reset attempts with bad content_id
        """
        # The service swallows the bad usage key and just returns None.
        result = self.service.delete_student_attempt(
            self.student.username,
            unicode(self.course.id),
            'foo/bar/baz'
        )
        self.assertIsNone(result)
    def test_reset_bad_user(self):
        """
        Negative test of trying to reset attempts with bad user identifier
        """
        result = self.service.delete_student_attempt(
            'bad_student',
            unicode(self.course.id),
            'foo/bar/baz'
        )
        self.assertIsNone(result)
    def test_reset_non_existing_attempt(self):
        """
        Negative test of trying to reset a non-existing attempt
        """
        # Valid user and course, but no recorded state for this problem.
        result = self.service.delete_student_attempt(
            self.student.username,
            unicode(self.course.id),
            self.other_problem_urlname
        )
        self.assertIsNone(result)
|
# Copyright (c) 2014 Yubico AB
# All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU | General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Additional permission under GNU GPL version 3 section 7
#
# If you modify this program, or any covered work, by linking or
# combining it with the OpenSSL project's OpenSSL library (or a
# modified version of that library), containing parts covered by the
# terms of the OpenSSL or SSLeay licenses, We | grant you additional
# permission to convey the resulting work. Corresponding Source for a
# non-source form of such a combination shall include the source code
# for the parts of OpenSSL used as well as that of the covered work.
from __future__ import print_function
from .utils import time_challenge, parse_full, format_code
from .standard import TYPE_TOTP
from .exc import InvalidSlotError, NeedsTouchError
from yubioath.yubicommon.ctypes import CLibrary
from hashlib import sha1
from ctypes import (Structure, POINTER, c_int, c_uint8, c_uint, c_char_p,
c_bool, sizeof, create_string_buffer, cast, addressof)
import weakref
# Slot index -> challenge-response command byte; index 0 is unused (-1).
# NOTE(review): 0x30/0x38 presumably correspond to the slot-1/slot-2
# HMAC challenge commands -- confirm against ykdef.h before changing.
SLOTS = [
    -1,
    0x30,
    0x38
]
# Opaque handle for an open YubiKey; the fields live on the C side only.
YK_KEY = type('YK_KEY', (Structure,), {})
# Programming
SLOT_CONFIG = 1
SLOT_CONFIG2 = 3
CONFIG1_VALID = 1
CONFIG2_VALID = 2
# More opaque ykpers structs, allocated/freed exclusively via the library.
YKP_CONFIG = type('YKP_CONFIG', (Structure,), {})
YK_CONFIG = type('YK_CONFIG', (Structure,), {})
YK_STATUS = type('YK_STATUS', (Structure,), {})
class YkPers(CLibrary):
    """ctypes binding table for libykpers-1.
    Each class attribute is an ([argtypes], restype) pair which the
    CLibrary base appears to resolve into a callable foreign function --
    confirm against yubioath.yubicommon.ctypes.
    """
    _yk_errno_location = [], POINTER(c_int)
    yk_init = [], bool
    yk_release = [], bool
    ykpers_check_version = [c_char_p], c_char_p
    yk_open_first_key = [], POINTER(YK_KEY)
    yk_close_key = [POINTER(YK_KEY)], bool
    yk_challenge_response = [POINTER(YK_KEY), c_uint8, c_int, c_uint, c_char_p,
                             c_uint, c_char_p], bool
    ykds_alloc = [], POINTER(YK_STATUS)
    ykds_free = [POINTER(YK_STATUS)], None
    ykds_touch_level = [POINTER(YK_STATUS)], c_int
    yk_get_status = [POINTER(YK_KEY), POINTER(YK_STATUS)], c_int
    ykp_alloc = [], POINTER(YKP_CONFIG)
    ykp_free_config = [POINTER(YKP_CONFIG)], bool
    ykp_configure_version = [POINTER(YKP_CONFIG), POINTER(YK_STATUS)], None
    ykp_HMAC_key_from_raw = [POINTER(YKP_CONFIG), c_char_p], bool
    ykp_set_tktflag_CHAL_RESP = [POINTER(YKP_CONFIG), c_bool], bool
    ykp_set_cfgflag_CHAL_HMAC = [POINTER(YKP_CONFIG), c_bool], bool
    ykp_set_cfgflag_HMAC_LT64 = [POINTER(YKP_CONFIG), c_bool], bool
    ykp_set_extflag_SERIAL_API_VISIBLE = [POINTER(YKP_CONFIG), c_bool], bool
    ykp_set_extflag_ALLOW_UPDATE = [POINTER(YKP_CONFIG), c_bool], bool
    ykp_set_cfgflag_CHAL_BTN_TRIG = [POINTER(YKP_CONFIG), c_bool], bool
    ykp_core_config = [POINTER(YKP_CONFIG)], POINTER(YK_CONFIG)
    yk_write_command = [POINTER(YK_KEY), POINTER(YK_CONFIG), c_uint8, c_char_p
                        ], bool
    def yk_get_errno(self):
        # Dereference the library's errno location to get the current code.
        return self._yk_errno_location().contents.value
# Load libykpers-1 and initialise it once at import time.
ykpers = YkPers('ykpers-1', '1')
# Library errno values; calculate() maps YK_EWOULDBLOCK to NeedsTouchError.
YK_ETIMEOUT = 0x04
YK_EWOULDBLOCK = 0x0b
if not ykpers.yk_init():
    raise Exception("Unable to initialize ykpers")
ykpers_version = ykpers.ykpers_check_version(None).decode('ascii')
class LegacyOathOtp(object):
    """
    OTP interface to a legacy OATH-enabled YubiKey.
    """
    def __init__(self, device):
        # `device` is a YK_KEY pointer from open_otp().
        self._device = device
    def slot_status(self):
        # Read the touch-level bitfield and report which of the two OTP
        # slots hold a valid configuration.
        st = ykpers.ykds_alloc()
        ykpers.yk_get_status(self._device, st)
        tl = ykpers.ykds_touch_level(st)
        ykpers.ykds_free(st)
        return (
            bool(tl & CONFIG1_VALID == CONFIG1_VALID),
            bool(tl & CONFIG2_VALID == CONFIG2_VALID)
        )
    def calculate(self, slot, digits=6, timestamp=None, mayblock=0):
        # Send a time-based challenge to the slot and format the HMAC
        # response as a `digits`-long OATH code.
        challenge = time_challenge(timestamp)
        resp = create_string_buffer(64)
        status = ykpers.yk_challenge_response(
            self._device, SLOTS[slot], mayblock, len(challenge), challenge,
            sizeof(resp), resp)
        if not status:
            errno = ykpers.yk_get_errno()
            # EWOULDBLOCK: the slot wants a button press first.
            if errno == YK_EWOULDBLOCK:
                raise NeedsTouchError()
            raise InvalidSlotError()
        return format_code(parse_full(resp.raw[:20]), digits)
    def put(self, slot, key, require_touch=False):
        # Program `key` into the given slot as an HMAC-SHA1
        # challenge-response configuration.
        if len(key) > 64: # Keys longer than 64 bytes are hashed, as per HMAC.
            key = sha1(key).digest()
        if len(key) > 20:
            raise ValueError('YubiKey slots cannot handle keys over 20 bytes')
        slot = SLOT_CONFIG if slot == 1 else SLOT_CONFIG2
        key += b'\x00' * (20 - len(key)) # Keys must be padded to 20 bytes.
        st = ykpers.ykds_alloc()
        ykpers.yk_get_status(self._device, st)
        cfg = ykpers.ykp_alloc()
        # Target the config format matching the key's firmware version.
        ykpers.ykp_configure_version(cfg, st)
        ykpers.ykds_free(st)
        ykpers.ykp_set_tktflag_CHAL_RESP(cfg, True)
        ykpers.ykp_set_cfgflag_CHAL_HMAC(cfg, True)
        ykpers.ykp_set_cfgflag_HMAC_LT64(cfg, True)
        ykpers.ykp_set_extflag_SERIAL_API_VISIBLE(cfg, True)
        ykpers.ykp_set_extflag_ALLOW_UPDATE(cfg, True)
        if require_touch:
            ykpers.ykp_set_cfgflag_CHAL_BTN_TRIG(cfg, True)
        # Nonzero return here indicates failure, unlike the bool calls above.
        if ykpers.ykp_HMAC_key_from_raw(cfg, key):
            raise ValueError("Error setting the key")
        ycfg = ykpers.ykp_core_config(cfg)
        try:
            if not ykpers.yk_write_command(self._device, ycfg, slot, None):
                raise ValueError("Error writing configuration to key")
        finally:
            ykpers.ykp_free_config(cfg)
    def delete(self, slot):
        # Writing a NULL config erases the slot.
        slot = SLOT_CONFIG if slot == 1 else SLOT_CONFIG2
        if not ykpers.yk_write_command(self._device, None, slot, None):
            raise ValueError("Error writing configuration to key")
class LegacyCredential(object):
    """A TOTP credential stored in one of the YubiKey's two OTP slots.

    `touch` is tri-state: None = unknown, True = the slot requires a
    button press, False = known not to require touch.
    """

    def __init__(self, legacy, slot, digits=6):
        self.name = 'YubiKey slot %d' % slot
        self.oath_type = TYPE_TOTP
        self.touch = None  # Touch is unknown
        self._legacy = legacy
        self._slot = slot
        self._digits = digits

    def calculate(self, timestamp=None):
        """Return the OATH code for this slot, learning `touch` as we go."""
        try:
            code = self._legacy.calculate(self._slot, self._digits, timestamp,
                                          1 if self.touch else 0)
        except NeedsTouchError:
            self.touch = True
            raise
        # BUG FIX: the original `return` inside the try-block skipped its
        # else-clause entirely, so `touch` was never downgraded from None
        # to False after a successful, touchless calculation.
        if self.touch is None:
            self.touch = False
        return code

    def delete(self):
        """Erase the slot backing this credential."""
        self._legacy.delete(self._slot)

    def __repr__(self):
        return self.name
# Keep track of YK_KEY references.
# Weakrefs with a callback guarantee yk_close_key runs once the Python-side
# handle is garbage collected.
_refs = []
def open_otp():
    # Open the first attached YubiKey; returns a YK_KEY pointer or None.
    key = ykpers.yk_open_first_key()
    if key:
        # Alias the pointer so the close callback does not keep `key` alive
        # (which would prevent the weakref from ever firing).
        key_p = cast(addressof(key.contents), POINTER(YK_KEY))
        def cb(ref):
            _refs.remove(ref)
            ykpers.yk_close_key(key_p)
        _refs.append(weakref.ref(key, cb))
        return key
    return None
|
import Fast5File
def run(parser, args):
if args.read:
for i, fast5 in enumerate(Fast5File.Fast5FileSet(args.files)):
for metadata_dict in fast5.read_metadata:
if i == 0:
header = metadata_dict.keys()
print "\t".join(["filename"] + header)
print "\t".join([fast5.filename] + [str( metadata_dict[k] ) for k in header])
else:
print "asic_id\tasic_temp\theatsink_temp"
for fast | 5 in Fast5File.Fast5FileSet(args.files):
asic_temp = fast5.get_asic_temp()
asic_id = fast5.get_asic_id()
heatsink_temp = fast5.get_heatsink_temp()
pr | int "%s\t%s\t%s" % (asic_id, asic_temp, heatsink_temp)
fast5.close()
|
# -*- coding: utf-8 -*-
import time
import requests
from datetime import datetime
from logging import getLogger
from typing import Optional
from typing import Dict
from typing import Iterable
from funcy import compose
from funcy import partial
from pandas import DataFrame
from pandas import to_datetime
from pandas import Series
from pyloniex import PoloniexPublicAPI
from moneybot.clients import Postgres
from moneybot.clients import Poloniex
# Default look-back window for chart scraping: one year, in seconds.
YEAR_IN_SECS = 60 * 60 * 24 * 365
logger = getLogger(__name__)
def format_time(ts: datetime) -> str:
    """Render *ts* as 'YYYY-MM-DD HH:MM:SS' (second precision, no tz)."""
    fmt = '%Y-%m-%d %H:%M:%S'
    return ts.strftime(fmt)
def historical(ticker: str) -> Dict:
    """Fetch the full CoinMarketCap graph history for *ticker* as JSON."""
    response = requests.get(
        f'https://graphs.coinmarketcap.com/currencies/{ticker}'
    )
    return response.json()
def market_cap(hist_ticker: Dict) -> DataFrame:
    """Convert a CoinMarketCap history dict into a time-indexed DataFrame.

    Each value in *hist_ticker* is a list of ``[timestamp_ms, value]``
    pairs; the timestamps of the first series become the shared index
    (CMC emits aligned series).

    BUG FIX: the return annotation said ``Series`` but a ``DataFrame``
    has always been returned.
    """
    columns = {}
    index = None
    for key, vals in hist_ticker.items():
        if index is None:
            # Timestamps arrive in milliseconds; to_datetime expects ns.
            index = [to_datetime(t[0] * 1000000) for t in vals]
        columns[key] = [t[1] for t in vals]
    return DataFrame(columns, index=index)
# historical() then market_cap(): ticker name -> time-indexed DataFrame.
coin_history = compose(market_cap, historical)
def marshall(hist_df):
    """Reshape CoinMarketCap USD/BTC history into scraped_chart columns.

    Expects columns price_usd, price_btc, volume_usd and
    market_cap_by_available_supply; returns a frame whose columns mirror
    Poloniex chart rows (fields CMC cannot provide are None).
    """
    usd_per_btc = hist_df['price_usd'] / hist_df['price_btc']
    # volume in BTC
    # TODO is this correct? or is `'volume'` the quote volume?
    hist_df['volume'] = hist_df['volume_usd'] / usd_per_btc
    hist_df = hist_df.drop(
        ['market_cap_by_available_supply', 'volume_usd'],
        axis=1,
    )
    hist_df['weighted_average'] = hist_df['price_usd']
    hist_df['time'] = hist_df.index
    hist_df['currency_pair'] = hist_df.apply(lambda x: 'USD_BTC', axis=1)

    def _none_column():
        # Placeholder column of None values, index-aligned with the frame.
        return hist_df.apply(lambda x: None, axis=1)

    for missing in ('open', 'high', 'low', 'close', 'quote_volume'):
        hist_df[missing] = _none_column()
    return hist_df
def historical_prices_of(
    polo: PoloniexPublicAPI,
    btc_price_history: Series,
    pair: str,
    period: int = 900,
    start: Optional[float] = None,
    end: Optional[float] = None,
) -> Iterable[Series]:
    '''
    Returns a series of time-indexed prices.
    `pair` is of the form e.g. 'BTC_ETH',
    `period` is an integer number of seconds,
    either 300, 900, 1800, 7200, 14400, or 86400.
    We do some data marshalling in this method as well,
    to turn API results into stuff amenable for our Postgres DB.
    '''
    def contemporary_usd_price(row: Series) -> float:
        # Convert the pair's BTC-denominated average into USD using the
        # USD_BTC price in effect at the row's timestamp (asof lookup).
        contemporary_btc_price = btc_price_history['price_usd'].asof(row.name)
        return row['weightedAverage'] * contemporary_btc_price
    # Scraping
    # Default window: the last YEAR_IN_SECS up to now.
    now = time.time()
    start = start or now - YEAR_IN_SECS
    end = end or now
    ex_trades = polo.return_chart_data(
        currency_pair=pair,
        period=period,
        start=start,
        end=end,
    )
    # Data marshalling
    ts_df = DataFrame(ex_trades, dtype=float)
    ts_df['time'] = [datetime.fromtimestamp(t) for t in ts_df['date']]
    ts_df.index = ts_df['time']
    ts_df['price_usd'] = ts_df.apply(contemporary_usd_price, axis=1)
    ts_df['currency_pair'] = ts_df.apply(lambda x: pair, axis=1)
    # Rename camelCase API fields to the snake_case DB column names.
    ts_df = ts_df.rename(index=str, columns={
        'quoteVolume': 'quote_volume',
        'weightedAverage': 'weighted_average',
    })
    for _, row in ts_df.iterrows():
        # chart = scraped_chart(pair, row)
        # for some reason, when there's no chart data to report,
        # the API will give us some reading with all 0s.
        if row['volume'] == 0 and row['weighted_average'] == 0:
            # we will just ignore these
            pass
        else:
            yield row
def insert(cursor, row):
    """Insert one chart row (a pandas Series keyed by column name)."""
    # Parameter binding via a dict keeps this safe from SQL injection.
    params = row.to_dict()
    return cursor.execute("""
    INSERT INTO scraped_chart (time, currency_pair, high, low, price_usd, quote_volume, volume, weighted_average)
    VALUES (%(time)s, %(currency_pair)s, %(high)s, %(low)s, %(price_usd)s, %(quote_volume)s, %(volume)s, %(weighted_average)s);""",
                          params)
def scrape_since_last_reading():
    """Top up the scraped_chart table with everything since its newest row."""
    # postgres client
    client = Postgres.get_client()
    cursor = client.cursor()
    inserter = partial(insert, cursor)
    # get the last time we fetched some data,
    # looking at the most recent result in the db
    query = ' '.join([
        'select time from scraped_chart',
        'order by time desc',
        'limit 1',
    ])
    cursor.execute(query)
    # NOTE(review): fetchone() is None on an empty table and this would
    # raise TypeError -- assumes the table is already seeded.
    latest_fetch_time = cursor.fetchone()[0]
    latest_fetch_unix = time.mktime(latest_fetch_time.timetuple())
    # now get USD_BTC history
    btc_price_hist = coin_history('bitcoin')
    # and write that history to DB,
    btc_rows = marshall(btc_price_hist)
    # NOTE since latest fetch time?
    # recent_btc = btc_rows[btc_rows['time'] > latest_fetch_time]
    # [inserter(row) for _, row in recent_btc.iterrows()]
    [inserter(row) for _, row in btc_rows.iterrows()]
    client.commit()
    logger.debug('Scraped USD_BTC')
    # now, a poloniex client
    polo = Poloniex.get_public()
    # and a method for grabbing historical prices
    grab_historical_prices = partial(historical_prices_of, polo, btc_price_hist)
    # for each market,
    for market in polo.return_ticker():
        # fetch all the chart data since last fetch
        generator = grab_historical_prices(
            market,
            start=latest_fetch_unix,
            end=time.time(),
        )
        # Drain the generator through insert(); commit per market.
        list(map(inserter, generator))
        client.commit()
        logger.debug(f'Scraped {market}')
    cursor.close()
|
#!/usr/bin/python
from ABE_ADCDACPi import ADCDACPi
import time
import math
"""
================================================
ABElectronics ADCDAC Pi 2-Channel ADC, 2-Channel DAC | DAC sine wave generator demo
Version 1.0 Created 17/05/2014
Version 1.1 16/11/2014 updated code and functions to PEP8 format
run with: python demo-dacsinewave.py
================================================
# this demo uses the set_dac_raw method to generate a sine wave from a
# predefined set of values
"""
# Gain 1 selects the 0-2.048 V DAC output range on the ADCDAC Pi board.
adcdac = ADCDACPi(1) # create an instance of the ADCDAC Pi with a DAC gain set to 1
DACLookup_FullSine_12Bit = \
[2048, 2073, 2098, 2123, 2148, 2174, 2199, 2224,
2249, 2274, 2299, 2324, 2349, 2373, 2398, 2423,
2448, 2472, 2497, 2521, 2546, 2570, 2594, 2618,
2643, 2667, 2690, 2714, 2738, 2762, 2785, 2808,
2832, 2855, 2878, 2901, 2924, 2946, 2969, 2991,
3013, 3036, 3057, 3079, 3101, 3122, 3144, 3165,
3186, 3207, 3227, 3248, 3268, 3288, 3308, 3328,
3347, 3367, 3386, 3405, 3423, 3442, 3460, 3478,
3496, 3514, 3531, 3548, 3565, 3582, 3599, 3615,
3631, 3647, 3663, 3678, 3693, 3708, 3722, 3737,
3751, 3765, 3778, 3792, 3805, 3817, 3830, 3842,
3854, 3866, 3877, 3888, 3899, 3910, 3920, 3930,
3940, 3950, 3959, 3968, 3976, 3985, 3993, 4000,
4008, 4015, 4022, 4028, 4035, 4041, 4046, 4052,
4057, 4061, 4066, 4070, 4074, 4077, 4081, 4084,
4086, 4088, 4090, 4092, 4094, 4095, 4095, 4095,
4095, 4095, 4095, 4095, 4094, 4092, 4090, 4088,
4086, 4084, 4081, 4077, 4074, 4070, 4066, 4061,
4057, 4052, 4046, 4041, 4035, 4028, 4022, 4015,
4008, 4000, 3993, 3985, 3976, 3968, 3959, 3950,
3940, 3930, 3920, 3910, 3899, 3888, 3877, 3866,
3854, 3842, 3830, 3817, 3805, 3792, 3778, 3765,
3751, 3737, 3722, 3708, 3693, 3678, 3663, 3647,
3631, 3615, 3599, 358 | 2, 3565, 35 | 48, 3531, 3514,
3496, 3478, 3460, 3442, 3423, 3405, 3386, 3367,
3347, 3328, 3308, 3288, 3268, 3248, 3227, 3207,
3186, 3165, 3144, 3122, 3101, 3079, 3057, 3036,
3013, 2991, 2969, 2946, 2924, 2901, 2878, 2855,
2832, 2808, 2785, 2762, 2738, 2714, 2690, 2667,
2643, 2618, 2594, 2570, 2546, 2521, 2497, 2472,
2448, 2423, 2398, 2373, 2349, 2324, 2299, 2274,
2249, 2224, 2199, 2174, 2148, 2123, 2098, 2073,
2048, 2023, 1998, 1973, 1948, 1922, 1897, 1872,
1847, 1822, 1797, 1772, 1747, 1723, 1698, 1673,
1648, 1624, 1599, 1575, 1550, 1526, 1502, 1478,
1453, 1429, 1406, 1382, 1358, 1334, 1311, 1288,
1264, 1241, 1218, 1195, 1172, 1150, 1127, 1105,
1083, 1060, 1039, 1017, 995, 974, 952, 931,
910, 889, 869, 848, 828, 808, 788, 768,
749, 729, 710, 691, 673, 654, 636, 618,
600, 582, 565, 548, 531, 514, 497, 481,
465, 449, 433, 418, 403, 388, 374, 359,
345, 331, 318, 304, 291, 279, 266, 254,
242, 230, 219, 208, 197, 186, 176, 166,
156, 146, 137, 128, 120, 111, 103, 96,
88, 81, 74, 68, 61, 55, 50, 44,
39, 35, 30, 26, 22, 19, 15, 12,
10, 8, 6, 4, 2, 1, 1, 0,
0, 0, 1, 1, 2, 4, 6, 8,
10, 12, 15, 19, 22, 26, 30, 35,
39, 44, 50, 55, 61, 68, 74, 81,
88, 96, 103, 111, 120, 128, 137, 146,
156, 166, 176, 186, 197, 208, 219, 230,
242, 254, 266, 279, 291, 304, 318, 331,
345, 359, 374, 388, 403, 418, 433, 449,
465, 481, 497, 514, 531, 548, 565, 582,
600, 618, 636, 654, 673, 691, 710, 729,
749, 768, 788, 808, 828, 848, 869, 889,
910, 931, 952, 974, 995, 1017, 1039, 1060,
1083, 1105, 1127, 1150, 1172, 1195, 1218, 1241,
1264, 1288, 1311, 1334, 1358, 1382, 1406, 1429,
1453, 1478, 1502, 1526, 1550, 1575, 1599, 1624,
1648, 1673, 1698, 1723, 1747, 1772, 1797, 1822,
1847, 1872, 1897, 1922, 1948, 1973, 1998, 2023]
# Replay the precomputed 512-entry sine table forever on DAC channel 1.
while True:
    for sample in DACLookup_FullSine_12Bit:
        adcdac.set_dac_raw(1, sample)
|
# PRNG (Pseudo-Random Number Generator) Test
# PRNG info:
# http://en.wikipedia.org/wiki/Pseudorandom_number_generator
# FB - 201012046
# Compares output distribution of any given PRNG
# w/ an hypothetical True-Random Number Generator (TRNG)
import math
import time
# Seed the generator from the process clock.
# NOTE(review): time.clock() is Python 2 / <3.8 only (this is a Py2 script).
global x
x = time.clock() # seed for the PRNG
# PRNG to test
def prng():
    # Iterate the map x -> fmod((x + pi)^2, 1): a toy chaotic recurrence.
    # Returns a float in [0, 1) and advances the global state.
    global x
    x = math.fmod((x + math.pi) ** 2.0, 1.0)
    return x
# combination by the iterative multiplicative method (exact integers).
# Replaces the recursive definition, which made O(C(n, k)) calls per query.
def c(n, k):
    """Binomial coefficient C(n, k); 1 when k == 0, 0 when k > n."""
    if k == 0:
        # Matches the old recursion: C(n, 0) is 1 for any n.
        return 1
    if k < 0 or k > n:
        return 0
    result = 1
    for i in range(k):
        # result * (n - i) is always divisible by (i + 1): the running
        # value equals C(n, i + 1) after each step, so // stays exact.
        result = result * (n - i) // (i + 1)
    return result
### combination by multiplicative method
##def c_(n, k):
## mul = 1.0
## for i in range(k):
## mul = mul * (n - k + i + 1) / (i + 1)
## return mul
# MAIN
# Compare the distribution of zero-bit counts produced by the PRNG with
# the binomial counts C(n, k) expected from a true random source.
n = 20 # number of bits in each trial
print 'Test in progress...'
print
cnk = [] # array to hold bit counts
for k in range(n + 1):
    cnk.append(0)
# generate 2**n n-bit pseudo-random numbers
for j in range(2 ** n):
    # generate n-bit pseudo-random number and count the 0's in it
    # num = ''
    ctr = 0
    for i in range(n):
        b = int(round(prng())) # generate 1 pseudo-random bit
        # num += str(b)
        if b == 0: ctr += 1
    # print num
    # increase bit count in the array
    cnk[ctr] += 1
print 'Number of bits in each pseudo-random number (n) =', n
print
print 'Comparison of "0" count distributions:'
print
print ' k', ' c(n,k)', ' actual', '%dif'
# Sum absolute deviations between expected and observed bucket counts.
difSum = 0
for k in range(n + 1):
    cnk_ = c(n, k)
    dif = abs(cnk_ - cnk[k])
    print '%2d %10d %10d %4d' % (k, cnk_, cnk[k], 100 * dif / cnk_)
    difSum += dif
print
print 'Difference percentage between the distributions:'
print 100 * difSum / (2 ** n)
|
"""Tests for the unused-variable message in assignment expressions"""
def typed_assignment_in_function_default( # [unused-variable]
    param: str = (typed_default := "walrus"), # [unused-variable]
) -> None:
    """An unused annotated assignment expression in a default parameter should emit unused-variable"""
    return param
def assignment_in_function_default( # [unused-variable]
    param=(default := "walrus"), # [unused-variable]
) -> None:
    """An unused assignment expression in a default parameter should emit unused-variable"""
    return param
def assignment_used_in_function_scope( # [unused-variable]
    param=(function_default := "walrus"),
) -> None:
    """A used assignment expression in a default parameter should not emit"""
    print(function_default)
    return param
def assignment_used_in_global_scope( # [unused-variable]
param=(global_default := "walrus"),
) -> None:
"""An used assignment expression in a default parameter should not emit"""
return param
print(global_default)
|
#!/usr/bin/env python
from iris_sdk.mode | ls.maps.base | _map import BaseMap
class SubscriptionsMap(BaseMap):
    """Field map exposing a single ``subscription`` entry (see BaseMap)."""
    # presumably populated during response deserialization — TODO confirm
    subscription = None
log.info('Registering data type "{}".'.format(tag))
    # --- Abstract interface: concrete types (Scalar, Array, ...) override ---
    @abstractproperty
    def skeleton(self):
        """ Reserved for future use """
        pass
    @abstractproperty
    def tag(self):
        """ A str which uniquely identifies this type and serves as its key in
            the Registry.typemap dict
        """
        pass
    @abstractproperty
    def value(self):
        """ This type's representation of the value contained within
            a dataset of the same type
        """
        pass
    @abstractmethod
    def validate(self):
        """ Raises (any) exception if the dataset's value argument
            cannot be correctly represented as this type
        """
        pass
    @abstractmethod
    def json(self):
        """ Reserved for future use """
        pass
    @abstractmethod
    def fromDataset(cls, value, metadata=None):
        """ Factory method which performs any required transformations
            on a dataset argument, invokes the type's ctor, and returns
            the resulting instance
        """
        # NOTE(review): concrete subclasses implement this as a @classmethod;
        # the abstract stub could be decorated the same way for clarity.
        pass
class Scalar(DataType):
    """Dataset type wrapping a single numeric value; rendered back to
    callers as a string via the ``value`` property."""
    tag = "SCALAR"
    name = "Scalar"
    skeleton = "[%f]"
    json = ScalarJSON()
    def __init__(self, value):
        """Store *value* and check it immediately."""
        super(Scalar, self).__init__()
        self.value = value
        self.validate()
    @classmethod
    def fromDataset(cls, value, metadata=None):
        """Alternate constructor: build a Scalar straight from a dataset value."""
        return cls(value)
    def validate(self):
        """Raise unless the stored value parses as a non-NaN float."""
        assert not math.isnan(float(self.value)) # Excludes NaN etc
    def get_value(self):
        """Return the wrapped value, stringified."""
        return str(self._value)
    def set_value(self, val):
        self._value = val
    value = property(get_value, set_value)
class Array(DataType):
    """Dataset type wrapping a JSON-encoded, non-empty sequence."""
    tag = "ARRAY"
    name = "Array"
    skeleton = "[%f, ...]"
    json = ArrayJSON()
    def __init__(self, encstr):
        """Store the JSON-encoded sequence *encstr* and validate it."""
        super(Array, self).__init__()
        self.value = encstr
        self.validate()
    @classmethod
    def fromDataset(cls, value, metadata=None):
        """Alternate constructor: build an Array straight from a dataset value."""
        return cls(value)
    def validate(self):
        """Raise unless the stored value decodes to a non-empty, indexable,
        iterable container that is not a plain string."""
        decoded = json.loads(self.value)
        assert len(decoded) > 0 # Sized
        assert iter(decoded) is not None # Iterable
        assert decoded.__getitem__ # Container
        assert not isinstance(decoded, six.string_types) # Exclude strs
    def get_value(self):
        """Return the raw JSON-encoded value."""
        return self._value
    def set_value(self, val):
        self._value = val
    value = property(get_value, set_value)
class Descriptor(DataType):
    # Free-form descriptor type: stores arbitrary text without validation.
    tag = "DESCRIPTOR"
    name = "Descriptor"
    skeleton = "%s"
    json = DescriptorJSON()
    def __init__(self, data):
        super(Descriptor, self).__init__()
        self.value = data
        self.validate()
    @classmethod
    def fromDataset(cls, value, metadata=None):
        """Build a Descriptor from a dataset value.

        When metadata marks the value as a 'hashtable', normalise it by
        round-tripping through a transposed pandas DataFrame; if that fails,
        wrap the decoded value under a single 0 index instead.
        """
        if metadata and metadata.get('data_type') == 'hashtable':
            try:
                df = pd.read_json(six.text_type(value))
                data = df.transpose().to_json()
            except Exception:
                # Not table-shaped JSON: wrap it under index 0 so the result
                # still has an {index: row} layout.
                noindexdata = json.loads(six.text_type(value))
                indexeddata = {0:noindexdata}
                data = json.dumps(indexeddata)
            return cls(data)
        else:
            return cls(six.text_type(value))
    def validate(self):
        # Descriptors accept any value; nothing to check.
        pass
    def get_value(self):
        return self._value
    def set_value(self, val):
        self._value = val
    value = property(get_value, set_value)
class Dataframe(DataType):
    # Tabular type backed by a pandas DataFrame; serialised to/from JSON.
    tag = "DATAFRAME"
    name = "Data Frame"
    skeleton = "%s"
    json = DataframeJSON()
    def __init__(self, data):
        super(Dataframe, self).__init__()
        self.value = data
        self.validate()
    @classmethod
    def fromDataset(cls, value, metadata=None):
        """Build a Dataframe from a JSON-encoded dataset value."""
        df = cls._create_dataframe(value)
        return cls(df)
    @classmethod
    def _create_dataframe(cls, value):
        """
        Builds a dataframe from the value
        """
        try:
            # OrderedDict preserves the column/index order stored in the DB;
            # a plain json.loads could lose it.
            ordered_jo = json.loads(six.text_type(value), object_pairs_hook=collections.OrderedDict)
            #Pandas does not maintain the order of dicts, so we must break the dict
            #up and put it into the dataframe manually to maintain the order.
            cols = list(ordered_jo.keys())
            if len(cols) == 0:
                raise ValueError("Dataframe has no columns")
            #Assume all sub-dicts have the same set of keys
            if isinstance(ordered_jo[cols[0]], list):
                index = range(len(ordered_jo[cols[0]]))
            else:
                #cater for when the indices are not the same by identifying
                #all the indices, and then making a set of them.
                longest_index = []
                for col in ordered_jo.keys():
                    index = list(ordered_jo[col].keys())
                    if len(index) > len(longest_index):
                        longest_index = index
                index = longest_index
            df = pd.read_json(value, convert_axes=False)
            #Make both indices the same type, so they can be compared
            df.index = df.index.astype(str)
            new_index = pd.Index(index).astype(str)
            #Now reindex the dataframe so that the index is in the correct order,
            #as per the data in the DB, and not with the default pandas ordering.
            new_df = df.reindex(new_index)
            #If the reindex didn't work, don't use that value
            # NOTE(review): keeps the reindexed frame unless it came back
            # entirely null — compares total null count to row count; confirm
            # this is the intended all-NaN test for multi-column frames.
            if new_df.isnull().sum().sum() != len(df.index):
                df = new_df
        except ValueError as e:
            """ Raised on scalar types used as pd.DataFrame values
                in absence of index arg
            """
            log.exception(e)
            raise HydraError(str(e))
        except AssertionError as e:
            log.warning("An error occurred creating the new data frame: %s. Defaulting to a simple read_json"%(e))
            df = pd.read_json(value).fillna(0)
        return df
    def validate(self):
        # Must be a non-empty pandas DataFrame.
        assert isinstance(self._value, pd.DataFrame)
        assert not self._value.empty
    def get_value(self):
        # Serialise back to JSON for callers.
        return self._value.to_json()
    def set_value(self, val):
        self._value = val
        try:
            """ Use validate test to confirm is pd.DataFrame... """
            self.validate()
        except AssertionError:
            """ ...otherwise attempt as json..."""
            try:
                df = self.__class__._create_dataframe(val)
                self._value = df
                self.validate()
            except Exception as e:
                """ ...and fail if neither """
                raise HydraError(str(e))
    value = property(get_value, set_value)
class Timeseries(DataType):
tag = "TIMESERIES"
name = "Time Series"
skeleton = "[%s, ...]"
json = TimeseriesJSON()
    def __init__(self, ts):
        """Store a timeseries value and validate it immediately."""
        super(Timeseries, self).__init__()
        self.value = ts
        self.validate()
    @classmethod
    def fromDataset(cls, value, metadata=None):
        # Order-preserving decode so the series keep their stored order.
        ordered_jo = json.loads(six.text_type(value), object_pairs_hook=collections.OrderedDict)
        ts = pd.DataFrame.from_dict(ordered_jo)
        return cls(ts)
    def validate(self):
        """Raise unless every date key in the stored JSON parses to a pandas
        Timestamp.  Seasonal '9999' placeholder years are mapped to the
        earliest pandas-representable year first."""
        base_ts = pd.Timestamp("01-01-1970")
        #TODO: We need a more permanent solution to seasonal/repeating timeseries
        seasonal_year = config.get('DEFAULT','seasonal_year', '1678')
        seasonal_key = config.get('DEFAULT', 'seasonal_key', '9999')
        jd = json.loads(self.value, object_pairs_hook=collections.OrderedDict)
        for k,v in jd.items():
            for date in (six.text_type(d) for d in v.keys()):
                #A date with '9999' in it is a special case, but is an invalid year
                #for pandas, so replace it with the first year allowed by pandas -- 1678
                if date.find(seasonal_key) >= 0:
                    date = date.replace(seasonal_key, seasonal_year)
                ts = pd.Timestamp(date)
                assert isinstance(ts, base_ts.__class__) # Same type as known valid ts
def get_value(self):
return self._value.to_json(date_fo |
"""
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from __future__ import unicode_literals
import json
import os
from osbs.api import OSBS
from osbs.conf import Configuration
from atomic_reactor.plugin import PostBuildPlugin
from atomic_reactor.util import ImageName
class ImportImagePlugin(PostBuildPlugin):
    """
    Import image tags from external docker registry into OpenShift.
    """
    key = "import_image"
    can_fail = False
    def __init__(self, tasker, workflow, url, verify_ssl=True, use_auth=True):
        """
        constructor
        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param url: str, URL to OSv3 instance
        :param verify_ssl: bool, verify SSL certificate?
        :param use_auth: bool, initiate authentication with openshift?
        """
        super(ImportImagePlugin, self).__init__(tasker, workflow)
        self.url = url
        self.verify_ssl = verify_ssl
        self.use_auth = use_auth
    def run(self):
        """Ask the OSv3 instance to import tags for this build's imagestream.

        Reads the Build object from the $BUILD environment variable and
        requires an 'imagestream' label on it; re-raises KeyError when
        either is missing.
        """
        try:
            build_spec = json.loads(os.environ["BUILD"])
        except KeyError:
            self.log.error("No $BUILD env variable. "
                           "Probably not running in build container.")
            raise
        conf = Configuration(conf_file=None, openshift_uri=self.url,
                             use_auth=self.use_auth,
                             verify_ssl=self.verify_ssl)
        osbs = OSBS(conf, conf)
        metadata = build_spec.get("metadata", {})
        kwargs = {'namespace': metadata['namespace']} if 'namespace' in metadata else {}
        labels = metadata.get("labels", {})
        try:
            imagestream = labels["imagestream"]
        except KeyError:
            self.log.error("No imagestream label set for this Build")
            raise
        self.log.info("Importing tags for %s", imagestream)
        osbs.import_image(imagestream, **kwargs)
|
# NOTE(review): this line held a stray "s" — residue of a truncated header —
# which made the module unimportable; commented out pending restoration.
import frappe
from frappe import _
from frappe.utils import fmt_money, formatdate, format_time, now_datetime, \
get_url_to_form, get_url_to_list, flt
from datetime import timedelta
from dateutil.relativedelta import relativedelta
from frappe.core.doctype.user.user import STANDARD_USERS
import frappe.desk.notifications
from erpnext.accounts.utils import get_balance_on
user_specific_content = ["calendar_events", "todo_list"]
from frappe.model.document import Document
class EmailDigest(Document):
    def __init__(self, arg1, arg2=None):
        """Load the digest document, compute the reporting date windows and
        look up the company's default currency."""
        super(EmailDigest, self).__init__(arg1, arg2)
        self.from_date, self.to_date = self.get_from_to_date()
        self.set_dates()
        self._accounts = {}  # presumably an account cache for balance helpers — TODO confirm
        self.currency = frappe.db.get_value("Company", self.company,
            "default_currency")
def get_users(self):
"""get list of users"""
user_list = frappe.db.sql("""
select name, enabled from tabUser
where name not in ({})
and user_type != "Website User"
order by enabled desc, name asc""".format(", ".join(["%s"]*len(STANDARD_USERS))), STANDARD_USERS, as_dict=1)
if self.recipient_list:
recipient_list = self.recipient_list.split("\n")
else:
recipient_list = []
for p in user_list:
p["checked"] = p["name"] in recipient_list and 1 or 0
frappe.response['user_list'] = user_list
    def send(self):
        """Render and email the digest to each enabled recipient.

        Each digest is rendered under that recipient's session
        (frappe.set_user) so permission-filtered content is correct; the
        original session user is restored afterwards.
        """
        # send email only to enabled users
        valid_users = [p[0] for p in frappe.db.sql("""select name from `tabUser`
            where enabled=1""")]
        recipients = filter(lambda r: r in valid_users,
            self.recipient_list.split("\n"))
        original_user = frappe.session.user
        if recipients:
            for user_id in recipients:
                frappe.set_user(user_id)
                msg_for_this_receipient = self.get_msg_html()
                # Skip recipients with nothing to report (get_msg_html -> None).
                if msg_for_this_receipient:
                    frappe.sendmail(
                        recipients=user_id,
                        subject="{frequency} Digest".format(frequency=self.frequency),
                        message=msg_for_this_receipient,
                        bulk=True,
                        reference_doctype = self.doctype,
                        reference_name = self.name,
                        unsubscribe_message = _("Unsubscribe from this Email Digest"))
        # NOTE(review): not in a finally — an exception above leaves the
        # session switched to the last recipient.
        frappe.set_user(original_user)
    def get_msg_html(self):
        """Build email digest content"""
        # Account permission checks are bypassed while collecting card data.
        frappe.flags.ignore_account_permission = True
        from erpnext.setup.doctype.email_digest.quotes import get_random_quote
        context = frappe._dict()
        context.update(self.__dict__)
        self.set_title(context)
        self.set_style(context)
        self.set_accounting_cards(context)
        context.events = self.get_calendar_events()
        context.todo_list = self.get_todo_list()
        context.notifications = self.get_notifications()
        quote = get_random_quote()
        context.quote = {"text": quote[0], "author": quote[1]}
        if not (context.events or context.todo_list or context.notifications or context.cards):
            # NOTE(review): this early return leaves ignore_account_permission
            # set to True for the remainder of the request.
            return None
        frappe.flags.ignore_account_permission = False
        # style
        return frappe.render_template("erpnext/setup/doctype/email_digest/templates/default.html",
            context, is_path=True)
def set_title(self, context):
"""Set digest title"""
if self.frequency=="Daily":
context.title = _("Daily Reminders")
context.subtitle = _("Pending activities for today")
elif self.frequency=="Weekly":
context.title = _("This Week's Summary")
context.subtitle = _("Summary for this week and pending activities")
elif self.frequency=="Monthly":
context.title = _("This Month's Summary")
context.subtitle = _("Summary for this month and pending activities")
    def set_style(self, context):
        """Set standard digest style"""
        # Inline CSS snippets consumed by the default.html email template.
        context.text_muted = '#8D99A6'
        context.text_color = '#36414C'
        # NOTE(review): h1 sets margin-bottom twice; the second value (0) wins.
        context.h1 = 'margin-bottom: 30px; margin-bottom: 0; margin-top: 40px; font-weight: 400;'
        context.label_css = '''display: inline-block; color: {text_muted};
            padding: 3px 7px; margin-right: 7px;'''.format(text_muted = context.text_muted)
        context.section_head = 'margin-top: 60px; font-size: 16px;'
        context.line_item = 'padding: 5px 0px; margin: 0; border-bottom: 1px solid #d1d8dd;'
        context.link_css = 'color: {text_color}; text-decoration: none;'.format(text_color = context.text_color)
def get_notifications(self):
"""Get notifications for user"""
notifications = frappe.desk.notifications.get_notifications()
notifications = sorted(notifications.get("open_count_doctype", {}).items(),
lambda a, b: 1 if a[1] < b[1] else -1)
notifications = [{"key": n[0], "value": n[1],
"link": get_url_to_list(n[0])} for n in notifications if n[1]]
return notifications
def get_calendar_events(self):
"""Get calendar events for given user"""
from frappe.desk.doctype.event.event import get_events
events = get_events(self.future_from_date.strftime("%Y-%m-%d"),
self.future_to_date.strftime("%Y-%m-%d")) or []
for i, e in enumerate(events):
e.starts_on_label = format_time(e.starts_on)
e.ends_on_label = format_time(e.ends_on)
e.date = formatdate(e.starts)
e.link = get_url_to_form("Event", e.name)
return events
    def get_todo_list(self, user_id=None):
        """Get to-do list"""
        # Defaults to the active session user (send() switches users per recipient).
        if not user_id:
            user_id = frappe.session.user
        # Open ToDos owned by or assigned by the user, highest priority first.
        todo_list = frappe.db.sql("""select *
            from `tabToDo` where (owner=%s or assigned_by=%s) and status="Open"
            order by field(priority, 'High', 'Medium', 'Low') asc, date asc""",
            (user_id, user_id), as_dict=True)
        for t in todo_list:
            t.link = get_url_to_form("ToDo", t.name)
        return todo_list
    def set_accounting_cards(self, context):
        """Create accounting cards if checked"""
        cache = frappe.cache()
        context.cards = []
        for key in ("income", "expenses_booked", "income_year_to_date", "expense_year_to_date",
            "invoiced_amount", "payables", "bank_balance"):
            if self.get(key):
                cache_key = "email_digest:card:" + key
                card = cache.get(cache_key)
                if card:
                    # NOTE(review): eval() on a cached payload is only safe if
                    # the cache is fully trusted; prefer a json round-trip.
                    card = eval(card)
                else:
                    card = frappe._dict(getattr(self, "get_" + key)())
                    # format values
                    if card.last_value:
                        # Percentage change versus the previous period.
                        card.diff = int(flt(card.value - card.last_value) / card.last_value * 100)
                        if card.diff < 0:
                            card.diff = str(card.diff)
                            card.gain = False
                        else:
                            card.diff = "+" + str(card.diff)
                            card.gain = True
                    card.last_value = self.fmt_money(card.last_value)
                    card.value = self.fmt_money(card.value)
                    # Cache the formatted card for 24 hours.
                    cache.setex(cache_key, card, 24 * 60 * 60)
                context.cards.append(card)
    def get_income(self):
        """Get income for given period"""
        income, past_income = self.get_period_amounts(self.get_root_type_accounts("income"))
        return {
            "label": self.meta.get_label("income"),
            "value": income,
            "last_value": past_income
        }
    def get_income_year_to_date(self):
        """Get income year to date"""
        return self.get_year_to_date_balance("income")
    def get_expense_year_to_date(self):
        """Get expenses year to date"""
        return self.get_year_to_date_balance("expense")
    def get_year_to_date_balance(self, root_type):
        """Sum the balances of all accounts of *root_type* as of the
        digest's future_to_date."""
        balance = 0.0
        for account in self.get_root_type_accounts(root_type):
            balance += get_balance_on(account, date = self.future_to_date)
        return {
            "label": self.meta.get_label(root_type + "_year_to_date"),
            "value": balance
        }
    def get_bank_balance(self):
        # account is of type "Bank" or "Cash"
        return self.get_type_balance('bank_balance', 'Bank')
    def get_payables(self):
        # Balance across "Payable"-type accounts.
        return self.get_type_balance('payables', 'Payable')
    def get_invoiced_amount(self):
        # Balance across "Receivable"-type accounts.
        return self.get_type_balance('invoiced_amount', 'Receivable')
    def get_expenses_booked(self):
        """Expenses for the digest period plus the previous period's figure
        for comparison."""
        expense, past_expense = self.get_period_amounts(self.get_root_type_accounts("expense"))
        return {
            "label": self.meta.get_label("expenses_booked"),
            "value": expense,
            "last_value": past_expense
        }
def get_period_amounts | (self, accounts):
"""Get amounts for current and past periods"""
balance = past_balance = 0.0
for account in accounts:
balance += (get_balance_on(account, date = self.future_to_date)
- get_balance_on(account, date = self.future_from_date))
past_balance += (get_balance_on(account, date = self.past_to_date)
- get_balance_on(account, date = self.past_from_date))
return balance, past_balance
def get_type_balance(self, fieldname, account_type):
accounts = [d.name for d in \
frappe.db.get_all("Account", fi |
# -*- coding: utf-8 -*-
import attr
from navmazing import NavigateToAttribute
from widgetastic.widget import NoSuchElementException
from widgetastic.widget import Text
from widgetastic.widget import View
from widgetastic_patternfly import BootstrapNav
from widgetastic_patternfly import BreadCrumb
from widgetastic_patternfly import Button
from widgetastic_patternfly import Dropdown
from cfme.base.ui import BaseLoggedInPage
from cfme.common import Taggable
from cfme.exceptions import ItemNotFound
from cfme.modeling.base import BaseCollection
from cfme.modeling.base import BaseEntity
from cfme.utils.appliance.implementations.ui import CFMENavigateStep
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.appliance.implementations.ui import navigator
from cfme.utils.providers import get_crud_by_name
from widgetastic_manageiq import Accordion
from widgetastic_manageiq import BaseEntitiesView
from widgetastic_manageiq import ItemsToolBarViewSelector
from widgetastic_manageiq import ManageIQTree
from widgetastic_manageiq import Search
from widgetastic_manageiq import SummaryTable
class ObjectStoreObjectToolbar(View):
    """The toolbar on the Object Store Object page"""
    configuration = Dropdown('Configuration')
    policy = Dropdown('Policy')
    download = Dropdown('Download')
    view_selector = View.nested(ItemsToolBarViewSelector)
class ObjectStoreObjectDetailsToolbar(View):
    """The toolbar on the Object Store Object detail page"""
    policy = Dropdown('Policy')
    download = Button(title='Download summary in PDF format')
class ObjectStoreObjectDetailsEntities(View):
    """The entities on the Object Store Object detail page"""
    breadcrumb = BreadCrumb()
    properties = SummaryTable('Properties')
    relationships = SummaryTable('Relationships')
    smart_management = SummaryTable('Smart Management')
class ObjectStoreObjectDetailsSidebar(View):
    """The sidebar on the Object Store Object details page"""
    # The lowercase nested class names mirror the accordion titles; noqa
    # silences the linter's class-naming check.
    @View.nested
    class properties(Accordion):  # noqa
        tree = ManageIQTree()
    @View.nested
    class relationships(Accordion):  # noqa
        tree = ManageIQTree()
class ObjectStoreObjectView(BaseLoggedInPage):
    """A base view for all the Object Store Object pages"""
    title = Text('.//div[@id="center_div" or @id="main-content"]//h1')
    @property
    def in_object(self):
        # True when logged in and the Object Store Objects nav entry is active.
        return (
            self.logged_in_as_current_user and
            self.navigation.currently_selected == ['Storage', 'Object Storage',
                'Object Store Objects'])
class ObjectStoreObjectAllView(ObjectStoreObjectView):
    """The all Object Store Object page"""
    toolbar = View.nested(ObjectStoreObjectToolbar)
    search = View.nested(Search)
    including_entities = View.include(BaseEntitiesView, use_parent=True)
    @property
    def is_displayed(self):
        # Displayed when on the list page with its expected title.
        return (
            self.in_object and
            self.title.text == 'Cloud Object Store Objects')
    @View.nested
    class my_filters(Accordion):  # noqa
        ACCORDION_NAME = "My Filters"
        navigation = BootstrapNav('.//div/ul')
        tree = ManageIQTree()
class ObjectStoreObjectDetailsView(ObjectStoreObjectView):
    """The detail Object Store Object page"""
    @property
    def is_displayed(self):
        # Matches both the page title and the active breadcrumb entry.
        expected_title = '{} (Summary)'.format(self.context['object'].key)
        return (
            self.title.text == expected_title and
            self.entities.breadcrumb.active_location == expected_title)
    toolbar = View.nested(ObjectStoreObjectDetailsToolbar)
    sidebar = View.nested(ObjectStoreObjectDetailsSidebar)
    entities = View.nested(ObjectStoreObjectDetailsEntities)
@attr.s
class ObjectStoreObject(BaseEntity, Taggable):
    """ Model of a Storage Object Store Object in cfme
    Args:
        key: key of the object.
        provider: provider
    """
    key = attr.ib()
    provider = attr.ib()
@attr.s
class ObjectStoreObjectCollection(BaseCollection):
    """Collection object for the :py:class:'cfme.storage.object_store_object.ObjStoreObject' """
    ENTITY = ObjectStoreObject
    @property
    def manager(self):
        coll = self.appliance.collections.object_managers.filter(
            {"provider": self.filters.get('provider')}
        )
        # For each provider has single object type storage manager
        return coll.all()[0]
    def all(self):
        """returning all Object Store Objects

        NOTE(review): returns None (not an empty list) when the list view
        raises NoSuchElementException — callers must guard before iterating.
        """
        view = navigate_to(self, 'All')
        view.entities.paginator.set_items_per_page(500)
        objects = []
        try:
            if 'provider'in self.filters:
                # Provider filter given: keep only that provider's rows.
                for item in view.entities.elements.read():
                    if self.filters['provider'].name in item['Cloud Provider']:
                        objects.append(self.instantiate(key=item['Key'],
                            provider=self.filters['provider']))
            else:
                # No filter: resolve each row's provider by its first name token.
                for item in view.entities.elements.read():
                    provider_name = item['Cloud Provider'].split()[0]
                    provider = get_crud_by_name(provider_name)
                    objects.append(self.instantiate(key=item['Key'], provider=provider))
            return objects
        except NoSuchElementException:
            return None
    def delete(self, *objects):
        """Check the given objects in the list view and remove them via the
        toolbar; raises ItemNotFound if any object cannot be located."""
        view = navigate_to(self, 'All')
        for obj in objects:
            try:
                view.entities.get_entity(key=obj.key, surf_pages=True).check()
            except ItemNotFound:
                raise ItemNotFound('Could not locate object {}'.format(obj.key))
        view.toolbar.configuration.item_select('Remove Object Storage Objects', handle_alert=True)
        view.flash.assert_no_error()
@navigator.register(ObjectStoreObjectCollection, 'All')
class ObjectStoreObjectAll(CFMENavigateStep):
    # Navigate to the Object Store Objects list page.
    VIEW = ObjectStoreObjectAllView
    prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn')
    def step(self, *args, **kwargs):
        self.prerequisite_view.navigation.select(
            'Storage', 'Object Storage', 'Object Store Objects')
@navigator.register(ObjectStoreObject, 'Details')
class ObjectStoreObjectDetails(CFMENavigateStep):
    # Navigate from the list page to a single object's summary page.
    VIEW = ObjectStoreObjectDetailsView
    prerequisite = NavigateToAttribute('parent', 'All')
    def step(self, *args, **kwargs):
        try:
            self.prerequisite_view.entities.get_entity(key=self.obj.key, surf_pages=True).click()
        except ItemNotFound:
            raise ItemNotFound('Could not locate object {}'.format(self.obj.key))
|
#####
__all__ = [
'Domain',
'ProductDomain',
'RealDomain',
'IntegerDomain',
'MultinomialDomain'
]
## FUNCTIONS #################################################################
## ABSTRACT CLASSES AND MIXINS ###############################################
# NOTE(review): abc.abstractproperty is deprecated since Python 3.3;
# @property over @abc.abstractmethod is the modern spelling.
class Domain(with_metaclass(abc.ABCMeta, object)):
    """
    Abstract base class for domains of outcomes of models.
    """
    ## ABSTRACT PROPERTIES ##
    @abc.abstractproperty
    def is_continuous(self):
        """
        Whether or not the domain has an uncountable number of values.
        :type: `bool`
        """
        pass
    @abc.abstractproperty
    def is_finite(self):
        """
        Whether or not the domain contains a finite number of points.
        :type: `bool`
        """
        pass
    @abc.abstractproperty
    def dtype(self):
        """
        The numpy dtype of a single element of the domain.
        :type: `np.dtype`
        """
        pass
    @abc.abstractproperty
    def n_members(self):
        """
        Returns the number of members in the domain if it
        `is_finite`, otherwise, returns `np.inf`.
        :type: ``int`` or ``np.inf``
        """
        pass
    @abc.abstractproperty
    def example_point(self):
        """
        Returns any single point guaranteed to be in the domain, but
        no other guarantees; useful for testing purposes.
        This is given as a size 1 ``np.array`` of type `dtype`.
        :type: ``np.ndarray``
        """
        pass
    @abc.abstractproperty
    def values(self):
        """
        Returns an `np.array` of type `dtype` containing
        some values from the domain.
        For domains where `is_finite` is ``True``, all elements
        of the domain will be yielded exactly once.
        :rtype: `np.ndarray`
        """
        pass
    ## CONCRETE PROPERTIES ##
    @property
    def is_discrete(self):
        """
        Whether or not the domain has a countable number of values.
        :type: `bool`
        """
        # Defined as the complement of is_continuous.
        return not self.is_continuous
    ## ABSTRACT METHODS ##
    @abc.abstractmethod
    def in_domain(self, points):
        """
        Returns ``True`` if all of the given points are in the domain,
        ``False`` otherwise.
        :param np.ndarray points: An `np.ndarray` of type `self.dtype`.
        :rtype: `bool`
        """
        pass
class ProductDomain(Domain):
    """
    A domain made from the cartesian product of other domains.
    :param Domain domains: ``Domain`` instances as separate arguments,
        or as a single list of ``Domain`` instances.
    """
    def __init__(self, *domains):
        # Accept both ProductDomain(d1, d2, ...) and ProductDomain([d1, d2]).
        if len(domains) == 1:
            try:
                self._domains = list(domains[0])
            except TypeError:
                # The lone argument is not iterable; treat it as one domain.
                self._domains = domains
        else:
            self._domains = domains
        # BUG FIX: the original unconditionally re-assigned
        # ``self._domains = domains`` at this point, clobbering the
        # single-list branch above and breaking ProductDomain([d1, d2, ...]).
        self._dtypes = [domain.example_point.dtype for domain in self._domains]
        self._example_point = join_struct_arrays(
            [np.array(domain.example_point) for domain in self._domains]
        )
        self._dtype = self._example_point.dtype
    @property
    def is_continuous(self):
        """
        Whether or not the domain has an uncountable number of values.
        :type: `bool`
        """
        # Continuous if any factor is continuous.
        return any([domain.is_continuous for domain in self._domains])
    @property
    def is_finite(self):
        """
        Whether or not the domain contains a finite number of points.
        :type: `bool`
        """
        # Finite only if every factor is finite.
        return all([domain.is_finite for domain in self._domains])
    @property
    def dtype(self):
        """
        The numpy dtype of a single element of the domain.
        :type: `np.dtype`
        """
        return self._dtype
    @property
    def n_members(self):
        """
        Returns the number of members in the domain if it
        `is_finite`, otherwise, returns `np.inf`.
        :type: ``int`` or ``np.inf``
        """
        if self.is_finite:
            # Product of the factor sizes.
            return reduce(mul, [domain.n_members for domain in self._domains], 1)
        else:
            return np.inf
    @property
    def example_point(self):
        """
        Returns any single point guaranteed to be in the domain, but
        no other guarantees; useful for testing purposes.
        This is given as a size 1 ``np.array`` of type `dtype`.
        :type: ``np.ndarray``
        """
        return self._example_point
    @property
    def values(self):
        """
        Returns an `np.array` of type `dtype` containing
        some values from the domain.
        For domains where `is_finite` is ``True``, all elements
        of the domain will be yielded exactly once.
        :rtype: `np.ndarray`
        """
        separate_values = [domain.values for domain in self._domains]
        return np.concatenate([
            join_struct_arrays(list(map(np.array, value)))
            for value in product(*separate_values)
        ])
    ## METHODS ##
    def _mytype(self, array):
        # astype does weird stuff with struct names, and possibly
        # depends on numpy version; hopefully
        # the following is a bit more predictable since it passes through
        # uint8
        return separate_struct_array(array, self.dtype)[0]
    def to_regular_arrays(self, array):
        """
        Expands from an array of type `self.dtype` into a list of
        arrays with dtypes corresponding to the factor domains.
        :param np.ndarray array: An `np.array` of type `self.dtype`.
        :rtype: ``list``
        """
        return separate_struct_array(self._mytype(array), self._dtypes)
    def from_regular_arrays(self, arrays):
        """
        Merges a list of arrays (of the same shape) of dtypes
        corresponding to the factor domains into a single array
        with the dtype of the ``ProductDomain``.
        :param list array: A list with each element of type ``np.ndarray``
        :rtype: `np.ndarray`
        """
        return self._mytype(join_struct_arrays([
            array.astype(dtype)
            for dtype, array in zip(self._dtypes, arrays)
        ]))
    def in_domain(self, points):
        """
        Returns ``True`` if all of the given points are in the domain,
        ``False`` otherwise.
        :param np.ndarray points: An `np.ndarray` of type `self.dtype`.
        :rtype: `bool`
        """
        return all([
            domain.in_domain(array)
            for domain, array in
            zip(self._domains, separate_struct_array(points, self._dtypes))
        ])
## CLASSES ###################################################################
class RealDomain(Domain):
"""
A domain specifying a contiguous (and possibly open ended) subset
of the real numbers.
:param float min: A number specifying the lowest possible value of the
domain.
:param float max: A number specifying the largest possible value of the
domain.
"""
    def __init__(self, min=-np.inf, max=np.inf):
        # NOTE: parameter names shadow the builtins min/max; kept for
        # backward compatibility with existing callers.
        self._min = min
        self._max = max
    ## PROPERTIES ##
    @property
    def min(self):
        """
        Returns the minimum value of the domain.
        :rtype: `float`
        """
        return self._min
    @property
    def max(self):
        """
        Returns the maximum value of the domain.
        :rtype: `float`
        """
        return self._max
    @property
    def is_continuous(self):
        """
        Whether or not the domain has an uncountable number of values.
        :type: `bool`
        """
        return True
    @property
    def is_finite(self):
        """
        Whether or not the domain contains a finite number of points.
        :type: `bool`
        """
        return False
@property
def dtype(self):
"""
The numpy dtype of a single element of the domain.
:type: `np.dtype`
"""
return np.float
@property
def n_members(self):
"""
Returns the number of members in the domain if it
`is_finite`, otherwise, returns `None`.
|
# | -*- coding: utf-8 -*-
from __future__ import unicode_literals
if __name__ == '__main__':
    from command_manager import Manager
    # Build a Manager over the "commands" package and dispatch the CLI.
    manager = Manager(["commands"])
    manager.run()
|
# -*- coding: utf-8 -*-
# Lookup table of section-property formula strings, keyed by composite-section
# type ('notPlate' / 'TBPlate' / 'LRPlate'), then member role ('beam' /
# 'column'), then ductility class ('M' / 'H' — presumably medium/high; TODO
# confirm).  Each leaf gives expressions for equivalent flange width (BF) and
# depth (D); TF and TW hold a (criteria-true, criteria-false) pair of
# thickness expressions selected by tfCriteria / twCriteria, where the
# boolean True means "always use the first form".
slenderParameters = {'notPlate': {'beam': {'M': {'BF': '2*bf', 'tfCriteria': True, 'TF': ('2*tf', ''), 'D': 'd',
                                                 'twCriteria': True, 'TW': ('(D-2*TF)/(d-2(tf+r))*tw', '')},
                                           'H': {'BF': '2*bf', 'tfCriteria': True, 'TF': ('2*0.55*tf/.6', ''), 'D': 'd',
                                                 'twCriteria': True, 'TW': ('(D-2*TF)/(d-2(tf+r))*tw', '')}},
                                  'column': {'M': {'BF': '2*bf', 'tfCriteria': True, 'TF': ('2*tf', ''), 'D': 'd',
                                                   'twCriteria': True, 'TW': ('(D-2*TF)/(d-2(tf+r))*tw', '')},
                                             'H': {'BF': '2*bf', 'tfCriteria': True, 'TF': ('2*tf', ''), 'D': 'd',
                                                   'twCriteria': True, 'TW': ('(D-2*TF)/(d-2(tf+r))*tw', '')}}},
                     'TBPlate': {'beam': {'M': {'BF': 'c+bf', 'tfCriteria': 't1<(.76*B1*tf)/(1.12*bf)',
                                                'TF': ('(1.12*BF*t1)/(.76*B1)', '(BF*tf)/bf'), 'D': 'd+2*t1',
                                                'twCriteria': True, 'TW': ('(D-2*TF)/(d-2(tf+r))*tw', '')},
                                          'H': {'BF': 'c+bf', 'tfCriteria': 't1<(.6*B1*tf)/(0.55*bf)',
                                                'TF': ('(0.55*BF*t1)/(.60*B1)', '(BF*tf)/bf'), 'D': 'd+2*t1',
                                                'twCriteria': True, 'TW': ('(D-2*TF)/(d-2(tf+r))*tw', '')}},
                                 'column': {'M': {'BF': 'c+bf', 'tfCriteria': 't1<(.76*B1*tf)/(1.12*bf)',
                                                  'TF': ('(1.12*BF*t1)/(.76*B1)', '(BF*tf)/bf'), 'D': 'd+2*t1',
                                                  'twCriteria': True, 'TW': ('(D-2*TF)/(d-2(tf+r))*tw', '')},
                                            'H': {'BF': 'c+bf', 'tfCriteria': 't1<(B1*tf)/(bf)',
                                                  'TF': ('(BF*t1)/(B1)', '(BF*tf)/bf'), 'D': 'd+2*t1',
                                                  'twCriteria': True, 'TW': ('(D-2*TF)/(d-2(tf+r))*tw', '')}}},
                     'LRPlate': {'beam': {'M': {'BF': 'c+bf+2*tf', 'tfCriteria': 't1<(.76*B1*tf)/(1.12*bf)',
                                                'TF': ('(1.12*BF*t1)/(.76*B1)', '(BF*tf)/bf'), 'D': 'd+2*t1',
                                                'twCriteria': 't2<(d*tw)/(d-2(tf+r))', 'TW': ('t2*(D-2*TF)/d', 'tw*(D-2*TF)/(d-2*(tf+r))')},
                                          'H': {'BF': 'c+bf+2*tf', 'tfCriteria': 't1<(.6*B1*tf)/(0.55*bf)',
                                                'TF': ('(0.55*BF*t1)/(.60*B1)', '(BF*tf)/bf'), 'D': 'd+2*t1',
                                                'twCriteria': 't2<(d*tw)/(d-2(tf+r))', 'TW': ('t2*(D-2*TF)/d', 'tw*(D-2*TF)/(d-2*(tf+r))')}},
                                 'column': {'M': {'BF': 'c+bf+2*tf', 'tfCriteria': 't1<(.76*B1*tf)/(1.12*bf)',
                                                  'TF': ('(1.12*BF*t1)/(.76*B1)', '(BF*tf)/bf'), 'D': 'd+2*t1',
                                                  'twCriteria': 't2<(d*tw)/(d-2(tf+r))', 'TW': ('t2*(D-2*TF)/d', 'tw*(D-2*TF)/(d-2*(tf+r))')},
                                            'H': {'BF': 'c+bf+2*tf', 'tfCriteria': 't1<(B1*tf)/(bf)',
                                                  'TF': ('(BF*t1)/(B1)', '(BF*tf)/bf'), 'D': 'd+2*t1',
                                                  'twCriteria': 't2<(d*tw)/(d-2(tf+r))', 'TW': ('t2*(D-2*TF)/d', 'tw*(D-2*TF)/(d-2*(tf+r))')}}}}
if __name__ == '__main__':
    # Demo (Python 2): resolve and print the formulas for one combination.
    # NOTE(review): "comositeSection" is a typo for "compositeSection";
    # local-only, so renaming it would be safe.
    comositeSection = 'LRPlate'
    sectionPos = 'beam'
    ductility = 'M'
    parameters = slenderParameters[comositeSection][sectionPos][ductility]
    BF = parameters['BF']
    tfCriteria = parameters['tfCriteria']
    TF1 = parameters['TF'][0]
    TF2 = parameters['TF'][1]
    D = parameters['D']
    twCriteria = parameters['twCriteria']
    TW1 = parameters['TW'][0]
    TW2 = parameters['TW'][1]
    print BF
    print tfCriteria
defaul | t_app_config = 'hs_tr | acking.apps.HSTrackingAppConfig'
|
"""The vimdoc parser."""
from vimdoc import codeline
from vimdoc import docline
from vimdoc import error
from vimdoc import regex
def IsComment(line):
    """Return a truthy match object when *line* starts with the comment leader."""
    return regex.comment_leader.match(line)
def IsContinuation(line):
    """Return a truthy match object when *line* is a vimscript continuation line."""
    return regex.line_continuation.match(line)
def StripContinuator(line):
    """Remove the leading continuation marker from *line* (which must have one)."""
    assert regex.line_continuation.match(line)
    return regex.line_continuation.sub('', line)
def EnumerateStripNewlinesAndJoinContinuations(lines):
    """Preprocesses the lines of a vimscript file.
    Enumerates the lines, strips the newlines from the end, and joins the
    continuations.
    Args:
        lines: The lines of the file.
    Yields:
        Each preprocessed line.
    """
    # (lineno, cached) holds the most recent non-continuation line while we
    # look ahead for continuation lines to append onto it.
    lineno, cached = (None, None)
    for i, line in enumerate(lines):
        line = line.rstrip('\n')
        if IsContinuation(line):
            if cached is None:
                raise error.CannotContinue('No preceding line.', i)
            elif IsComment(cached) and not IsComment(line):
                # A code line may not continue a comment line.
                raise error.CannotContinue('No comment to continue.', i)
            else:
                cached += StripContinuator(line)
            continue
        if cached is not None:
            # Flush the previous, fully-joined line before caching this one.
            yield lineno, cached
        lineno, cached = (i, line)
    if cached is not None:
        # Flush the final line.
        yield lineno, cached
def EnumerateParsedLines(lines):
  """Yield (lineno, line-object) pairs, parsing comment vs. code lines.

  Tracks whether we are inside a vimdoc comment block ('""' leader); only
  comments inside such a block are treated as documentation lines.
  """
  vimdoc_mode = False
  for i, line in EnumerateStripNewlinesAndJoinContinuations(lines):
    if not vimdoc_mode:
      if regex.vimdoc_leader.match(line):
        vimdoc_mode = True
        # There's no need to yield the blank line if it's an empty starter line.
        # For example, in:
        # ""
        # " @usage whatever
        # " description
        # There's no need to yield the first docline as a blank.
        if not regex.empty_vimdoc_leader.match(line):
          # A starter line starts with two comment leaders.
          # If we strip one of them it's a normal comment line.
          yield i, ParseCommentLine(regex.comment_leader.sub('', line))
    elif IsComment(line):
      yield i, ParseCommentLine(line)
    else:
      # First non-comment line ends the vimdoc block.
      vimdoc_mode = False
      yield i, ParseCodeLine(line)
def ParseCodeLine(line):
  """Parse one line of vimscript and build the matching CodeLine object."""
  if regex.blank_code_line.match(line):
    return codeline.Blank()
  func = regex.function_line.match(line)
  if func is not None:
    namespace, name, args = func.groups()
    return codeline.Function(name, namespace, regex.function_arg.findall(args))
  command = regex.command_line.match(line)
  if command is not None:
    args, name = command.groups()
    # Each supported command attribute corresponds to a '-attribute' token.
    flags = dict(
        (attr, '-' + attr in args)
        for attr in ('bang', 'range', 'count', 'register', 'buffer', 'bar'))
    return codeline.Command(name, **flags)
  setting = regex.setting_line.match(line)
  if setting is not None:
    (name,) = setting.groups()
    return codeline.Setting('g:' + name)
  flag = regex.flag_line.match(line)
  if flag is not None:
    primary, secondary, default = flag.groups()
    return codeline.Flag(primary or secondary, default)
  # Anything else is carried through verbatim.
  return codeline.Unrecognized(line)
def ParseCommentLine(line):
  """Build the DocLine for one comment line (block directive or plain text)."""
  directive = regex.block_directive.match(line)
  if directive is not None:
    name, rest = directive.groups()
    return ParseBlockDirective(name, rest)
  # Not a directive: strip the comment leader and treat the rest as text.
  return docline.Text(regex.comment_leader.sub('', line))
def ParseBlockDirective(name, rest):
  """Instantiate the block directive 'name' with its argument string."""
  directive = docline.BLOCK_DIRECTIVES.get(name)
  if directive is None:
    raise error.UnrecognizedBlockDirective(name)
  try:
    return directive(rest)
  except ValueError:
    raise error.InvalidBlockArgs(rest)
def ParseBlocks(lines, filename):
  """Parse a whole vimscript file, yielding closed documentation blocks.

  Any ParseError raised during parsing is annotated with the 1-based line
  number and filename before being re-raised.
  """
  blocks = []
  selection = []
  lineno = 0
  try:
    for lineno, line in EnumerateParsedLines(lines):
      for block in line.Affect(blocks, selection):
        yield block.Close()
    # Flush blocks still open at end of file.
    for block in codeline.EndOfFile().Affect(blocks, selection):
      yield block.Close()
  except error.ParseError as e:
    # lineno is 0-based internally; report it 1-based.
    e.lineno = lineno + 1
    e.filename = filename
    raise
|
#!/usr/bin/python
#coding=utf-8
'''
@author: sheng
@license:
'''
import unittest
from meridian.acupoints import zhima | i44
class TestZhimai44Functions(unittest.TestCase):
    """Placeholder test case for the zhimai44 acupoint module."""

    def setUp(self):
        """No fixtures are required yet."""

    def test_xxx(self):
        """TODO: add real assertions for zhimai44."""
# Run the test suite when executed as a script.
if __name__ == '__main__':
    unittest.main()
|
utoff of the same type already exists with this cut-off date !'
)]
def cutoff_date_onchange(
self, cr, uid, ids, type, cutoff_date, move_label):
res = {'value': {}}
if type and cutoff_date:
context = {'type': type, 'cutoff_date': cutoff_date}
res['value']['move_label'] = self._default_move_label(
cr, uid, context=context)
return res
def back2draft(self, cr, uid, ids, context=None):
assert len(ids) == 1,\
'This function should only be used for a single id at a time'
cur_cutoff = self.browse(cr, uid, ids[0], context=context)
if cur_cutoff.move_id:
self.pool['account.move'].unlink(
cr, uid, [cur_cutoff.move_id.id], context=context)
self.write(cr, uid, ids[0], {'state': 'draft'}, context=context)
return True
    def _prepare_move(self, cr, uid, cur_cutoff, to_provision, context=None):
        """Build the account.move values dict for a cut-off.

        :param cur_cutoff: browse record of the cut-off being processed
        :param to_provision: dict mapping (cutoff_account_id,
            analytic_account_id) tuples to the amount to provision
        :return: dict of values suitable for account.move create()
        :raises orm.except_orm: if no single period matches the cut-off date
        """
        if context is None:
            context = {}
        movelines_to_create = []
        amount_total = 0
        move_label = cur_cutoff.move_label
        for (cutoff_account_id, analytic_account_id), amount in \
                to_provision.items():
            # Negative amounts become debits, positive amounts credits.
            movelines_to_create.append((0, 0, {
                'account_id': cutoff_account_id,
                'name': move_label,
                'debit': amount < 0 and amount * -1 or 0,
                'credit': amount >= 0 and amount or 0,
                'analytic_account_id': analytic_account_id,
            }))
            amount_total += amount
        # Add the counterpart (balancing) line on the cut-off account.
        counterpart_amount = amount_total * -1
        movelines_to_create.append((0, 0, {
            'account_id': cur_cutoff.cutoff_account_id.id,
            'debit': counterpart_amount < 0 and counterpart_amount * -1 or 0,
            'credit': counterpart_amount >= 0 and counterpart_amount or 0,
            'name': move_label,
            'analytic_account_id': False,
        }))
        # Select the accounting period matching the cut-off date.
        local_ctx = context.copy()
        local_ctx['account_period_prefer_normal'] = True
        period_search = self.pool['account.period'].find(
            cr, uid, cur_cutoff.cutoff_date, context=local_ctx)
        if len(period_search) != 1:
            raise orm.except_orm(
                'Error:', "No matching period for date '%s'"
                % cur_cutoff.cutoff_date)
        period_id = period_search[0]
        res = {
            'journal_id': cur_cutoff.cutoff_journal_id.id,
            'date': cur_cutoff.cutoff_date,
            'period_id': period_id,
            'ref': move_label,
            'line_id': movelines_to_create,
        }
        return res
    def create_move(self, cr, uid, ids, context=None):
        """Create and validate the cut-off journal entry for one cut-off.

        Aggregates line and tax-line amounts per (cut-off account, analytic
        account), creates the move, marks the cut-off 'done', and returns an
        action dict opening the new move.

        :raises orm.except_orm: if a move already exists or there are no lines
        """
        assert len(ids) == 1, \
            'This function should only be used for a single id at a time'
        move_obj = self.pool['account.move']
        cur_cutoff = self.browse(cr, uid, ids[0], context=context)
        if cur_cutoff.move_id:
            raise orm.except_orm(
                _('Error:'),
                _("The Cut-off Journal Entry already exists. You should "
                "delete it before running this function."))
        if not cur_cutoff.line_ids:
            raise orm.except_orm(
                _('Error:'),
                _("There are no lines on this Cut-off, so we can't create "
                "a Journal Entry."))
        to_provision = {}
        # key = (cutoff_account_id, analytic_account_id)
        # value = amount
        for line in cur_cutoff.line_ids:
            # if it is already present
            if (
                line.cutoff_account_id.id,
                line.analytic_account_id.id or False
            ) in to_provision:
                to_provision[(
                    line.cutoff_account_id.id,
                    line.analytic_account_id.id or False
                )] += line.cutoff_amount
            else:
                # if not already present
                to_provision[(
                    line.cutoff_account_id.id,
                    line.analytic_account_id.id or False
                )] = line.cutoff_amount
            # Same for tax lines
            for tax_line in line.tax_line_ids:
                if (
                    tax_line.cutoff_account_id.id,
                    tax_line.analytic_account_id.id or False
                ) in to_provision:
                    to_provision[(
                        tax_line.cutoff_account_id.id,
                        tax_line.analytic_account_id.id or False
                    )] += tax_line.cutoff_amount
                else:
                    to_provision[(
                        tax_line.cutoff_account_id.id,
                        tax_line.analytic_account_id.id or False
                    )] = tax_line.cutoff_amount
        vals = self._prepare_move(
            cr, uid, cur_cutoff, to_provision, context=context)
        move_id = move_obj.create(cr, uid, vals, context=context)
        move_obj.validate(cr, uid, [move_id], context=context)
        # Link the move to the cut-off and mark it done.
        self.write(cr, uid, ids[0], {
            'move_id': move_id,
            'state': 'done',
        }, context=context)
        # Client action opening the freshly created journal entry.
        action = {
            'name': 'Cut-off Account Move',
            'view_type': 'form',
            'view_mode': 'form,tree',
            'res_id': move_id,
            'view_id': False,
            'res_model': 'account.move',
            'type': 'ir.actions.act_window',
            'nodestroy': False,
            'target': 'current',
        }
        return action
class account_cutoff_line(orm.Model):
    """One line of a cut-off: the base amount and where to provision it."""
    _name = 'account.cutoff.line'
    _description = 'Account Cut-off Line'
    _columns = {
        'parent_id': fields.many2one(
            'account.cutoff', 'Cut-off', ondelete='cascade'),
        'name': fields.char('Description', size=64),
        # Mirrors the parent cut-off's company currency.
        'company_currency_id': fields.related(
            'parent_id', 'company_currency_id', type='many2one',
            relation='res.currency', string="Company Currency", readonly=True),
        'partner_id': fields.many2one('res.partner', 'Partner', readonly=True),
        'account_id': fields.many2one(
            'account.account', 'Account',
            domain=[('type', '<>', 'view'), ('type', '<>', 'closed')],
            required=True, readonly=True),
        'cutoff_account_id': fields.many2one(
            'account.account', 'Cut-off Account',
            domain=[('type', '<>', 'view'), ('type', '<>', 'closed')],
            required=True, readonly=True),
        'cutoff_account_code': fields.related(
            'cutoff_account_id', 'code', type='char',
            string='Cut-off Account Code', readonly=True),
        'analytic_account_id': fields.many2one(
            'account.analytic.account', 'Analytic Account',
            domain=[('type', 'not in', ('view', 'template'))],
            readonly=True),
        'analytic_account_code': fields.related(
            'analytic_account_id', 'code', type='char',
            string='Analytic Account Code', readonly=True),
        'currency_id': fields.many2one(
            'res.currency', 'Amount Currency', readonly=True,
            help="Currency of the 'Amount' field."),
        'amount': fields.float(
            'Amount', digits_compute=dp.get_precision('Account'),
            readonly=True,
            help="Amount that is used as base to compute the Cut-off Amount. "
            "This Amount is in the 'Amount Currency', which may be different "
            "from the 'Company Currency'."),
        'cutoff_amount': fields.float(
            'Cut-off Amount', digits_compute=dp.get_precision('Account'),
            readonly=True,
            help="Cut-off Amount without taxes in the Company Currency."),
        'tax_ids': fields.many2many(
            'account.tax', id1='cutoff_line_id', id2='tax_id', string='Taxes',
            readonly=True),
        'tax_line_ids': fields.one2many(
            'account.cutoff.tax.line', 'parent_id', 'Cut-off Tax Lines',
            readonly=True),
    }
class account_cutoff_tax_line(orm.Model):
_name = 'acco |
"""
Tests for job management
"""
import unittest
from unittest import mock
import biokbase.narrative.jobs.jobmanager
from biokbase.narrative.jobs.job import Job
from .util import TestConfig
import os
from IPython.display import HTML
from .narrative_mock.mockclients import get_mock_client, get_failing_mock_client
from biokbase.narrative.exception_util import NarrativeException
__author__ = "Bill Riehl <wjriehl@lbl.gov>"
config = TestConfig()
job_info = config.load_json_file(config.get("jobs", "ee2_job_info_file"))
@mock.patch("biokbase.narrative.jobs.job.clients.get", get_mock_client)
def phony_job():
    """Build a minimal fake Job (with mocked service clients) for tests."""
    return Job.from_state(
        "phony_job",
        {"params": [], "service_ver": "0.0.0"},
        "kbasetest",
        "NarrativeTest/test_editor",
        tag="dev",
    )
def create_jm_message(r_type, job_id=None, data=None):
    """Build a fake JobManager comm message.

    Args:
        r_type: value stored under "request_type".
        job_id: optional job id stored under "job_id".
        data: optional dict used (and mutated) as the message payload.
            Defaults to a fresh dict per call — the previous mutable default
            argument (``data={}``) shared one dict across all calls, so later
            calls clobbered earlier messages.

    Returns:
        dict shaped like {"content": {"data": {...}}}.
    """
    if data is None:
        data = {}
    data["request_type"] = r_type
    data["job_id"] = job_id
    return {"content": {"data": data}}
class JobManagerTest(unittest.TestCase):
    """Tests for the narrative JobManager, run against mocked ee2 clients."""
    @classmethod
    @mock.patch("biokbase.narrative.jobs.jobmanager.clients.get", get_mock_client)
    def setUpClass(cls):
        # One shared JobManager; job ids come from the canned ee2 job info.
        cls.jm = biokbase.narrative.jobs.jobmanager.JobManager()
        cls.job_ids = list(job_info.keys())
        os.environ["KB_WORKSPACE_ID"] = config.get("jobs", "job_test_wsname")
    @mock.patch("biokbase.narrative.jobs.jobmanager.clients.get", get_mock_client)
    def setUp(self):
        # Reload jobs before every test so each starts from a known state.
        self.jm.initialize_jobs()
    def validate_status_message(self, msg):
        """Return True if a status message carries all required keys."""
        core_keys = set(["widget_info", "owner", "state", "spec"])
        state_keys = set(
            ["user", "authstrat", "wsid", "status", "updated", "job_input"]
        )
        if not core_keys.issubset(set(msg.keys())):
            print(
                "Missing core key(s) - [{}]".format(
                    ", ".join(core_keys.difference(set(msg.keys())))
                )
            )
            return False
        if not state_keys.issubset(set(msg["state"].keys())):
            print(
                "Missing status key(s) - [{}]".format(
                    ", ".join(state_keys.difference(set(msg["state"].keys())))
                )
            )
            return False
        return True
    def test_get_job_good(self):
        job_id = self.job_ids[0]
        job = self.jm.get_job(job_id)
        self.assertEqual(job_id, job.job_id)
    def test_get_job_bad(self):
        with self.assertRaises(ValueError):
            self.jm.get_job("not_a_job_id")
    @mock.patch("biokbase.narrative.jobs.jobmanager.clients.get", get_mock_client)
    def test_list_jobs_html(self):
        # list_jobs renders an HTML table; spot-check the expected cells.
        jobs_html = self.jm.list_jobs()
        self.assertIsInstance(jobs_html, HTML)
        html = jobs_html.data
        print(html)
        self.assertIn("<td>5d64935ab215ad4128de94d6</td>", html)
        self.assertIn("<td>NarrativeTest/test_editor</td>", html)
        self.assertIn("<td>2019-08-26 ", html)
        self.assertIn(":54:48</td>", html)
        self.assertIn("<td>fake_test_user</td>", html)
        self.assertIn("<td>completed</td>", html)
        self.assertIn("<td>Not started</td>", html)
        self.assertIn("<td>Incomplete</td>", html)
    @mock.patch("biokbase.narrative.jobs.jobmanager.clients.get", get_mock_client)
    def test_cancel_job_good(self):
        new_job = phony_job()
        job_id = new_job.job_id
        self.jm.register_new_job(new_job)
        self.jm.cancel_job(job_id)
    def test_cancel_job_bad(self):
        with self.assertRaises(ValueError):
            self.jm.cancel_job(None)
    @mock.patch("biokbase.narrative.jobs.jobmanager.clients.get", get_mock_client)
    def test_lookup_all_job_states(self):
        # Two jobs refresh by default; a third appears with ignore_refresh_flag.
        states = self.jm.lookup_all_job_states()
        self.assertEqual(len(states), 2)
        states = self.jm.lookup_all_job_states(ignore_refresh_flag=True)
        self.assertEqual(len(states), 3)
    # @mock.patch('biokbase.narrative.jobs.jobmanager.clients.get', get_mock_client)
    # def test_job_status_fetching(self):
    #     self.jm._handle_comm_message(create_jm_message("all_status"))
    #     msg = self.jm._comm.last_message
    #     job_data = msg.get('data', {}).get('content', {})
    #     job_ids = list(job_data.keys())
    #     # assert that each job info that's flagged for lookup gets returned
    #     jobs_to_lookup = [j for j in self.jm._running_jobs.keys()]
    #     self.assertCountEqual(job_ids, jobs_to_lookup)
    #     for job_id in job_ids:
    #         self.assertTrue(self.validate_status_message(job_data[job_id]))
    #     self.jm._comm.clear_message_cache()
    # @mock.patch('biokbase.narrative.jobs.jobmanager.clients.get', get_mock_client)
    # def test_single_job_status_fetch(self):
    #     new_job = phony_job()
    #     self.jm.register_new_job(new_job)
    #     self.jm._handle_comm_message(create_jm_message("job_status", new_job.job_id))
    #     msg = self.jm._comm.last_message
    #     self.assertEqual(msg['data']['msg_type'], "job_status")
    #     # self.assertTrue(self.validate_status_message(msg['data']['content']))
    #     self.jm._comm.clear_message_cache()
    # Should "fail" based on sent message.
    # def test_job_message_bad_id(self):
    #     self.jm._handle_comm_message(create_jm_message("foo", job_id="not_a_real_job"))
    #     msg = self.jm._comm.last_message
    #     self.assertEqual(msg['data']['msg_type'], 'job_does_not_exist')
    def test_cancel_job_lookup(self):
        pass
    # @mock.patch('biokbase.narrative.jobs.jobmanager.clients.get', get_mock_client)
    # def test_stop_single_job_lookup(self):
    #     # Set up and make sure the job gets returned correctly.
    #     new_job = phony_job()
    #     phony_id = new_job.job_id
    #     self.jm.register_new_job(new_job)
    #     self.jm._handle_comm_message(create_jm_message("start_job_update", job_id=phony_id))
    #     self.jm._handle_comm_message(create_jm_message("stop_update_loop"))
    #     self.jm._lookup_all_job_status()
    #     msg = self.jm._comm.last_message
    #     self.assertTrue(phony_id in msg['data']['content'])
    #     self.assertEqual(msg['data']['content'][phony_id].get('listener_count', 0), 1)
    #     self.jm._comm.clear_message_cache()
    #     self.jm._handle_comm_message(create_jm_message("stop_job_update", job_id=phony_id))
    #     self.jm._lookup_all_job_status()
    #     msg = self.jm._comm.last_message
    #     self.assertTrue(self.jm._running_jobs[phony_id]['refresh'] == 0)
    #     self.assertIsNone(msg)
    @mock.patch(
        "biokbase.narrative.jobs.jobmanager.clients.get", get_failing_mock_client
    )
    def test_initialize_jobs_ee2_fail(self):
        # init jobs should fail. specifically, ee2.check_workspace_jobs should error.
        with self.assertRaises(NarrativeException) as e:
            self.jm.initialize_jobs()
        self.assertIn("Job lookup failed", str(e.exception))
# Run the test suite when executed as a script.
if __name__ == "__main__":
    unittest.main()
|
if not self.isPause: # se o jogador nao 'pausou' o jogo
if self.gameOver: # se fim de jogo
if self.goAlpha == 0:
pygame.mixer.music.stop()
self.gameOverSound.play()
self.goAlpha += 2
if self.goAlpha > 200 and (self.inputState.K_ESCAPE or self.inputState.K_ENTER or self.inputState.K_SPACE or self.inputState.K_a):
self.gameOverSound.fadeout(600)
# depois de morrer abre o menu de fase
self.screenManager.setBaseObjectToUpdate(Menu.StagesMenu(self.screenManager))
pygame.mixer.music.load("res/sound/title.ogg")
pygame.mixer.music.play(-1)
elif self.isFinished(): # se passou de fase
if self.flCount == 0:
pygame.mixer.music.stop()
self.finishLevelSound.play()
elif self.flCount > self.screenManager.resolution[0]:
self.finishLevelSound.fadeout(900)
# depois de terminar a fase abre o menu de fase
self.screenManager.setBaseObjectToUpdate(Menu.StagesMenu(self.screenManager))
pygame.mixer.music.load("res/sound/title.ogg")
pygame.mixer.music.play(-1)
self.flCount += 20
else: # update da propria fase
self.handlePlayer()
self.handleGameObjects()
if self.gameObjectsToRemove.__len__() > 0:
self.kill()
# referencePoint = centro do jogador no eixo x, um pouco acima do jogador no eixo y
self.referencePoint.x = self.player.position.x + self.player.size.x / 2
self.referencePoint.y = self.player.position.y + self.player.size.y / 2 # esta tirando 200 pro jogador ficar um pouco abaixo do centro da camera
self.screenManager.updateCamera(self.stageSize.t(), self.referencePoint)
self.putEnemies()
    # Handles the player: runs its updates and related processing.
def handlePlayer(self):
# atualisa o player
self.updateGameObject(self.player)
playerPosX = self.player.position.x
# nao deixa o player sair do mapa na horizontal
if playerPosX < 0:
self.player.position.x = 0
elif playerPosX + self.player.size.x > self.stageSize.x:
self.player.position.x = self.stageSize.x - self.player.size.x
# ve se o player saiu do mapa na vertical
playerOutOfStage = self.updateStageModifiers(self.player)
if playerOutOfStage:
self.player.loseLife()
    # Handles the game objects: runs their updates and related processing.
    def handleGameObjects(self):
        """Update every non-player GameObject and resolve its collisions."""
        # Update the other GameObjects.
        for go in self.gameObjects:
            goOutOfStage = False
            if self.screenManager.isInsideScreen(go.position, self.modifierOffset):
                # 'go' is within the (extended) screen area.
                if self.screenManager.isInsideScreen(go.position, self.updateOffset):
                    self.updateGameObject(go)
                    # 'go' is inside the update offset: apply stage physics too.
                    goOutOfStage = self.updateStageModifiers(go)
                # Outside the update offset: neither update 'go' nor apply
                # the stage modifiers.
                if go.type == Enemy.SHOOTER:
                    go.updateShot()  # update the enemy's bullets
                    goAmmunition = go.shot.ammunition
                    playerCollided = self.gameObjectAndBulletsCollided(self.player, goAmmunition)
                    if playerCollided:
                        self.player.loseLife()
            if goOutOfStage:
                self.addToKill(go)
            if go.state == None:
                self.addToKill(go)
            elif go.state != Enemy.DYING:
                # Live enemy: body contact costs the player a life...
                if self.gameObjectsCollided(go, self.player):
                    self.player.loseLife()
                # ...and the player's bullets can kill it.
                playerAmmunition = self.player.shot.ammunition
                goCollided = self.gameObjectAndBulletsCollided(go, playerAmmunition)
                if goCollided:
                    self.killSound.play()
                    go.changeState(Enemy.DYING)
    # Updates the gameObject.
    # param: gameObject - a GameObject to be updated
def updateGameObject(self, gameObject):
gameObject.previousPosition = gameObject.position.copy()
| gameObject.update()
    # Applies the stage modifiers to the gameObject.
    # param: gameObject - a GameObject to receive the stage's effects
    # returns whether the gameObject left the map
def updateStageModifiers(self, gameObject):
self.applyGravity(gameObject)
self.applyCollision(gameObject)
gameObject.postCollision()
return self.isOutOfStage(gameObject)
    # Places enemies in the stage depending on the player's position.
def putEnemies(self):
if self.enemyList.__len__() > 0:
enemy = self.enemyList[0]
enemyPos = enemy[0]
enemyDirection = enemy[2]
if enemyDirection: # se for um cara que corre da esquerda pra direita
enemyPos.x = self.screenManager.camera.x - 200
if self.screenManager.isInsideScreen(enemyPos, self.updateOffset):
enemyType = enemy[1]
# adiciona o inimigo na fase
self.gameObjects.append( self.screenManager.factory.getEnemy(self, enemyPos, enemyType, enemyDirection) )
# retira o inimigo da lista de inimigos restantes
self.enemyList.remove(enemy)
# override GameObject.draw()
    def draw(self):
        """Draw the map, the player, visible GameObjects and the HUD overlays."""
        self.drawMap()
        self.player.draw()
        for go in self.gameObjects:
            if self.screenManager.isInsideScreen(go.position, go.size.t()):
                go.draw()
                if go.type == Enemy.SHOOTER:
                    go.drawShot() # draw the enemy's bullets
        lifeSurface = self.player.getLifeSurface()
        if lifeSurface != None: # the player still has lives
            self.screenManager.blitNonCameraRelative(lifeSurface, (25,15))
        else: # the player has died
            self.gameOver = True
            # goAlpha fades the game-over image in over successive frames.
            self.gameOverImage.set_alpha(self.goAlpha)
            self.screenManager.blitNonCameraRelative(self.gameOverImage, (0,0))
        if self.isFinished():
            # Level-complete wipe: draw black columns up to flCount.
            surface = self.screenManager.screen
            res1 = self.screenManager.resolution[1]
            for i in range(0, self.flCount):
                pygame.draw.line( surface, Resources.BLACK, (i, 0), (i, res1) )
    # Draws the map on screen according to the camera position.
def drawMap(self):
self.screenManager.blitNonCameraRelative(self.background, (0,0))
camRow = self.screenManager.camera.y / self.tileHeight
camCol = self.screenManager.camera.x/self.tileWidth
camEndRow = (self.screenManager.camera.y + self.screenManager.resolution[1])/self.tileHeight
camEndCol = (self.screenManager.camera.x + self.screenManager.resolution[0])/self.tileWidth
for row in range(camRow, camEndRow + 1):
for col in range(camCol, camEndCol + 1):
# verifica se nao esta fora de indice
if row >= 0 and col >= 0 and row < self.mapDic['mh'] and col < self.mapDic['mw']:
# posicao do tile na lista de tiles 'tileList'
tilePos = self.mapDic['tl'][row][col] - 1
if tilePos > -1:
pos = Vector2.Vector2(col * self.tileWidth ,row * self.tileHeight)
self.screenManager.blit(self.tileList[tilePos], pos)
# aplica a gravidade no objeto
# param: gameObject - Um objeto do tipo GameObject para a posicao ser alter |
test_framework.test_framework import OpenBazaarTestFramework, TestFailure
class CompleteDirectOnlineTest(OpenBazaarTestFramework):
    def __init__(self):
        """Set up a two-node (buyer and vendor) test scenario."""
        super().__init__()
        self.num_nodes = 2
def run_test(self):
alice = self.nodes[0]
bob = self.nodes[1]
# generate some coins and send them to bob
time.sleep(4)
api_url = bob["gateway_url"] + "wallet/address"
r = requests.get(api_url)
if r.status_code == 200:
resp = json.loads(r.text)
address = resp["address"]
elif r.status_code == 404:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Address endpoint not found")
else:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Unknown response")
self.send_bitcoin_cmd("sendtoaddress", address, 10)
time.sleep(20)
# post listing to alice
with open('testdata/listing.json') as listing_file:
listing_json = json.load(listing_file, object_pairs_hook=OrderedDict)
api_url = alice["gateway_url"] + "ob/listing"
r = requests.post(api_url, data=json.dumps(listing_json, indent=4))
if r.status_code == 404:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Listing post endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("CompleteDirectOnlineTest - FAIL: Listing POST failed. Reason: %s", resp["reason"])
resp = json.loads(r.text)
slug = resp["slug"]
time.sleep(4)
# get listing hash
api_url = alice["gateway_url"] + "ipns/" + alice["peerId"] + "/listings.json"
r = requests.get(api_url)
if r.status_code != 200:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Couldn't get listing index")
resp = json.loads(r.text)
listingId = resp[0]["hash"]
# bob send order
with open('testdata/order_direct.json') as order_file:
order_json = json.load(order_file, object_pairs_hook=OrderedDict)
order_json["items"][0]["listingHash"] = listingId
api_url = bob["gateway_url"] + "ob/purchase"
r = requests.post(api_url, data=json.dumps(order_json, indent=4))
if r.status_code == 404:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Purchase post endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("CompleteDirectOnlineTest - FAIL: Purchase POST failed. Reason: %s", resp["reason"])
resp = json.loads(r.text)
orderId = resp["orderId"]
payment_address = resp["paymentAddress"]
payment_amount = resp["amount"]
# check the purchase saved correctly
api_url = bob["gateway_url"] + "ob/order/" + orderId
r = requests.get(api_url)
if r.status_code != 200:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Couldn't load order from Bob")
resp = json.loads(r.text)
if resp["state"] != "AWAITING_PAYMENT":
raise TestFailure("CompleteDirectOnlineTest - FAIL: Bob purchase saved in incorrect state")
if resp["funded"] == True:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Bob incorrectly saved as fund | ed")
# check the sale saved correctly
api_url = alice["gateway_url"] + "ob/order/" + orderId
r = requests.get(api_url)
if r.status_code != 200:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Couldn't load order from Alice")
resp = json.loads(r.text)
if resp["state"] != "AWAITING_PAYMENT":
raise TestFailure("CompleteDirectOnlineTest - FAIL: Alice purchase saved in incorrect state")
if resp["funded"] == | True:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Alice incorrectly saved as funded")
# fund order
spend = {
"address": payment_address,
"amount": payment_amount,
"feeLevel": "NORMAL"
}
api_url = bob["gateway_url"] + "wallet/spend"
r = requests.post(api_url, data=json.dumps(spend, indent=4))
if r.status_code == 404:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Spend post endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("CompleteDirectOnlineTest - FAIL: Spend POST failed. Reason: %s", resp["reason"])
time.sleep(20)
# check bob detected payment
api_url = bob["gateway_url"] + "ob/order/" + orderId
r = requests.get(api_url)
if r.status_code != 200:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Couldn't load order from Bob")
resp = json.loads(r.text)
if resp["state"] != "AWAITING_FULFILLMENT":
raise TestFailure("CompleteDirectOnlineTest - FAIL: Bob failed to detect his payment")
if resp["funded"] == False:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Bob incorrectly saved as unfunded")
# check alice detected payment
api_url = alice["gateway_url"] + "ob/order/" + orderId
r = requests.get(api_url)
if r.status_code != 200:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Couldn't load order from Alice")
resp = json.loads(r.text)
if resp["state"] != "AWAITING_FULFILLMENT":
raise TestFailure("CompleteDirectOnlineTest - FAIL: Alice failed to detect payment")
if resp["funded"] == False:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Alice incorrectly saved as unfunded")
# alice send order fulfillment
with open('testdata/fulfillment.json') as fulfillment_file:
fulfillment_json = json.load(fulfillment_file, object_pairs_hook=OrderedDict)
fulfillment_json["orderId"] = orderId
fulfillment_json["slug"] = slug
api_url = alice["gateway_url"] + "ob/orderfulfillment"
r = requests.post(api_url, data=json.dumps(fulfillment_json, indent=4))
if r.status_code == 404:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Fulfillment post endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
raise TestFailure("CompleteDirectOnlineTest - FAIL: Fulfillment POST failed. Reason: %s", resp["reason"])
time.sleep(5)
# check bob received fulfillment
api_url = bob["gateway_url"] + "ob/order/" + orderId
r = requests.get(api_url)
if r.status_code != 200:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Couldn't load order from Bob")
resp = json.loads(r.text)
if resp["state"] != "FULFILLED":
raise TestFailure("CompleteDirectOnlineTest - FAIL: Bob failed to detect order fulfillment")
# check alice set fulfillment correctly
api_url = alice["gateway_url"] + "ob/order/" + orderId
r = requests.get(api_url)
if r.status_code != 200:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Couldn't load order from Bob")
resp = json.loads(r.text)
if resp["state"] != "FULFILLED":
raise TestFailure("CompleteDirectOnlineTest - FAIL: Alice failed to order fulfillment")
# bob send order completion
oc = {
"orderId": orderId,
"ratings": [
{
"slug": slug,
"overall": 4,
"quality": 5,
"description": 5,
"customerService": 4,
"deliverySpeed": 3,
"review": "I love it!"
}
]
}
api_url = bob["gateway_url"] + "ob/ordercompletion"
r = requests.post(api_url, data=json.dumps(oc, indent=4))
if r.status_code == 404:
raise TestFailure("CompleteDirectOnlineTest - FAIL: Completion post endpoint not found")
elif r.status_code != 200:
resp = json.loads(r.text)
|
#!/usr/bin/python3
def vogal(v):
    """Return True if v is a single ASCII vowel (either case), else False.

    Any other value — multi-character strings, consonants, '' — yields False.
    Set membership replaces the original list scan and the redundant
    if/else that returned the boolean explicitly.
    """
    return v in {'a', 'e', 'i', 'o', 'u', 'A', 'E', 'I', 'O', 'U'}
|
.selected[0].objkey)
elif menuitem.returnkey == 'popupMissionSet':
self.set_mission(obj.selected[0].objkey, obj.selected[0].extra_info)
elif menuitem.returnkey == 'popupMissionRemove':
self.remove_mission(obj.selected[0].objkey, obj.selected[0].extra_info)
elif menuitem.returnkey == 'popupMissionMove':
self.move_mission(obj.selected[0].objkey, obj.selected[0].extra_info)
elif menuitem.returnkey == 'popupFenceRemove':
self.remove_fencepoint(obj.selected[0].objkey, obj.selected[0].extra_info)
elif menuitem.returnkey == 'popupFenceMove':
self.move_fencepoin | t(obj.selected[0].objkey, obj.selected[0].extra_info)
elif menuitem.returnkey == 'showPosition':
self.show_position()
    def map_callback(self, obj):
        '''called when an event happens on the slipmap

        Dispatches menu events, then handles mouse clicks: completing a
        pending rally/waypoint/fence move, updating the click position, and
        driving interactive line drawing.
        '''
        from MAVProxy.modules.mavproxy_map import mp_slipmap
        if isinstance(obj, mp_slipmap.SlipMenuEvent):
            self.handle_menu_event(obj)
            return
        if not isinstance(obj, mp_slipmap.SlipMouseEvent):
            return
        # Left click completes a pending "rally move"; right click cancels it.
        if obj.event.m_leftDown and self.moving_rally is not None:
            self.click_position = obj.latlon
            self.click_time = time.time()
            self.mpstate.functions.process_stdin("rally move %u" % self.moving_rally)
            self.moving_rally = None
            return
        if obj.event.m_rightDown and self.moving_rally is not None:
            print("Cancelled rally move")
            self.moving_rally = None
            return
        # Same pattern for waypoint and fence-point moves.
        if obj.event.m_leftDown and self.moving_wp is not None:
            self.click_position = obj.latlon
            self.click_time = time.time()
            self.mpstate.functions.process_stdin("wp move %u" % self.moving_wp)
            self.moving_wp = None
            return
        if obj.event.m_leftDown and self.moving_fencepoint is not None:
            self.click_position = obj.latlon
            self.click_time = time.time()
            # Fence points are reported 0-based but addressed 1-based.
            self.mpstate.functions.process_stdin("fence move %u" % (self.moving_fencepoint+1))
            self.moving_fencepoint = None
            return
        if obj.event.m_rightDown and self.moving_wp is not None:
            print("Cancelled wp move")
            self.moving_wp = None
            return
        if obj.event.m_rightDown and self.moving_fencepoint is not None:
            print("Cancelled fence move")
            self.moving_fencepoint = None
            return
        elif obj.event.m_leftDown:
            # Debounce: only record a new click position after 0.1s.
            if time.time() - self.click_time > 0.1:
                self.click_position = obj.latlon
                self.click_time = time.time()
            self.drawing_update()
            if self.module('misseditor') is not None:
                self.module('misseditor').update_map_click_position(self.click_position)
        if obj.event.m_rightDown:
            # Right click ends an in-progress drawing, else records the click.
            if self.draw_callback is not None:
                self.drawing_end()
                return
            if time.time() - self.click_time > 0.1:
                self.click_position = obj.latlon
                self.click_time = time.time()
def unload(self):
'''unload module'''
self.mpstate.map.close()
self.mpstate.map = None
self.mpstate.map_functions = {}
def idle_task(self):
now = time.time()
if self.last_unload_check_time + self.unload_check_interval < now:
self.last_unload_check_time = now
if not self.mpstate.map.is_alive():
self.needs_unloading = True
    def create_vehicle_icon(self, name, colour, follow=False, vehicle_type=None):
        '''add a vehicle to the map

        Icons are cached in self.have_vehicle; re-adding the same name with
        the same vehicle type is a no-op. vehicle_type defaults to the type
        detected from heartbeats.
        '''
        from MAVProxy.modules.mavproxy_map import mp_slipmap
        if vehicle_type is None:
            vehicle_type = self.vehicle_type_name
        if name in self.have_vehicle and self.have_vehicle[name] == vehicle_type:
            return
        self.have_vehicle[name] = vehicle_type
        # Icon filename convention: "<colour><vehicle_type>.png".
        icon = self.mpstate.map.icon(colour + vehicle_type + '.png')
        self.mpstate.map.add_object(mp_slipmap.SlipIcon(name, (0,0), icon, layer=3, rotation=0, follow=follow,
                                                        trail=mp_slipmap.SlipTrail()))
    def drawing_update(self):
        '''update line drawing

        Appends the latest click to the in-progress polyline and redraws it
        once at least two points exist. No-op unless drawing is active.
        '''
        from MAVProxy.modules.mavproxy_map import mp_slipmap
        if self.draw_callback is None:
            return
        self.draw_line.append(self.click_position)
        if len(self.draw_line) > 1:
            self.mpstate.map.add_object(mp_slipmap.SlipPolygon('drawing', self.draw_line,
                                                               layer='Drawing', linewidth=2, colour=(128,128,255)))
    def drawing_end(self):
        '''end line drawing

        Hands the collected points to the registered callback, restores the
        default popup, and clears the temporary drawing layer.
        '''
        from MAVProxy.modules.mavproxy_map import mp_slipmap
        if self.draw_callback is None:
            return
        self.draw_callback(self.draw_line)
        self.draw_callback = None
        self.mpstate.map.add_object(mp_slipmap.SlipDefaultPopup(self.default_popup, combine=True))
        self.mpstate.map.add_object(mp_slipmap.SlipClearLayer('Drawing'))
def draw_lines(self, callback):
'''draw a series of connected lines on the map, calling callback when done'''
from MAVProxy.modules.mavproxy_map import mp_slipmap
self.draw_callback = callback
self.draw_line = []
self.mpstate.map.add_object(mp_slipmap.SlipDefaultPopup(None))
    def cmd_set_home(self, args):
        '''called when user selects "Set Home" on map; sends
        MAV_CMD_DO_SET_HOME with the clicked position and the terrain
        elevation at that point as altitude'''
        (lat, lon) = (self.click_position[0], self.click_position[1])
        alt = self.ElevationMap.GetElevation(lat, lon)
        print("Setting home to: ", lat, lon, alt)
        self.master.mav.command_long_send(
            self.settings.target_system, self.settings.target_component,
            mavutil.mavlink.MAV_CMD_DO_SET_HOME,
            1, # confirmation (this argument slot is COMMAND_LONG `confirmation`)
            0, # param1
            0, # param2
            0, # param3
            0, # param4
            lat, # lat (param5)
            lon, # lon (param6)
            alt) # altitude (param7)
def set_secondary_vehicle_position(self, m):
'''show 2nd vehicle on map'''
if m.get_type() != 'GLOBAL_POSITION_INT':
return
(lat, lon, heading) = (m.lat*1.0e-7, m.lon*1.0e-7, m.hdg*0.01)
if abs(lat) < 1.0e-3 and abs(lon) > 1.0e-3:
return
# hack for OBC2016
self.create_vehicle_icon('VehiclePos2', 'blue', follow=False, vehicle_type='heli')
self.mpstate.map.set_position('VehiclePos2', (lat, lon), rotation=heading)
def mavlink_packet(self, m):
'''handle an incoming mavlink packet'''
from MAVProxy.modules.mavproxy_map import mp_slipmap
if m.get_type() == "HEARTBEAT":
if m.type in [mavutil.mavlink.MAV_TYPE_FIXED_WING]:
self.vehicle_type_name = 'plane'
elif m.type in [mavutil.mavlink.MAV_TYPE_GROUND_ROVER,
mavutil.mavlink.MAV_TYPE_SURFACE_BOAT,
mavutil.mavlink.MAV_TYPE_SUBMARINE]:
self.vehicle_type_name = 'rover'
elif m.type in [mavutil.mavlink.MAV_TYPE_QUADROTOR,
mavutil.mavlink.MAV_TYPE_HEXAROTOR,
mavutil.mavlink.MAV_TYPE_OCTOROTOR,
mavutil.mavlink.MAV_TYPE_TRICOPTER]:
self.vehicle_type_name = 'copter'
elif m.type in [mavutil.mavlink.MAV_TYPE_COAXIAL]:
self.vehicle_type_name = 'singlecopter'
elif m.type in [mavutil.mavlink.MAV_TYPE_HELICOPTER]:
self.vehicle_type_name = 'heli'
elif m.type in [mavutil.mavlink.MAV_TYPE_ANTENNA_TRACKER]:
self.vehicle_type_name = 'antenna'
# this is the beginnings of allowing support for multiple vehicles
# in the air at the same time
vehicle = 'Vehicle%u' % m.get_srcSystem()
if m.get_type() == "SIMSTATE" and self.map_settings.showsimpos:
self.create_vehicle_icon('Sim' + vehicle, 'green')
self.mpstate.map.set_posi |
from algorithms.structures.disjoint_set import DisjointSet
def test_disjoint_set():
    """Exercise union/find behaviour of DisjointSet."""
    elements = list(range(10))
    ds = DisjointSet(elements)
    # distinct elements start out in distinct sets
    assert ds.find_set(1) != ds.find_set(2)
    ds.union(1, 2)
    assert ds.find_set(1) == ds.find_set(2)
    assert ds.find_set(1) != ds.find_set(3)
    # union through a member merges transitively
    ds.union(2, 3)
    assert ds.find_set(1) == ds.find_set(2)
    assert ds.find_set(2) == ds.find_set(3)
|
import re
from lib.common import helpers
class Module:
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'Invoke-ShellcodeMSIL',
'Author': ['@mattifestation'],
'Description': ('Execute shellcode within the context of the running PowerShell '
'process without making any Win32 function calls. Warning: This script has '
'no way to validate that your shellcode is 32 vs. 64-bit!'
'Note: Your shellcode must end in a ret (0xC3) and maintain proper stack '
'alignment or PowerShell will crash!'),
'Background' : False,
'OutputExtension' : None,
'NeedsAdmin' : False,
'OpsecSafe' : True,
'Language' : 'powershell',
'MinLanguageVersion' : '2',
'Comme | nts': [
'http://www.exploit-monday.com',
'https://github.com/mattifestation/PowerSploit/blob/master/CodeExecution/Invoke-ShellcodeMSIL.ps1'
]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required | , default_value}
'Agent' : {
'Description' : 'Agent to run module on.',
'Required' : True,
'Value' : ''
},
'Shellcode' : {
'Description' : 'Shellcode to inject, 0x00,0x0a,... format.',
'Required' : True,
'Value' : ''
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self, obfuscate=False, obfuscationCommand=""):
# read in the common module source code
moduleSource = self.mainMenu.installPath + "/data/module_source/code_execution/Invoke-ShellcodeMSIL.ps1"
if obfuscate:
helpers.obfuscate_module(moduleSource=moduleSource, obfuscationCommand=obfuscationCommand)
moduleSource = moduleSource.replace("module_source", "obfuscated_module_source")
try:
f = open(moduleSource, 'r')
except:
print helpers.color("[!] Could not read module source path at: " + str(moduleSource))
return ""
moduleCode = f.read()
f.close()
script = moduleCode
scriptEnd = "Invoke-ShellcodeMSIL"
for option,values in self.options.iteritems():
if option.lower() != "agent":
if values['Value'] and values['Value'] != '':
if option.lower() == "shellcode":
# transform the shellcode to the correct format
sc = ",0".join(values['Value'].split("\\"))[1:]
scriptEnd += " -" + str(option) + " @(" + sc + ")"
if obfuscate:
scriptEnd = helpers.obfuscate(psScript=scriptEnd, obfuscationCommand=obfuscationCommand)
script += scriptEnd
return script
|
14,
'mov m,e': 115,
'mov m,h': 116,
'mov m,l': 117,
'hlt': 118,
'mov m,a': 119,
'mov a,b': 120,
'mov a,c': 121,
'mov a,d': 122,
'mov a,e': 123,
'mov a,h': 124,
'mov a,l': 125,
'mov a,m': 126,
'mov a,a': 127,
'add b': 128,
'add c': 129,
'add d': 130,
'add e': 131,
'add h': 132,
'add l': 133,
'add m': 134,
'add a': 135,
'adc b': 136,
'adc c': 137,
'adc d': 138,
'adc e': 139,
'adc h': 140,
'adc l': 141,
'adc m': 142,
'adc a': 143,
'sub b': 144,
'sub c': 145,
'sub d': 146,
'sub e': 147,
'sub h': 148,
'sub l': 149,
'sub m': 150,
'sub a': 151,
'sbb b': 152,
'sbb c': 153,
'sbb d': 154,
'sbb e': 155,
'sbb h': 156,
'sbb l': 157,
'sbb m': 158,
'sbb a': 159,
'ana b': 160,
'ana c': 161,
'ana d': 162,
'ana e': 163,
'ana h': 164,
'ana l': 165,
'ana m': 166,
'ana a': 167,
'xra b': 168,
'xra c': 169,
'xra d': 170,
'xra e': 171,
'xra h': 172,
'xra l': 173,
'xra m': 174,
'xra a': 175,
'ora b': 176,
'ora c': 177,
'ora d': 178,
'ora e': 179,
'ora h': 180,
'ora l': 181,
'ora m': 182,
'ora a': 183,
'cmp b': 184,
'cmp c': 185,
'cmp d': 186,
'cmp e': 187,
'cmp h': 188,
'cmp l': 189,
'cmp m': 190,
'cmp a': 191,
'rnz': 192,
'pop b': 193,
}
# Instruction table dictionary that expects a secondary parameter (8-bit)
varInstructionTable_EigthBit = {
    # mnemonic -> opcode (decimal); the 8-bit operand that follows the
    # mnemonic in the source is emitted as the next byte of the ROM
    'mvi b,': 6,
    'mvi c,': 14,
    'mvi d,': 22,
    'mvi e,': 30,
    'mvi h,': 38,
    'mvi l,': 46,
    'sta': 50,
    'mvi m,': 54,
    'lda': 58,
    'mvi a,': 62,
    'adi': 198,
    'aci': 206,
    'out': 211,
    'sui': 214,
    'in': 219,
    'sbi': 222,
    'ani': 230,
    'xri': 238,
    'ori': 246,
    'cpi': 254
    }
# Instruction table dictionary that expects a secondary parameter (16-bit)
varInstructionTable_SixteenBit = {
    # mnemonic -> opcode (decimal); these instructions take a 16-bit
    # operand (address or immediate) emitted as the two following bytes
    'lxi b,': 1,
    'lxi d,': 17,
    'lxi h,': 33,
    'shld': 34,
    'lhld': 42,
    'lxi sp,': 49,
    'jnz': 194,
    'jmp': 195,
    'cnz': 196,
    'jz': 202,
    'cz': 204,
    'call': 205,
    'jnc': 210,
    'cnc': 212,
    'cc': 220,
    'jc': 218,
    'jpo': 226,
    'cpo': 228,
    'jpe': 234,
    'cpe': 236,
    'jp': 242,
    'cp': 244,
    'jm': 250,
    'cm': 252
    }
helpArgVariants = ['-h', '--h', '-help', '--help']  # flag spellings that print usage instead of assembling
######### FUNCTIONS #########
# Print a small ASCII art banner
def banner():
    """Print a small colourised ASCII-art banner to stdout (colorama)."""
    print(Style.DIM)
    print(' ___________________________')
    print(' / /\\')
    print(' / sadboyzvone\'s _/ /\\')
    print(' / Intel 8080 / \/')
    print(' / Assembler /\\')
    print('/___________________________/ /')
    print('\___________________________\/')
    print(' \\ \\ \\ \\ \\ \\ \\ \\ \\ \\ \\ \\ \\ \\ \\'
          + Style.RESET_ALL + Style.BRIGHT)
    print(Fore.WHITE + '\nPowered by ' + Fore.BLUE + 'Pyt'
          + Fore.YELLOW + 'hon' + Fore.WHITE
          + '\nCopyright (C) 2017, Zvonimir Rudinski')
# Print usage information
def printHelp():
    """Print colourised usage information to stdout."""
    print('\nThis ' + Fore.BLUE + 'Intel' + Fore.WHITE
          + ' 8080 assembler was made for ' + Fore.BLUE + 'Project '
          + Fore.YELLOW + 'Week' + Fore.WHITE + ' at my school.')
    print('It is written in ' + Fore.BLUE + 'Pyt' + Fore.YELLOW + 'hon'
          + Fore.WHITE)
    print('Modules: ' + Fore.RED + 'Co' + Fore.BLUE + 'lo'
          + Fore.YELLOW + 'ra' + Fore.GREEN + 'ma' + Fore.WHITE)
    print('\nPass a file path as an arguement.')
# Main function
def run(fileNameArg):
banner() # Print banner
# File name
fileName = None
# Variable and label info
labelMap = {}
variableMap = {}
# Program counter
programCounter = 0
try:
if fileNameArg in helpArgVariants:
printHelp() # Print help then exit
exit(0)
else:
fileName = fileNameArg # Argument is provided
print('Trying to open ' + Fore.YELLOW +
'\'' + fileName + '\'' + Fore.WHITE)
if path.isfile(fileName) is False: # Check if the file exists
print(Fore.RED + 'Fatal error: ' + Fore.WHITE +
'File not found: ' + Fore.YELLOW + '\'' + fileName + '\'')
raise IOError # It doesn't raise an exception
# Read in the source code from the file
with open(fileName, 'r') as sourceFile:
sourceCode = sourceFile.readlines()
# Strip the newlines
sourceCode = map(lambda sc: sc.strip(), sourceCode)
# Start compiling the code
with open(fileName + '.rom', 'wb+') as romFile:
# Check the line
for (i, scLine) in enumerate(sourceCode):
scLine = scLine.lower() # Turn it to lower case for easier lookup
# Check for ORG
if scLine.split(' ')[0] == "org":
programCounter = int(scLine.split(' ')[1].zfill(4))
print("ORG set to " + str(programCounter))
romFile.seek(0,0)
for x in range(0,programCounter):
romFile.write(pack('B', instructionTable['nop']))
romFile.seek(programCounter,0)
continue
# Check if it's a label
if len(scLine.split(':')) > 1:
print('Updating labels')
labelMap[scLine.split(':')[0]] = unHex(
str(programCounter).zfill(4))
continue
# Check if it's in the instruction table
if scLine in instructionTable:
# Write the opcode
romFile.write(pack('B', instructionTable[scLine]))
programCounter += 1 # 1 byte
continue
|
elif scLine.split(' ')[1] == 'equ':
# Check if it's a variable declaration
if int(scLine.split(' ')[2]) >= 2 ** 8:
# Number is out of bounds for Intel 8080
print(Fore.RED + 'Variable too large: ' +
scLine + ' : Line ' + | str(i + 1))
raise SyntaxError
variableMap[scLine.split(' ')[0]] = unHex(scLine.split(
' ')[2].ljust(4, '0')) # It is, save it to a dictionary
print('Updating variables')
continue
else:
# Check if it's in a instruction table (8-bit)
for tableKey in varInstructionTable_EigthBit.keys():
if scLine.startswith(tableKey):
# Write the opcode
romFile.write(
pack('B', varInstructionTable_EigthBit[tableKey]))
try:
# Check if it's a variable
variable = (scLine.split(',')[1].strip(
) if ',' in scLine else scLine.split()[1].strip())
if variable in variableMap.keys(): # If it is get it's value from the dict
romFile.write(variableMap[variable][:1]) # Python3 had an error if I tried `variableMap[variable][0]` so I'll just start from the beginning and stop before the end...it's a hack I know ;)
elif variable in labelMap.keys():
# It it is get it's value from the dict
romFile.write(labelMap[variable])
else:
# Else write it down
romFile.write(unHex(variable))
programCounter += 2 # 2 bytes
break
except (ValueError, TypeError):
# That's not even a number...or a variable name
|
from twi | sted.trial import unittest
from twistedchecke | r.checkers.docstring import DocstringChecker
class DocstringTestCase(unittest.TestCase):
"""
Test for twistedchecker.checkers.docstring
"""
def test_getLineIndent(self):
"""
Test of twistedchecker.checkers.docstring._getLineIndent.
"""
checker = DocstringChecker()
indentNoSpace = checker._getLineIndent("foo")
indentTwoSpaces = checker._getLineIndent(" foo")
indentFourSpaces = checker._getLineIndent(" foo")
self.assertEqual(indentNoSpace, 0)
self.assertEqual(indentTwoSpaces, 2)
self.assertEqual(indentFourSpaces, 4)
|
import os.path
# Translate every *.prog file below this script's directory into a *.py
# file: ';;'-separated statements become lines, '{' opens an indented
# block, '}' closes one, and the Hungarian keywords CIKLUS/ELAGAZAS map
# to for/if.
for root, dirs, files in os.walk(os.path.dirname(os.path.realpath(__file__))):
    for file in files :
        if file.endswith('.prog') :
            os.chdir(root)
            f1 = open(file, 'r')
            output = file.split(".")
            # output file keeps the base name, with a .py extension
            f2 = open(output[0]+'.py', 'w')
            indent = 0
            pad = "    "
            temp = f1.read().splitlines()
            for line in temp:
                # ';;' separates multiple statements on one source line
                tags = line.split(";;")
                for tag in tags:
                    # '{' becomes ':' plus a newline (block opener)
                    a = tag.replace("{", ":\n")
                    a = a.replace("CIKLUS", "for")
                    a = a.replace("ELAGAZAS", "if")
                    if "\n" in a:
                        # a block opener was present: emit each part at the
                        # current indent, adjusting depth as we go
                        t = a.split("\n")
                        for t1 in t:
                            for i in range(indent):
                                t1 = pad + t1
                            # entering a for/if body increases the depth
                            if "for" in t1 or "if" in t1:
                                indent += 1
                            # '}' closes one level per occurrence
                            if "}" in t1:
                                indent -= t1.count("}")
                                t1 = t1.replace("}" ,"")
                            f2.write(t1 + '\n')
                    else:
                        for i in range(indent):
                            a = pad + a
                        if "}" in a:
                            indent -= a.count("}")
                            a = a.replace("}", "")
                        f2.write(a + '\n')
            f1.close()
            f2.close()
|
png
except ImportError:
png = False
import pygtk
pygtk.require('2.0')
import gtk
import gtk.gtkgl
import gtk.gtkgl.apputils
from OpenGL.GL import *
from OpenGL.GLU import *
import openglutils
class MatrixWidget(gtk.DrawingArea, gtk.gtkgl.Widget):
"""OpenGL widget drawing the view of the matrices of the world."""
default_max_width = 1024
default_max_height = 768
default_min_width = 200
default_min_height = 150
    def __init__(self, world, pointsize=None, offset=None):
        """Build the widget for `world`.

        world     -- object exposing get_shape() -> (rows, cols)
        pointsize -- cell size in pixels; if None a size fitting the
                     default max widget size is derived below
        offset    -- [row, col] view origin; if None it is computed to
                     center the matrices in the default size
        """
        gtk.DrawingArea.__init__(self)
        #gtk.gtkgl.Widget.__init__(self) # (abstract class)
        # Object initialisation
        self.__world = world
        self.__original_pointsize = pointsize
        # zoom is quantised: pointsize = original * factor ** power
        self.__pointsize_power = 0
        self.__pointsize_power_min = -50
        self.__pointsize_power_max = +50
        self.__pointsize_factor = 1.1
        self.__pointsize = pointsize # self.__original_pointsize * self.__pointsize_factor ** self.__pointsize_power
        self.offset = list(offset) if not offset == None else None
        # (y, x) of the last pointer position while panning, else None
        self.__is_panning = None
        self.__needs_reconfigure = False
        # Widget initialisation
        # If no pointsize is given, find a good one
        if self.__original_pointsize == None:
            self.__original_pointsize = 1
            # largest zoom powers that still fit the default max size
            w = self.default_max_width / float(world.get_shape()[1])
            h = self.default_max_height / float(world.get_shape()[0])
            pw = math.log(w) / math.log(self.__pointsize_factor)
            ph = math.log(h) / math.log(self.__pointsize_factor)
            # round away from zero so the result stays within the bound
            if pw > 0: pw = int(math.ceil(pw))
            else: pw = int(math.floor(pw))
            if ph > 0: ph = int(math.ceil(ph))
            else: ph = int(math.floor(ph))
            self.__pointsize_power = max(self.__pointsize_power_min, min(self.__pointsize_power_max, min(pw,ph)))
            self.__pointsize = self.__original_pointsize * self.__pointsize_factor ** self.__pointsize_power
        # Request a default size
        reqw, reqh = int(math.ceil(max(self.default_min_width,min(self.default_max_width,world.get_shape()[1]*self.__pointsize)))), int(math.ceil(max(self.default_min_height,min(self.default_max_height,world.get_shape()[0]*self.__pointsize))))
        self.set_size_request(reqw, reqh)
        # Calculate an offset to center the matrices, assuming the default size is the actual size
        if self.offset == None:
            mw = reqw/self.__pointsize - world.get_shape()[1]
            mh = reqh/self.__pointsize - world.get_shape()[0]
            self.offset = [-mh/2, -mw/2]
        # Set OpenGL-capability to the drawing area
        self.set_gl_capability(openglutils.get_glconfig(), share_list=None, direct=True, render_type=gtk.gdkgl.RGBA_TYPE)
        # Connect the relevant signals for the drawing
        self.connect_after('realize', self.__on_realize)
        self.connect('configure-event', self.__on_configure_event)
        self.connect('expose-event', self.__on_expose_event)
        # Connect additionnal events for the manipulation of the view
        self.set_events(gtk.gdk.ALL_EVENTS_MASK)
        self.connect('scroll-event', self.__on_scroll)
        self.connect('motion-notify-event', self.__on_motion_notify)
        self.connect('button-press-event', self.__on_button_press)
        self.connect('button-release-event', self.__on_button_release)
def get_pointsize(self):
""" Returns the size of a cell, in pixels."""
return self.__pointsize
def set_pointsize(self,pointsize):
""" Sets the size of a cell, in pixels.
Requests a redraw."""
self.__pointsize = pointsize
self.queue_redraw(True)
    def queue_redraw(self,needs_reconfigure=False):
        """ Requests a redraw of the widget.
            needs_reconfigure: set to True if the offset or the point size has changed."""
        if needs_reconfigure:
            # sticky flag: only ever raised here -- a False argument must
            # not clear a pending reconfigure request
            self.__needs_reconfigure = needs_reconfigure
        self.queue_draw()
    def __on_scroll(self, widget, event):
        """Handles the mousewheel events and zooms the view accordingly,
        keeping the point under the pointer fixed on screen."""
        # Pan to get the origin at the mouse position's point
        # (event.y is flipped: GDK's origin is top-left, GL's bottom-left)
        self.offset[0] += (widget.get_allocation().height-event.y) / self.__pointsize
        self.offset[1] += event.x / self.__pointsize
        # Zoom by one quantised power step, clamped to the allowed range
        if event.direction == gtk.gdk.SCROLL_UP:
            self.__pointsize_power = min(self.__pointsize_power_max, self.__pointsize_power+1)
        elif event.direction == gtk.gdk.SCROLL_DOWN:
            self.__pointsize_power = max(self.__pointsize_power_min, self.__pointsize_power-1)
        self.__pointsize = self.__original_pointsize * self.__pointsize_factor ** self.__pointsize_power
        # Pan back to get the mouse position's point back under the pointer on the screen
        self.offset[0] -= (widget.get_allocation().height-event.y) / self.__pointsize
        self.offset[1] -= event.x / self.__pointsize
        self.queue_redraw(True)
def __on_button_press(self, widget, event):
"""Handles a button press and starts a panning operation."""
self.__is_panning = (event.y, event.x)
self.queue_redraw(True)
    def __on_motion_notify(self, widget, event):
        """Handles the pointer motion and pans accordingly if in a panning operation."""
        if self.__is_panning != None:
            # NOTE(review): the y delta is added while the x delta is
            # subtracted -- presumably compensating for the inverted y axis
            # between GDK (top-left origin) and the GL projection; confirm
            # against the glOrtho call in __reconfigure.
            self.offset[0] += (event.y - self.__is_panning[0]) / self.__pointsize
            self.offset[1] -= (event.x - self.__is_panning[1]) / self.__pointsize
            self.__is_panning = (event.y, event.x)
            self.queue_redraw(True)
def __on_button_release(self, widget, event):
"""Handles a button release and stops a panning operation."""
self.__is_panning = None
    def __on_realize(self, *args):
        """A one time widget setup. Initialises the OpenGL drawable:
        texture, blending, depth test and clear colour."""
        gldrawable = self.get_gl_drawable()
        glcontext = self.get_gl_context()
        if not gldrawable.gl_begin(glcontext):
            return
        # Generate (one) texture #TODO: Test if one texture by matrix is better
        self.iTex = glGenTextures(1)
        # Configure use of textures
        glEnable(GL_TEXTURE_2D)
        glDisable(GL_TEXTURE_GEN_S)
        glDisable(GL_TEXTURE_GEN_T)
        glBindTexture(GL_TEXTURE_2D, self.iTex)
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR)
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST)
        # As we're in 2D, we don't need any depth test
        glDisable(GL_DEPTH_TEST)
        glEnable(GL_BLEND)
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
        # Set opaque black as background color
        glClearColor(0.0, 0.0, 0.0, 1.0)
        # Refresh the view configuration
        self.__reconfigure()
        gldrawable.gl_end()
        return True
def __on_configure_event(self, *args):
"""Called whenever the widget changes in size. Refreshes the view configuration."""
gldrawable = self.get_gl_drawable()
glcontext = self.get_gl_context()
if not gldrawable.gl_begin(glcontext):
return False
# Refresh the view configuration
self.__reconfigure()
gldrawable.gl_end()
return True
    def __reconfigure(self):
        """Configures the view origin, size and viewport.
        To be called inside gl_begin() and gl_end()."""
        # Get the widget's size
        width, height = self.allocation.width, self.allocation.height
        # Tell OpenGL to draw onto the same size
        glViewport(0, 0, width, height)
        # Set the view origin and extent: world units are matrix cells,
        # so the visible extent is widget size divided by the point size
        glMatrixMode(GL_PROJECTION)
        glLoadIdentity()
        glOrtho(self.offset[1]+0.0, self.offset[1]+width/float(self.__pointsize), self.offset[0]+0.0, self.offset[0]+height/float(self.__pointsize), -1.0, 1.0)
        # Reset any 3D transform
        glMatrixMode(GL_MODELVIEW)
        glLoadIdentity()
def __on_expose_event(self, widget, event, for_export=False):
gldrawable = self.get_gl_drawable()
glcontext = self.get_gl_context()
if not gldrawable.gl_begin(glcontext):
ret |
# -*- coding: utf-8 -*-
from django import forms
from cms_content.settings import EDITOR
from cms_content.models import CMSArticle
from cms_content import widgets
WIDGET = getattr(widgets, EDITOR)
class CMSArticleAdminForm(forms.ModelForm):
    """Admin form for CMSArticle; renders `content` with the editor
    widget selected by the EDITOR setting."""
    content = forms.CharField(widget=WIDGET)

    class Meta:
        model = CMSArticle
        # NOTE(review): no `fields`/`exclude` declared; Django >= 1.8
        # raises ImproperlyConfigured for this -- confirm the targeted
        # Django version.
class CMSArticleFrontendForm(forms.ModelForm):
    """Frontend submission form for CMSArticle, limited to the fields a
    site visitor may set; errors/required fields get CSS classes."""
    error_css_class = 'error'
    required_css_class = 'required'
    content = forms.CharField(widget=WIDGET)

    class Meta:
        model = CMSArticle
        fields = ('title', 'slug', 'content', 'category',)
|
import os
import os.path
import sys
import pygame
from buffalo import utils
from buffalo.scene import Scene
from buffalo.label import Label
from buffalo.button import Button
from buffalo.input import Input
from buffalo.tray import Tray
from camera import Camera
from mapManager import MapManager
from pluginManager import PluginManager
from toolManager import ToolManager
class CameraController:
    """WASD-driven camera target: integrates a float position and exposes
    an integer pixel position for the Camera to follow."""

    def __init__(self):
        # float position is authoritative; pos is its integer projection
        self.fPos = (0.0, 0.0)
        self.pos = (int(self.fPos[0]), int(self.fPos[1]))
        self.xv, self.yv = 0.0, 0.0
        self.speed = 1.2
        self.shift_speed = self.speed * 5.0

    def update(self, keys):
        """Advance the camera position from the pressed-key snapshot,
        scaling movement by the frame delta (normalised to 16 ms)."""
        shift_held = keys[pygame.K_LSHIFT]
        step = self.shift_speed if shift_held else self.speed
        step *= utils.delta / 16.0
        dx = 0.0
        dy = 0.0
        if keys[pygame.K_w]:
            dy -= step
        if keys[pygame.K_a]:
            dx -= step
        if keys[pygame.K_s]:
            dy += step
        if keys[pygame.K_d]:
            dx += step
        self.xv = dx
        self.yv = dy
        self.fPos = (self.fPos[0] + dx, self.fPos[1] + dy)
        self.pos = (int(self.fPos[0]), int(self.fPos[1]))
class EditMapTestScene(Scene):
    """Map-editing scene: a camera controlled by WASD plus a right-hand
    tool tray with function (select/fill) and area-of-effect (draw/area)
    mode buttons wired into ToolManager."""

    def on_escape(self):
        # quit the whole program when Escape is pressed
        sys.exit()

    def blit(self):
        Camera.blitView()

    def update(self):
        """Per-frame update: keyboard camera movement, camera follow and
        incremental map writing."""
        super(EditMapTestScene, self).update()
        keys = pygame.key.get_pressed()
        self.camera_controller.update(keys)
        Camera.update()
        MapManager.soft_load_writer()

    def __init__(self):
        Scene.__init__(self)
        self.BACKGROUND_COLOR = (0, 0, 0, 255)
        PluginManager.loadPlugins()
        # the camera follows this controller's position
        self.camera_controller = CameraController()
        Camera.lock(self.camera_controller, initial_update=True)
        Button.DEFAULT_SEL_COLOR = (50, 50, 100, 255)
        # tray docked at the right edge of the screen
        self.tool_tray = Tray(
            (utils.SCREEN_W - 270, 20),
            (250, 800),
            min_width=250, max_width=250,
            min_height=250, max_height=800,
            color=(100, 50, 50, 100),
        )
        self.tool_tray.labels.add(
            Label(
                (int(self.tool_tray.width / 2), 10),
                "Tool Tray",
                color=(255,255,255,255),
                x_centered=True,
                font="default24",
            )
        )
        # underscore line used as a visual separator
        self.tool_tray.labels.add(
            Label(
                (int(self.tool_tray.width / 2), 25),
                "________________",
                color=(255,255,255,255),
                x_centered=True,
                font="default18",
            )
        )
        self.tool_tray.labels.add(
            Label(
                (int(self.tool_tray.width / 2), 50),
                "Function",
                color=(255,255,255,255),
                x_centered=True,
                font="default18",
            )
        )
        # button callbacks switch the ToolManager state and re-render the
        # tray so the selected button is highlighted
        def set_func_state_to_select():
            ToolManager.set_func_state(ToolManager.FUNC_SELECT)
            self.tool_tray.render()
        self.button_select_mode = Button(
            (15, 80),
            "  Select Mode  ",
            color=(255,255,255,255),
            bg_color=(100,100,200,255),
            font="default12",
            func=set_func_state_to_select,
        )
        self.tool_tray.buttons.add(self.button_select_mode)
        def set_func_state_to_fill():
            ToolManager.set_func_state(ToolManager.FUNC_FILL)
            self.tool_tray.render()
        self.button_fill_mode = Button(
            (self.tool_tray.width - 15, 80),
            "  Fill Mode  ",
            color=(255,255,255,255),
            bg_color=(100,100,200,255),
            invert_x_pos=True,
            font="default12",
            func=set_func_state_to_fill,
        )
        self.tool_tray.buttons.add(self.button_fill_mode)
        self.tool_tray.labels.add(
            Label(
                (int(self.tool_tray.width / 2), 120),
                "________________",
                color=(255,255,255,255),
                x_centered=True,
                font="default18",
            )
        )
        self.tool_tray.labels.add(
            Label(
                (int(self.tool_tray.width / 2), 150),
                "Area of Effect",
                color=(255,255,255,255),
                x_centered=True,
                font="default18",
            )
        )
        def set_effect_state_to_draw():
            ToolManager.set_effect_state(ToolManager.EFFECT_DRAW)
            self.tool_tray.render()
        self.button_draw_mode = Button(
            (15, 180),
            "  Draw Mode  ",
            color=(255,255,255,255),
            bg_color=(100,100,200,255),
            font="default12",
            func=set_effect_state_to_draw,
        )
        self.tool_tray.buttons.add(self.button_draw_mode)
        def set_effect_state_to_area():
            ToolManager.set_effect_state(ToolManager.EFFECT_AREA)
            self.tool_tray.render()
        self.button_area_mode = Button(
            (self.tool_tray.width - 15, 180),
            "  Area Mode  ",
            color=(255,255,255,255),
            bg_color=(100,100,200,255),
            invert_x_pos=True,
            font="default12",
            func=set_effect_state_to_area,
        )
        self.tool_tray.buttons.add(self.button_area_mode)
        # default states plus the buttons ToolManager must highlight
        ToolManager.initialize_states(
            ToolManager.FUNC_SELECT, ToolManager.EFFECT_DRAW,
            (
                self.button_fill_mode,
                self.button_select_mode,
                self.button_draw_mode,
                self.button_area_mode,
            ),
        )
        self.tool_tray.render()
        self.trays.add(self.tool_tray)
|
# Copyright 2015 Antiun Ingenieria S.L. - Javier Iniesta
# Copyright 2016 Tecnativa S.L. - Vicent Cubells
# Copyright 2016 Tecnativa S.L. - Pedro M. Baeza
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html).
from odoo.exceptions import UserError, ValidationError
from odoo.tests import common
class TestResPartnerIndustry(common.SavepointCase):
    """Behavioural tests for the res.partner.industry constraints."""

    @classmethod
    def setUpClass(cls):
        super(TestResPartnerIndustry, cls).setUpClass()
        cls.industry_model = cls.env["res.partner.industry"]
        cls.industry_main = cls.industry_model.create({"name": "Test"})
        cls.industry_child = cls.industry_model.create(
            {"name": "Test child", "parent_id": cls.industry_main.id}
        )

    def test_check_industries(self):
        """The main industry may not also appear among the secondary ones."""
        values = {
            "name": "Test",
            "industry_id": self.industry_main.id,
            "secondary_industry_ids": [(4, self.industry_main.id)],
        }
        with self.assertRaises(ValidationError):
            self.env["res.partner"].create(values)

    def test_check_copy(self):
        """Copying an industry appends a counter to keep the name unique."""
        duplicate = self.industry_child.copy()
        self.assertEqual(duplicate.name, "Test child 2")

    def test_check_uniq_name(self):
        """Industry names must be unique."""
        with self.assertRaises(ValidationError):
            self.industry_model.create({"name": "Test"})

    def test_check_recursion(self):
        """Parent/child loops are rejected with a specific message."""
        with self.assertRaises(UserError):
            self.industry_main.parent_id = self.industry_child.id
        with self.assertRaises(ValidationError) as ctx:
            self.industry_main._check_parent_id()
        self.assertEqual(
            ctx.exception.args[0],
            "Error! You cannot create recursive industries.",
        )

    def test_name(self):
        """display_name joins the parent chain with ' / '."""
        self.assertEqual(self.industry_child.display_name, "Test / Test child")
|
#!/usr/bin/env python
#
# Generate report in Excel format (from xml input)
#
import sys,os,shelve
import re,dfxml,fiwalk
from bc_utils import filename_from_path
from openpyxl.workbook import Workbook
from openpyxl.writer.excel import ExcelWriter
from openpyxl.cell import get_column_letter
def bc_generate_feature_xlsx(PdfReport, data, feature_file):
    """Write one xlsx file listing (filename, position, feature) rows.

    PdfReport    -- report object providing featuredir and the
                    bc_config_report_special_files flag
    data         -- iterable of feature rows: row[0]=position,
                    row[1]=feature, row[3]=filename (when present)
    feature_file -- source feature-file path; its basename (minus the
                    'annotated_' prefix and extension) names the output
    """
    wb = Workbook()
    dest_filename = PdfReport.featuredir +'/'+ (filename_from_path(feature_file))[10:-3] + "xlsx"
    row_idx = 2  # first data row; row 1 holds the header
    ws = wb.worksheets[0]
    ws.title = "File Feature Information"
    # header row: A=Filename, B=Position, C=Feature
    ws.cell('%s%s'%('A', '1')).value = '%s' % "Filename"
    ws.cell('%s%s'%('B', '1')).value = '%s' % "Position"
    ws.cell('%s%s'%('C', '1')).value = '%s' % "Feature"
    for row in data:
        # Skip the lines with known text lines to be eliminated
        if (re.match("Total features",str(row))):
            continue
        # Some lines in the annotated_xxx.txt have less than three
        # columns where filename or feature may be missing.
        filename = row[3] if len(row) > 3 else "Unknown"
        feature = row[1] if len(row) > 1 else "Unknown"
        position = row[0]
        # If it is a special file, check if the user wants it to
        # be reported. If not, exclude this from the table.
        if (PdfReport.bc_config_report_special_files == False) and \
           (is_special_file(filename)):
            continue
        # Bug fix: the data rows previously wrote feature into column B and
        # position into column C, contradicting the header written above.
        ws.cell('%s%s'%('A', row_idx)).value = '%s' % filename
        ws.cell('%s%s'%('B', row_idx)).value = '%s' % position
        ws.cell('%s%s'%('C', row_idx)).value = '%s' % feature
        row_idx += 1
    wb.save(filename=dest_filename)
|
import numpy as np
import cvxopt as co
def load_mnist_dataset():
    """Load MNIST as flat float arrays normalised to [0, 1].

    Returns: train, train_labels, test, test_labels, [28, 28] (image shape).
    """
    import torchvision.datasets as datasets
    mnist_train = datasets.MNIST(root='../data/mnist', train=True, download=True, transform=None)
    mnist_test = datasets.MNIST(root='../data/mnist', train=False, download=True, transform=None)
    # np.int / np.float were removed in NumPy 1.24; use the builtin types.
    # NOTE(review): `[i][1].numpy()` assumes tensor targets; recent
    # torchvision returns plain ints here -- confirm the pinned version.
    test_labels = np.array([mnist_test[i][1].numpy() for i in range(len(mnist_test))], dtype=int)
    train_labels = np.array([mnist_train[i][1].numpy() for i in range(len(mnist_train))], dtype=int)
    test = np.array([np.asarray(mnist_test[i][0]).reshape(28*28) for i in range(len(mnist_test))], dtype=float)
    train = np.array([np.asarray(mnist_train[i][0]).reshape(28*28) for i in range(len(mnist_train))], dtype=float)
    train /= 255.  # normalize data to be in range [0,1]
    test /= 255.
    return train, train_labels, test, test_labels, [28, 28]
def load_fashion_mnist_dataset():
    """Load Fashion-MNIST as flat float arrays normalised to [0, 1].

    Returns: train, train_labels, test, test_labels, [28, 28] (image shape).
    """
    import torchvision.datasets as datasets
    mnist_train = datasets.FashionMNIST(root='../data/fashion-mnist', train=True, download=True, transform=None)
    mnist_test = datasets.FashionMNIST(root='../data/fashion-mnist', train=False, download=True, transform=None)
    # np.int / np.float were removed in NumPy 1.24; use the builtin types.
    # NOTE(review): `[i][1].numpy()` assumes tensor targets; recent
    # torchvision returns plain ints here -- confirm the pinned version.
    test_labels = np.array([mnist_test[i][1].numpy() for i in range(len(mnist_test))], dtype=int)
    train_labels = np.array([mnist_train[i][1].numpy() for i in range(len(mnist_train))], dtype=int)
    test = np.array([np.asarray(mnist_test[i][0]).reshape(28*28) for i in range(len(mnist_test))], dtype=float)
    train = np.array([np.asarray(mnist_train[i][0]).reshape(28*28) for i in range(len(mnist_train))], dtype=float)
    train /= 255.  # normalize data to be in range [0,1]
    test /= 255.
    return train, train_labels, test, test_labels, [28, 28]
def load_emnist_dataset():
    """Load EMNIST (balanced split) as flat float arrays in [0, 1].

    Returns: train, train_labels, test, test_labels, [28, 28] (image shape).
    """
    import torchvision.datasets as datasets
    mnist_train = datasets.EMNIST(root='../data/emnist', split='balanced', train=True, download=True, transform=None)
    mnist_test = datasets.EMNIST(root='../data/emnist', split='balanced', train=False, download=True, transform=None)
    # np.int / np.float were removed in NumPy 1.24; use the builtin types.
    # NOTE(review): `[i][1].numpy()` assumes tensor targets; recent
    # torchvision returns plain ints here -- confirm the pinned version.
    test_labels = np.array([mnist_test[i][1].numpy() for i in range(len(mnist_test))], dtype=int)
    train_labels = np.array([mnist_train[i][1].numpy() for i in range(len(mnist_train))], dtype=int)
    test = np.array([np.asarray(mnist_test[i][0]).reshape(28*28) for i in range(len(mnist_test))], dtype=float)
    train = np.array([np.asarray(mnist_train[i][0]).reshape(28*28) for i in range(len(mnist_train))], dtype=float)
    train /= 255.  # normalize data to be in range [0,1]
    test /= 255.
    return train, train_labels, test, test_labels, [28, 28]
def load_cifar10_dataset():
    """Load CIFAR-10 converted to greyscale ('F' mode) as flat float
    arrays normalised to [0, 1].

    Returns: train, train_labels, test, test_labels, [32, 32] (image shape).
    """
    import torchvision.datasets as datasets
    cifar_train = datasets.CIFAR10(root='../data/cifar10', train=True, download=True, transform=None)
    cifar_test = datasets.CIFAR10(root='../data/cifar10', train=False, download=True, transform=None)
    # np.int / np.float were removed in NumPy 1.24; use the builtin types.
    test_labels = np.array([cifar_test[i][1] for i in range(len(cifar_test))], dtype=int)
    train_labels = np.array([cifar_train[i][1] for i in range(len(cifar_train))], dtype=int)
    test = np.array([np.asarray(cifar_test[i][0].convert('F')).reshape(32*32) for i in range(len(cifar_test))], dtype=float)
    train = np.array([np.asarray(cifar_train[i][0].convert('F')).reshape(32*32) for i in range(len(cifar_train))], dtype=float)
    train /= 255.  # normalize data to be in range [0,1]
    test /= 255.
    return train, train_labels, test, test_labels, [32, 32]
def get_gaussian(num, dims=2, means=[0,0], vars=[1,1]):
    """Draw `num` samples from a Gaussian with diagonal covariance.

    Bug fix: `vars` was previously ignored (the covariance was always the
    identity). The per-dimension variances are now honoured whenever their
    length matches `dims`; on a length mismatch the old identity-covariance
    behaviour is kept for backward compatibility.
    (Mutable default arguments are kept for interface compatibility; they
    are never mutated here.)
    """
    variances = np.asarray(vars, dtype=float)
    if variances.shape[0] == dims:
        cov = np.diag(variances)
    else:
        cov = np.eye(dims)
    data = np.random.multivariate_normal(means, cov, num)
    return data
def get_2state_gaussian_seq(lens,dims=2,means1=[2,2,2,2],means2=[5,5,5,5],vars1=[1,1,1,1],vars2=[1,1,1,1],anom_prob=1.0):
    """Generate a (dims, lens) Gaussian sequence with, with probability
    `anom_prob`, one contiguous block re-drawn from a second state.

    Returns (seqs, lbls, marker): lbls is a (1, lens) int8 array with 1 on
    anomalous positions; marker is 1 iff a block was injected.

    Bug fix: the labels were written with `lbls[start:...]`, which slices
    the ROW axis of the (1, lens) array and so labelled nothing unless the
    block started at 0; the column axis is now indexed explicitly.
    (np.int was removed in NumPy 1.24; builtin int is used instead.)
    """
    seqs = np.zeros((dims, lens))
    lbls = np.zeros((1, lens), dtype=np.int8)
    marker = 0
    # generate first state sequence
    for d in range(dims):
        seqs[d, :] = np.random.randn(lens)*vars1[d] + means1[d]
    prob = np.random.uniform()
    if prob < anom_prob:
        # choose a second-state block; retry until it is long enough after
        # clipping at the end of the sequence
        while True:
            max_block_len = 0.6*lens
            min_block_len = 0.1*lens
            block_len = int(max_block_len*np.random.uniform()+3)
            block_start = int(lens*np.random.uniform())
            if block_len - (block_start+block_len-lens)-3 > min_block_len:
                break
        block_len = min([block_len, block_len - (block_start+block_len-lens)-3])
        lbls[0, block_start:block_start+block_len-1] = 1
        marker = 1
        for d in range(dims):
            seqs[d, block_start:block_start+block_len-1] = np.random.randn(block_len-1)*vars2[d] + means2[d]
    return seqs, lbls, marker
def get_2state_anom_seq(lens, comb_block_len, anom_prob=1.0, num_blocks=1):
    """Generate a (1, lens) zero sequence and, with probability
    `anom_prob`, raise `num_blocks` non-overlapping blocks (totalling
    about `comb_block_len` samples) by +4.0, labelling them with 1.

    Returns (seqs, lbls, marker) with marker=1 iff blocks were injected.

    Fixes: removed the dead cvxopt matrices that were immediately
    overwritten by NumPy arrays; replaced the removed np.int alias; the
    last-block length from np.round is cast to int so it can be used as a
    slice length.
    """
    seqs = np.zeros((1, lens))
    lbls = np.zeros((1, lens))
    marker = 0
    bak = seqs.copy()  # pristine baseline the anomalous samples are offset from
    prob = np.random.uniform()
    if prob < anom_prob:
        # nominal per-block length; the final block absorbs the remainder
        block_len = int(np.floor(comb_block_len / float(num_blocks)))
        marker = 1
        blen = 0
        for b in range(int(num_blocks)):
            if b == num_blocks-1 and b > 1:
                block_len = int(round(comb_block_len - blen))
            # rejection-sample a start position that does not overlap an
            # already-labelled block
            while True:
                start = int(np.random.uniform()*float(lens-block_len+1))
                if np.sum(lbls[0, start:start+block_len]) == 0:
                    lbls[0, start:start+block_len] = 1
                    seqs[0, start:start+block_len] = bak[0, start:start+block_len]+4.0
                    break
            blen += block_len
    return seqs, lbls, marker
|
3765\">B: Bring me the golden mane of Kerunos</a><br>",
"<a action=\"bypass -h Quest 335_TheSongOfTheHunter 3766\">A: Bring back 20 skulls of undead executed criminals</a><br>",
"<a action=\"bypass -h Quest 335_TheSongOfTheHunter 3767\">A: Recover the stolen bust of the late King Travis</a><br>",
"<a action=\"bypass -h Quest 335_TheSongOfTheHunter 3768\">A: Recover 10 swords of Cadmus</a><br>"
]
def HasItems(st, check):
    """Return 1 as soon as at least `check` of the Grey_Advance item groups
    are fully collected by the player, else 0.

    A group counts as complete when every item id in group[0] is owned in
    quantity >= group[1].
    """
    completed = 0
    for group in Grey_Advance:
        items, needed = group[0], group[1]
        group_done = 1
        for item_id in items:
            if st.getQuestItemsCount(item_id) < needed:
                group_done = 0
                break
        if group_done:
            completed += 1
            if completed >= check:
                return 1
    return 0
def AutoChat(npc, text):
    """Broadcast `text` as general chat (type 0) from `npc` to every player
    currently in its known-list."""
    players = npc.getKnownList().getKnownPlayers().values().toArray()
    if players == None:
        return
    for player in players:
        packet = CreatureSay(npc.getObjectId(), 0, npc.getName(), text)
        player.sendPacket(packet)
    return
def HasRequestCompleted(st, level):
    """Return the request item id whose reward condition is met, or 0.

    Level 2 looks up Tor_Rewards_2, anything else Tor_Rewards_1; a request
    is complete when the player holds the request item and at least the
    required amount of the reward's target item.
    """
    if level == 2:
        rewards = Tor_Rewards_2
    else:
        rewards = Tor_Rewards_1
    for request in rewards.keys():
        target, amount = rewards[request][0], rewards[request][1]
        if st.getQuestItemsCount(request) and st.getQuestItemsCount(target) >= amount:
            return request
    return 0
class Quest (JQuest) :
def __init__(self, id, name, descr):
    """Register the quest with the engine and declare its quest item ids
    so they are cleaned up when the quest is abandoned."""
    JQuest.__init__(self, id, name, descr)
    # Item ids 3692-3810 plus id 3471 are quest items (python2 range
    # returns a list, so it can be extended in place).
    item_ids = range(3692, 3811)
    item_ids.append(3471)
    self.questItemIds = item_ids
def onAdvEvent (self,event,npc,player):
    # Dialog-bypass dispatcher. `event` is either an html page name, one of
    # the "Tor_list_*" menu generators, or a numeric request item id.
    # NOTE(review): original indentation was lost; nesting below was
    # reconstructed from the parallel Tor_list_2 branch -- verify.
    st = player.getQuestState(qn)
    if not st: return
    htmltext = event
    if event == "30744-03.htm" :
        # Quest accepted: hand out the first test instructions.
        st.setState(STARTED)
        st.playSound("ItemSound.quest_accept")
        st.giveItems(Test_Instructions_1,1)
        st.set("cond","1")
        #set Memo = 0
    elif event == "30744-32.htm" :
        # Quest finished: 20+ Leaf Pins earn a 20000 adena (item 57) bonus.
        st.playSound("ItemSound.quest_finish")
        if st.getQuestItemsCount(Leaf_Pin) >= 20 :
            htmltext = "30744-33.htm"
            st.giveItems(57,20000)
        st.exitQuest(1)
    elif event == "30744-19.htm" :
        # Second test only starts if no first-test group is complete yet.
        if not HasItems(st,1) :
            st.giveItems(Test_Instructions_2,1)
            htmltext = "30744-18.htm"
    elif event == "30745-03.htm" :
        if st.getQuestItemsCount(Test_Instructions_2) :
            htmltext = "30745-04.htm"
    elif event == "Tor_list_1" :
        # Build a random menu of five level-1 hunting requests; Leaf Pin
        # count biases which menu entries can appear.
        if not st.getInt("hasTask") :
            htmltext = "<html><body>Guild Member Tor:<br>"
            pins = st.getQuestItemsCount(Leaf_Pin)
            reply_0 = Rnd.get(12)
            reply_1 = Rnd.get(12)
            reply_2 = Rnd.get(12)
            reply_3 = Rnd.get(12)
            reply_4 = Rnd.get(12)
            if Rnd.get(100) < 20 :
                if pins < 4 and pins :
                    reply_0 = Rnd.get(6) + 12
                    reply_2 = Rnd.get(6)
                    reply_3 = Rnd.get(6) + 6
                elif pins >= 4 :
                    reply_0 = Rnd.get(6) + 6
                    if not Rnd.get(20) :
                        # 1-in-20 chance of a rare request entry.
                        reply_1 = Rnd.get(2) + 18
                    reply_2 = Rnd.get(6)
                    reply_3 = Rnd.get(6) + 6
            elif pins >= 4 :
                if not Rnd.get(20) :
                    reply_1 = Rnd.get(2) + 18
                    reply_2 = Rnd.get(6)
                    reply_3 = Rnd.get(6) + 6
            htmltext += Tor_menu[reply_0] + Tor_menu[reply_1] + Tor_menu[reply_2] + Tor_menu[reply_3] + Tor_menu[reply_4]
            htmltext += "</body></html>"
    elif event == "Tor_list_2" :
        # Same idea for level-2 requests; menu entries live at offset +20.
        if not st.getInt("hasTask") :
            htmltext = "<html><body>Guild Member Tor:<br>"
            pins = st.getQuestItemsCount(Leaf_Pin)
            reply_0 = Rnd.get(10)
            reply_1 = Rnd.get(10)
            reply_2 = Rnd.get(5)
            reply_3 = Rnd.get(5) + 5
            reply_4 = Rnd.get(10)
            if Rnd.get(100) < 20 :
                if pins < 4 and pins:
                    reply_0 = Rnd.get(6) + 10
                elif pins >= 4 :
                    reply_0 = Rnd.get(6) + 10
                    if not Rnd.get(20):
                        reply_1 = Rnd.get(3) + 16
            elif pins >= 4 :
                if not Rnd.get(20) :
                    reply_1 = Rnd.get(3) + 16
            htmltext += Tor_menu[reply_0 + 20] + Tor_menu[reply_1 + 20] + Tor_menu[reply_2 + 20] + Tor_menu[reply_3 + 20] + Tor_menu[reply_4 + 20]
            htmltext += "</body></html>"
    elif event == "30745-10.htm" :
        # Abandon the current task: one Leaf Pin penalty, drop all request
        # items (ids 3727-3810) and clear the task flag.
        st.takeItems(Leaf_Pin,1)
        for item in range(3727,3811) :
            st.takeItems(item,-1)
        st.set("hasTask","0")
    elif event == "30746-03.htm" :
        # Hand out Cybellin's request items if missing.
        if not st.getQuestItemsCount(Cyb_Req) :
            st.giveItems(Cyb_Req,1)
        if not st.getQuestItemsCount(3471) :
            st.giveItems(3471,1)
        if not st.getQuestItemsCount(3698) :
            st.giveItems(3698,1)
        st.takeItems(6708,-1)
    elif event == "30746-08.htm" :
        # Pay out adena for the first reward item the player holds.
        for item in Cyb_Rewards.keys() :
            if st.getQuestItemsCount(item) :
                st.takeItems(item,-1)
                st.giveItems(57,Cyb_Rewards[item])
                break
    elif event == "30746-12.htm" :
        st.takeItems(3698,-1)
        st.takeItems(3697,-1)
        st.takeItems(3471,-1)
    elif event.isdigit() :
        # Numeric event: a request item id chosen from Tor's menu; the html
        # page number is derived from the item id (offset 3712).
        event = int(event)
        st.giveItems(event,1)
        st.set("hasTask","1")
        event = event - 3712
        htmltext = "30745-" + str(event) + ".htm"
    return htmltext
def onTalk (self,npc,player):
htmltext = "<html><body>You are either not on a quest that involves this NPC, or you don't meet this NPC's minimum quest requirements.</body></html>"
st = player.getQuestState(qn)
if not st : return htmltext
npcId = npc.getNpcId()
cond = st.getInt("cond")
id = st.getState()
level = player.getLevel()
bracelet_1 = st.getQuestItemsCount(License_1)
bracelet_2 = st.getQuestItemsCount(License_2)
if npcId == Grey :
if id == CREATED :
if level >= 35 :
htmltext = "02"
else :
htmltext = "01"
elif cond == 1 :
if HasItems(st,3) :
htmltext = "12"
st.set("cond","2")
for item in range(3709,3717) :
st.takeItems(item,-1)
st.takeItems(Test_Instructions_1,-1)
st.giveItems(License_1,1)
else :
htmltext = "11"
elif cond == 2 :
instructions = st.getQuestItemsCount(Test_Instructions_2)
if level < 45 and bracelet_1 :
htmltext = "13"
elif level >= 45 and bracelet_1 and not instructions :
htmltext = "16"
elif instructions :
if HasItems(st,3) :
htmltext = "28"
st.set("cond","3")
for item in range(3718,3727) :
st.takeItems(item,-1)
st.takeItems(Test_Instructions_2,-1)
st.takeItems(License_1,-1)
st.giveItems(License_2,1)
else :
htmltext = "27"
elif cond == 3 :
htmltext = "29"
elif npcId == Tor :
if not bracelet_1 and not bracelet_2 :
htmltext = "01"
elif bracelet_1 :
req = HasRequestCompleted(st,1)
if not st.getInt("hasTask") :
if level >= 45 :
if st.getQuestItemsCount(Test_Instructions_2) :
htmltext = "0 |
import uuid
from unithelper import DBTestCase
from unithelper import mocker
from unithelper import requestor
from unithelper import hashable_dict
from bc import database
from bc import metrics
from bc_wapi import wapi_metrics
class Test(DBTestCase):
    # Tests for the metric WAPI handlers (metricGet / metricList / metricAdd).
    # NOTE: python2 module -- the 0L long literals and the octal literal
    # below are not valid python3 syntax.

    def test_metric_get(self):
        """Check getting metric with metricGet"""
        # Insert one metric row directly, then fetch it through the WAPI.
        data={
            'id': str(uuid.uuid4()),
            'type': str(uuid.uuid4())[:10],
            'formula': metrics.constants.FORMULA_SPEED,
            'aggregate': 0L,
        }
        with database.DBConnect() as db:
            db.insert('metrics', data)
        self.assertEquals(wapi_metrics.metricGet({'id': data['id']}),
            requestor({'metric': data}, 'ok'))
        # Unknown id -> 'error' answer.
        self.assertEquals(wapi_metrics.metricGet({'id':''}),
            requestor({'message': 'Metric not found' }, 'error'))
        # Backend raising (mocked) -> 'servererror' answer; LOG.error muted.
        with mocker([('bc.metrics.get', mocker.exception),
                ('bc_wapi.wapi_metrics.LOG.error', mocker.passs)]):
            self.assertEquals(wapi_metrics.metricGet({'id':''}),
                requestor({'message': 'Unable to obtain metric' }, 'servererror'))

    def test_metric_get_list(self):
        """Check getting metrics with metricList"""
        # Insert several rows, then list them all.
        data = []
        for i in range(2, 10):
            d={
                'id': str(uuid.uuid4()),
                'type': str(uuid.uuid4())[:10],
                'formula': metrics.constants.FORMULA_SPEED,
                'aggregate': 0L,
            }
            with database.DBConnect() as db:
                db.insert('metrics', d)
            data.append(d)
        ans = wapi_metrics.metricList('')
        # 01 << 2 == 4 (python2 octal literal) -- presumably the numeric
        # status element of the answer tuple; verify against requestor().
        self.assertEquals(ans[0], (01 << 2))
        self.assertEquals(ans[1]['status'], 'ok')
        # Row order is not guaranteed: compare as sets of hashable dicts.
        self.assertEquals(set(map(lambda x: hashable_dict(x), ans[1]['metrics'])),
            set(map(lambda x: hashable_dict(x), data)))
        with mocker([('bc.metrics.get_all', mocker.exception),
                ('bc_wapi.wapi_metrics.LOG.error', mocker.passs)]):
            self.assertEquals(wapi_metrics.metricList({'id':''}),
                requestor({'message': 'Unable to obtain metric list' }, 'servererror'))

    def test_metric_add(self):
        """Check the creating metric with metricAdd"""
        data={
            'id': str(uuid.uuid4()),
            'type': str(uuid.uuid4())[:10],
            'formula': metrics.constants.FORMULA_SPEED,
            'aggregate': 0L,
        }
        # .copy() so the local dict is not mutated by the callee --
        # presumably metricAdd consumes/alters its argument; verify.
        ans = wapi_metrics.metricAdd(data.copy())
        self.assertEquals(ans, requestor({'id':data['id']}, 'ok'))
        # The row must actually have been written to the database.
        with database.DBConnect() as db:
            t1 = db.find('metrics').one()
        self.assertEquals(data['id'], t1['id'])
        self.assertEquals(data['type'], t1['type'])
        with mocker([('bc.metrics.add', mocker.exception),
                ('bc_wapi.wapi_metrics.LOG.error', mocker.passs)]):
            self.assertEquals(wapi_metrics.metricAdd({'id':''}),
                requestor({'message': 'Unable to add new metric' }, 'servererror'))
|
#! /usr/bin/env python
# -*- coding: | utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under | the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import socket
import select
import threading
class inputThread(threading.Thread):
    """Reads lines typed by the user and relays them to the IRC channel."""

    def say(self, chan, words):
        # RFC 1459: every client command must be terminated by CRLF.
        send = 'PRIVMSG %s :%s\r\n' % (chan, words)
        sock.send(send)

    def run(self):
        global done
        while not done:
            msg = raw_input('IRC > ')
            if msg == 'QUIT':
                # BUG FIX: the QUIT command lacked its CRLF terminator, so
                # the server never processed it; also stop here instead of
                # echoing the literal "QUIT" to the channel afterwards.
                sock.send('QUIT\r\n')
                done = True
            else:
                self.say(channel, msg)
class outputThread(threading.Thread):
def checkForMessages(self):
read_list, write_list, error_list = \
select.select([sock], [], [sock], 0)
if sock in read_list:
raw = sock.recv(8000)
self.printOutput(raw)
def run(self):
while not done:
self.checkForMessages()
def printOutput(self, text):
if '\r' in text:
text = text.replace('\r', '\n')
lines = text.split('\n')
for one_line in lines:
if len(one_line) > 0:
print self.formatLine(one_line)
def formatLine(self, line):
words = line.split()
sender = ""
if line[0] == ':' and len(words) >= 2:
sender = line[1:line.find('!')]
words = words[1:]
tag = words[0].upper()
if tag == 'PRIVMSG':
return '%s: %s' % (sender, words[2])
else:
return line
# --- Connection parameters --------------------------------------------------
network = 'irc.freenode.net'
port = 6667
nick = 'lolasuketo'
uname = 'jinna'
host = 'jlei-laptop'
server = 'comcast'
realname = 'python irc bot'
channel = '#kamtest'

# Connect and register with the IRC server (RFC 1459: commands end in CRLF).
sock = socket.socket ( socket.AF_INET, socket.SOCK_STREAM )
sock.connect ( ( network, port ) )
sock.send ('NICK %s \r\n' % nick )
# BUG FIX: the USER line ended with literal 'r\n' (missing backslash), so the
# registration command was never terminated and was ignored by the server.
sock.send ( 'USER %s %s %s :%s\r\n' % (uname, host, server, realname))
# Use the `channel` variable instead of a duplicated hard-coded '#kamtest'
# (same value, so behaviour is unchanged).
sock.send ( 'JOIN %s\r\n' % channel )

done = False
input1 = inputThread()
output1 = outputThread()
input1.start()
output1.start()
|
# -*- coding: utf-8 -*-
"""
Created on Mon Apr 24 10:55:03 2017
@author: rstreet
"""
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from events.models import Field, ObsRequest
from sys import exit
from scripts import query_db
class Command(BaseCommand):
    # Django management command: print the currently-active observation
    # requests for a named survey field.
    help = ''

    def add_arguments(self, parser):
        # Positional argument: one or more field names (only the first is
        # actually used below).
        parser.add_argument('field', nargs='+', type=str)

    def _fetch_obs_for_field(self,*args, **options):
        field_name = options['field'][0]
        # Raises Field.DoesNotExist for an unknown name; the id itself is
        # unused below -- presumably kept as an existence check; verify.
        field_id = Field.objects.get(name=field_name).id
        print '\nActive obs for '+field_name+':\n'
        # NOTE(review): get_active_obs() takes no field argument, so the
        # listing is NOT filtered to `field_name` -- confirm intent.
        active_obs = query_db.get_active_obs()
        for obs in active_obs:
            print obs.grp_id, obs.timestamp, obs.time_expire

    def handle(self,*args, **options):
        self._fetch_obs_for_field(*args,**options)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2017 Julien Girard
#
# Licensed under the GNU GENERAL PUBLIC LICENSE, Version 3 ;
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://http://www.gnu.org/licenses/gpl-3.0.html
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import argparse
import configparser
import names
import os
import socket
def get_open_port():
    """Return a TCP port number that was free at the time of the call.

    Binds port 0 so the kernel picks an unused port, reads the chosen
    number back, then releases the socket.
    http://stackoverflow.com/questions/2838244/get-open-tcp-port-in-python/2838309#2838309
    :return: free port
    """
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    with probe:
        probe.bind(("", 0))
        probe.listen(1)
        chosen = probe.getsockname()[1]
    return chosen
def is_open_port(port):
    """Return `port` if it can be bound; otherwise find a usable one.

    On "address in use" the next port is tried recursively; on an
    out-of-range port a random free port is returned.
    """
    p = int(port)
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        try:
            # Test if port is open
            s.bind(("", p))
            is_open = p
        except socket.error as e:
            # errno 98 is EADDRINUSE on Linux -- try the next port.
            if e.errno == 98:
                is_open = is_open_port(p + 1)
            else:
                # BUG FIX: previously this branch left `is_open` unbound,
                # raising UnboundLocalError on return; fall back to any
                # free port after reporting the error.
                print(e)
                is_open = get_open_port()
        except OverflowError:
            # Port is out of the valid range: pick a random open port.
            is_open = get_open_port()
    return is_open
def get_fw_port(port):
    """Choose the host-side port used to forward `port` into the guest.

    A 'guest:host' spec requests the given host port (or the nearest free
    one); a bare port gets a random free host port.
    """
    if ":" not in port:
        return get_open_port()
    requested = port.split(":")[-1]
    return is_open_port(requested)
def launch():
    """Create the VM image if needed, then boot it under QEMU/KVM.

    Command-line arguments override (and are persisted into) the per-VM
    section of the configuration file; config values fall back to
    hard-coded defaults when neither source provides them.
    """
    # Parse command line arguments.
    parser = argparse.ArgumentParser(description='Because sometimes you don\'t need a full stack')
    parser.add_argument(
        '-b', '--boot',
        help='Boot option. Can be hdd, cdrom or network.',
        dest='boot'
    )
    parser.add_argument(
        '-c', '--cores',
        help='Number of cores for the virtual machine.',
        dest='cores'
    )
    parser.add_argument(
        '-d', '--disk',
        help='Path to a disk image',
        dest='disk'
    )
    parser.add_argument(
        '-e', '--domains',
        help='DNS to look for.',
        dest='domains'
    )
    parser.add_argument(
        '--file',
        help='Path to the configuration file. Default is ~/.openstick',
        dest='configuration_file_path',
        default=os.path.expanduser('~') + '/.openstick'
    )
    parser.add_argument(
        '-f', '--format',
        help='Format to use for images.',
        dest='format'
    )
    parser.add_argument(
        '-j', '--hostname',
        help='Hostname of the virtual machine.',
        dest='hostname'
    )
    parser.add_argument(
        '-i', '--image',
        help='Virtual machine hard drive image path',
        dest='image'
    )
    parser.add_argument(
        '-m', '--memory',
        help='Memory size of the virtual machine.',
        dest='memory'
    )
    parser.add_argument(
        '-n', '--name',
        help='Name of the virtual machine. Default is a random name.',
        dest='name',
        default=names.get_first_name()
    )
    parser.add_argument(
        '-p', '--ports',
        help='Ports to forward.',
        dest='ports'
    )
    parser.add_argument(
        '-r', '--root',
        help='Path to a virtual machine image to use as root (base).',
        dest='root_image_path'
    )
    parser.add_argument(
        '-s', '--size',
        help='Size of the virtual machine hard drive.',
        dest='size'
    )
    parser.add_argument(
        '-u', '--sockets',
        help='Number of CPU sockets',
        dest='sockets'
    )
    parser.add_argument(
        '-t', '--threads',
        help='Number of threads by core.',
        dest='threads'
    )
    args = parser.parse_args()
    # Parse configuration file.
    config = configparser.ConfigParser()
    config.read(args.configuration_file_path)
    # Write specified values to configuration file.
    vm_name = args.name.lower()
    if not vm_name in config:
        config[vm_name] = {}
    # Persist every explicitly-given CLI value into the VM's section.
    exclude_args = ["configuration_file_path"]
    for key, value in vars(args).items():
        if key in exclude_args or value is None:
            continue
        config[vm_name][key] = value
    with open(args.configuration_file_path, 'w') as configfile:
        config.write(configfile)
    # Initialize Parameters.
    vm_boot = config.get(vm_name, 'boot', fallback='hdd')
    vm_cores = config.get(vm_name, 'cores', fallback='2')
    vm_disk = config.get(vm_name, 'disk', fallback='/dev/null')
    vm_domains = config.get(vm_name, 'domains', fallback='')
    vm_hostname = config.get(vm_name, 'hostname', fallback='%s-%s-%s' % (socket.gethostname(), os.getlogin(), vm_name))
    vm_format = config.get(vm_name, 'format', fallback='qcow2')
    vm_image = os.path.expanduser(
        config.get(vm_name, 'image', fallback=os.path.join(os.path.expanduser('~/VMs'), '%s.%s' % (vm_name, vm_format)))
    )
    vm_memory = config.get(vm_name, 'memory', fallback='1G')
    vm_ports = config.get(vm_name, 'ports', fallback='22,80,443')
    vm_size = config.get(vm_name, 'size', fallback='20G')
    vm_sockets = config.get(vm_name, 'sockets', fallback='1')
    vm_threads = config.get(vm_name, 'threads', fallback='1')
    # Create the base directory of the vm_image if necessary
    vm_image_dir = os.path.dirname(vm_image)
    if not os.path.isdir(vm_image_dir):
        os.makedirs(vm_image_dir)
    # Create virtual machine image if it does not exist.
    if not os.path.isfile(vm_image):
        if args.root_image_path is not None:
            # Copy-on-write overlay backed by the given root image.
            os.system('qemu-img create -f %s -b %s %s' % (vm_format, args.root_image_path, vm_image))
        else:
            if vm_boot != 'cdrom' and vm_boot != 'network':
                print('You\'re trying to create a new virtual machine but does not specify boot from cdrom or network.')
                exit(1)
            else:
                os.system('qemu-img create -f %s %s %s' % (vm_format, vm_image, vm_size))
    # Build network configuration
    vm_network = 'user,id=net0,hostname=%s' % vm_hostname
    for domain in [x for x in vm_domains.split(',') if x]:
        vm_network = '%s,dnssearch=%s' % (vm_network, domain)
    print("VM name is : %s" % vm_name)
    # Forward port
    for port in [x for x in vm_ports.split(',') if x]:
        fw_port = get_fw_port(port)
        # A 'guest:host' spec: keep only the guest part for the rule.
        if ":" in port:
            port = port.split(":")[0]
        else:
            pass
        print("Forward port %s to : %s" % (port, fw_port))
        vm_network = '%s,hostfwd=tcp::%s-:%s' % (vm_network, fw_port, port)
    # Build boot option
    if vm_boot == 'hdd':
        vm_boot_option = 'c'
    elif vm_boot == 'cdrom':
        vm_boot_option = 'd'
    elif vm_boot == 'network':
        vm_boot_option = 'n'
    else:
        print('Invalid boot option: \'%s\'' % vm_boot)
        exit(2)
    # Start virtual machine.
    os.system('qemu-system-x86_64 -enable-kvm -smp cores=%s,threads=%s,sockets=%s -hda %s -device virtio-net-pci,netdev=net0,id=nic0 -netdev %s -m %s -boot %s -cdrom %s -monitor stdio' % (
        vm_cores,
        vm_threads,
        vm_sockets,
        vm_image,
        vm_network,
        vm_memory,
        vm_boot_option,
        vm_disk
    ))
def list():
    """Print the name of every virtual machine known to the config file."""
    # NOTE: the name shadows the builtin list(); kept to preserve the
    # public entry-point name.
    parser = argparse.ArgumentParser(description='Because sometimes you don\'t need a full stack')
    parser.add_argument(
        '--file',
        help='Path to the configuration file. Default is ~/.openstick',
        dest='configuration_file_path',
        default=os.path.expanduser('~') + '/.openstick'
    )
    cli_args = parser.parse_args()
    # Each config section is one virtual machine.
    cfg = configparser.ConfigParser()
    cfg.read(cli_args.configuration_file_path)
    for vm_name in cfg.sections():
        print(vm_name)
# Entry point: create/start a VM when this file is executed as a script.
if __name__ == '__main__':
    launch()
|
_("calendar|Islamic"),
_("calendar|Swedish") )
_ = locale.translation.lexgettext
self.hebrew = (
"",
# TRANSLATORS: see
# http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
# to learn how to select proper inflection to be used in your localized
# DateDisplayer code!
_("Hebrew month lexeme|Tishri"),
_("Hebrew month lexeme|Heshvan"),
_("Hebrew month lexeme|Kislev"),
_("Hebrew month lexeme|Tevet"),
_("Hebrew month lexeme|Shevat"),
_("Hebrew month lexeme|AdarI"),
_("Hebrew month lexeme|AdarII"),
_("Hebrew month lexeme|Nisan"),
_("Hebrew month lexeme|Iyyar"),
_("Hebrew month lexeme|Sivan"),
_("Hebrew month lexeme|Tammuz"),
_("Hebrew month lexeme|Av"),
_("Hebrew month lexeme|Elul")
)
self.french = (
"",
# TRANSLATORS: see
# http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
# to learn how to select proper inflection to be used in your localized
# DateDisplayer code!
_("French month lexeme|Vendémiaire"),
_("French month lexeme|Brumaire"),
_("French month lexeme|Frimaire"),
_("French month lexeme|Nivôse"),
_("French month lexeme|Pluviôse"),
_("French month lexeme|Ventôse"),
_("French month lexeme|Germinal"),
_("French month lexeme|Floréal"),
_("French month lexeme|Prairial"),
_("French month lexeme|Messidor"),
_("French month lexeme|Thermidor"),
_("French month lexeme|Fructidor"),
_("French month lexeme|Extra"),
)
self.islamic = (
"",
# TRANSLATORS: see
# http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
# to learn how to select proper inflection to be used in your localized
# DateDisplayer code!
_("Islamic month lexeme|Muharram"),
_("Islamic month lexeme|Safar"),
_("Islamic month lexeme|Rabi`al-Awwal"),
_("Islamic month lexeme|Rabi`ath-Thani"),
_("Islamic month lexeme|Jumada l-Ula"),
_("Islamic month lexeme|Jumada t-Tania"),
_("Islamic month lexeme|Rajab"),
_("Islamic month lexeme|Sha`ban"),
_("Islamic month lexeme|Ramadan"),
_("Islamic month lexeme|Shawwal"),
_("Islamic month lexeme|Dhu l-Qa`da"),
_("Islamic month lexeme|Dhu l-Hijja"),
)
self.persian = (
"",
# TRANSLATORS: see
# http://gramps-project.org/wiki/index.php?title=Translating_Gramps#Translating_dates
# to learn how to select proper inflection to be used in your localized
# DateDisplayer code!
_("Persian month lexeme|Farvardin"),
_("Persian month lexeme|Ordibehesht"),
_("Persian month lexeme|Khordad"),
_("Persian month lexeme|Tir"),
_("Persian month lexeme|Mordad"),
_("Persian month lexeme|Shahrivar"),
_("Persian month lexeme|Mehr"),
_("Persian month lexeme|Aban"),
_("Persian month lexeme|Azar"),
_("Persian month lexeme|Dey"),
_("Persian month lexeme|Bahman"),
_("Persian month lexeme|Esfand"),
)
self.modifiers = ("",
_("date modifier|before "),
_("date modifier|after "),
_("date modifier|about "),
"", "", "")
self.qualifiers = ("",
_("date quality|estimated "),
_("date quality|calculated "),
)
# 6753: localized day names. Eventually should sprout into
# a per-calendar type thing instead.
self.long_days = ("",
_("Sunday"),
_("Monday"),
_("Tuesday"),
_("Wednesday"),
_("Thursday"),
_("Friday"),
_("Saturday"),
)
__doc__ += """
__main__
--------
Run this code with the appropriate ``LANG`` and ``LC_DATE`` set for your target
language, in order to generate the .po snippets initialized with the strings
from your locale (from the deprecated data pr | ovided in _grampslocale).
E.g., for French::
LANG=fr_FR.utf8 LC_ALL=fr_FR.utf8 GRAMPS_RESOURCES=$PWD python -m gramps.gen | .datehandler._datestrings
Then merge the output into your language's .po file, and further modify the
strings as needed. Then remove the strings from your language's
:class:`DateParserXX` and :class:`DateHandlerXX` classes.
"""
if __name__ == '__main__':
import sys
from ..utils.grampslocale import GrampsLocale
from gramps.gen.const import GRAMPS_LOCALE as glocale
from ._grampslocale import (_deprecated_long_months as old_long,
_deprecated_short_months as old_short,
_deprecated_long_days as old_days)
from ._datedisplay import DateDisplay
import gettext
lang = glocale.lang
lang_short = lang[:2]
available_langs = glocale.get_available_translations()
if glocale.check_available_translations(lang) is None:
print ("Translation for current language {lang} not available.\n"
"Available translations: {list}.\n"
"Does po/{lang_short}*.po exist in gramps source tree?!\n"
"Please set your LANG / LC_ALL environment to something else...\n".format(
lang=lang, list=available_langs, lang_short=lang_short),
file=sys.stderr)
sys.exit(1)
print ("# Generating snippets for {}*.po\n"
"# Available languages: {}".format(
lang_short, available_langs))
glocale = GrampsLocale(languages=(lang))
dd = glocale.date_displayer
ds = dd._ds
glocale_EN = GrampsLocale(languages=('en'))
ds_EN = DateStrings(glocale_EN)
filename = __file__
try:
localized_months = dd.__class__.long_months
except AttributeError:
localized_months = old_long
def print_po_snippet(en_loc_old_lists, context):
for m,localized,old in zip(*en_loc_old_lists):
if m == "":
continue
if m == localized:
localized = old
print ('#: {file}:{line}\n'
'msgid "{context}{en_month}"\n'
'msgstr "{localized_month}"\n'.format(
context = context,
file = filename,
line = print_po_snippet.line,
en_month = m,
localized_month = localized))
print_po_snippet.line += 1
print_po_snippet.line = 10000
try:
localized_months = dd.__class__.long_months
except AttributeError:
localized_months = old_long
print_po_snippet((ds_EN.long_months, localized_months, old_long),
"localized lexeme inflections||")
try:
localized_months = dd.__class__.short_months
except AttributeError:
localized_months = old_short
print_po_snippet((ds_EN.short_months, localized_months, old_short),
"localized lexeme inflections - short month form||")
try:
loc = dd.__class__.hebrew
print_po_snippet((ds_EN.hebrew, loc, loc),
"Hebrew month lexeme|")
except AttributeError:
pass
try:
loc = dd.__class__.french
print_po_snippet((ds_EN.french, loc, loc),
"French month lexeme|")
except AttributeError:
pass
try:
loc = dd.__class__.islamic
print_po_snippet((ds_EN.islamic, loc, loc),
"Islamic month lexeme|")
except AttributeError:
pass
try:
loc |
"""
Tests for the lms_result_processor
"""
import six
import pytest
from lms.djangoapps.courseware.tests.factories import UserFactory
from lms.lib.courseware_search.lms_result_processor import LmsSearchResultProcessor
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
class LmsSearchResultProcessorTestCase(ModuleStoreTestCase):
    """ Test case class to test search result processor """

    def build_course(self):
        """
        Build up a course tree with an html control
        """
        self.global_staff = UserFactory(is_staff=True)
        self.course = CourseFactory.create(
            org='Elasticsearch',
            course='ES101',
            run='test_run',
            display_name='Elasticsearch test course',
        )
        self.section = ItemFactory.create(
            parent=self.course,
            category='chapter',
            display_name='Test Section',
        )
        self.subsection = ItemFactory.create(
            parent=self.section,
            category='sequential',
            display_name='Test Subsection',
        )
        self.vertical = ItemFactory.create(
            parent=self.subsection,
            category='vertical',
            display_name='Test Unit',
        )
        self.html = ItemFactory.create(
            parent=self.vertical,
            category='html',
            display_name='Test Html control',
        )
        # "Ghost" branch: blocks with display_name=None, presumably to
        # exercise handling of unnamed blocks -- verify against processor.
        self.ghost_subsection = ItemFactory.create(
            parent=self.section,
            category='sequential',
            display_name=None,
        )
        self.ghost_vertical = ItemFactory.create(
            parent=self.ghost_subsection,
            category='vertical',
            display_name=None,
        )
        self.ghost_html = ItemFactory.create(
            parent=self.ghost_vertical,
            category='html',
            display_name='Ghost Html control',
        )

    def setUp(self):
        super(LmsSearchResultProcessorTestCase, self).setUp()  # lint-amnesty, pylint: disable=super-with-arguments
        self.build_course()

    def test_url_parameter(self):
        # Without 'course'/'id' fields, the url property must raise.
        fake_url = ""
        srp = LmsSearchResultProcessor({}, "test")
        with pytest.raises(ValueError):
            fake_url = srp.url
        assert fake_url == ''
        srp = LmsSearchResultProcessor(
            {
                "course": six.text_type(self.course.id),
                "id": six.text_type(self.html.scope_ids.usage_id),
                "content": {"text": "This is the html text"}
            },
            "test"
        )
        # A fully-specified result maps to the course jump_to url.
        assert srp.url == '/courses/{}/jump_to/{}'.format(six.text_type(self.course.id),
            six.text_type(self.html.scope_ids.usage_id))

    def test_should_remove(self):
        """
        Tests that "visible_to_staff_only" overrides start date.
        """
        srp = LmsSearchResultProcessor(
            {
                "course": six.text_type(self.course.id),
                "id": six.text_type(self.html.scope_ids.usage_id),
                "content": {"text": "This is html test text"}
            },
            "test"
        )
        # Staff users always retain access to the result.
        assert srp.should_remove(self.global_staff) is False
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.