#!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
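# This test builds one vtkStructuredGrid per dimensionality (a 0D point, 1D
# lines along X/Y/Z, 2D planes in XY/YZ/XZ, and a 3D hexahedron), exercises
# GetCell()/GetCellPoints() on each, and renders the results side by side.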
# Remove cullers so single vertex will render
ren1 = vtk.vtkRenderer()
ren1.GetCullers().RemoveAllItems()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
cell = vtk.vtkGenericCell()
ptIds = vtk.vtkIdList()
# 0D
ZeroDPts = vtk.vtkPoints()
ZeroDPts.SetNumberOfPoints(1)
ZeroDPts.SetPoint(0,0,0,0)
ZeroDGrid = vtk.vtkStructuredGrid()
ZeroDGrid.SetDimensions(1,1,1)
ZeroDGrid.SetPoints(ZeroDPts)
ZeroDGrid.GetCell(0)
ZeroDGrid.GetCell(0,cell)
ZeroDGrid.GetCellPoints(0,ptIds)
ZeroDGeom = vtk.vtkStructuredGridGeometryFilter()
ZeroDGeom.SetInputData(ZeroDGrid)
ZeroDGeom.SetExtent(0,2,0,2,0,2)
ZeroDMapper = vtk.vtkPolyDataMapper()
ZeroDMapper.SetInputConnection(ZeroDGeom.GetOutputPort())
ZeroDActor = vtk.vtkActor()
ZeroDActor.SetMapper(ZeroDMapper)
ZeroDActor.SetPosition(0,0,0)
ren1.AddActor(ZeroDActor)
# 1D - X
XPts = vtk.vtkPoints()
XPts.SetNumberOfPoints(2)
XPts.SetPoint(0,0,0,0)
XPts.SetPoint(1,1,0,0)
XGrid = vtk.vtkStructuredGrid()
XGrid.SetDimensions(2,1,1)
XGrid.SetPoints(XPts)
XGrid.GetCell(0)
XGrid.GetCell(0,cell)
XGrid.GetCellPoints(0,ptIds)
XGeom = vtk.vtkStructuredGridGeometryFilter()
XGeom.SetInputData(XGrid)
XGeom.SetExtent(0,2,0,2,0,2)
XMapper = vtk.vtkPolyDataMapper()
XMapper.SetInputConnection(XGeom.GetOutputPort())
XActor = vtk.vtkActor()
XActor.SetMapper(XMapper)
XActor.SetPosition(2,0,0)
ren1.AddActor(XActor)
# 1D - Y
YPts = vtk.vtkPoints()
YPts.SetNumberOfPoints(2)
YPts.SetPoint(0,0,0,0)
YPts.SetPoint(1,0,1,0)
YGrid = vtk.vtkStructuredGrid()
YGrid.SetDimensions(1,2,1)
YGrid.SetPoints(YPts)
YGrid.GetCell(0)
YGrid.GetCell(0,cell)
YGrid.GetCellPoints(0,ptIds)
YGeom = vtk.vtkStructuredGridGeometryFilter()
YGeom.SetInputData(YGrid)
YGeom.SetExtent(0,2,0,2,0,2)
YMapper = vtk.vtkPolyDataMapper()
YMapper.SetInputConnection(YGeom.GetOutputPort())
YActor = vtk.vtkActor()
YActor.SetMapper(YMapper)
YActor.SetPosition(4,0,0)
ren1.AddActor(YActor)
# 1D - Z
ZPts = vtk.vtkPoints()
ZPts.SetNumberOfPoints(2)
ZPts.SetPoint(0,0,0,0)
ZPts.SetPoint(1,0,0,1)
ZGrid = vtk.vtkStructuredGrid()
ZGrid.SetDimensions(1,1,2)
ZGrid.SetPoints(ZPts)
ZGrid.GetCell(0)
ZGrid.GetCell(0,cell)
ZGrid.GetCellPoints(0,ptIds)
ZGeom = vtk.vtkStructuredGridGeometryFilter()
ZGeom.SetInputData(ZGrid)
ZGeom.SetExtent(0,2,0,2,0,2)
ZMapper = vtk.vtkPolyDataMapper()
ZMapper.SetInputConnection(ZGeom.GetOutputPort())
ZActor = vtk.vtkActor()
ZActor.SetMapper(ZMapper)
ZActor.SetPosition(6,0,0)
ren1.AddActor(ZActor)
# 2D - XY
XYPts = vtk.vtkPoints()
XYPts.SetNumberOfPoints(4)
XYPts.SetPoint(0,0,0,0)
XYPts.SetPoint(1,1,0,0)
XYPts.SetPoint(2,0,1,0)
XYPts.SetPoint(3,1,1,0)
XYGrid = vtk.vtkStructuredGrid()
XYGrid.SetDimensions(2,2,1)
XYGrid.SetPoints(XYPts)
XYGrid.GetCell(0)
XYGrid.GetCell(0,cell)
XYGrid.GetCellPoints(0,ptIds)
XYGeom = vtk.vtkStructuredGridGeometryFilter()
XYGeom.SetInputData(XYGrid)
XYGeom.SetExtent(0,2,0,2,0,2)
XYMapper = vtk.vtkPolyDataMapper()
XYMapper.SetInputConnection(XYGeom.GetOutputPort())
XYActor = vtk.vtkActor()
XYActor.SetMapper(XYMapper)
XYActor.SetPosition(0,2,0)
ren1.AddActor(XYActor)
# 2D - YZ
YZPts = vtk.vtkPoints()
YZPts.SetNumberOfPoints(4)
YZPts.SetPoint(0,0,0,0)
YZPts.SetPoint(1,0,1,0)
YZPts.SetPoint(2,0,0,1)
YZPts.SetPoint(3,0,1,1)
YZGrid = vtk.vtkStructuredGrid()
YZGrid.SetDimensions(1,2,2)
YZGrid.SetPoints(YZPts)
YZGrid.GetCell(0)
YZGrid.GetCell(0,cell)
YZGrid.GetCellPoints(0,ptIds)
YZGeom = vtk.vtkStructuredGridGeometryFilter()
YZGeom.SetInputData(YZGrid)
YZGeom.SetExtent(0,2,0,2,0,2)
YZMapper = vtk.vtkPolyDataMapper()
YZMapper.SetInputConnection(YZGeom.GetOutputPort())
YZActor = vtk.vtkActor()
YZActor.SetMapper(YZMapper)
YZActor.SetPosition(2,2,0)
ren1.AddActor(YZActor)
# 2D - XZ
XZPts = vtk.vtkPoints()
XZPts.SetNumberOfPoints(4)
XZPts.SetPoint(0,0,0,0)
XZPts.SetPoint(1,1,0,0)
XZPts.SetPoint(2,0,0,1)
XZPts.SetPoint(3,1,0,1)
XZGrid = vtk.vtkStructuredGrid()
XZGrid.SetDimensions(2,1,2)
XZGrid.SetPoints(XZPts)
XZGrid.GetCell(0)
XZGrid.GetCell(0,cell)
XZGrid.GetCellPoints(0,ptIds)
XZGeom = vtk.vtkStructuredGridGeometryFilter()
XZGeom.SetInputData(XZGrid)
XZGeom.SetExtent(0,2,0,2,0,2)
XZMapper = vtk.vtkPolyDataMapper()
XZMapper.SetInputConnection(XZGeom.GetOutputPort())
XZActor = vtk.vtkActor()
XZActor.SetMapper(XZMapper)
XZActor.SetPosition(4,2,0)
ren1.AddActor(XZActor)
# 3D
XYZPts = vtk.vtkPoints()
XYZPts.SetNumberOfPoints(8)
XYZPts.SetPoint(0,0,0,0)
XYZPts.SetPoint(1,1,0,0)
XYZPts.SetPoint(2,0,1,0)
XYZPts.SetPoint(3,1,1,0)
XYZPts.SetPoint(4,0,0,1)
XYZPts.SetPoint(5,1,0,1)
XYZPts.SetPoint(6,0,1,1)
XYZPts.SetPoint(7,1,1,1)
XYZGrid = vtk.vtkStructuredGrid()
XYZGrid.SetDimensions(2,2,2)
XYZGrid.SetPoints(XYZPts)
XYZGrid.GetCell(0)
XYZGrid.GetCell(0,cell)
XYZGrid.GetCellPoints(0,ptIds)
XYZGeom = vtk.vtkStructuredGridGeometryFilter()
XYZGeom.SetInputData(XYZGrid)
XYZGeom.SetExtent(0,2,0,2,0,2)
XYZMapper = vtk.vtkPolyDataMapper()
XYZMapper.SetInputConnection(XYZGeom.GetOutputPort())
XYZActor = vtk.vtkActor()
XYZActor.SetMapper(XYZMapper)
XYZActor.SetPosition(6,2,0)
ren1.AddActor(XYZActor)
# render the image
#
renWin.SetSize(300,150)
cam1 = ren1.GetActiveCamera()
cam1.SetClippingRange(2.27407,14.9819)
cam1.SetFocalPoint(3.1957,1.74012,0.176603)
cam1.SetPosition(-0.380779,6.13894,5.59404)
cam1.SetViewUp(0.137568,0.811424,-0.568037)
renWin.Render()
iren.Initialize()
# the test framework drives the render window; no interactive event loop is started here
# --- end of script --
[repo: HopeFOAM/HopeFOAM | path: ThirdParty-0.1/ParaView-5.0.1/VTK/Common/DataModel/Testing/Python/TestStructuredGrid.py | language: Python | license: gpl-3.0 | size: 5,545]
# $Id$
#
# Copyright (C) 2002-2008 Greg Landrum and Rational Discovery LLC
#
# @@ All Rights Reserved @@
# This file is part of the RDKit.
# The contents are covered by the terms of the BSD license
# which is included in the file license.txt, found at the root
# of the RDKit source tree.
#
""" Atom-based calculation of LogP and MR using Crippen's approach
Reference:
S. A. Wildman and G. M. Crippen *JCICS* _39_ 868-873 (1999)
"""
from __future__ import print_function
import os
from rdkit import RDConfig
from rdkit import Chem
from rdkit.Chem import rdMolDescriptors
import numpy
_smartsPatterns = {}
_patternOrder = []
# this is the file containing the atom contributions
defaultPatternFileName = os.path.join(RDConfig.RDDataDir,'Crippen.txt')
def _ReadPatts(fileName):
""" *Internal Use Only*
parses the pattern list from the data file
"""
patts = {}
order = []
with open(fileName,'r') as f:
lines = f.readlines()
for line in lines:
if line[0] != '#':
splitLine = line.split('\t')
if len(splitLine)>=4 and splitLine[0] != '':
sma = splitLine[1]
if sma!='SMARTS':
sma = sma.replace('"', '')  # str.replace returns a new string; the result must be reassigned
try:
p = Chem.MolFromSmarts(sma)
except Exception:
pass
else:
if p:
if len(splitLine[0])>1 and splitLine[0][1] not in 'S0123456789':
cha = splitLine[0][:2]
else:
cha = splitLine[0][0]
logP = float(splitLine[2])
if splitLine[3] != '':
mr = float(splitLine[3])
else:
mr = 0.0
if cha not in order:
order.append(cha)
l = patts.get(cha,[])
l.append((sma,p,logP,mr))
patts[cha] = l
else:
print('Problems parsing smarts: %s'%(sma))
return order,patts
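# _ReadPatts expects tab-separated data lines of the form (illustrative, not
# quoted verbatim from Crippen.txt):
#   C1<TAB>[CH4]<TAB>0.1441<TAB>2.503
# i.e. atom-type label, SMARTS pattern, logP contribution, and an optional MR
# contribution; lines starting with '#' are skipped.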
_GetAtomContribs=rdMolDescriptors._CalcCrippenContribs
def _pyGetAtomContribs(mol,patts=None,order=None,verbose=0,force=0):
""" *Internal Use Only*
calculates atomic contributions to the LogP and MR values
if the argument *force* is not set, we'll use the molecules stored
_crippenContribs value when possible instead of re-calculating.
**Note:** Changes here affect the version numbers of MolLogP and MolMR
as well as the VSA descriptors in Chem.MolSurf
"""
if not force and hasattr(mol,'_crippenContribs'):
return mol._crippenContribs
if patts is None:
patts = _smartsPatterns
order = _patternOrder
nAtoms = mol.GetNumAtoms()
atomContribs = [(0.,0.)]*nAtoms
doneAtoms=[0]*nAtoms
nAtomsFound=0
done = False
for cha in order:
pattVect = patts[cha]
for sma,patt,logp,mr in pattVect:
#print('try:',entry[0])
for match in mol.GetSubstructMatches(patt,False,False):
firstIdx = match[0]
if not doneAtoms[firstIdx]:
doneAtoms[firstIdx]=1
atomContribs[firstIdx] = (logp,mr)
if verbose:
print('\tAtom %d: %s %4.4f %4.4f'%(match[0],sma,logp,mr))
nAtomsFound+=1
if nAtomsFound>=nAtoms:
done=True
break
if done: break
mol._crippenContribs = atomContribs
return atomContribs
def _Init():
global _smartsPatterns,_patternOrder
if _smartsPatterns == {}:
_patternOrder,_smartsPatterns = _ReadPatts(defaultPatternFileName)
def _pyMolLogP(inMol,patts=None,order=None,verbose=0,addHs=1):
""" DEPRECATED
"""
if addHs < 0:
mol = Chem.AddHs(inMol,1)
elif addHs > 0:
mol = Chem.AddHs(inMol,0)
else:
mol = inMol
if patts is None:
global _smartsPatterns,_patternOrder
if _smartsPatterns == {}:
_patternOrder,_smartsPatterns = _ReadPatts(defaultPatternFileName)
patts = _smartsPatterns
order = _patternOrder
atomContribs = _pyGetAtomContribs(mol,patts,order,verbose=verbose)
return numpy.sum(atomContribs,0)[0]
_pyMolLogP.version="1.1.0"
def _pyMolMR(inMol,patts=None,order=None,verbose=0,addHs=1):
""" DEPRECATED
"""
if addHs < 0:
mol = Chem.AddHs(inMol,1)
elif addHs > 0:
mol = Chem.AddHs(inMol,0)
else:
mol = inMol
if patts is None:
global _smartsPatterns,_patternOrder
if _smartsPatterns == {}:
_patternOrder,_smartsPatterns = _ReadPatts(defaultPatternFileName)
patts = _smartsPatterns
order = _patternOrder
atomContribs = _pyGetAtomContribs(mol,patts,order,verbose=verbose)
return numpy.sum(atomContribs,0)[1]
_pyMolMR.version="1.1.0"
MolLogP=lambda *x,**y:rdMolDescriptors.CalcCrippenDescriptors(*x,**y)[0]
MolLogP.version=rdMolDescriptors._CalcCrippenDescriptors_version
MolLogP.__doc__=""" Wildman-Crippen LogP value
Uses an atom-based scheme based on the values in the paper:
S. A. Wildman and G. M. Crippen JCICS 39 868-873 (1999)
**Arguments**
- inMol: a molecule
- addHs: (optional) toggles adding of Hs to the molecule for the calculation.
If true, hydrogens will be added to the molecule and used in the calculation.
"""
MolMR=lambda *x,**y:rdMolDescriptors.CalcCrippenDescriptors(*x,**y)[1]
MolMR.version=rdMolDescriptors._CalcCrippenDescriptors_version
MolMR.__doc__=""" Wildman-Crippen MR value
Uses an atom-based scheme based on the values in the paper:
S. A. Wildman and G. M. Crippen JCICS 39 868-873 (1999)
**Arguments**
- inMol: a molecule
- addHs: (optional) toggles adding of Hs to the molecule for the calculation.
If true, hydrogens will be added to the molecule and used in the calculation.
"""
if __name__=='__main__':
import sys
if len(sys.argv):
ms = []
verbose=0
if '-v' in sys.argv:
verbose=1
sys.argv.remove('-v')
for smi in sys.argv[1:]:
ms.append((smi,Chem.MolFromSmiles(smi)))
for smi,m in ms:
print('Mol: %s'%(smi))
logp = MolLogP(m,verbose=verbose)
print('----')
mr = MolMR(m,verbose=verbose)
print('Res:',logp,mr)
newM = Chem.AddHs(m)
logp = MolLogP(newM,addHs=0)
mr = MolMR(newM,addHs=0)
print('\t',logp,mr)
print('-*-*-*-*-*-*-*-*')
[repo: soerendip42/rdkit | path: rdkit/Chem/Crippen.py | language: Python | license: bsd-3-clause | size: 6,129]
#!/usr/bin/env python
"""
Use the AppVeyor API to download Windows artifacts.
Taken from: https://bitbucket.org/ned/coveragepy/src/tip/ci/download_appveyor.py
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://bitbucket.org/ned/coveragepy/src/default/NOTICE.txt
"""
from __future__ import unicode_literals
import argparse
import os
import requests
import zipfile
def make_auth_headers():
"""Make the authentication headers needed to use the Appveyor API."""
path = os.path.expanduser("~/.appveyor.token")
if not os.path.exists(path):
raise RuntimeError(
"Please create a file named `.appveyor.token` in your home directory. "
"You can get the token from https://ci.appveyor.com/api-token"
)
with open(path) as f:
token = f.read().strip()
headers = {
'Authorization': 'Bearer {}'.format(token),
}
return headers
def download_latest_artifacts(account_project, build_id):
"""Download all the artifacts from the latest build."""
if build_id is None:
url = "https://ci.appveyor.com/api/projects/{}".format(account_project)
else:
url = "https://ci.appveyor.com/api/projects/{}/build/{}".format(account_project, build_id)
build = requests.get(url, headers=make_auth_headers()).json()
jobs = build['build']['jobs']
print(u"Build {0[build][version]}, {1} jobs: {0[build][message]}".format(build, len(jobs)))
for job in jobs:
name = job['name']
print(u" {0}: {1[status]}, {1[artifactsCount]} artifacts".format(name, job))
url = "https://ci.appveyor.com/api/buildjobs/{}/artifacts".format(job['jobId'])
response = requests.get(url, headers=make_auth_headers())
artifacts = response.json()
for artifact in artifacts:
is_zip = artifact['type'] == "Zip"
filename = artifact['fileName']
print(u" {0}, {1} bytes".format(filename, artifact['size']))
url = "https://ci.appveyor.com/api/buildjobs/{}/artifacts/{}".format(job['jobId'], filename)
download_url(url, filename, make_auth_headers())
if is_zip:
unpack_zipfile(filename)
os.remove(filename)
def ensure_dirs(filename):
"""Make sure the directories exist for `filename`."""
dirname, _ = os.path.split(filename)
if dirname and not os.path.exists(dirname):
os.makedirs(dirname)
def download_url(url, filename, headers):
"""Download a file from `url` to `filename`."""
ensure_dirs(filename)
response = requests.get(url, headers=headers, stream=True)
if response.status_code == 200:
with open(filename, 'wb') as f:
for chunk in response.iter_content(16 * 1024):
f.write(chunk)
else:
print(u" Error downloading {}: {}".format(url, response))
def unpack_zipfile(filename):
"""Unpack a zipfile, using the names in the zip."""
with open(filename, 'rb') as fzip:
z = zipfile.ZipFile(fzip)
for name in z.namelist():
print(u" extracting {}".format(name))
ensure_dirs(name)
z.extract(name)
parser = argparse.ArgumentParser(description='Download artifacts from AppVeyor.')
parser.add_argument('--id',
metavar='PROJECT_ID',
default='Justin-W/clifunland',
help='Project ID in AppVeyor.')
parser.add_argument('build',
nargs='?',
metavar='BUILD_ID',
help='Build ID in AppVeyor. Eg: master-123')
if __name__ == "__main__":
# import logging
# logging.basicConfig(level="DEBUG")
args = parser.parse_args()
download_latest_artifacts(args.id, args.build)
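# Example invocation (a sketch; --id defaults to Justin-W/clifunland and the
# build id is optional):
#   python appveyor-download.py --id owner/project master-123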
[repo: Justin-W/clifunland | path: ci/appveyor-download.py | language: Python | license: bsd-2-clause | size: 3,815]
# flake8: noqa
# There's no way to ignore "F401 '...' imported but unused" warnings in this
# module, but to preserve other warnings. So, don't check this module at all.
# Copyright 2021 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import TYPE_CHECKING
from ...file_utils import (
_LazyModule,
is_sentencepiece_available,
is_speech_available,
is_tf_available,
is_torch_available,
)
_import_structure = {
"configuration_speech_to_text": [
"SPEECH_TO_TEXT_PRETRAINED_CONFIG_ARCHIVE_MAP",
"Speech2TextConfig",
],
}
if is_sentencepiece_available():
_import_structure["tokenization_speech_to_text"] = ["Speech2TextTokenizer"]
if is_speech_available():
_import_structure["feature_extraction_speech_to_text"] = ["Speech2TextFeatureExtractor"]
if is_sentencepiece_available():
_import_structure["processing_speech_to_text"] = ["Speech2TextProcessor"]
if is_tf_available():
_import_structure["modeling_tf_speech_to_text"] = [
"TF_SPEECH_TO_TEXT_PRETRAINED_MODEL_ARCHIVE_LIST",
"TFSpeech2TextForConditionalGeneration",
"TFSpeech2TextModel",
"TFSpeech2TextPreTrainedModel",
]
if is_torch_available():
_import_structure["modeling_speech_to_text"] = [
"SPEECH_TO_TEXT_PRETRAINED_MODEL_ARCHIVE_LIST",
"Speech2TextForConditionalGeneration",
"Speech2TextModel",
"Speech2TextPreTrainedModel",
]
if TYPE_CHECKING:
from .configuration_speech_to_text import SPEECH_TO_TEXT_PRETRAINED_CONFIG_ARCHIVE_MAP, Speech2TextConfig
if is_sentencepiece_available():
from .tokenization_speech_to_text import Speech2TextTokenizer
if is_speech_available():
from .feature_extraction_speech_to_text import Speech2TextFeatureExtractor
if is_sentencepiece_available():
from .processing_speech_to_text import Speech2TextProcessor
if is_tf_available():
from .modeling_tf_speech_to_text import (
TF_SPEECH_TO_TEXT_PRETRAINED_MODEL_ARCHIVE_LIST,
TFSpeech2TextForConditionalGeneration,
TFSpeech2TextModel,
TFSpeech2TextPreTrainedModel,
)
if is_torch_available():
from .modeling_speech_to_text import (
SPEECH_TO_TEXT_PRETRAINED_MODEL_ARCHIVE_LIST,
Speech2TextForConditionalGeneration,
Speech2TextModel,
Speech2TextPreTrainedModel,
)
else:
import sys
sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
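# Note: swapping sys.modules[__name__] for a _LazyModule defers the imports
# declared in _import_structure until an attribute is first accessed, so the
# optional backends (torch, TensorFlow, sentencepiece, speech) are only loaded
# on demand, while TYPE_CHECKING still sees the eager imports above.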
[repo: huggingface/transformers | path: src/transformers/models/speech_to_text/__init__.py | language: Python | license: apache-2.0 | size: 3,127]
import collections
import operator
import numpy
import sys
from keras.layers import Dense, Embedding
from keras.layers import Dropout
from keras.layers.recurrent import GRU
from keras.models import Sequential
from keras.utils import np_utils
from keras.layers.convolutional import Convolution1D
from keras.layers.convolutional import MaxPooling1D
from keras.layers import LSTM
import language_parser.SemanticVector as sv
import language_parser.Structure as structure
import language_parser.Word as w
class StructureModel:
def __init__(self, file):
self.file = file
def model(self):
struct = structure.Structure(self.file.text)
seq_length = 10
word_list = struct.prepare_pure_list_of_words()
# compute the vocabulary size
vocabulary = sorted(list(set(word_list)))
vocab_length = len(vocabulary)
struct.generate_tags_dict()
# semantic modeling
semantic = StructureModel.semantic_model(struct, seq_length)
StructureModel.word_model(struct, seq_length, semantic.model, word_list, vocabulary, vocab_length)
@classmethod
def word_model(cls, structure, seq_length, word2vec, word_list, vocabulary, vocab_length):
total = 0
for t in structure.sentences_obj:
total += t.sentence_len
avg = total / len(structure.sentences_obj)
print "average length of sentence", avg
word_to_int = dict((c, i) for i, c in enumerate(vocabulary))
int_to_word = dict((i, c) for i, c in enumerate(vocabulary))
non_word2vec_list = [0.0] * 100
dataX = []
dataY = list()
n_words_in_text = len(word_list)
#for sentence in structure.sentences_obj:
if 1 == 1:  # placeholder scope kept from the commented-out per-sentence loop above
for i in range(0, len(word_list) - seq_length):
#for i in range(0, len(sentence.words) - seq_length, 1):
seq_in = word_list[i:i + seq_length] #sentence.words[i:i + seq_length]
seq_out = word_list[i + seq_length] #sentence.words[i + seq_length]
dataX.append([word_to_int[word] for word in seq_in])
if seq_out not in word2vec.wv.vocab:
dataY.append(non_word2vec_list)
else:
dataY.append(word2vec[seq_out])
n_patterns = len(dataX)
for x in dataX:
if len(x) != seq_length:
print 'unequal'
X = numpy.reshape(dataX, (n_patterns, seq_length))
print X.shape
y = numpy.reshape(dataY, (n_patterns, 100))
print X.shape, y.shape
#X = X[0:1000, :]
#y = y[0:1000, :]
#y = numpy.reshape(y, (1000, 1))
print X.shape
print X
print y
word_model = Sequential()
nn = 16*2
print vocab_length, seq_length
embedding_layer = Embedding(50000, 100, input_length=seq_length)
word_model.add(embedding_layer)
word_model.add(Convolution1D(nb_filter=32*9, filter_length=3, border_mode='same', activation='relu'))
word_model.add(MaxPooling1D(pool_length=3))
word_model.add(LSTM(32*3*3, return_sequences=True))
word_model.add(Dropout(0.05))
word_model.add(Convolution1D(nb_filter=32*3, filter_length=3, border_mode='same', activation='relu'))
word_model.add(MaxPooling1D(pool_length=3))
word_model.add(LSTM(32*3, return_sequences=True))
word_model.add(Dropout(0.05))
#word_model.add(Convolution1D(nb_filter=32*2, filter_length=3, border_mode='same', activation='relu'))
#word_model.add(MaxPooling1D(pool_length=3))
word_model.add(LSTM(32*3, return_sequences=True))
word_model.add(Dropout(0.05))
word_model.add(LSTM(32*3, return_sequences=True))
word_model.add(Dropout(0.05))
word_model.add(LSTM(32*3, return_sequences=True))
word_model.add(Dropout(0.05))
word_model.add(LSTM(nn * 4, return_sequences=False))
word_model.add(Dropout(0.05))
word_model.add(Dense(y.shape[1], activation='tanh'))
print word_model.summary()
# load the network weights
word_model.compile(loss='mean_squared_error', optimizer='adam', metrics=['accuracy'])
# testing
for rn in range(100):
print rn
word_model.fit(X, y, nb_epoch=5, batch_size=512) # , callbacks=callbacks_list)
# pick a random seed
start = numpy.random.randint(0, len(dataX) - 1)
pattern = dataX[start] #dataX
print "Seed:"
print start
print(pattern)
rs = []
for i in range(30):
x = numpy.reshape(pattern, (1, seq_length))
prediction = word_model.predict(x, verbose=0)
prd_word = StructureModel.find_nearest_words(word2vec, prediction)
sys.stdout.write(prd_word[0]+' ')
pattern.append(word_to_int[prd_word[0]])
pattern = pattern[1:len(pattern)]
print "\nDone."
@classmethod
def find_nearest_words(cls, word2vec, prediction_vec):
model_word_vector = numpy.array(prediction_vec[0], dtype='f')
topn = 20
most_similar_words = word2vec.wv.most_similar([model_word_vector], [], topn)
return most_similar_words[0]
@classmethod
def semantic_model(cls, structure, seq_length):
semantic_model = sv.SemanticVector(structure)
semantic_model.model_word2vec(15, seq_length)
semantic_model.save_model('weights.02.11.hdf5')
return semantic_model
[repo: arashzamani/lstm_nlg_ver1 | path: test_cases/algorithm2_with_embedding.py | language: Python | license: gpl-3.0 | size: 5,402]
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import re
import time
import math
from openerp import api, fields as fields2
from openerp import tools
from openerp.osv import fields, osv
from openerp.tools import float_round, float_is_zero, float_compare
from openerp.tools.translate import _
from openerp.exceptions import UserError
CURRENCY_DISPLAY_PATTERN = re.compile(r'(\w+)\s*(?:\((.*)\))?')
class res_currency(osv.osv):
def _current_rate(self, cr, uid, ids, name, arg, context=None):
return self._get_current_rate(cr, uid, ids, context=context)
def _current_rate_silent(self, cr, uid, ids, name, arg, context=None):
return self._get_current_rate(cr, uid, ids, raise_on_no_rate=False, context=context)
def _get_current_rate(self, cr, uid, ids, raise_on_no_rate=True, context=None):
if context is None:
context = {}
res = {}
date = context.get('date') or time.strftime('%Y-%m-%d')
for id in ids:
cr.execute('SELECT rate FROM res_currency_rate '
'WHERE currency_id = %s '
'AND name <= %s '
'ORDER BY name desc LIMIT 1',
(id, date))
if cr.rowcount:
res[id] = cr.fetchone()[0]
elif not raise_on_no_rate:
res[id] = 0
else:
currency = self.browse(cr, uid, id, context=context)
raise UserError(_("No currency rate associated for currency '%s' for the given period: %s") % (currency.name, date))
return res
def _decimal_places(self, cr, uid, ids, name, arg, context=None):
res = {}
for currency in self.browse(cr, uid, ids, context=context):
if currency.rounding > 0 and currency.rounding < 1:
res[currency.id] = int(math.ceil(math.log10(1/currency.rounding)))
else:
res[currency.id] = 0
return res
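# For example, a rounding factor of 0.01 yields
# int(math.ceil(math.log10(1/0.01))) == 2 decimal places, while a factor
# outside the (0, 1) interval falls back to 0.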
_name = "res.currency"
_description = "Currency"
_columns = {
# Note: 'code' column was removed as of v6.0, the 'name' should now hold the ISO code.
'name': fields.char('Currency', size=3, required=True, help="Currency Code (ISO 4217)"),
'symbol': fields.char('Symbol', size=4, help="Currency sign, to be used when printing amounts."),
'rate': fields.function(_current_rate, string='Current Rate', digits=(12,6),
help='The rate of the currency to the currency of rate 1.'),
# Do not use for computation ! Same as rate field with silent failing
'rate_silent': fields.function(_current_rate_silent, string='Current Rate', digits=(12,6),
help='The rate of the currency to the currency of rate 1 (0 if no rate defined).'),
'rate_ids': fields.one2many('res.currency.rate', 'currency_id', 'Rates'),
'accuracy': fields.integer('Computational Accuracy'),
'rounding': fields.float('Rounding Factor', digits=(12,6)),
'decimal_places': fields.function(_decimal_places, string='Decimal Places', type='integer'),
'active': fields.boolean('Active'),
'company_id':fields.many2one('res.company', 'Company'),
'base': fields.boolean('Base'),
'position': fields.selection([('after','After Amount'),('before','Before Amount')], 'Symbol Position', help="Determines where the currency symbol should be placed after or before the amount.")
}
_defaults = {
'active': 1,
'position' : 'after',
'rounding': 0.01,
'accuracy': 4,
'company_id': False,
}
_sql_constraints = [
# this constraint does not cover all cases due to SQL NULL handling for company_id,
# so it is complemented with a unique index (see below). The constraint and index
# share the same prefix so that IntegrityError triggered by the index will be caught
# and reported to the user with the constraint's error message.
('unique_name_company_id', 'unique (name, company_id)', 'The currency code must be unique per company!'),
]
_order = "name"
def init(self, cr):
# CONSTRAINT/UNIQUE INDEX on (name,company_id)
# /!\ The unique constraint 'unique_name_company_id' is not sufficient, because SQL92
# only support field names in constraint definitions, and we need a function here:
# we need to special-case company_id to treat all NULL company_id as equal, otherwise
# we would allow duplicate "global" currencies (all having company_id == NULL)
cr.execute("""SELECT indexname FROM pg_indexes WHERE indexname = 'res_currency_unique_name_company_id_idx'""")
if not cr.fetchone():
cr.execute("""CREATE UNIQUE INDEX res_currency_unique_name_company_id_idx
ON res_currency
(name, (COALESCE(company_id,-1)))""")
date = fields2.Date(compute='compute_date')
@api.one
@api.depends('rate_ids.name')
def compute_date(self):
self.date = self.rate_ids[:1].name
def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
if not args:
args = []
results = super(res_currency,self)\
.name_search(cr, user, name, args, operator=operator, context=context, limit=limit)
if not results:
name_match = CURRENCY_DISPLAY_PATTERN.match(name)
if name_match:
results = super(res_currency,self)\
.name_search(cr, user, name_match.group(1), args, operator=operator, context=context, limit=limit)
return results
def name_get(self, cr, uid, ids, context=None):
if not ids:
return []
if isinstance(ids, (int, long)):
ids = [ids]
reads = self.read(cr, uid, ids, ['name','symbol'], context=context, load='_classic_write')
return [(x['id'], tools.ustr(x['name'])) for x in reads]
@api.v8
def round(self, amount):
""" Return `amount` rounded according to currency `self`. """
return float_round(amount, precision_rounding=self.rounding)
@api.v7
def round(self, cr, uid, currency, amount):
"""Return ``amount`` rounded according to ``currency``'s
rounding rules.
:param Record currency: currency for which we are rounding
:param float amount: the amount to round
:return: rounded float
"""
return float_round(amount, precision_rounding=currency.rounding)
@api.v8
def compare_amounts(self, amount1, amount2):
""" Compare `amount1` and `amount2` after rounding them according to
`self`'s precision. An amount is considered lower/greater than
another amount if their rounded value is different. This is not the
same as having a non-zero difference!
For example 1.432 and 1.431 are equal at 2 digits precision, so this
method would return 0. However 0.006 and 0.002 are considered
different (returns 1) because they respectively round to 0.01 and
0.0, even though 0.006-0.002 = 0.004 which would be considered zero
at 2 digits precision.
"""
return float_compare(amount1, amount2, precision_rounding=self.rounding)
@api.v7
def compare_amounts(self, cr, uid, currency, amount1, amount2):
"""Compare ``amount1`` and ``amount2`` after rounding them according to the
given currency's precision.
An amount is considered lower/greater than another amount if their rounded
value is different. This is not the same as having a non-zero difference!
For example 1.432 and 1.431 are equal at 2 digits precision,
so this method would return 0.
However 0.006 and 0.002 are considered different (returns 1) because
they respectively round to 0.01 and 0.0, even though
0.006-0.002 = 0.004 which would be considered zero at 2 digits precision.
:param Record currency: currency for which we are rounding
:param float amount1: first amount to compare
:param float amount2: second amount to compare
:return: (resp.) -1, 0 or 1, if ``amount1`` is (resp.) lower than,
equal to, or greater than ``amount2``, according to
``currency``'s rounding.
"""
return float_compare(amount1, amount2, precision_rounding=currency.rounding)
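# Illustrating the docstring above with float_compare directly (2-digit
# precision corresponds to precision_rounding=0.01):
#   float_compare(1.432, 1.431, precision_rounding=0.01)  # -> 0 (both round to 1.43)
#   float_compare(0.006, 0.002, precision_rounding=0.01)  # -> 1 (0.01 vs 0.0)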
@api.v8
def is_zero(self, amount):
""" Return true if `amount` is small enough to be treated as zero
according to currency `self`'s rounding rules.
Warning: ``is_zero(amount1-amount2)`` is not always equivalent to
``compare_amounts(amount1,amount2) == 0``, as the former will round
after computing the difference, while the latter will round before,
giving different results, e.g., 0.006 and 0.002 at 2 digits precision.
"""
return float_is_zero(amount, precision_rounding=self.rounding)
@api.v7
def is_zero(self, cr, uid, currency, amount):
"""Returns true if ``amount`` is small enough to be treated as
zero according to ``currency``'s rounding rules.
Warning: ``is_zero(amount1-amount2)`` is not always equivalent to
``compare_amounts(amount1,amount2) == 0``, as the former will round after
computing the difference, while the latter will round before, giving
different results for e.g. 0.006 and 0.002 at 2 digits precision.
:param Record currency: currency for which we are rounding
:param float amount: amount to compare with currency's zero
"""
return float_is_zero(amount, precision_rounding=currency.rounding)
def _get_conversion_rate(self, cr, uid, from_currency, to_currency, context=None):
if context is None:
context = {}
ctx = context.copy()
from_currency = self.browse(cr, uid, from_currency.id, context=ctx)
to_currency = self.browse(cr, uid, to_currency.id, context=ctx)
if from_currency.rate == 0 or to_currency.rate == 0:
date = context.get('date', time.strftime('%Y-%m-%d'))
if from_currency.rate == 0:
currency_symbol = from_currency.symbol
else:
currency_symbol = to_currency.symbol
raise UserError(_('No rate found \n' \
'for the currency: %s \n' \
'at the date: %s') % (currency_symbol, date))
return to_currency.rate/from_currency.rate
def _compute(self, cr, uid, from_currency, to_currency, from_amount, round=True, context=None):
if (to_currency.id == from_currency.id):
if round:
return self.round(cr, uid, to_currency, from_amount)
else:
return from_amount
else:
rate = self._get_conversion_rate(cr, uid, from_currency, to_currency, context=context)
if round:
return self.round(cr, uid, to_currency, from_amount * rate)
else:
return from_amount * rate
@api.v7
def compute(self, cr, uid, from_currency_id, to_currency_id, from_amount,
round=True, context=None):
context = context or {}
if not from_currency_id:
from_currency_id = to_currency_id
if not to_currency_id:
to_currency_id = from_currency_id
xc = self.browse(cr, uid, [from_currency_id,to_currency_id], context=context)
from_currency = (xc[0].id == from_currency_id and xc[0]) or xc[1]
to_currency = (xc[0].id == to_currency_id and xc[0]) or xc[1]
return self._compute(cr, uid, from_currency, to_currency, from_amount, round, context)
@api.v8
def compute(self, from_amount, to_currency, round=True):
""" Convert `from_amount` from currency `self` to `to_currency`. """
assert self, "compute from unknown currency"
assert to_currency, "compute to unknown currency"
# apply conversion rate
if self == to_currency:
to_amount = from_amount
else:
to_amount = from_amount * self._get_conversion_rate(self, to_currency)
# apply rounding
return to_currency.round(to_amount) if round else to_amount
def get_format_currencies_js_function(self, cr, uid, context=None):
""" Returns a string that can be used to instanciate a javascript function that formats numbers as currencies.
That function expects the number as first parameter and the currency id as second parameter. In case of failure it returns undefined."""
function = ""
for row in self.search_read(cr, uid, domain=[], fields=['id', 'name', 'symbol', 'rounding', 'position'], context=context):
digits = int(math.ceil(math.log10(1 / row['rounding'])))
symbol = row['symbol'] or row['name']
format_number_str = "openerp.web.format_value(arguments[0], {type: 'float', digits: [69," + str(digits) + "]}, 0.00)"
if row['position'] == 'after':
return_str = "return " + format_number_str + " + '\\xA0" + symbol + "';"
else:
return_str = "return '" + symbol + "\\xA0' + " + format_number_str + ";"
function += "if (arguments[1] === " + str(row['id']) + ") { " + return_str + " }"
return function
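# For a currency row such as {'id': 1, 'symbol': u'$', 'rounding': 0.01,
# 'position': 'after'}, the generated JavaScript fragment looks like:
#   if (arguments[1] === 1) { return openerp.web.format_value(
#       arguments[0], {type: 'float', digits: [69,2]}, 0.00) + '\xA0$'; }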
class res_currency_rate(osv.osv):
_name = "res.currency.rate"
_description = "Currency Rate"
_columns = {
'name': fields.datetime('Date', required=True, select=True),
'rate': fields.float('Rate', digits=(12, 6), help='The rate of the currency to the currency of rate 1'),
'currency_id': fields.many2one('res.currency', 'Currency', readonly=True),
}
_defaults = {
'name': lambda *a: time.strftime('%Y-%m-%d 00:00:00'),
}
_order = "name desc"
def name_search(self, cr, user, name, args=None, operator='ilike', context=None, limit=80):
if operator in ['=', '!=']:
try:
date_format = '%Y-%m-%d'
if context.get('lang'):
lang_obj = self.pool['res.lang']
lang_ids = lang_obj.search(cr, user, [('code', '=', context['lang'])], context=context)
if lang_ids:
date_format = lang_obj.browse(cr, user, lang_ids[0], context=context).date_format
name = time.strftime('%Y-%m-%d', time.strptime(name, date_format))
except ValueError:
try:
args.append(('rate', operator, float(name)))
except ValueError:
return []
name = ''
operator = 'ilike'
return super(res_currency_rate, self).name_search(cr, user, name, args=args, operator=operator, context=context, limit=limit)
[repo: Grirrane/odoo | path: openerp/addons/base/res/res_currency.py | language: Python | license: agpl-3.0 | size: 15,990]
"""
Deprecated. Use ``update-tld-names`` command instead.
"""
from __future__ import print_function
__title__ = 'tld.update'
__author__ = 'Artur Barseghyan'
__copyright__ = '2013-2015 Artur Barseghyan'
__license__ = 'GPL 2.0/LGPL 2.1'
from tld.utils import update_tld_names
_ = lambda x: x
if __name__ == '__main__':
update_tld_names()
print(_("Local TLD names file has been successfully updated!"))
[repo: underdogio/tld | path: src/tld/update.py | language: Python | license: gpl-2.0 | size: 414]
from .base import *
class List(Base, list):
def __init__(self, value=()):  # avoid a shared mutable default argument
Base.__init__(self)
list.__init__(self, value)
def __getitem__(self, i):
item = list.__getitem__(self, i)
if isinstance(item, list):
item = List(item)
return item
def get_pen(self, index):
return Base.get_pen(self, index) or QColor(0x000000)
def get_brush(self, index):
return Base.get_brush(self, index) or QColor(0x0080FF)
def draw(self, visualization, painter):
w, h = visualization.width(), visualization.height()
length = len(self)
if length == 0:
return
wi = float(w) / length
mh = max(self)
x = 0
painter.translate(0, h)
painter.scale(1, 1)
for i in range(length):
painter.setPen(self.get_pen(i))
painter.setBrush(self.get_brush(i))
height = self[i] * h / mh
painter.drawRect(x, - height, wi, height)
x += wi
[repo: zlsun/VisualAlgorithm | path: src/structures/list.py | language: Python | license: mit | size: 1,031]
from random import randint
from flask.ext.script import Manager, prompt_bool
from faker import Factory
from app.mod_school import load_school, random_school
from app.mod_user import random_user
from app.mod_proposal import random_proposal
from .services import delete_all_collections
from .models import Collection
manager = Manager(usage="Perform Collective Development collection operations")
@manager.option('-n', '--num', dest='num', default=4)
def fake_collections(num):
""" Generates num fake collections """
faker = Factory.create()
for x in range(int(num)):
c = Collection(
proposer = random_user(),
title = faker.bs(),
description = faker.text(),
schools = [random_school(),],
created = faker.date_time(),
events = [],
proposals = [],
)
c.save()
for x1 in range(randint(0,20)):
c.add_interested_user(random_user())
for y in range(0,x):
c.add_proposal(random_proposal())
print "Created: ", c.title
@manager.option('-c', '--coll', dest='coll_id', default=None)
def display(coll_id):
try:
c = Collection.objects.get(id=coll_id)
print "Collection: ", c.title
print "Proposals:"
for p in c.all_proposals():
print "%s: %s" % (p.id, p.title)
print "Events:"
for e in c.all_events():
print "%s: %s" % (e.start, e.title)
except Exception:
print "No such collection."
@manager.command
def delete_all():
""" Deletes all collections """
if prompt_bool(
"Are you sure you want to delete all collections? This cannot be undone."):
delete_all_collections()
print "All collections have been deleted"
[repo: codeforanchorage/collective-development | path: app/mod_collection/manage.py | language: Python | license: mit | size: 1,565]
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Register utilities for external codegen."""
_PATTERN_TABLES = {}
def register_pattern_table(compiler, table=None):
"""Register a pattern table for an external compiler.
Pattern tables are used to create composite functions.
See the MergeComposite pass.
Parameters
----------
compiler : str
The name of compiler
table : function, optional
A function that returns the pattern table
Returns
-------
fregister : function
Register function if value is not specified.
"""
def _register(t):
"""internal register function"""
_PATTERN_TABLES[compiler] = t()
return t
return _register(table) if table is not None else _register
def get_pattern_table(compiler):
"""Get the pattern table associated with a compiler (if it's registered)."""
return _PATTERN_TABLES[compiler] if compiler in _PATTERN_TABLES else None
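# A minimal usage sketch (hypothetical compiler name "my_codegen"):
#
#   @register_pattern_table("my_codegen")
#   def my_pattern_table():
#       # return the (pattern name, pattern) pairs used by MergeComposite
#       return []
#
#   table = get_pattern_table("my_codegen")  # -> [] once registered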
[repo: sxjscience/tvm | path: python/tvm/relay/op/contrib/register.py | language: Python | license: apache-2.0 | size: 1,708]
#This file is part of Tryton. The COPYRIGHT file at the top level of
#this repository contains the full copyright notices and license terms.
__all__ = ['Wizard', 'StateView', 'StateTransition', 'StateAction', 'Button',
'Session']
try:
import simplejson as json
except ImportError:
import json
from trytond.pool import Pool
from trytond.transaction import Transaction
from trytond.error import WarningErrorMixin
from trytond.url import URLMixin
from trytond.protocols.jsonrpc import object_hook, JSONEncoder
from trytond.model.fields import states_validate
from trytond.pyson import PYSONEncoder
from trytond.model.browse import BrowseRecordNull
class Button(object):
'''
Define a button on wizard.
'''
def __init__(self, string, state, icon='', default=False, states=None):
self.string = string
self.state = state
self.icon = icon
self.default = bool(default)
self.__states = None
self.states = states or {}
@property
def states(self):
return self.__states
@states.setter
def states(self, value):
states_validate(value)
self.__states = value
class State(object):
'''
A State of a wizard.
'''
class StateView(State):
'''
A view state of a wizard.
'''
def __init__(self, model_name, view, buttons):
'''
model_name is the name of the model
view is the xml id of the view
buttons is a list of Button
'''
self.model_name = model_name
self.view = view
self.buttons = buttons
assert len(self.buttons) == len(set(b.state for b in self.buttons))
assert len([b for b in self.buttons if b.default]) <= 1
def get_view(self):
'''
Returns the view definition
'''
model_obj = Pool().get(self.model_name)
model_data_obj = Pool().get('ir.model.data')
module, fs_id = self.view.split('.')
view_id = model_data_obj.get_id(module, fs_id)
return model_obj.fields_view_get(view_id=view_id, view_type='form')
def get_defaults(self, wizard, session, state_name, fields):
'''
Returns defaults values for the fields
'''
model_obj = Pool().get(self.model_name)
defaults = model_obj.default_get(fields)
default = getattr(wizard, 'default_%s' % state_name, None)
if default:
defaults.update(default(session, fields))
return defaults
def get_buttons(self, wizard, state_name):
'''
Returns button definitions translated
'''
translation_obj = Pool().get('ir.translation')
def translation_key(button):
return (','.join((wizard._name, state_name, button.state)),
'wizard_button', Transaction().language, button.string)
translation_keys = [translation_key(button) for button in self.buttons]
translations = translation_obj.get_sources(translation_keys)
encoder = PYSONEncoder()
result = []
for button in self.buttons:
result.append({
'state': button.state,
'icon': button.icon,
'default': button.default,
'string': (translations.get(translation_key(button))
or button.string),
'states': encoder.encode(button.states),
})
return result
class StateTransition(State):
'''
A transition state of a wizard.
'''
class StateAction(StateTransition):
'''
An action state of a wizard.
'''
def __init__(self, action_id):
'''
action_id is a string containing ``module.xml_id``
'''
super(StateAction, self).__init__()
self.action_id = action_id
def get_action(self):
"Returns action definition"
pool = Pool()
model_data_obj = pool.get('ir.model.data')
action_obj = pool.get('ir.action')
module, fs_id = self.action_id.split('.')
action_id = action_obj.get_action_id(
model_data_obj.get_id(module, fs_id))
action = action_obj.browse(action_id)
return action_obj.get_action_values(action.type, action.id)
class _SessionRecord(object):
'''
A record of a wizard form.
'''
# Declared in class to prevent:
# 'maximum recursion depth exceeded in __subclasscheck__'
_model = None
_data = None
__cache = None
def __init__(self, model, data):
self._model = model
self._data = data
self.__cache = {}
self.dirty = False
def __getattr__(self, name):
if name in self.__cache:
return self.__cache[name]
field = self._model._columns[name]
data = self._data.get(name, None)
target_obj = None
if hasattr(field, 'model_name'):
target_obj = Pool().get(field.model_name)
elif hasattr(field, 'get_target'):
target_obj = field.get_target()
if data:
if target_obj:
def instance(data):
if isinstance(data, dict):
return _SessionRecord(target_obj, data)
return target_obj.browse(data)
if isinstance(data, list):
data = [instance(x) for x in data]
else:
data = instance(data)
elif target_obj and not isinstance(data, list):
return BrowseRecordNull()
self.__cache[name] = data
return data
def __setattr__(self, name, value):
if (self._model is not None
and (name in self._model._columns
or name in self._model._inherit_fields)):
self.__cache.pop(name, None)
self._data[name] = value
self.dirty = True
else:
super(_SessionRecord, self).__setattr__(name, value)
class Session(object):
'''
A wizard session.
'''
def __init__(self, wizard, session_id):
pool = Pool()
session_obj = pool.get('ir.session.wizard')
self._session = session_obj.browse(session_id)
self.data = json.loads(self._session.data.encode('utf-8'),
object_hook=object_hook)
for state_name, state in wizard.states.iteritems():
if isinstance(state, StateView):
model = pool.get(state.model_name)
self.data.setdefault(state_name, {})
setattr(self, state_name,
_SessionRecord(model, self.data[state_name]))
@property
def dirty(self):
return any(getattr(self, state_name).dirty
for state_name in self.data)
def save(self):
"Save the session in database"
session_obj = Pool().get('ir.session.wizard')
if self.dirty:
session_obj.write(self._session.id, {
'data': json.dumps(self.data, cls=JSONEncoder),
})
class Wizard(WarningErrorMixin, URLMixin):
_name = ""
start_state = 'start'
end_state = 'end'
def __new__(cls):
Pool.register(cls, type='wizard')
def __init__(self):
super(Wizard, self).__init__()
self._rpc = {
'create': True,
'delete': True,
'execute': True,
}
self._error_messages = {}
@property
def states(self):
result = {}
for attr in dir(self):
if attr == 'states':
continue
if isinstance(getattr(self, attr), State):
result[attr] = getattr(self, attr)
return result
def init(self, module_name):
pool = Pool()
translation_obj = pool.get('ir.translation')
cursor = Transaction().cursor
for state_name, state in self.states.iteritems():
if isinstance(state, StateView):
for button in state.buttons:
cursor.execute('SELECT id, name, src '
'FROM ir_translation '
'WHERE module = %s '
'AND lang = %s '
'AND type = %s '
'AND name = %s',
(module_name, 'en_US', 'wizard_button',
self._name + ',' + state_name + ',' +
button.state))
res = cursor.dictfetchall()
src_md5 = translation_obj.get_src_md5(button.string)
if not res:
cursor.execute('INSERT INTO ir_translation '
'(name, lang, type, src, src_md5, value, module, '
'fuzzy) '
'VALUES (%s, %s, %s, %s, %s, %s, %s, %s)',
(self._name + ',' + state_name + ',' +
button.state,
'en_US', 'wizard_button', button.string,
src_md5, '', module_name, False))
elif res[0]['src'] != button.string:
cursor.execute('UPDATE ir_translation '
'SET src = %s, src_md5 = %s '
'WHERE id = %s',
(button.string, src_md5, res[0]['id']))
cursor.execute('SELECT id, src FROM ir_translation '
'WHERE lang = %s '
'AND type = %s '
'AND name = %s',
('en_US', 'error', self._name))
trans_error = {}
for trans in cursor.dictfetchall():
trans_error[trans['src']] = trans
for error in self._error_messages.values():
if error not in trans_error:
error_md5 = translation_obj.get_src_md5(error)
cursor.execute('INSERT INTO ir_translation '
'(name, lang, type, src, src_md5, value, module, fuzzy) '
'VALUES (%s, %s, %s, %s, %s, %s, %s, %s)',
(self._name, 'en_US', 'error', error, error_md5, '',
module_name, False))
def create(self):
"Create a session"
session_obj = Pool().get('ir.session.wizard')
return (session_obj.create({}), self.start_state, self.end_state)
def delete(self, session_id):
"Delete the session"
session_obj = Pool().get('ir.session.wizard')
session_obj.delete(session_id)
def execute(self, session, data, state_name):
'''
Execute the wizard state.
session is a Session or a Session id
data is a dictionary with the session data to update
state_name is the name of state to execute
Returns a dictionary with:
- ``actions``: a list of Action to execute
- ``view``: a dictionary with:
- ``fields_view``: a fields/view definition
- ``defaults``: a dictionary with default values
- ``buttons``: a list of buttons
'''
if state_name == self.end_state:
return {}
if isinstance(session, (int, long)):
session = Session(self, session)
for key, value in data.iteritems():
prev_data = session.data[key].copy()
session.data[key].update(value)
if prev_data != session.data[key]:
getattr(session, key).dirty = True
state = self.states[state_name]
result = {}
if isinstance(state, StateView):
view = state.get_view()
defaults = state.get_defaults(self, session, state_name,
view['fields'].keys())
buttons = state.get_buttons(self, state_name)
result['view'] = {
'fields_view': view,
'defaults': defaults,
'buttons': buttons,
'state': state_name,
}
elif isinstance(state, StateTransition):
do_result = None
if isinstance(state, StateAction):
action = state.get_action()
do = getattr(self, 'do_%s' % state_name, None)
if do:
do_result = do(session, action)
else:
do_result = action, {}
transition = getattr(self, 'transition_%s' % state_name, None)
if transition:
result = self.execute(session, {}, transition(session))
if do_result:
result.setdefault('actions', []).append(do_result)
session.save()
return result
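# Sketch of an execute() result for a StateView state (field names invented):
#   {'view': {'fields_view': {...}, 'defaults': {'name': 'foo'},
#             'buttons': [{'state': 'end', 'string': 'Cancel', ...}],
#             'state': 'start'}}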
[repo: mediafactory/tryton_core_daemon | path: trytond/wizard/wizard.py | language: Python | license: gpl-3.0 | size: 12,694]
#-*- coding:utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 One Click Software (http://oneclick.solutions)
# and Copyright (C) 2013 Michael Telahun Makonnen <mmakonnen@gmail.com>.
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Time Accrual Policy',
'version': '1.1',
'category': 'Human Resources',
'description': """
Define Time Accrual Policies
============================
Define properties of a leave accrual policy. The accrued time is calculated
based on the length of service of the employee. An additional premium may be
added on the base rate based on additional months of service. This policy is ideal
for annual leave accruals. If the type of accrual is 'Standard' time is accrued and
withdrawn manually. However, if the type is 'Calendar' the time is accrued (and recorded)
at a fixed frequency.
""",
'author':'Michael Telahun Makonnen <mmakonnen@gmail.com> and One Click Software',
'website':'http://oneclick.solutions',
'depends': [
'hr_accrual',
'hr_contract_state',
'hr_employee_seniority',
'hr_policy_group',
],
'init_xml': [
],
'update_xml': [
'security/ir.model.access.csv',
'hr_policy_accrual_cron.xml',
'hr_policy_accrual_view.xml',
],
'test': [
],
'demo_xml': [
],
'installable': True,
'active': False,
}
[repo: cartertech/odoo-hr-ng | path: hr_policy_accrual/__openerp__.py | language: Python | license: agpl-3.0 | size: 2,187]
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# (C) British Crown Copyright 2012-6 Met Office.
#
# This file is part of Rose, a framework for meteorological suites.
#
# Rose is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Rose is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Rose. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
import pango
import pygtk
pygtk.require("2.0")
import gtk
import rose.config
import rose.config_editor
import rose.gtk.util
import rose.config_editor.plugin.um.widget.stash_util as stash_util
class AddStashDiagnosticsPanelv1(gtk.VBox):
"""Display a grouped set of stash requests to add."""
STASH_PARSE_DESC_OPT = "name"
STASH_PARSE_ITEM_OPT = "item"
STASH_PARSE_SECT_OPT = "sectn"
def __init__(self, stash_lookup, request_lookup,
changed_request_lookup, stash_meta_lookup,
add_stash_request_func,
navigate_to_stash_request_func,
refresh_stash_requests_func):
"""Create a widget displaying STASHmaster information.
stash_lookup is a nested dictionary that uses STASH section
numbers and item numbers as a key chain to get the information
about a specific record - e.g. stash_lookup[1][0]["name"] may
return the 'name' (text description) for stash section 1, item
0.
request_lookup is a nested dictionary in the same form as stash
lookup (section numbers, item numbers), but then contains
a dictionary of relevant streq namelists vs option-value pairs
as a sub-level - e.g. request_lookup[1][0].keys() gives all the
relevant streq indices for stash section 1, item 0.
request_lookup[1][0]["0abcd123"]["dom_name"] may give the
domain profile name for the relevant namelist:streq(0abcd123).
changed_request_lookup is a dictionary of changed streq
namelists (keys) and their change description text (values).
stash_meta_lookup is a dictionary of STASHmaster property
names (keys) with value-metadata-dict key-value pairs (values).
To extract the metadata dict for a 'grid' value of "2", look
at stash_meta_lookup["grid=2"] which should be a dict of normal
Rose metadata key-value pairs such as:
{"description": "2 means Something something"}.
add_stash_request_func is a hook function that should take a
STASH section number argument and a STASH item number argument,
and add this request as a new namelist in a configuration.
navigate_to_stash_request_func is a hook function that should
take a streq namelist section id and search for it. It should
display it if found.
refresh_stash_requests_func is a hook function that should call
the update_request_info method with updated streq namelist
info.
"""
super(AddStashDiagnosticsPanelv1, self).__init__(self)
self.set_property("homogeneous", False)
self.stash_lookup = stash_lookup
self.request_lookup = request_lookup
self.changed_request_lookup = changed_request_lookup
self.stash_meta_lookup = stash_meta_lookup
self._add_stash_request = add_stash_request_func
self.navigate_to_stash_request = navigate_to_stash_request_func
self.refresh_stash_requests = refresh_stash_requests_func
self.group_index = 0
self._visible_metadata_columns = ["Section"]
# Automatically hide columns which have fixed-value metadata.
self._hidden_column_names = []
for key, metadata in self.stash_meta_lookup.items():
if "=" in key:
continue
values_string = metadata.get(rose.META_PROP_VALUES, "0, 1")
if len(rose.variable.array_split(values_string)) == 1:
self._hidden_column_names.append(key)
self._should_show_meta_column_titles = False
self.control_widget_hbox = self._get_control_widget_hbox()
self.pack_start(self.control_widget_hbox, expand=False, fill=False)
self._view = rose.gtk.util.TooltipTreeView(
get_tooltip_func=self.set_tree_tip)
self._view.set_rules_hint(True)
self.sort_util = rose.gtk.util.TreeModelSortUtil(
lambda: self._view.get_model(), 2)
self._view.show()
self._view.connect("button-press-event",
self._handle_button_press_event)
self._view.connect("cursor-changed",
lambda v: self._update_control_widget_sensitivity())
self._window = gtk.ScrolledWindow()
self._window.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
self.generate_tree_view(is_startup=True)
self._window.add(self._view)
self._window.show()
self.pack_start(self._window, expand=True, fill=True)
self._update_control_widget_sensitivity()
self.show()
def add_cell_renderer_for_value(self, column):
"""Add a cell renderer to represent the model value."""
cell_for_value = gtk.CellRendererText()
column.pack_start(cell_for_value, expand=True)
column.set_cell_data_func(cell_for_value,
self._set_tree_cell_value)
def add_stash_request(self, section, item):
"""Handle an add stash request call."""
self._add_stash_request(section, item)
self.refresh_stash_requests()
def generate_tree_view(self, is_startup=False):
"""Create the summary of page data."""
for column in self._view.get_columns():
self._view.remove_column(column)
self._view.set_model(self.get_tree_model())
for i, column_name in enumerate(self.column_names):
col = gtk.TreeViewColumn()
if column_name in self._hidden_column_names:
col.set_visible(False)
col_title = column_name.replace("_", "__")
if self._should_show_meta_column_titles:
col_meta = self.stash_meta_lookup.get(column_name, {})
title = col_meta.get(rose.META_PROP_TITLE)
if title is not None:
col_title = title
col.set_title(col_title)
self.add_cell_renderer_for_value(col)
if i < len(self.column_names) - 1:
col.set_resizable(True)
col.set_sort_column_id(i)
self._view.append_column(col)
if is_startup:
group_model = gtk.TreeStore(str)
group_model.append(None, [""])
for i, name in enumerate(self.column_names):
if name not in ["?", "#"]:
group_model.append(None, [name])
self._group_widget.set_model(group_model)
self._group_widget.set_active(self.group_index + 1)
self._group_widget.connect("changed", self._handle_group_change)
self.update_request_info()
def get_model_data_and_columns(self):
"""Return a list of data tuples and columns"""
data_rows = []
columns = ["Section", "Item", "Description", "?", "#"]
sections = self.stash_lookup.keys()
sections.sort(self.sort_util.cmp_)
mod_markup = rose.config_editor.SUMMARY_DATA_PANEL_MODIFIED_MARKUP
props_excess = [self.STASH_PARSE_DESC_OPT, self.STASH_PARSE_ITEM_OPT,
self.STASH_PARSE_SECT_OPT]
for section in sections:
if section == "-1":
continue
items = self.stash_lookup[section].keys()
items.sort(self.sort_util.cmp_)
for item in items:
data = self.stash_lookup[section][item]
this_row = [section, item, data[self.STASH_PARSE_DESC_OPT]]
this_row += ["", ""]
for prop in sorted(data.keys()):
if prop not in props_excess:
this_row.append(data[prop])
if prop not in columns:
columns.append(prop)
data_rows.append(this_row)
return data_rows, columns
def get_tree_model(self):
"""Construct a data model of other page data."""
data_rows, cols = self.get_model_data_and_columns()
data_rows, cols, rows_are_descendants = self._apply_grouping(
data_rows, cols, self.group_index)
self.column_names = cols
if data_rows:
col_types = [str] * len(data_rows[0])
else:
col_types = []
self._store = gtk.TreeStore(*col_types)
parent_iter = None
for i, row_data in enumerate(data_rows):
if rows_are_descendants is None:
self._store.append(None, row_data)
elif rows_are_descendants[i]:
self._store.append(parent_iter, row_data)
else:
parent_data = [row_data[0]] + [None] * len(row_data[1:])
parent_iter = self._store.append(None, parent_data)
self._store.append(parent_iter, row_data)
filter_model = self._store.filter_new()
filter_model.set_visible_func(self._filter_visible)
sort_model = gtk.TreeModelSort(filter_model)
for i in range(len(self.column_names)):
sort_model.set_sort_func(i, self.sort_util.sort_column, i)
sort_model.connect("sort-column-changed",
self.sort_util.handle_sort_column_change)
return sort_model
def set_tree_tip(self, treeview, row_iter, col_index, tip):
"""Add the hover-over text for a cell to 'tip'.
treeview is the gtk.TreeView object
row_iter is the gtk.TreeIter for the row
col_index is the index of the gtk.TreeColumn in
e.g. treeview.get_columns()
tip is the gtk.Tooltip object that the text needs to be set in.
"""
model = treeview.get_model()
stash_section_index = self.column_names.index("Section")
stash_item_index = self.column_names.index("Item")
stash_desc_index = self.column_names.index("Description")
stash_request_num_index = self.column_names.index("#")
stash_section = model.get_value(row_iter, stash_section_index)
stash_item = model.get_value(row_iter, stash_item_index)
stash_desc = model.get_value(row_iter, stash_desc_index)
stash_request_num = model.get_value(row_iter, stash_request_num_index)
if not stash_request_num or stash_request_num == "0":
stash_request_num = "None"
name = self.column_names[col_index]
value = model.get_value(row_iter, col_index)
help = None
if value is None:
return False
if name == "?":
name = "Requests Status"
if value == rose.config_editor.SUMMARY_DATA_PANEL_MODIFIED_MARKUP:
value = "changed"
else:
value = "no changes"
elif name == "#":
name = "Requests"
if stash_request_num != "None":
sect_streqs = self.request_lookup.get(stash_section, {})
streqs = sect_streqs.get(stash_item, {}).keys()
streqs.sort(rose.config.sort_settings)
if streqs:
value = "\n " + "\n ".join(streqs)
else:
value = stash_request_num + " total"
if name == "Section":
meta_key = self.STASH_PARSE_SECT_OPT + "=" + value
elif name == "Description":
metadata = stash_util.get_metadata_for_stash_section_item(
self.stash_meta_lookup, stash_section, stash_item, value
)
help = metadata.get(rose.META_PROP_HELP)
meta_key = self.STASH_PARSE_DESC_OPT + "=" + value
else:
meta_key = name + "=" + value
value_meta = self.stash_meta_lookup.get(meta_key, {})
title = value_meta.get(rose.META_PROP_TITLE, "")
if help is None:
help = value_meta.get(rose.META_PROP_HELP, "")
if title and not help:
value += "\n" + title
if help:
value += "\n" + rose.gtk.util.safe_str(help)
text = name + ": " + str(value) + "\n\n"
text += "Section: " + str(stash_section) + "\n"
text += "Item: " + str(stash_item) + "\n"
text += "Description: " + str(stash_desc) + "\n"
if stash_request_num != "None":
text += str(stash_request_num) + " request(s)"
text = text.strip()
tip.set_text(text)
return True
def update_request_info(self, request_lookup=None,
changed_request_lookup=None):
"""Refresh streq namelist information."""
if request_lookup is not None:
self.request_lookup = request_lookup
if changed_request_lookup is not None:
self.changed_request_lookup = changed_request_lookup
sect_col_index = self.column_names.index("Section")
item_col_index = self.column_names.index("Item")
streq_info_index = self.column_names.index("?")
num_streqs_index = self.column_names.index("#")
# For speed, pass in the relevant indices here.
user_data = (sect_col_index, item_col_index,
streq_info_index, num_streqs_index)
self._store.foreach(self._update_row_request_info, user_data)
# Loop over any parent rows and sum numbers and info.
parent_iter = self._store.iter_children(None)
while parent_iter is not None:
num_streq_children = 0
streq_info_children = ""
child_iter = self._store.iter_children(parent_iter)
if child_iter is None:
parent_iter = self._store.iter_next(parent_iter)
continue
while child_iter is not None:
num = self._store.get_value(child_iter, num_streqs_index)
info = self._store.get_value(child_iter, streq_info_index)
if isinstance(num, basestring) and num.isdigit():
num_streq_children += int(num)
if info and not streq_info_children:
streq_info_children = info
child_iter = self._store.iter_next(child_iter)
self._store.set_value(parent_iter, num_streqs_index,
str(num_streq_children))
self._store.set_value(parent_iter, streq_info_index,
streq_info_children)
parent_iter = self._store.iter_next(parent_iter)
def _update_row_request_info(self, model, path, iter_, user_data):
# Update the streq namelist information for a model row.
(sect_col_index, item_col_index,
streq_info_index, num_streqs_index) = user_data
section = model.get_value(iter_, sect_col_index)
item = model.get_value(iter_, item_col_index)
if section is None or item is None:
model.set_value(iter_, num_streqs_index, None)
model.set_value(iter_, streq_info_index, None)
return
streqs = self.request_lookup.get(section, {}).get(item, {})
model.set_value(iter_, num_streqs_index, str(len(streqs)))
streq_info = ""
mod_markup = rose.config_editor.SUMMARY_DATA_PANEL_MODIFIED_MARKUP
for streq_section in streqs:
if streq_section in self.changed_request_lookup:
streq_info = mod_markup + streq_info
break
model.set_value(iter_, streq_info_index, streq_info)
def _append_row_data(self, model, path, iter_, data_rows):
# Append new row data.
data_rows.append(model.get(iter_))
def _apply_grouping(self, data_rows, column_names, group_index=None,
descending=False):
# Calculate nesting (grouping) for the data.
rows_are_descendants = None
if group_index is None:
return data_rows, column_names, rows_are_descendants
k = group_index
data_rows = [r[k:k + 1] + r[0:k] + r[k + 1:] for r in data_rows]
column_names.insert(0, column_names.pop(k))
data_rows.sort(lambda x, y:
self._sort_row_data(x, y, 0, descending))
last_entry = None
rows_are_descendants = []
for i, row in enumerate(data_rows):
if i > 0 and last_entry == row[0]:
rows_are_descendants.append(True)
else:
rows_are_descendants.append(False)
last_entry = row[0]
return data_rows, column_names, rows_are_descendants
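# For illustration (hypothetical rows): grouping
#     [["1", "a"], ["1", "b"], ["2", "c"]]
# on column 0 gives rows_are_descendants == [False, True, False];
# get_tree_model then nests both "1" rows under a synthesised
# parent row whose first cell is "1".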
def _filter_refresh(self, widget=None):
# Hook function that reacts to a change in filter status.
self._view.get_model().get_model().refilter()
def _filter_visible(self, model, iter_):
# This returns whether a row should be visible.
filt_text = self._filter_widget.get_text()
if not filt_text:
return True
for col_text in model.get(iter_, *range(len(self.column_names))):
if (isinstance(col_text, basestring) and
filt_text.lower() in col_text.lower()):
return True
child_iter = model.iter_children(iter_)
while child_iter is not None:
if self._filter_visible(model, child_iter):
return True
child_iter = model.iter_next(child_iter)
return False
def _get_control_widget_hbox(self):
# Build the control widgets for the dialog.
filter_label = gtk.Label(
rose.config_editor.SUMMARY_DATA_PANEL_FILTER_LABEL)
filter_label.show()
self._filter_widget = gtk.Entry()
self._filter_widget.set_width_chars(
rose.config_editor.SUMMARY_DATA_PANEL_FILTER_MAX_CHAR)
self._filter_widget.connect("changed", self._filter_refresh)
self._filter_widget.set_tooltip_text("Filter by literal values")
self._filter_widget.show()
group_label = gtk.Label(
rose.config_editor.SUMMARY_DATA_PANEL_GROUP_LABEL)
group_label.show()
self._group_widget = gtk.ComboBox()
cell = gtk.CellRendererText()
self._group_widget.pack_start(cell, expand=True)
self._group_widget.add_attribute(cell, 'text', 0)
self._group_widget.show()
self._add_button = rose.gtk.util.CustomButton(
label="Add",
stock_id=gtk.STOCK_ADD,
tip_text="Add a new request for this entry")
self._add_button.connect("activate",
lambda b: self._handle_add_current_row())
self._add_button.connect("clicked",
lambda b: self._handle_add_current_row())
self._refresh_button = rose.gtk.util.CustomButton(
label="Refresh",
stock_id=gtk.STOCK_REFRESH,
tip_text="Refresh namelist:streq statuses")
self._refresh_button.connect("activate",
lambda b: self.refresh_stash_requests())
self._refresh_button.connect("clicked",
lambda b: self.refresh_stash_requests())
self._view_button = rose.gtk.util.CustomButton(
label="View",
tip_text="Select view options",
has_menu=True)
self._view_button.connect("button-press-event",
self._popup_view_menu)
filter_hbox = gtk.HBox()
filter_hbox.pack_start(group_label, expand=False, fill=False)
filter_hbox.pack_start(self._group_widget, expand=False, fill=False)
filter_hbox.pack_start(filter_label, expand=False, fill=False,
padding=10)
filter_hbox.pack_start(self._filter_widget, expand=False, fill=False)
filter_hbox.pack_end(self._view_button, expand=False, fill=False)
filter_hbox.pack_end(self._refresh_button, expand=False, fill=False)
filter_hbox.pack_end(self._add_button, expand=False, fill=False)
filter_hbox.show()
return filter_hbox
def _get_current_section_item(self):
# Return the current highlighted section (or None) and item (or None).
current_path, current_column = self._view.get_cursor()
if current_path is None:
return (None, None)
current_iter = self._view.get_model().get_iter(current_path)
return self._get_section_item_from_iter(current_iter)
def _get_section_item_col_indices(self):
# Return the column indices of the STASH section and item.
model = self._view.get_model()
sect_index = 0
if self.group_index is not None and self.group_index != sect_index:
sect_index = 1
item_index = 1
if self.group_index is not None:
if self.group_index == 0:
item_index = 1
elif self.group_index == 1:
item_index = 0
else:
item_index = 2
return sect_index, item_index
def _get_section_item_from_iter(self, iter_):
# Return the STASH section and item numbers for this row.
sect_index, item_index = self._get_section_item_col_indices()
model = self._view.get_model()
section = model.get_value(iter_, sect_index)
item = model.get_value(iter_, item_index)
return section, item
def _handle_add_current_row(self):
section, item = self._get_current_section_item()
return self.add_stash_request(section, item)
def _handle_activation(self, view, path, column):
# React to an activation of a row in the dialog.
model = view.get_model()
row_iter = model.get_iter(path)
section, item = self._get_section_item_from_iter(row_iter)
if section is None or item is None:
return False
return self.add_stash_request(section, item)
def _handle_button_press_event(self, treeview, event):
# React to a button press (mouse click).
pathinfo = treeview.get_path_at_pos(int(event.x),
int(event.y))
if pathinfo is not None:
path, col, cell_x, cell_y = pathinfo
if event.button != 3:
if event.type == gtk.gdk._2BUTTON_PRESS:
self._handle_activation(treeview, path, col)
else:
self._popup_tree_menu(path, col, event)
def _handle_group_change(self, combobox):
# Handle grouping (nesting) status changes.
model = combobox.get_model()
col_name = model.get_value(combobox.get_active_iter(), 0)
if col_name:
if col_name in self._hidden_column_names:
self._hidden_column_names.remove(col_name)
group_index = self.column_names.index(col_name)
# Any existing grouping changes the order of self.column_names.
if (self.group_index is not None and
group_index <= self.group_index):
group_index -= 1
else:
group_index = None
if group_index == self.group_index:
return False
self.group_index = group_index
self.generate_tree_view()
return False
def _launch_record_help(self, menuitem):
"""Launch the help from a menu."""
rose.gtk.dialog.run_scrolled_dialog(menuitem._help_text,
menuitem._help_title)
def _popup_tree_menu(self, path, col, event):
"""Launch a menu for this main treeview row."""
menu = gtk.Menu()
menu.show()
model = self._view.get_model()
row_iter = model.get_iter(path)
section, item = self._get_section_item_from_iter(row_iter)
if section is None or item is None:
return False
add_menuitem = gtk.ImageMenuItem(stock_id=gtk.STOCK_ADD)
add_menuitem.set_label("Add STASH request")
add_menuitem.connect("activate",
lambda i: self.add_stash_request(section, item))
add_menuitem.show()
menu.append(add_menuitem)
stash_desc_index = self.column_names.index("Description")
stash_desc_value = model.get_value(row_iter, stash_desc_index)
desc_meta = self.stash_meta_lookup.get(
self.STASH_PARSE_DESC_OPT + "=" + str(stash_desc_value), {})
desc_meta_help = desc_meta.get(rose.META_PROP_HELP)
if desc_meta_help is not None:
help_menuitem = gtk.ImageMenuItem(stock_id=gtk.STOCK_HELP)
help_menuitem.set_label("Help")
help_menuitem._help_text = desc_meta_help
help_menuitem._help_title = "Help for %s" % stash_desc_value
help_menuitem.connect("activate", self._launch_record_help)
help_menuitem.show()
menu.append(help_menuitem)
streqs = self.request_lookup.get(section, {}).get(item, {}).keys()
if streqs:
view_menuitem = gtk.ImageMenuItem(stock_id=gtk.STOCK_FIND)
view_menuitem.set_label(label="View...")
view_menuitem.show()
view_menu = gtk.Menu()
view_menu.show()
view_menuitem.set_submenu(view_menu)
streqs.sort(rose.config.sort_settings)
for streq in streqs:
view_streq_menuitem = gtk.MenuItem(label=streq)
view_streq_menuitem._section = streq
view_streq_menuitem.connect(
"button-release-event",
lambda m, e: self.navigate_to_stash_request(m._section))
view_streq_menuitem.show()
view_menu.append(view_streq_menuitem)
menu.append(view_menuitem)
menu.popup(None, None, None, event.button, event.time)
return False
def _popup_view_menu(self, widget, event):
# Create a menu below the widget for view options.
menu = gtk.Menu()
meta_menuitem = gtk.CheckMenuItem(label="Show expanded value info")
if len(self.column_names) == len(self._visible_metadata_columns):
meta_menuitem.set_active(True)
meta_menuitem.connect("toggled", self._toggle_show_more_info)
meta_menuitem.show()
if not self.stash_meta_lookup:
meta_menuitem.set_sensitive(False)
menu.append(meta_menuitem)
col_title_menuitem = gtk.CheckMenuItem(
label="Show expanded column titles")
if self._should_show_meta_column_titles:
col_title_menuitem.set_active(True)
col_title_menuitem.connect("toggled",
self._toggle_show_meta_column_titles)
col_title_menuitem.show()
if not self.stash_meta_lookup:
col_title_menuitem.set_sensitive(False)
menu.append(col_title_menuitem)
sep = gtk.SeparatorMenuItem()
sep.show()
menu.append(sep)
show_column_menuitem = gtk.MenuItem("Show/hide columns")
show_column_menuitem.show()
show_column_menu = gtk.Menu()
show_column_menuitem.set_submenu(show_column_menu)
menu.append(show_column_menuitem)
for i, column in enumerate(self._view.get_columns()):
col_name = self.column_names[i]
col_title = col_name.replace("_", "__")
if self._should_show_meta_column_titles:
col_meta = self.stash_meta_lookup.get(col_name, {})
title = col_meta.get(rose.META_PROP_TITLE)
if title is not None:
col_title = title
col_menuitem = gtk.CheckMenuItem(label=col_title,
use_underline=False)
col_menuitem.show()
col_menuitem.set_active(column.get_visible())
col_menuitem._connect_args = (col_name,)
col_menuitem.connect(
"toggled",
lambda c: self._toggle_show_column_name(*c._connect_args))
show_column_menu.append(col_menuitem)
menu.popup(None, None, widget.position_menu, event.button,
event.time, widget)
def _set_tree_cell_value(self, column, cell, treemodel, iter_):
# Extract an appropriate value for this cell from the model.
cell.set_property("visible", True)
col_index = self._view.get_columns().index(column)
col_title = self.column_names[col_index]
value = self._view.get_model().get_value(iter_, col_index)
if col_title in self._visible_metadata_columns and value is not None:
if col_title == "Section":
key = self.STASH_PARSE_SECT_OPT + "=" + value
else:
key = col_title + "=" + value
value_meta = self.stash_meta_lookup.get(key, {})
title = value_meta.get(rose.META_PROP_TITLE, "")
if title:
value = title
desc = value_meta.get(rose.META_PROP_DESCRIPTION, "")
if desc:
value += ": " + desc
max_len = 36
if value is not None and len(value) > max_len and col_index != 0:
cell.set_property("width-chars", max_len)
cell.set_property("ellipsize", pango.ELLIPSIZE_END)
if col_index == 0 and treemodel.iter_parent(iter_) is not None:
cell.set_property("visible", False)
if value is not None and col_title != "?":
value = rose.gtk.util.safe_str(value)
cell.set_property("markup", value)
def _sort_row_data(self, row1, row2, sort_index, descending=False):
# Handle column sorting.
fac = (-1 if descending else 1)
x = row1[sort_index]
y = row2[sort_index]
return fac * self.sort_util.cmp_(x, y)
def _toggle_show_column_name(self, column_name):
# Handle a show/hide of a particular column.
col_index = self.column_names.index(column_name)
column = self._view.get_columns()[col_index]
if column.get_visible():
return column.set_visible(False)
return column.set_visible(True)
def _toggle_show_more_info(self, widget, column_name=None):
# Handle a show/hide of extra information.
should_show = widget.get_active()
if column_name is None:
column_names = self.column_names
else:
column_names = [column_name]
for name in column_names:
if should_show:
if name not in self._visible_metadata_columns:
self._visible_metadata_columns.append(name)
elif name in self._visible_metadata_columns:
if name != "Section":
self._visible_metadata_columns.remove(name)
self._view.columns_autosize()
def _toggle_show_meta_column_titles(self, widget):
self._should_show_meta_column_titles = widget.get_active()
self.generate_tree_view()
def _update_control_widget_sensitivity(self):
section, item = self._get_current_section_item()
self._add_button.set_sensitive(section is not None and
item is not None)
|
kaday/rose
|
lib/python/rose/config_editor/plugin/um/widget/stash_add.py
|
Python
|
gpl-3.0
| 31,926
|
"""Langevin dynamics class."""
import sys
import numpy as np
from numpy.random import standard_normal
from ase.md.md import MolecularDynamics
# For parallel GPAW simulations, the random forces should be distributed.
if '_gpaw' in sys.modules:
# http://wiki.fysik.dtu.dk/gpaw
from gpaw.mpi import world as gpaw_world
else:
gpaw_world = None
class Langevin(MolecularDynamics):
"""Langevin (constant N, V, T) molecular dynamics.
Usage: Langevin(atoms, dt, temperature, friction)
atoms
The list of atoms.
dt
The time step.
temperature
The desired temperature, in energy units.
friction
A friction coefficient, typically 1e-4 to 1e-2.
fixcm
If True, the position and momentum of the center of mass are
kept unperturbed. Default: True.
The temperature and friction are normally scalars, but in principle one
quantity per atom could be specified by giving an array.
This dynamics accesses the atoms using Cartesian coordinates."""
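# A minimal usage sketch (the 'atoms' object is hypothetical here;
# temperature is in energy units, so 300 K is written as 300 * kB):
#
#     from ase.units import fs, kB
#     dyn = Langevin(atoms, 2 * fs, temperature=300 * kB, friction=0.002)
#     dyn.run(1000)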
def __init__(self, atoms, timestep, temperature, friction, fixcm=True,
trajectory=None, logfile=None, loginterval=1,
communicator=gpaw_world):
MolecularDynamics.__init__(self, atoms, timestep, trajectory,
logfile, loginterval)
self.temp = temperature
self.frict = friction
self.fixcm = fixcm # will the center of mass be held fixed?
self.communicator = communicator
self.updatevars()
def set_temperature(self, temperature):
self.temp = temperature
self.updatevars()
def set_friction(self, friction):
self.frict = friction
self.updatevars()
def set_timestep(self, timestep):
self.dt = timestep
self.updatevars()
def updatevars(self):
dt = self.dt
# If the friction is an array some other constants must be arrays too.
self._localfrict = hasattr(self.frict, 'shape')
lt = self.frict * dt
masses = self.masses
sdpos = dt * np.sqrt(self.temp / masses * (2.0/3.0 - 0.5 * lt) * lt)
sdpos.shape = (-1, 1)
sdmom = np.sqrt(self.temp * masses * 2.0 * (1.0 - lt) * lt)
sdmom.shape = (-1, 1)
pmcor = np.sqrt(3.0)/2.0 * (1.0 - 0.125 * lt)
cnst = np.sqrt((1.0 - pmcor) * (1.0 + pmcor))
act0 = 1.0 - lt + 0.5 * lt * lt
act1 = (1.0 - 0.5 * lt + (1.0/6.0) * lt * lt)
act2 = 0.5 - (1.0/6.0) * lt + (1.0/24.0) * lt * lt
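# Note: act0, act1 and act2 appear to be second-order expansions of
# exp(-lt), (1 - exp(-lt))/lt and (lt - 1 + exp(-lt))/lt**2
# respectively, i.e. the exact propagator coefficients truncated for
# small friction * dt.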
c1 = act1 * dt / masses
c1.shape = (-1, 1)
c2 = act2 * dt * dt / masses
c2.shape = (-1, 1)
c3 = (act1 - act2) * dt
c4 = act2 * dt
del act1, act2
if self._localfrict:
# If the friction is an array, so are these
act0.shape = (-1, 1)
c3.shape = (-1, 1)
c4.shape = (-1, 1)
pmcor.shape = (-1, 1)
cnst.shape = (-1, 1)
self.sdpos = sdpos
self.sdmom = sdmom
self.c1 = c1
self.c2 = c2
self.act0 = act0
self.c3 = c3
self.c4 = c4
self.pmcor = pmcor
self.cnst = cnst
def step(self, f):
atoms = self.atoms
p = self.atoms.get_momenta()
random1 = standard_normal(size=(len(atoms), 3))
random2 = standard_normal(size=(len(atoms), 3))
if self.communicator is not None:
self.communicator.broadcast(random1, 0)
self.communicator.broadcast(random2, 0)
rrnd = self.sdpos * random1
prnd = (self.sdmom * self.pmcor * random1 +
self.sdmom * self.cnst * random2)
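# Subtracting the mean of n random samples shrinks their variance by
# (n - 1)/n, so the sqrt(n/(n - 1)) rescaling below restores it when
# the centre of mass is held fixed.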
if self.fixcm:
rrnd = rrnd - np.sum(rrnd, 0) / len(atoms)
prnd = prnd - np.sum(prnd, 0) / len(atoms)
n = len(atoms)
rrnd *= np.sqrt(n / (n - 1.0))
prnd *= np.sqrt(n / (n - 1.0))
atoms.set_positions(atoms.get_positions() +
self.c1 * p +
self.c2 * f + rrnd)
p *= self.act0
p += self.c3 * f + prnd
atoms.set_momenta(p)
f = atoms.get_forces()
atoms.set_momenta(p + self.c4 * f)
return f
|
slabanja/ase
|
ase/md/langevin.py
|
Python
|
gpl-2.0
| 4,307
|
#
# Copyright 2014 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
import copy
import functools
import glob
import json
import logging
import os
import shutil
import subprocess
import urlparse
import urllib
import uuid
import warnings
from os.path import join
import xmltodict
import paths
import subnet_lease
import utils
import virt
import log_utils
LOGGER = logging.getLogger(__name__)
LogTask = functools.partial(log_utils.LogTask, logger=LOGGER)
log_task = functools.partial(log_utils.log_task, logger=LOGGER)
def _create_ip(subnet, index):
"""
Given a subnet or an ip and an index returns the ip with that lower index
from the subnet (255.255.255.0 mask only subnets)
Args:
subnet (str): String containing the first three elements of the decimal
representation of a subnet (X.Y.Z) or a full ip (X.Y.Z.A)
index (int or str): Last element of a decimal ip representation, for
example, 123 for the ip 1.2.3.123
Returns:
str: The dotted decimal representation of the ip
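Example:
>>> _create_ip('192.168.1', 4)
'192.168.1.4'
>>> _create_ip('1.2.3.123', 4)
'1.2.3.4'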
"""
return '.'.join(subnet.split('.')[:3] + [str(index)])
def _ip_in_subnet(subnet, ip):
"""
Checks if an ip is included in a subnet.
Note:
only 255.255.255.0 masks allowed
Args:
subnet (str): String containing the first three elements of the decimal
representation of a subnet (X.Y.Z) or a full ip (X.Y.Z.A)
ip (str or int): Decimal ip representation
Returns:
bool: ``True`` if ip is in subnet, ``False`` otherwise
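Example:
>>> _ip_in_subnet('192.168.1', '192.168.1.5')
True
>>> _ip_in_subnet('192.168.1', '10.0.0.5')
False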
"""
return (
_create_ip(subnet, 1) == _create_ip(ip, 1)
or '0.0.0.1' == _create_ip(ip, 1)
)
class Prefix(object):
"""
A prefix is a directory that will contain all the data needed to set up the
environment.
Attributes:
_prefix (str): Path to the directory of this prefix
_paths (lago.path.Paths): Path handler class
_virt_env (lago.virt.VirtEnv): Lazily loaded virtual env handler
_metadata (dict): Lazily loaded metadata
"""
VIRT_ENV_CLASS = virt.VirtEnv
def __init__(self, prefix):
"""
Args:
prefix (str): Path of the prefix
"""
self._prefix = prefix
self.paths = paths.Paths(self._prefix)
self._virt_env = None
self._metadata = None
def _get_metadata(self):
"""
Retrieve the metadata info for this prefix
Returns:
dict: metadata info
"""
if self._metadata is None:
try:
with open(self.paths.metadata()) as metadata_fd:
json_data = metadata_fd.read()
if json_data:
self._metadata = json.loads(json_data)
else:
raise IOError()
except IOError:
self._metadata = {}
return self._metadata
def _save_metadata(self):
"""
Write this prefix metadata to disk
Returns:
None
"""
with open(self.paths.metadata(), 'w') as metadata_fd:
utils.json_dump(self._get_metadata(), metadata_fd)
def save(self):
"""
Save this prefix to persistent storage
Returns:
None
"""
if not os.path.exists(self.paths.virt()):
os.makedirs(self.paths.virt())
self._save_metadata()
self.virt_env.save()
def _create_ssh_keys(self):
"""
Generate a pair of ssh keys for this prefix
Returns:
None
Raises:
RuntimeError: if it fails to create the keys
"""
ret, _, _ = utils.run_command(
[
'ssh-keygen',
'-t',
'rsa',
'-N',
'',
'-f',
self.paths.ssh_id_rsa(),
]
)
if ret != 0:
raise RuntimeError(
'Failed to create ssh keys at %s' % self.paths.ssh_id_rsa(),
)
@log_task('Initialize prefix')
def initialize(self):
"""
Initialize this prefix: create the destination path and the uuid for
the prefix. For any other actions see :func:`Prefix.virt_conf`
Will safely roll back if any of those steps fail
Returns:
None
Raises:
RuntimeError: If it fails to create the prefix dir
"""
prefix = self.paths.prefix
os.environ['LAGO_PREFIX_PATH'] = prefix
os.environ['LAGO_WORKDIR_PATH'] = os.path.dirname(prefix)
with utils.RollbackContext() as rollback:
with LogTask('Create prefix dirs'):
try:
os.mkdir(prefix)
except OSError as error:
raise RuntimeError(
'Could not create prefix at %s:\n%s' % (prefix, error)
)
rollback.prependDefer(shutil.rmtree, prefix)
with open(self.paths.uuid(), 'w') as f, \
LogTask('Generate prefix uuid'):
f.write(uuid.uuid1().hex)
with LogTask('Create ssh keys'):
self._create_ssh_keys()
with LogTask('Tag prefix as initialized'):
with open(self.paths.prefix_lagofile(), 'w') as fd:
fd.write('')
rollback.clear()
@log_task('Cleanup prefix')
def cleanup(self):
"""
Stops any running entities in the prefix and uninitializes it; usually
you want to do this if you are going to remove the prefix afterwards
Returns:
None
"""
with LogTask('Stop prefix'):
self.stop()
with LogTask("Tag prefix as uninitialized"):
os.unlink(self.paths.prefix_lagofile())
@staticmethod
def _init_net_specs(conf):
"""
Given a configuration specification, initializes all the net
definitions in it so they can be used comfortably
Args:
conf (dict): Configuration specification
Returns:
dict: the adapted new conf
"""
for net_name, net_spec in conf.get('nets', {}).items():
net_spec['name'] = net_name
net_spec['mapping'] = {}
net_spec.setdefault('type', 'nat')
return conf
@staticmethod
def _check_predefined_subnets(conf):
"""
Checks if all of the nets defined in the config are inside the allowed
range, throwing an exception if not
Args:
conf (dict): Configuration spec where to get the nets definitions
from
Returns:
None
Raises:
RuntimeError: If there are any subnets out of the allowed range
"""
for net_spec in conf.get('nets', {}).itervalues():
subnet = net_spec.get('gw')
if subnet is None:
continue
if subnet_lease.is_leasable_subnet(subnet):
raise RuntimeError(
'%s subnet can only be dynamically allocated' % (subnet)
)
def _allocate_subnets(self, conf):
"""
Allocate all the subnets needed by the given configuration spec
Args:
conf (dict): Configuration spec where to get the nets definitions
from
Returns:
tuple(list, dict): allocated subnets and modified conf
"""
allocated_subnets = []
try:
for net_spec in conf.get('nets', {}).itervalues():
if 'gw' in net_spec or net_spec['type'] != 'nat':
continue
net_spec['gw'] = subnet_lease.acquire(self.paths.uuid())
allocated_subnets.append(net_spec['gw'])
except:
for subnet in allocated_subnets:
subnet_lease.release(subnet)
raise
return allocated_subnets, conf
def _add_nic_to_mapping(self, net, dom, nic):
"""
Populates the given net spec mapping entry with the nics of the given
domain
Args:
net (dict): Network spec to populate
dom (dict): libvirt domain specification
nic (str): Name of the interface to add to the net mapping from the
domain
Returns:
None
"""
dom_name = dom['name']
idx = dom['nics'].index(nic)
name = idx == 0 and dom_name or '%s-eth%d' % (dom_name, idx)
net['mapping'][name] = nic['ip']
def _register_preallocated_ips(self, conf):
"""
Parse all the domains in the given conf and preallocate all their ips
into the networks mappings, raising exception on duplicated ips or ips
out of the allowed ranges
See Also:
:mod:`lago.subnet_lease`
Args:
conf (dict): Configuration spec to parse
Returns:
None
Raises:
RuntimeError: if there are any duplicated ips or any ip out of the
allowed range
"""
for dom_name, dom_spec in conf.get('domains', {}).items():
for idx, nic in enumerate(dom_spec.get('nics', [])):
if 'ip' not in nic:
continue
net = conf['nets'][nic['net']]
if subnet_lease.is_leasable_subnet(net['gw']):
nic['ip'] = _create_ip(
net['gw'], int(nic['ip'].split('.')[-1])
)
dom_name = dom_spec['name']
if not _ip_in_subnet(net['gw'], nic['ip']):
raise RuntimeError(
"%s:nic%d's IP [%s] is outside the subnet [%s]" % (
dom_name,
dom_spec['nics'].index(nic),
nic['ip'],
net['gw'],
),
)
if nic['ip'] in net['mapping'].values():
conflict_list = [
name for name, ip in net['mapping'].items()
if ip == nic['ip']
]
raise RuntimeError(
'IP %s was assigned to several domains: %s %s' % (
nic['ip'],
dom_name,
' '.join(conflict_list),
),
)
self._add_nic_to_mapping(net, dom_spec, nic)
def _allocate_ips_to_nics(self, conf):
"""
For all the nics of all the domains in the conf that have dynamic ip,
allocate one and add it to the network mapping
Args:
conf (dict): Configuration spec to extract the domains from
Returns:
None
"""
for dom_name, dom_spec in conf.get('domains', {}).items():
for idx, nic in enumerate(dom_spec.get('nics', [])):
if 'ip' in nic:
continue
net = conf['nets'][nic['net']]
if net['type'] != 'nat':
continue
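# Pick an unused host index in the 2..254 range of this /24 subnet
# (note that set.pop() returns an arbitrary free index, not
# necessarily the lowest one).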
allocated = net['mapping'].values()
vacant = _create_ip(
net['gw'],
set(range(2, 255)).difference(
set([int(ip.split('.')[-1]) for ip in allocated])
).pop()
)
nic['ip'] = vacant
self._add_nic_to_mapping(net, dom_spec, nic)
def _config_net_topology(self, conf):
"""
Initialize and populate all the network-related elements, like
reserving ips and populating network specs of the given configuration
spec
Args:
conf (dict): Configuration spec to initialize
Returns:
None
"""
conf = self._init_net_specs(conf)
self._check_predefined_subnets(conf)
allocated_subnets, conf = self._allocate_subnets(conf)
try:
self._register_preallocated_ips(conf)
self._allocate_ips_to_nics(conf)
except:
for subnet in allocated_subnets:
subnet_lease.release(subnet)
raise
return conf
def _create_disk(
self,
name,
spec,
template_repo=None,
template_store=None,
):
"""
Creates a disk with the given name from the given repo or store
Args:
name (str): Name of the domain to create the disk for
spec (dict): Specification of the disk to create
template_repo (TemplateRepository or None): template repo instance
to use
template_store (TemplateStore or None): template store instance to
use
Returns:
Tuple(str, dict): Path to the disk and disk metadata
Raises:
RuntimeError: If the type of the disk is not supported or the
disk creation fails
"""
LOGGER.debug("Spec: %s" % spec)
with LogTask("Create disk %s" % spec['name']):
disk_metadata = {}
if spec['type'] == 'template':
disk_path, disk_metadata = self._handle_template(
host_name=name,
template_spec=spec,
template_repo=template_repo,
template_store=template_store,
)
elif spec['type'] == 'empty':
disk_path, disk_metadata = self._handle_empty_disk(
host_name=name,
disk_spec=spec,
)
elif spec['type'] == 'file':
disk_path, disk_metadata = self._handle_file_disk(
disk_spec=spec,
)
else:
raise RuntimeError('Unknown drive spec %s' % str(spec))
return disk_path, disk_metadata
def _handle_file_disk(self, disk_spec):
url = os.path.expandvars(disk_spec.get('url', ''))
disk_path = os.path.expandvars(disk_spec.get('path', ''))
disk_metadata = disk_spec.get('metadata', {})
if not url and not disk_path:
raise RuntimeError(
'Partial drive spec, no url or path provided for disk of '
'type file:\n%s' % str(disk_spec)
)
if url:
disk_spec['path'] = self._retrieve_disk_url(url, disk_path)
else:
disk_spec['path'] = disk_path
# If we're using raw file, return its path
disk_path = disk_spec['path']
return disk_path, disk_metadata
def _retrieve_disk_url(self, disk_url, disk_dst_path=None):
disk_in_prefix = self.fetch_url(disk_url)
if disk_dst_path is None:
return disk_in_prefix
else:
shutil.move(disk_in_prefix, disk_dst_path)
return disk_dst_path
@staticmethod
def _generate_disk_name(host_name, disk_name, disk_format):
return '%s_%s.%s' % (
host_name,
disk_name,
disk_format,
)
def _generate_disk_path(self, disk_name):
return os.path.expandvars(self.paths.images(disk_name))
def _handle_empty_disk(self, host_name, disk_spec):
disk_metadata = disk_spec.get('metadata', {})
disk_filename = self._generate_disk_name(
host_name=host_name,
disk_name=disk_spec['name'],
disk_format=disk_spec['format'],
)
disk_path = self._generate_disk_path(disk_name=disk_filename)
qemu_cmd = ['qemu-img', 'create']
if disk_spec['format'] == 'qcow2':
qemu_cmd += [
'-f', disk_spec['format'], '-o', 'preallocation=metadata'
]
else:
qemu_cmd += ['-f', disk_spec['format']]
qemu_cmd += [disk_path, disk_spec['size']]
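# The assembled command looks like (hypothetical spec values):
#     qemu-img create -f qcow2 -o preallocation=metadata \
#         /prefix/images/vm0_root.qcow2 10G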
if os.path.exists(disk_path):
os.unlink(disk_path)
with LogTask(
'Create empty disk %s(%s)' % (host_name, disk_spec['name'])
):
self._run_qemu(qemu_cmd, disk_path)
disk_rel_path = os.path.join(
'$LAGO_PREFIX_PATH',
os.path.basename(self.paths.images()),
os.path.basename(disk_path),
)
return disk_rel_path, disk_metadata
@staticmethod
def _run_qemu(qemu_cmd, disk_path):
ret = utils.run_command(qemu_cmd)
if ret.code != 0:
raise RuntimeError(
'Failed to create image, qemu-img returned %d:\n'
'out:%s\nerr:%s' % ret,
)
# To avoid losing access to the file
os.chmod(disk_path, 0666)
return ret
def _handle_template(
self,
host_name,
template_spec,
template_store=None,
template_repo=None
):
template_type = template_spec.get('template_type', 'lago')
disk_filename = self._generate_disk_name(
host_name=host_name,
disk_name=template_spec['name'],
disk_format=template_spec['format'],
)
disk_path = self._generate_disk_path(disk_name=disk_filename)
if template_type == 'lago':
qemu_cmd, disk_metadata = self._handle_lago_template(
disk_path=disk_path,
template_spec=template_spec,
template_store=template_store,
template_repo=template_repo,
)
elif template_type == 'qcow2':
qemu_cmd, disk_metadata = self._handle_qcow_template(
disk_path=disk_path,
template_spec=template_spec,
)
else:
raise RuntimeError(
'Unsupported template spec %s' % str(template_spec)
)
if os.path.exists(disk_path):
os.unlink(disk_path)
with LogTask(
'Create disk %s(%s)' % (host_name, template_spec['name'])
):
self._run_qemu(qemu_cmd, disk_path)
# Update the path as relative so it can be relocated
disk_rel_path = os.path.join(
'$LAGO_PREFIX_PATH',
os.path.basename(self.paths.images()),
os.path.basename(disk_path),
)
return disk_rel_path, disk_metadata
def _handle_qcow_template(self, disk_path, template_spec):
base_path = template_spec.get('path', '')
if not base_path:
raise RuntimeError('Partial drive spec %s' % str(template_spec))
qemu_cmd = [
'qemu-img', 'create', '-f', 'qcow2', '-b', base_path, disk_path
]
disk_metadata = template_spec.get('metadata', {})
return qemu_cmd, disk_metadata
def _handle_lago_template(
self, disk_path, template_spec, template_store, template_repo
):
disk_metadata = template_spec.get('metadata', {})
if template_store is None or template_repo is None:
raise RuntimeError('No templates directory provided')
template = template_repo.get_by_name(template_spec['template_name'])
template_version = template.get_version(
template_spec.get('template_version', None)
)
if template_version not in template_store:
LOGGER.info(
log_utils.log_always("Template %s not in cache, downloading") %
template_version.name,
)
template_store.download(template_version)
template_store.mark_used(template_version, self.paths.uuid())
disk_metadata.update(
template_store.get_stored_metadata(template_version, ),
)
base = template_store.get_path(template_version)
qemu_cmd = ['qemu-img', 'create', '-f', 'qcow2', '-b', base, disk_path]
return qemu_cmd, disk_metadata
def _ova_to_spec(self, filename):
"""
Retrieves the given ova and makes a template of it.
Creates a disk from network provided ova.
Calculates the needed memory from the ovf.
The disk will be cached in the template repo
Args:
filename(str): the url to retrieve the data from
TODO:
* Add hash checking against the server
for faster download and latest version
* Add config script running on host - other place
* Add cloud init support - by using cdroms in other place
* Handle cpu in some way - some other place need to pick it up
* Handle the memory units properly - we just assume MegaBytes
Returns:
list of dict: list with the disk specification
int: VM memory, None if none defined
int: Number of virtual cpus, None if none defined
Raises:
RuntimeError: If the ova format is not supported
TypeError: If the memory units in the ova are not supported
(currently only 'MegaBytes')
"""
# extract if needed
ova_extracted_dir = os.path.splitext(filename)[0]
if not os.path.exists(ova_extracted_dir):
os.makedirs(ova_extracted_dir)
subprocess.check_output(
["tar", "-xvf", filename, "-C", ova_extracted_dir],
stderr=subprocess.STDOUT
)
# let's find the ovf file
# we expect only one to be there
ovf = glob.glob(ova_extracted_dir + "/master/vms/*/*.ovf")
if len(ovf) != 1:
raise RuntimeError("We support only one vm in ova")
image_file = None
memory = None
vcpus = None
# we found our ovf
# let's extract the resources
with open(ovf[0]) as fd:
# let's extract the items
obj = xmltodict.parse(fd.read())
hardware_items = [
section
for section in obj["ovf:Envelope"]["Content"]["Section"]
if section["@xsi:type"] == "ovf:VirtualHardwareSection_Type"
]
if len(hardware_items) != 1:
raise RuntimeError("We support only one machine desc in ova")
hardware_items = hardware_items[0]
for item in hardware_items["Item"]:
# let's test the resource types
CPU_RESOURCE = 3
MEMORY_RESOURCE = 4
DISK_RESOURCE = 17
resource_type = int(item["rasd:ResourceType"])
if resource_type == CPU_RESOURCE:
vcpus = int(item["rasd:cpu_per_socket"]) * \
int(item["rasd:num_of_sockets"])
elif resource_type == MEMORY_RESOURCE:
memory = int(item["rasd:VirtualQuantity"])
if item["rasd:AllocationUnits"] != "MegaBytes":
raise TypeError(
"Fix me : we need to suport other units too"
)
elif resource_type == DISK_RESOURCE:
image_file = item["rasd:HostResource"]
if image_file is not None:
disk_meta = {"root-partition": "/dev/sda1"}
disk_spec = [
{
"type": "template",
"template_type": "qcow2",
"format": "qcow2",
"dev": "vda",
"name": os.path.basename(image_file),
"path": ova_extracted_dir + "/images/" + image_file,
"metadata": disk_meta
}
]
return disk_spec, memory, vcpus
def _use_prototype(self, spec, prototypes):
"""
Populates the given spec with the values of its declared prototype
Args:
spec (dict): spec to update
prototypes (dict): Configuration spec containing the prototypes
Returns:
dict: updated spec
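Example:
Given prototypes == {'base': {'memory': 2048}} and
spec == {'based-on': 'base', 'name': 'vm0'}, the updated spec
is {'name': 'vm0', 'memory': 2048}.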
"""
# 'based-on' names a prototype defined in the conf's 'prototypes'
# section; fall back to its attributes for anything not set here.
prototype_name = spec['based-on']
del spec['based-on']
prototype = prototypes[prototype_name]
for attr in prototype:
if attr not in spec:
spec[attr] = copy.deepcopy(prototype[attr])
return spec
def fetch_url(self, url):
"""
Retrieves the given url to the prefix
Args:
url(str): Url to retrieve
Returns:
str: path to the downloaded file
"""
url_path = urlparse.urlsplit(url).path
dst_path = os.path.basename(url_path)
dst_path = self.paths.prefixed(dst_path)
with LogTask('Downloading %s' % url):
urllib.urlretrieve(url=os.path.expandvars(url), filename=dst_path)
return dst_path
def virt_conf_from_stream(
self,
conf_fd,
template_repo=None,
template_store=None,
do_bootstrap=True,
):
"""
Initializes all the virt infrastructure of the prefix, creating the
domain disks, acquiring any network leases and creating all the virt
related files and dirs inside this prefix.
Args:
conf_fd (File): File like object to read the config from
template_repo (TemplateRepository): template repository instance
template_store (TemplateStore): template store instance
Returns:
None
"""
virt_conf = utils.load_virt_stream(conf_fd)
LOGGER.debug('Loaded virt config:\n%s', virt_conf)
return self.virt_conf(
conf=virt_conf,
template_repo=template_repo,
template_store=template_store,
do_bootstrap=do_bootstrap,
)
def _prepare_domains_images(self, conf, template_repo, template_store):
if not os.path.exists(self.paths.images()):
os.makedirs(self.paths.images())
for name, domain_spec in conf['domains'].items():
if not name:
raise RuntimeError(
'An invalid (empty) domain name was found in the '
'configuration file. Cannot continue. A name must be '
'specified for the domain'
)
domain_spec['name'] = name
conf['domains'][name] = self._prepare_domain_image(
domain_spec=domain_spec,
prototypes=conf.get('prototypes', {}),
template_repo=template_repo,
template_store=template_store,
)
return conf
def _prepare_domain_image(
self, domain_spec, prototypes, template_repo, template_store
):
if 'based-on' in domain_spec:
domain_spec = self._use_prototype(
spec=domain_spec,
prototypes=prototypes,
)
if domain_spec.get('type', '') == 'ova':
domain_spec = self._handle_ova_image(domain_spec=domain_spec)
with LogTask('Create disks for VM %s' % domain_spec['name']):
domain_spec['disks'] = self._create_disks(
domain_name=domain_spec['name'],
disks_specs=domain_spec.get('disks', []),
template_repo=template_repo,
template_store=template_store,
)
return domain_spec
def _handle_ova_image(self, domain_spec):
# we import the ova as a template
domain_spec['type'] = 'template'
ova_file = self.fetch_url(domain_spec['url'])
ova_disk, domain_spec["memory"], domain_spec[
"vcpu"
] = self._ova_to_spec(ova_file)
if "disks" not in domain_spec.keys():
domain_spec["disks"] = ova_disk
else:
domain_spec["disks"] = ova_disk + domain_spec["disks"]
return domain_spec
def _create_disks(
self, domain_name, disks_specs, template_repo, template_store
):
new_disks = []
for disk in disks_specs:
path, metadata = self._create_disk(
name=domain_name,
spec=disk,
template_repo=template_repo,
template_store=template_store,
)
new_disks.append(
{
'path': path,
'dev': disk['dev'],
'format': disk['format'],
'metadata': metadata,
'type': disk['type']
},
)
return new_disks
def virt_conf(
self, conf, template_repo=None, template_store=None, do_bootstrap=True
):
"""
Initializes all the virt infrastructure of the prefix, creating the
domain disks, acquiring any network leases and creating all the virt
related files and dirs inside this prefix.
Args:
conf (dict): Configuration spec
template_repo (TemplateRepository): template repository instance
template_store (TemplateStore): template store instance
Returns:
None
"""
os.environ['LAGO_PREFIX_PATH'] = self.paths.prefix
with utils.RollbackContext() as rollback:
rollback.prependDefer(
shutil.rmtree, self.paths.prefix, ignore_errors=True
)
conf = self._prepare_domains_images(
conf=conf,
template_repo=template_repo,
template_store=template_store,
)
conf = self._config_net_topology(conf)
conf['domains'] = self._copy_deploy_scripts_for_hosts(
domains=conf['domains']
)
self._virt_env = self.VIRT_ENV_CLASS(
prefix=self,
vm_specs=conf['domains'],
net_specs=conf['nets'],
)
if do_bootstrap:
self.virt_env.bootstrap()
self.save()
rollback.clear()
def start(self, vm_names=None):
"""
Start this prefix
Args:
vm_names(list of str): List of the vms to start
Returns:
None
"""
self.virt_env.start(vm_names=vm_names)
def stop(self, vm_names=None):
"""
Stop this prefix
Args:
vm_names(list of str): List of the vms to stop
Returns:
None
"""
self.virt_env.stop(vm_names=vm_names)
def create_snapshots(self, name):
"""
Creates one snapshot on all the domains with the given name
Args:
name (str): Name of the snapshots to create
Returns:
None
"""
self.virt_env.create_snapshots(name)
def revert_snapshots(self, name):
"""
Revert all the snapshots with the given name from all the domains
Args:
name (str): Name of the snapshots to revert
Returns:
None
"""
self.virt_env.revert_snapshots(name)
def get_snapshots(self):
"""
Retrieve info on all the snapshots from all the domains
Returns:
dict of str: list(str): dictionary with vm_name -> snapshot list
"""
return self.virt_env.get_snapshots()
def _create_virt_env(self):
"""
Create a new virt env from this prefix
Returns:
lago.virt.VirtEnv: virt env created from this prefix
"""
return self.VIRT_ENV_CLASS.from_prefix(self)
@property
def virt_env(self):
"""
Getter for this instance's virt env, creates it if needed
Returns:
lago.virt.VirtEnv: virt env instance used by this prefix
"""
if self._virt_env is None:
self._virt_env = self._create_virt_env()
return self._virt_env
def destroy(self):
"""
Destroy this prefix, running any cleanups and removing any files
inside it.
"""
self.cleanup()
shutil.rmtree(self._prefix)
def get_vms(self):
"""
Retrieve info on all the vms
Returns:
dict of str->list(str): dictionary with vm_name -> vm list
"""
return self.virt_env.get_vms()
def get_nets(self):
"""
Retrieve info on all the nets from all the domains
Returns:
dict of str->list(str): dictionary with net_name -> net list
"""
return self.virt_env.get_nets()
@classmethod
def resolve_prefix_path(cls, start_path=None):
"""
Look for an existing prefix in the given path, in a path/.lago dir, or
in a .lago dir under any of its parent directories
Args:
start_path (str): path to start the search from, if None passed, it
will use the current dir
Returns:
str: path to the found prefix
Raises:
RuntimeError: if no prefix was found
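Example:
Starting from /home/user/env/sub when /home/user/env/.lago is an
initialized prefix, the search climbs one directory at a time and
returns '/home/user/env/.lago'.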
"""
if not start_path or start_path == 'auto':
start_path = os.path.curdir
cur_path = start_path
LOGGER.debug('Checking if %s is a prefix', os.path.abspath(cur_path))
if cls.is_prefix(cur_path):
return os.path.abspath(cur_path)
# now search for a .lago directory that's a prefix on any parent dir
cur_path = join(start_path, '.lago')
while not cls.is_prefix(cur_path):
LOGGER.debug('%s is not a prefix', cur_path)
cur_path = os.path.normpath(
os.path.join(cur_path, '..', '..', '.lago')
)
LOGGER.debug('Checking %s for a prefix', cur_path)
if os.path.realpath(join(cur_path, '..')) == '/':
raise RuntimeError(
'Unable to find prefix for %s' %
os.path.abspath(start_path)
)
return os.path.abspath(cur_path)
@classmethod
def is_prefix(cls, path):
"""
Check if a path is a valid prefix
Args:
path(str): path to be checked
Returns:
bool: True if the given path is a prefix
"""
lagofile = paths.Paths(path).prefix_lagofile()
return os.path.isfile(lagofile)
@log_task('Collect artifacts')
def collect_artifacts(self, output_dir, ignore_nopath):
if os.path.exists(output_dir):
utils.rotate_dir(output_dir)
os.makedirs(output_dir)
def _collect_artifacts(vm):
with LogTask('%s' % vm.name()):
path = os.path.join(output_dir, vm.name())
os.makedirs(path)
vm.collect_artifacts(path, ignore_nopath)
utils.invoke_in_parallel(
_collect_artifacts,
self.virt_env.get_vms().values(),
)
def _get_scripts(self, host_metadata):
"""
Temporary method to retrieve the host scripts
TODO:
remove once the "ovirt-scripts" option gets deprecated
Args:
host_metadata(dict): host metadata to retrieve the scripts for
Returns:
list: deploy scripts for the host, empty if none found
"""
deploy_scripts = host_metadata.get('deploy-scripts', [])
if deploy_scripts:
return deploy_scripts
ovirt_scripts = host_metadata.get('ovirt-scripts', [])
if ovirt_scripts:
warnings.warn(
'Deprecated entry "ovirt-scripts" will not be supported in '
'the future, replace with "deploy-scripts"'
)
return ovirt_scripts
def _set_scripts(self, host_metadata, scripts):
"""
Temporary method to set the host scripts
TODO:
remove once the "ovirt-scripts" option gets deprecated
Args:
host_metadata(dict): host metadata to set scripts in
Returns:
dict: the updated metadata
"""
scripts_key = 'deploy-scripts'
if 'ovirt-scripts' in host_metadata:
scripts_key = 'ovirt-scripts'
host_metadata[scripts_key] = scripts
return host_metadata
def _copy_deploy_scripts_for_hosts(self, domains):
"""
Copy the deploy scripts for all the domains into the prefix scripts dir
Args:
domains(dict): spec with the domains info as when loaded from the
initfile
Returns:
None
"""
with LogTask('Copying any deploy scripts'):
for host_name, host_spec in domains.iteritems():
host_metadata = host_spec.get('metadata', {})
deploy_scripts = self._get_scripts(host_metadata)
new_scripts = self._copy_deploy_scripts(deploy_scripts)
self._set_scripts(
host_metadata=host_metadata,
scripts=new_scripts,
)
return domains
def _copy_deploy_scripts(self, scripts):
"""
Copy the given deploy scripts to the scripts dir in the prefix
Args:
scripts(list of str): list of paths of the scripts to copy to the
prefix
Returns:
list of str: list with the paths to the copied scripts, each
prefixed with $LAGO_PREFIX_PATH so the full path is not
hardcoded
"""
if not os.path.exists(self.paths.scripts()):
os.makedirs(self.paths.scripts())
new_scripts = []
for script in scripts:
script = os.path.expandvars(script)
if not os.path.exists(script):
raise RuntimeError('Script %s does not exist' % script)
sanitized_name = script.replace('/', '_')
new_script_cur_path = os.path.expandvars(
self.paths.scripts(sanitized_name)
)
shutil.copy(script, new_script_cur_path)
new_script_init_path = os.path.join(
'$LAGO_PREFIX_PATH',
os.path.basename(self.paths.scripts()),
sanitized_name,
)
new_scripts.append(new_script_init_path)
return new_scripts
def _deploy_host(self, host):
with LogTask('Deploy VM %s' % host.name()):
deploy_scripts = self._get_scripts(host.metadata)
if not deploy_scripts:
return
with LogTask('Wait for ssh connectivity'):
host.wait_for_ssh()
for script in deploy_scripts:
script = os.path.expanduser(os.path.expandvars(script))
with LogTask('Run script %s' % os.path.basename(script)):
ret, out, err = host.ssh_script(script, show_output=False)
if ret != 0:
LOGGER.debug('STDOUT:\n%s' % out)
LOGGER.error('STDERR\n%s' % err)
raise RuntimeError(
'%s failed with status %d on %s' % (
script,
ret,
host.name(),
),
)
@log_task('Deploy environment')
def deploy(self):
utils.invoke_in_parallel(
self._deploy_host, self.virt_env.get_vms().values()
)
|
leongold/lago
|
lago/prefix.py
|
Python
|
gpl-2.0
| 40,207
|
from numba import *
jitv = jit(void(), warnstyle='simple') #, nopython=True)
def simple_return():
"""
>>> result = jitv(simple_return)
Warning 14:4: Unreachable code
"""
return
print('Where am I?')
def simple_loops():
"""
>>> result = jitv(simple_loops)
Warning 28:8: Unreachable code
Warning 32:8: Unreachable code
Warning 36:8: Unreachable code
Warning 41:12: Unreachable code
Warning 46:8: Unreachable code
Warning 50:4: Unreachable code
"""
for i in range(10):
continue
print('Never be here')
while True:
break
print('Never be here')
while True:
break
print('Never be here')
for i in range(10):
for j in range(10):
return
print("unreachable")
else:
print("unreachable")
print("unreachable")
return
print("unreachable")
print("unreachable")
return
print("unreachable")
def conditional(a, b):
if a:
return 1
elif b:
return 2
else:
return 37
print('oops')
if __name__ == "__main__":
# jitv(simple_loops)
# jitv(simple_return)
import numba
numba.testing.testmod()
|
shiquanwang/numba
|
numba/control_flow/tests/test_w_unreachable.py
|
Python
|
bsd-2-clause
| 1,242
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright 2013 Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import cherrypy
import errno
import six
from ..describe import Description, describeRoute
from ..rest import Resource, RestException, filtermodel, loadmodel
from ...constants import AccessType, TokenScope
from girder.models.model_base import AccessException, GirderException
from girder.api import access
class File(Resource):
"""
API Endpoint for files. Includes utilities for uploading and downloading
them.
"""
def __init__(self):
super(File, self).__init__()
self.resourceName = 'file'
self.route('DELETE', (':id',), self.deleteFile)
self.route('DELETE', ('upload', ':id'), self.cancelUpload)
self.route('GET', ('offset',), self.requestOffset)
self.route('GET', (':id',), self.getFile)
self.route('GET', (':id', 'download'), self.download)
self.route('GET', (':id', 'download', ':name'), self.downloadWithName)
self.route('POST', (), self.initUpload)
self.route('POST', ('chunk',), self.readChunk)
self.route('POST', ('completion',), self.finalizeUpload)
self.route('POST', (':id', 'copy'), self.copy)
self.route('PUT', (':id',), self.updateFile)
self.route('PUT', (':id', 'contents'), self.updateFileContents)
@access.public(scope=TokenScope.DATA_READ)
@loadmodel(model='file', level=AccessType.READ)
@filtermodel(model='file')
@describeRoute(
Description('Get a file\'s information.')
.param('id', 'The ID of the file.', paramType='path')
.errorResponse()
.errorResponse('Read access was denied on the file.', 403)
)
def getFile(self, file, params):
return file
@access.user(scope=TokenScope.DATA_WRITE)
@describeRoute(
Description('Start a new upload or create an empty or link file.')
.responseClass('Upload')
.param('parentType', 'Type being uploaded into (folder or item).')
.param('parentId', 'The ID of the parent.')
.param('name', 'Name of the file being created.')
.param('size', 'Size in bytes of the file.',
dataType='integer', required=False)
.param('mimeType', 'The MIME type of the file.', required=False)
.param('linkUrl', 'If this is a link file, pass its URL instead '
'of size and mimeType using this parameter.', required=False)
.param('reference', 'If included, this information is passed to the '
'data.process event when the upload is complete.',
required=False)
.errorResponse()
.errorResponse('Write access was denied on the parent folder.', 403)
.errorResponse('Failed to create upload.', 500)
)
def initUpload(self, params):
"""
Before any bytes of the actual file are sent, a request should be made
to initialize the upload. This creates the temporary record of the
forthcoming upload that will be passed in chunks to the readChunk
method. If you pass a "linkUrl" parameter, it will make a link file
in the designated parent.
"""
self.requireParams(('name', 'parentId', 'parentType'), params)
user = self.getCurrentUser()
mimeType = params.get('mimeType', 'application/octet-stream')
parentType = params['parentType'].lower()
if parentType not in ('folder', 'item'):
raise RestException('The parentType must be "folder" or "item".')
parent = self.model(parentType).load(id=params['parentId'], user=user,
level=AccessType.WRITE, exc=True)
if 'linkUrl' in params:
return self.model('file').filter(
self.model('file').createLinkFile(
url=params['linkUrl'], parent=parent, name=params['name'],
parentType=parentType, creator=user), user)
else:
self.requireParams('size', params)
try:
upload = self.model('upload').createUpload(
user=user, name=params['name'], parentType=parentType,
parent=parent, size=int(params['size']), mimeType=mimeType,
reference=params.get('reference'))
except OSError as exc:
if exc.errno == errno.EACCES:
raise GirderException(
'Failed to create upload.',
'girder.api.v1.file.create-upload-failed')
raise
if upload['size'] > 0:
return upload
else:
return self.model('file').filter(
self.model('upload').finalizeUpload(upload), user)
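    # Illustrative client call (a sketch, not part of this class): the init
    # step is a single authenticated POST. 'api_root', 'token', and
    # 'parent_id' are hypothetical placeholders; Girder reads the session
    # token from the 'Girder-Token' header.
    #
    #   import requests
    #   upload = requests.post(
    #       api_root + '/file',
    #       headers={'Girder-Token': token},
    #       params={'parentType': 'item', 'parentId': parent_id,
    #               'name': 'data.bin', 'size': 1048576}).json()
    #   # upload['_id'] identifies the temporary record for /file/chunk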
@access.user(scope=TokenScope.DATA_WRITE)
@describeRoute(
Description('Finalize an upload explicitly if necessary.')
.notes('This is only required in certain non-standard upload '
'behaviors. Clients should know which behavior models require '
'the finalize step to be called in their behavior handlers.')
.param('uploadId', 'The ID of the upload record.', paramType='form')
.errorResponse('ID was invalid.')
.errorResponse('The upload does not require finalization.')
.errorResponse('Not enough bytes have been uploaded.')
.errorResponse('You are not the user who initiated the upload.', 403)
)
def finalizeUpload(self, params):
self.requireParams('uploadId', params)
user = self.getCurrentUser()
upload = self.model('upload').load(params['uploadId'], exc=True)
if upload['userId'] != user['_id']:
raise AccessException('You did not initiate this upload.')
# If we don't have as much data as we were told would be uploaded and
# the upload hasn't specified it has an alternate behavior, refuse to
# complete the upload.
if upload['received'] != upload['size'] and 'behavior' not in upload:
raise RestException(
'Server has only received %s bytes, but the file should be %s '
'bytes.' % (upload['received'], upload['size']))
file = self.model('upload').finalizeUpload(upload)
extraKeys = file.get('additionalFinalizeKeys', ())
return self.model('file').filter(file, user, additionalKeys=extraKeys)
@access.user(scope=TokenScope.DATA_WRITE)
@describeRoute(
Description('Request required offset before resuming an upload.')
.param('uploadId', 'The ID of the upload record.')
.errorResponse("The ID was invalid, or the offset did not match the "
"server's record.")
)
def requestOffset(self, params):
"""
This should be called when resuming an interrupted upload. It will
report the offset into the upload that should be used to resume.
:param uploadId: The _id of the temp upload record being resumed.
:returns: The offset in bytes that the client should use.
"""
self.requireParams('uploadId', params)
upload = self.model('upload').load(params['uploadId'], exc=True)
offset = self.model('upload').requestOffset(upload)
if isinstance(offset, six.integer_types):
upload['received'] = offset
self.model('upload').save(upload)
return {'offset': offset}
else:
return offset
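    # Resume sketch (illustrative; 'api_root', 'token', 'upload', and 'fh'
    # are hypothetical): ask the server where to continue, then seek the
    # local file handle to that offset before sending further chunks.
    #
    #   offset = requests.get(
    #       api_root + '/file/offset',
    #       headers={'Girder-Token': token},
    #       params={'uploadId': upload['_id']}).json()['offset']
    #   fh.seek(offset)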
@access.user(scope=TokenScope.DATA_WRITE)
@describeRoute(
Description('Upload a chunk of a file with multipart/form-data.')
.consumes('multipart/form-data')
.param('uploadId', 'The ID of the upload record.', paramType='form')
.param('offset', 'Offset of the chunk in the file.', dataType='integer',
paramType='form')
.param('chunk', 'The actual bytes of the chunk. For external upload '
'behaviors, this may be set to an opaque string that will be '
'handled by the assetstore adapter.',
dataType='File', paramType='body')
.errorResponse('ID was invalid.')
.errorResponse('Received too many bytes.')
.errorResponse('Chunk is smaller than the minimum size.')
.errorResponse('You are not the user who initiated the upload.', 403)
.errorResponse('Failed to store upload.', 500)
)
def readChunk(self, params):
"""
After the temporary upload record has been created (see initUpload),
the bytes themselves should be passed up in ordered chunks. The user
must remain logged in when passing each chunk, to authenticate that
the writer of the chunk is the same as the person who initiated the
upload. The passed offset is a verification mechanism for ensuring the
server and client agree on the number of bytes sent/received.
"""
self.requireParams(('offset', 'uploadId', 'chunk'), params)
user = self.getCurrentUser()
upload = self.model('upload').load(params['uploadId'], exc=True)
offset = int(params['offset'])
chunk = params['chunk']
if upload['userId'] != user['_id']:
raise AccessException('You did not initiate this upload.')
if upload['received'] != offset:
raise RestException(
'Server has received %s bytes, but client sent offset %s.' % (
upload['received'], offset))
try:
if isinstance(chunk, cherrypy._cpreqbody.Part):
return self.model('upload').handleChunk(upload, chunk.file)
else:
return self.model('upload').handleChunk(upload, chunk)
except IOError as exc:
if exc.errno == errno.EACCES:
raise Exception('Failed to store upload.')
raise
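    # Chunk-loop sketch (illustrative; names are hypothetical). Per the
    # route above, 'uploadId' and 'offset' travel as form fields and the
    # chunk bytes as the multipart body:
    #
    #   offset = 0
    #   while offset < size:
    #       chunk = fh.read(1024 * 1024)
    #       requests.post(api_root + '/file/chunk',
    #                     headers={'Girder-Token': token},
    #                     data={'uploadId': upload['_id'], 'offset': offset},
    #                     files={'chunk': chunk})
    #       offset += len(chunk)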
@access.cookie
@access.public(scope=TokenScope.DATA_READ)
@loadmodel(model='file', level=AccessType.READ)
@describeRoute(
Description('Download a file.')
.notes('This endpoint also accepts the HTTP "Range" header for partial '
'file downloads.')
.param('id', 'The ID of the file.', paramType='path')
.param('offset', 'Start downloading at this offset in bytes within '
'the file.', dataType='integer', required=False)
.param('endByte', 'If you only wish to download part of the file, '
'pass this as the index of the last byte to download. Unlike '
'the HTTP Range header, the endByte parameter is non-inclusive, '
'so you should set it to the index of the byte one past the '
'final byte you wish to receive.', dataType='integer',
required=False)
.param('contentDisposition', 'Specify the Content-Disposition response '
'header disposition-type value', required=False,
enum=['inline', 'attachment'], default='attachment')
.errorResponse('ID was invalid.')
.errorResponse('Read access was denied on the parent folder.', 403)
)
def download(self, file, params):
"""
Defers to the underlying assetstore adapter to stream a file out.
Requires read permission on the folder that contains the file's item.
"""
rangeHeader = cherrypy.lib.httputil.get_ranges(
cherrypy.request.headers.get('Range'), file.get('size', 0))
# The HTTP Range header takes precedence over query params
        if rangeHeader:
# Currently we only support a single range.
offset, endByte = rangeHeader[0]
else:
offset = int(params.get('offset', 0))
endByte = params.get('endByte')
if endByte is not None:
endByte = int(endByte)
contentDisp = params.get('contentDisposition', None)
if (contentDisp is not None and
contentDisp not in {'inline', 'attachment'}):
raise RestException('Unallowed contentDisposition type "%s".' %
contentDisp)
return self.model('file').download(file, offset, endByte=endByte,
contentDisposition=contentDisp)
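    # Partial-download sketch (illustrative; names are hypothetical). The
    # same bytes can be requested with the non-inclusive 'endByte' query
    # parameter or with a standard HTTP Range header, which takes
    # precedence over the query parameters:
    #
    #   requests.get(api_root + '/file/%s/download' % file_id,
    #                headers={'Girder-Token': token},
    #                params={'offset': 0, 'endByte': 1024})
    #   # equivalent: headers={'Girder-Token': token, 'Range': 'bytes=0-1023'}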
@access.cookie
@access.public(scope=TokenScope.DATA_READ)
@describeRoute(
Description('Download a file.')
.param('id', 'The ID of the file.', paramType='path')
.param('name', 'The name of the file. This is ignored.',
paramType='path')
.param('offset', 'Start downloading at this offset in bytes within '
'the file.', dataType='integer', required=False)
.notes('The name parameter doesn\'t alter the download. Some '
'download clients save files based on the last part of a path, '
'and specifying the name satisfies those clients.')
.errorResponse('ID was invalid.')
.errorResponse('Read access was denied on the parent folder.', 403)
)
def downloadWithName(self, id, name, params):
return self.download(id=id, params=params)
@access.user(scope=TokenScope.DATA_WRITE)
@loadmodel(model='file', level=AccessType.WRITE)
@describeRoute(
Description('Delete a file by ID.')
.param('id', 'The ID of the file.', paramType='path')
.errorResponse('ID was invalid.')
.errorResponse('Write access was denied on the parent folder.', 403)
)
def deleteFile(self, file, params):
self.model('file').remove(file)
@access.user(scope=TokenScope.DATA_WRITE)
@loadmodel(model='upload')
@describeRoute(
Description('Cancel a partially completed upload.')
.param('id', 'The ID of the upload.', paramType='path')
.errorResponse('ID was invalid.')
.errorResponse('You lack permission to cancel this upload.', 403)
)
def cancelUpload(self, upload, params):
user = self.getCurrentUser()
if upload['userId'] != user['_id'] and not user.get('admin'):
raise AccessException('You did not initiate this upload.')
self.model('upload').cancelUpload(upload)
return {'message': 'Upload canceled.'}
@access.user(scope=TokenScope.DATA_WRITE)
@loadmodel(model='file', level=AccessType.WRITE)
@filtermodel(model='file')
@describeRoute(
Description('Change file metadata such as name or MIME type.')
.param('id', 'The ID of the file.', paramType='path')
.param('name', 'The name to set on the file.', required=False)
.param('mimeType', 'The MIME type of the file.', required=False)
.errorResponse('ID was invalid.')
.errorResponse('Write access was denied on the parent folder.', 403)
)
def updateFile(self, file, params):
file['name'] = params.get('name', file['name']).strip()
        file['mimeType'] = (params.get('mimeType')
                            or file.get('mimeType') or '').strip()
return self.model('file').updateFile(file)
@access.user(scope=TokenScope.DATA_WRITE)
@loadmodel(model='file', level=AccessType.WRITE)
@describeRoute(
Description('Change the contents of an existing file.')
.param('id', 'The ID of the file.', paramType='path')
.param('size', 'Size in bytes of the new file.', dataType='integer')
.param('reference', 'If included, this information is passed to the '
'data.process event when the upload is complete.',
required=False)
.notes('After calling this, send the chunks just like you would with a '
'normal file upload.')
)
def updateFileContents(self, file, params):
self.requireParams('size', params)
user = self.getCurrentUser()
# Create a new upload record into the existing file
upload = self.model('upload').createUploadToFile(
file=file, user=user, size=int(params['size']),
reference=params.get('reference'))
if upload['size'] > 0:
return upload
else:
return self.model('file').filter(
self.model('upload').finalizeUpload(upload), user)
@access.user(scope=TokenScope.DATA_WRITE)
@loadmodel(model='file', level=AccessType.READ)
@loadmodel(model='item', map={'itemId': 'item'}, level=AccessType.WRITE)
@filtermodel(model='file')
@describeRoute(
Description('Copy a file.')
.param('id', 'The ID of the file.', paramType='path')
.param('itemId', 'The item to copy the file to.', required=True)
)
def copy(self, file, item, params):
return self.model('file').copyFile(
file, self.getCurrentUser(), item=item)
|
salamb/girder
|
girder/api/v1/file.py
|
Python
|
apache-2.0
| 17,532
|
def restricted(func):
"""
A decorator to confirm a user is logged in or redirect as needed.
"""
def login(self, *args, **kwargs):
# Redirect to login if user not logged in, else execute func.
if not self.current_user:
self.redirect("/login")
else:
func(self, *args, **kwargs)
return login
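# Illustrative usage (a sketch; 'BlogHandler' and 'render' are hypothetical
# stand-ins for this app's handler base class and template helper):
#
# class NewPostHandler(BlogHandler):
#     @restricted
#     def get(self):
#         # Only reached when self.current_user is set; anonymous visitors
#         # are redirected to /login by the decorator.
#         self.render("newpost.html")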
|
diegopettengill/multiuserblog
|
handlers/decorators.py
|
Python
|
mit
| 367
|
# -*- coding: utf-8 -*-
from zenqueue import json
from zenqueue import log
class AbstractQueueClient(object):
class QueueClientError(Exception): pass
class ActionError(QueueClientError): pass
class ClosedClientError(QueueClientError): pass
class RequestError(QueueClientError): pass
class Timeout(QueueClientError): pass
class UnknownError(QueueClientError): pass
actions = ['push', 'push_many', 'pull', 'pull_many']
log_name = 'zenq.client'
def __init__(self):
self.log = log.get_logger(self.log_name + ':%x' % (id(self),))
def send(self, data):
raise NotImplementedError
def action(self, action, args, kwargs):
raise NotImplementedError
def handle_response(self, data):
try:
status, result = json.loads(data)
        except ValueError as exc:
self.log.error('Invalid response returned: %r', data)
raise
# This handles the various response statuses the server can return.
if status == 'success':
self.log.debug('Request successful')
return result
elif status == 'error:action':
self.log.error('Action error occurred')
raise self.ActionError(result)
elif status == 'error:request':
self.log.error('Request error occurred')
raise self.RequestError(result)
elif status == 'error:timeout':
self.log.debug('Request timed out')
raise self.Timeout
elif status == 'error:unknown':
self.log.error('Unknown error occurred')
raise self.UnknownError(result)
def __getattr__(self, attribute):
if attribute in self.actions:
def wrapper(*args, **kwargs):
return self.action(attribute, args, kwargs)
return wrapper
raise AttributeError(attribute)
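# A minimal concrete client (an illustrative sketch, not part of zenqueue).
# Only the transport is missing from the abstract base; the wire format
# assumed below -- a JSON-encoded [action, args, kwargs] triple answered by
# a [status, result] pair -- is an illustration, not the documented protocol.
class EchoQueueClient(AbstractQueueClient):
    def send(self, data):
        # A real transport would write `data` to a socket and return the
        # server's reply; this stub just echoes a success envelope.
        return json.dumps(['success', json.loads(data)])
    def action(self, action, args, kwargs):
        # Serialize the call, hand it to the transport, and funnel the reply
        # through the shared status handling in handle_response().
        reply = self.send(json.dumps([action, list(args), kwargs]))
        return self.handle_response(reply)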
|
zacharyvoase/zenqueue
|
zenqueue/client/common.py
|
Python
|
mit
| 1,921
|
import random
import string
from collection.models import CollectionVersion, Collection
from concepts.models import Concept, ConceptVersion, LocalizedText
from oclapi.models import ACCESS_TYPE_EDIT, ACCESS_TYPE_VIEW
from orgs.models import Organization
from sources.models import Source, SourceVersion
from users.models import UserProfile
from mappings.models import Mapping, MappingVersion
from django.contrib.auth.models import User
from django.test import TestCase
class OclApiBaseTestCase(TestCase):
def setUp(self):
self._clear_fixtures()
self.user = create_user()
org_ocl = create_organization("OCL")
create_lookup_concept_classes(self.user, org_ocl)
def _clear_fixtures(self):
LocalizedText.objects.filter().delete()
ConceptVersion.objects.filter().delete()
Concept.objects.filter().delete()
MappingVersion.objects.filter().delete()
Mapping.objects.filter().delete()
SourceVersion.objects.filter().delete()
Source.objects.filter().delete()
CollectionVersion.objects.filter().delete()
Collection.objects.filter().delete()
Organization.objects.filter().delete()
UserProfile.objects.filter().delete()
User.objects.filter().delete()
def tearDown(self):
self._clear_fixtures()
def generate_random_string(length=5):
return ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits) for _ in range(length))
def create_localized_text(name, locale='en', type='FULLY_SPECIFIED', locale_preferred=False):
return LocalizedText(name=name, locale=locale, type=type, locale_preferred=locale_preferred)
def create_user():
suffix = generate_random_string()
user = User.objects.create_user(
username="test{0}".format(suffix),
password="test{0}".format(suffix),
email='user{0}@test.com'.format(suffix),
first_name='Test',
last_name='User'
)
create_user_profile(user)
# set password again as create_user hashed it
user.password = "test{0}".format(suffix)
return user
def create_user_profile(user):
suffix = generate_random_string()
mnemonic = user.username if user else 'user{0}'.format(suffix)
return UserProfile.objects.create(user=user, mnemonic=mnemonic)
def create_organization(name=None, mnemonic=None):
suffix = generate_random_string()
name = name if name else 'org{0}'.format(suffix)
mnemonic = mnemonic if mnemonic else name
return Organization.objects.create(name=name, mnemonic=mnemonic)
def create_source(user, validation_schema=None, organization=None, name=None):
suffix = generate_random_string()
source = Source(
name=name if name else "source{0}".format(suffix),
mnemonic=name if name else "source{0}".format(suffix),
full_name=name if name else "Source {0}".format(suffix),
source_type='Dictionary',
public_access=ACCESS_TYPE_EDIT,
default_locale='en',
supported_locales=['en'],
website='www.source.com',
description='This is a test source',
custom_validation_schema=validation_schema
)
if organization is not None:
kwargs = {
'parent_resource': organization
}
else:
kwargs = {
'parent_resource': UserProfile.objects.get(user=user)
}
Source.persist_new(source, user, **kwargs)
return Source.objects.get(id=source.id)
def create_collection(user, validation_schema=None, name=None):
suffix = generate_random_string()
collection = Collection(
name=name if name else "collection{0}".format(suffix),
mnemonic=name if name else "collection{0}".format(suffix),
full_name=name if name else "Collection {0}".format(suffix),
collection_type='Dictionary',
public_access=ACCESS_TYPE_EDIT,
default_locale='en',
supported_locales=['en'],
website='www.collection2.com',
description='This is the second test collection',
custom_validation_schema=validation_schema
)
kwargs = {
'parent_resource': UserProfile.objects.get(user=user)
}
Collection.persist_new(collection, user, **kwargs)
return Collection.objects.get(id=collection.id)
def create_concept(user, source, source_version=None, names=None, mnemonic=None, descriptions=None, concept_class=None, datatype=None,
force=False, extras=None):
suffix = generate_random_string()
if not names and not force:
names = [create_localized_text("name{0}".format(suffix))]
if not mnemonic and not force:
mnemonic = 'concept{0}'.format(suffix)
if not descriptions and not force:
descriptions = [create_localized_text("desc{0}".format(suffix))]
concept = Concept(
mnemonic=mnemonic,
updated_by=user,
datatype=datatype if datatype else "None",
concept_class=concept_class if concept_class else 'Diagnosis',
names=names,
descriptions=descriptions,
extras=extras
)
if source is not None:
kwargs = {
'parent_resource': source,
}
if source_version is not None:
kwargs['parent_resource_version'] = source_version
errors = Concept.persist_new(concept, user, **kwargs)
else:
errors = Concept.persist_new(concept, user)
return concept, errors
def create_mapping(user, source, from_concept, to_concept, map_type="SAME-AS", mnemonic=None):
    mapping = None
    if mnemonic:
        mapping = Mapping(mnemonic=mnemonic, created_by=user, updated_by=user, parent=source, map_type=map_type,
                          from_concept=from_concept, to_concept=to_concept, public_access=ACCESS_TYPE_VIEW)
    else:
        mapping = Mapping(created_by=user, updated_by=user, parent=source, map_type=map_type,
                          from_concept=from_concept, to_concept=to_concept, public_access=ACCESS_TYPE_VIEW)
kwargs = {
'parent_resource': source,
}
Mapping.persist_new(mapping, user, **kwargs)
return Mapping.objects.get(id=mapping.id)
def create_lookup_concept_classes(user, org_ocl):
classes_source = create_source(user, organization=org_ocl, name="Classes")
datatypes_source = create_source(user, organization=org_ocl, name="Datatypes")
nametypes_source = create_source(user, organization=org_ocl, name="NameTypes")
descriptiontypes_source = create_source(user, organization=org_ocl, name="DescriptionTypes")
maptypes_source = create_source(user, organization=org_ocl, name="MapTypes")
locales_source = create_source(user, organization=org_ocl, name="Locales")
create_concept(user, classes_source, concept_class="Concept Class", names=[create_localized_text("Diagnosis")])
create_concept(user, classes_source, concept_class="Concept Class", names=[create_localized_text("Drug")])
create_concept(user, classes_source, concept_class="Concept Class", names=[create_localized_text("Test")])
create_concept(user, classes_source, concept_class="Concept Class", names=[create_localized_text("Procedure")])
create_concept(user, datatypes_source, concept_class="Datatype", names=[create_localized_text("None"), create_localized_text("N/A")])
create_concept(user, datatypes_source, concept_class="Datatype", names=[create_localized_text("Numeric")])
create_concept(user, datatypes_source, concept_class="Datatype", names=[create_localized_text("Coded")])
create_concept(user, datatypes_source, concept_class="Datatype", names=[create_localized_text("Text")])
create_concept(user, nametypes_source, concept_class="NameType",
names=[create_localized_text("FULLY_SPECIFIED"), create_localized_text("Fully Specified")])
create_concept(user, nametypes_source, concept_class="NameType",
names=[create_localized_text("Short"), create_localized_text("SHORT")])
create_concept(user, nametypes_source, concept_class="NameType",
names=[create_localized_text("INDEX_TERM"), create_localized_text("Index Term")])
create_concept(user, nametypes_source, concept_class="NameType", names=[create_localized_text("None")])
create_concept(user, descriptiontypes_source, concept_class="DescriptionType", names=[create_localized_text("None")])
create_concept(user, descriptiontypes_source, concept_class="DescriptionType", names=[create_localized_text("FULLY_SPECIFIED")])
create_concept(user, descriptiontypes_source, concept_class="DescriptionType", names=[create_localized_text("Definition")])
create_concept(user, maptypes_source, concept_class="MapType",
names=[create_localized_text("SAME-AS"), create_localized_text("Same As")])
create_concept(user, maptypes_source, concept_class="MapType", names=[create_localized_text("Is Subset of")])
create_concept(user, maptypes_source, concept_class="MapType", names=[create_localized_text("Different")])
create_concept(user, maptypes_source, concept_class="MapType",
names=[create_localized_text("BROADER-THAN"), create_localized_text("Broader Than"),
create_localized_text("BROADER_THAN")])
create_concept(user, maptypes_source, concept_class="MapType",
names=[create_localized_text("NARROWER-THAN"), create_localized_text("Narrower Than"),
create_localized_text("NARROWER_THAN")])
create_concept(user, maptypes_source, concept_class="MapType", names=[create_localized_text("Q-AND-A")])
create_concept(user, maptypes_source, concept_class="MapType", names=[create_localized_text("More specific than")])
create_concept(user, maptypes_source, concept_class="MapType", names=[create_localized_text("Less specific than")])
create_concept(user, maptypes_source, concept_class="MapType", names=[create_localized_text("Something Else")])
create_concept(user, locales_source, concept_class="Locale", names=[create_localized_text("en")])
create_concept(user, locales_source, concept_class="Locale", names=[create_localized_text("es")])
create_concept(user, locales_source, concept_class="Locale", names=[create_localized_text("fr")])
create_concept(user, locales_source, concept_class="Locale", names=[create_localized_text("tr")])
create_concept(user, locales_source, concept_class="Locale", names=[create_localized_text("Abkhazian", "en")])
create_concept(user, locales_source, concept_class="Locale",names=[create_localized_text("English", "en")])
|
snyaggarwal/oclapi
|
ocl/test_helper/base.py
|
Python
|
mpl-2.0
| 10,616
|
# gtcal.calendar
# Calendar keeps track of events and loads information from disk.
#
# Author: Benjamin Bengfort <bb830@georgetown.edu>
# Created: Mon Sep 14 19:17:10 2015 -0400
#
# Copyright (C) 2015 Georgetown University
# For license information, see LICENSE.txt
#
# ID: calendar.py [] benjamin@bengfort.com $
"""
Calendar keeps track of events and loads information from disk.
"""
##########################################################################
## Imports
##########################################################################
import os
import json
from datetime import datetime
from collections import defaultdict
from gtcal.events import Event
from gtcal.utils import SHORT_DATE
from gtcal.utils import CalendarEncoder, CalendarDecoder
##########################################################################
## Main Calendar App
##########################################################################
class Calendar(object):
"""
A calender holds and manages events, saving and loading them to disk.
"""
def __init__(self, path=None):
# Storage is a dictionary of date --> list of events
self.storage = defaultdict(list)
self.location = path
def load(self):
"""
Load data from path and store in memory.
        Note that load first checks that the data file exists; if it does
        not, it silently does nothing rather than raising an exception.
"""
# Get the path and check if it exists
if not os.path.exists(self.location): return
# Open the path for reading
with open(self.location, 'r') as data:
# load the data from disk
data = json.load(data, cls=CalendarDecoder)
# parse the data into events
for key, values in data.iteritems():
for value in values:
self.add_event(**value)
def save(self):
"""
Save the calendar from memory back to disk.
"""
# Open the file for writing
with open(self.location, 'w') as f:
# Dump the data to the file
json.dump(self.storage, f, indent=2, cls=CalendarEncoder)
def add_event(self, **kwargs):
"""
Adds an event by creating the event with the arbitrary list of
arguments that is passed into this method, then stores it according
to the year and the day in our internal storage.
"""
event = Event(**kwargs) # Create event
self.storage[event.key].append(event) # Store the event
return event
def todays_agenda(self):
"""
        Creates a nice printout of the agenda for today.
"""
today = datetime.today() # What day is today?
events = self.storage[today.strftime(SHORT_DATE)] # Get the events out of storage
# Check if we have anything
if not events:
return "No events scheduled for today!"
# Otherwise, start creating agenda
output = []
# Create a nice agenda header
output.append("Agenda for %s:" % today.strftime("%B %d, %Y"))
output.append(" You have %i events" % len(events))
output.append("=" * len(output[0]))
output.append("") # This will create a blank line
for event in events:
output.append(event.pprint(date_format="%I:%M %p"))
output.append("-" * len(output[0]))
output.append("") # This will create a blank line
return "\n".join(output)
def __len__(self):
numevents = 0
for date in self.storage:
numevents += len(self.storage[date])
return numevents
def __str__(self):
output = str(self.__class__.__name__)
if self.location:
output += " at %s" % self.location
output += " with %i events" % (len(self))
return output
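# Illustrative usage (a sketch; the Event keyword arguments below are
# assumptions based only on how add_event forwards **kwargs to Event):
if __name__ == '__main__':
    cal = Calendar(path='calendar.json')
    cal.load()
    cal.add_event(name='Standup', date=datetime.today())
    print(cal.todays_agenda())
    cal.save()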
|
rebeccabilbro/calendar
|
gtcal/calendars.py
|
Python
|
mit
| 3,924
|
# -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from .models import CrocodocDocument
from . import signals as crocodoc_signals
import json
import logging
from bunch import Bunch
logger = logging.getLogger('django.request')
class CrocoDocConnectService(object):
"""
Service to setup a crocodoc object
"""
obj = None
is_new = None
def __init__(self, document_object, app_label, field_name='attachment', **kwargs):
upload = kwargs.get('upload_immediately', False)
reviewer = kwargs.get('reviewer', None)
#
# Get the content_type of the passed in model
#
content_type = ContentType.objects.get(model=document_object.__class__.__name__.lower(),
app_label=app_label)
#
# Get or Create a new Crocodoc object associated with the document_object passed in
#
self.obj, self.is_new = CrocodocDocument.objects.get_or_create(content_object_type=content_type,
object_id=document_object.pk,
object_attachment_fieldname=field_name,
reviewer=reviewer)
if upload in [True, 'true', 1, '1']:
# cause an upload to happen
self.generate()
def generate(self):
self.obj.crocodoc_service.uuid
def download_pdf_file(self):
return self.obj.crocodoc_service.download_pdf_file()
class CrocodocWebhookService(object):
payload = None
def __init__(self, payload, *args, **kwargs):
self.user = kwargs.get('user')
self.payload = json.loads(payload)
self.items = [Bunch(**i) for i in self.payload]
def process(self):
page = None
processed = []
for c, i in enumerate(self.items):
event = i.get('event')
event_type = i.get('type')
if i.get('page') is not None:
page = i.get('page')
logger.info("{event} is of type {event_type} on page: {page}".format(event_type=event_type,
event=event,
page=page))
if event == 'comment.create':
i = CrocodocCommentCreateEvent(page=page, **i)
            elif event == 'comment.update':
i = CrocodocCommentReplyEvent(page=page, **i)
elif event == 'comment.delete':
i = CrocodocCommentDeleteEvent(**i)
elif event in ['annotation.create', 'annotation.delete']:
if event_type == 'textbox':
i = CrocodocAnnotationTextboxEvent(**i)
elif event_type == 'highlight':
i = CrocodocAnnotationHighlightEvent(**i)
elif event_type == 'strikeout':
i = CrocodocAnnotationStrikeoutEvent(**i)
elif event_type == 'drawing':
i = CrocodocAnnotationDrawingEvent(**i)
elif event_type == 'point':
i = CrocodocAnnotationPointEvent(**i)
processed.append(i.process(sender=self) if hasattr(i, 'process') else None)
return processed
class CrocodocBaseEvent(Bunch):
signal = None
_verb = None
_deleted_verb = None
_user = None
_attachment = None
label = 'Crocodoc Webhook Callback'
content = None
event = None
type = None
owner = None
page = None
doc = None
uuid = None
def __init__(self, *args, **kwargs):
super(CrocodocBaseEvent, self).__init__(*args, **kwargs)
self.__dict__.update(kwargs) # @TODO ugly ugly ugly fix this
@property
def user(self):
""" Crocodoc provides userid as string(pk,user_name)"""
if self._user is None:
pk, full_name = self.owner.split(',')
pk = int(pk)
self._user = User.objects.get(pk=pk)
return self._user
@property
def attachment(self):
if self._attachment is None:
from .models import CrocodocDocument
try:
self._attachment = CrocodocDocument.objects.get(uuid=str(self.doc)) # must call str to make filter happen
except CrocodocDocument.DoesNotExist:
logger.critical('CrocodocDocument.DoesNotExist: %s' % self.doc)
return self._attachment
@property
def verb(self):
if self.event is not None and 'delete' in self.event:
return self._deleted_verb
else:
return self._verb
def process(self, sender):
document = self.attachment
target = filename = None
if document is not None:
target = document.source_object
#
# We allow the fieldname to be variable and is specified at the model
# level by setting object_attachment_fieldname but default to attachment
#
filename = getattr(target, document.object_attachment_fieldname, 'attachment').name
user_pk, user_name = self.owner.split(',') # crocodoc provides as pk,name
self.signal.send(sender=sender,
verb=self.verb,
document=document,
target=target,
attachment_name=filename,
user_info=(int(user_pk), user_name),
crocodoc_event=self.event,
uuid=getattr(self, 'uuid', None),
content=self.content)
logger.info('Send signal: {signal} {verb}'.format(signal=self.signal.__class__.__name__, verb=self.verb))
return True
logger.error('No document could be found by that id: {doc}'.format(doc=str(self.doc)))
return False
class CrocodocCommentCreateEvent(CrocodocBaseEvent):
signal = crocodoc_signals.crocodoc_comment_create
_verb = 'Commented on a Document'
class CrocodocCommentReplyEvent(CrocodocBaseEvent):
signal = crocodoc_signals.crocodoc_comment_update
_verb = 'Replied to a comment on a Document'
class CrocodocCommentDeleteEvent(CrocodocBaseEvent):
signal = crocodoc_signals.crocodoc_comment_delete
    _verb = 'Deleted a Comment on a Document'
class CrocodocAnnotationHighlightEvent(CrocodocBaseEvent):
signal = crocodoc_signals.crocodoc_annotation_highlight
    _verb = 'Highlighted some text on a Document'
    _deleted_verb = 'Deleted a Highlight of some text on a Document'
class CrocodocAnnotationStrikeoutEvent(CrocodocBaseEvent):
signal = crocodoc_signals.crocodoc_annotation_strikeout
_verb = 'Struck out some text on a Document'
_deleted_verb = 'Deleted the Strikeout of some text on a Document'
class CrocodocAnnotationTextboxEvent(CrocodocBaseEvent):
signal = crocodoc_signals.crocodoc_annotation_textbox
_verb = 'Added a text element on a Document'
_deleted_verb = 'Deleted a text element on a Document'
class CrocodocAnnotationDrawingEvent(CrocodocBaseEvent):
signal = crocodoc_signals.crocodoc_annotation_drawing
_verb = 'Added a drawing element on a Document'
_deleted_verb = 'Deleted a drawing element on a Document'
class CrocodocAnnotationPointEvent(CrocodocBaseEvent):
signal = crocodoc_signals.crocodoc_annotation_point
_verb = 'Added a point element to a Document'
_deleted_verb = 'Deleted a point element on a Document'
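# --- Illustrative wiring (a sketch, not part of this package) ---
# A minimal Django view that feeds Crocodoc's webhook callback into the
# service above. The 'payload' form field is an assumption about how the
# callback arrives; hook this up in urls.py (likely CSRF-exempt).
def example_crocodoc_webhook(request):
    from django.http import HttpResponse
    payload = request.POST.get('payload', request.body)
    CrocodocWebhookService(payload, user=getattr(request, 'user', None)).process()
    return HttpResponse('OK')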
|
rosscdh/django-crocodoc
|
dj_crocodoc/services.py
|
Python
|
gpl-2.0
| 7,767
|
import sublime, sublime_plugin
import webbrowser
class OpenInBrowserCommand(sublime_plugin.TextCommand):
def run(self, edit):
if self.view.file_name():
webbrowser.open_new_tab("file://" + self.view.file_name())
    def is_visible(self):
        file_name = self.view.file_name()
        return file_name is not None and file_name.endswith(
            (".html", ".HTML", ".htm", ".HTM"))
|
koery/win-sublime
|
Data/Packages/Default/open_in_browser.py
|
Python
|
mit
| 509
|
#!/usr/bin/env python3
# This code is an example for a tutorial on Ubuntu Unity/Gnome AppIndicators:
# http://candidtim.github.io/appindicator/2014/09/13/ubuntu-appindicator-step-by-step.html
# source : https://gist.github.com/jmarroyave/a24bf173092a3b0943402f6554a2094d
# see also : http://www.devdungeon.com/content/desktop-notifications-python-libnotify
# http://candidtim.github.io/appindicator/2014/09/13/ubuntu-appindicator-step-by-step.html
# http://python-gtk-3-tutorial.readthedocs.io/en/latest/index.html
# https://openclassrooms.com/courses/pygtk/les-widgets-suite-partie-1
# https://openclassrooms.com/courses/apprenez-a-programmer-en-python/premiere-approche-des-classes
import os
import signal
from urllib import request
from urllib.error import URLError
from urllib.request import urlopen
import gi
gi.require_version('Gtk', '3.0')
gi.require_version('AppIndicator3', '0.1')
gi.require_version('Notify', '0.7')
from gi.repository import Gtk as gtk, GLib, GObject, AppIndicator3 as appindicator, Notify
from subprocess import call, check_output
APPINDICATOR_ID = 'Backup NeuronFarm indicator app'
def backuplaptop_callback_func():
call(['bash', '/home/nomad/bin/backup-laptop-neuronfarm.sh'])
def status(_):
last = check_output(['bash', '/home/nomad/bin/backup-laptop-neuronfarm.sh', 'last'])
Notify.init("App Name")
# Create the notification object
summary = "Last snapshot"
    body = last.decode('utf-8', 'replace')  # check_output returns bytes
icon = "/usr/share/icons/gnome/24x24/emotes/face-smile-big.png"
notification = Notify.Notification.new(
summary,
body, # Optional
icon,
)
notification.show()
def gotobackup(_):
    ### /!\ CHANGE ME
os.system('xdg-open "%s"' % '/media/gobt/Backup-houyo/' )
def backup(_):
backuplaptop_callback_func()
Notify.init("App Name")
# Create the notification object
summary = "Backing up!"
body = "Meeting at 3PM!"
icon = "/usr/share/icons/gnome/24x24/emotes/face-smile-big.png"
notification = Notify.Notification.new(
summary,
body, # Optional
icon,
)
notification.add_action(
"action_click",
"Reply to Message",
backuplaptop_callback_func,
None # Arguments
)
notification.show()
def quit(_):
Notify.uninit()
gtk.main_quit()
# good example
# https://askubuntu.com/questions/108035/writing-indicators-with-python-gir-and-gtk3
def build_menu(menu):
item_status = gtk.MenuItem('Status')
#item_status.connect('activate', status)
menu.append(item_status)
item_status.set_sensitive(False)
item_backup = gtk.MenuItem('Backup')
item_backup.connect('activate', backup)
menu.append(item_backup)
item_gotobackup = gtk.MenuItem('Open remote backup folder')
item_gotobackup.connect('activate', gotobackup)
menu.append(item_gotobackup)
item_quit = gtk.MenuItem('Quit')
item_quit.connect('activate', quit)
menu.append(item_quit)
menu.show_all()
return menu
def timespent(indicator, menu):
    # change the menu icon
indicator.set_icon('/usr/share/icons/gnome/24x24/emotes/face-embarrassed.png')
    # here, add an argument to choose menu 1 or menu 2
#indicator.set_menu(build_menu(menu))
#me = indicator.get_menu()
return True
def main():
signal.signal(signal.SIGINT, signal.SIG_DFL)
indicator = appindicator.Indicator.new(APPINDICATOR_ID, \
os.path.abspath('/usr/share/icons/gnome/24x24/emotes/face-smile-big.png'), \
appindicator.IndicatorCategory.SYSTEM_SERVICES)
indicator.set_status(appindicator.IndicatorStatus.ACTIVE)
menu = gtk.Menu()
indicator.set_menu(build_menu(menu))
# GObject.timeout_add(1000, timespent, indicator, menu)
gtk.main()
main()
|
nomad-fr/scripts-systems
|
myappindicator.py
|
Python
|
gpl-3.0
| 3,802
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('article', '0013_auto_20160507_1405'),
]
operations = [
migrations.AlterField(
model_name='article',
name='block',
field=models.ForeignKey(verbose_name='\u6240\u5c5e\u677f\u5757', to='buluojianshe.Benjamin'),
),
]
|
benjaminfs/myforum
|
article/migrations/0014_auto_20160508_0534.py
|
Python
|
gpl-2.0
| 457
|
# coding: utf-8
from __future__ import unicode_literals, absolute_import
try:
import requests as r
except ImportError:
r = None
class TigrisSession(object):
"""
Base session layer for Tigris.
"""
def __init__(self,
base_url,
default_headers={}):
"""
:param base_url:
The customer endpoint docroot.
:type base_url:
`str`
        :param default_headers:
            Headers applied to every request made through this session.
        :type default_headers:
            `dict`
"""
self._base_url = base_url
self._session = r.Session()
self._default_headers = default_headers
self._timeout = 80
def _request(self, method, endpoint, headers, post_data=None, files=None):
"""
Makes an HTTP request
:param method:
The name of the method
:type method:
`str`
:param endpoint:
The name of the endpoint
:type endpoint:
`str`
:param headers:
The name of the endpoint
:type headers:
`dict`
:param post_data:
PATCH/POST/PUT data.
:type post_data:
`dict`
:rtype:
`tuple` of `str`, `int`, `dict`
"""
url = '{0}/{1}'.format(self._base_url, endpoint)
try:
try:
result = self._session.request(method,
url,
headers=headers,
json=post_data,
files=files,
timeout=self._timeout)
except TypeError as e:
raise TypeError(
'WARNING: We couldn\'t find a proper instance of '
'Python `requests`. You may need to update or install '
'the library, which you can do with `pip`: '
' To update `requests`: '
''
' pip install -U requests '
' To install `requests`:'
''
' pip install requests. '
'Alternatively, your POST data may be malformed. '
'Underlying error: {0}'.format(e))
content = result.json()
status_code = result.status_code
        except Exception:
            # Re-raise unchanged; wrapping in a bare Exception would discard
            # the original type and traceback.
            raise
return content, status_code, result.headers
def _delete(self, endpoint, headers={}):
"""
Executes a DELETE request
:param endpoint:
The name of the endpoint
:type endpoint:
`url`
:rtype:
`tuple`
"""
joined_headers = dict(headers, **self._default_headers)
return self._request('delete', endpoint, joined_headers)
def _get(self, endpoint, headers={}):
"""
Executes a GET request
:param endpoint:
The name of the endpoint
:type endpoint:
`url`
:rtype:
`tuple`
"""
joined_headers = dict(headers, **self._default_headers)
return self._request('get', endpoint, joined_headers)
def _head(self, endpoint, headers={}):
"""
Executes a HEAD request
:param endpoint:
The name of the endpoint
:type endpoint:
`url`
:rtype:
`tuple`
"""
joined_headers = dict(headers, **self._default_headers)
return self._request('head', endpoint, joined_headers)
def _patch(self, endpoint, data={}, headers={}):
"""
Executes a PATCH request
:param endpoint:
The name of the endpoint
:type endpoint:
`url`
:param data:
The payload data to send
:type data:
`dict`
:rtype:
`tuple`
"""
joined_headers = dict(headers, **self._default_headers)
return self._request(
'patch',
endpoint,
joined_headers,
post_data=data)
def _post(self, endpoint, data={}, headers={}, files=None):
"""
Executes a POST request
:param endpoint:
The name of the endpoint
:type endpoint:
`url`
:param data:
The payload data to send
:type data:
`dict`
:rtype:
`tuple`
"""
joined_headers = dict(headers, **self._default_headers)
return self._request(
'post',
endpoint,
joined_headers,
post_data=data,
files=files)
def _put(self, endpoint, data={}, headers={}):
"""
        Executes a PUT request
:param endpoint:
The name of the endpoint
:type endpoint:
`url`
:param data:
The payload data to send
:type data:
`dict`
:rtype:
`tuple`
"""
joined_headers = dict(headers, **self._default_headers)
return self._request(
'put',
endpoint,
joined_headers,
post_data=data)
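# Illustrative usage (a sketch; the docroot and endpoint are hypothetical):
if __name__ == '__main__':
    session = TigrisSession('https://api.example.com',
                            default_headers={'Accept': 'application/json'})
    content, status_code, headers = session._get('customers/42')
    print(status_code, content)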
|
jogral/tigris-python-sdk
|
tigrissdk/session/tigris_session.py
|
Python
|
apache-2.0
| 5,263
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# tests/server/server_rpc.py
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import types
import unittest
from king_phisher import errors
from king_phisher import version
from king_phisher.server import server_rpc
from king_phisher.server.database import models as db_models
from king_phisher.testing import KingPhisherServerTestCase
from king_phisher.utilities import random_string
class ServerRPCTests(KingPhisherServerTestCase):
def test_rpc_config_get(self):
self.assertEqual(self.rpc('config/get', 'server.address.port'), self.config.get('server.address.port'))
server_address = self.rpc('config/get', ['server.address.host', 'server.address.port'])
self.assertIsInstance(server_address, dict)
self.assertTrue('server.address.host' in server_address)
self.assertTrue('server.address.port' in server_address)
self.assertEqual(server_address['server.address.host'], self.config.get('server.address.host'))
self.assertEqual(server_address['server.address.port'], self.config.get('server.address.port'))
def test_rpc_config_get_permissions(self):
self.assertTrue(self.config.has_option('server.database'))
self.assertRPCPermissionDenied('config/get', 'server.database')
def test_rpc_campaign_delete(self):
campaign_name = random_string(10)
campaign_id = self.rpc('campaign/new', campaign_name)
self.rpc('db/table/delete', 'campaigns', campaign_id)
def test_rpc_campaign_new(self):
campaign_name = random_string(10)
campaign_id = self.rpc('campaign/new', campaign_name)
self.assertIsInstance(campaign_id, int)
campaigns = self.rpc.remote_table('campaigns')
self.assertIsInstance(campaigns, types.GeneratorType)
campaigns = list(campaigns)
self.assertEqual(len(campaigns), 1)
campaign = campaigns[0]
self.assertEqual(campaign.id, campaign_id)
self.assertEqual(campaign.name, campaign_name)
def test_rpc_config_set(self):
config_key = server_rpc.CONFIG_WRITEABLE[0]
config_value = random_string(10)
self.rpc('config/set', {config_key: config_value})
self.assertEqual(self.rpc('config/get', config_key), config_value)
def test_rpc_config_set_permissions(self):
config_key = random_string(10)
config_value = random_string(10)
self.assertRPCPermissionDenied('config/set', {config_key: config_value})
def test_rpc_graphql(self):
response = self.rpc('graphql', '{ version }')
self.assertIn('data', response)
self.assertIn('errors', response)
self.assertIsNotNone(response['data'])
self.assertIsNone(response['errors'])
response = response['data'].get('version')
        self.assertEqual(response, version.version)
def test_rpc_graphql_rpc_errors(self):
bad_query = '{ foobar }'
with self.assertRaises(errors.KingPhisherGraphQLQueryError) as context:
self.rpc.graphql(bad_query)
error = context.exception
self.assertIsInstance(error.errors, list)
self.assertIsNotEmpty(error.errors)
self.assertIsInstance(error.query, str)
self.assertEqual(error.query, bad_query)
def test_rpc_graphql_raw_errors(self):
response = self.rpc('graphql', '{ foobar }')
self.assertIn('data', response)
self.assertIn('errors', response)
self.assertIsNone(response['data'])
self.assertIsNotNone(response['errors'])
self.assertIsNotEmpty(response['errors'])
for error in response['errors']:
self.assertIsInstance(error, str)
def test_rpc_is_unauthorized(self):
http_response = self.http_request('/ping', method='RPC')
self.assertHTTPStatus(http_response, 401)
def test_rpc_ping(self):
self.assertTrue(self.rpc('ping'))
def test_rpc_remote_table(self):
self.test_rpc_campaign_new()
campaign = list(self.rpc.remote_table('campaigns'))[0]
campaign = campaign._asdict()
self.assertTrue(isinstance(campaign, dict))
self.assertEqual(sorted(campaign.keys()), sorted(db_models.database_tables['campaigns']))
def test_rpc_shutdown(self):
self.assertIsNone(self.rpc('shutdown'))
self.shutdown_requested = True
def test_rpc_table_count(self):
self.assertEqual(self.rpc('db/table/count', 'campaigns'), 0)
self.assertEqual(self.rpc('db/table/count', 'messages'), 0)
self.assertEqual(self.rpc('db/table/count', 'visits'), 0)
self.test_rpc_campaign_new()
self.assertEqual(self.rpc('db/table/count', 'campaigns'), 1)
def test_rpc_table_view(self):
self.test_rpc_campaign_new()
campaign = self.rpc('db/table/view', 'campaigns')
self.assertTrue(bool(campaign))
self.assertEqual(len(campaign['rows']), 1)
self.assertEqual(len(campaign['rows'][0]), len(db_models.database_tables['campaigns']))
self.assertEqual(sorted(campaign['columns']), sorted(db_models.database_tables['campaigns']))
def test_rpc_set_value(self):
campaign_name = random_string(10)
new_campaign_name = random_string(10)
campaign_id = self.rpc('campaign/new', campaign_name)
campaign = self.rpc.remote_table_row('campaigns', campaign_id)
self.assertEqual(campaign.id, campaign_id)
self.assertEqual(campaign.name, campaign_name)
self.rpc('db/table/set', 'campaigns', campaign_id, 'name', new_campaign_name)
campaign = self.rpc.remote_table_row('campaigns', campaign_id)
self.assertEqual(campaign.name, new_campaign_name)
def test_rpc_version(self):
response = self.rpc('version')
self.assertTrue('version' in response)
self.assertTrue('version_info' in response)
self.assertEqual(response['version'], version.version)
self.assertEqual(response['version_info']['major'], version.version_info.major)
self.assertEqual(response['version_info']['minor'], version.version_info.minor)
self.assertEqual(response['version_info']['micro'], version.version_info.micro)
if __name__ == '__main__':
unittest.main()
|
hdemeyer/king-phisher
|
tests/server/server_rpc.py
|
Python
|
bsd-3-clause
| 7,130
|
"""
This module converts requested URLs to callback view functions.
URLResolver is the main class here. Its resolve() method takes a URL (as
a string) and returns a ResolverMatch object which provides access to all
attributes of the resolved URL match.
"""
import functools
import inspect
import re
import threading
from importlib import import_module
from urllib.parse import quote
from django.conf import settings
from django.core.checks import Error, Warning
from django.core.checks.urls import check_resolver
from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
from django.utils.datastructures import MultiValueDict
from django.utils.functional import cached_property
from django.utils.http import RFC3986_SUBDELIMS, escape_leading_slashes
from django.utils.regex_helper import normalize
from django.utils.translation import get_language
from .converters import get_converter
from .exceptions import NoReverseMatch, Resolver404
from .utils import get_callable
class ResolverMatch:
def __init__(self, func, args, kwargs, url_name=None, app_names=None, namespaces=None, route=None):
self.func = func
self.args = args
self.kwargs = kwargs
self.url_name = url_name
self.route = route
# If a URLRegexResolver doesn't have a namespace or app_name, it passes
# in an empty value.
self.app_names = [x for x in app_names if x] if app_names else []
self.app_name = ':'.join(self.app_names)
self.namespaces = [x for x in namespaces if x] if namespaces else []
self.namespace = ':'.join(self.namespaces)
if not hasattr(func, '__name__'):
# A class-based view
self._func_path = func.__class__.__module__ + '.' + func.__class__.__name__
else:
# A function-based view
self._func_path = func.__module__ + '.' + func.__name__
view_path = url_name or self._func_path
self.view_name = ':'.join(self.namespaces + [view_path])
def __getitem__(self, index):
return (self.func, self.args, self.kwargs)[index]
def __repr__(self):
return "ResolverMatch(func=%s, args=%s, kwargs=%s, url_name=%s, app_names=%s, namespaces=%s, route=%s)" % (
self._func_path, self.args, self.kwargs, self.url_name,
self.app_names, self.namespaces, self.route,
)
@functools.lru_cache(maxsize=None)
def get_resolver(urlconf=None):
if urlconf is None:
urlconf = settings.ROOT_URLCONF
return URLResolver(RegexPattern(r'^/'), urlconf)
@functools.lru_cache(maxsize=None)
def get_ns_resolver(ns_pattern, resolver, converters):
# Build a namespaced resolver for the given parent URLconf pattern.
# This makes it possible to have captured parameters in the parent
# URLconf pattern.
pattern = RegexPattern(ns_pattern)
pattern.converters = dict(converters)
ns_resolver = URLResolver(pattern, resolver.url_patterns)
return URLResolver(RegexPattern(r'^/'), [ns_resolver])
class LocaleRegexDescriptor:
def __init__(self, attr):
self.attr = attr
def __get__(self, instance, cls=None):
"""
Return a compiled regular expression based on the active language.
"""
if instance is None:
return self
# As a performance optimization, if the given regex string is a regular
# string (not a lazily-translated string proxy), compile it once and
# avoid per-language compilation.
pattern = getattr(instance, self.attr)
if isinstance(pattern, str):
instance.__dict__['regex'] = instance._compile(pattern)
return instance.__dict__['regex']
language_code = get_language()
if language_code not in instance._regex_dict:
instance._regex_dict[language_code] = instance._compile(str(pattern))
return instance._regex_dict[language_code]
class CheckURLMixin:
def describe(self):
"""
Format the URL pattern for display in warning messages.
"""
description = "'{}'".format(self)
if self.name:
description += " [name='{}']".format(self.name)
return description
def _check_pattern_startswith_slash(self):
"""
Check that the pattern does not begin with a forward slash.
"""
regex_pattern = self.regex.pattern
if not settings.APPEND_SLASH:
# Skip check as it can be useful to start a URL pattern with a slash
# when APPEND_SLASH=False.
return []
if regex_pattern.startswith(('/', '^/', '^\\/')) and not regex_pattern.endswith('/'):
warning = Warning(
"Your URL pattern {} has a route beginning with a '/'. Remove this "
"slash as it is unnecessary. If this pattern is targeted in an "
"include(), ensure the include() pattern has a trailing '/'.".format(
self.describe()
),
id="urls.W002",
)
return [warning]
else:
return []
class RegexPattern(CheckURLMixin):
regex = LocaleRegexDescriptor('_regex')
def __init__(self, regex, name=None, is_endpoint=False):
self._regex = regex
self._regex_dict = {}
self._is_endpoint = is_endpoint
self.name = name
self.converters = {}
def match(self, path):
match = self.regex.search(path)
if match:
# If there are any named groups, use those as kwargs, ignoring
# non-named groups. Otherwise, pass all non-named arguments as
# positional arguments.
kwargs = match.groupdict()
args = () if kwargs else match.groups()
return path[match.end():], args, kwargs
return None
def check(self):
warnings = []
warnings.extend(self._check_pattern_startswith_slash())
if not self._is_endpoint:
warnings.extend(self._check_include_trailing_dollar())
return warnings
def _check_include_trailing_dollar(self):
regex_pattern = self.regex.pattern
if regex_pattern.endswith('$') and not regex_pattern.endswith(r'\$'):
return [Warning(
"Your URL pattern {} uses include with a route ending with a '$'. "
"Remove the dollar from the route to avoid problems including "
"URLs.".format(self.describe()),
id='urls.W001',
)]
else:
return []
def _compile(self, regex):
"""Compile and return the given regular expression."""
try:
return re.compile(regex)
except re.error as e:
raise ImproperlyConfigured(
'"%s" is not a valid regular expression: %s' % (regex, e)
)
def __str__(self):
return str(self._regex)
_PATH_PARAMETER_COMPONENT_RE = re.compile(
r'<(?:(?P<converter>[^>:]+):)?(?P<parameter>\w+)>'
)
def _route_to_regex(route, is_endpoint=False):
"""
Convert a path pattern into a regular expression. Return the regular
expression and a dictionary mapping the capture names to the converters.
For example, 'foo/<int:pk>' returns '^foo\\/(?P<pk>[0-9]+)'
and {'pk': <django.urls.converters.IntConverter>}.
"""
original_route = route
parts = ['^']
converters = {}
while True:
match = _PATH_PARAMETER_COMPONENT_RE.search(route)
if not match:
parts.append(re.escape(route))
break
parts.append(re.escape(route[:match.start()]))
route = route[match.end():]
parameter = match.group('parameter')
if not parameter.isidentifier():
raise ImproperlyConfigured(
"URL route '%s' uses parameter name %r which isn't a valid "
"Python identifier." % (original_route, parameter)
)
raw_converter = match.group('converter')
if raw_converter is None:
# If a converter isn't specified, the default is `str`.
raw_converter = 'str'
try:
converter = get_converter(raw_converter)
except KeyError as e:
raise ImproperlyConfigured(
"URL route '%s' uses invalid converter %s." % (original_route, e)
)
converters[parameter] = converter
parts.append('(?P<' + parameter + '>' + converter.regex + ')')
if is_endpoint:
parts.append('$')
return ''.join(parts), converters
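# For example (an illustrative sketch of the conversion performed above):
#   _route_to_regex('articles/<int:year>/', is_endpoint=True)
#   -> ('^articles\\/(?P<year>[0-9]+)\\/$', {'year': <IntConverter instance>})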
class RoutePattern(CheckURLMixin):
regex = LocaleRegexDescriptor('_route')
def __init__(self, route, name=None, is_endpoint=False):
self._route = route
self._regex_dict = {}
self._is_endpoint = is_endpoint
self.name = name
self.converters = _route_to_regex(str(route), is_endpoint)[1]
def match(self, path):
match = self.regex.search(path)
if match:
# RoutePattern doesn't allow non-named groups so args are ignored.
kwargs = match.groupdict()
for key, value in kwargs.items():
converter = self.converters[key]
try:
kwargs[key] = converter.to_python(value)
except ValueError:
return None
return path[match.end():], (), kwargs
return None
def check(self):
warnings = self._check_pattern_startswith_slash()
route = self._route
if '(?P<' in route or route.startswith('^') or route.endswith('$'):
warnings.append(Warning(
"Your URL pattern {} has a route that contains '(?P<', begins "
"with a '^', or ends with a '$'. This was likely an oversight "
"when migrating to django.urls.path().".format(self.describe()),
id='2_0.W001',
))
return warnings
def _compile(self, route):
return re.compile(_route_to_regex(route, self._is_endpoint)[0])
def __str__(self):
return str(self._route)
class LocalePrefixPattern:
def __init__(self, prefix_default_language=True):
self.prefix_default_language = prefix_default_language
self.converters = {}
@property
def regex(self):
# This is only used by reverse() and cached in _reverse_dict.
return re.compile(self.language_prefix)
@property
def language_prefix(self):
language_code = get_language() or settings.LANGUAGE_CODE
if language_code == settings.LANGUAGE_CODE and not self.prefix_default_language:
return ''
else:
return '%s/' % language_code
def match(self, path):
language_prefix = self.language_prefix
if path.startswith(language_prefix):
return path[len(language_prefix):], (), {}
return None
def check(self):
return []
def describe(self):
return "'{}'".format(self)
def __str__(self):
return self.language_prefix
class URLPattern:
def __init__(self, pattern, callback, default_args=None, name=None):
self.pattern = pattern
self.callback = callback # the view
self.default_args = default_args or {}
self.name = name
def __repr__(self):
return '<%s %s>' % (self.__class__.__name__, self.pattern.describe())
def check(self):
warnings = self._check_pattern_name()
warnings.extend(self.pattern.check())
return warnings
def _check_pattern_name(self):
"""
Check that the pattern name does not contain a colon.
"""
if self.pattern.name is not None and ":" in self.pattern.name:
warning = Warning(
"Your URL pattern {} has a name including a ':'. Remove the colon, to "
"avoid ambiguous namespace references.".format(self.pattern.describe()),
id="urls.W003",
)
return [warning]
else:
return []
def resolve(self, path):
match = self.pattern.match(path)
if match:
new_path, args, kwargs = match
# Pass any extra_kwargs as **kwargs.
kwargs.update(self.default_args)
return ResolverMatch(self.callback, args, kwargs, self.pattern.name, route=str(self.pattern))
@cached_property
def lookup_str(self):
"""
A string that identifies the view (e.g. 'path.to.view_function' or
'path.to.ClassBasedView').
"""
callback = self.callback
if isinstance(callback, functools.partial):
callback = callback.func
if not hasattr(callback, '__name__'):
return callback.__module__ + "." + callback.__class__.__name__
return callback.__module__ + "." + callback.__qualname__
class URLResolver:
def __init__(self, pattern, urlconf_name, default_kwargs=None, app_name=None, namespace=None):
self.pattern = pattern
# urlconf_name is the dotted Python path to the module defining
# urlpatterns. It may also be an object with an urlpatterns attribute
# or urlpatterns itself.
self.urlconf_name = urlconf_name
self.callback = None
self.default_kwargs = default_kwargs or {}
self.namespace = namespace
self.app_name = app_name
self._reverse_dict = {}
self._namespace_dict = {}
self._app_dict = {}
# set of dotted paths to all functions and classes that are used in
# urlpatterns
self._callback_strs = set()
self._populated = False
self._local = threading.local()
self._urlconf_lock = threading.Lock()
def __repr__(self):
if isinstance(self.urlconf_name, list) and self.urlconf_name:
# Don't bother to output the whole list, it can be huge
urlconf_repr = '<%s list>' % self.urlconf_name[0].__class__.__name__
else:
urlconf_repr = repr(self.urlconf_name)
return '<%s %s (%s:%s) %s>' % (
self.__class__.__name__, urlconf_repr, self.app_name,
self.namespace, self.pattern.describe(),
)
def check(self):
messages = []
for pattern in self.url_patterns:
messages.extend(check_resolver(pattern))
messages.extend(self._check_custom_error_handlers())
return messages or self.pattern.check()
def _check_custom_error_handlers(self):
messages = []
# All handlers take (request, exception) arguments except handler500
# which takes (request).
for status_code, num_parameters in [(400, 2), (403, 2), (404, 2), (500, 1)]:
try:
handler, param_dict = self.resolve_error_handler(status_code)
except (ImportError, ViewDoesNotExist) as e:
path = getattr(self.urlconf_module, 'handler%s' % status_code)
msg = (
"The custom handler{status_code} view '{path}' could not be imported."
).format(status_code=status_code, path=path)
messages.append(Error(msg, hint=str(e), id='urls.E008'))
continue
signature = inspect.signature(handler)
args = [None] * num_parameters
try:
signature.bind(*args)
except TypeError:
msg = (
"The custom handler{status_code} view '{path}' does not "
"take the correct number of arguments ({args})."
).format(
status_code=status_code,
path=handler.__module__ + '.' + handler.__qualname__,
args='request, exception' if num_parameters == 2 else 'request',
)
messages.append(Error(msg, id='urls.E007'))
return messages
def _populate(self):
# Short-circuit if called recursively in this thread to prevent
# infinite recursion. Concurrent threads may call this at the same
# time and will need to continue, so set 'populating' on a
# thread-local variable.
if getattr(self._local, 'populating', False):
return
try:
self._local.populating = True
lookups = MultiValueDict()
namespaces = {}
apps = {}
language_code = get_language()
for url_pattern in reversed(self.url_patterns):
p_pattern = url_pattern.pattern.regex.pattern
if p_pattern.startswith('^'):
p_pattern = p_pattern[1:]
if isinstance(url_pattern, URLPattern):
self._callback_strs.add(url_pattern.lookup_str)
bits = normalize(url_pattern.pattern.regex.pattern)
lookups.appendlist(
url_pattern.callback,
(bits, p_pattern, url_pattern.default_args, url_pattern.pattern.converters)
)
if url_pattern.name is not None:
lookups.appendlist(
url_pattern.name,
(bits, p_pattern, url_pattern.default_args, url_pattern.pattern.converters)
)
else: # url_pattern is a URLResolver.
url_pattern._populate()
if url_pattern.app_name:
apps.setdefault(url_pattern.app_name, []).append(url_pattern.namespace)
namespaces[url_pattern.namespace] = (p_pattern, url_pattern)
else:
for name in url_pattern.reverse_dict:
for matches, pat, defaults, converters in url_pattern.reverse_dict.getlist(name):
new_matches = normalize(p_pattern + pat)
lookups.appendlist(
name,
(
new_matches,
p_pattern + pat,
{**defaults, **url_pattern.default_kwargs},
{**self.pattern.converters, **url_pattern.pattern.converters, **converters}
)
)
for namespace, (prefix, sub_pattern) in url_pattern.namespace_dict.items():
current_converters = url_pattern.pattern.converters
sub_pattern.pattern.converters.update(current_converters)
namespaces[namespace] = (p_pattern + prefix, sub_pattern)
for app_name, namespace_list in url_pattern.app_dict.items():
apps.setdefault(app_name, []).extend(namespace_list)
self._callback_strs.update(url_pattern._callback_strs)
self._namespace_dict[language_code] = namespaces
self._app_dict[language_code] = apps
self._reverse_dict[language_code] = lookups
self._populated = True
finally:
self._local.populating = False
@property
def reverse_dict(self):
language_code = get_language()
if language_code not in self._reverse_dict:
self._populate()
return self._reverse_dict[language_code]
@property
def namespace_dict(self):
language_code = get_language()
if language_code not in self._namespace_dict:
self._populate()
return self._namespace_dict[language_code]
@property
def app_dict(self):
language_code = get_language()
if language_code not in self._app_dict:
self._populate()
return self._app_dict[language_code]
@staticmethod
def _join_route(route1, route2):
"""Join two routes, without the starting ^ in the second route."""
if not route1:
return route2
if route2.startswith('^'):
route2 = route2[1:]
return route1 + route2
def _is_callback(self, name):
if not self._populated:
self._populate()
return name in self._callback_strs
def resolve(self, path):
path = str(path) # path may be a reverse_lazy object
tried = []
match = self.pattern.match(path)
if match:
new_path, args, kwargs = match
for pattern in self.url_patterns:
try:
sub_match = pattern.resolve(new_path)
except Resolver404 as e:
sub_tried = e.args[0].get('tried')
if sub_tried is not None:
tried.extend([pattern] + t for t in sub_tried)
else:
tried.append([pattern])
else:
if sub_match:
# Merge captured arguments in match with submatch
sub_match_dict = {**kwargs, **self.default_kwargs}
# Update the sub_match_dict with the kwargs from the sub_match.
sub_match_dict.update(sub_match.kwargs)
# If there are *any* named groups, ignore all non-named groups.
# Otherwise, pass all non-named arguments as positional arguments.
sub_match_args = sub_match.args
if not sub_match_dict:
sub_match_args = args + sub_match.args
current_route = '' if isinstance(pattern, URLPattern) else str(pattern.pattern)
return ResolverMatch(
sub_match.func,
sub_match_args,
sub_match_dict,
sub_match.url_name,
[self.app_name] + sub_match.app_names,
[self.namespace] + sub_match.namespaces,
self._join_route(current_route, sub_match.route),
)
tried.append([pattern])
raise Resolver404({'tried': tried, 'path': new_path})
raise Resolver404({'path': path})
@cached_property
def urlconf_module(self):
# import_module is not thread safe if the module throws an exception
# during import, and can return an empty module object in Python < 3.6
# (see https://bugs.python.org/issue36284).
with self._urlconf_lock:
if isinstance(self.urlconf_name, str):
return import_module(self.urlconf_name)
else:
return self.urlconf_name
@cached_property
def url_patterns(self):
# urlconf_module might be a valid set of patterns, so we default to it
patterns = getattr(self.urlconf_module, "urlpatterns", self.urlconf_module)
try:
iter(patterns)
except TypeError:
msg = (
"The included URLconf '{name}' does not appear to have any "
"patterns in it. If you see valid patterns in the file then "
"the issue is probably caused by a circular import."
)
raise ImproperlyConfigured(msg.format(name=self.urlconf_name))
return patterns
def resolve_error_handler(self, view_type):
callback = getattr(self.urlconf_module, 'handler%s' % view_type, None)
if not callback:
# No handler specified in file; use lazy import, since
# django.conf.urls imports this file.
from django.conf import urls
callback = getattr(urls, 'handler%s' % view_type)
return get_callable(callback), {}
def reverse(self, lookup_view, *args, **kwargs):
return self._reverse_with_prefix(lookup_view, '', *args, **kwargs)
def _reverse_with_prefix(self, lookup_view, _prefix, *args, **kwargs):
if args and kwargs:
raise ValueError("Don't mix *args and **kwargs in call to reverse()!")
if not self._populated:
self._populate()
possibilities = self.reverse_dict.getlist(lookup_view)
for possibility, pattern, defaults, converters in possibilities:
for result, params in possibility:
if args:
if len(args) != len(params):
continue
candidate_subs = dict(zip(params, args))
else:
if set(kwargs).symmetric_difference(params).difference(defaults):
continue
if any(kwargs.get(k, v) != v for k, v in defaults.items()):
continue
candidate_subs = kwargs
# Convert the candidate subs to text using Converter.to_url().
text_candidate_subs = {}
for k, v in candidate_subs.items():
if k in converters:
text_candidate_subs[k] = converters[k].to_url(v)
else:
text_candidate_subs[k] = str(v)
# WSGI provides decoded URLs, without %xx escapes, and the URL
# resolver operates on such URLs. First substitute arguments
# without quoting to build a decoded URL and look for a match.
# Then, if we have a match, redo the substitution with quoted
# arguments in order to return a properly encoded URL.
candidate_pat = _prefix.replace('%', '%%') + result
if re.search('^%s%s' % (re.escape(_prefix), pattern), candidate_pat % text_candidate_subs):
# safe characters from `pchar` definition of RFC 3986
url = quote(candidate_pat % text_candidate_subs, safe=RFC3986_SUBDELIMS + '/~:@')
# Don't allow construction of scheme relative urls.
return escape_leading_slashes(url)
# lookup_view can be URL name or callable, but callables are not
# friendly in error messages.
m = getattr(lookup_view, '__module__', None)
n = getattr(lookup_view, '__name__', None)
if m is not None and n is not None:
lookup_view_s = "%s.%s" % (m, n)
else:
lookup_view_s = lookup_view
patterns = [pattern for (_, pattern, _, _) in possibilities]
if patterns:
if args:
arg_msg = "arguments '%s'" % (args,)
elif kwargs:
arg_msg = "keyword arguments '%s'" % (kwargs,)
else:
arg_msg = "no arguments"
msg = (
"Reverse for '%s' with %s not found. %d pattern(s) tried: %s" %
(lookup_view_s, arg_msg, len(patterns), patterns)
)
else:
msg = (
"Reverse for '%(view)s' not found. '%(view)s' is not "
"a valid view function or pattern name." % {'view': lookup_view_s}
)
raise NoReverseMatch(msg)
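# Usage sketch (illustrative; the view and URLconf are hypothetical): given a
# URLconf containing path('articles/<int:year>/', views.year_archive,
# name='archive'), resolving and reversing operate over the same pattern data:
#
#   resolver.resolve('/articles/2003/')
#   # -> ResolverMatch(func=year_archive, kwargs={'year': 2003}, ...)
#   resolver._reverse_with_prefix('archive', '/', year=2003)
#   # -> '/articles/2003/'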
|
sametmax/Django--an-app-at-a-time
|
ignore_this_directory/django/urls/resolvers.py
|
Python
|
mit
| 27,298
|
import click
from ghutil.edit import edit_as_mail
from ghutil.types import PullRequest
from ghutil.util import optional
@click.command()
@optional("--base", metavar="BRANCH", help="Change branch to pull into")
@optional("-b", "--body", type=click.File(), help="File containing new PR body")
@optional(
"-M",
"--maintainer-can-modify/--maintainer-no-modify",
"--can-modify/--no-modify",
"maintainer_can_modify",
help="Allow maintainers on the base repository to modify the PR?",
)
@optional("--open/--closed", " /--close", help="Open/close the PR")
@optional("-T", "--title", help="New PR title")
@PullRequest.argument("pull_request")
def cli(pull_request, **edited):
"""
Edit a pull request.
If one or more options are given on the command line, the pull request is
modified accordingly. Otherwise, an editor is started, allowing you to
modify the pull request's details as a text file.
"""
if not edited:
data = pull_request.data.copy()
data["base"] = data["base"]["ref"]
data["open"] = data["state"] == "open"
edited = edit_as_mail(
data,
"title open base maintainer_can_modify",
"body",
)
if not edited:
click.echo("No modifications made; exiting")
return
elif "body" in edited:
edited["body"] = edited["body"].read()
if "open" in edited:
edited["state"] = "open" if edited.pop("open") else "closed"
pull_request.patch(json=edited)
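# Example invocations (a sketch; assumes ghutil's console entry point is named
# `gh` and that the PR argument spellings below are accepted by
# PullRequest.argument):
#
#   gh pr edit jwodder/ghutil/42 --title "New title" --open
#   gh pr edit 42                  # no options: edit the details in $EDITOR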
|
jwodder/ghutil
|
src/ghutil/cli/pr/edit.py
|
Python
|
mit
| 1,523
|
import numpy
class BiomeSimulation(object):
@staticmethod
def is_applicable(world):
return world.has_humidity() and world.has_temperature() and \
(not world.has_biome())
@staticmethod
def execute(world, seed):
assert seed is not None
w = world
width = world.width
height = world.height
ocean = world.ocean
cm = {}
biome_cm = {}
        biome = numpy.zeros((height, width), dtype=object)  # this is still kind of expensive memory-wise
for y in range(height):
for x in range(width):
if ocean[y, x]:
biome[y, x] = 'ocean'
else:
if w.is_temperature_polar((x, y)):
if w.is_humidity_superarid((x, y)):
biome[y, x] = 'polar desert'
else:
biome[y, x] = 'ice'
elif w.is_temperature_alpine((x, y)):
if w.is_humidity_superarid((x, y)):
biome[y, x] = 'subpolar dry tundra'
elif w.is_humidity_perarid((x, y)):
biome[y, x] = 'subpolar moist tundra'
elif w.is_humidity_arid((x, y)):
biome[y, x] = 'subpolar wet tundra'
else:
biome[y, x] = 'subpolar rain tundra'
elif w.is_temperature_boreal((x, y)):
if w.is_humidity_superarid((x, y)):
biome[y, x] = 'boreal desert'
elif w.is_humidity_perarid((x, y)):
biome[y, x] = 'boreal dry scrub'
elif w.is_humidity_arid((x, y)):
biome[y, x] = 'boreal moist forest'
elif w.is_humidity_semiarid((x, y)):
biome[y, x] = 'boreal wet forest'
else:
biome[y, x] = 'boreal rain forest'
elif w.is_temperature_cool((x, y)):
if w.is_humidity_superarid((x, y)):
biome[y, x] = 'cool temperate desert'
elif w.is_humidity_perarid((x, y)):
biome[y, x] = 'cool temperate desert scrub'
elif w.is_humidity_arid((x, y)):
biome[y, x] = 'cool temperate steppe'
elif w.is_humidity_semiarid((x, y)):
biome[y, x] = 'cool temperate moist forest'
elif w.is_humidity_subhumid((x, y)):
biome[y, x] = 'cool temperate wet forest'
else:
biome[y, x] = 'cool temperate rain forest'
elif w.is_temperature_warm((x, y)):
if w.is_humidity_superarid((x, y)):
biome[y, x] = 'warm temperate desert'
elif w.is_humidity_perarid((x, y)):
biome[y, x] = 'warm temperate desert scrub'
elif w.is_humidity_arid((x, y)):
biome[y, x] = 'warm temperate thorn scrub'
elif w.is_humidity_semiarid((x, y)):
biome[y, x] = 'warm temperate dry forest'
elif w.is_humidity_subhumid((x, y)):
biome[y, x] = 'warm temperate moist forest'
elif w.is_humidity_humid((x, y)):
biome[y, x] = 'warm temperate wet forest'
else:
biome[y, x] = 'warm temperate rain forest'
elif w.is_temperature_subtropical((x, y)):
if w.is_humidity_superarid((x, y)):
biome[y, x] = 'subtropical desert'
elif w.is_humidity_perarid((x, y)):
biome[y, x] = 'subtropical desert scrub'
elif w.is_humidity_arid((x, y)):
biome[y, x] = 'subtropical thorn woodland'
elif w.is_humidity_semiarid((x, y)):
biome[y, x] = 'subtropical dry forest'
elif w.is_humidity_subhumid((x, y)):
biome[y, x] = 'subtropical moist forest'
elif w.is_humidity_humid((x, y)):
biome[y, x] = 'subtropical wet forest'
else:
biome[y, x] = 'subtropical rain forest'
elif w.is_temperature_tropical((x, y)):
if w.is_humidity_superarid((x, y)):
biome[y, x] = 'tropical desert'
elif w.is_humidity_perarid((x, y)):
biome[y, x] = 'tropical desert scrub'
elif w.is_humidity_arid((x, y)):
biome[y, x] = 'tropical thorn woodland'
elif w.is_humidity_semiarid((x, y)):
biome[y, x] = 'tropical very dry forest'
elif w.is_humidity_subhumid((x, y)):
biome[y, x] = 'tropical dry forest'
elif w.is_humidity_humid((x, y)):
biome[y, x] = 'tropical moist forest'
elif w.is_humidity_perhumid((x, y)):
biome[y, x] = 'tropical wet forest'
else:
biome[y, x] = 'tropical rain forest'
else:
biome[y, x] = 'bare rock'
                    if biome[y, x] not in biome_cm:
biome_cm[biome[y, x]] = 0
biome_cm[biome[y, x]] += 1
w.set_biome(biome)
return cm, biome_cm
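# Minimal usage sketch (the `world` object is hypothetical but must satisfy
# the predicates used above, e.g. is_temperature_polar / is_humidity_arid):
#
#   if BiomeSimulation.is_applicable(world):
#       cm, biome_counts = BiomeSimulation.execute(world, seed=42)
#       # biome_counts maps biome name -> cell count, e.g.
#       # {'ocean': 5123, 'tropical rain forest': 87, ...}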
|
esampson/worldengine
|
worldengine/simulations/biome.py
|
Python
|
mit
| 6,058
|
import sys
if __name__ == "__main__":
# Parse command line arguments
if len(sys.argv) < 2:
sys.exit("python {} <datasetFilename> {{<maxPoints>}}".format(sys.argv[0]))
datasetFilename = sys.argv[1]
if len(sys.argv) >= 3:
maxPoints = int(sys.argv[2])
else:
maxPoints = None
# Perform initial pass through file to determine line count (i.e. # of points)
lineCount = 0
with open(datasetFilename, "r") as f:
line = f.readline()
while line:
lineCount += 1
line = f.readline()
# Read first line and use to make assumption about the dimensionality of each point
numDimensions = 0
with open(datasetFilename, "r") as f:
firstLine = f.readline()
numDimensions = len(firstLine.split())
# If dimensionality of dataset is 0, print error message and exit
if numDimensions == 0:
sys.exit("Could not determine dimensionality of dataset")
    # Now that the initial pass has determined the line count, output the
    # dataset header, which defines the dimensionality of the data and the
    # number of points
    if maxPoints:
        numPoints = min(lineCount, maxPoints)
    else:
        numPoints = lineCount
    print("{} {}".format(numDimensions, numPoints))
# Read entire file line-by-line, printing out each line as a point
with open(datasetFilename, "r") as f:
pointsRead = 0
line = f.readline()
while line:
fields = line.split()
floatFields = [ str(float(x)) for x in fields ]
print(" ".join(floatFields))
            # Stop reading the file if the maximum number of points has been read
pointsRead += 1
if maxPoints and pointsRead >= maxPoints:
break
# Read next line of file
line = f.readline()
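# Example invocation (a sketch): convert a whitespace-separated dataset to the
# headered point format, capping the output at 1000 points:
#
#   python read_multifield_dataset.py dataset.txt 1000 > points.dat
#
# The first line written is '<numDimensions> <numPoints>', followed by one
# float-formatted point per line.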
|
DonaldWhyte/multidimensional-search-fyp
|
scripts/read_multifield_dataset.py
|
Python
|
mit
| 1,612
|
# This file is part of Copernicus
# http://www.copernicus-computing.org/
#
# Copyright (C) 2011, Sander Pronk, Iman Pouya, Erik Lindahl, and others.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as published
# by the Free Software Foundation
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import logging
import threading
import os
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
try:
from pympler import muppy
from pympler import summary
from pympler import refbrowser
profile=True
except:
profile=False
import cpc.util
import apperror
import keywords
import instance
import vtype
import function
import value
import active_inst
import active_network
import task
import transaction
import lib
import readxml
from cpc.dataflow.value import ValError
log=logging.getLogger(__name__)
class ProjectError(apperror.ApplicationError):
pass
class Project(object):
"""The top-level class describing a running function network together
with all function definitions. """
def __init__(self, name, basedir, conf, queue=None, cmdQueue=None):
"""Initializes an empty project
name = the name of the project
basedir = the full (existing) base directory of the project
queue = an optional shared task queue
"""
# The Update lock prevents multiple threads from updating values
# in the same project. This probably has less impact on performance
# than it sounds: Python is single-threaded at its core, and only
# emulates multithreading. Also: this would only be a problem if
# updating values & scheduling tasks is the rate-limiting step.
self.updateLock=threading.RLock()
self.conf=conf
self.name=name
self.basedir=basedir
if not os.path.exists(self.basedir):
os.mkdir(self.basedir)
log.debug("Creating project %s"%name)
if queue is None:
log.debug("Creating new task queue %s"%name)
self.queue=task.TaskQueue()
else:
self.queue=queue
self.cmdQueue=cmdQueue
# the file list
self.fileList=value.FileList(basedir)
# create the active network (the top-level network)
self.network=active_network.ActiveNetwork(self, None, self.queue,
"", self.updateLock)
# now take care of imports. First get the import path
self.topLevelImport=lib.ImportLibrary("", "", self.network)
# create a list of function definitions
self.functions=dict()
# create a list of already performed imports
self.imports=lib.ImportList()
# and this is where we can start importing builtins, etc.
self.inputDir=os.path.join(self.basedir, "_inputs")
if not os.path.exists(self.inputDir):
os.mkdir(self.inputDir)
self.inputNr=0
# a list of scheduled changes and its lock
self.transactionStackLock=threading.Lock()
tl=transaction.Transaction(self, None, self.network,
self.topLevelImport)
self.transactionStack=[tl]
def getName(self):
"""Return the project name. This is a const property"""
return self.name
def getBasedir(self):
"""Return the base directory. This is a const property"""
return self.basedir
def getNewInputSubDir(self):
"""Return the name of a new input subdir to store new externally
set input files in. NOTE: it won't be created."""
with self.updateLock:
newsub=os.path.join(self.inputDir, "%04d"%self.inputNr)
self.inputNr+=1
while os.path.exists(newsub):
newsub=os.path.join(self.inputDir, "%04d"%self.inputNr)
self.inputNr+=1
return newsub
def getFileList(self):
"""Get the project's file list. This pointer is a const property,
and the file list has its own locking mechanism."""
return self.fileList
def getTopLevelLib(self):
"""Get the top-level import library"""
return self.topLevelImport
def getFunction(self, fname):
"""Return the function object associated with a name."""
with self.updateLock:
try:
return self.functions[fname]
except KeyError:
raise ProjectError("function with name %s is not defined."%
fname)
def addFunction(self, function):
"""Add a function to the project."""
with self.updateLock:
name=function.getName()
            if name in self.functions:
raise ProjectError("function with name %s already exists."%name)
self.functions[name]=function
def getImportList(self):
"""Get the function import list."""
return self.imports
def getNamedValue(self, itemname):
"""Get a value for a specific name according to the rule
[instance]:[instance].[ioitem]."""
with self.updateLock:
itemlist=vtype.parseItemList(itemname)
item=self.getSubValue(itemlist)
return item
def _tryImmediateTransaction(self, outf):
"""Perform an immediate transaction, if the transaction stack has
length 1 (i.e. the last operation was on the topmost level)..
NOTE: assumes a locked tranactionStackLock"""
if len(self.transactionStack) == 1:
self.transactionStack[0].run(outf)
# now replace the transaction with a fresh one.
self.transactionStack[0] = transaction.Transaction(self, None,
self.network,
self.topLevelImport)
return True
return False
def scheduleSet(self, itemname, literal, outf, sourceType=None,
printName=None):
"""Add an instance of a set in the transaction schedule."""
itemname=keywords.fixID(itemname)
with self.transactionStackLock:
sv=self.transactionStack[-1].addSetValue(itemname, literal,
sourceType, printName)
if not self._tryImmediateTransaction(outf):
sv.describe(outf)
def scheduleConnect(self, src, dst, outf):
"""Add an instance of a connect in the transaction schedule."""
src=keywords.fixID(src)
dst=keywords.fixID(dst)
with self.transactionStackLock:
ac=self.transactionStack[-1].addConnection(src, dst)
if not self._tryImmediateTransaction(outf):
ac.describe(outf)
def beginTransaction(self, outf):
"""Create a new transaction list."""
tl=transaction.Transaction(self, None, self.network,
self.topLevelImport)
with self.transactionStackLock:
self.transactionStack.append(tl)
level=len(self.transactionStack)-1
outf.write("Beginning transaction level %d"%level)
def commit(self, outf):
"""Commit a set of changes scheduled with scheduleSet()"""
with self.transactionStackLock:
if len(self.transactionStack) > 1:
li=self.transactionStack.pop(-1)
li.run(outf)
else:
raise ProjectError("No transactions to commit.")
def rollback(self, outf):
"""Cancel a transaction."""
with self.transactionStackLock:
li=len(self.transactionStack) - 1
if li > 0:
outf.write("Canceling transaction level %d"%(li+1))
self.transactionStack.pop(li)
else:
raise ProjectError("No transactions to cancel.")
def getNamedInstance(self, instname):
pathname=keywords.fixID(instname)
with self.updateLock:
itemlist=vtype.parseItemList(pathname)
item=self.getSubValue(itemlist)
if not isinstance(item, active_inst.ActiveInstance):
raise ProjectError("%s is not an active instance"%instname)
return item
def getNamedItemList(self, pathname):
"""Get an list based on a path name according to the rule
[instance]:[instance]"""
pathname=keywords.fixID(pathname)
with self.updateLock:
itemlist=vtype.parseItemList(pathname)
item=self.getSubValue(itemlist)
ret=dict()
if item is None:
ret["type"]="Not found: "
ret["name"]=pathname
elif isinstance(item, value.Value):
# it is an active I/O item
tp=item.getType()
ret["type"]="input/output value"
ret["name"]=pathname
if tp is not None:
ret["typename"]=tp.getName()
ret["value-type"]=tp.jsonDescribe()
else:
ret["typename"]="Not found"
if tp.isSubtype(vtype.recordType):
ret["subitems"]=[]
keys=tp.getMemberKeys()
for key in keys:
mem=tp.getRecordMember(key)
subi=dict()
subi["name"]=key
subi["type"]=mem.type.getName()
#subi["value-type"]=mem.type.jsonDescribe()
optstr=""
conststr=""
if mem.isOptional():
subi["optional"]=1
if mem.isConst():
subi["const"]=1
if mem.isComplete():
subi["complete"]=1
if mem.desc is not None:
subi["desc"]=mem.desc.get()
ret["subitems"].append( subi )
elif (tp.isSubtype(vtype.arrayType) or
tp.isSubtype(vtype.dictType)):
mem=tp.getMembers()
subi={"type" : mem.getName()}
ret["subitems"]=[ subi ]
elif isinstance(item, active_inst.ActiveInstance):
ret["type"]="instance"
ret["name"]=item.getCanonicalName()
ret["fn_name"]=item.function.getFullName()
ret["inputs" ]=item.getInputs().getSubValueList()
ret["outputs" ]=item.getOutputs().getSubValueList()
net=item.getNet()
if net is not None:
ret["instances" ]=net.getActiveInstanceList(False,
False)
ret["state"]=item.getPropagatedStateStr()
cputime=int(item.getCputime())
if cputime > 0:
ret["cputime" ]=str(cputime)
cputime=int(item.getCumulativeCputime())
if cputime > 0:
ret["cumulative-cputime" ]=str(cputime)
elif isinstance(item, Project):
ret["type"]="network"
ret["name"]=pathname
ret["instances"]=item.network.getActiveInstanceList(
False,
False)
cputime=int(item.network.getCumulativeCputime())
if cputime > 0:
ret["cumulative-cputime" ]=str(cputime)
else:
ret["type"]="Unknown type of item: "
ret["name"]=pathname
return ret
def getNamedDescription(self, pathname):
"""Get a description of a named function/type/lib"""
pathname=keywords.fixID(pathname)
with self.updateLock:
ret=dict()
item=self.imports.getItemByFullName(pathname)
if item is not None:
ret["name"]=pathname
desc=item.getDescription()
if desc is not None:
ret["desc"]=desc.get()
else:
ret["desc"]=""
if isinstance(item, lib.ImportLibrary):
ret["type"]="library"
rfuncs=[]
funcs=item.getFunctionList()
for f in funcs:
nf={ "name" : f }
desc=item.getFunction(f).getDescription()
if desc is not None:
nf["desc"] = desc.get()
else:
nf["desc"] = ""
rfuncs.append(nf)
ret["functions"]=rfuncs
rtypes=[]
types=item.getTypeList()
for t in types:
if not item.getType(t).isImplicit():
nf={ "name" : t }
desc=item.getType(t).getDescription()
if desc is not None:
nf["desc"] = desc.get()
else:
nf["desc"] = ""
rtypes.append(nf)
if len(rtypes)>0:
ret["types"]=rtypes
elif isinstance(item, function.Function):
ret["type"]="function"
ioitems=item.getInputs()
inps=[]
for key in ioitems.getMemberKeys():
retd=dict()
retd["name"]=key
retd["type"]=ioitems.getMember(key).getName()
desc=ioitems.getRecordMember(key).getDescription()
if desc is not None:
retd["desc"]=desc.get()
else:
retd["desc"]=""
inps.append(retd)
ret["inputs"]=inps
ioitems=item.getOutputs()
outs=[]
for key in ioitems.getMemberKeys():
retd=dict()
retd["name"]=key
retd["type"]=ioitems.getMember(key).getName()
desc=ioitems.getRecordMember(key).getDescription()
if desc is not None:
retd["desc"]=desc.get()
else:
retd["desc"]=""
outs.append(retd)
ret["outputs"]=outs
elif isinstance(item, vtype.Type):
ret["type"]="type"
else:
ret["name"]="Not found: %s"%pathname
ret["desc"]=""
return ret
def getDebugInfo(self, itemname):
"""Give debug info about a particular item."""
global profile
outf=StringIO()
if itemname == "":
outf.write("the item was empty")
if profile:
all_objects = muppy.get_objects()
sum1 = summary.summarize(all_objects)
summary.print_(sum1, 100)
ib = refbrowser.InteractiveBrowser(self)
ib.main()
return outf.getvalue()
itemname=keywords.fixID(itemname)
itemlist=vtype.parseItemList(itemname)
item=self.getSubValue(itemlist)
item.writeDebug(outf)
return outf.getvalue()
def getGraph(self, pathname):
"""Get an graph description based on a path name according to the rule
[instance]:[instance]."""
pathname=keywords.fixID(pathname)
with self.updateLock:
itemlist=vtype.parseItemList(pathname)
item=self.getSubValue(itemlist)
ret=dict()
if item is not None:
if isinstance(item, active_inst.ActiveInstance):
net=item.network
ret["name"]=pathname
ret["instances"]=net.getActiveInstanceList(True, True)
ret["connections"]=net.getConnectionList()
elif isinstance(item, Project):
net=item.network
ret["name"]=pathname
ret["instances"]=net.getActiveInstanceList(True, True)
ret["connections"]=net.getConnectionList()
else:
ret["name"]=pathname
ret["instances"]=[]
ret["connections"]=[]
return ret
def addInstance(self, name, functionName):
"""Add an instance with a name and function name to the top-level
network."""
name=keywords.fixID(name)
functionName=keywords.fixID(functionName)
with self.updateLock:
func=self.imports.getFunctionByFullName(functionName,
self.topLevelImport)
(net, instanceName)=self.network.getContainingNetwork(name)
nm=""
if net.inActiveInstance is not None:
nm=net.inActiveInstance.getCanonicalName()
#log.debug("net=%s, instanceName=%s"%(nm, instanceName))
inst=instance.Instance(instanceName, func, functionName)
net.addInstance(inst)
def importTopLevelFile(self, fileObject, filename):
"""Read a source file as a top-level description."""
with self.updateLock:
reader=readxml.ProjectXMLReader(self.topLevelImport, self.imports,
self)
reader.readFile(fileObject, filename)
def importName(self, name):
"""Import a named module."""
name=keywords.fixID(name)
with self.updateLock:
if not self.imports.exists(name):
baseFilename="%s.xml"%name.replace(keywords.ModSep, '/')
baseFilename2="%s/_import.xml"%name.replace(keywords.ModSep,
'/')
filename=None
for pathItem in self.conf.getImportPaths():
nfilename=os.path.join(pathItem, baseFilename)
if os.path.exists(nfilename):
filename=nfilename
break
nfilename=os.path.join(pathItem, baseFilename2)
if os.path.exists(nfilename):
filename=nfilename
break
if filename is None:
raise ProjectError("Library %s not found"%name)
log.debug("Importing library %s with file name %s"% (name,
filename))
newlib=lib.ImportLibrary(name, filename, None)
reader=readxml.ProjectXMLReader(newlib, self.imports, self)
reader.read(filename)
self.imports.add(newlib)
return newlib
else:
return self.imports.get(name)
#def getAllTasks(self):
#"""Get a list of all tasks to queue for execution."""
#taskList=[]
#self.network.getAllTasks(taskList)
#return taskList
def cancel(self):
"""Delete all queued commands."""
self.cmdQueue.deleteByProject(self)
def activate(self, pathname):
"""Activate all active instances."""
pathname=keywords.fixID(pathname)
with self.updateLock:
itemlist=vtype.parseItemList(pathname)
item=self.getSubValue(itemlist)
ret=dict()
if isinstance(item, active_inst.ActiveInstance):
item.activate()
elif isinstance(item, Project):
item.network.activateAll()
else:
raise ProjectError("%s is not an instance"%pathname)
def deactivate(self, pathname):
"""De-activate all active instances contained in pathname (or
everything if pathname is empty)."""
pathname=keywords.fixID(pathname)
with self.updateLock:
itemlist=vtype.parseItemList(pathname)
item=self.getSubValue(itemlist)
ret=dict()
log.debug("%s"%str(item))
if isinstance(item, active_inst.ActiveInstance):
item.deactivate()
elif isinstance(item, Project):
item.network.deactivateAll()
else:
raise ProjectError("%s is not an instance"%pathname)
def rerun(self, pathname, recursive, clearError, outf):
"""Re-run and optionally clear an error on an item."""
pathname=keywords.fixID(pathname)
with self.updateLock:
itemlist=vtype.parseItemList(pathname)
item=self.getSubValue(itemlist)
if isinstance(item, active_inst.ActiveInstance):
ret=item.rerun(recursive, clearError, outf)
if ret==0:
if clearError:
outf.write("No errors cleared.")
else:
outf.write("No reruns performed.")
else:
raise ProjectError("%s is not an instance"%pathname)
def getQueue(self):
"""Get the task queue."""
return self.queue
def writeXML(self, outf, indent=0):
"""Write the function definitions and top-level network description
in XML to outf."""
indstr=cpc.util.indStr*indent
iindstr=cpc.util.indStr*(indent+1)
outf.write('%s<cpc version="%d">\n'%(indstr, readxml.curVersion))
for name in self.imports.getLibNames():
outf.write('%s<import name="%s" />\n'%(iindstr,name))
outf.write('\n')
self.topLevelImport.writeXML(outf, indent+1)
outf.write('%s</cpc>\n'%indstr)
def writeXMLPointer(self, outFile):
"""Write a short pointer to where information can be found to an
XML file."""
outFile.write(' <cpc-project id="%s" dir=""/>\n'%(self.name))
def readState(self,stateFile="_state.xml"):
fname=os.path.join(self.basedir, stateFile)
if os.path.exists(fname):
log.debug("Importing project state from %s"%fname)
with self.updateLock:
reader=readxml.ProjectXMLReader(self.topLevelImport,
self.imports,
self)
reader.readFile(fname, fname)
tasks=reader.getTaskList()
for tsk in tasks:
cmds=tsk.getCommands()
if len(cmds) < 1:
log.debug("Queuing task")
self.queue.put(tsk)
else:
log.debug("Queuing command")
for cmd in cmds:
self.cmdQueue.add(cmd)
def writeState(self):
with self.updateLock:
fname=os.path.join(self.basedir, "_state.xml")
nfname=os.path.join(self.basedir, "_state.xml.new")
fout=open(nfname, 'w')
fout.write('<?xml version="1.0"?>\n')
self.writeXML(fout, 0)
fout.close()
# now we use POSIX file renaming atomicity to make sure the state
# is always a consistent file.
os.rename(nfname, fname)
########################################################
# Member functions from the ValueBase interface:
########################################################
def _getSubVal(self, itemList):
"""Helper function"""
subval=self.network.tryGetActiveInstance(itemList[0])
return subval
def getSubValue(self, itemList):
"""Get a specific subvalue through a list of subitems, or return None
if not found.
itemList = the path of the value to return"""
if len(itemList)==0:
return self
subval=self._getSubVal(itemList)
if subval is not None:
return subval.getSubValue(itemList[1:])
return None
def getCreateSubValue(self, itemList, createType=None,
setCreateSourceTag=None):
"""Get or create a specific subvalue through a list of subitems, or
return None if not found.
itemList = the path of the value to return/create
if createType == a type, a subitem will be created with the given
type
if setCreateSourceTag = not None, the source tag will be set for
any items that are created."""
if len(itemList)==0:
return self
subval=self._getSubVal(itemList)
if subval is not None:
return subval.getCreateSubValue(itemList[1:], createType,
setCreateSourceTag)
raise ValError("Cannot create sub value of project")
def getClosestSubValue(self, itemList):
"""Get the closest relevant subvalue through a list of subitems,
itemList = the path of the value to get the closest value for """
if len(itemList)==0:
return self
subval=self._getSubVal(itemList)
if subval is not None:
return subval.getClosestSubValue(itemList[1:])
return self
def getSubValueList(self):
"""Return a list of addressable subvalues."""
ailist=self.network.getActiveInstanceList(False, False)
return ailist.keys()
def getSubValueIterList(self):
"""Return an iterable list of addressable subvalues."""
return self.getSubValueList()
def hasSubValue(self, itemList):
"""Check whether a particular subvalue exists"""
if len(itemList) == 0:
return True
subval=self._getSubVal(itemList)
if subval is not None:
return subval.hasSubValue(itemList[1:])
return False
def getType(self):
"""Return the type associated with this value"""
return vtype.instanceType
def getDesc(self):
"""Return a 'description' of a value: an item that can be passed to
the client describing the value."""
ret=self.network.getActiveInstanceList(False, False)
return ret
########################################################
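# Usage sketch (illustrative; `conf` is a hypothetical configuration object
# and the library/path names are made up): values are addressed with
# [instance]:[instance].[ioitem] paths, e.g.
#
#   proj = Project("demo", "/tmp/demo", conf)
#   proj.importName("gromacs")                    # hypothetical library name
#   proj.getNamedValue("stage1:filter.out_file")  # hypothetical value path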
|
soellman/copernicus
|
cpc/dataflow/project.py
|
Python
|
gpl-2.0
| 27,210
|
# Copyright 2014 Scality
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
from tempest.common import compute
from tempest.common import utils
from tempest.common import waiters
from tempest import config
from tempest.lib import decorators
from tempest.scenario import manager
CONF = config.CONF
class TestShelveInstance(manager.ScenarioTest):
"""This test shelves then unshelves a Nova instance
The following is the scenario outline:
* boot an instance and create a timestamp file in it
* shelve the instance
* unshelve the instance
* check the existence of the timestamp file in the unshelved instance
* check the existence of the timestamp file in the unshelved instance,
after a cold migrate
"""
credentials = ['primary', 'admin']
@classmethod
def setup_clients(cls):
super(TestShelveInstance, cls).setup_clients()
cls.admin_servers_client = cls.os_admin.servers_client
@classmethod
def skip_checks(cls):
super(TestShelveInstance, cls).skip_checks()
if not CONF.compute_feature_enabled.shelve:
raise cls.skipException("Shelve is not available.")
def _shelve_then_unshelve_server(self, server):
compute.shelve_server(self.servers_client, server['id'],
force_shelve_offload=True)
self.servers_client.unshelve_server(server['id'])
waiters.wait_for_server_status(self.servers_client, server['id'],
'ACTIVE')
def _cold_migrate_server(self, server):
src_host = self.get_host_for_server(server['id'])
self.admin_servers_client.migrate_server(server['id'])
waiters.wait_for_server_status(self.servers_client,
server['id'], 'VERIFY_RESIZE')
self.servers_client.confirm_resize_server(server['id'])
waiters.wait_for_server_status(self.servers_client,
server['id'], 'ACTIVE')
dst_host = self.get_host_for_server(server['id'])
self.assertNotEqual(src_host, dst_host)
def _create_server_then_shelve_and_unshelve(self, boot_from_volume=False,
cold_migrate=False):
keypair = self.create_keypair()
security_group = self.create_security_group()
security_groups = [{'name': security_group['name']}]
server = self.create_server(
key_name=keypair['name'],
security_groups=security_groups,
volume_backed=boot_from_volume)
instance_ip = self.get_server_ip(server)
timestamp = self.create_timestamp(instance_ip,
private_key=keypair['private_key'],
server=server)
# Prevent bug #1257594 from coming back
# Unshelve used to boot the instance with the original image, not
# with the instance snapshot
self._shelve_then_unshelve_server(server)
if cold_migrate:
# Prevent bug #1732428 from coming back
self._cold_migrate_server(server)
timestamp2 = self.get_timestamp(instance_ip,
private_key=keypair['private_key'],
server=server)
self.assertEqual(timestamp, timestamp2)
@decorators.attr(type='slow')
@decorators.idempotent_id('1164e700-0af0-4a4c-8792-35909a88743c')
@testtools.skipUnless(CONF.network.public_network_id,
'The public_network_id option must be specified.')
@utils.services('compute', 'network', 'image')
def test_shelve_instance(self):
self._create_server_then_shelve_and_unshelve()
@decorators.attr(type='slow')
@decorators.idempotent_id('c1b6318c-b9da-490b-9c67-9339b627271f')
@testtools.skipUnless(CONF.network.public_network_id,
'The public_network_id option must be specified.')
@utils.services('compute', 'volume', 'network', 'image')
def test_shelve_volume_backed_instance(self):
self._create_server_then_shelve_and_unshelve(boot_from_volume=True)
@decorators.attr(type='slow')
@decorators.idempotent_id('1295fd9e-193a-4cf8-b211-55358e021bae')
@testtools.skipUnless(CONF.network.public_network_id,
'The public_network_id option must be specified.')
@testtools.skipUnless(CONF.compute_feature_enabled.cold_migration,
'Cold migration not available.')
@testtools.skipUnless(CONF.compute_feature_enabled.shelve_migrate,
'Shelve migrate not available.')
@testtools.skipUnless(CONF.compute.min_compute_nodes > 1,
'Less than 2 compute nodes, skipping multinode '
'tests.')
@utils.services('compute', 'network', 'image')
def test_cold_migrate_unshelved_instance(self):
self._create_server_then_shelve_and_unshelve(cold_migrate=True)
|
openstack/tempest
|
tempest/scenario/test_shelve_instance.py
|
Python
|
apache-2.0
| 5,613
|
#=======================================================================
# TestRandomDelay
#=======================================================================
import random
from pymtl import *
from pclib.ifcs import InValRdyBundle, OutValRdyBundle
#-----------------------------------------------------------------------
# TestRandomDelay
#-----------------------------------------------------------------------
class TestRandomDelay( Model ):
'Inserts random delays between input and output val/rdy interfaces.'
def __init__( s, dtype, max_random_delay = 0, seed=0xb601bc01 ):
s.in_ = InValRdyBundle ( dtype )
s.out = OutValRdyBundle( dtype )
    # We keep our own internal random number generator to keep the state
    # of this generator completely separate from other generators. This
    # ensures that any delays are reproducible.
s.rgen = random.Random()
s.rgen.seed(seed)
# If the maximum random delay is set to zero, then the inputs are
# directly connected to the outputs.
s.max_random_delay = max_random_delay
if max_random_delay == 0:
s.connect( s.in_, s.out )
# Buffer to hold message
s.buf = None
s.buf_full = False
s.counter = 0
#---------------------------------------------------------------------
# Tick
#---------------------------------------------------------------------
@s.tick
def tick():
# Ideally we could just not include this posedge_clk concurrent block
# at all in the simulation. We should be able to do this when we have
# an explicit elaborate function.
if s.max_random_delay == 0:
return
      # At the end of the cycle, we AND together the val/rdy bits to
      # determine if the input/output message transactions occurred.
in_go = s.in_.val and s.in_.rdy
out_go = s.out.val and s.out.rdy
      # If the output transaction occurred, then clear the buffer full bit.
# Note that we do this _first_ before we process the input
# transaction so we can essentially pipeline this control logic.
if out_go:
s.buf_full = False
      # If the input transaction occurred, then write the input message into
# our internal buffer, update the buffer full bit, and reset the
# counter.
if in_go:
s.buf = s.in_.msg[:]
s.buf_full = True
s.counter = s.rgen.randint( 1, s.max_random_delay )
if s.counter > 0:
s.counter = s.counter - 1
# The output message is always the output of the buffer
if s.buf_full:
s.out.msg.next = s.buf
      # The input is ready if the counter is zero and the buffer is empty;
      # the output is valid if the counter is zero and the buffer is full
s.in_.rdy.next = ( s.counter == 0 ) and not s.buf_full
s.out.val.next = ( s.counter == 0 ) and s.buf_full
def line_trace( s ):
return "{} ({:2}) {}".format( s.in_, s.counter, s.out )
|
Abhinav117/pymtl
|
pclib/test/TestRandomDelay.py
|
Python
|
bsd-3-clause
| 2,895
|
"""
A dropbox for uncategorized utility code that doesn't belong anywhere else.
"""
import warnings
import inspect
import functools
import os
from collections import deque
from twisted.python.reflect import fullyQualifiedName
from zope.interface import alsoProvides
class DecoratorPartial(object):
def __init__(self, func, argnum, partialiface, include_call_type, args, keywords):
self.func = func
self.args = args
self.keywords = keywords
self.argnum = argnum
if partialiface:
alsoProvides(self, partialiface)
self.partialiface = partialiface
self.include_call_type = include_call_type
def __call__(self, target):
args = self.args[:self.argnum] + (target,) + self.args[self.argnum:]
keywords = self.keywords
if self.include_call_type:
keywords["paramdecorator_simple_call"] = False
return self.func(*args, **keywords)
def copy(self):
return DecoratorPartial(self.func, self.argnum, self.partialiface, self.include_call_type, self.args, self.keywords)
def paramdecorator(decorator_func, argname=None, argnum=None, useself=None, partialiface=None, include_call_type=False):
"""
    Parameter-taking decorator-decorator; that is, decorate a function with this to make it into a parameter-decorator.
Use of the paramdecorator decorator:
Takes no special arguments; simply call as @paramdecorator. The first non-'self' argument of the decorated function
will be passed the function or class that the produced decorator is used to decorate. The first non-self argument
can also be passed as a keyword argument to the returned decorator, which will make it behave like a normal function.
    For this reason it is strongly recommended that you name your first argument "target", "func", or "clazz"
(depending on what you accept).
Use of produced parameter decorators:
The produced decorator can be called either as @decorator with no arguments, or @decorator() with any arguments.
When called with only a builtin-type callable as an argument (as in the case of @decorator), it will assume
that it is being called with no arguments. When called with more arguments or when the first argument is not
a callable, it will assume it is being called with multiple arguments and return a closure'd decorator.
If you wish to force a parameter decorator to take the target function or class in the same call as arguments,
then give it a keyword argument of the function's target name.
Note: if the decorated decorator uses *args, you must provide argname and optionally one of argnum or useself.
argname may only be used on its own when the function will accept the target as a kwarg.
"""
if useself is not None:
if argnum is not None:
raise Exception("useself and argnum both do the same thing; they cannot be used at the same time")
argnum = 1 if useself else 0
if argnum is None and argname is None:
args = inspect.getargspec(decorator_func).args
if args[0] == "self":
argnum = 1
else:
argnum = 0
argname = args[argnum]
elif argnum is not None and argname is None:
args = inspect.getargspec(decorator_func).args
argname = args[argnum]
elif argnum is None and argname is not None:
args = inspect.getargspec(decorator_func).args
for index, name in enumerate(args):
if name == argname:
argnum = index
break
else:
raise Exception("argname must point to an arg that exists")
else:
pass # if both are provided, we're good
assert argnum is not None
makepartial = functools.partial(DecoratorPartial, decorator_func, argnum, partialiface, include_call_type)
@functools.wraps(decorator_func)
def meta_decorated(*args, **keywords):
"I'm tired of providing nonsense docstrings to functools.wrapped functions just to shut pylint up"
if argname in keywords:
# a way for callers to force a 'normal' function call
arg = keywords[argname]
del keywords[argname]
preparer = makepartial(args, keywords)
return preparer(arg)
# called as a simple decorator
if (len(args) == argnum+1 and
(inspect.isfunction(args[argnum]) or
inspect.isclass(args[argnum]) or
inspect.ismethod(args[argnum]))
and len(keywords) == 0):
if include_call_type:
return decorator_func(*args, paramdecorator_simple_call=True)
else:
return decorator_func(*args)
else: # called as an argument decorator
return makepartial(args, keywords)
meta_decorated.undecorated = decorator_func
return meta_decorated
paramdecorator = paramdecorator(paramdecorator) # we are ourselves!
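# Worked example (illustrative, not from the original module): a decorator
# defined with @paramdecorator can be applied either bare or with arguments.
#
#   @paramdecorator
#   def tagged(func, label="default"):
#       func.label = label
#       return func
#
#   @tagged                 # bare call: the function is passed straight in
#   def a(): pass
#
#   @tagged(label="x")      # argument call: a DecoratorPartial is returned
#   def b(): pass
#
#   assert a.label == "default" and b.label == "x"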
class ExceptionWithMessage(Exception):
"""
Subclass this class and provide a docstring; the docstring will be
    formatted with the __init__ args and then used as the error message
(if you are seeing this as a result of an exception, someone - possibly you - forgot
to provide a subclass with a docstring!)
"""
def __init__(self, *args, **kwargs):
assert self.__class__ != ExceptionWithMessage
Exception.__init__(self, self.__class__.__doc__.format(*args, **kwargs))
class KeyAttributeCollisionError(ExceptionWithMessage):
"""Key {1!r} collides with attribute of the same name on AttrDict it is set in """
class AttrDict(dict):
"""
    Dict with its values accessible as attributes
"""
def __init__(self, *args, **keywords):
dict.__init__(self, *args, **keywords)
def __getattr__(self, name):
try:
return self[name]
except KeyError:
raise AttributeError
def __setattr__(self, name, attr):
self[name] = attr
'''
def setdefault(self, name, value, update=True):
if update:
try:
oldvalue = self[name]
except KeyError:
self[name] = value
else:
                to_update = deque([(oldvalue, value)])
while to_update:
                    target, source = to_update.popleft()
if isinstance(target, dict) and isinstance(source, dict):
for key in source:
if key not in target:
target[key] = source[key]
else:
to_update.append((target[key], source[key]))
else:
return dict.setdefault(self, name, value)
'''
def __repr__(self):
return "AttrDict(%s)" % super(AttrDict, self).__repr__() #pragma: no cover
DEBUG = "CROW2_DEBUG" in os.environ
def DEBUG_calling_name(): #pragma: no cover
if not DEBUG:
# TODO: print warning if this code is run
return "<**CROW2_DEBUG not in environment**>"
stackframe = inspect.stack()[2] # 0 = us, 1 = who called us, 2 = who called them
frame = stackframe[0]
code_name = frame.f_code.co_name
module = inspect.getmodule(frame)
modulename = fullyQualifiedName(module)
return modulename + '.' + code_name
|
lahwran/crow2
|
crow2/util.py
|
Python
|
mit
| 7,384
|
menuscreen_kv = '''
<ScreenMenu>:
id: screen_menu
BoxLayout:
orientation: 'vertical'
InputField:
id: text_input_field
gid: 'text_input_field_global_id'
Button:
text: 'Save'
# this is great for local widget tree:
#on_release: text_input_field.save_text()
# this is great for registered widgets accessible globally:
on_release: app.get_widget('text_input_field_global_id').save_text()
ButtonLaunchWebView:
text: 'WebView'
Button:
text: 'Quit'
on_release: screen_menu.leave()
'''
from kivy.app import App
from kivy.uix.textinput import TextInput
from kivy.uix.button import Button
from generic import DynamicScreen
from generic import SelfRegister
class ScreenMenu(DynamicScreen):
def __init__(self, **kwargs):
self.name = 'screen_menu'
self.kv_string = menuscreen_kv
super(ScreenMenu, self).__init__(**kwargs)
def leave(self):
self.manager.current = 'screen_quit'
class InputField(TextInput, SelfRegister):
gid = 'text_input_field_global_id'
def __init__(self, **kwargs):
super(InputField, self).__init__(**kwargs)
def post_init_setup(self, *args):
app = App.get_running_app()
        # try to get encrypted text from storage
try:
self.stored_text = app.decrypt(app.store.get('stored_text_section')['stored_text'])
except KeyError:
self.stored_text = ""
self.text = self.stored_text
def save_text(self, *args):
app = App.get_running_app()
app.store.put('stored_text_section', stored_text=app.encrypt(self.text))
class ButtonLaunchWebView(Button):
def on_release(self, *args):
screen_manager = App.get_running_app().root
if not screen_manager.has_screen('screen_webview'):
from screens.screenwebview import ScreenWebView
screen_manager.add_widget(ScreenWebView())
screen_manager.switch_screen('slide', 'screen_webview')
|
suchyDev/Kivy-Dynamic-Screens-Template
|
screens/screenmenu.py
|
Python
|
mit
| 2,103
|
# -*- coding: utf-8 -*-
from PyQt4 import QtGui, uic
from PyQt4.uic import loadUi
from epipy.ui.view import cwd
class SIRsimpleGroupBox(QtGui.QGroupBox):
"""This class represents the SIR Simple group box.
:returns: an instance of *SIRsimpleGroupBox*
"""
def __init__(self):
super(SIRsimpleGroupBox, self).__init__()
loadUi(cwd + '/sirsimplegroupbox.ui', self)
class SIRwbadGroupBox(QtGui.QGroupBox):
"""This class represents the SIR with deaths and births group box.
:returns: an instance of *SIRwbadGroupBox*
"""
def __init__(self):
super(SIRwbadGroupBox, self).__init__()
loadUi(cwd + '/sirwbadgroupbox.ui', self)
class SIRvaccineGroupBox(QtGui.QGroupBox):
"""This class represents the SIR Vaccine group box.
:returns: an instance of *SIRvaccineGroupBox*
"""
def __init__(self):
super(SIRvaccineGroupBox, self).__init__()
loadUi(cwd + '/sirvaccinegroupbox.ui', self)
|
ckaus/EpiPy
|
epipy/ui/view/sirgroupbox.py
|
Python
|
mit
| 978
|
#!/usr/bin/env python
# Copyright (C) 2015 Swift Navigation Inc.
# Contact: Bhaskar Mookerji <mookerji@swiftnav.com>
#
# This source is subject to the license found in the file 'LICENSE' which must
# be distributed together with this source. All other rights reserved.
#
# THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
# EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE.
"""Generator for Java target.
This module consumes the YAML spec and generates Java code in the
target directory.
"""
import os
import os.path
from sbpg.targets.templating import JENV, ACRONYMS
TEMPLATE_NAME = "sbp_java.java.j2"
TEMPLATE_TABLE_NAME = "MessageTable.java.j2"
JAVA_TYPE_MAP = {'u8': 'int',
'u16': 'int',
'u32': 'long',
'u64': 'long',
's8': 'int',
's16': 'int',
's32': 'int',
's64': 'long',
'float': 'float',
'double': 'double',
'string': 'String'}
field_sizes = {
'u8' : 1,
'u16' : 2,
'u32' : 4,
'u64' : 8,
's8' : 1,
's16' : 2,
's32' : 4,
's64' : 8,
'float' : 4,
'double' : 8,
}
def classnameify(s):
"""
Makes a classname.
"""
return ''.join(w if w in ACRONYMS else w.title() for w in s.split('_'))
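# Illustrative (assuming none of these words appear in ACRONYMS):
#   classnameify('msg_base_pos')  # -> 'MsgBasePos'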
def commentify(value):
"""
Builds a comment.
"""
if value is None:
return
if len(value.split('\n')) == 1:
return "* " + value
else:
return '\n'.join([' * ' + l for l in value.split('\n')[:-1]])
def type_map(field):
if JAVA_TYPE_MAP.has_key(field.type_id):
return JAVA_TYPE_MAP[field.type_id]
elif field.type_id == 'array':
t = field.options['fill'].value
return JAVA_TYPE_MAP.get(t, t) + '[]'
else:
return field.type_id
def parse_type(field):
"""
Function to pull a type from the binary payload.
"""
if field.type_id == 'string':
if field.options.has_key('size'):
return "parser.getString(%d)" % field.options['size'].value
else:
return "parser.getString()"
elif field.type_id in JAVA_TYPE_MAP.keys():
# Primitive java types have extractor methods in SBPMessage.Parser
return "parser.get" + field.type_id.capitalize() + "()"
if field.type_id == 'array':
# Call function to build array
t = field.options['fill'].value
if t in JAVA_TYPE_MAP.keys():
if field.options.has_key('size'):
return "parser.getArrayof%s(%d)" % (t.capitalize(), field.options['size'].value)
else:
return "parser.getArrayof%s()" % t.capitalize()
else:
if field.options.has_key('size'):
return "parser.getArray(%s.class, %d)" % (t, field.options['size'].value)
else:
return "parser.getArray(%s.class)" % t
else:
# This is an inner class, call default constructor
return "new %s().parse(parser)" % field.type_id
def build_type(field):
"""
Function to pack a type into the binary payload.
"""
if field.type_id == 'string':
if field.options.has_key('size'):
return "builder.putString(%s, %d)" % (field.identifier, field.options['size'].value)
else:
return "builder.putString(%s)" % field.identifier
elif field.type_id in JAVA_TYPE_MAP.keys():
# Primitive java types have extractor methods in SBPMessage.Builder
return "builder.put%s(%s)" % (field.type_id.capitalize(), field.identifier)
if field.type_id == 'array':
# Call function to build array
t = field.options['fill'].value
if t in JAVA_TYPE_MAP.keys():
if field.options.has_key('size'):
return "builder.putArrayof%s(%s, %d)" % (t.capitalize(),
field.identifier,
field.options['size'].value)
else:
return "builder.putArrayof%s(%s)" % (t.capitalize(), field.identifier)
else:
if field.options.has_key('size'):
return "builder.putArray(%s, %d)" % (field.identifier, field.options['size'].value)
else:
return "builder.putArray(%s)" % field.identifier
else:
return "%s.build(builder)" % field.identifier
def jsonify(field):
if field.type_id in JAVA_TYPE_MAP.keys():
return field.identifier
elif field.type_id == 'array':
if field.options['fill'].value in JAVA_TYPE_MAP.keys():
return "new JSONArray(%s)" % field.identifier
else:
return "SBPStruct.toJSONArray(%s)" % field.identifier
else:
return field.identifier + ".toJSON()"
JENV.filters['classnameify'] = classnameify
JENV.filters['commentify'] = commentify
JENV.filters['type_map'] = type_map
JENV.filters['parse_type'] = parse_type
JENV.filters['build_type'] = build_type
JENV.filters['jsonify'] = jsonify
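# A quick sketch of the naming filters above (illustrative; assumes none of
# the words below appear in ACRONYMS):
#   classnameify('msg_base_pos')  -> 'MsgBasePos'
#   commentify('Single line')     -> '* Single line'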
def render_source(output_dir, package_spec, jenv=JENV):
"""
Render and output
"""
path, module_name = package_spec.filepath
java_template = jenv.get_template(TEMPLATE_NAME)
module_path = "com." + package_spec.identifier
yaml_filepath = "/".join(package_spec.filepath) + ".yaml"
includes = [".".join(i.split(".")[:-1]) for i in package_spec.includes]
includes = [i for i in includes if i != "types"]
for msg in package_spec.definitions:
msg_name = classnameify(msg.identifier) if msg.sbp_id else msg.identifier
l = "/".join(package_spec.filepath)
destination_filename = "%s/com/%s/%s.java" % (output_dir, l , msg_name)
    # Create the output directory (including parents) if it doesn't exist
    if not os.path.exists(os.path.dirname(destination_filename)):
      os.makedirs(os.path.dirname(destination_filename))
    with open(destination_filename, 'w+') as f:
      print(destination_filename)
f.write(java_template.render(m=msg,
filepath=yaml_filepath,
module_path=module_path,
include=includes,
description=package_spec.description))
def render_table(output_dir, packages, jenv=JENV):
"""
Render and output dispatch table
"""
destination_filename = output_dir + "/com/swiftnav/sbp/client/MessageTable.java"
with open(destination_filename, 'w+') as f:
    print(destination_filename)
f.write(jenv.get_template(TEMPLATE_TABLE_NAME).render(packages=packages))
| paparazzi/libsbp | generator/sbpg/targets/java.py | Python | lgpl-3.0 | 6,418 |
import pytest
from pysubs2 import SSAEvent, make_time
def test_repr_dialogue():
ev = SSAEvent(start=make_time(m=1, s=30), end=make_time(m=1, s=35), text="Hello\\Nworld!")
ref = r"<SSAEvent type=Dialogue start=0:01:30 end=0:01:35 text='Hello\\Nworld!'>"
assert repr(ev) == ref
def test_repr_comment():
ev = SSAEvent(start=make_time(m=1, s=30), end=make_time(m=1, s=35), text="Hello\\Nworld!")
ev.is_comment = True
ref = r"<SSAEvent type=Comment start=0:01:30 end=0:01:35 text='Hello\\Nworld!'>"
assert repr(ev) == ref
def test_duration():
e = SSAEvent(start=0, end=10)
assert e.duration == 10
e.duration = 20
assert e.start == 0 and e.end == 20
e.duration = 5
assert e.start == 0 and e.end == 5
e.duration = 0
assert e.start == 0 and e.end == 0
with pytest.raises(ValueError):
e.duration = -20
def test_plaintext():
e = SSAEvent(text=r"First\NSecond\NThird\hline{with hidden text}")
assert e.plaintext == "First\nSecond\nThird line"
e.plaintext = "My\n Text "
assert e.text == r"My\N Text "
# SubStation has no way to escape braces, thus this wart
text = "My text{with braces}"
e.plaintext = text
assert e.plaintext != text
def test_shift():
e = SSAEvent(start=0, end=10)
with pytest.raises(ValueError):
e.shift(frames=5)
with pytest.raises(ValueError):
e.shift(fps=23.976)
with pytest.raises(ValueError):
e.shift(frames=5, fps=-1)
e2 = e.copy(); e2.shift(ms=5)
assert e2 == SSAEvent(start=5, end=15)
e2 = e.copy(); e2.shift(ms=-5)
assert e2 == SSAEvent(start=-5, end=5)
e2 = e.copy(); e2.shift(frames=1, fps=100.0)
assert e2 == SSAEvent(start=10, end=20)
e2 = e.copy(); e2.shift(frames=-1, fps=100.0)
assert e2 == SSAEvent(start=-10, end=0)
e2 = e.copy(); e2.shift(h=1, m=-60, s=2, ms=-2000)
assert e2 == e
def test_fields():
e = SSAEvent()
with pytest.warns(DeprecationWarning):
assert e.FIELDS == frozenset([
"start", "end", "text", "marked", "layer", "style",
"name", "marginl", "marginr", "marginv", "effect", "type"
])
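# Note: make_time() sums its keyword arguments into milliseconds, so in the
# tests above make_time(m=1, s=30) == 90000 and SSAEvent start/end are plain
# millisecond ints.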
| tkarabela/pysubs2 | tests/test_ssaevent.py | Python | mit | 2,177 |
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (nested_scopes, generators, division, absolute_import, with_statement,
print_function, unicode_literals)
from collections import namedtuple
class Reporter(object):
"""Formats and emits reports.
Subclasses implement the callback methods, to provide specific reporting
functionality, e.g., to console or to browser.
"""
  # Generic reporting settings.
  #   log_level: Display log messages up to this level.
  # Subclasses may extend this namedtuple with their own subclass-specific
  # settings.
  Settings = namedtuple('Settings', ['log_level'])
def __init__(self, run_tracker, settings):
self.run_tracker = run_tracker
self.settings = settings
def open(self):
"""Begin the report."""
pass
def close(self):
"""End the report."""
pass
def start_workunit(self, workunit):
"""A new workunit has started."""
pass
def end_workunit(self, workunit):
"""A workunit has finished."""
pass
def handle_log(self, workunit, level, *msg_elements):
"""Handle a message logged by pants code.
level: One of the constants above.
Each element in msg_elements is either a message or a (message, detail) pair.
A subclass must show the message, but may choose to show the detail in some
sensible way (e.g., when the message text is clicked on in a browser).
This convenience implementation filters by log level and then delegates to do_handle_log.
"""
if level <= self.settings.log_level:
self.do_handle_log(workunit, level, *msg_elements)
def do_handle_log(self, workunit, level, *msg_elements):
"""Handle a message logged by pants code, after it's passed the log level check."""
pass
def handle_output(self, workunit, label, s):
"""Handle output captured from an invoked tool (e.g., javac).
workunit: The innermost WorkUnit in which the tool was invoked.
label: Classifies the output e.g., 'stdout' for output captured from a tool's stdout or
'debug' for debug output captured from a tool's logfiles.
s: The content captured.
"""
pass
def is_under_main_root(self, workunit):
"""Is the workunit running under the main thread's root."""
return self.run_tracker.is_under_main_root(workunit)
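# A minimal subclass sketch (hypothetical, not part of pants) showing how the
# callbacks compose: the log-level filter in handle_log is inherited, so a
# console reporter only needs do_handle_log and handle_output.
#   class EchoReporter(Reporter):
#     def do_handle_log(self, workunit, level, *msg_elements):
#       print(' '.join(e[0] if isinstance(e, tuple) else e
#                      for e in msg_elements))
#     def handle_output(self, workunit, label, s):
#       print('[{0}] {1}'.format(label, s))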
| square/pants | src/python/pants/reporting/reporter.py | Python | apache-2.0 | 2,392 |
#!/usr/bin/python2.4
#
#
# Copyright 2008, The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides an interface to communicate with the device via the adb command.
Assumes adb binary is currently on system path.
"""
# Python imports
import os
import string
import time
# local imports
import am_instrument_parser
import errors
import logger
import run_command
class AdbInterface:
"""Helper class for communicating with Android device via adb."""
# argument to pass to adb, to direct command to specific device
_target_arg = ""
DEVICE_TRACE_DIR = "/data/test_results/"
def SetEmulatorTarget(self):
"""Direct all future commands to the only running emulator."""
self._target_arg = "-e"
def SetDeviceTarget(self):
"""Direct all future commands to the only connected USB device."""
self._target_arg = "-d"
def SetTargetSerial(self, serial):
"""Direct all future commands to Android target with the given serial."""
self._target_arg = "-s %s" % serial
def SendCommand(self, command_string, timeout_time=20, retry_count=3):
"""Send a command via adb.
Args:
command_string: adb command to run
timeout_time: number of seconds to wait for command to respond before
retrying
retry_count: number of times to retry command before raising
WaitForResponseTimedOutError
Returns:
string output of command
    Raises:
      WaitForResponseTimedOutError: if the device does not respond to the
        command within the given timeout
    """
adb_cmd = "adb %s %s" % (self._target_arg, command_string)
logger.SilentLog("about to run %s" % adb_cmd)
return run_command.RunCommand(adb_cmd, timeout_time=timeout_time,
retry_count=retry_count)
def SendShellCommand(self, cmd, timeout_time=20, retry_count=3):
"""Send a adb shell command.
Args:
cmd: adb shell command to run
timeout_time: number of seconds to wait for command to respond before
retrying
retry_count: number of times to retry command before raising
WaitForResponseTimedOutError
Returns:
string output of command
Raises:
WaitForResponseTimedOutError: if device does not respond to command
"""
return self.SendCommand("shell %s" % cmd, timeout_time=timeout_time,
retry_count=retry_count)
def BugReport(self, path):
"""Dumps adb bugreport to the file specified by the path.
Args:
path: Path of the file where adb bugreport is dumped to.
"""
bug_output = self.SendShellCommand("bugreport", timeout_time=60)
bugreport_file = open(path, "w")
bugreport_file.write(bug_output)
bugreport_file.close()
def Push(self, src, dest):
"""Pushes the file src onto the device at dest.
Args:
src: file path of host file to push
dest: destination absolute file path on device
"""
self.SendCommand("push %s %s" % (src, dest), timeout_time=60)
def Pull(self, src, dest):
"""Pulls the file src on the device onto dest on the host.
Args:
src: absolute file path of file on device to pull
dest: destination file path on host
Returns:
True if success and False otherwise.
"""
# Create the base dir if it doesn't exist already
if not os.path.exists(os.path.dirname(dest)):
os.makedirs(os.path.dirname(dest))
if self.DoesFileExist(src):
self.SendCommand("pull %s %s" % (src, dest), timeout_time=60)
return True
else:
logger.Log("ADB Pull Failed: Source file %s does not exist." % src)
return False
def DoesFileExist(self, src):
"""Checks if the given path exists on device target.
Args:
src: file path to be checked.
Returns:
True if file exists
"""
output = self.SendShellCommand("ls %s" % src)
error = "No such file or directory"
if error in output:
return False
return True
def EnableAdbRoot(self):
"""Enable adb root on device."""
output = self.SendCommand("root")
if "adbd is already running as root" in output:
return True
elif "restarting adbd as root" in output:
# device will disappear from adb, wait for it to come back
self.SendCommand("wait-for-device")
return True
else:
logger.Log("Unrecognized output from adb root: %s" % output)
return False
def StartInstrumentationForPackage(
self, package_name, runner_name, timeout_time=60*10,
no_window_animation=False, instrumentation_args={}):
"""Run instrumentation test for given package and runner.
Equivalent to StartInstrumentation, except instrumentation path is
separated into its package and runner components.
"""
instrumentation_path = "%s/%s" % (package_name, runner_name)
return self.StartInstrumentation(instrumentation_path, timeout_time=timeout_time,
no_window_animation=no_window_animation,
instrumentation_args=instrumentation_args)
def StartInstrumentation(
self, instrumentation_path, timeout_time=60*10, no_window_animation=False,
profile=False, instrumentation_args={}, silent_log=False):
"""Runs an instrumentation class on the target.
    Returns a dictionary containing the key value pairs from the
    instrumentation's result bundle and a list of TestResults. Also handles
    interpreting error output from the device and raises the necessary
    exceptions.
Args:
      instrumentation_path: string. It should be the fully qualified package
          name, and instrumentation test runner, separated by "/"
          e.g. com.android.globaltimelaunch/.GlobalTimeLaunch
timeout_time: Timeout value for the am command.
no_window_animation: boolean, Whether you want window animations enabled
or disabled
profile: If True, profiling will be turned on for the instrumentation.
instrumentation_args: Dictionary of key value bundle arguments to pass to
instrumentation.
silent_log: If True, the invocation of the instrumentation test runner
will not be logged.
Returns:
(test_results, inst_finished_bundle)
test_results: a list of TestResults
inst_finished_bundle (dict): Key/value pairs contained in the bundle that
is passed into ActivityManager.finishInstrumentation(). Included in this
bundle is the return code of the Instrumentation process, any error
codes reported by the activity manager, and any results explicitly added
by the instrumentation code.
Raises:
WaitForResponseTimedOutError: if timeout occurred while waiting for
response to adb instrument command
DeviceUnresponsiveError: if device system process is not responding
InstrumentationError: if instrumentation failed to run
"""
command_string = self._BuildInstrumentationCommandPath(
instrumentation_path, no_window_animation=no_window_animation,
profile=profile, raw_mode=True,
instrumentation_args=instrumentation_args)
if silent_log:
logger.SilentLog(command_string)
else:
logger.Log(command_string)
(test_results, inst_finished_bundle) = (
am_instrument_parser.ParseAmInstrumentOutput(
self.SendShellCommand(command_string, timeout_time=timeout_time,
retry_count=2)))
if "code" not in inst_finished_bundle:
logger.Log('No code available. inst_finished_bundle contains: %s '
% inst_finished_bundle)
raise errors.InstrumentationError("no test results... device setup "
"correctly?")
if inst_finished_bundle["code"] == "0":
long_msg_result = "no error message"
if "longMsg" in inst_finished_bundle:
long_msg_result = inst_finished_bundle["longMsg"]
logger.Log("Error! Test run failed: %s" % long_msg_result)
raise errors.InstrumentationError(long_msg_result)
if "INSTRUMENTATION_ABORTED" in inst_finished_bundle:
logger.Log("INSTRUMENTATION ABORTED!")
raise errors.DeviceUnresponsiveError
return (test_results, inst_finished_bundle)
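  # Example invocation sketch (package/runner names below are illustrative):
  #   results, bundle = adb.StartInstrumentation(
  #       "com.example.tests/android.test.InstrumentationTestRunner",
  #       timeout_time=300)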
def StartInstrumentationNoResults(
self, package_name, runner_name, no_window_animation=False,
raw_mode=False, instrumentation_args={}):
"""Runs instrumentation and dumps output to stdout.
Equivalent to StartInstrumentation, but will dump instrumentation
'normal' output to stdout, instead of parsing return results. Command will
never timeout.
"""
adb_command_string = self.PreviewInstrumentationCommand(
package_name, runner_name, no_window_animation=no_window_animation,
raw_mode=raw_mode, instrumentation_args=instrumentation_args)
logger.Log(adb_command_string)
run_command.RunCommand(adb_command_string, return_output=False)
def PreviewInstrumentationCommand(
self, package_name, runner_name, no_window_animation=False,
raw_mode=False, instrumentation_args={}):
"""Returns a string of adb command that will be executed."""
inst_command_string = self._BuildInstrumentationCommand(
package_name, runner_name, no_window_animation=no_window_animation,
raw_mode=raw_mode, instrumentation_args=instrumentation_args)
command_string = "adb %s shell %s" % (self._target_arg, inst_command_string)
return command_string
def _BuildInstrumentationCommand(
self, package, runner_name, no_window_animation=False, profile=False,
raw_mode=True, instrumentation_args={}):
instrumentation_path = "%s/%s" % (package, runner_name)
return self._BuildInstrumentationCommandPath(
instrumentation_path, no_window_animation=no_window_animation,
profile=profile, raw_mode=raw_mode,
instrumentation_args=instrumentation_args)
def _BuildInstrumentationCommandPath(
self, instrumentation_path, no_window_animation=False, profile=False,
raw_mode=True, instrumentation_args={}):
command_string = "am instrument"
if no_window_animation:
command_string += " --no_window_animation"
if profile:
self._CreateTraceDir()
command_string += (
" -p %s/%s.dmtrace" %
(self.DEVICE_TRACE_DIR, instrumentation_path.split(".")[-1]))
for key, value in instrumentation_args.items():
command_string += " -e %s '%s'" % (key, value)
if raw_mode:
command_string += " -r"
command_string += " -w %s" % instrumentation_path
return command_string
  def _CreateTraceDir(self):
    # If `ls` prints anything here it is an error message, so assume the
    # trace dir is missing and create it. (The original calls passed a
    # descriptive string as the command and the real command as the timeout;
    # fixed to pass the shell command as the first argument.)
    ls_response = self.SendShellCommand("ls /data/trace")
    if ls_response.strip("#").strip(string.whitespace) != "":
      self.SendShellCommand("mkdir /data/trace")
      self.SendShellCommand("chmod 777 /data/trace")
def WaitForDevicePm(self, wait_time=120):
"""Waits for targeted device's package manager to be up.
Args:
wait_time: time in seconds to wait
Raises:
WaitForResponseTimedOutError if wait_time elapses and pm still does not
respond.
"""
logger.Log("Waiting for device package manager...")
self.SendCommand("wait-for-device")
# Now the device is there, but may not be running.
# Query the package manager with a basic command
try:
self._WaitForShellCommandContents("pm path android", "package:",
wait_time)
except errors.WaitForResponseTimedOutError:
raise errors.WaitForResponseTimedOutError(
"Package manager did not respond after %s seconds" % wait_time)
def WaitForInstrumentation(self, package_name, runner_name, wait_time=120):
"""Waits for given instrumentation to be present on device
Args:
wait_time: time in seconds to wait
Raises:
WaitForResponseTimedOutError if wait_time elapses and instrumentation
still not present.
"""
instrumentation_path = "%s/%s" % (package_name, runner_name)
logger.Log("Waiting for instrumentation to be present")
# Query the package manager
try:
command = "pm list instrumentation | grep %s" % instrumentation_path
self._WaitForShellCommandContents(command, "instrumentation:", wait_time,
raise_abort=False)
except errors.WaitForResponseTimedOutError :
      logger.Log(
          "Could not find instrumentation %s on device. Does the "
          "instrumentation in the test's AndroidManifest.xml match the "
          "definition in test_defs.xml?" % instrumentation_path)
raise
def WaitForProcess(self, name, wait_time=120):
"""Wait until a process is running on the device.
Args:
name: the process name as it appears in `ps`
wait_time: time in seconds to wait
Raises:
WaitForResponseTimedOutError if wait_time elapses and the process is
still not running
"""
logger.Log("Waiting for process %s" % name)
self.SendCommand("wait-for-device")
self._WaitForShellCommandContents("ps", name, wait_time)
def WaitForProcessEnd(self, name, wait_time=120):
"""Wait until a process is no longer running on the device.
Args:
name: the process name as it appears in `ps`
wait_time: time in seconds to wait
Raises:
WaitForResponseTimedOutError if wait_time elapses and the process is
still running
"""
logger.Log("Waiting for process %s to end" % name)
self._WaitForShellCommandContents("ps", name, wait_time, invert=True)
def _WaitForShellCommandContents(self, command, expected, wait_time,
raise_abort=True, invert=False):
"""Wait until the response to a command contains a given output.
Assumes that a only successful execution of "adb shell <command>" contains
the substring expected. Assumes that a device is present.
Args:
command: adb shell command to execute
expected: the string that should appear to consider the
command successful.
wait_time: time in seconds to wait
raise_abort: if False, retry when executing the command raises an
AbortError, rather than failing.
invert: if True, wait until the command output no longer contains the
expected contents.
Raises:
WaitForResponseTimedOutError: If wait_time elapses and the command has not
returned an output containing expected yet.
"""
# Query the device with the command
success = False
attempts = 0
wait_period = 5
while not success and (attempts*wait_period) < wait_time:
# assume the command will always contain expected in the success case
try:
output = self.SendShellCommand(command, retry_count=1)
if ((not invert and expected in output)
or (invert and expected not in output)):
success = True
except errors.AbortError, e:
if raise_abort:
raise
# ignore otherwise
if not success:
time.sleep(wait_period)
attempts += 1
if not success:
raise errors.WaitForResponseTimedOutError()
def WaitForBootComplete(self, wait_time=120):
"""Waits for targeted device's bootcomplete flag to be set.
Args:
wait_time: time in seconds to wait
Raises:
WaitForResponseTimedOutError if wait_time elapses and pm still does not
respond.
"""
logger.Log("Waiting for boot complete...")
self.SendCommand("wait-for-device")
# Now the device is there, but may not be running.
# Query the package manager with a basic command
boot_complete = False
attempts = 0
wait_period = 5
while not boot_complete and (attempts*wait_period) < wait_time:
output = self.SendShellCommand("getprop dev.bootcomplete", retry_count=1)
output = output.strip()
if output == "1":
boot_complete = True
else:
time.sleep(wait_period)
attempts += 1
if not boot_complete:
raise errors.WaitForResponseTimedOutError(
"dev.bootcomplete flag was not set after %s seconds" % wait_time)
def Sync(self, retry_count=3, runtime_restart=False):
"""Perform a adb sync.
Blocks until device package manager is responding.
Args:
retry_count: number of times to retry sync before failing
runtime_restart: stop runtime during sync and restart afterwards, useful
for syncing system libraries (core, framework etc)
Raises:
WaitForResponseTimedOutError if package manager does not respond
AbortError if unrecoverable error occurred
"""
output = ""
error = None
if runtime_restart:
self.SendShellCommand("setprop ro.monkey 1", retry_count=retry_count)
      # manually reset the bootcomplete flag
self.SendShellCommand("setprop dev.bootcomplete 0",
retry_count=retry_count)
self.SendShellCommand("stop", retry_count=retry_count)
try:
output = self.SendCommand("sync", retry_count=retry_count)
except errors.AbortError, e:
error = e
output = e.msg
if "Read-only file system" in output:
logger.SilentLog(output)
logger.Log("Remounting read-only filesystem")
self.SendCommand("remount")
output = self.SendCommand("sync", retry_count=retry_count)
elif "No space left on device" in output:
logger.SilentLog(output)
logger.Log("Restarting device runtime")
self.SendShellCommand("stop", retry_count=retry_count)
output = self.SendCommand("sync", retry_count=retry_count)
self.SendShellCommand("start", retry_count=retry_count)
elif error is not None:
# exception occurred that cannot be recovered from
raise error
logger.SilentLog(output)
if runtime_restart:
# start runtime and wait till boot complete flag is set
self.SendShellCommand("start", retry_count=retry_count)
self.WaitForBootComplete()
# press the MENU key, this will disable key guard if runtime is started
# with ro.monkey set to 1
self.SendShellCommand("input keyevent 82", retry_count=retry_count)
else:
self.WaitForDevicePm()
return output
def GetSerialNumber(self):
"""Returns the serial number of the targeted device."""
return self.SendCommand("get-serialno").strip()
| aospx-kitkat/platform_external_chromium_org | third_party/android_testrunner/adb_interface.py | Python | bsd-3-clause | 18,939 |
# Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cdn.transport.validators import schema_base
class ServiceSchema(schema_base.SchemaBase):
"""JSON Schmema validation for /service
"""
schema = {
'service': {
'PUT': {
'type': 'object',
'properties': {
"domains": {
'type': 'array',
'items': {
'type': "object",
"properties": {
"domain": {
"type": "string",
'pattern': "^(([a-zA-Z]{1})|"
"([a-zA-Z]{1}[a-zA-Z]{1})|"
"([a-zA-Z]{1}[0-9]{1})"
"|([0-9]{1}[a-zA-Z]{1})|"
"([a-zA-Z0-9][a-zA-Z0-9-_]{1,61}"
"[a-zA-Z0-9]))\."
"([a-zA-Z]{2,6}|"
"[a-zA-Z0-9-]{2,30}\.[a-zA-Z]{2,3})$"
}}},
'required': True,
"minItems": 1},
"origins": {
'type': 'array',
'items': {
'type': "object",
"properties": {
"origin": {
"type": "string",
"required": True},
"port": {
"type": "integer",
"enum": [
80,
443]},
"ssl": {
"type": "boolean"}},
},
'required': True,
"minItems": 1},
"caching": {
'type': 'array',
'items': {
'type': "object",
"properties": {
"name": {
"type": "string",
"required": True},
"ttl": {
"type": "integer",
"required": True},
"rules": {
"type": "array",
'items': {
'type': "object",
"properties": {
'name': {
'type': 'string'},
'request_url': {
'type': 'string'}}},
}},
},
},
}},
'PATCH': {}},
}
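# A request body sketch this schema is intended to accept (all values are
# illustrative):
#   {
#       "domains": [{"domain": "www.example.com"}],
#       "origins": [{"origin": "origin.example.com", "port": 80,
#                    "ssl": False}],
#       "caching": [{"name": "default", "ttl": 3600,
#                    "rules": [{"name": "index",
#                               "request_url": "/index.html"}]}]
#   }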
| obulpathi/cdn1 | cdn/transport/validators/schemas/service.py | Python | apache-2.0 | 3,716 |
# pylint: disable=g-import-not-at-top
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""contrib module containing volatile or experimental code."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
# Add projects here, they will show up under tf.contrib.
from tensorflow.contrib import batching
from tensorflow.contrib import bayesflow
from tensorflow.contrib import checkpoint
if os.name != "nt":
from tensorflow.contrib import cloud
from tensorflow.contrib import cluster_resolver
from tensorflow.contrib import coder
from tensorflow.contrib import compiler
from tensorflow.contrib import constrained_optimization
from tensorflow.contrib import copy_graph
from tensorflow.contrib import crf
from tensorflow.contrib import cudnn_rnn
from tensorflow.contrib import data
from tensorflow.contrib import deprecated
from tensorflow.contrib import distribute
from tensorflow.contrib import distributions
from tensorflow.contrib import estimator
from tensorflow.contrib import factorization
from tensorflow.contrib import feature_column
from tensorflow.contrib import framework
from tensorflow.contrib import gan
from tensorflow.contrib import graph_editor
from tensorflow.contrib import grid_rnn
from tensorflow.contrib import image
from tensorflow.contrib import input_pipeline
from tensorflow.contrib import integrate
from tensorflow.contrib import keras
from tensorflow.contrib import kernel_methods
from tensorflow.contrib import kfac
from tensorflow.contrib import labeled_tensor
from tensorflow.contrib import layers
from tensorflow.contrib import learn
from tensorflow.contrib import legacy_seq2seq
from tensorflow.contrib import linalg
from tensorflow.contrib import linear_optimizer
from tensorflow.contrib import lookup
from tensorflow.contrib import losses
from tensorflow.contrib import memory_stats
from tensorflow.contrib import metrics
from tensorflow.contrib import mixed_precision
from tensorflow.contrib import model_pruning
from tensorflow.contrib import nccl
from tensorflow.contrib import nn
from tensorflow.contrib import opt
from tensorflow.contrib import periodic_resample
from tensorflow.contrib import predictor
from tensorflow.contrib import proto
from tensorflow.contrib import quantization
from tensorflow.contrib import quantize
from tensorflow.contrib import reduce_slice_ops
from tensorflow.contrib import resampler
from tensorflow.contrib import rnn
from tensorflow.contrib import rpc
from tensorflow.contrib import saved_model
from tensorflow.contrib import seq2seq
from tensorflow.contrib import signal
from tensorflow.contrib import slim
from tensorflow.contrib import solvers
from tensorflow.contrib import sparsemax
from tensorflow.contrib import staging
from tensorflow.contrib import stat_summarizer
from tensorflow.contrib import stateless
from tensorflow.contrib import tensor_forest
from tensorflow.contrib import tensorboard
from tensorflow.contrib import testing
from tensorflow.contrib import tfprof
from tensorflow.contrib import timeseries
from tensorflow.contrib import tpu
from tensorflow.contrib import training
from tensorflow.contrib import util
from tensorflow.contrib.eager.python import tfe as eager
if os.name != "nt":
from tensorflow.contrib.lite.python import lite
from tensorflow.contrib.optimizer_v2 import optimizer_v2_symbols as optimizer_v2
from tensorflow.contrib.receptive_field import receptive_field_api as receptive_field
from tensorflow.contrib.recurrent.python import recurrent_api as recurrent
from tensorflow.contrib.remote_fused_graph import pylib as remote_fused_graph
from tensorflow.contrib.specs import python as specs
from tensorflow.contrib.summary import summary
from tensorflow.python.util.lazy_loader import LazyLoader
ffmpeg = LazyLoader("ffmpeg", globals(),
"tensorflow.contrib.ffmpeg")
del os
del LazyLoader
del absolute_import
del division
del print_function
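# Note: `ffmpeg` above is bound via LazyLoader, so the submodule is imported
# only on first attribute access (e.g. tf.contrib.ffmpeg.decode_audio), which
# keeps `import tensorflow` from paying the ffmpeg import cost up front.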
| jart/tensorflow | tensorflow/contrib/__init__.py | Python | apache-2.0 | 4,581 |
import os, sys
APP_VERSION = '1.1.1'
CURR_DIR = os.path.dirname(os.path.realpath(os.path.join(os.getcwd(), os.path.expanduser('__file__'))))
WIN_EXE_LIB = os.path.normpath(os.path.join(CURR_DIR, 'library'))
if os.path.isdir(WIN_EXE_LIB):
sys.path.insert(0, WIN_EXE_LIB)
def client_main():
from dogepartycli import client
client.main()
def server_main():
from dogepartycli import server
server.main()
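# Console-entry-point sketch (illustrative only; the actual entry points live
# in this project's setup.py):
#   entry_points={'console_scripts': [
#       'dogeparty-client = dogepartycli:client_main',
#       'dogeparty-server = dogepartycli:server_main']}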
| coinwarp/dogeparty-cli | dogepartycli/__init__.py | Python | mit | 424 |
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base constants and handlers."""
from __future__ import annotations
import base64
import datetime
import functools
import hmac
import json
import logging
import os
import re
import time
import urllib
from core import feconf
from core import handler_schema_constants
from core import python_utils
from core import utils
from core.controllers import payload_validator
from core.domain import auth_domain
from core.domain import auth_services
from core.domain import config_domain
from core.domain import config_services
from core.domain import user_services
import webapp2
from typing import Any, Dict, Optional # isort: skip
ONE_DAY_AGO_IN_SECS = -24 * 60 * 60
DEFAULT_CSRF_SECRET = 'oppia csrf secret'
CSRF_SECRET = config_domain.ConfigProperty(
'oppia_csrf_secret', {'type': 'unicode'},
'Text used to encrypt CSRF tokens.', DEFAULT_CSRF_SECRET)
# NOTE: These handlers manage user sessions and serve auth pages. Thus, we
# should never reject or replace them when running in maintenance mode;
# otherwise admins will be unable to access the site.
AUTH_HANDLER_PATHS = (
'/csrfhandler',
'/login',
'/session_begin',
'/session_end',
)
@functools.lru_cache(maxsize=128)
def load_template(filename):
"""Return the HTML file contents at filepath.
Args:
filename: str. Name of the requested HTML file.
Returns:
str. The HTML file content.
"""
filepath = os.path.join(feconf.FRONTEND_TEMPLATES_DIR, filename)
with python_utils.open_file(filepath, 'r') as f:
html_text = f.read()
return html_text
class SessionBeginHandler(webapp2.RequestHandler):
"""Handler for creating new authentication sessions."""
def get(self):
"""Establishes a new auth session."""
auth_services.establish_auth_session(self.request, self.response)
class SessionEndHandler(webapp2.RequestHandler):
"""Handler for destroying existing authentication sessions."""
def get(self):
"""Destroys an existing auth session."""
auth_services.destroy_auth_session(self.response)
class UserFacingExceptions:
"""This class contains all the exception class definitions used."""
class NotLoggedInException(Exception):
"""Error class for users that are not logged in (error code 401)."""
pass
class InvalidInputException(Exception):
"""Error class for invalid input on the user side (error code 400)."""
pass
class UnauthorizedUserException(Exception):
"""Error class for unauthorized access."""
pass
class PageNotFoundException(Exception):
"""Error class for a page not found error (error code 404)."""
pass
class InternalErrorException(Exception):
"""Error class for an internal server side error (error code 500)."""
pass
class TemporaryMaintenanceException(Exception):
"""Error class for when the server is currently down for temporary
maintenance (error code 503).
"""
def __init__(self):
super(
UserFacingExceptions.TemporaryMaintenanceException, self
).__init__(
'Oppia is currently being upgraded, and the site should be up '
'and running again in a few hours. Thanks for your patience!')
class BaseHandler(webapp2.RequestHandler):
"""Base class for all Oppia handlers."""
# Whether to check POST and PUT payloads for CSRF tokens prior to
# processing them. Can be overridden by subclasses if this check is
# not necessary.
REQUIRE_PAYLOAD_CSRF_CHECK = True
# Whether to redirect requests corresponding to a logged-in user who has
# not completed signup in to the signup page. This ensures that logged-in
# users have agreed to the latest terms.
REDIRECT_UNFINISHED_SIGNUPS = True
# What format the get method returns when exception raised, json or html.
GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_HTML
POST_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON
PUT_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON
DELETE_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON
    # Using Dict[str, Any] here because the following schema can have a
    # recursive structure, and mypy doesn't currently support recursive
    # types. See: https://github.com/python/mypy/issues/731
URL_PATH_ARGS_SCHEMAS: Optional[Dict[str, Any]] = None
    # Using Dict[str, Any] here because the following schema can have a
    # recursive structure, and mypy doesn't currently support recursive
    # types. See: https://github.com/python/mypy/issues/731
HANDLER_ARGS_SCHEMAS: Optional[Dict[str, Any]] = None
def __init__(self, request, response): # pylint: disable=super-init-not-called
# Set self.request, self.response and self.app.
self.initialize(request, response)
self.start_time = datetime.datetime.utcnow()
# Initializes the return dict for the handlers.
self.values = {}
# This try-catch block is intended to log cases where getting the
# request payload errors with ValueError: Invalid boundary in multipart
# form: b''. This is done to gather sufficient data to help debug the
# error if it arises in the future.
try:
payload_json_string = self.request.get('payload')
except ValueError as e:
logging.error('%s: request %s', e, self.request)
raise e
# TODO(#13155): Remove the if-else part once all the handlers have had
# schema validation implemented.
if payload_json_string:
self.payload = json.loads(payload_json_string)
else:
self.payload = None
self.iframed = False
self.user_id = None
self.username = None
self.email = None
self.partially_logged_in = False
self.user_is_scheduled_for_deletion = False
self.current_user_is_super_admin = False
# Once the attribute `normalized_request` is type annotated here, make
# sure to fix all the subclasses using normalized_request.get() method
# by removing their type: ignore[union-attr] and using a type cast
# instead to eliminate the possibility on union types.
# e.g. ClassroomAccessValidationHandler.
self.normalized_request = None
self.normalized_payload = None
try:
auth_claims = auth_services.get_auth_claims_from_request(request)
except auth_domain.StaleAuthSessionError:
auth_services.destroy_auth_session(self.response)
self.redirect(user_services.create_login_url(self.request.uri))
return
except auth_domain.UserDisabledError:
auth_services.destroy_auth_session(self.response)
self.redirect(
'/logout?redirect_url=%s' % feconf.PENDING_ACCOUNT_DELETION_URL)
return
except auth_domain.InvalidAuthSessionError:
logging.exception('User session is invalid!')
auth_services.destroy_auth_session(self.response)
self.redirect(user_services.create_login_url(self.request.uri))
return
else:
self.current_user_is_super_admin = (
auth_claims is not None and auth_claims.role_is_super_admin)
if auth_claims:
auth_id = auth_claims.auth_id
user_settings = user_services.get_user_settings_by_auth_id(auth_id)
            if user_settings is None:
                # If the user settings are not yet created and the request
                # leads to the signup page, create new user settings.
                # Otherwise, log out the not-fully-registered user.
                email = auth_claims.email
if 'signup?' in self.request.uri:
user_settings = (
user_services.create_new_user(auth_id, email))
else:
logging.exception(
'Cannot find user %s with email %s on page %s' % (
auth_id, email, self.request.uri))
auth_services.destroy_auth_session(self.response)
return
self.email = user_settings.email
self.values['user_email'] = user_settings.email
self.user_id = user_settings.user_id
if user_settings.deleted:
self.user_is_scheduled_for_deletion = user_settings.deleted
elif (self.REDIRECT_UNFINISHED_SIGNUPS and
not user_services.has_fully_registered_account(self.user_id)):
self.partially_logged_in = True
else:
self.username = user_settings.username
self.values['username'] = self.username
# In order to avoid too many datastore writes, we do not bother
# recording a log-in if the current time is sufficiently close
# to the last log-in time.
if (user_settings.last_logged_in is None or
not utils.are_datetimes_close(
datetime.datetime.utcnow(),
user_settings.last_logged_in)):
user_services.record_user_logged_in(self.user_id)
self.roles = (
[feconf.ROLE_ID_GUEST]
if self.user_id is None else user_settings.roles)
self.user = user_services.get_user_actions_info(self.user_id)
if not self._is_requested_path_currently_accessible_to_user():
auth_services.destroy_auth_session(self.response)
return
self.values['is_super_admin'] = self.current_user_is_super_admin
def dispatch(self):
"""Overrides dispatch method in webapp2 superclass.
Raises:
Exception. The CSRF token is missing.
UnauthorizedUserException. The CSRF token is invalid.
"""
request_split = urllib.parse.urlsplit(self.request.uri)
# If the request is to the old demo server, redirect it permanently to
# the new demo server. (Unless it is a cron job or tasks request,
# because cron job and tasks destination URLs are generated by
# App Engine and we can't change their destination.)
if (
request_split.netloc == 'oppiaserver.appspot.com' and
not request_split.path.startswith(('/cron/', '/task/'))
):
self.redirect('https://oppiatestserver.appspot.com', permanent=True)
return
if not self._is_requested_path_currently_accessible_to_user():
self.handle_exception(
self.TemporaryMaintenanceException(), self.app.debug)
return
if self.user_is_scheduled_for_deletion:
self.redirect(
'/logout?redirect_url=%s' % feconf.PENDING_ACCOUNT_DELETION_URL)
return
if self.partially_logged_in and request_split.path != '/logout':
self.redirect('/logout?redirect_url=%s' % request_split.path)
return
if self.payload is not None and self.REQUIRE_PAYLOAD_CSRF_CHECK:
try:
                # If the user opens a new tab during the signup process, the
                # user_id parameter is set to None and the signup session
                # expires. If the user is on the signup page without a
                # user_id, an exception is raised, which the frontend handles
                # by showing a continue-to-registration modal.
if 'signup' in self.request.uri and not self.user_id:
raise self.UnauthorizedUserException(
'Registration session expired.')
csrf_token = self.request.get('csrf_token')
if not csrf_token:
raise self.UnauthorizedUserException(
'Missing CSRF token. Changes were not saved. '
'Please report this bug.')
is_csrf_token_valid = CsrfTokenManager.is_csrf_token_valid(
self.user_id, csrf_token)
if not is_csrf_token_valid:
raise self.UnauthorizedUserException(
'Your session has expired, and unfortunately your '
'changes cannot be saved. Please refresh the page.')
except Exception as e:
logging.exception('%s: payload %s', e, self.payload)
self.handle_exception(e, self.app.debug)
return
schema_validation_succeeded = True
try:
self.validate_and_normalize_args()
# TODO(#13155): Remove NotImplementedError once all the handlers
# have had schema validation implemented.
except (
NotImplementedError,
self.InternalErrorException,
self.InvalidInputException
) as e:
self.handle_exception(e, self.app.debug)
schema_validation_succeeded = False
if not schema_validation_succeeded:
return
super(BaseHandler, self).dispatch()
def validate_and_normalize_args(self):
"""Validates schema for controller layer handler class arguments.
Raises:
InvalidInputException. Schema validation failed.
NotImplementedError. Schema is not provided in handler class.
"""
handler_class_name = self.__class__.__name__
request_method = self.request.environ['REQUEST_METHOD']
        # For HEAD requests, we use the schema of the GET handler, because
        # HEAD returns just the headers of the GET request.
if request_method == 'HEAD':
request_method = 'GET'
url_path_args = self.request.route_kwargs
if (
handler_class_name in
handler_schema_constants.HANDLER_CLASS_NAMES_WITH_NO_SCHEMA
):
# TODO(#13155): Remove this clause once all the handlers have had
# schema validation implemented.
if self.URL_PATH_ARGS_SCHEMAS or self.HANDLER_ARGS_SCHEMAS:
                raise self.InternalErrorException(
                    'Remove handler class name from '
                    'HANDLER_CLASS_NAMES_WITH_NO_SCHEMA')
return
handler_args = {}
payload_arg_keys = []
request_arg_keys = []
for arg in self.request.arguments():
if arg == 'csrf_token':
# 'csrf_token' has been already validated in the
# dispatch method.
pass
elif arg == 'source':
source_url = self.request.get('source')
regex_pattern = (
r'http[s]?://(?:[a-zA-Z]|[0-9]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+' # pylint: disable=line-too-long
)
regex_verified_url = re.findall(regex_pattern, source_url)
if not regex_verified_url:
raise self.InvalidInputException(
'Not a valid source url.')
elif arg == 'payload':
payload_args = self.payload
if payload_args is not None:
payload_arg_keys = list(payload_args.keys())
handler_args.update(payload_args)
else:
request_arg_keys.append(arg)
handler_args[arg] = self.request.get(arg)
# For html handlers, extra args are allowed (to accommodate
# e.g. utm parameters which are not used by the backend but
# needed for analytics).
extra_args_are_allowed = (
self.GET_HANDLER_ERROR_RETURN_TYPE == feconf.HANDLER_TYPE_HTML and
request_method == 'GET')
if self.URL_PATH_ARGS_SCHEMAS is None:
raise NotImplementedError(
'Missing schema for url path args in %s handler class.' % (
handler_class_name))
schema_for_url_path_args = self.URL_PATH_ARGS_SCHEMAS
self.request.route_kwargs, errors = (
payload_validator.validate_arguments_against_schema(
url_path_args, schema_for_url_path_args, extra_args_are_allowed)
)
if errors:
raise self.InvalidInputException(
'At \'%s\' these errors are happening:\n%s' % (
self.request.uri, '\n'.join(errors)
)
)
# This check ensures that if a request method is not defined
# in the handler class then schema validation will not raise
# NotImplementedError for that corresponding request method.
if request_method in ['GET', 'POST', 'PUT', 'DELETE'] and (
getattr(self.__class__, request_method.lower()) ==
getattr(BaseHandler, request_method.lower())):
return
try:
schema_for_request_method = self.HANDLER_ARGS_SCHEMAS[
request_method]
except Exception:
raise NotImplementedError(
'Missing schema for %s method in %s handler class.' % (
request_method, handler_class_name))
allow_string_to_bool_conversion = request_method in ['GET', 'DELETE']
normalized_arg_values, errors = (
payload_validator.validate_arguments_against_schema(
handler_args, schema_for_request_method, extra_args_are_allowed,
allow_string_to_bool_conversion)
)
self.normalized_payload = {
arg: normalized_arg_values.get(arg) for arg in payload_arg_keys
}
self.normalized_request = {
arg: normalized_arg_values.get(arg) for arg in request_arg_keys
}
# The following keys are absent in request/payload but present in
# normalized_arg_values because these args are populated from their
# default_value provided in the schema.
keys_that_correspond_to_default_values = list(
set(normalized_arg_values.keys()) -
set(payload_arg_keys + request_arg_keys)
)
# Populate the payload/request with the default args before passing
# execution onwards to the handler.
for arg in keys_that_correspond_to_default_values:
if request_method in ['GET', 'DELETE']:
self.normalized_request[arg] = normalized_arg_values.get(arg)
else:
self.normalized_payload[arg] = normalized_arg_values.get(arg)
if errors:
raise self.InvalidInputException('\n'.join(errors))
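    # A handler schema sketch (hypothetical handler; the exact schema dict
    # shape is the one accepted by payload_validator):
    #   class GreetingHandler(BaseHandler):
    #       URL_PATH_ARGS_SCHEMAS = {}
    #       HANDLER_ARGS_SCHEMAS = {
    #           'GET': {
    #               'name': {
    #                   'schema': {'type': 'basestring'},
    #                   'default_value': 'world'
    #               }
    #           }
    #       }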
@property
def current_user_is_site_maintainer(self):
"""Returns whether the current user is a site maintainer.
A super admin or release coordinator is also a site maintainer.
Returns:
bool. Whether the current user is a site maintainer.
"""
return (
self.current_user_is_super_admin or
feconf.ROLE_ID_RELEASE_COORDINATOR in self.roles)
def _is_requested_path_currently_accessible_to_user(self):
"""Checks whether the requested path is currently accessible to user.
Returns:
bool. Whether the requested path is currently accessible to user.
"""
return (
self.request.path in AUTH_HANDLER_PATHS or
not feconf.ENABLE_MAINTENANCE_MODE or
self.current_user_is_site_maintainer)
def get(self, *args, **kwargs): # pylint: disable=unused-argument
"""Base method to handle GET requests."""
logging.warning('Invalid URL requested: %s', self.request.uri)
self.error(404)
self._render_exception(
404, {
'error': 'Could not find the page %s.' % self.request.uri})
def post(self, *args): # pylint: disable=unused-argument
"""Base method to handle POST requests.
Raises:
PageNotFoundException. Page not found error (error code 404).
"""
raise self.PageNotFoundException
def put(self, *args): # pylint: disable=unused-argument
"""Base method to handle PUT requests.
Raises:
PageNotFoundException. Page not found error (error code 404).
"""
raise self.PageNotFoundException
def delete(self, *args): # pylint: disable=unused-argument
"""Base method to handle DELETE requests.
Raises:
PageNotFoundException. Page not found error (error code 404).
"""
raise self.PageNotFoundException
def head(self, *args, **kwargs):
"""Method to handle HEAD requests. The webapp library automatically
makes sure that HEAD only returns the headers of GET request.
"""
return self.get(*args, **kwargs)
def render_json(self, values: Dict[Any, Any]) -> None:
"""Prepares JSON response to be sent to the client.
Args:
values: dict. The key-value pairs to encode in the JSON response.
"""
self.response.content_type = 'application/json; charset=utf-8'
self.response.headers['Content-Disposition'] = (
'attachment; filename="oppia-attachment.txt"')
self.response.headers['Strict-Transport-Security'] = (
'max-age=31536000; includeSubDomains')
self.response.headers['X-Content-Type-Options'] = 'nosniff'
self.response.headers['X-Xss-Protection'] = '1; mode=block'
json_output = json.dumps(values, cls=utils.JSONEncoderForHTML)
# Write expects bytes, thus we need to encode the JSON output.
self.response.write(
b'%s%s' % (feconf.XSSI_PREFIX, json_output.encode('utf-8')))
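    # Client-side note (illustrative): because responses are prefixed with
    # feconf.XSSI_PREFIX, a consumer strips the prefix before decoding,
    # e.g. json.loads(body[len(XSSI_PREFIX):]).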
def render_downloadable_file(self, file, filename, content_type):
"""Prepares downloadable content to be sent to the client.
Args:
file: BytesIO. The data of the downloadable file.
filename: str. The name of the file to be rendered.
content_type: str. The type of file to be rendered.
"""
self.response.headers['Content-Type'] = content_type
self.response.headers['Content-Disposition'] = (
'attachment; filename=%s' % filename)
self.response.charset = 'utf-8'
# We use this super in order to bypass the write method
# in webapp2.Response, since webapp2.Response doesn't support writing
# bytes.
super(webapp2.Response, self.response).write(file.getvalue()) # pylint: disable=bad-super-call
def render_template(self, filepath, iframe_restriction='DENY'):
"""Prepares an HTML response to be sent to the client.
Args:
filepath: str. The template filepath.
            iframe_restriction: str or None. Possible values are
                'DENY' and 'SAMEORIGIN':
                DENY: Strictly prevents the template from loading in an
                    iframe.
                SAMEORIGIN: The template can only be displayed in a frame
                    on the same origin as the page itself.
"""
        # 'no-store' must be used to properly invalidate the cache when we
        # deploy a new version; using only 'no-cache' doesn't work properly.
self.response.cache_control.no_store = True
self.response.cache_control.must_revalidate = True
self.response.headers['Strict-Transport-Security'] = (
'max-age=31536000; includeSubDomains')
self.response.headers['X-Content-Type-Options'] = 'nosniff'
self.response.headers['X-Xss-Protection'] = '1; mode=block'
if iframe_restriction is not None:
if iframe_restriction in ['SAMEORIGIN', 'DENY']:
self.response.headers['X-Frame-Options'] = (
str(iframe_restriction))
else:
raise Exception(
'Invalid X-Frame-Options: %s' % iframe_restriction)
self.response.expires = 'Mon, 01 Jan 1990 00:00:00 GMT'
self.response.pragma = 'no-cache'
self.response.write(load_template(filepath))
def _render_exception_json_or_html(self, return_type, values):
"""Renders an error page, or an error JSON response.
Args:
return_type: str. Indicator to return JSON or HTML.
values: dict. The key-value pairs to include in the response.
"""
method = self.request.environ['REQUEST_METHOD']
if return_type == feconf.HANDLER_TYPE_HTML and method == 'GET':
self.values.update(values)
if self.iframed:
self.render_template(
'error-iframed.mainpage.html', iframe_restriction=None)
elif values['status_code'] == 503:
self.render_template('maintenance-page.mainpage.html')
elif values['status_code'] == 404:
# Only 404 routes can be handled with angular router as it only
# has access to the path, not to the status code.
# That's why 404 status code is treated differently.
self.render_template('oppia-root.mainpage.html')
else:
self.render_template(
'error-page-%s.mainpage.html' % values['status_code'])
else:
if return_type not in (
feconf.HANDLER_TYPE_JSON, feconf.HANDLER_TYPE_DOWNLOADABLE):
logging.warning(
'Not a recognized return type: defaulting to render JSON.')
self.render_json(values)
def _render_exception(self, error_code, values):
"""Renders an error page, or an error JSON response.
Args:
error_code: int. The HTTP status code (expected to be one of
400, 401, 404 or 500).
values: dict. The key-value pairs to include in the response.
"""
# The error codes here should be in sync with the error pages
# generated via webpack.common.config.ts.
assert error_code in [400, 401, 404, 500, 503]
values['status_code'] = error_code
method = self.request.environ['REQUEST_METHOD']
if method == 'GET':
self._render_exception_json_or_html(
self.GET_HANDLER_ERROR_RETURN_TYPE, values)
elif method == 'POST':
self._render_exception_json_or_html(
self.POST_HANDLER_ERROR_RETURN_TYPE, values)
elif method == 'PUT':
self._render_exception_json_or_html(
self.PUT_HANDLER_ERROR_RETURN_TYPE, values)
elif method == 'DELETE':
self._render_exception_json_or_html(
self.DELETE_HANDLER_ERROR_RETURN_TYPE, values)
else:
logging.warning('Not a recognized request method.')
self._render_exception_json_or_html(None, values)
def handle_exception(self, exception, unused_debug_mode):
"""Overwrites the default exception handler.
Args:
exception: Exception. The exception that was thrown.
unused_debug_mode: bool. True if the web application is running
in debug mode.
"""
if isinstance(exception, self.NotLoggedInException):
# This checks if the response should be JSON or HTML.
# For GET requests, there is no payload, so we check against
# GET_HANDLER_ERROR_RETURN_TYPE.
# Otherwise, we check whether self.payload exists.
if (self.payload is not None or
self.GET_HANDLER_ERROR_RETURN_TYPE ==
feconf.HANDLER_TYPE_JSON):
self.error(401)
self._render_exception(
401, {
'error': (
'You must be logged in to access this resource.')})
else:
self.redirect(user_services.create_login_url(self.request.uri))
return
logging.exception(
'Exception raised at %s: %s', self.request.uri, exception)
if isinstance(exception, self.PageNotFoundException):
logging.warning('Invalid URL requested: %s', self.request.uri)
self.error(404)
self._render_exception(
404, {
'error': 'Could not find the page %s.' % self.request.uri})
return
logging.exception('Exception raised: %s', exception)
if isinstance(exception, self.UnauthorizedUserException):
self.error(401)
self._render_exception(401, {'error': str(exception)})
return
if isinstance(exception, self.InvalidInputException):
self.error(400)
self._render_exception(400, {'error': str(exception)})
return
if isinstance(exception, self.InternalErrorException):
self.error(500)
self._render_exception(500, {'error': str(exception)})
return
if isinstance(exception, self.TemporaryMaintenanceException):
self.error(503)
self._render_exception(503, {'error': str(exception)})
return
self.error(500)
self._render_exception(500, {'error': str(exception)})
InternalErrorException = UserFacingExceptions.InternalErrorException
InvalidInputException = UserFacingExceptions.InvalidInputException
NotLoggedInException = UserFacingExceptions.NotLoggedInException
PageNotFoundException = UserFacingExceptions.PageNotFoundException
UnauthorizedUserException = UserFacingExceptions.UnauthorizedUserException
TemporaryMaintenanceException = (
UserFacingExceptions.TemporaryMaintenanceException)
class Error404Handler(BaseHandler):
"""Handles 404 errors."""
pass
class CsrfTokenManager:
"""Manages page/user tokens in memcache to protect against CSRF."""
# Max age of the token (48 hours).
_CSRF_TOKEN_AGE_SECS = 60 * 60 * 48
# Default user id for non-logged-in users.
_USER_ID_DEFAULT = 'non_logged_in_user'
@classmethod
def init_csrf_secret(cls):
"""Verify that non-default CSRF secret exists; creates one if not."""
# Any non-default value is fine.
if CSRF_SECRET.value and CSRF_SECRET.value != DEFAULT_CSRF_SECRET:
return
# Initialize to random value.
config_services.set_property(
feconf.SYSTEM_COMMITTER_ID, CSRF_SECRET.name,
base64.urlsafe_b64encode(os.urandom(20)))
@classmethod
def _create_token(cls, user_id, issued_on):
"""Creates a new CSRF token.
Args:
user_id: str|None. The user_id for which the token is generated.
issued_on: float. The timestamp at which the token was issued.
Returns:
str. The generated CSRF token.
"""
cls.init_csrf_secret()
        # The token has two parts: the issue time in plain text, and an HMAC
        # digest of the user id and the issue time.
if user_id is None:
user_id = cls._USER_ID_DEFAULT
# Round time to seconds.
issued_on = str(int(issued_on))
digester = hmac.new(CSRF_SECRET.value.encode('utf-8'))
digester.update(user_id.encode('utf-8'))
digester.update(b':')
digester.update(issued_on.encode('utf-8'))
digest = digester.digest()
# The b64encode returns bytes, so we first need to decode the returned
# bytes to string.
token = '%s/%s' % (
issued_on, base64.urlsafe_b64encode(digest).decode('utf-8'))
return token
@classmethod
def _get_current_time(cls):
"""Returns the current server time.
Returns:
float. The time in seconds as floating point number.
"""
return time.time()
@classmethod
def create_csrf_token(cls, user_id):
"""Creates a CSRF token for the given user_id.
Args:
user_id: str|None. The user_id for whom the token is generated.
Returns:
str. The generated CSRF token.
"""
return cls._create_token(user_id, cls._get_current_time())
@classmethod
def is_csrf_token_valid(cls, user_id, token):
"""Validates a given CSRF token.
Args:
user_id: str|None. The user_id to validate the CSRF token against.
token: str. The CSRF token to validate.
Returns:
bool. Whether the given CSRF token is valid.
"""
try:
parts = token.split('/')
if len(parts) != 2:
return False
issued_on = int(parts[0])
age = cls._get_current_time() - issued_on
if age > cls._CSRF_TOKEN_AGE_SECS:
return False
authentic_token = cls._create_token(user_id, issued_on)
if authentic_token == token:
return True
return False
except Exception:
return False
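# Round-trip sketch of the token scheme above (illustrative):
#   token = CsrfTokenManager.create_csrf_token('uid_abc')
#   CsrfTokenManager.is_csrf_token_valid('uid_abc', token)    # -> True
#   CsrfTokenManager.is_csrf_token_valid('uid_other', token)  # -> False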
class CsrfTokenHandler(BaseHandler):
"""Handles sending CSRF tokens to the frontend."""
GET_HANDLER_ERROR_RETURN_TYPE = feconf.HANDLER_TYPE_JSON
REDIRECT_UNFINISHED_SIGNUPS = False
def get(self):
csrf_token = CsrfTokenManager.create_csrf_token(
self.user_id)
self.render_json({
'token': csrf_token,
})
class OppiaMLVMHandler(BaseHandler):
"""Base class for the handlers that communicate with Oppia-ML VM instances.
"""
def extract_request_message_vm_id_and_signature(self):
"""Returns the OppiaMLAuthInfo domain object containing
information from the incoming request that is necessary for
authentication.
        Since the incoming request can be either a protobuf-serialized binary
        or a JSON object, the derived classes must implement the necessary
        logic to decode the incoming request and return a tuple of size 3,
        where the message is at index 0, the vm_id at index 1 and the
        signature at index 2.
Raises:
NotImplementedError. The derived child classes must implement the
necessary logic as described above.
"""
raise NotImplementedError
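# A minimal sketch of a derived handler (illustrative only; the payload keys
# 'message', 'vm_id' and 'signature', and the assumption that BaseHandler has
# parsed the request into self.payload, are not part of this module):
#
#     class HypotheticalVMHandler(OppiaMLVMHandler):
#         def extract_request_message_vm_id_and_signature(self):
#             payload = self.payload or {}
#             return (
#                 payload.get('message'),
#                 payload.get('vm_id'),
#                 payload.get('signature'))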
|
brianrodri/oppia
|
core/controllers/base.py
|
Python
|
apache-2.0
| 35,014
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import textwrap
from contextlib import closing
from xml.etree import ElementTree
from pants.backend.jvm.subsystems.scala_platform import ScalaPlatform
from pants.backend.jvm.subsystems.shader import Shader
from pants.backend.jvm.targets.jar_dependency import JarDependency
from pants.backend.jvm.tasks.jvm_compile.analysis_tools import AnalysisTools
from pants.backend.jvm.tasks.jvm_compile.jvm_compile import JvmCompile
from pants.backend.jvm.tasks.jvm_compile.scala.zinc_analysis import ZincAnalysis
from pants.backend.jvm.tasks.jvm_compile.scala.zinc_analysis_parser import ZincAnalysisParser
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TaskError
from pants.base.hash_utils import hash_file
from pants.base.workunit import WorkUnitLabel
from pants.java.distribution.distribution import DistributionLocator
from pants.option.custom_types import dict_option
from pants.util.contextutil import open_zip
from pants.util.dirutil import relativize_paths, safe_open
# Well known metadata file required to register scalac plugins with nsc.
_PLUGIN_INFO_FILE = 'scalac-plugin.xml'
class ZincCompile(JvmCompile):
"""Compile Scala and Java code using Zinc."""
_ZINC_MAIN = 'org.pantsbuild.zinc.Main'
_name = 'zinc'
_supports_concurrent_execution = True
@staticmethod
def write_plugin_info(resources_dir, target):
root = os.path.join(resources_dir, target.id)
plugin_info_file = os.path.join(root, _PLUGIN_INFO_FILE)
with safe_open(plugin_info_file, 'w') as f:
f.write(textwrap.dedent("""
<plugin>
<name>{}</name>
<classname>{}</classname>
</plugin>
""".format(target.plugin, target.classname)).strip())
return root, plugin_info_file
@classmethod
def subsystem_dependencies(cls):
return super(ZincCompile, cls).subsystem_dependencies() + (ScalaPlatform, DistributionLocator)
@classmethod
def get_args_default(cls, bootstrap_option_values):
return ('-S-encoding', '-SUTF-8', '-S-g:vars')
@classmethod
def get_warning_args_default(cls):
return ('-S-deprecation', '-S-unchecked')
@classmethod
def get_no_warning_args_default(cls):
return ('-S-nowarn',)
@classmethod
def register_options(cls, register):
super(ZincCompile, cls).register_options(register)
register('--plugins', advanced=True, action='append', fingerprint=True,
help='Use these scalac plugins.')
register('--plugin-args', advanced=True, type=dict_option, default={}, fingerprint=True,
help='Map from plugin name to list of arguments for that plugin.')
register('--name-hashing', advanced=True, action='store_true', default=False, fingerprint=True,
help='Use zinc name hashing.')
cls.register_jvm_tool(register,
'zinc',
classpath=[
JarDependency('org.pantsbuild', 'zinc', '1.0.8')
],
main=cls._ZINC_MAIN,
custom_rules=[
# The compiler-interface and sbt-interface tool jars carry xsbt and
# xsbti interfaces that are used across the shaded tool jar boundary so
# we preserve these root packages wholesale along with the core scala
# APIs.
Shader.exclude_package('scala', recursive=True),
Shader.exclude_package('xsbt', recursive=True),
Shader.exclude_package('xsbti', recursive=True),
])
def sbt_jar(name, **kwargs):
return JarDependency(org='com.typesafe.sbt', name=name, rev='0.13.9', **kwargs)
cls.register_jvm_tool(register,
'compiler-interface',
classpath=[
sbt_jar(name='compiler-interface',
classifier='sources',
# We just want the single compiler-interface jar and not its
# dep on scala-lang
intransitive=True)
])
cls.register_jvm_tool(register,
'sbt-interface',
classpath=[
sbt_jar(name='sbt-interface',
# We just want the single sbt-interface jar and not its dep
# on scala-lang
intransitive=True)
])
    # By default we expect that no plugin-jars classpath_spec is filled in by
    # the user, so we accept an empty classpath.
cls.register_jvm_tool(register, 'plugin-jars', classpath=[])
def select(self, target):
return target.has_sources('.java') or target.has_sources('.scala')
def select_source(self, source_file_path):
return source_file_path.endswith('.java') or source_file_path.endswith('.scala')
def __init__(self, *args, **kwargs):
super(ZincCompile, self).__init__(*args, **kwargs)
# A directory independent of any other classpath which can contain per-target
# plugin resource files.
self._plugin_info_dir = os.path.join(self.workdir, 'scalac-plugin-info')
self._lazy_plugin_args = None
def create_analysis_tools(self):
return AnalysisTools(DistributionLocator.cached().real_home, ZincAnalysisParser(), ZincAnalysis)
def zinc_classpath(self):
    # Zinc takes advantage of tools.jar if it's present on the classpath.
    # For example, com.sun.tools.javac.Main is used for in-process Java compilation.
def locate_tools_jar():
try:
return DistributionLocator.cached(jdk=True).find_libs(['tools.jar'])
except DistributionLocator.Error:
self.context.log.info('Failed to locate tools.jar. '
'Install a JDK to increase performance of Zinc.')
return []
return self.tool_classpath('zinc') + locate_tools_jar()
def compiler_classpath(self):
return ScalaPlatform.global_instance().compiler_classpath(self.context.products)
def extra_compile_time_classpath_elements(self):
# Classpath entries necessary for our compiler plugins.
return self.plugin_jars()
def plugin_jars(self):
"""The classpath entries for jars containing code for enabled plugins."""
if self.get_options().plugins:
return self.tool_classpath('plugin-jars')
else:
return []
def plugin_args(self):
if self._lazy_plugin_args is None:
self._lazy_plugin_args = self._create_plugin_args()
return self._lazy_plugin_args
def _create_plugin_args(self):
if not self.get_options().plugins:
return []
plugin_args = self.get_options().plugin_args
active_plugins = self._find_plugins()
ret = []
for name, jar in active_plugins.items():
ret.append('-S-Xplugin:{}'.format(jar))
for arg in plugin_args.get(name, []):
ret.append('-S-P:{}:{}'.format(name, arg))
return ret
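  # Illustrative result of _create_plugin_args() for a hypothetical plugin
  # named 'acme' with a single argument 'verbose':
  #   ['-S-Xplugin:path/to/acme.jar', '-S-P:acme:verbose']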
def _find_plugins(self):
"""Returns a map from plugin name to plugin jar."""
# Allow multiple flags and also comma-separated values in a single flag.
plugin_names = set([p for val in self.get_options().plugins for p in val.split(',')])
plugins = {}
buildroot = get_buildroot()
for jar in self.plugin_jars():
with open_zip(jar, 'r') as jarfile:
try:
with closing(jarfile.open(_PLUGIN_INFO_FILE, 'r')) as plugin_info_file:
plugin_info = ElementTree.parse(plugin_info_file).getroot()
if plugin_info.tag != 'plugin':
raise TaskError(
'File {} in {} is not a valid scalac plugin descriptor'.format(_PLUGIN_INFO_FILE,
jar))
name = plugin_info.find('name').text
if name in plugin_names:
if name in plugins:
raise TaskError('Plugin {} defined in {} and in {}'.format(name, plugins[name], jar))
# It's important to use relative paths, as the compiler flags get embedded in the zinc
# analysis file, and we port those between systems via the artifact cache.
plugins[name] = os.path.relpath(jar, buildroot)
except KeyError:
pass
unresolved_plugins = plugin_names - set(plugins.keys())
if unresolved_plugins:
raise TaskError('Could not find requested plugins: {}'.format(list(unresolved_plugins)))
return plugins
def extra_products(self, target):
"""Override extra_products to produce a plugin information file."""
ret = []
if target.is_scalac_plugin and target.classname:
# NB: We don't yet support explicit in-line compilation of scala compiler plugins from
# the workspace to be used in subsequent compile rounds like we do for annotation processors
# with javac. This would require another GroupTask similar to AptCompile, but for scala.
root, plugin_info_file = self.write_plugin_info(self._plugin_info_dir, target)
ret.append((root, [plugin_info_file]))
return ret
def compile(self, args, classpath, sources, classes_output_dir, upstream_analysis, analysis_file,
log_file, settings):
# We add compiler_classpath to ensure the scala-library jar is on the classpath.
# TODO: This also adds the compiler jar to the classpath, which compiled code shouldn't
# usually need. Be more selective?
# TODO(John Sirois): Do we need to do this at all? If adding scala-library to the classpath is
# only intended to allow target authors to omit a scala-library dependency, then ScalaLibrary
# already overrides traversable_dependency_specs to achieve the same end; arguably at a more
# appropriate level and certainly at a more appropriate granularity.
relativized_classpath = relativize_paths(self.compiler_classpath() + classpath, get_buildroot())
zinc_args = []
zinc_args.extend([
'-log-level', self.get_options().level,
'-analysis-cache', analysis_file,
'-classpath', ':'.join(relativized_classpath),
'-d', classes_output_dir
])
if not self.get_options().colors:
zinc_args.append('-no-color')
if not self.get_options().name_hashing:
zinc_args.append('-no-name-hashing')
if log_file:
zinc_args.extend(['-capture-log', log_file])
zinc_args.extend(['-compiler-interface', self.tool_jar('compiler-interface')])
zinc_args.extend(['-sbt-interface', self.tool_jar('sbt-interface')])
zinc_args.extend(['-scala-path', ':'.join(self.compiler_classpath())])
zinc_args += self.plugin_args()
if upstream_analysis:
zinc_args.extend(['-analysis-map',
','.join('{}:{}'.format(*kv) for kv in upstream_analysis.items())])
zinc_args += args
zinc_args.extend([
'-C-source', '-C{}'.format(settings.source_level),
'-C-target', '-C{}'.format(settings.target_level),
])
zinc_args.extend(settings.args)
jvm_options = list(self._jvm_options)
zinc_args.extend(sources)
self.log_zinc_file(analysis_file)
if self.runjava(classpath=self.zinc_classpath(),
main=self._ZINC_MAIN,
jvm_options=jvm_options,
args=zinc_args,
workunit_name='zinc',
workunit_labels=[WorkUnitLabel.COMPILER]):
raise TaskError('Zinc compile failed.')
def log_zinc_file(self, analysis_file):
self.context.log.debug('Calling zinc on: {} ({})'
.format(analysis_file,
hash_file(analysis_file).upper()
if os.path.exists(analysis_file)
else 'nonexistent'))
|
scode/pants
|
src/python/pants/backend/jvm/tasks/jvm_compile/scala/zinc_compile.py
|
Python
|
apache-2.0
| 12,212
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Children',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('family_name', models.CharField(max_length=200, verbose_name='Family name', blank=True)),
('first_name', models.CharField(max_length=200, verbose_name='Firstname')),
('birthday_date', models.DateField(verbose_name='Birth date')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Examination',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('reason', models.TextField(verbose_name='Reason', blank=True)),
('reason_description', models.TextField(verbose_name='Reason description', blank=True)),
('orl', models.TextField(verbose_name='ORL Sphere', blank=True)),
('visceral', models.TextField(verbose_name='Visceral Sphere', blank=True)),
('pulmo', models.TextField(verbose_name='Cardio-Pulmo Sphere', blank=True)),
('uro_gyneco', models.TextField(verbose_name='Uro-gyneco Sphere', blank=True)),
('periphery', models.TextField(verbose_name='Periphery Sphere', blank=True)),
('general_state', models.TextField(verbose_name='General state', blank=True)),
('medical_examination', models.TextField(verbose_name='Medical examination', blank=True)),
('tests', models.TextField(verbose_name='Tests', blank=True)),
('diagnosis', models.TextField(verbose_name='Diagnosis', blank=True)),
('treatments', models.TextField(verbose_name='Treatments', blank=True)),
('conclusion', models.TextField(verbose_name='Conclusion', blank=True)),
('date', models.DateTimeField(verbose_name='Date')),
('status', models.SmallIntegerField(verbose_name='Status')),
('type', models.SmallIntegerField(verbose_name='Type')),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Patient',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('family_name', models.CharField(max_length=200, verbose_name='Family name')),
('original_name', models.CharField(max_length=200, verbose_name='Original name', blank=True)),
('first_name', models.CharField(max_length=200, verbose_name='Firstname', blank=True)),
('birth_date', models.DateField(verbose_name='Birth date')),
('address_street', models.CharField(max_length=500, verbose_name='Street', blank=True)),
('address_complement', models.CharField(max_length=500, verbose_name='Address complement', blank=True)),
('address_zipcode', models.CharField(max_length=200, verbose_name='Zipcode', blank=True)),
('address_city', models.CharField(max_length=200, verbose_name='City', blank=True)),
('phone', models.CharField(max_length=200, verbose_name='Phone', blank=True)),
('mobile_phone', models.CharField(max_length=200, verbose_name='Mobile phone', blank=True)),
('smoker', models.BooleanField(default=False, verbose_name='Smoker')),
('important_info', models.TextField(verbose_name='Important note', blank=True)),
('surgical_history', models.TextField(verbose_name='Surgical history', blank=True)),
('medical_history', models.TextField(verbose_name='Medical history', blank=True)),
('family_history', models.TextField(verbose_name='Family history', blank=True)),
('trauma_history', models.TextField(verbose_name='Trauma history', blank=True)),
('medical_reports', models.TextField(verbose_name='Medical reports', blank=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='RegularDoctor',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('family_name', models.CharField(max_length=200, verbose_name='Family name')),
('first_name', models.CharField(max_length=200, verbose_name='Firstname')),
('phone', models.CharField(max_length=100, verbose_name='Phone', blank=True)),
('city', models.CharField(max_length=200, verbose_name='City', blank=True)),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='patient',
name='doctor',
field=models.ForeignKey(verbose_name='Regular doctor', blank=True, to='libreosteoweb.RegularDoctor', null=True),
preserve_default=True,
),
migrations.AddField(
model_name='examination',
name='patient',
field=models.ForeignKey(verbose_name='Patient', to='libreosteoweb.Patient'),
preserve_default=True,
),
migrations.AddField(
model_name='examination',
name='therapeut',
field=models.ForeignKey(verbose_name='Therapeut', blank=True, to=settings.AUTH_USER_MODEL, null=True),
preserve_default=True,
),
migrations.AddField(
model_name='children',
name='parent',
field=models.ForeignKey(verbose_name='Parent', to='libreosteoweb.Patient'),
preserve_default=True,
),
]
|
littlejo/Libreosteo
|
libreosteoweb/migrations/0001_initial.py
|
Python
|
gpl-3.0
| 6,226
|
#-*- coding: utf-8 -*-
import os
from django.utils.text import get_valid_filename as get_valid_filename_django
from django.template.defaultfilters import slugify
from django.core.files.uploadedfile import SimpleUploadedFile
class UploadException(Exception):
pass
def handle_upload(request):
if not request.method == "POST":
raise UploadException("AJAX request not valid: must be POST")
if request.is_ajax():
# the file is stored raw in the request
is_raw = True
filename = request.GET.get('qqfile', False) or request.GET.get('filename', False) or ''
upload = SimpleUploadedFile(name=filename, content=request.raw_post_data)
else:
if len(request.FILES) == 1:
# FILES is a dictionary in Django but Ajax Upload gives the uploaded file an
# ID based on a random number, so it cannot be guessed here in the code.
# Rather than editing Ajax Upload to pass the ID in the querystring, note that
# each upload is a separate request so FILES should only have one entry.
# Thus, we can just grab the first (and only) value in the dict.
is_raw = False
            # list() keeps this working on Python 3, where .values() is a view.
            upload = list(request.FILES.values())[0]
filename = upload.name
else:
raise UploadException("AJAX request not valid: Bad Upload")
return upload, filename, is_raw
def get_valid_filename(s):
"""
    Like Django's get_valid_filename, but additionally slugifies the name,
    transliterating umlauts and other non-ASCII characters.
"""
s = get_valid_filename_django(s)
filename, ext = os.path.splitext(s)
filename = slugify(filename)
ext = slugify(ext)
if ext:
return u"%s.%s" % (filename, ext)
else:
return u"%s" % (filename,)
|
MechanisM/django-filer
|
filer/utils/files.py
|
Python
|
bsd-3-clause
| 1,749
|
# Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A client that performs inferences on a ResNet model using the REST API.
The client downloads a test image of a cat, queries the server over the REST API
with the test image repeatedly and measures how long it takes to respond.
The client expects a TensorFlow Serving ModelServer running a ResNet SavedModel
from:
https://github.com/tensorflow/models/tree/master/official/vision/image_classification/resnet#pretrained-models
Typical usage example:
resnet_client.py
"""
from __future__ import print_function
import base64
import io
import json
import numpy as np
from PIL import Image
import requests
# The server URL specifies the endpoint of your server running the ResNet
# model with the name "resnet" and using the predict interface.
SERVER_URL = 'http://localhost:8501/v1/models/resnet:predict'
# The image URL is the location of the image we should send to the server
IMAGE_URL = 'https://tensorflow.org/images/blogs/serving/cat.jpg'
# Current Resnet model in TF Model Garden (as of 7/2021) does not accept JPEG
# as input
MODEL_ACCEPT_JPG = False
def main():
# Download the image
dl_request = requests.get(IMAGE_URL, stream=True)
dl_request.raise_for_status()
if MODEL_ACCEPT_JPG:
# Compose a JSON Predict request (send JPEG image in base64).
jpeg_bytes = base64.b64encode(dl_request.content).decode('utf-8')
predict_request = '{"instances" : [{"b64": "%s"}]}' % jpeg_bytes
else:
    # Compose a JSON Predict request (send the image tensor).
jpeg_rgb = Image.open(io.BytesIO(dl_request.content))
# Normalize and batchify the image
jpeg_rgb = np.expand_dims(np.array(jpeg_rgb) / 255.0, 0).tolist()
predict_request = json.dumps({'instances': jpeg_rgb})
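  # The resulting payload looks like (truncated for brevity):
  #   {"instances": [[[[0.23, 0.19, 0.15], ...], ...]]}
  # i.e. a batch containing one HxWx3 image with values scaled to [0, 1].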
  # Send a few requests to warm up the model.
for _ in range(3):
response = requests.post(SERVER_URL, data=predict_request)
response.raise_for_status()
  # Send a few actual requests and report the average latency.
total_time = 0
num_requests = 10
for _ in range(num_requests):
response = requests.post(SERVER_URL, data=predict_request)
response.raise_for_status()
total_time += response.elapsed.total_seconds()
prediction = response.json()['predictions'][0]
print('Prediction class: {}, avg latency: {} ms'.format(
np.argmax(prediction), (total_time * 1000) / num_requests))
if __name__ == '__main__':
main()
|
tensorflow/serving
|
tensorflow_serving/example/resnet_client.py
|
Python
|
apache-2.0
| 3,042
|
# -*- coding: utf-8 -*-
#
# rkchunk documentation build configuration file, created by
# sphinx-quickstart on Thu Oct 9 10:45:35 2014.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.coverage', 'sphinx.ext.pngmath', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'rkchunk'
copyright = u'2014, Benjamin Bengfort'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'rkchunkdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'rkchunk.tex', u'rkchunk Documentation',
u'Benjamin Bengfort', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'rkchunk', u'rkchunk Documentation',
[u'Benjamin Bengfort'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'rkchunk', u'rkchunk Documentation',
u'Benjamin Bengfort', 'rkchunk', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
bbengfort/rkchunk
|
docs/conf.py
|
Python
|
mit
| 8,035
|
from cms.plugin_pool import plugin_pool
from cms.plugin_base import CMSPluginBase
from django.utils.translation import ugettext_lazy as _
import models
class FilerGalleryPlugin(CMSPluginBase):
model = models.FilerGallery
name = _("Gallery")
render_template = "cmsplugin_filer_gallery/gallery.html"
text_enabled = False
admin_preview = False
def get_folder_images(self, folder, user):
qs_files = folder.files.filter(_file_type_plugin_name='Image')
if user.is_staff:
return qs_files
else:
return qs_files.filter(is_public=True)
def get_children(self, folder):
return folder.get_children()
def render(self, context, instance, placeholder):
images = self.get_folder_images(instance.folder, context['request'].user)
context.update({
'object': instance,
'thumbnail_option': instance.thumbnail_option,
'size': (instance.thumbnail_option.width, instance.thumbnail_option.height),
'crop': instance.thumbnail_option.crop,
'upscale': instance.thumbnail_option.upscale,
'images': images,
'placeholder': placeholder
})
return context
plugin_pool.register_plugin(FilerGalleryPlugin)
|
philippbosch/cmsplugin-filer
|
src/cmsplugin_filer_gallery/cms_plugins.py
|
Python
|
mit
| 1,299
|
import renderdoc as rd
import rdtest
class VK_Leak_Check(rdtest.TestCase):
demos_test_name = 'VK_Leak_Check'
demos_frame_cap = 50000
demos_frame_count = 10
demos_timeout = 120
def check_capture(self):
memory: int = rd.GetCurrentProcessMemoryUsage()
if memory > 500*1000*1000:
raise rdtest.TestFailureException("Memory usage of {} is too high".format(memory))
rdtest.log.success("Capture {} opened with reasonable memory ({})".format(self.demos_frame_cap, memory))
|
Zorro666/renderdoc
|
util/test/tests/Vulkan/VK_Leak_Check.py
|
Python
|
mit
| 525
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Licensed under the GNU General Public License, version 3.
# See the file http://www.gnu.org/copyleft/gpl.txt
from pisi.actionsapi import cmaketools
from pisi.actionsapi import get
from pisi.actionsapi import pisitools
def setup():
cmaketools.configure("-DCMAKE_BUILD_TYPE:STRING=Release \
-DCMAKE_INSTALL_PREFIX=/usr \
-DCMAKE_INSTALL_LIBDIR=/usr/lib")
def build():
cmaketools.make()
def install():
cmaketools.rawInstall("DESTDIR=%s" % get.installDIR())
pisitools.dodoc("AUTHORS", "COPYING")
|
vdemir/pisi_package
|
LXQT/addon/qterminal/actions.py
|
Python
|
gpl-3.0
| 607
|
blah = 33
|
sjdv1982/seamless
|
seamless/graphs/multi_module/mytestpackage/mod4.py
|
Python
|
mit
| 10
|
# stdlib
from collections import defaultdict
import sys
from typing import Any as TypeAny
from typing import Callable
from typing import Dict
from typing import KeysView
from typing import List as TypeList
from typing import Set
# third party
from cachetools import cached
from cachetools.keys import hashkey
# relative
from ...ast import add_classes
from ...ast import add_methods
from ...ast import add_modules
from ...ast import globals
from ...logger import traceback_and_raise
from .union import lazy_pairing
def get_cache() -> Dict:
return dict()
@cached(cache=get_cache(), key=lambda path, lib_ast: hashkey(path))
def solve_ast_type_functions(path: str, lib_ast: globals.Globals) -> KeysView:
root = lib_ast
for path_element in path.split("."):
root = getattr(root, path_element)
return root.attrs.keys()
def get_allowed_functions(
lib_ast: globals.Globals, union_types: TypeList[str]
) -> Dict[str, bool]:
"""
This function generates a set of functions that can go into a union type.
A function has to meet the following requirements to be present on a union type:
1. If it's present on all Class attributes associated with the union types
on the ast, add it.
2. If it's not present on all Class attributes associated with the union
types, check if they exist on the original type functions list. If they
do exist, drop it, if not, add it.
Args:
lib_ast (Globals): the AST on which we want to generate the union pointer.
union_types (List[str]): the qualnames of the types on which we want a union.
Returns:
allowed_functions (dict): The keys of the dict are function names (str)
and the values are Bool (if they are allowed or not).
"""
allowed_functions: Dict[str, bool] = defaultdict(lambda: True)
def solve_real_type_functions(path: str) -> Set[str]:
parts = path.split(".")
klass_name = parts[-1]
        # TODO: a better way. Look at https://github.com/OpenMined/PySyft/issues/5249
        # This is a workaround for the fact that we can't
        # `import torch.return_types` and fetch it from `sys.modules`.
if parts[-2] == "return_types":
modu = getattr(sys.modules["torch"], "return_types")
else:
modu = sys.modules[".".join(parts[:-1])]
return set(dir(getattr(modu, klass_name)))
for union_type in union_types:
real_type_function_set = solve_real_type_functions(union_type)
ast_type_function_set = solve_ast_type_functions(union_type, lib_ast)
rejected_function_set = real_type_function_set - ast_type_function_set
for accepted_function in ast_type_function_set:
allowed_functions[accepted_function] &= True
for rejected_function in rejected_function_set:
allowed_functions[rejected_function] = False
return allowed_functions
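# Illustrative example (hypothetical union of torch.Tensor and
# syft.lib.python.Int):
# - a method present in the AST entries of both types (e.g. '__add__') keeps
#   its default of True;
# - a method on the real torch.Tensor that is missing from its AST entry lands
#   in rejected_function_set and is forced to False;
# - a name unknown to both the AST and the real types defaults to True.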
def create_union_ast(
lib_ast: globals.Globals, client: TypeAny = None
) -> globals.Globals:
ast = globals.Globals(client)
modules = ["syft", "syft.lib", "syft.lib.misc", "syft.lib.misc.union"]
classes = []
methods = []
for klass in lazy_pairing.keys():
classes.append(
(
f"syft.lib.misc.union.{klass.__name__}",
f"syft.lib.misc.union.{klass.__name__}",
klass,
)
)
union_types = lazy_pairing[klass]
allowed_functions = get_allowed_functions(lib_ast, union_types)
for target_method, allowed in allowed_functions.items():
if not allowed:
continue
def generate_func(target_method: str) -> Callable:
def func(self: TypeAny, *args: TypeAny, **kwargs: TypeAny) -> TypeAny:
func = getattr(self, target_method, None)
if func:
return func(*args, **kwargs)
else:
traceback_and_raise(
ValueError(
f"Can't call {target_method} on {klass} with the instance type of {type(self)}"
)
)
return func
def generate_attribute(target_attribute: str) -> TypeAny:
def prop_get(self: TypeAny) -> TypeAny:
prop = getattr(self, target_attribute, None)
if prop is not None:
return prop
                    else:
                        # Actually raise the error; the original built the
                        # ValueError without raising it.
                        traceback_and_raise(
                            ValueError(
                                f"Can't call {target_attribute} on {klass} with the instance type of {type(self)}"
                            )
                        )
def prop_set(self: TypeAny, value: TypeAny) -> TypeAny:
setattr(self, target_attribute, value)
return property(prop_get, prop_set)
# TODO: Support dynamic properties for types in AST
# torch.Tensor.grad and torch.Tensor.data are not in the class
# Issue: https://github.com/OpenMined/PySyft/issues/5338
if target_method == "grad" and "Tensor" in klass.__name__:
setattr(klass, target_method, generate_attribute(target_method))
methods.append(
(
f"syft.lib.misc.union.{klass.__name__}.{target_method}",
"torch.Tensor",
)
)
continue
elif target_method == "data" and "Tensor" in klass.__name__:
setattr(klass, target_method, generate_attribute(target_method))
else:
setattr(klass, target_method, generate_func(target_method))
methods.append(
(
f"syft.lib.misc.union.{klass.__name__}.{target_method}",
"syft.lib.python.Any",
)
)
add_modules(ast, modules)
add_classes(ast, classes)
add_methods(ast, methods)
for ast_klass in ast.classes:
ast_klass.create_pointer_class()
ast_klass.create_send_method()
ast_klass.create_storable_object_attr_convenience_methods()
return ast
|
OpenMined/PySyft
|
packages/syft/src/syft/lib/misc/__init__.py
|
Python
|
apache-2.0
| 6,217
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Gouthaman Balaraman
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
"""
Support for SQLAlchemy. Provides SQLAlchemyTarget for storing in databases
supported by SQLAlchemy. The user would be responsible for installing the
required database driver to connect using SQLAlchemy.
A minimal example of a job that copies data to a database using SQLAlchemy is
shown below:
.. code-block:: python
from sqlalchemy import String
import luigi
from luigi.contrib import sqla
class SQLATask(sqla.CopyToTable):
# columns defines the table schema, with each element corresponding
# to a column in the format (args, kwargs) which will be sent to
# the sqlalchemy.Column(*args, **kwargs)
columns = [
(["item", String(64)], {"primary_key": True}),
(["property", String(64)], {})
]
connection_string = "sqlite://" # in memory SQLite database
table = "item_property" # name of the table to store data
def rows(self):
for row in [("item1" "property1"), ("item2", "property2")]:
yield row
if __name__ == '__main__':
task = SQLATask()
luigi.build([task], local_scheduler=True)
If the target table where the data needs to be copied already exists, then
the column schema definition can be skipped and instead the reflect flag
can be set to True. Here is a modified version of the above example:
.. code-block:: python
from sqlalchemy import String
import luigi
from luigi.contrib import sqla
class SQLATask(sqla.CopyToTable):
# If database table is already created, then the schema can be loaded
# by setting the reflect flag to True
reflect = True
connection_string = "sqlite://" # in memory SQLite database
table = "item_property" # name of the table to store data
def rows(self):
for row in [("item1" "property1"), ("item2", "property2")]:
yield row
if __name__ == '__main__':
task = SQLATask()
luigi.build([task], local_scheduler=True)
In the above examples, the data that needs to be copied was directly provided by
overriding the rows method. Alternately, if the data comes from another task, the
modified example would look as shown below:
.. code-block:: python
from sqlalchemy import String
import luigi
from luigi.contrib import sqla
from luigi.mock import MockFile
class BaseTask(luigi.Task):
def output(self):
return MockFile("BaseTask")
def run(self):
out = self.output().open("w")
TASK_LIST = ["item%d\\tproperty%d\\n" % (i, i) for i in range(10)]
for task in TASK_LIST:
out.write(task)
out.close()
class SQLATask(sqla.CopyToTable):
# columns defines the table schema, with each element corresponding
# to a column in the format (args, kwargs) which will be sent to
# the sqlalchemy.Column(*args, **kwargs)
columns = [
(["item", String(64)], {"primary_key": True}),
(["property", String(64)], {})
]
connection_string = "sqlite://" # in memory SQLite database
table = "item_property" # name of the table to store data
def requires(self):
return BaseTask()
if __name__ == '__main__':
task1, task2 = SQLATask(), BaseTask()
luigi.build([task1, task2], local_scheduler=True)
In the above example, the output from `BaseTask` is copied into the
database. Here we did not have to implement the `rows` method, because the
default `rows` implementation assumes every line is a row with column values
separated by a tab. One can set the `column_separator` option on the task if
the values are, say, comma-separated instead of tab-separated.
You can pass in database specific connection arguments by setting the connect_args
dictionary. The options will be passed directly to the DBAPI's connect method as
keyword arguments.
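For example (an illustrative sketch; the accepted keys depend on the DBAPI
driver you use):
.. code-block:: python
    class SQLATask(sqla.CopyToTable):
        connection_string = "postgresql://localhost/mydb"
        table = "item_property"
        reflect = True
        connect_args = {"connect_timeout": 10}
        def rows(self):
            yield ("item1", "property1")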
The other option to `sqla.CopyToTable` that can be of help with performance aspect is the
`chunk_size`. The default is 5000. This is the number of rows that will be inserted in
a transaction at a time. Depending on the size of the inserts, this value can be tuned
for performance.
See here for a `tutorial on building task pipelines using luigi
<http://gouthamanbalaraman.com/blog/building-luigi-task-pipeline.html>`_ and
using `SQLAlchemy in workflow pipelines <http://gouthamanbalaraman.com/blog/sqlalchemy-luigi-workflow-pipeline.html>`_.
Author: Gouthaman Balaraman
Date: 01/02/2015
"""
import abc
import datetime
import itertools
import logging
import luigi
import os
import sqlalchemy
class SQLAlchemyTarget(luigi.Target):
"""
Database target using SQLAlchemy.
This will rarely have to be directly instantiated by the user.
Typical usage would be to override `luigi.contrib.sqla.CopyToTable` class
to create a task to write to the database.
"""
marker_table = None
connect_args = {}
_engine = None # sqlalchemy engine
_pid = None # the pid of the sqlalchemy engine object
def __init__(self, connection_string, target_table, update_id, echo=False, connect_args=None):
"""
Constructor for the SQLAlchemyTarget.
:param connection_string: SQLAlchemy connection string
:type connection_string: str
:param target_table: The table name for the data
:type target_table: str
:param update_id: An identifier for this data set
:type update_id: str
:param echo: Flag to setup SQLAlchemy logging
:type echo: bool
:param connect_args: A dictionary of connection arguments
:type connect_args: dict
:return:
"""
self.target_table = target_table
self.update_id = update_id
self.connection_string = connection_string
self.echo = echo
self.connect_args = connect_args
self.marker_table_bound = None
@property
def engine(self):
pid = os.getpid()
if (SQLAlchemyTarget._engine is None) or (SQLAlchemyTarget._pid != pid):
SQLAlchemyTarget._engine = sqlalchemy.create_engine(self.connection_string, connect_args=self.connect_args,
echo=self.echo)
SQLAlchemyTarget._pid = pid
return SQLAlchemyTarget._engine
def touch(self):
"""
Mark this update as complete.
"""
if self.marker_table_bound is None:
self.create_marker_table()
table = self.marker_table_bound
id_exists = self.exists()
with self.engine.begin() as conn:
if not id_exists:
ins = table.insert().values(update_id=self.update_id, target_table=self.target_table,
inserted=datetime.datetime.now())
else:
ins = table.update().where(sqlalchemy.and_(table.c.update_id == self.update_id,
table.c.target_table == self.target_table)).\
values(update_id=self.update_id, target_table=self.target_table,
inserted=datetime.datetime.now())
conn.execute(ins)
assert self.exists()
def exists(self):
row = None
if self.marker_table_bound is None:
self.create_marker_table()
with self.engine.begin() as conn:
table = self.marker_table_bound
s = sqlalchemy.select([table]).where(sqlalchemy.and_(table.c.update_id == self.update_id,
table.c.target_table == self.target_table)).limit(1)
row = conn.execute(s).fetchone()
return row is not None
def create_marker_table(self):
"""
Create marker table if it doesn't exist.
Using a separate connection since the transaction might have to be reset.
"""
if self.marker_table is None:
self.marker_table = luigi.configuration.get_config().get('sqlalchemy', 'marker-table', 'table_updates')
engine = self.engine
with engine.begin() as con:
metadata = sqlalchemy.MetaData()
if not con.dialect.has_table(con, self.marker_table):
self.marker_table_bound = sqlalchemy.Table(
self.marker_table, metadata,
sqlalchemy.Column("update_id", sqlalchemy.String(128), primary_key=True),
sqlalchemy.Column("target_table", sqlalchemy.String(128)),
sqlalchemy.Column("inserted", sqlalchemy.DateTime, default=datetime.datetime.now()))
metadata.create_all(engine)
else:
metadata.reflect(bind=engine)
self.marker_table_bound = metadata.tables[self.marker_table]
def open(self, mode):
raise NotImplementedError("Cannot open() SQLAlchemyTarget")
class CopyToTable(luigi.Task):
"""
An abstract task for inserting a data set into SQLAlchemy RDBMS
Usage:
* subclass and override the required `connection_string`, `table` and `columns` attributes.
"""
_logger = logging.getLogger('luigi-interface')
echo = False
connect_args = {}
@abc.abstractmethod
def connection_string(self):
return None
@abc.abstractproperty
def table(self):
return None
# specify the columns that define the schema. The format for the columns is a list
# of tuples. For example :
# columns = [
# (["id", sqlalchemy.Integer], dict(primary_key=True)),
# (["name", sqlalchemy.String(64)], {}),
# (["value", sqlalchemy.String(64)], {})
# ]
# The tuple (args_list, kwargs_dict) here is the args and kwargs
# that need to be passed to sqlalchemy.Column(*args, **kwargs).
# If the tables have already been setup by another process, then you can
# completely ignore the columns. Instead set the reflect value to True below
columns = []
# options
column_separator = "\t" # how columns are separated in the file copied into postgres
chunk_size = 5000 # default chunk size for insert
reflect = False # Set this to true only if the table has already been created by alternate means
def create_table(self, engine):
"""
Override to provide code for creating the target table.
By default it will be created using types specified in columns.
If the table exists, then it binds to the existing table.
If overridden, use the provided connection object for setting up the table in order to
create the table and insert data using the same transaction.
:param engine: The sqlalchemy engine instance
:type engine: object
"""
def construct_sqla_columns(columns):
retval = [sqlalchemy.Column(*c[0], **c[1]) for c in columns]
return retval
needs_setup = (len(self.columns) == 0) or (False in [len(c) == 2 for c in self.columns]) if not self.reflect else False
if needs_setup:
# only names of columns specified, no types
raise NotImplementedError("create_table() not implemented for %r and columns types not specified" % self.table)
else:
# if columns is specified as (name, type) tuples
with engine.begin() as con:
metadata = sqlalchemy.MetaData()
try:
if not con.dialect.has_table(con, self.table):
sqla_columns = construct_sqla_columns(self.columns)
self.table_bound = sqlalchemy.Table(self.table, metadata, *sqla_columns)
metadata.create_all(engine)
else:
metadata.reflect(bind=engine)
self.table_bound = metadata.tables[self.table]
except Exception as e:
self._logger.exception(self.table + str(e))
def update_id(self):
"""
This update id will be a unique identifier for this insert on this table.
"""
return self.task_id
def output(self):
return SQLAlchemyTarget(
connection_string=self.connection_string,
target_table=self.table,
update_id=self.update_id(),
connect_args=self.connect_args,
echo=self.echo)
def rows(self):
"""
Return/yield tuples or lists corresponding to each row to be inserted.
This method can be overridden for custom file types or formats.
"""
with self.input().open('r') as fobj:
for line in fobj:
yield line.strip("\n").split(self.column_separator)
def run(self):
self._logger.info("Running task copy to table for update id %s for table %s" % (self.update_id(), self.table))
output = self.output()
engine = output.engine
self.create_table(engine)
with engine.begin() as conn:
rows = iter(self.rows())
ins_rows = [dict(zip(("_" + c.key for c in self.table_bound.c), row))
for row in itertools.islice(rows, self.chunk_size)]
while ins_rows:
self.copy(conn, ins_rows, self.table_bound)
ins_rows = [dict(zip(("_" + c.key for c in self.table_bound.c), row))
for row in itertools.islice(rows, self.chunk_size)]
self._logger.info("Finished inserting %d rows into SQLAlchemy target" % len(ins_rows))
output.touch()
self._logger.info("Finished inserting rows into SQLAlchemy target")
def copy(self, conn, ins_rows, table_bound):
"""
This method does the actual insertion of the rows of data given by ins_rows into the
database. A task that needs row updates instead of insertions should overload this method.
:param conn: The sqlalchemy connection object
:param ins_rows: The dictionary of rows with the keys in the format _<column_name>. For example
if you have a table with a column name "property", then the key in the dictionary
would be "_property". This format is consistent with the bindparam usage in sqlalchemy.
:param table_bound: The object referring to the table
:return:
"""
bound_cols = dict((c, sqlalchemy.bindparam("_" + c.key)) for c in table_bound.columns)
ins = table_bound.insert().values(bound_cols)
conn.execute(ins, ins_rows)
|
anyman/luigi
|
luigi/contrib/sqla.py
|
Python
|
apache-2.0
| 15,647
|
"""Termwise semantic similarity"""
|
tanghaibao/goatools
|
goatools/semsim/termwise/__init__.py
|
Python
|
bsd-2-clause
| 35
|
import optparse
parser = optparse.OptionParser()
parser.add_option('-d', dest='default_tests',
action='store_true', default=None)
if __name__ == '__main__':
from wsgitest.run import run_tests
options, files = parser.parse_args()
if not files:
if options.default_tests is None:
options.default_tests = True
if options.default_tests:
from wsgitest import DEFAULT_TESTS_DIR
files.append(DEFAULT_TESTS_DIR)
result = run_tests(files)
    print(result.summary())
|
jonashaag/WSGITest
|
wsgitest.py
|
Python
|
bsd-2-clause
| 536
|
""" Implementaiton of a population for maintaining a GA population and
proposing structures to pair. """
from random import randrange, random
from math import tanh, sqrt, exp
from operator import itemgetter
import numpy as np
from ase.db.core import now
def count_looks_like(a, all_cand, comp):
"""Utility method for counting occurences."""
n = 0
for b in all_cand:
if a.info['confid'] == b.info['confid']:
continue
if comp.looks_like(a, b):
n += 1
return n
class Population(object):
"""Population class which maintains the current population
and proposes which candidates to pair together.
Parameters:
data_connection: DataConnection object
        Object that provides access to the database of candidates.
population_size: int
The number of candidates in the population.
comparator: Comparator object
this will tell if two configurations are equal.
Default compare atoms objects directly.
logfile: str
Text file that contains information about the population
The format is::
timestamp: generation(if available): id1,id2,id3...
Using this file greatly speeds up convergence checks.
Default None meaning that no file is written.
use_extinct: boolean
Set this to True if mass extinction and the extinct key
are going to be used. Default is False.
"""
def __init__(self, data_connection, population_size,
comparator=None, logfile=None, use_extinct=False):
self.dc = data_connection
self.pop_size = population_size
if comparator is None:
from ase.ga.standard_comparators import AtomsComparator
comparator = AtomsComparator()
self.comparator = comparator
self.logfile = logfile
self.use_extinct = use_extinct
self.pop = []
self.pairs = None
self.all_cand = None
self.__initialize_pop__()
def __initialize_pop__(self):
""" Private method that initalizes the population when
the population is created. """
# Get all relaxed candidates from the database
ue = self.use_extinct
all_cand = self.dc.get_all_relaxed_candidates(use_extinct=ue)
all_cand.sort(key=lambda x: x.get_raw_score(), reverse=True)
# all_cand.sort(key=lambda x: x.get_potential_energy())
# Fill up the population with the self.pop_size most stable
# unique candidates.
i = 0
while i < len(all_cand) and len(self.pop) < self.pop_size:
c = all_cand[i]
i += 1
eq = False
for a in self.pop:
if self.comparator.looks_like(a, c):
eq = True
break
if not eq:
self.pop.append(c)
for a in self.pop:
a.info['looks_like'] = count_looks_like(a, all_cand,
self.comparator)
self.all_cand = all_cand
self.__calc_participation__()
def __calc_participation__(self):
""" Determines, from the database, how many times each
candidate has been used to generate new candidates. """
(participation, pairs) = self.dc.get_participation_in_pairing()
for a in self.pop:
if a.info['confid'] in participation.keys():
a.info['n_paired'] = participation[a.info['confid']]
else:
a.info['n_paired'] = 0
self.pairs = pairs
def update(self, new_cand=None):
""" New candidates can be added to the database
after the population object has been created.
This method extracts these new candidates from the
database and includes them in the population. """
if len(self.pop) == 0:
self.__initialize_pop__()
if new_cand is None:
ue = self.use_extinct
new_cand = self.dc.get_all_relaxed_candidates(only_new=True,
use_extinct=ue)
for a in new_cand:
self.__add_candidate__(a)
self.all_cand.append(a)
self.__calc_participation__()
self._write_log()
def get_current_population(self):
""" Returns a copy of the current population. """
self.update()
return [a.copy() for a in self.pop]
def get_population_after_generation(self, gen):
""" Returns a copy of the population as it where
after generation gen"""
if self.logfile is not None:
f = open(self.logfile, 'r')
gens = {}
for l in f:
_, no, popul = l.split(':')
gens[int(no)] = [int(i) for i in popul.split(',')]
f.close()
return [c.copy() for c in self.all_cand[::-1]
if c.info['relax_id'] in gens[gen]]
all_candidates = [c for c in self.all_cand
if c.info['key_value_pairs']['generation'] <= gen]
cands = [all_candidates[0]]
for b in all_candidates:
if b not in cands:
for a in cands:
if self.comparator.looks_like(a, b):
break
else:
cands.append(b)
pop = cands[:self.pop_size]
return [a.copy() for a in pop]
def __add_candidate__(self, a):
""" Adds a single candidate to the population. """
# check if the structure is too low in raw score
if a.get_raw_score() < self.pop[-1].get_raw_score() \
and len(self.pop) == self.pop_size:
return
# check if the new candidate should
# replace a similar structure in the population
for (i, b) in enumerate(self.pop):
if self.comparator.looks_like(a, b):
if b.get_raw_score() < a.get_raw_score():
del self.pop[i]
a.info['looks_like'] = count_looks_like(a,
self.all_cand,
self.comparator)
self.pop.append(a)
self.pop.sort(key=lambda x: x.get_raw_score(),
reverse=True)
return
# the new candidate needs to be added, so remove the highest
# energy one
if len(self.pop) == self.pop_size:
del self.pop[-1]
# add the new candidate
a.info['looks_like'] = count_looks_like(a,
self.all_cand,
self.comparator)
self.pop.append(a)
self.pop.sort(key=lambda x: x.get_raw_score(), reverse=True)
    def __get_fitness__(self, indices, with_history=True):
"""Calculates the fitness using the formula from
L.B. Vilhelmsen et al., JACS, 2012, 134 (30), pp 12807-12816
Sign change on the fitness compared to the formulation in the
abovementioned paper due to maximizing raw_score instead of
minimizing energy. (Set raw_score=-energy to optimize the energy)
"""
scores = [x.get_raw_score() for x in self.pop]
min_s = min(scores)
max_s = max(scores)
T = min_s - max_s
        if isinstance(indices, int):
            indices = [indices]
        f = [0.5 * (1. - tanh(2. * (scores[i] - max_s) / T - 1.))
             for i in indices]
if with_history:
            M = [float(self.pop[i].info['n_paired']) for i in indices]
            L = [float(self.pop[i].info['looks_like']) for i in indices]
f = [f[i] * 1. / sqrt(1. + M[i]) * 1. / sqrt(1. + L[i])
for i in range(len(f))]
return f
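    # In symbols: with T = s_min - s_max, candidate i gets
    #     F_i = 0.5 * (1 - tanh(2 * (s_i - s_max) / T - 1)),
    # optionally damped by 1/sqrt(1 + M_i) and 1/sqrt(1 + L_i), where M_i is
    # the number of times candidate i has been paired and L_i the number of
    # similar candidates (cf. the reference in the docstring above).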
def get_two_candidates(self, with_history=True):
""" Returns two candidates for pairing employing the
fitness criteria from
L.B. Vilhelmsen et al., JACS, 2012, 134 (30), pp 12807-12816
        and the roulette wheel selection scheme described in
R.L. Johnston Dalton Transactions,
Vol. 22, No. 22. (2003), pp. 4193-4207
"""
if len(self.pop) < 2:
self.update()
if len(self.pop) < 2:
return None
fit = self.__get_fitness__(range(len(self.pop)), with_history)
fmax = max(fit)
c1 = self.pop[0]
c2 = self.pop[0]
used_before = False
while c1.info['confid'] == c2.info['confid'] and not used_before:
nnf = True
while nnf:
t = randrange(0, len(self.pop), 1)
if fit[t] > random() * fmax:
c1 = self.pop[t]
nnf = False
nnf = True
while nnf:
t = randrange(0, len(self.pop), 1)
if fit[t] > random() * fmax:
c2 = self.pop[t]
nnf = False
c1id = c1.info['confid']
c2id = c2.info['confid']
used_before = (min([c1id, c2id]), max([c1id, c2id])) in self.pairs
return (c1.copy(), c2.copy())
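    # Roulette-wheel acceptance as used above: a uniformly drawn index t is
    # kept with probability fit[t] / fmax, so fitter candidates are selected
    # proportionally more often.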
def get_one_candidate(self, with_history=True):
"""Returns one candidate for mutation employing the
fitness criteria from
L.B. Vilhelmsen et al., JACS, 2012, 134 (30), pp 12807-12816
        and the roulette wheel selection scheme described in
R.L. Johnston Dalton Transactions,
Vol. 22, No. 22. (2003), pp. 4193-4207
"""
if len(self.pop) < 1:
self.update()
if len(self.pop) < 1:
return None
fit = self.__get_fitness__(range(len(self.pop)), with_history)
fmax = max(fit)
nnf = True
while nnf:
t = randrange(0, len(self.pop), 1)
if fit[t] > random() * fmax:
c1 = self.pop[t]
nnf = False
return c1.copy()
def _write_log(self):
"""Writes the population to a logfile.
The format is::
timestamp: generation(if available): id1,id2,id3..."""
if self.logfile is not None:
ids = [str(a.info['relax_id']) for a in self.pop]
if ids != []:
try:
gen_nums = [c.info['key_value_pairs']['generation']
for c in self.all_cand]
max_gen = max(gen_nums)
except KeyError:
max_gen = ' '
f = open(self.logfile, 'a')
f.write('{time}: {gen}: {pop}\n'.format(time=now(),
pop=','.join(ids),
gen=max_gen))
f.close()
def is_uniform(self, func, min_std, pop=None):
"""Tests whether the current population is uniform or diverse.
Returns True if uniform, False otherwise.
Parameters:
func: function
            that takes one argument, an Atoms object, and returns a value
            that will be used for testing against the rest of the population.
min_std: int or float
The minimum standard deviation, if the population has a lower
std dev it is uniform.
pop: list, optional
use this list of Atoms objects instead of the current population.
"""
if pop is None:
pop = self.pop
vals = [func(a) for a in pop]
stddev = np.std(vals)
if stddev < min_std:
return True
return False
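# Hypothetical usage (function and threshold chosen for illustration):
# pop.is_uniform(lambda a: a.get_raw_score(), min_std=0.05)
# returns True when the raw scores of the current population spread by
# less than 0.05, which can be used to trigger mass_extinction() below.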
def mass_extinction(self, ids):
"""Kills every candidate in the database with gaid in the
supplied list of ids. Typically used on the main part of the current
population if the diversity is too small.
Parameters:
ids: list
list of ids of candidates to be killed.
"""
for confid in ids:
self.dc.kill_candidate(confid)
self.pop = []
class RandomPopulation(Population):
def __init__(self, data_connection, population_size,
comparator=None, logfile=None, exclude_used_pairs=False,
bad_candidates=0, use_extinct=False):
self.exclude_used_pairs = exclude_used_pairs
self.bad_candidates = bad_candidates
Population.__init__(self, data_connection, population_size,
comparator, logfile, use_extinct)
def __initialize_pop__(self):
""" Private method that initalizes the population when
the population is created. """
# Get all relaxed candidates from the database
ue = self.use_extinct
all_cand = self.dc.get_all_relaxed_candidates(use_extinct=ue)
all_cand.sort(key=lambda x: x.get_raw_score(), reverse=True)
# all_cand.sort(key=lambda x: x.get_potential_energy())
if len(all_cand) > 0:
# Fill up the population with the self.pop_size most stable
# unique candidates.
ratings = []
best_raw = all_cand[0].get_raw_score()
i = 0
while i < len(all_cand):
c = all_cand[i]
i += 1
eq = False
for a in self.pop:
if self.comparator.looks_like(a, c):
eq = True
break
if not eq:
if len(self.pop) < self.pop_size - self.bad_candidates:
self.pop.append(c)
else:
exp_fact = exp(c.get_raw_score() / best_raw)
ratings.append([c, (exp_fact - 1) * random()])
ratings.sort(key=itemgetter(1), reverse=True)
for i in range(self.bad_candidates):
self.pop.append(ratings[i][0])
for a in self.pop:
a.info['looks_like'] = count_looks_like(a, all_cand,
self.comparator)
self.all_cand = all_cand
self.__calc_participation__()
def update(self):
""" The update method in Population will add to the end of
the population, that can't be used here since we might have
bad candidates that need to stay in the population, therefore
just recalc the population every time. """
self.pop = []
self.__initialize_pop__()
self._write_log()
def get_one_candidate(self):
"""Returns one candidates at random."""
if len(self.pop) < 1:
self.update()
if len(self.pop) < 1:
return None
t = randrange(0, len(self.pop), 1)
c = self.pop[t]
return c.copy()
def get_two_candidates(self):
"""Returns two candidates at random."""
if len(self.pop) < 2:
self.update()
if len(self.pop) < 2:
return None
c1 = self.pop[0]
c2 = self.pop[0]
used_before = False
while c1.info['confid'] == c2.info['confid'] and not used_before:
t = randrange(0, len(self.pop), 1)
c1 = self.pop[t]
t = randrange(0, len(self.pop), 1)
c2 = self.pop[t]
c1id = c1.info['confid']
c2id = c2.info['confid']
used_before = (tuple(sorted([c1id, c2id])) in self.pairs
and self.exclude_used_pairs)
return (c1.copy(), c2.copy())
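# A minimal, self-contained sketch of the roulette-wheel acceptance loop
# used by get_one_candidate()/get_two_candidates() above, operating on a
# plain list of fitness values instead of Atoms objects (illustration
# only, not part of the public API; randrange/random as imported at the
# top of this module):
def _roulette_pick(fitnesses):
    """Return an index drawn with probability proportional to fitness."""
    fmax = max(fitnesses)
    while True:
        t = randrange(0, len(fitnesses), 1)
        # accept index t with probability fitnesses[t] / fmax
        if fitnesses[t] > random() * fmax:
            return t
# e.g. with fitnesses [0.88, 0.50, 0.12], index 0 is accepted roughly
# 0.88 / 0.12 ~ 7 times more often than index 2.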
|
suttond/MODOI
|
ase/ga/population.py
|
Python
|
lgpl-3.0
| 15,749
|
import pytest
from os import path as os_path
from cfme.login import login
from utils import version
from utils.appliance import ApplianceException
from utils.blockers import BZ
from utils.conf import cfme_data
from utils.log import logger
def pytest_generate_tests(metafunc):
argnames, argvalues, idlist = ['db_url', 'db_version', 'db_desc'], [], []
db_backups = cfme_data.get('db_backups', {})
if not db_backups:
return []
for key, data in db_backups.iteritems():
argvalues.append((data.url, data.version, data.desc))
idlist.append(key)
return metafunc.parametrize(argnames=argnames, argvalues=argvalues, ids=idlist)
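# Expected shape of the cfme_data entry (keys inferred from the attribute
# access above; names and values below are placeholders):
#
# db_backups:
#     some_db:
#         url: http://example.com/backups/vmdb.backup
#         version: '5.6'
#         desc: small customer database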
@pytest.fixture(scope="module")
def temp_appliance_extended_db(temp_appliance_preconfig):
app = temp_appliance_preconfig
app.stop_evm_service()
app.extend_db_partition()
app.start_evm_service()
return app
@pytest.mark.ignore_stream('5.5', 'upstream')
@pytest.mark.tier(2)
@pytest.mark.uncollectif(
lambda db_version:
db_version >= version.current_version() or
version.get_stream(db_version) == version.current_stream())
@pytest.mark.meta(
blockers=[BZ(1354466, unblock=lambda db_url: 'ldap' not in db_url)])
def test_db_migrate(app_creds, temp_appliance_extended_db, db_url, db_version, db_desc):
app = temp_appliance_extended_db
# Download the database
logger.info("Downloading database: {}".format(db_desc))
url_basename = os_path.basename(db_url)
rc, out = app.ssh_client.run_command(
'curl -o "/tmp/{}" "{}"'.format(url_basename, db_url), timeout=30)
assert rc == 0, "Failed to download database: {}".format(out)
# The v2_key is potentially here
v2key_url = os_path.join(os_path.dirname(db_url), "v2_key")
# Stop EVM service and drop vmdb_production DB
app.stop_evm_service()
app.drop_database()
# restore new DB and migrate it
with app.ssh_client as ssh:
rc, out = ssh.run_command('createdb vmdb_production', timeout=30)
assert rc == 0, "Failed to create clean database: {}".format(out)
rc, out = ssh.run_command(
'pg_restore -v --dbname=vmdb_production /tmp/{}'.format(url_basename), timeout=600)
assert rc == 0, "Failed to restore new database: {}".format(out)
rc, out = ssh.run_rake_command("db:migrate", timeout=300)
assert rc == 0, "Failed to migrate new database: {}".format(out)
rc, out = ssh.run_rake_command(
'db:migrate:status 2>/dev/null | grep "^\s*down"', timeout=30)
assert rc != 0, "Migration failed; migrations in 'down' state found: {}".format(out)
# fetch GUID and REGION from the DB and use it to replace data in /var/www/miq/vmdb/GUID
# and /var/www/miq/vmdb/REGION respectively
data_query = {
'guid': 'select guid from miq_servers',
'region': 'select region from miq_regions'
}
for data_type, db_query in data_query.items():
data_filepath = '/var/www/miq/vmdb/{}'.format(data_type.upper())
rc, out = ssh.run_command(
'psql -d vmdb_production -t -c "{}"'.format(db_query), timeout=15)
assert rc == 0, "Failed to fetch {}: {}".format(data_type, out)
db_data = out.strip()
assert db_data, "No {} found in database; query '{}' returned no records".format(
data_type, db_query)
rc, out = ssh.run_command(
"echo -n '{}' > {}".format(db_data, data_filepath), timeout=15)
assert rc == 0, "Failed to replace data in {} with '{}': {}".format(
data_filepath, db_data, out)
# fetch v2_key
try:
rc, out = ssh.run_command(
'curl "{}"'.format(v2key_url), timeout=15)
assert rc == 0, "Failed to download v2_key: {}".format(out)
assert ":key:" in out, "Not a v2_key file: {}".format(out)
rc, out = ssh.run_command(
'curl -o "/var/www/miq/vmdb/certs/v2_key" "{}"'.format(v2key_url), timeout=15)
assert rc == 0, "Failed to download v2_key: {}".format(out)
# or change all invalid (now unavailable) passwords to 'invalid'
except AssertionError:
rc, out = ssh.run_command("fix_auth -i invalid", timeout=45)
assert rc == 0, "Failed to change invalid passwords: {}".format(out)
# fix db password
rc, out = ssh.run_command("fix_auth --databaseyml -i {}".format(
app_creds['password']), timeout=45)
assert rc == 0, "Failed to change invalid password: {}".format(out)
# start evmserverd, wait for web UI to start and try to log in
try:
app.start_evm_service()
except ApplianceException:
rc, out = app.ssh_client.run_rake_command("evm:start")
assert rc == 0, "Couldn't start evmserverd: {}".format(out)
app.wait_for_web_ui(timeout=600)
# Reset user's password, just in case (necessary for customer DBs)
rc, out = ssh.run_rails_command(
'"u = User.find_by_userid(\'admin\'); u.password = \'{}\'; u.save!"'
.format(app.user.credential.secret))
assert rc == 0, "Failed to change UI password of {} to {}:" \
.format(app.user.credential.principal, app.user.credential.secret, out)
login(app.user)
|
rlbabyuk/integration_tests
|
cfme/tests/test_db_migrate.py
|
Python
|
gpl-2.0
| 5,344
|
# -*- coding: utf-8 -*-
""" Authentication, Authorization, Accouting
@requires: U{B{I{gluon}} <http://web2py.com>}
@copyright: (c) 2010-2015 Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ("AuthS3",
"S3Permission",
"S3Audit",
"S3RoleManager",
"S3OrgRoleManager",
"S3PersonRoleManager",
)
import datetime
#import re
from uuid import uuid4
try:
import json # try stdlib (Python 2.6)
except ImportError:
try:
import simplejson as json # try external module
except:
import gluon.contrib.simplejson as json # fallback to pure-Python module
try:
# Python 2.7
from collections import OrderedDict
except:
# Python 2.6
from gluon.contrib.simplejson.ordered_dict import OrderedDict
from gluon import *
from gluon.sqlhtml import OptionsWidget
from gluon.storage import Storage
from gluon.tools import Auth, callback, DEFAULT, replace_id
from gluon.utils import web2py_uuid
from s3dal import Row, Rows, Query, Table
from s3datetime import S3DateTime
from s3error import S3PermissionError
from s3fields import S3Represent, s3_uid, s3_timestamp, s3_deletion_status, s3_comments
from s3rest import S3Method
from s3track import S3Tracker
from s3utils import s3_addrow, s3_get_extension, s3_mark_required
DEBUG = False
if DEBUG:
import sys
print >> sys.stderr, "S3AAA: DEBUG MODE"
def _debug(m):
print >> sys.stderr, m
else:
_debug = lambda m: None
# =============================================================================
class AuthS3(Auth):
"""
S3 extensions of the gluon.tools.Auth class
- override:
- __init__
- define_tables
- login_bare
- set_cookie
- login
- register
- email_reset_password
- verify_email
- profile
- has_membership
- requires_membership
- S3 extension for user registration:
- s3_register_validation
- s3_user_register_onaccept
- S3 extension for user administration:
- configure_user_fields
- s3_verify_user
- s3_approve_user
- s3_link_user
- s3_user_profile_onaccept
- s3_link_to_person
- s3_link_to_organisation
- s3_link_to_human_resource
- s3_link_to_member
- s3_approver
- S3 custom authentication methods:
- s3_impersonate
- s3_logged_in
- S3 user role management:
- get_system_roles
- s3_set_roles
- s3_create_role
- s3_delete_role
- s3_assign_role
- s3_withdraw_role
- s3_has_role
- s3_group_members
- S3 ACL management:
- s3_update_acls
- S3 user identification helpers:
- s3_get_user_id
- s3_user_pe_id
- s3_logged_in_person
- s3_logged_in_human_resource
- S3 core authorization methods:
- s3_has_permission
- s3_accessible_query
- S3 variants of web2py authorization methods:
- s3_has_membership
- s3_requires_membership
- S3 record ownership methods:
- s3_make_session_owner
- s3_session_owns
- s3_set_record_owner
"""
# Configuration of UIDs for system roles
S3_SYSTEM_ROLES = Storage(ADMIN = "ADMIN",
AUTHENTICATED = "AUTHENTICATED",
ANONYMOUS = "ANONYMOUS",
EDITOR = "EDITOR",
MAP_ADMIN = "MAP_ADMIN",
ORG_ADMIN = "ORG_ADMIN",
ORG_GROUP_ADMIN = "ORG_GROUP_ADMIN",
)
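# These UIDs are resolved to actual group records via get_system_roles()
# (see the override list above) and can be tested directly, e.g.
# (illustrative): auth.s3_has_role("ORG_ADMIN")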
def __init__(self):
""" Initialise parent class & make any necessary modifications """
Auth.__init__(self, current.db)
self.settings.lock_keys = False
self.settings.login_userfield = "email"
self.settings.lock_keys = True
messages = self.messages
messages.lock_keys = False
# @ToDo Move these to deployment_settings
messages.approve_user = \
"""Your action is required to approve a New User for %(system_name)s:
%(first_name)s %(last_name)s
%(email)s
Please go to %(url)s to approve this user."""
messages.email_approver_failed = "Failed to send mail to Approver - see if you can notify them manually!"
messages.email_sent = "Verification Email sent - please check your email to validate. If you do not receive this email please check your junk email or spam filters"
messages.email_verification_failed = "Unable to send verification email - either your email is invalid or our email server is down"
messages.email_verified = "Email verified - you can now login"
messages.duplicate_email = "This email address is already in use"
messages.help_utc_offset = "The time difference between UTC and your timezone, specify as +HHMM for eastern or -HHMM for western timezones."
messages.help_mobile_phone = "Entering a phone number is optional, but doing so allows you to subscribe to receive SMS messages."
messages.help_organisation = "Entering an Organization is optional, but doing so directs you to the appropriate approver & means you automatically get the appropriate permissions."
messages.help_image = "You can either use %(gravatar)s or else upload a picture here. The picture will be resized to 50x50."
messages.label_image = "Profile Image"
messages.label_organisation_id = "Organization"
messages.label_org_group_id = "Coalition"
messages.label_remember_me = "Remember Me"
messages.label_utc_offset = "UTC Offset"
#messages.logged_in = "Signed In"
#messages.logged_out = "Signed Out"
#messages.submit_button = "Signed In"
messages.new_user = \
"""A New User has registered for %(system_name)s:
%(first_name)s %(last_name)s
%(email)s
No action is required."""
messages.password_reset_button = "Request password reset"
messages.profile_save_button = "Apply changes"
messages.registration_disabled = "Registration Disabled!"
messages.registration_verifying = "You haven't yet Verified your account - please check your email"
messages.reset_password = "Click on the link %(url)s to reset your password"
messages.verify_email = "Click on the link %(url)s to verify your email"
messages.verify_email_subject = "%(system_name)s - Verify Email"
messages.welcome_email_subject = "Welcome to %(system_name)s"
messages.welcome_email = \
"""Welcome to %(system_name)s
- You can start using %(system_name)s at: %(url)s
- To edit your profile go to: %(url)s%(profile)s
Thank you"""
messages.lock_keys = True
# S3Permission
self.permission = S3Permission(self)
# Set to True to override any authorization
self.override = False
# Set to True to indicate that all current transactions
# are to be rolled back (e.g. trial phase of interactive imports)
self.rollback = False
# Site types (for OrgAuth)
T = current.T
if current.deployment_settings.get_ui_label_camp():
shelter = T("Camp")
else:
shelter = T("Shelter")
self.org_site_types = Storage(transport_airport = T("Airport"),
msg_basestation = T("Cell Tower"),
cr_shelter = shelter,
org_facility = T("Facility"),
#org_facility = T("Site"),
org_office = T("Office"),
transport_heliport = T("Heliport"),
hms_hospital = T("Hospital"),
#fire_station = T("Fire Station"),
dvi_morgue = T("Morgue"),
transport_seaport = T("Seaport"),
inv_warehouse = T("Warehouse"),
)
# Name prefixes of tables which must not be manipulated from remote,
# CLI can override with auth.override=True
self.PROTECTED = ("admin",)
# -------------------------------------------------------------------------
def define_tables(self, migrate=True, fake_migrate=False):
"""
to be called unless tables are defined manually
usages::
# defines all needed tables and table files
# UUID + "_auth_user.table", ...
auth.define_tables()
# defines all needed tables and table files
# "myprefix_auth_user.table", ...
auth.define_tables(migrate="myprefix_")
# defines all needed tables without migration/table files
auth.define_tables(migrate=False)
"""
db = current.db
settings = self.settings
messages = self.messages
deployment_settings = current.deployment_settings
define_table = db.define_table
# User table
utable = settings.table_user
uname = settings.table_user_name
if not utable:
utable_fields = [
Field("first_name", length=128, notnull=True,
default="",
requires = \
IS_NOT_EMPTY(error_message=messages.is_empty),
),
Field("last_name", length=128,
default=""),
Field("email", length=255, unique=True,
default=""),
# Used For chat in default deployment config
Field("username", length=255, default="",
readable=False, writable=False),
Field("language", length=16,
default = deployment_settings.get_L10n_default_language()),
Field("utc_offset", length=16,
readable=False, writable=False),
Field("organisation_id", "integer",
readable=False, writable=False),
Field("org_group_id", "integer",
readable=False, writable=False),
Field("site_id", "integer",
readable=False, writable=False),
Field("link_user_to", "list:string",
readable=False, writable=False),
Field("registration_key", length=512,
default="",
readable=False, writable=False),
Field("reset_password_key", length=512,
default="",
readable=False, writable=False),
Field("deleted", "boolean",
default=False,
readable=False, writable=False),
Field("timestmp", "datetime",
default="",
readable=False, writable=False),
s3_comments(readable=False, writable=False)
]
utable_fields += list(s3_uid())
utable_fields += list(s3_timestamp())
userfield = settings.login_userfield
if userfield != "email":
# Use username (not used by default in Sahana)
utable_fields.insert(2, Field(userfield, length=128,
default="",
unique=True))
# Insert password field after either email or username
passfield = settings.password_field
utable_fields.insert(3, Field(passfield, "password", length=512,
requires=CRYPT(key=settings.hmac_key,
min_length=deployment_settings.get_auth_password_min_length(),
digest_alg="sha512"),
readable=False,
label=messages.label_password))
define_table(uname,
migrate = migrate,
fake_migrate=fake_migrate,
*utable_fields)
utable = settings.table_user = db[uname]
# Fields configured in configure_user_fields
# Temporary User Table
# for storing User Data that will be used to create records for
# the user once they are approved
define_table("auth_user_temp",
Field("user_id", utable),
Field("home"),
Field("mobile"),
Field("image", "upload"),
*(s3_uid()+s3_timestamp()))
# Group table (roles)
gtable = settings.table_group
gname = settings.table_group_name
if not gtable:
define_table(gname,
# Group unique ID, must be notnull+unique:
Field("uuid", length=64, notnull=True, unique=True,
readable=False, writable=False),
# Group does not appear in the Role Manager:
# (can neither assign, nor modify, nor delete)
Field("hidden", "boolean",
readable=False, writable=False,
default=False),
# Group cannot be modified in the Role Manager:
# (can assign, but neither modify nor delete)
Field("system", "boolean",
readable=False, writable=False,
default=False),
# Group cannot be deleted in the Role Manager:
# (can assign and modify, but not delete)
Field("protected", "boolean",
readable=False, writable=False,
default=False),
# Role name:
Field("role", length=255, unique=True,
default="",
requires = IS_NOT_IN_DB(db, "%s.role" % gname),
label=messages.label_role),
Field("description", "text",
label=messages.label_description),
migrate = migrate,
fake_migrate=fake_migrate,
*(s3_timestamp() + s3_deletion_status()))
gtable = settings.table_group = db[gname]
# Group membership table (user<->role)
if not settings.table_membership:
define_table(
settings.table_membership_name,
Field("user_id", utable,
requires = IS_IN_DB(db, "%s.id" % uname,
"%(id)s: %(first_name)s %(last_name)s"),
label=messages.label_user_id),
Field("group_id", gtable,
requires = IS_IN_DB(db, "%s.id" % gname,
"%(id)s: %(role)s"),
represent = S3Represent(lookup=gname, fields=["role"]),
label=messages.label_group_id),
# Realm
Field("pe_id", "integer"),
migrate = migrate,
fake_migrate=fake_migrate,
*(s3_uid() + s3_timestamp() + s3_deletion_status()))
settings.table_membership = db[settings.table_membership_name]
# Define Eden permission table
self.permission.define_table(migrate=migrate,
fake_migrate=fake_migrate)
#security_policy = deployment_settings.get_security_policy()
#if security_policy not in (1, 2, 3, 4, 5, 6, 7, 8) and \
# not settings.table_permission:
# # Permissions table (group<->permission)
# # NB This Web2Py table is deprecated / replaced in Eden by S3Permission
# settings.table_permission = define_table(
# settings.table_permission_name,
# Field("group_id", gtable,
# requires = IS_IN_DB(db, "%s.id" % gname,
# "%(id)s: %(role)s"),
# label=messages.label_group_id),
# Field("name", default="default", length=512,
# requires = IS_NOT_EMPTY(),
# label=messages.label_name),
# Field("table_name", length=512,
# # Needs to be defined after all tables created
# #requires = IS_IN_SET(db.tables),
# label=messages.label_table_name),
# Field("record_id", "integer",
# requires = IS_INT_IN_RANGE(0, 10 ** 9),
# label=messages.label_record_id),
# migrate = migrate,
# fake_migrate=fake_migrate)
# Event table (auth_event)
# Records Logins & ?
# @ToDo: Deprecate? At least make it configurable?
if not settings.table_event:
request = current.request
define_table(
settings.table_event_name,
Field("time_stamp", "datetime",
default=request.utcnow,
#label=messages.label_time_stamp
),
Field("client_ip",
default=request.client,
#label=messages.label_client_ip
),
Field("user_id", utable, default=None,
requires = IS_IN_DB(db, "%s.id" % uname,
"%(id)s: %(first_name)s %(last_name)s"),
#label=messages.label_user_id
),
Field("origin", default="auth", length=512,
#label=messages.label_origin,
requires = IS_NOT_EMPTY()),
Field("description", "text", default="",
#label=messages.label_description,
requires = IS_NOT_EMPTY()),
migrate = migrate,
fake_migrate=fake_migrate,
*(s3_uid() + s3_timestamp() + s3_deletion_status()))
settings.table_event = db[settings.table_event_name]
# -------------------------------------------------------------------------
def login_bare(self, username, password):
"""
Logs user in
- extended to understand session.s3.roles
"""
settings = self.settings
utable = settings.table_user
userfield = settings.login_userfield
passfield = settings.password_field
query = (utable[userfield] == username)
user = current.db(query).select(limitby=(0, 1)).first()
password = utable[passfield].validate(password)[0]
if user:
if not user.registration_key and user[passfield] == password:
user = Storage(utable._filter_fields(user, id=True))
current.session.auth = Storage(user=user,
last_visit=current.request.now,
expiration=settings.expiration)
self.user = user
self.s3_set_roles()
return user
return False
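# Illustrative call (with the default "email" login userfield set in
# __init__ above): user = auth.login_bare("admin@example.com", "secret")
# returns the user Storage on success, False otherwise, and applies the
# user's roles via s3_set_roles().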
# -------------------------------------------------------------------------
def set_cookie(self):
"""
Set a Cookie to the client browser so that we know this user has
registered & so we should present them with a login form instead
of a register form
"""
cookies = current.response.cookies
cookies["registered"] = "yes"
cookies["registered"]["expires"] = 365 * 24 * 3600 # 1 year
cookies["registered"]["path"] = "/"
# -------------------------------------------------------------------------
def login(self,
next = DEFAULT,
onvalidation = DEFAULT,
onaccept = DEFAULT,
log = DEFAULT,
inline = False, # Set to True to use an 'inline' variant of the style
lost_pw_link = True,
register_link = True,
):
"""
Overrides Web2Py's login() to use custom flash styles & utcnow
@return: a login form
"""
T = current.T
db = current.db
messages = self.messages
request = current.request
response = current.response
session = current.session
settings = self.settings
deployment_settings = current.deployment_settings
utable = settings.table_user
userfield = settings.login_userfield
old_requires = utable[userfield].requires
utable[userfield].requires = [IS_NOT_EMPTY(), IS_LOWER()]
passfield = settings.password_field
try:
utable[passfield].requires[-1].min_length = 0
except:
pass
if next is DEFAULT:
next = request.vars._next or settings.login_next
if onvalidation is DEFAULT:
onvalidation = settings.login_onvalidation
if onaccept is DEFAULT:
onaccept = settings.login_onaccept
if log is DEFAULT:
log = messages.login_log
user = None # default
response.title = T("Login")
# Do we use our own login form, or from a central source?
if settings.login_form == self:
if inline:
formstyle = deployment_settings.get_ui_inline_formstyle()
else:
formstyle = deployment_settings.get_ui_formstyle()
buttons = []
# Self-registration action link
self_registration = deployment_settings.get_security_registration_visible()
if self_registration and register_link:
if self_registration == "index":
# Custom Registration page
controller = "index"
else:
# Default Registration page
controller = "user"
register_link = A(T("Register for Account"),
_href=URL(f=controller, args="register"),
_id="register-btn",
_class="action-lnk",
)
buttons.append(register_link)
# Lost-password action link
if lost_pw_link:
lost_pw_link = A(T("Lost Password"),
_href=URL(f="user", args="retrieve_password"),
_class="action-lnk",
)
buttons.append(lost_pw_link)
# If we have custom buttons, add submit button
if buttons:
submit_button = INPUT(_type="submit", _value=T("Login"))
buttons.insert(0, submit_button)
form = SQLFORM(utable,
fields = [userfield, passfield],
hidden = dict(_next=request.vars._next),
showid = settings.showid,
submit_button = T("Login"),
delete_label = messages.delete_label,
formstyle = formstyle,
separator = settings.label_separator,
buttons = buttons,
)
# Identify form for CSS
form.add_class("auth_login")
if settings.remember_me_form:
# Add a new input checkbox "remember me for longer"
s3_addrow(form,
"",
DIV(INPUT(_type='checkbox',
_class='checkbox',
_id="auth_user_remember",
_name="remember",
),
LABEL(messages.label_remember_me,
_for="auth_user_remember",
),
),
"",
formstyle,
"auth_user_remember__row",
)
if deployment_settings.get_auth_set_presence_on_login():
s3_addrow(form,
"",
INPUT(_id="auth_user_clientlocation",
_name="auth_user_clientlocation",
_style="display:none",
),
"",
formstyle,
"auth_user_client_location",
)
response.s3.jquery_ready.append('''S3.getClientLocation($('#auth_user_clientlocation'))''')
captcha = settings.login_captcha or \
(settings.login_captcha != False and settings.captcha)
if captcha:
s3_addrow(form,
captcha.label,
captcha,
captcha.comment,
formstyle,
"captcha__row",
)
accepted_form = False
if form.accepts(request.post_vars, session,
formname="login", dbio=False,
onvalidation=onvalidation):
accepted_form = True
if userfield == "email":
# Check for Domains which can use Google's SMTP server for passwords
# @ToDo: an equivalent email_domains for other email providers
gmail_domains = current.deployment_settings.get_auth_gmail_domains()
if gmail_domains:
from gluon.contrib.login_methods.email_auth import email_auth
domain = form.vars[userfield].split("@")[1]
if domain in gmail_domains:
settings.login_methods.append(
email_auth("smtp.gmail.com:587", "@%s" % domain))
# Check for username in db
query = (utable[userfield] == form.vars[userfield])
user = db(query).select(limitby=(0, 1)).first()
if user:
# user in db, check if registration pending or disabled
temp_user = user
if temp_user.registration_key == "pending":
response.warning = deployment_settings.get_auth_registration_pending()
return form
elif temp_user.registration_key in ("disabled", "blocked"):
response.error = messages.login_disabled
return form
elif temp_user.registration_key is not None and \
temp_user.registration_key.strip():
response.warning = \
messages.registration_verifying
return form
# Try alternate logins 1st as these have the
# current version of the password
user = None
for login_method in settings.login_methods:
if login_method != self and \
login_method(request.vars[userfield],
request.vars[passfield]):
if self not in settings.login_methods:
# do not store password in db
form.vars[passfield] = None
user = self.get_or_create_user(form.vars)
break
if not user:
# Alternates have failed, maybe because service inaccessible
if settings.login_methods[0] == self:
# Try logging in locally using cached credentials
if temp_user[passfield] == form.vars.get(passfield, ""):
# Success
user = temp_user
else:
# User not in db
if not settings.alternate_requires_registration:
# We're allowed to auto-register users from external systems
for login_method in settings.login_methods:
if login_method != self and \
login_method(request.vars[userfield],
request.vars[passfield]):
if self not in settings.login_methods:
# Do not store password in db
form.vars[passfield] = None
user = self.get_or_create_user(form.vars)
break
if not user:
self.log_event(settings.login_failed_log,
request.post_vars)
# Invalid login
session.error = messages.invalid_login
if inline:
# If inline, stay on the same page
next_url = URL(args=request.args,
vars=request.get_vars)
else:
# If not inline, return to configured login page
next_url = self.url(args=request.args,
vars=request.get_vars)
redirect(next_url)
else:
# Use a central authentication server
cas = settings.login_form
cas_user = cas.get_user()
if cas_user:
cas_user[passfield] = None
user = self.get_or_create_user(utable._filter_fields(cas_user))
# @ToDo: Complete Registration for new users
#form = Storage()
#form.vars = user
#self.s3_user_register_onaccept(form)
elif hasattr(cas, "login_form"):
return cas.login_form()
else:
# we need to pass through login again before going on
next = "%s?_next=%s" % (URL(r=request), next)
redirect(cas.login_url(next))
# Process authenticated users
if user:
user = Storage(utable._filter_fields(user, id=True))
self.login_user(user)
if log and self.user:
self.log_event(log, self.user)
# How to continue
if settings.login_form == self:
if accepted_form:
if onaccept:
onaccept(form)
if isinstance(next, (list, tuple)):
# fix issue with 2.6
next = next[0]
if next and not next[0] == "/" and next[:4] != "http":
next = self.url(next.replace("[id]", str(form.vars.id)))
redirect(next)
utable[userfield].requires = old_requires
return form
else:
redirect(next)
# -------------------------------------------------------------------------
def change_password(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
):
"""
Returns a form that lets the user change password
"""
if not self.is_logged_in():
redirect(self.settings.login_url,
client_side=self.settings.client_side)
messages = self.messages
settings = self.settings
utable = settings.table_user
s = self.db(utable.id == self.user.id)
request = current.request
session = current.session
if next is DEFAULT:
next = self.get_vars_next() or settings.change_password_next
if onvalidation is DEFAULT:
onvalidation = settings.change_password_onvalidation
if onaccept is DEFAULT:
onaccept = settings.change_password_onaccept
if log is DEFAULT:
log = messages["change_password_log"]
passfield = settings.password_field
form = SQLFORM.factory(
Field("old_password", "password",
label=messages.old_password,
requires=utable[passfield].requires),
Field("new_password", "password",
label=messages.new_password,
requires=utable[passfield].requires),
Field("new_password2", "password",
label=messages.verify_password,
requires=[IS_EXPR(
"value==%s" % repr(request.vars.new_password),
messages.mismatched_password)]),
submit_button=messages.password_change_button,
hidden=dict(_next=next),
formstyle=current.deployment_settings.get_ui_formstyle(),
separator=settings.label_separator
)
form.add_class("auth_change_password")
if form.accepts(request, session,
formname="change_password",
onvalidation=onvalidation,
hideerror=settings.hideerror):
if not form.vars["old_password"] == s.select(limitby=(0,1), orderby_on_limitby=False).first()[passfield]:
form.errors["old_password"] = messages.invalid_password
else:
d = {passfield: str(form.vars.new_password)}
s.update(**d)
session.confirmation = messages.password_changed
self.log_event(log, self.user)
callback(onaccept, form)
if not next:
next = self.url(args=request.args)
else:
next = replace_id(next, form)
redirect(next, client_side=settings.client_side)
return form
# -------------------------------------------------------------------------
def request_reset_password(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
):
"""
Returns a form to reset the user password, overrides web2py's
version of the method to apply Eden formstyles.
@param next: URL to redirect to after successful form submission
@param onvalidation: callback to validate password reset form
@param onaccept: callback to post-process password reset request
@param log: event description for the log (string)
"""
messages = self.messages
settings = self.settings
utable = settings.table_user
request = current.request
response = current.response
session = current.session
captcha = settings.retrieve_password_captcha or \
(settings.retrieve_password_captcha != False and settings.captcha)
if next is DEFAULT:
next = self.get_vars_next() or settings.request_reset_password_next
if not settings.mailer:
response.error = messages.function_disabled
return ""
if onvalidation is DEFAULT:
onvalidation = settings.reset_password_onvalidation
if onaccept is DEFAULT:
onaccept = settings.reset_password_onaccept
if log is DEFAULT:
log = messages["reset_password_log"]
userfield = settings.login_userfield
if userfield == "email":
utable.email.requires = [
IS_EMAIL(error_message=messages.invalid_email),
IS_IN_DB(self.db, utable.email,
error_message=messages.invalid_email)]
else:
utable[userfield].requires = [
IS_IN_DB(self.db, utable[userfield],
error_message=messages.invalid_username)]
form = SQLFORM(utable,
fields=[userfield],
hidden=dict(_next=next),
showid=settings.showid,
submit_button=messages.password_reset_button,
delete_label=messages.delete_label,
formstyle=current.deployment_settings.get_ui_formstyle(),
separator=settings.label_separator
)
form.add_class("auth_reset_password")
if captcha:
addrow(form, captcha.label, captcha,
captcha.comment, settings.formstyle, "captcha__row")
if form.accepts(request, session if self.csrf_prevention else None,
formname="reset_password", dbio=False,
onvalidation=onvalidation,
hideerror=settings.hideerror):
user = utable(**{userfield:form.vars.get(userfield)})
if not user:
session.error = messages["invalid_%s" % userfield]
redirect(self.url(args=request.args),
client_side=settings.client_side)
elif user.registration_key in ("pending", "disabled", "blocked"):
session.warning = messages.registration_pending
redirect(self.url(args=request.args),
client_side=settings.client_side)
if self.email_reset_password(user):
session.confirmation = messages.email_sent
else:
session.error = messages.unable_to_send_email
self.log_event(log, user)
callback(onaccept, form)
if not next:
next = self.url(args=request.args)
else:
next = replace_id(next, form)
redirect(next, client_side=settings.client_side)
# old_requires = utable.email.requires
return form
# -------------------------------------------------------------------------
def login_user(self, user):
"""
Log the user in
- common function called by login() & register()
"""
db = current.db
deployment_settings = current.deployment_settings
request = current.request
session = current.session
settings = self.settings
vars = request.vars
# If the user hasn't set a personal UTC offset,
# then read the UTC offset from the form:
if not user.utc_offset:
user.utc_offset = session.s3.utc_offset
session.auth = Storage(
user=user,
last_visit=request.now,
expiration = vars.get("remember", False) and \
settings.long_expiration or settings.expiration,
remember = vars.has_key("remember"),
hmac_key = web2py_uuid()
)
self.user = user
self.s3_set_roles()
# Set a Cookie to present user with login box by default
self.set_cookie()
# Read their language from the Profile
language = user.language
current.T.force(language)
session.s3.language = language
session.confirmation = self.messages.logged_in
# Update the timestamp of the User so we know when they last logged-in
utable = settings.table_user
db(utable.id == self.user.id).update(timestmp = request.utcnow)
# Set user's position
# @ToDo: Per-User settings
if deployment_settings.get_auth_set_presence_on_login() and \
vars.has_key("auth_user_clientlocation") and \
vars.get("auth_user_clientlocation"):
position = vars.get("auth_user_clientlocation").split("|", 3)
userlat = float(position[0])
userlon = float(position[1])
accuracy = float(position[2]) / 1000 # Ensures accuracy is in km
closestpoint = 0
closestdistance = 0
gis = current.gis
# @ToDo: Filter to just Sites & Home Addresses?
locations = gis.get_features_in_radius(userlat, userlon, accuracy)
ignore_levels_for_presence = deployment_settings.get_auth_ignore_levels_for_presence()
greatCircleDistance = gis.greatCircleDistance
for location in locations:
if location.level not in ignore_levels_for_presence:
if closestpoint != 0:
currentdistance = greatCircleDistance(closestpoint.lat,
closestpoint.lon,
location.lat,
location.lon)
if currentdistance < closestdistance:
closestpoint = location
closestdistance = currentdistance
else:
closestpoint = location
s3tracker = S3Tracker()
person_id = self.s3_logged_in_person()
if closestpoint == 0 and deployment_settings.get_auth_create_unknown_locations():
# There wasn't any near-by location, so create one
newpoint = {"lat": userlat,
"lon": userlon,
"name": "Waypoint"
}
closestpoint = current.s3db.gis_location.insert(**newpoint)
s3tracker(db.pr_person,
person_id).set_location(closestpoint,
timestmp=request.utcnow)
elif closestpoint != 0:
s3tracker(db.pr_person,
person_id).set_location(closestpoint,
timestmp=request.utcnow)
# -------------------------------------------------------------------------
def register(self,
next = DEFAULT,
onvalidation = DEFAULT,
onaccept = DEFAULT,
log = DEFAULT,
js_validation = True, # Set to False if using custom validation
):
"""
Overrides Web2Py's register() to add new functionality:
- Checks whether registration is permitted
- Custom Flash styles
- Allow form to be embedded in other pages
- Optional addition of Mobile Phone field to the Register form
- Optional addition of Organisation field to the Register form
- Lookup Domains/Organisations to check for Whitelists
&/or custom Approver
@return: a registration form
"""
db = current.db
settings = self.settings
messages = self.messages
request = current.request
session = current.session
deployment_settings = current.deployment_settings
T = current.T
utable = self.settings.table_user
utablename = utable._tablename
passfield = settings.password_field
# S3: Don't allow registration if disabled
if not deployment_settings.get_security_self_registration():
session.error = messages.registration_disabled
redirect(URL(args=["login"]))
if self.is_logged_in() and request.function != "index":
redirect(settings.logged_url)
if next == DEFAULT:
next = request.vars._next or settings.register_next
if onvalidation == DEFAULT:
onvalidation = settings.register_onvalidation
if onaccept == DEFAULT:
onaccept = settings.register_onaccept
if log == DEFAULT:
log = messages.register_log
labels, required = s3_mark_required(utable)
formstyle = deployment_settings.get_ui_formstyle()
REGISTER = T("Register")
buttons = [INPUT(_type="submit", _value=REGISTER),
A(T("Login"),
_href=URL(f="user", args="login"),
_id="login-btn",
_class="action-lnk",
),
]
current.response.form_label_separator = ""
form = SQLFORM(utable,
hidden = dict(_next=request.vars._next),
labels = labels,
separator = "",
showid = settings.showid,
submit_button = REGISTER,
delete_label = messages.delete_label,
formstyle = formstyle,
buttons = buttons,
)
# Identify form for CSS & JS Validation
form.add_class("auth_register")
if js_validation:
# Client-side Validation
self.s3_register_validation()
# Insert a Password-confirmation field
for i, row in enumerate(form[0].components):
item = row.element("input", _name=passfield)
if item:
field_id = "%s_password_two" % utablename
s3_addrow(form,
LABEL(DIV("%s:" % messages.verify_password,
SPAN("*", _class="req"),
_for="password_two",
_id=field_id + SQLFORM.ID_LABEL_SUFFIX,
),
),
INPUT(_name="password_two",
_id=field_id,
_type="password",
requires=IS_EXPR("value==%s" % \
repr(request.vars.get(passfield, None)),
error_message=messages.mismatched_password)
),
"",
formstyle,
field_id + SQLFORM.ID_ROW_SUFFIX,
position = i + 1,
)
# Add an opt in clause to receive emails depending on the deployment settings
if deployment_settings.get_auth_opt_in_to_email():
field_id = "%s_opt_in" % utablename
comment = DIV(DIV(_class="tooltip",
_title="%s|%s" % (T("Mailing list"),
T("By selecting this you agree that we may contact you."))))
checked = deployment_settings.get_auth_opt_in_default() and "selected"
s3_addrow(form,
LABEL("%s:" % T("Receive updates"),
_for="opt_in",
_id=field_id + SQLFORM.ID_LABEL_SUFFIX,
),
INPUT(_name="opt_in", _id=field_id, _type="checkbox", _checked=checked),
comment,
formstyle,
field_id + SQLFORM.ID_ROW_SUFFIX,
)
# S3: Insert Home phone field into form
if deployment_settings.get_auth_registration_requests_home_phone():
for i, row in enumerate(form[0].components):
item = row.element("input", _name="email")
if item:
field_id = "%s_home" % utablename
s3_addrow(form,
LABEL("%s:" % T("Home Phone"),
_for="home",
_id=field_id + SQLFORM.ID_LABEL_SUFFIX,
),
INPUT(_name="home", _id=field_id),
"",
formstyle,
field_id + SQLFORM.ID_ROW_SUFFIX,
position = i + 1,
)
# S3: Insert Mobile phone field into form
if deployment_settings.get_auth_registration_requests_mobile_phone():
for i, row in enumerate(form[0].components):
item = row.element("input", _name="email")
if item:
field_id = "%s_mobile" % utablename
if deployment_settings.get_auth_registration_mobile_phone_mandatory():
mandatory = SPAN("*", _class="req")
comment = ""
else:
mandatory = ""
comment = DIV(_class="tooltip",
_title="%s|%s" % (deployment_settings.get_ui_label_mobile_phone(),
messages.help_mobile_phone))
s3_addrow(form,
LABEL("%s:" % deployment_settings.get_ui_label_mobile_phone(),
mandatory,
_for="mobile",
_id=field_id + SQLFORM.ID_LABEL_SUFFIX,
),
INPUT(_name="mobile", _id=field_id),
comment,
formstyle,
field_id + SQLFORM.ID_ROW_SUFFIX,
position = i + 1,
)
# S3: Insert Photo widget into form
if deployment_settings.get_auth_registration_requests_image():
label = self.messages.label_image
comment = DIV(_class="stickytip",
_title="%s|%s" % (label,
self.messages.help_image % \
dict(gravatar = A("Gravatar",
_target="top",
_href="http://gravatar.com"))))
field_id = "%s_image" % utablename
widget = SQLFORM.widgets["upload"].widget(current.s3db.pr_image.image, None)
s3_addrow(form,
LABEL("%s:" % label,
_for="image",
_id=field_id + SQLFORM.ID_LABEL_SUFFIX,
),
widget,
comment,
formstyle,
field_id + SQLFORM.ID_ROW_SUFFIX,
)
if deployment_settings.get_auth_terms_of_service():
field_id = "%s_tos" % utablename
label = T("I agree to the %(terms_of_service)s") % \
dict(terms_of_service=A(T("Terms of Service"),
_href=URL(c="default", f="tos"),
_target="_blank",
))
label = XML("%s:" % label)
s3_addrow(form,
LABEL(label,
_for="tos",
_id=field_id + SQLFORM.ID_LABEL_SUFFIX,
),
INPUT(_name="tos",
_id=field_id,
_type="checkbox",
),
"",
formstyle,
field_id + SQLFORM.ID_ROW_SUFFIX,
)
if settings.captcha is not None:
form[0].insert(-1, DIV("", settings.captcha, ""))
utable.registration_key.default = key = str(uuid4())
if form.accepts(request.vars, session, formname="register",
onvalidation=onvalidation):
# Save temporary user fields
self.s3_user_register_onaccept(form)
users = db(utable.id > 0).select(utable.id,
limitby=(0, 2))
if len(users) == 1:
# 1st user to register doesn't need verification/approval
self.s3_approve_user(form.vars)
current.session.confirmation = self.messages.registration_successful
# 1st user gets Admin rights
admin_group_id = 1
self.add_membership(admin_group_id, users.first().id)
# Log them in
if "language" not in form.vars:
# Was missing from login form
form.vars.language = T.accepted_language
user = Storage(utable._filter_fields(form.vars, id=True))
self.login_user(user)
self.s3_send_welcome_email(form.vars)
elif settings.registration_requires_verification:
# Send the Verification email
if not settings.mailer or \
not settings.mailer.settings.server or \
not settings.mailer.send(to=form.vars.email,
subject=messages.verify_email_subject % \
dict(system_name=deployment_settings.get_system_name()),
message=messages.verify_email % \
dict(url="%s/default/user/verify_email/%s" % \
(current.response.s3.base_url, key))):
current.response.error = messages.email_verification_failed
return form
# @ToDo: Deployment Setting?
#session.confirmation = messages.email_sent
next = URL(c="default", f="message",
args = ["verify_email_sent"],
vars = {"email": form.vars.email})
else:
# Does the user need to be approved?
approved = self.s3_verify_user(form.vars)
if approved:
# Log them in
if "language" not in form.vars:
# Was missing from login form
form.vars.language = T.accepted_language
user = Storage(utable._filter_fields(form.vars, id=True))
self.login_user(user)
# Set a Cookie to present user with login box by default
self.set_cookie()
if log:
self.log_event(log, form.vars)
if onaccept:
onaccept(form)
if not next:
next = self.url(args = request.args)
elif isinstance(next, (list, tuple)):
# fix issue with 2.6
next = next[0]
elif next and not next[0] == "/" and next[:4] != "http":
next = self.url(next.replace("[id]", str(form.vars.id)))
redirect(next)
return form
# -------------------------------------------------------------------------
def email_reset_password(self, user):
"""
Overrides Web2Py's email_reset_password() to modify the message
structure
@param user: the auth_user record (Row)
"""
mailer = self.settings.mailer
if not mailer:
return False
import time
reset_password_key = str(int(time.time())) + '-' + web2py_uuid()
reset_password_url = "%s/default/user/reset_password/%s" % \
(current.response.s3.base_url, reset_password_key)
message = self.messages.reset_password % dict(url=reset_password_url)
if mailer.send(to=user.email,
subject=self.messages.reset_password_subject,
message=message):
user.update_record(reset_password_key=reset_password_key)
return True
return False
# -------------------------------------------------------------------------
def add_membership(self, group_id=None, user_id=None, role=None,
entity=None):
"""
gives user_id membership of group_id or role
if user_id is None then it defaults to the currently logged-in user
S3: extended to support Entities
"""
group_id = group_id or self.id_group(role)
try:
group_id = int(group_id)
except:
group_id = self.id_group(group_id) # interpret group_id as a role
if not user_id and self.user:
user_id = self.user.id
membership = self.settings.table_membership
record = membership(user_id=user_id, group_id=group_id, pe_id=entity)
if record:
return record.id
else:
id = membership.insert(group_id=group_id, user_id=user_id, pe_id=entity)
self.update_groups()
self.log_event(self.messages.add_membership_log,
dict(user_id=user_id, group_id=group_id))
return id
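# Illustrative calls (ids and role names assumed for the example):
# auth.add_membership(group_id=2, user_id=5)
# auth.add_membership(role="Editor", entity=pe_id) # restrict to a realm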
# -------------------------------------------------------------------------
def verify_email(self,
next=DEFAULT,
log=DEFAULT):
"""
Action for the user to verify their registration email
.. method:: Auth.verify_email([next=DEFAULT [, log=DEFAULT]])
"""
settings = self.settings
messages = self.messages
key = current.request.args[-1]
utable = settings.table_user
query = (utable.registration_key == key)
user = current.db(query).select(limitby=(0, 1)).first()
if not user:
redirect(settings.verify_email_next)
if log == DEFAULT:
log = messages.verify_email_log
if next == DEFAULT:
next = settings.verify_email_next
approved = self.s3_verify_user(user)
if approved:
# Log them in
user = Storage(utable._filter_fields(user, id=True))
self.login_user(user)
if log:
self.log_event(log, user)
redirect(next)
# -------------------------------------------------------------------------
def profile(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
):
"""
returns a form that lets the user change his/her profile
.. method:: Auth.profile([next=DEFAULT [, onvalidation=DEFAULT
[, onaccept=DEFAULT [, log=DEFAULT]]]])
Patched for S3 to use s3_mark_required and handle opt_in mailing lists
"""
if not self.is_logged_in():
redirect(self.settings.login_url)
messages = self.messages
settings = self.settings
utable = settings.table_user
passfield = settings.password_field
utable[passfield].writable = False
request = current.request
session = current.session
deployment_settings = current.deployment_settings
if deployment_settings.get_auth_show_utc_offset():
utable.utc_offset.readable = True
utable.utc_offset.writable = True
# Users should not be able to change their Org affiliation
utable.organisation_id.writable = False
## Only allowed to select Orgs that the user has update access to
#utable.organisation_id.requires = \
# current.s3db.org_organisation_requires(updateable = True)
if next == DEFAULT:
next = request.get_vars._next \
or request.post_vars._next \
or settings.profile_next
if onvalidation == DEFAULT:
onvalidation = settings.profile_onvalidation
if onaccept == DEFAULT:
onaccept = settings.profile_onaccept
if log == DEFAULT:
log = messages.profile_log
labels, required = s3_mark_required(utable)
# If we have an opt_in and some post_vars then update the opt_in value
opt_in_to_email = deployment_settings.get_auth_opt_in_to_email()
if opt_in_to_email:
team_list = deployment_settings.get_auth_opt_in_team_list()
if request.post_vars:
removed = []
selected = []
for opt_in in team_list:
if opt_in in request.post_vars:
selected.append(opt_in)
else:
removed.append(opt_in)
db = current.db
s3db = current.s3db
ptable = s3db.pr_person
putable = s3db.pr_person_user
query = (putable.user_id == request.post_vars.id) & \
(putable.pe_id == ptable.pe_id)
person_id = db(query).select(ptable.id, limitby=(0, 1)).first().id
db(ptable.id == person_id).update(opt_in = selected)
g_table = s3db["pr_group"]
gm_table = s3db["pr_group_membership"]
# Remove them from any team they are a member of in the removed list
for team in removed:
query = (g_table.name == team) & \
(gm_table.group_id == g_table.id) & \
(gm_table.person_id == person_id)
gm_rec = db(query).select(g_table.id, limitby=(0, 1)).first()
if gm_rec:
db(gm_table.id == gm_rec.id).delete()
# Add them to the team (if they are not already a team member)
for team in selected:
query = (g_table.name == team) & \
(gm_table.group_id == g_table.id) & \
(gm_table.person_id == person_id)
gm_rec = db(query).select(g_table.id, limitby=(0, 1)).first()
if not gm_rec:
query = (g_table.name == team)
team_rec = db(query).select(g_table.id,
limitby=(0, 1)).first()
# if the team doesn't exist then add it
if team_rec is None:
team_id = g_table.insert(name=team, group_type=5)
else:
team_id = team_rec.id
gm_table.insert(group_id = team_id,
person_id = person_id)
formstyle = deployment_settings.get_ui_formstyle()
current.response.form_label_separator = ""
form = SQLFORM(utable,
self.user.id,
fields = settings.profile_fields,
labels = labels,
hidden = dict(_next=next),
showid = settings.showid,
submit_button = messages.profile_save_button,
delete_label = messages.delete_label,
upload = settings.download_url,
formstyle = formstyle,
separator = ""
)
form.add_class("auth_profile")
if deployment_settings.get_auth_openid():
from gluon.contrib.login_methods.openid_auth import OpenIDAuth
openid_login_form = OpenIDAuth(self)
form = DIV(form, openid_login_form.list_user_openids())
if form.accepts(request, session,
formname="profile",
onvalidation=onvalidation,
hideerror=settings.hideerror):
self.auth_user_onaccept(form.vars.email, self.user.id)
self.user.update(utable._filter_fields(form.vars))
session.flash = messages.profile_updated
if log:
self.log_event(log, self.user)
callback(onaccept, form)
if not next:
next = self.url(args=request.args)
elif isinstance(next, (list, tuple)): ### fix issue with 2.6
next = next[0]
elif next and not next[0] == "/" and next[:4] != "http":
next = self.url(next.replace("[id]", str(form.vars.id)))
redirect(next)
if opt_in_to_email:
T = current.T
ptable = s3db.pr_person
ltable = s3db.pr_person_user
team_list = deployment_settings.get_auth_opt_in_team_list()
query = (ltable.user_id == form.record.id) & \
(ltable.pe_id == ptable.pe_id)
db_opt_in_list = db(query).select(ptable.opt_in,
limitby=(0, 1)).first().opt_in
for opt_in in team_list:
field_id = "%s_opt_in_%s" % (utable, team_list)
if opt_in in db_opt_in_list:
checked = "selected"
else:
checked = None
s3_addrow(form,
LABEL(T("Receive %(opt_in)s updates:") % \
dict(opt_in=opt_in),
_for="opt_in",
_id=field_id + SQLFORM.ID_LABEL_SUFFIX),
INPUT(_name=opt_in, _id=field_id,
_type="checkbox", _checked=checked),
"",
formstyle,
field_id + SQLFORM.ID_ROW_SUFFIX,
)
return form
# -------------------------------------------------------------------------
def configure_user_fields(self, pe_ids=None):
"""
Configure User Fields - for registration & user administration
pe_ids: an optional list of pe_ids for the Org Filter
i.e. org_admin coming from admin.py/user()
"""
from s3validators import IS_ONE_OF
T = current.T
db = current.db
s3db = current.s3db
request = current.request
messages = self.messages
cmessages = current.messages
settings = self.settings
deployment_settings = current.deployment_settings
if deployment_settings.get_ui_multiselect_widget():
from s3widgets import S3MultiSelectWidget
multiselect_widget = True
else:
multiselect_widget = False
utable = self.settings.table_user
utable.password.label = T("Password") #messages.label_password
first_name = utable.first_name
first_name.label = T("First Name") #messages.label_first_name
first_name.requires = IS_NOT_EMPTY(error_message=messages.is_empty)
last_name = utable.last_name
last_name.label = T("Last Name") #messages.label_last_name
if deployment_settings.get_L10n_mandatory_lastname():
last_name.notnull = True
last_name.requires = IS_NOT_EMPTY(error_message=messages.is_empty)
userfield = settings.login_userfield
if userfield != "email":
utable[userfield].requires = \
IS_NOT_IN_DB(db, "%s.%s" % (utable._tablename,
userfield))
email = utable.email
email.label = T("Email") #messages.label_email
email.requires = [IS_EMAIL(error_message=messages.invalid_email),
IS_LOWER(),
IS_NOT_IN_DB(db,
"%s.email" % utable._tablename,
error_message=messages.duplicate_email)
]
language = utable.language
language.label = T("Language")
language.comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Language"),
T("The language you wish the site to be displayed in.")))
languages = current.deployment_settings.get_L10n_languages()
language.represent = lambda opt: \
languages.get(opt, cmessages.UNKNOWN_OPT)
# Default the profile language to the one currently active
language.default = T.accepted_language
if multiselect_widget:
language.widget = S3MultiSelectWidget(multiple=False)
utc_offset = utable.utc_offset
utc_offset.label = messages.label_utc_offset
utc_offset.comment = DIV(_class="tooltip",
_title="%s|%s" % (messages.label_utc_offset,
messages.help_utc_offset)
)
try:
from s3validators import IS_UTC_OFFSET
utc_offset.requires = IS_EMPTY_OR(IS_UTC_OFFSET())
except:
pass
utable.registration_key.label = messages.label_registration_key
#utable.reset_password_key.label = messages.label_registration_key
# Organisation
if self.s3_has_role("ADMIN"):
show_org = deployment_settings.get_auth_admin_sees_organisation()
else:
show_org = deployment_settings.get_auth_registration_requests_organisation()
if show_org:
if pe_ids:
# Filter orgs to just those belonging to the Org Admin's Org
# & Descendants (or realms for which they are Org Admin)
filterby = "pe_id"
filter_opts = pe_ids
else:
filterby = None
filter_opts = None
organisation_id = utable.organisation_id
organisation_id.label = messages.label_organisation_id
organisation_id.readable = organisation_id.writable = True
organisation_id.default = deployment_settings.get_auth_registration_organisation_id_default()
org_represent = s3db.org_organisation_represent
organisation_id.represent = org_represent
requires = IS_ONE_OF(db, "org_organisation.id",
org_represent,
filterby=filterby,
filter_opts=filter_opts,
orderby="org_organisation.name",
sort=True)
if deployment_settings.get_auth_registration_organisation_required():
organisation_id.requires = requires
else:
organisation_id.requires = IS_EMPTY_OR(requires)
from s3layouts import S3AddResourceLink
organisation_id.comment = S3AddResourceLink(c="org",
f="organisation",
label=s3db.crud_strings["org_organisation"].label_create,
title=s3db.crud_strings["org_organisation"].title_list,)
#from s3widgets import S3OrganisationAutocompleteWidget
#organisation_id.widget = S3OrganisationAutocompleteWidget()
#organisation_id.comment = DIV(_class="tooltip",
# _title="%s|%s" % (T("Organization"),
# cmessages.AUTOCOMPLETE_HELP))
if multiselect_widget:
organisation_id.widget = S3MultiSelectWidget(multiple=False)
# Organisation Group
if deployment_settings.get_auth_registration_requests_organisation_group():
org_group_id = utable.org_group_id
org_group_id.label = messages.label_org_group_id
org_group_id.readable = org_group_id.writable = True
org_group_represent = s3db.org_group_represent
org_group_id.represent = org_group_represent
requires = IS_ONE_OF(db, "org_group.id",
org_group_represent,
# @ToDo: Filter org groups to just those belonging to the Org Admin's Org
# @ToDo: Dynamically filter groups to just those that the selected Org is a member of
#filterby=filterby,
#filter_opts=filter_opts,
orderby="org_group.name",
sort=True)
if deployment_settings.get_auth_registration_organisation_group_required():
org_group_id.requires = requires
else:
org_group_id.requires = IS_EMPTY_OR(requires)
#from s3layouts import S3AddResourceLink
#org_group_id.comment = S3AddResourceLink(c="org",
# f="group",
# label=s3db.crud_strings["org_group"].label_create,
# title=s3db.crud_strings["org_group"].title_list,)
if multiselect_widget:
org_group_id.widget = S3MultiSelectWidget(multiple=False)
# Site
if deployment_settings.get_auth_registration_requests_site():
site_id = request.get_vars.get("site_id", None)
field = utable.site_id
field.label = deployment_settings.get_org_site_label()
site_represent = s3db.org_site_represent
field.represent = site_represent
if site_id:
field.default = site_id
field.readable = True
else:
field.readable = field.writable = True
#field.default = deployment_settings.get_auth_registration_site_id_default()
site_required = deployment_settings.get_auth_registration_site_required()
if show_org:
from s3validators import IS_ONE_OF_EMPTY
requires = IS_ONE_OF_EMPTY(db, "org_site.site_id",
site_represent,
orderby="org_site.name",
sort=True)
if site_required:
site_optional = ""
else:
site_optional = ''',
'optional': true'''
current.response.s3.jquery_ready.append('''
$.filterOptionsS3({
'trigger':'organisation_id',
'target':'site_id',
'lookupField':'site_id',
'lookupResource':'site',
'lookupURL':S3.Ap.concat('/org/sites_for_org/')%s
})''' % site_optional)
else:
requires = IS_ONE_OF(db, "org_site.site_id",
site_represent,
orderby="org_site.name",
sort=True)
#from s3widgets import S3SiteAutocompleteWidget
#field.widget = S3SiteAutocompleteWidget()
field.comment = DIV(_class="tooltip",
_title="%s|%s" % (T("Facility"),
T("Select the default site.")))
if site_required:
field.requires = requires
else:
field.requires = IS_EMPTY_OR(requires)
if "profile" in request.args:
return
# Link User to
link_user_to_opts = deployment_settings.get_auth_registration_link_user_to()
if link_user_to_opts:
link_user_to = utable.link_user_to
link_user_to_default = deployment_settings.get_auth_registration_link_user_to_default()
req_vars = request.vars
for type in ["staff", "volunteer", "member"]:
if "link_user_to_%s" % type in req_vars:
link_user_to_default.append(type)
if link_user_to_default:
link_user_to.default = link_user_to_default
else:
link_user_to.readable = link_user_to.writable = True
link_user_to.label = T("Register As")
link_user_to.requires = IS_IN_SET(link_user_to_opts,
multiple = True
)
link_user_to.represent = lambda ids: \
ids and ", ".join([str(link_user_to_opts[id]) for id in ids]) or cmessages["NONE"]
#if multiselect_widget:
# link_user_to.widget = S3MultiSelectWidget()
#else:
link_user_to.widget = SQLFORM.widgets.checkboxes.widget
link_user_to.comment = DIV(_class="tooltip",
_title="%s|%s" % (link_user_to.label,
T("Will create and link your user account to the following records")))
# -------------------------------------------------------------------------
def s3_import_prep(self, data):
"""
Called when users are imported from CSV
            Looks up pseudo-reference integer fields from names,
e.g.:
auth_membership.pe_id from organisation.name=<Org Name>
@ToDo: Add support for Sites
"""
from s3utils import s3_debug
db = current.db
s3db = current.s3db
update_super = s3db.update_super
otable = s3db.org_organisation
resource, tree = data
        # Non-ADMINs are treated as Org Admins here (restricted imports)
        ORG_ADMIN = not self.s3_has_role("ADMIN")
# Memberships
elements = tree.getroot().xpath("/s3xml//resource[@name='auth_membership']/data[@field='pe_id']")
looked_up = dict(org_organisation = {})
for element in elements:
pe_string = element.text
if pe_string and "=" in pe_string:
pe_type, pe_value = pe_string.split("=")
pe_tablename, pe_field = pe_type.split(".")
if pe_tablename in looked_up and \
pe_value in looked_up[pe_tablename]:
# Replace string with pe_id
element.text = looked_up[pe_tablename][pe_value]["pe_id"]
# Don't check again
continue
if pe_tablename == "org_organisation":
# @ToDo: Add support for Organisation+BRANCH+Branch
table = otable
else:
table = s3db[pe_tablename]
if pe_tablename not in looked_up:
looked_up[pe_tablename] = {}
record = db(table[pe_field] == pe_value).select(table.id, # Stored for Org/Groups later
table.pe_id,
limitby=(0, 1)
).first()
if not record:
# Add a new record
id = table.insert(**{pe_field: pe_value})
update_super(table, Storage(id=id))
self.s3_set_record_owner(table, id)
record = db(table.id == id).select(table.id,
table.pe_id,
limitby=(0, 1)).first()
new_value = str(record.pe_id)
# Replace string with pe_id
element.text = new_value
# Store in case we get called again with same value
looked_up[pe_tablename][pe_value] = dict(pe_id=new_value,
id=str(record.id),
)
# Organisations
elements = tree.getroot().xpath("/s3xml//resource[@name='auth_user']/data[@field='organisation_id']")
orgs = looked_up["org_organisation"]
for element in elements:
org_full = element.text
if org_full in orgs:
# Replace string with id
element.text = orgs[org_full]["id"]
# Don't check again
continue
try:
# Is this the 2nd phase of a 2-phase import & hence values have already been replaced?
int(org_full)
except ValueError:
# This is a non-integer, so must be 1st or only phase
if "+BRANCH+" in org_full:
parent, org = org_full.split("+BRANCH+")
else:
parent = None
org = org_full
if parent:
btable = s3db.org_organisation_branch
ptable = db.org_organisation.with_alias("org_parent_organisation")
query = (otable.name == org) & \
(ptable.name == parent) & \
(btable.organisation_id == ptable.id) & \
(btable.branch_id == otable.id)
else:
query = (otable.name == org)
records = db(query).select(otable.id)
if len(records) == 1:
id = records.first().id
elif len(records) > 1:
# Ambiguous
s3_debug("Cannot set Organisation %s for user as there are multiple matches" % org)
id = ""
else:
if ORG_ADMIN:
# NB ORG_ADMIN has the list of permitted pe_ids already in filter_opts
s3_debug("Cannot create new Organisation %s as ORG_ADMIN cannot create new Orgs during User Imports" % org)
id = ""
else:
# Add a new record
id = otable.insert(name=org)
update_super(otable, Storage(id=id))
self.s3_set_record_owner(otable, id)
# @ToDo: Call onaccept?
if parent:
records = db(otable.name == parent).select(otable.id)
if len(records) == 1:
# Add branch link
link_id = btable.insert(organisation_id = records.first().id,
branch_id = id)
onaccept = s3db.get_config("org_organisation_branch", "onaccept")
callback(onaccept, Storage(vars=Storage(id=link_id)))
elif len(records) > 1:
# Ambiguous
s3_debug("Cannot set branch link for new Organisation %s as there are multiple matches for parent %s" % (org, parent))
else:
# Create Parent
parent_id = otable.insert(name=parent)
update_super(otable, Storage(id=parent_id))
                                self.s3_set_record_owner(otable, parent_id)
# @ToDo: Call onaccept?
# Create link
                                link_id = btable.insert(organisation_id = parent_id,
                                                        branch_id = id)
onaccept = s3db.get_config("org_organisation_branch", "onaccept")
callback(onaccept, Storage(vars=Storage(id=link_id)))
# Replace string with id
id = str(id)
element.text = id
# Store in case we get called again with same value
orgs[org_full] = dict(id=id)
else:
# Store in case we get called again with same value
orgs[org_full] = dict(id=org_full)
# Organisation Groups
elements = tree.getroot().xpath("/s3xml//resource[@name='auth_user']/data[@field='org_group_id']")
if elements:
gtable = s3db.org_group
org_groups = looked_up.get("org_organisation_group", {})
for element in elements:
name = element.text
if name in org_groups:
# Replace string with id
element.text = org_groups[name]["id"]
# Don't check again
continue
try:
# Is this the 2nd phase of a 2-phase import & hence values have already been replaced?
                    int(name)
except ValueError:
# This is a non-integer, so must be 1st or only phase
record = db(gtable.name == name).select(gtable.id,
limitby=(0, 1)
).first()
if record:
id = record.id
else:
# Add a new record
id = gtable.insert(name=name)
update_super(gtable, Storage(id=id))
# Replace string with id
id = str(id)
element.text = id
# Store in case we get called again with same value
org_groups[name] = dict(id=id)
else:
# Store in case we get called again with same value
org_groups[name] = dict(id=name)
# -------------------------------------------------------------------------
@staticmethod
def s3_register_validation():
"""
JavaScript client-side validation for Registration / User profile
- needed to check for passwords being same, etc
"""
T = current.T
request = current.request
appname = request.application
settings = current.deployment_settings
s3 = current.response.s3
# Static Scripts
scripts_append = s3.scripts.append
if s3.debug:
scripts_append("/%s/static/scripts/jquery.validate.js" % appname)
scripts_append("/%s/static/scripts/jquery.pstrength.2.1.0.js" % appname)
scripts_append("/%s/static/scripts/S3/s3.register_validation.js" % appname)
else:
scripts_append("/%s/static/scripts/jquery.validate.min.js" % appname)
scripts_append("/%s/static/scripts/jquery.pstrength.2.1.0.min.js" % appname)
scripts_append("/%s/static/scripts/S3/s3.register_validation.min.js" % appname)
# Configuration
js_global = []
js_append = js_global.append
if settings.get_auth_registration_mobile_phone_mandatory():
js_append('''S3.auth_registration_mobile_phone_mandatory=1''')
if settings.get_auth_registration_organisation_required():
js_append('''S3.auth_registration_organisation_required=1''')
js_append('''i18n.enter_your_organisation="%s"''' % T("Enter your organization"))
if settings.get_auth_terms_of_service():
js_append('''S3.auth_terms_of_service=1''')
js_append('''i18n.tos_required="%s"''' % T("You must agree to the Terms of Service"))
if request.controller != "admin":
if settings.get_auth_registration_organisation_hidden():
js_append('''S3.auth_registration_hide_organisation=1''')
# Check for Whitelists
table = current.s3db.auth_organisation
query = (table.organisation_id != None) & \
(table.domain != None)
whitelists = current.db(query).select(table.organisation_id,
table.domain)
if whitelists:
domains = []
domains_append = domains.append
for whitelist in whitelists:
domains_append("'%s':%s" % (whitelist.domain,
whitelist.organisation_id))
domains = ''','''.join(domains)
domains = '''S3.whitelists={%s}''' % domains
js_append(domains)
js_append('''i18n.enter_first_name="%s"''' % T("Enter your first name"))
js_append('''i18n.provide_password="%s"''' % T("Provide a password"))
js_append('''i18n.repeat_your_password="%s"''' % T("Repeat your password"))
js_append('''i18n.enter_same_password="%s"''' % T("Enter the same password as above"))
js_append('''i18n.please_enter_valid_email="%s"''' % T("Please enter a valid email address"))
js_append('''S3.password_min_length=%i''' % settings.get_auth_password_min_length())
js_append('''i18n.password_min_chars="%s"''' % T("You must enter a minimum of %d characters"))
js_append('''i18n.weak="%s"''' % T("Weak"))
js_append('''i18n.normal="%s"''' % T("Normal"))
js_append('''i18n.medium="%s"''' % T("Medium"))
js_append('''i18n.strong="%s"''' % T("Strong"))
js_append('''i18n.very_strong="%s"''' % T("Very Strong"))
script = '''\n'''.join(js_global)
s3.js_global.append(script)
# Call script after Global config done
s3.jquery_ready.append('''s3_register_validation()''')
# -------------------------------------------------------------------------
    def auth_user_onaccept(self, email, user_id):
        """
            If logins are not username-based, set a default username
            derived from the email address, and register that username
            with the chat server (if one is configured)
        """
db = current.db
if self.settings.login_userfield != "username":
deployment_settings = current.deployment_settings
chat_username = email.replace("@", "_")
db(db.auth_user.id == user_id).update(username = chat_username)
chat_server = deployment_settings.get_chat_server()
if chat_server:
chatdb = DAL(deployment_settings.get_chatdb_string(), migrate=False)
# Using RawSQL as table not created in web2py
sql_query="insert into ofGroupUser values (\'%s\',\'%s\' ,0);" % (chat_server["groupname"], chat_username)
chatdb.executesql(sql_query)
# -------------------------------------------------------------------------
def s3_user_register_onaccept(self, form):
"""
S3 framework function
Designed to be called when a user is created through:
- registration
Does the following:
- Stores the user's email & profile image in auth_user_temp
to be added to their person record when created on approval
@ToDo: If these fields are implemented with the InlineForms functionality,
this function may become redundant
"""
db = current.db
s3db = current.s3db
session = current.session
utable = self.settings.table_user
temptable = s3db.auth_user_temp
form_vars = form.vars
user_id = form_vars.id
if not user_id:
return None
# If the user hasn't set a personal UTC offset,
# then read the UTC offset from the form:
if not form_vars.utc_offset:
db(utable.id == user_id).update(utc_offset = session.s3.utc_offset)
record = dict(user_id = user_id)
# Add the home_phone to pr_contact
home = form_vars.home
if home:
record["home"] = home
# Add the mobile to pr_contact
mobile = form_vars.mobile
if mobile:
record["mobile"] = mobile
# Insert the profile picture
image = form_vars.image
        if image is not None and hasattr(image, "file"):
# @ToDo: DEBUG!!!
source_file = image.file
original_filename = image.filename
field = temptable.image
newfilename = field.store(source_file,
original_filename,
field.uploadfolder)
if isinstance(field.uploadfield, str):
form_vars[field.uploadfield] = source_file.read()
record["image"] = newfilename
if len(record) > 1:
temptable.update_or_insert(**record)
# -------------------------------------------------------------------------
def s3_verify_user(self, user):
""""
Designed to be called when a user is verified through:
- responding to their verification email
- if verification isn't required
Does the following:
- Sends a message to the approver to notify them if a user needs approval
- If deployment_settings.auth.always_notify_approver = True,
send them notification regardless
- If approval isn't required - calls s3_approve_user
            @returns: boolean - whether the user has been approved
"""
db = current.db
deployment_settings = current.deployment_settings
session = current.session
utable = self.settings.table_user
# Lookup the Approver
approver, organisation_id = self.s3_approver(user)
if deployment_settings.get_auth_registration_requires_approval() and approver:
approved = False
db(utable.id == user.id).update(registration_key = "pending")
if user.registration_key:
# User has just been verified
session.information = deployment_settings.get_auth_registration_pending_approval()
else:
# No Verification needed
session.information = deployment_settings.get_auth_registration_pending()
message = "approve_user"
else:
approved = True
if organisation_id and not user.get("organisation_id", None):
# Use the whitelist
user["organisation_id"] = organisation_id
db(utable.id == user.id).update(organisation_id = organisation_id)
link_user_to = deployment_settings.get_auth_registration_link_user_to_default()
if link_user_to and not user.get("link_user_to", None):
user["link_user_to"] = link_user_to
self.s3_link_user(user)
self.s3_approve_user(user)
session.confirmation = self.messages.email_verified
session.flash = self.messages.registration_successful
if not deployment_settings.get_auth_always_notify_approver():
return True
message = "new_user"
# Ensure that we send out the mails in the language that the approver(s) want
if "@" in approver:
# Look up language of the user
record = db(utable.email == approver).select(utable.language,
limitby=(0, 1)
).first()
if record:
language = record.language
else:
language = deployment_settings.get_L10n_default_language()
approvers = [{"email": approver,
"language": language,
}]
languages = [language]
else:
approvers = []
aappend = approvers.append
languages = []
for each_approver in approver:
language = each_approver["language"]
if language not in languages:
languages.append(language)
aappend(each_approver)
T = current.T
auth_messages = self.messages
subjects = {}
messages = {}
first_name = user.first_name
last_name = user.last_name
email = user.email
user_id = user.id
base_url = current.response.s3.base_url
system_name = deployment_settings.get_system_name()
for language in languages:
T.force(language)
if message == "approve_user":
subjects[language] = \
T("%(system_name)s - New User Registration Approval Pending") % \
{"system_name": system_name}
messages[language] = auth_messages.approve_user % \
dict(system_name = system_name,
first_name = first_name,
last_name = last_name,
email = email,
url = "%(base_url)s/admin/user/%(id)s" % \
dict(base_url=base_url,
id=user_id))
elif message == "new_user":
subjects[language] = \
T("%(system_name)s - New User Registered") % \
{"system_name": system_name}
messages[language] = \
auth_messages.new_user % dict(system_name = system_name,
first_name = first_name,
last_name = last_name,
email = email)
# Restore language for UI
T.force(session.s3.language)
mailer = self.settings.mailer
if mailer.settings.server:
for approver in approvers:
language = approver["language"]
result = mailer.send(to = approver["email"],
subject = subjects[language],
message = messages[language])
else:
# Email system not configured (yet)
result = None
if not result:
# Don't prevent registration just because email not configured
#db.rollback()
current.response.error = self.messages.email_send_failed
return False
return approved
# -------------------------------------------------------------------------
def s3_approve_user(self, user):
"""
S3 framework function
Designed to be called when a user is created through:
- prepop
- approved automatically during registration
- approved by admin
- added by admin
- updated by admin
Does the following:
- Adds user to the 'Authenticated' role
- Adds any default roles for the user
- @ToDo: adds them to the Org_x Access role
"""
user_id = user.id
if not user_id:
return None
db = current.db
s3db = current.s3db
deployment_settings = current.deployment_settings
utable = self.settings.table_user
# Add to 'Authenticated' role
authenticated = self.id_group("Authenticated")
self.add_membership(authenticated, user_id)
# Add User to required registration roles
entity_roles = deployment_settings.get_auth_registration_roles()
if entity_roles:
gtable = self.settings.table_group
for entity in entity_roles.keys():
roles = entity_roles[entity]
# Get User's Organisation or Site pe_id
if entity in ("organisation_id", "org_group_id", "site_id"):
tablename = "org_%s" % entity.split("_")[0]
entity = s3db.pr_get_pe_id(tablename, user[entity])
if not entity:
continue
query = (gtable.uuid.belongs(roles))
rows = db(query).select(gtable.id)
for role in rows:
self.add_membership(role.id, user_id, entity=entity)
if deployment_settings.has_module("delphi"):
# Add user as a participant of the default problem group
table = s3db.delphi_group
query = (table.uuid == "DEFAULT")
group = db(query).select(table.id,
limitby=(0, 1)).first()
if group:
table = s3db.delphi_membership
table.insert(group_id=group.id,
user_id=user_id,
status=3)
self.s3_link_user(user)
if current.response.s3.bulk is True:
# Non-interactive imports should stop here
            user_email = db(utable.id == user_id).select(utable.email,
                                                         limitby=(0, 1)).first().email
self.auth_user_onaccept(user_email, user_id)
return
# Allow them to login
db(utable.id == user_id).update(registration_key = "")
# Approve User's Organisation
if user.organisation_id and \
"org_organisation" in deployment_settings.get_auth_record_approval_required_for():
s3db.resource("org_organisation", user.organisation_id, unapproved=True).approve()
        user_email = db(utable.id == user_id).select(utable.email,
                                                     limitby=(0, 1)).first().email
self.auth_user_onaccept(user_email, user_id)
# Send Welcome mail
self.s3_send_welcome_email(user)
return
# -------------------------------------------------------------------------
def s3_link_user(self, user):
"""
S3 framework function
Designed to be called when a user is created & approved through:
- prepop
- approved automatically during registration
- approved by admin
- added by admin
- updated by admin
Does the following:
- Calls s3_link_to_organisation:
Creates (if not existing) User's Organisation and links User
- Calls s3_link_to_person:
Creates (if not existing) User's Person Record and links User
- Calls s3_link_to_human_resource:
Creates (if not existing) User's Human Resource Record and links User
- Calls s3_link_to_member
"""
# Create/Update/Link to organisation,
organisation_id = self.s3_link_to_organisation(user)
# Add to user Person Registry and Email/Mobile to pr_contact
person_id = self.s3_link_to_person(user, organisation_id)
if user.org_group_id:
self.s3_link_to_org_group(user, person_id)
utable = self.settings.table_user
link_user_to = user.link_user_to or utable.link_user_to.default
if link_user_to:
if "staff" in link_user_to:
# Add Staff Record
human_resource_id = self.s3_link_to_human_resource(user, person_id,
type=1)
if "volunteer" in link_user_to:
# Add Volunteer Record
human_resource_id = self.s3_link_to_human_resource(user, person_id,
type=2)
if "member" in link_user_to:
# Add Member Record
member_id = self.s3_link_to_member(user, person_id)
return
# -------------------------------------------------------------------------
@staticmethod
def s3_user_profile_onaccept(form):
""" Update the UI locale from user profile """
if form.vars.language:
current.session.s3.language = form.vars.language
# -------------------------------------------------------------------------
def s3_link_to_person(self,
user=None,
organisation_id=None):
"""
Links user accounts to person registry entries
@param user: the user record
@param organisation_id: the user's organisation_id
to get the person's realm_entity
Policy for linking to pre-existing person records:
If this user is already linked to a person record with a different
first_name, last_name, email or realm_entity these will be
updated to those of the user.
If a person record with exactly the same first name and
last name exists, which has a contact information record
with exactly the same email address as used in the user
account, and is not linked to another user account, then
this person record will be linked to this user account.
Otherwise, a new person record is created, and a new email
contact record with the email address from the user record
is registered for that person.
"""
db = current.db
s3db = current.s3db
utable = self.settings.table_user
ttable = s3db.auth_user_temp
ptable = s3db.pr_person
ctable = s3db.pr_contact
atable = s3db.pr_address
gctable = s3db.gis_config
ltable = s3db.pr_person_user
# Organisation becomes the realm entity of the person record
# (unless deployment settings specify something else)
if organisation_id:
org_pe_id = s3db.pr_get_pe_id("org_organisation",
organisation_id)
else:
org_pe_id = None
left = [ltable.on(ltable.user_id == utable.id),
ptable.on(ptable.pe_id == ltable.pe_id),
ttable.on(utable.id == ttable.user_id)]
if user is not None:
if not isinstance(user, (list, tuple)):
user = [user]
user_ids = [u.id for u in user]
query = (utable.id.belongs(user_ids))
else:
query = (utable.id != None)
rows = db(query).select(utable.id,
utable.first_name,
utable.last_name,
utable.email,
ltable.pe_id,
ptable.id,
ptable.first_name,
ptable.last_name,
ttable.home,
ttable.mobile,
ttable.image,
left=left, distinct=True)
person_ids = [] # Collect the person IDs
if current.request.vars.get("opt_in", None):
opt_in = current.deployment_settings.get_auth_opt_in_team_list()
else:
opt_in = []
for row in rows:
# The user record
user = row.auth_user
# The temporary user record
tuser = row.auth_user_temp
# The person record
person = row.pr_person
# The link table record
link = row.pr_person_user
pe_id = link.pe_id
if pe_id is not None:
# There is an existing person record linked to this user account
# => update it
# Update the person names if changed
                if user.first_name != person.first_name or \
                   user.last_name != person.last_name:
query = (ptable.pe_id == pe_id)
db(query).update(first_name = user.first_name,
last_name = user.last_name)
# Add the user's email address to the person record if missing
query = (ctable.pe_id == pe_id) & \
(ctable.contact_method == "EMAIL") & \
(ctable.value == user.email)
item = db(query).select(ctable.id,
limitby=(0, 1)).first()
if item is None:
ctable.insert(pe_id = pe_id,
contact_method = "EMAIL",
value = user.email)
# Add the user's mobile_phone to the person record if missing
if tuser.mobile:
query = (ctable.pe_id == pe_id) & \
(ctable.contact_method == "SMS") & \
(ctable.value == tuser.mobile)
item = db(query).select(ctable.id,
limitby=(0, 1)).first()
if item is None:
ctable.insert(pe_id = pe_id,
contact_method = "SMS",
value = tuser.mobile)
#@ToDo: Also update home phone? profile image? Groups?
person_ids.append(person.id)
else:
# This user account isn't yet linked to a person record
# => try to find a person record with same first name,
# last name and email address
first_name = user.first_name
last_name = user.last_name
email = user.email.lower()
if email:
query = (ptable.first_name == first_name) & \
(ptable.last_name == last_name) & \
(ctable.pe_id == ptable.pe_id) & \
(ctable.contact_method == "EMAIL") & \
(ctable.value.lower() == email)
person = db(query).select(ptable.id,
ptable.pe_id,
limitby=(0, 1)).first()
else:
# Can't find a match without an email address
person = None
# Users own their person records
owner = Storage(owned_by_user=user.id)
if person:
other = db(ltable.pe_id == person.pe_id).select(ltable.id,
limitby=(0, 1)
).first()
if person and not other:
# Match found, and it isn't linked to another user account
# => link to this person record (+update it)
pe_id = person.pe_id
# Get the realm entity
realm_entity = self.get_realm_entity(ptable, person)
if not realm_entity:
# Default to organisation
realm_entity = org_pe_id
owner.realm_entity = realm_entity
# Insert a link
ltable.insert(user_id=user.id, pe_id=pe_id)
# Assign ownership of the Person record
person.update_record(**owner)
# Assign ownership of the Contact record(s)
db(ctable.pe_id == pe_id).update(**owner)
# Assign ownership of the Address record(s)
db(atable.pe_id == pe_id).update(**owner)
# Assign ownership of the GIS Config record(s)
db(gctable.pe_id == pe_id).update(**owner)
# Set pe_id if this is the current user
if self.user and self.user.id == user.id:
self.user.pe_id = pe_id
person_ids.append(person.id)
else:
# There is no match or it is linked to another user account
# => create a new person record (+link to it)
# Create a new person record
person_id = ptable.insert(first_name = first_name,
last_name = last_name,
opt_in = opt_in,
modified_by = user.id,
**owner)
if person_id:
# Update the super-entities
person = Storage(id=person_id)
s3db.update_super(ptable, person)
pe_id = person.pe_id
# Get the realm entity
realm_entity = self.get_realm_entity(ptable, person)
if not realm_entity:
# Default to organisation
realm_entity = org_pe_id
self.set_realm_entity(ptable, person,
entity=realm_entity,
)
owner.realm_entity = realm_entity
# Insert a link
ltable.insert(user_id=user.id, pe_id=pe_id)
# Add the email to pr_contact
ctable.insert(pe_id = pe_id,
contact_method = "EMAIL",
priority = 1,
value = email,
**owner)
# Add the user to each team if they have chosen to opt-in
gtable = s3db.pr_group
mtable = s3db.pr_group_membership
for team in opt_in:
team_rec = db(gtable.name == team).select(gtable.id,
limitby=(0, 1)
).first()
# if the team doesn't exist then add it
                            if team_rec is None:
team_id = gtable.insert(name = team,
group_type = 5)
else:
team_id = team_rec.id
mtable.insert(group_id = team_id,
person_id = person_id)
person_ids.append(person_id)
else:
pe_id = None
if pe_id is not None:
# Insert data from the temporary user data record
tuser = row.auth_user_temp
# Add the mobile phone number from the temporary
# user data into pr_contact
mobile = tuser.mobile
if mobile:
ctable.insert(pe_id = pe_id,
contact_method = "SMS",
priority = 2,
value = mobile,
**owner)
# Add the home phone number from the temporary
# user data into pr_contact
home = tuser.home
if home:
ctable.insert(pe_id = pe_id,
contact_method = "HOME_PHONE",
priority = 3,
value = home,
**owner)
# Insert the profile picture from the temporary
# user data into pr_image
image = tuser.image
if image: # and hasattr(image, "file"):
itable = s3db.pr_image
url = URL(c="default", f="download", args=image)
itable.insert(pe_id=pe_id,
profile=True,
image=image,
url = url,
description=current.T("Profile Picture"))
# Set pe_id if this is the current user
if self.user and self.user.id == user.id:
self.user.pe_id = pe_id
if len(person_ids) == 1:
return person_ids[0]
else:
return person_ids
# -------------------------------------------------------------------------
def s3_link_to_organisation(self, user):
"""
Link a user account to an organisation
@param user: the user account record
"""
db = current.db
s3db = current.s3db
user_id = user.id
# Lookup the organisation_id for the domain of this email address
approver, organisation_id = self.s3_approver(user)
if organisation_id:
user.organisation_id = organisation_id
else:
# Use what the user has specified
organisation_id = user.organisation_id
# @ToDo: Is it correct to override the organisation entered by the user?
# Ideally (if the deployment_settings.auth.registration_requests_organisation = True)
# the org could be selected based on the email and the user could then override
if not organisation_id:
# Create a new Organisation
name = user.get("organisation_name", None)
acronym = user.get("organisation_acronym", None)
if name:
# Create new organisation
otable = s3db.org_organisation
record = Storage(name=name,
acronym=acronym)
organisation_id = otable.insert(**record)
# Callbacks
if organisation_id:
record["id"] = organisation_id
s3db.update_super(otable, record)
s3db.onaccept(otable, record, method="create")
self.s3_set_record_owner(otable, organisation_id)
# Update user record
user.organisation_id = organisation_id
utable = self.settings.table_user
db(utable.id == user_id).update(organisation_id=organisation_id)
if not organisation_id:
return None
# Update link to Organisation
ltable = s3db.org_organisation_user
# Update if the User's Organisation has changed
query = (ltable.user_id == user_id)
rows = db(query).select(ltable.organisation_id,
limitby=(0, 2))
if len(rows) == 1:
# We know which record to update - this should always be 1
if rows.first().organisation_id != organisation_id:
db(query).update(organisation_id=organisation_id)
# No more action required
return organisation_id
else:
# Create link (if it doesn't exist)
query = (ltable.user_id == user_id) & \
(ltable.organisation_id == organisation_id)
row = db(query).select(ltable.id, limitby=(0, 1)).first()
if not row:
ltable.insert(user_id=user_id,
organisation_id=organisation_id)
return organisation_id
# -------------------------------------------------------------------------
def s3_link_to_org_group(self, user, person_id):
"""
Link a user account to an organisation group
@param user: the user account record
@param person_id: the person record ID associated with this user
"""
db = current.db
s3db = current.s3db
org_group_id = user.get("org_group_id")
if not org_group_id or not person_id:
return None
# Default status to "Member"
stable = s3db.org_group_person_status
query = (stable.name.lower() == "member") & \
(stable.deleted != True)
row = db(query).select(stable.id, limitby=(0, 1)).first()
if row:
status_id = row.id
else:
status_id = None
# Check if link exists
ltable = s3db.org_group_person
query = (ltable.person_id == person_id) & \
(ltable.org_group_id == org_group_id) & \
(ltable.deleted != True)
row = db(query).select(ltable.id, limitby=(0, 1)).first()
if not row:
# Make sure person record and org_group record exist
ptable = s3db.pr_person
gtable = s3db.org_group
if ptable[person_id] and gtable[org_group_id]:
ltable.insert(person_id=person_id,
org_group_id=org_group_id,
status_id=status_id,
)
return org_group_id
# -------------------------------------------------------------------------
def s3_link_to_human_resource(self,
user,
person_id,
type,
):
"""
            Link the user account to a human resource record, and take
            ownership of the person's HR records

            @ToDo: Add user to the Org Access role.
"""
db = current.db
s3db = current.s3db
user_id = user.id
organisation_id = user.organisation_id
htablename = "hrm_human_resource"
htable = s3db.table(htablename)
if not htable or (not organisation_id and \
current.deployment_settings.get_hrm_org_required()):
return None
# Update existing HR record for this user
if type == 1:
site_id = user.site_id
else:
site_id = None
ptable = s3db.pr_person
ltable = s3db.pr_person_user
query = (htable.deleted == False) & \
(htable.status == 1) & \
(htable.type == type) & \
(htable.person_id == ptable.id) & \
(ptable.pe_id == ltable.pe_id) & \
(ltable.user_id == user_id)
rows = db(query).select(htable.id,
limitby=(0, 2))
if len(rows) == 1:
# Only update if there is a single HR Record
hr_id = rows.first().id
db(htable.id == hr_id).update(organisation_id = organisation_id,
site_id = site_id)
# Update record ownership
self.s3_set_record_owner(htable, hr_id, force_update=True)
# Update Site link
hstable = s3db.hrm_human_resource_site
query = (hstable.human_resource_id == hr_id)
row = db(query).select(hstable.id,
limitby=(0, 1)).first()
if row:
db(query).update(site_id = site_id,
human_resource_id = hr_id,
owned_by_user = user_id)
else:
hstable.insert(site_id=site_id,
human_resource_id=hr_id,
owned_by_user=user_id)
# Create an HR record, if one doesn't already exist
if isinstance(person_id, list):
person_ids = person_id
else:
person_ids = [person_id]
query = (htable.person_id.belongs(person_ids)) & \
(htable.organisation_id == organisation_id) & \
(htable.type == type) & \
(htable.site_id == site_id)
row = db(query).select(htable.id, limitby=(0, 1)).first()
if row:
hr_id = row.id
else:
record = Storage(person_id=person_ids[0],
organisation_id=organisation_id,
site_id = site_id,
type=type,
owned_by_user=user_id,
)
hr_id = htable.insert(**record)
if hr_id:
record["id"] = hr_id
s3db.update_super(htable, record)
self.s3_set_record_owner(htable, hr_id)
s3db.onaccept(htablename, record, method="create")
return hr_id
# -------------------------------------------------------------------------
def s3_link_to_member(self,
user,
person_id = None
):
"""
Link to a member Record
"""
db = current.db
s3db = current.s3db
user_id = user.id
organisation_id = user.organisation_id
mtablename = "member_membership"
mtable = s3db.table(mtablename)
if not mtable or not organisation_id:
return None
# Update existing Member record for this user
ptable = s3db.pr_person
ltable = s3db.pr_person_user
query = (mtable.deleted == False) & \
(mtable.person_id == ptable.id) & \
(ptable.pe_id == ltable.pe_id) & \
(ltable.user_id == user_id)
rows = db(query).select(mtable.id,
limitby=(0, 2))
if len(rows) == 1:
# Only update if there is a single member Record
member_id = rows.first().id
db(mtable.id == member_id).update(organisation_id = organisation_id)
# Update record ownership
self.s3_set_record_owner(mtable, member_id, force_update=True)
# Create a Member record, if one doesn't already exist
if isinstance(person_id, list):
person_ids = person_id
else:
person_ids = [person_id]
query = (mtable.person_id.belongs(person_ids)) & \
(mtable.organisation_id == organisation_id)
row = db(query).select(mtable.id, limitby=(0, 1)).first()
if row:
member_id = row.id
else:
record = Storage(person_id=person_ids[0],
organisation_id=organisation_id,
owned_by_user=user_id,
)
member_id = mtable.insert(**record)
if member_id:
record["id"] = member_id
self.s3_set_record_owner(mtable, member_id)
s3db.onaccept(mtablename, record, method="create")
return member_id
# -------------------------------------------------------------------------
def s3_approver(self, user):
"""
Returns the Approver for a new Registration &
the organisation_id field
            @param user: the user record (form.vars when done direct)
            @ToDo: Support multiple approvers per Org - via Org Admin (or specific Role?)
                   Split into separate functions: one returning the approver,
                   one finding the user's org from auth_organisation
            @returns: approver, organisation_id - if approver = False, user is automatically approved by whitelist
"""
db = current.db
approver = None
organisation_id = user.get("organisation_id")
table = current.s3db.auth_organisation
if organisation_id:
# Check for an Organisation-specific Approver
query = (table.organisation_id == organisation_id) & \
(table.deleted == False)
record = db(query).select(table.approver,
limitby=(0, 1)).first()
elif "email" in user and user["email"] and "@" in user["email"]:
# Check for Domain: Whitelist or specific Approver
domain = user.email.split("@", 1)[-1]
query = (table.domain == domain) & \
(table.deleted == False)
record = db(query).select(table.organisation_id,
table.approver,
limitby=(0, 1)).first()
else:
record = None
if record:
if not organisation_id:
organisation_id = record.organisation_id
approver = record.approver
if not approver:
# Default Approver
approver = current.deployment_settings.get_mail_approver()
if "@" not in approver:
# Must be the UUID of a Group
utable = db.auth_user
mtable = db.auth_membership
gtable = db.auth_group
query = (gtable.uuid == approver) & \
(gtable.id == mtable.group_id) & \
(mtable.user_id == utable.id)
rows = db(query).select(utable.email,
utable.language,
distinct=True)
approver = rows.as_list()
return approver, organisation_id
# -------------------------------------------------------------------------
def s3_send_welcome_email(self, user):
"""
Send a welcome mail to newly-registered users
- especially suitable for users from Facebook/Google who don't
verify their emails
"""
messages = self.messages
settings = current.deployment_settings
if not settings.get_mail_sender():
current.response.error = messages.unable_send_email
return
#if "name" in user:
# user["first_name"] = user["name"]
#if "family_name" in user:
# # Facebook
# user["last_name"] = user["family_name"]
# Ensure that we send out the mails in the language that the recipient wants
T = current.T
T.force(user["language"])
system_name = settings.get_system_name()
subject = messages.welcome_email_subject % \
dict(system_name=system_name)
message = messages.welcome_email % \
dict(system_name = system_name,
url = settings.get_base_public_url(),
profile = URL("default", "user", args=["profile"])
)
# Restore language for UI
T.force(current.session.s3.language)
to = user["email"]
if settings.has_module("msg"):
results = current.msg.send_email(to, subject=subject,
message=message)
else:
results = current.mail.send(to, subject=subject, message=message)
if not results:
current.response.error = messages.unable_send_email
return
# -------------------------------------------------------------------------
# S3-specific authentication methods
# -------------------------------------------------------------------------
def s3_impersonate(self, user_id):
"""
S3 framework function
Designed to be used within tasks, which are run in a separate
request & hence don't have access to current.auth
@param user_id: auth.user.id or auth.user.email
"""
settings = self.settings
utable = settings.table_user
query = None
if not user_id:
# Anonymous
user = None
elif isinstance(user_id, basestring) and not user_id.isdigit():
query = (utable[settings.login_userfield] == user_id)
else:
query = (utable.id == user_id)
if query is not None:
user = current.db(query).select(limitby=(0, 1)).first()
if not user:
# Invalid user ID
raise ValueError("User not found")
else:
user = Storage(utable._filter_fields(user, id=True))
self.user = user
session = current.session
session.auth = Storage(user=user,
last_visit=current.request.now,
expiration=settings.expiration)
self.s3_set_roles()
if user:
# Set the language from the Profile
language = user.language
current.T.force(language)
session.s3.language = language
return user
# -------------------------------------------------------------------------
def s3_logged_in(self):
"""
Check whether the user is currently logged-in
- tries Basic if not
"""
if self.override:
return True
if not self.is_logged_in():
# @note: MUST NOT send an HTTP Auth challenge here because
# otherwise, negative tests (e.g. if not auth.s3_logged_in())
# would always raise and never succeed => omit basic_auth_realm,
# and send the challenge in permission.fail() instead
basic = self.basic()
try:
return basic[2]
except TypeError:
# old web2py
return basic
            except Exception:
                return False
return True
# -------------------------------------------------------------------------
# Role Management
# -------------------------------------------------------------------------
def get_system_roles(self):
"""
            Get the IDs of the system roles by their UIDs, and store them
            in the current session, as these IDs should never change.
"""
s3 = current.session.s3
try:
system_roles = s3.system_roles
        except AttributeError:
s3 = Storage()
else:
if system_roles:
return system_roles
gtable = self.settings.table_group
if gtable is not None:
S3_SYSTEM_ROLES = self.S3_SYSTEM_ROLES
query = (gtable.deleted != True) & \
gtable.uuid.belongs(S3_SYSTEM_ROLES.values())
rows = current.db(query).select(gtable.id, gtable.uuid)
system_roles = Storage([(role.uuid, role.id) for role in rows])
else:
            system_roles = Storage([(uid, None) for uid in self.S3_SYSTEM_ROLES])
s3.system_roles = system_roles
return system_roles
# -------------------------------------------------------------------------
def s3_set_roles(self):
""" Update pe_id, roles and realms for the current user """
session = current.session
s3 = current.response.s3
if "permissions" in s3:
del s3["permissions"]
if "restricted_tables" in s3:
del s3["restricted_tables"]
system_roles = self.get_system_roles()
ANONYMOUS = system_roles.ANONYMOUS
if ANONYMOUS:
session.s3.roles = [ANONYMOUS]
else:
session.s3.roles = []
if self.user:
db = current.db
s3db = current.s3db
user_id = self.user.id
# Set pe_id for current user
ltable = s3db.table("pr_person_user")
if ltable is not None:
query = (ltable.user_id == user_id)
row = db(query).select(ltable.pe_id,
limitby=(0, 1),
cache=s3db.cache).first()
if row:
self.user["pe_id"] = row.pe_id
else:
self.user["pe_id"] = None
# Get all current auth_memberships of the user
mtable = self.settings.table_membership
query = (mtable.deleted != True) & \
(mtable.user_id == user_id) & \
(mtable.group_id != None)
rows = db(query).select(mtable.group_id, mtable.pe_id,
cacheable=True)
# Add all group_ids to session.s3.roles
session.s3.roles.extend(list(set([row.group_id for row in rows])))
# Realms:
# Permissions of a group apply only for records owned by any of
# the entities which belong to the realm of the group membership
if not self.permission.entity_realm:
# Group memberships have no realms (policy 5 and below)
self.user["realms"] = Storage([(row.group_id, None) for row in rows])
self.user["delegations"] = Storage()
else:
# Group memberships are limited to realms (policy 6 and above)
realms = {}
delegations = {}
# These roles can't be realm-restricted:
unrestrictable = (system_roles.ADMIN,
system_roles.ANONYMOUS,
system_roles.AUTHENTICATED,
)
default_realm = s3db.pr_realm(self.user["pe_id"])
# Store the realms:
for row in rows:
group_id = row.group_id
if group_id in realms and realms[group_id] is None:
continue
if group_id in unrestrictable:
realms[group_id] = None
continue
if group_id not in realms:
realms[group_id] = []
realm = realms[group_id]
pe_id = row.pe_id
if pe_id is None:
if default_realm:
realm.extend([e for e in default_realm
if e not in realm])
if not realm:
del realms[group_id]
elif pe_id == 0:
# Site-wide
realms[group_id] = None
elif pe_id not in realm:
realms[group_id].append(pe_id)
if self.permission.entity_hierarchy:
# Realms include subsidiaries of the realm entities
# Get all entities in realms
all_entities = []
append = all_entities.append
for realm in realms.values():
if realm is not None:
for entity in realm:
if entity not in all_entities:
append(entity)
# Lookup all delegations to any OU ancestor of the user
if self.permission.delegations and self.user.pe_id:
ancestors = s3db.pr_get_ancestors(self.user.pe_id)
dtable = s3db.pr_delegation
rtable = s3db.pr_role
atable = s3db.pr_affiliation
dn = dtable._tablename
rn = rtable._tablename
an = atable._tablename
query = (dtable.deleted != True) & \
(atable.role_id == dtable.role_id) & \
(atable.pe_id.belongs(ancestors)) & \
(rtable.id == dtable.role_id)
rows = db(query).select(rtable.pe_id,
dtable.group_id,
atable.pe_id,
cacheable=True)
extensions = []
partners = []
for row in rows:
extensions.append(row[rn].pe_id)
partners.append(row[an].pe_id)
else:
rows = []
extensions = []
partners = []
# Lookup the subsidiaries of all realms and extensions
entities = all_entities + extensions + partners
descendants = s3db.pr_descendants(entities)
pmap = {}
for p in partners:
if p in all_entities:
pmap[p] = [p]
elif p in descendants:
d = descendants[p]
pmap[p] = [e for e in all_entities if e in d] or [p]
# Add the subsidiaries to the realms
for group_id in realms:
realm = realms[group_id]
if realm is None:
continue
append = realm.append
for entity in list(realm):
if entity in descendants:
for subsidiary in descendants[entity]:
if subsidiary not in realm:
append(subsidiary)
# Process the delegations
if self.permission.delegations:
for row in rows:
# owner == delegates group_id to ==> partner
owner = row[rn].pe_id
partner = row[an].pe_id
group_id = row[dn].group_id
if group_id in delegations and \
owner in delegations[group_id]:
# Duplicate
continue
if partner not in pmap:
continue
# Find the realm
if group_id not in delegations:
delegations[group_id] = Storage()
groups = delegations[group_id]
r = [owner]
if owner in descendants:
r.extend(descendants[owner])
for p in pmap[partner]:
if p not in groups:
groups[p] = []
realm = groups[p]
realm.extend(r)
self.user["realms"] = realms
self.user["delegations"] = delegations
if ANONYMOUS:
# Anonymous role has no realm
self.user["realms"][ANONYMOUS] = None
return
# -------------------------------------------------------------------------
def s3_create_role(self, role, description=None, *acls, **args):
"""
Back-end method to create roles with ACLs
@param role: display name for the role
@param description: description of the role (optional)
@param acls: list of initial ACLs to assign to this role
@param args: keyword arguments (see below)
@keyword name: a unique name for the role
@keyword hidden: hide this role completely from the RoleManager
@keyword system: role can be assigned, but neither modified nor
deleted in the RoleManager
@keyword protected: role can be assigned and edited, but not
deleted in the RoleManager
"""
table = self.settings.table_group
hidden = args.get("hidden")
system = args.get("system")
protected = args.get("protected")
if isinstance(description, dict):
acls = [description] + acls
description = None
uid = args.get("uid", None)
if uid:
record = current.db(table.uuid == uid).select(table.id,
limitby=(0, 1)
).first()
else:
record = None
uid = uuid4()
system_data = {}
if hidden is not None:
system_data["hidden"] = hidden
if protected is not None:
system_data["protected"] = protected
if system is not None:
system_data["system"] = system
if record:
role_id = record.id
record.update_record(deleted=False,
role=role,
description=description,
**system_data)
else:
role_id = table.insert(uuid=uid,
role=role,
description=description,
**system_data)
if role_id:
for acl in acls:
self.permission.update_acl(role_id, **acl)
return role_id
# -------------------------------------------------------------------------
def s3_delete_role(self, role_id):
"""
Remove a role from the system.
@param role_id: the ID or UID of the role
@note: protected roles cannot be deleted with this function,
need to reset the protected-flag first to override
"""
db = current.db
table = self.settings.table_group
if isinstance(role_id, str) and not role_id.isdigit():
gquery = (table.uuid == role_id)
else:
role_id = int(role_id)
gquery = (table.id == role_id)
role = db(gquery).select(limitby=(0, 1)).first()
if role and not role.protected:
# Remove all memberships for this role
mtable = self.settings.table_membership
mquery = (mtable.group_id == role.id)
db(mquery).update(deleted=True)
# Remove all ACLs for this role
ptable = self.permission.table
pquery = (ptable.group_id == role.id)
db(pquery).update(deleted=True)
# Remove the role
db(gquery).update(role=None, deleted=True)
# -------------------------------------------------------------------------
def s3_assign_role(self, user_id, group_id, for_pe=None):
"""
Assigns a role to a user (add the user to a user group)
@param user_id: the record ID of the user account
@param group_id: the record ID(s)/UID(s) of the group
@param for_pe: the person entity (pe_id) to restrict the group
membership to, possible values:
- None: use default realm (entities the user is
affiliated with)
- 0: site-wide realm (no entity-restriction)
- X: restrict to records owned by entity X
@note: strings are assumed to be group UIDs
@note: for_pe will be ignored for ADMIN, ANONYMOUS and AUTHENTICATED
"""
db = current.db
gtable = self.settings.table_group
mtable = self.settings.table_membership
# Find the group IDs
query = None
uuids = None
if isinstance(group_id, (list, tuple)):
if isinstance(group_id[0], str):
uuids = group_id
query = (gtable.uuid.belongs(group_id))
else:
group_ids = group_id
elif isinstance(group_id, str) and not group_id.isdigit():
uuids = [group_id]
query = (gtable.uuid == group_id)
else:
group_ids = [group_id]
if query is not None:
query = (gtable.deleted != True) & query
groups = db(query).select(gtable.id, gtable.uuid)
group_ids = [g.id for g in groups]
missing = [uuid for uuid in uuids
if uuid not in [g.uuid for g in groups]]
for m in missing:
group_id = self.s3_create_role(m, uid=m)
if group_id:
group_ids.append(group_id)
# Find the assigned groups
query = (mtable.deleted != True) & \
(mtable.user_id == user_id) & \
(mtable.group_id.belongs(group_ids) & \
(mtable.pe_id == for_pe))
assigned = db(query).select(mtable.group_id)
assigned_groups = [g.group_id for g in assigned]
# Add missing memberships
sr = self.get_system_roles()
unrestrictable = [str(sr.ADMIN),
str(sr.ANONYMOUS),
str(sr.AUTHENTICATED)]
for group_id in group_ids:
if group_id not in assigned_groups:
membership = {"user_id": user_id,
"group_id": group_id}
if for_pe is not None and str(group_id) not in unrestrictable:
membership["pe_id"] = for_pe
membership_id = mtable.insert(**membership)
# Update roles for current user if required
if self.user and str(user_id) == str(self.user.id):
self.s3_set_roles()
return
# -------------------------------------------------------------------------
def s3_withdraw_role(self, user_id, group_id, for_pe=None):
"""
Removes a role assignment from a user account
@param user_id: the record ID of the user account
@param group_id: the record ID(s)/UID(s) of the role
@param for_pe: only remove the group membership for this
realm, possible values:
- None: only remove for the default realm
- 0: only remove for the site-wide realm
- X: only remove for entity X
- []: remove for any realms
@note: strings are assumed to be role UIDs
"""
if not group_id:
return
db = current.db
gtable = self.settings.table_group
mtable = self.settings.table_membership
# Find the group IDs
query = None
if isinstance(group_id, (list, tuple)):
if isinstance(group_id[0], str):
query = (gtable.uuid.belongs(group_id))
else:
group_ids = group_id
elif isinstance(group_id, str):
query = (gtable.uuid == group_id)
else:
group_ids = [group_id]
if query is not None:
query = (gtable.deleted != True) & query
groups = db(query).select(gtable.id)
group_ids = [g.id for g in groups]
# Get the assigned groups
query = (mtable.deleted != True) & \
(mtable.user_id == user_id) & \
(mtable.group_id.belongs(group_ids))
sr = self.get_system_roles()
unrestrictable = [str(sr.ADMIN),
str(sr.ANONYMOUS),
str(sr.AUTHENTICATED)]
if for_pe != []:
query &= ((mtable.pe_id == for_pe) | \
(mtable.group_id.belongs(unrestrictable)))
memberships = db(query).select()
# Archive the memberships
for m in memberships:
deleted_fk = {"user_id": m.user_id,
"group_id": m.group_id}
if for_pe:
deleted_fk["pe_id"] = for_pe
deleted_fk = json.dumps(deleted_fk)
m.update_record(deleted=True,
deleted_fk=deleted_fk,
user_id=None,
group_id=None)
# Update roles for current user if required
if self.user and str(user_id) == str(self.user.id):
self.s3_set_roles()
return
# -------------------------------------------------------------------------
def s3_get_roles(self, user_id, for_pe=[]):
"""
Lookup all roles which have been assigned to user for an entity
@param user_id: the user_id
@param for_pe: the entity (pe_id) or list of entities
"""
if not user_id:
return []
mtable = self.settings.table_membership
query = (mtable.deleted != True) & \
(mtable.user_id == user_id)
if isinstance(for_pe, (list, tuple)):
if len(for_pe):
query &= (mtable.pe_id.belongs(for_pe))
else:
query &= (mtable.pe_id == for_pe)
rows = current.db(query).select(mtable.group_id)
return list(set([row.group_id for row in rows]))
# -------------------------------------------------------------------------
def s3_has_role(self, role, for_pe=None):
"""
Check whether the currently logged-in user has a certain role
(auth_group membership).
@param role: the record ID or UID of the role
@param for_pe: check for this particular realm, possible values:
None - for any entity
0 - site-wide
X - for entity X
"""
# Allow override
if self.override:
return True
system_roles = self.get_system_roles()
if role == system_roles.ANONYMOUS:
# All users have the anonymous role
return True
s3 = current.session.s3
# Trigger HTTP basic auth
self.s3_logged_in()
# Get the realms
if not s3:
return False
realms = None
if self.user:
realms = self.user.realms
elif s3.roles:
realms = Storage([(r, None) for r in s3.roles])
if not realms:
return False
# Administrators have all roles
if system_roles.ADMIN in realms:
return True
# Resolve role ID/UID
if isinstance(role, str):
if role.isdigit():
role = int(role)
else:
gtable = self.settings.table_group
query = (gtable.deleted != True) & \
(gtable.uuid == role)
row = current.db(query).select(gtable.id,
limitby=(0, 1)).first()
if row:
role = row.id
else:
return False
# Check the realm
if role in realms:
realm = realms[role]
if realm is None or for_pe is None or for_pe in realm:
return True
return False
# -------------------------------------------------------------------------
def s3_group_members(self, group_id, for_pe=[]):
"""
Get a list of members of a group
@param group_id: the group record ID
@param for_pe: show only group members for this PE
@return: a list of the user_ids for members of a group
"""
mtable = self.settings.table_membership
query = (mtable.deleted != True) & \
(mtable.group_id == group_id)
if for_pe is None:
query &= (mtable.pe_id == None)
elif for_pe:
query &= (mtable.pe_id == for_pe)
members = current.db(query).select(mtable.user_id)
return [m.user_id for m in members]
# -------------------------------------------------------------------------
def s3_delegate_role(self,
group_id,
entity,
receiver=None,
role=None,
role_type=None):
"""
Delegate a role (auth_group) from one entity to another
@param group_id: the role ID or UID (or a list of either)
@param entity: the delegating entity
@param receiver: the pe_id of the receiving entity (or a list of pe_ids)
@param role: the affiliation role
@param role_type: the role type for the affiliation role (default=9)
@note: if role is None, a new role of role_type 0 will be created
for each entity in receiver and used for the delegation
(1:1 delegation)
@note: if both receiver and role are specified, the delegation will
add all receivers to this role and create a 1:N delegation to
this role. If the role does not exist, it will be created (using
the given role type)
"""
if not self.permission.delegations:
return False
db = current.db
s3db = current.s3db
dtable = s3db.table("pr_delegation")
rtable = s3db.table("pr_role")
atable = s3db.table("pr_affiliation")
if dtable is None or \
rtable is None or \
atable is None:
return False
if not group_id or not entity or not receiver and not role:
return False
# Find the group IDs
gtable = self.settings.table_group
query = None
uuids = None
if isinstance(group_id, (list, tuple)):
if isinstance(group_id[0], str):
uuids = group_id
query = (gtable.uuid.belongs(group_id))
else:
group_ids = group_id
elif isinstance(group_id, str) and not group_id.isdigit():
uuids = [group_id]
query = (gtable.uuid == group_id)
else:
group_ids = [group_id]
if query is not None:
query = (gtable.deleted != True) & query
groups = db(query).select(gtable.id, gtable.uuid)
group_ids = [g.id for g in groups]
missing = [u for u in uuids if u not in [g.uuid for g in groups]]
for m in missing:
group_id = self.s3_create_role(m, uid=m)
if group_id:
group_ids.append(group_id)
if not group_ids:
return False
if receiver is not None:
if not isinstance(receiver, (list, tuple)):
receiver = [receiver]
query = (dtable.deleted != True) & \
(dtable.group_id.belongs(group_ids)) & \
(dtable.role_id == rtable.id) & \
(rtable.deleted != True) & \
(atable.role_id == rtable.id) & \
(atable.deleted != True) & \
(atable.pe_id.belongs(receiver))
rows = db(query).select(atable.pe_id)
assigned = [row.pe_id for row in rows]
receivers = [r for r in receiver if r not in assigned]
else:
receivers = None
if role_type is None:
role_type = 9 # Other
roles = []
if role is None:
if receivers is None:
return False
for pe_id in receivers:
role_name = "__DELEGATION__%s__%s__" % (entity, pe_id)
query = (rtable.role == role_name)
role = db(query).select(limitby=(0, 1)).first()
if role is not None:
if role.deleted:
role.update_record(deleted=False,
role_type=0)
role_id = role.id
else:
role_id = s3db.pr_add_affiliation(entity, pe_id,
role=role_name,
role_type=0)
if role_id:
roles.append(role_id)
else:
query = (rtable.deleted != True) & \
(rtable.pe_id == entity) & \
(rtable.role == role)
row = db(query).select(rtable.id, limitby=(0, 1)).first()
if row is None:
role_id = rtable.insert(pe_id = entity,
role = role,
role_type = role_type)
else:
role_id = row.id
if role_id:
if receivers is not None:
pr_rebuild_path = s3db.pr_rebuild_path
for pe_id in receivers:
atable.insert(role_id=role_id,
pe_id=pe_id)
pr_rebuild_path(pe_id, clear=True)
roles.append(role_id)
for role_id in roles:
for group_id in group_ids:
dtable.insert(role_id=role_id, group_id=group_id)
# Update roles for current user if required
self.s3_set_roles()
return True
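    # Illustrative usage (sketch; requires security policy 8, role UID
    # "ORG_ADMIN" and pe_ids 4/7 are assumed example values) - delegate
    # the role from entity 4 to the receiving entity 7 as a 1:1 delegation:
    #
    #   current.auth.s3_delegate_role("ORG_ADMIN", 4, receiver=7)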
# -------------------------------------------------------------------------
def s3_remove_delegation(self,
group_id,
entity,
receiver=None,
role=None):
"""
Remove a delegation.
@param group_id: the auth_group ID or UID (or a list of either)
@param entity: the delegating entity
@param receiver: the receiving entity
@param role: the affiliation role
            @note: if receiver is specified, only 1:1 delegations (i.e. to
                   roles of role_type 0) will be removed, not 1:N
                   delegations - to remove a 1:N delegation, specify the
                   role instead of the receiver
@note: if both receiver and role are None, all delegations with this
group_id will be removed for the entity
"""
if not self.permission.delegations:
return False
db = current.db
s3db = current.s3db
dtable = s3db.table("pr_delegation")
rtable = s3db.table("pr_role")
atable = s3db.table("pr_affiliation")
if dtable is None or \
rtable is None or \
atable is None:
return False
if not group_id or not entity or not receiver and not role:
return False
# Find the group IDs
gtable = self.settings.table_group
query = None
#uuids = None
if isinstance(group_id, (list, tuple)):
if isinstance(group_id[0], str):
#uuids = group_id
query = (gtable.uuid.belongs(group_id))
else:
group_ids = group_id
elif isinstance(group_id, str) and not group_id.isdigit():
#uuids = [group_id]
query = (gtable.uuid == group_id)
else:
group_ids = [group_id]
if query is not None:
query = (gtable.deleted != True) & query
groups = db(query).select(gtable.id, gtable.uuid)
group_ids = [g.id for g in groups]
if not group_ids:
return False
# Get all delegations
query = (dtable.deleted != True) & \
(dtable.group_id.belongs(group_ids)) & \
(dtable.role_id == rtable.id) & \
(rtable.pe_id == entity) & \
(atable.role_id == rtable.id)
if receiver:
if not isinstance(receiver, (list, tuple)):
receiver = [receiver]
query &= (atable.pe_id.belongs(receiver))
elif role:
query &= (rtable.role == role)
rows = db(query).select(dtable.id,
dtable.group_id,
rtable.id,
rtable.role_type)
# Remove properly
rmv = Storage()
for row in rows:
if not receiver or row[rtable.role_type] == 0:
deleted_fk = {"role_id": row[rtable.id],
"group_id": row[dtable.group_id]}
rmv[row[dtable.id]] = json.dumps(deleted_fk)
for record_id in rmv:
query = (dtable.id == record_id)
data = {"role_id": None,
"group_id": None,
"deleted_fk": rmv[record_id]}
db(query).update(**data)
# Maybe update the current user's delegations?
        if rmv:
self.s3_set_roles()
return True
# -------------------------------------------------------------------------
def s3_get_delegations(self, entity, role_type=0, by_role=False):
"""
            Lookup delegations for an entity, grouped either by
            receiver (by_role=False) or by affiliation role (by_role=True)
@param entity: the delegating entity (pe_id)
@param role_type: limit the lookup to this affiliation role type,
(can use 0 to lookup 1:1 delegations)
@param by_role: group by affiliation roles
@return: a Storage {<receiver>: [group_ids]}, or
a Storage {<rolename>: {entities:[pe_ids], groups:[group_ids]}}
"""
if not entity or not self.permission.delegations:
return None
s3db = current.s3db
dtable = s3db.pr_delegation
rtable = s3db.pr_role
atable = s3db.pr_affiliation
if None in (dtable, rtable, atable):
return None
query = (rtable.deleted != True) & \
(dtable.deleted != True) & \
(atable.deleted != True) & \
(rtable.pe_id == entity) & \
(dtable.role_id == rtable.id) & \
(atable.role_id == rtable.id)
if role_type is not None:
query &= (rtable.role_type == role_type)
rows = current.db(query).select(atable.pe_id,
rtable.role,
dtable.group_id)
delegations = Storage()
for row in rows:
receiver = row[atable.pe_id]
role = row[rtable.role]
group_id = row[dtable.group_id]
if by_role:
if role not in delegations:
delegations[role] = Storage(entities=[], groups=[])
delegation = delegations[role]
if receiver not in delegation.entities:
delegation.entities.append(receiver)
if group_id not in delegation.groups:
delegation.groups.append(group_id)
else:
if receiver not in delegations:
delegations[receiver] = [group_id]
else:
delegations[receiver].append(group_id)
return delegations
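    # Illustrative result shapes (sketch; pe_ids, group IDs and the role
    # name are assumed example values):
    #
    #   auth.s3_get_delegations(4)
    #   => Storage({7: [2, 3]})   # {receiver pe_id: [group_ids]}
    #   auth.s3_get_delegations(4, by_role=True)
    #   => Storage({"Partners": Storage(entities=[7], groups=[2, 3])})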
# -------------------------------------------------------------------------
# ACL management
# -------------------------------------------------------------------------
def s3_update_acls(self, role, *acls):
""" Wrapper for permission.update_acl to allow batch updating """
for acl in acls:
self.permission.update_acl(role, **acl)
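    # Illustrative usage (sketch; role UID, controller and table name are
    # assumed example values) - each dict is passed as keyword arguments
    # to permission.update_acl:
    #
    #   acl = current.auth.permission
    #   current.auth.s3_update_acls("ORG_ADMIN",
    #                               {"c": "org",
    #                                "uacl": acl.READ, "oacl": acl.ALL},
    #                               {"t": "org_organisation",
    #                                "uacl": acl.READ, "oacl": acl.ALL})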
# -------------------------------------------------------------------------
# User Identity
# -------------------------------------------------------------------------
def s3_get_user_id(self, person_id=None, pe_id=None):
"""
Get the user_id for a person_id
@param person_id: the pr_person record ID
@param pe_id: the person entity ID, alternatively
"""
if isinstance(person_id, basestring) and not person_id.isdigit():
utable = self.settings.table_user
query = (utable.email == person_id)
user = current.db(query).select(utable.id,
limitby=(0, 1)).first()
if user:
return user.id
else:
s3db = current.s3db
ltable = s3db.pr_person_user
if not ltable:
return None
if person_id:
ptable = s3db.pr_person
if not ptable:
return None
query = (ptable.id == person_id) & \
(ptable.pe_id == ltable.pe_id)
else:
query = (ltable.pe_id == pe_id)
link = current.db(query).select(ltable.user_id,
limitby=(0, 1)).first()
if link:
return link.user_id
return None
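    # Illustrative usage (sketch; record IDs and email address are assumed
    # example values):
    #
    #   user_id = auth.s3_get_user_id(person_id=12)
    #   user_id = auth.s3_get_user_id(pe_id=34)
    #   user_id = auth.s3_get_user_id("admin@example.com")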
# -------------------------------------------------------------------------
def s3_user_pe_id(self, user_id):
"""
Get the person pe_id for a user ID
@param user_id: the user ID
"""
table = current.s3db.pr_person_user
row = current.db(table.user_id == user_id).select(table.pe_id,
limitby=(0, 1)).first()
if row:
return row.pe_id
return None
# -------------------------------------------------------------------------
def s3_logged_in_person(self):
"""
Get the person record ID for the current logged-in user
"""
if self.s3_logged_in():
ptable = current.s3db.pr_person
try:
query = (ptable.pe_id == self.user.pe_id)
except AttributeError:
# Prepop
pass
else:
record = current.db(query).select(ptable.id,
limitby=(0, 1)).first()
if record:
return record.id
return None
# -------------------------------------------------------------------------
def s3_logged_in_human_resource(self):
"""
Get the first HR record ID for the current logged-in user
"""
if self.s3_logged_in():
s3db = current.s3db
ptable = s3db.pr_person
htable = s3db.hrm_human_resource
try:
query = (htable.person_id == ptable.id) & \
(ptable.pe_id == self.user.pe_id)
except AttributeError:
# Prepop
pass
else:
record = current.db(query).select(htable.id,
                                                  orderby=~htable.modified_on,
limitby=(0, 1)).first()
if record:
return record.id
return None
# -------------------------------------------------------------------------
# Core Authorization Methods
# -------------------------------------------------------------------------
def s3_has_permission(self, method, table, record_id=None, c=None, f=None):
"""
            S3 framework function to check whether a user can access a
            record with a particular access method. Designed to be called
            from the RESTlike controller.
@param method: the access method as string, one of
"create", "read", "update", "delete"
@param table: the table or tablename
@param record_id: the record ID (if any)
@param c: the controller name (overrides current.request)
@param f: the function name (overrides current.request)
"""
if self.override:
return True
sr = self.get_system_roles()
        if not hasattr(table, "_tablename"):
            tablename = table
            table = current.s3db.table(tablename, db_only=True)
            if table is None:
                current.log.warning("Permission check on table %s failed: could not load table. Module disabled?" % tablename)
                # Return a different Falsy value
                return None
policy = current.deployment_settings.get_security_policy()
# Simple policy
if policy == 1:
# Anonymous users can Read.
if method == "read":
authorised = True
else:
# Authentication required for Create/Update/Delete.
authorised = self.s3_logged_in()
# Editor policy
elif policy == 2:
# Anonymous users can Read.
if method == "read":
authorised = True
elif method == "create":
# Authentication required for Create.
authorised = self.s3_logged_in()
elif record_id == 0 and method == "update":
# Authenticated users can update at least some records
authorised = self.s3_logged_in()
else:
# Editor role required for Update/Delete.
authorised = self.s3_has_role(sr.EDITOR)
if not authorised and self.user and "owned_by_user" in table:
# Creator of Record is allowed to Edit
query = (table.id == record_id)
record = current.db(query).select(table.owned_by_user,
limitby=(0, 1)).first()
if record and self.user.id == record.owned_by_user:
authorised = True
# Use S3Permission ACLs
elif policy in (3, 4, 5, 6, 7, 8):
authorised = self.permission.has_permission(method,
c = c,
f = f,
t = table,
record = record_id)
# Web2py default policy
else:
if self.s3_logged_in():
# Administrators are always authorised
if self.s3_has_role(sr.ADMIN):
authorised = True
else:
# Require records in auth_permission to specify access
# (default Web2Py-style)
authorised = self.has_permission(method, table, record_id)
else:
# No access for anonymous
authorised = False
return authorised
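    # Illustrative usage (sketch; table and record ID are assumed example
    # values) - check permission before a manual update:
    #
    #   table = current.s3db.org_organisation
    #   if current.auth.s3_has_permission("update", table, record_id=8):
    #       current.db(table.id == 8).update(name="New Name")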
# -------------------------------------------------------------------------
def s3_accessible_query(self, method, table, c=None, f=None):
"""
Returns a query with all accessible records for the currently
logged-in user
@param method: the access method as string, one of:
"create", "read", "update" or "delete"
@param table: the table or table name
@param c: the controller name (overrides current.request)
@param f: the function name (overrides current.request)
@note: This method does not work on GAE because it uses JOIN and IN
"""
if self.override:
return table.id > 0
sr = self.get_system_roles()
if not hasattr(table, "_tablename"):
table = current.s3db[table]
policy = current.deployment_settings.get_security_policy()
if policy == 1:
# "simple" security policy: show all records
return table.id > 0
elif policy == 2:
# "editor" security policy: show all records
return table.id > 0
elif policy in (3, 4, 5, 6, 7, 8):
# ACLs: use S3Permission method
query = self.permission.accessible_query(method, table, c=c, f=f)
return query
# "Full" security policy
if self.s3_has_role(sr.ADMIN):
# Administrators can see all data
return table.id > 0
# If there is access to the entire table then show all records
try:
user_id = self.user.id
except:
user_id = 0
if self.has_permission(method, table, 0, user_id):
return table.id > 0
# Filter Records to show only those to which the user has access
current.session.warning = current.T("Only showing accessible records!")
membership = self.settings.table_membership
permission = self.settings.table_permission
query = (membership.user_id == user_id) & \
(membership.group_id == permission.group_id) & \
(permission.name == method) & \
(permission.table_name == table)
return table.id.belongs(current.db(query)._select(permission.record_id))
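    # Illustrative usage (sketch; table is an assumed example) - restrict
    # a select to the records the user may read:
    #
    #   table = current.s3db.org_organisation
    #   query = current.auth.s3_accessible_query("read", table)
    #   rows = current.db(query & (table.deleted != True)).select(table.id)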
# -------------------------------------------------------------------------
# S3 Variants of web2py Authorization Methods
# -------------------------------------------------------------------------
def s3_has_membership(self, group_id=None, user_id=None, role=None):
"""
Checks if user is member of group_id or role
Extends Web2Py's requires_membership() to add new functionality:
- Custom Flash style
- Uses s3_has_role()
"""
# Allow override
if self.override:
return True
group_id = group_id or self.id_group(role)
try:
group_id = int(group_id)
except:
group_id = self.id_group(group_id) # interpret group_id as a role
        r = self.s3_has_role(group_id)
log = self.messages.has_membership_log
if log:
if not user_id and self.user:
user_id = self.user.id
self.log_event(log, dict(user_id=user_id,
group_id=group_id, check=r))
return r
# Override original method
has_membership = s3_has_membership
# -------------------------------------------------------------------------
def s3_requires_membership(self, role):
"""
Decorator that prevents access to action if not logged in or
if user logged in is not a member of group_id. If role is
provided instead of group_id then the group_id is calculated.
Extends Web2Py's requires_membership() to add new functionality:
- Custom Flash style
- Uses s3_has_role()
- Administrators (id=1) are deemed to have all roles
"""
def decorator(action):
def f(*a, **b):
if self.override:
return action(*a, **b)
ADMIN = self.get_system_roles().ADMIN
if not self.s3_has_role(role) and not self.s3_has_role(ADMIN):
self.permission.fail()
return action(*a, **b)
f.__doc__ = action.__doc__
return f
return decorator
# Override original method
requires_membership = s3_requires_membership
# -------------------------------------------------------------------------
# Record Ownership
# -------------------------------------------------------------------------
def s3_make_session_owner(self, table, record_id):
"""
Makes the current session owner of a record
@param table: the table or table name
@param record_id: the record ID
"""
if hasattr(table, "_tablename"):
table = table._tablename
if not self.user:
session = current.session
if "owned_records" not in session:
session.owned_records = Storage()
records = session.owned_records.get(table, [])
record_id = str(record_id)
if record_id not in records:
records.append(record_id)
session.owned_records[table] = records
return
# -------------------------------------------------------------------------
def s3_session_owns(self, table, record_id):
"""
Checks whether the current session owns a record
@param table: the table or table name
@param record_id: the record ID
"""
session = current.session
if "owned_records" not in session:
return False
if hasattr(table, "_tablename"):
table = table._tablename
if record_id and not self.user:
try:
records = session.owned_records.get(table, [])
except:
records = []
if str(record_id) in records:
return True
return False
# -------------------------------------------------------------------------
def s3_clear_session_ownership(self, table=None, record_id=None):
"""
Removes session ownership for a record
@param table: the table or table name (default: all tables)
@param record_id: the record ID (default: all records)
"""
session = current.session
if "owned_records" not in session:
return
if table is not None:
if hasattr(table, "_tablename"):
table = table._tablename
if table in session.owned_records:
records = session.owned_records[table]
if record_id is not None:
if str(record_id) in records:
records.remove(str(record_id))
else:
del session.owned_records[table]
else:
session.owned_records = Storage()
return
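    # Illustrative flow (sketch; tablename and record ID are assumed
    # example values) - anonymous session ownership:
    #
    #   auth.s3_make_session_owner("pr_person", 42)
    #   auth.s3_session_owns("pr_person", 42)  # True while not logged-in
    #   auth.s3_clear_session_ownership("pr_person", 42)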
# -------------------------------------------------------------------------
def s3_update_record_owner(self, table, record, update=False, **fields):
"""
Update ownership fields in a record (DRY helper method for
s3_set_record_owner and set_realm_entity)
@param table: the table
@param record: the record or record ID
@param update: True to update realm_entity in all realm-components
@param fields: dict of {ownership_field:value}
"""
# Ownership fields
OUSR = "owned_by_user"
OGRP = "owned_by_group"
REALM = "realm_entity"
ownership_fields = (OUSR, OGRP, REALM)
pkey = table._id.name
if isinstance(record, (Row, dict)) and pkey in record:
record_id = record[pkey]
else:
record_id = record
data = Storage()
for key in fields:
if key in ownership_fields:
data[key] = fields[key]
if data:
s3db = current.s3db
db = current.db
# Update record
q = (table._id == record_id)
success = db(q).update(**data)
# Update realm-components
if success and update and REALM in data:
rc = s3db.get_config(table, "realm_components", ())
resource = s3db.resource(table, components=rc)
realm = {REALM:data[REALM]}
for component in resource.components.values():
ctable = component.table
if REALM not in ctable.fields:
continue
query = component.get_join() & q
rows = db(query).select(ctable._id)
ids = list(set([row[ctable._id] for row in rows]))
if ids:
db(ctable._id.belongs(ids)).update(**realm)
# Update super-entity
self.update_shared_fields(table, record, **data)
else:
return None
# -------------------------------------------------------------------------
def s3_set_record_owner(self,
table,
record,
force_update=False,
**fields):
"""
Set the record owned_by_user, owned_by_group and realm_entity
for a record (auto-detect values).
To be called by CRUD and Importer during record creation.
@param table: the Table (or table name)
@param record: the record (or record ID)
@param force_update: True to update all fields regardless of
the current value in the record, False
to only update if current value is None
@param fields: override auto-detected values, see keywords
@keyword owned_by_user: the auth_user ID of the owner user
@keyword owned_by_group: the auth_group ID of the owner group
@keyword realm_entity: the pe_id of the realm entity, or a tuple
(instance_type, instance_id) to lookup the
pe_id, e.g. ("org_organisation", 2)
@note: Only use with force_update for deliberate owner changes (i.e.
with explicit owned_by_user/owned_by_group) - autodetected
values can have undesirable side-effects. For mere realm
updates use set_realm_entity instead.
@note: If used with force_update, this will also update the
realm_entity in all configured realm_components, i.e.
no separate call to set_realm_entity required.
"""
s3db = current.s3db
# Ownership fields
OUSR = "owned_by_user"
OGRP = "owned_by_group"
REALM = "realm_entity"
ownership_fields = (OUSR, OGRP, REALM)
# Entity reference fields
EID = "pe_id"
OID = "organisation_id"
SID = "site_id"
GID = "group_id"
PID = "person_id"
entity_fields = (EID, OID, SID, GID, PID)
# Find the table
if hasattr(table, "_tablename"):
tablename = table._tablename
else:
tablename = table
table = s3db.table(tablename)
if not table:
return
# Get the record ID
pkey = table._id.name
if isinstance(record, (Row, dict)):
if pkey not in record:
return
else:
record_id = record[pkey]
else:
record_id = record
record = Storage()
# Find the available fields
fields_in_table = [f for f in ownership_fields if f in table.fields]
if not fields_in_table:
return
fields_in_table += [f for f in entity_fields if f in table.fields]
# Get all available fields for the record
fields_missing = [f for f in fields_in_table if f not in record]
if fields_missing:
fields_to_load = [table._id] + [table[f] for f in fields_in_table]
query = (table._id == record_id)
row = current.db(query).select(limitby=(0, 1),
*fields_to_load).first()
else:
row = record
if not row:
return
# Prepare the update
data = Storage()
# Find owned_by_user
if OUSR in fields_in_table:
pi = ("pr_person",
"pr_identity",
"pr_education",
"pr_contact",
"pr_address",
"pr_contact_emergency",
"pr_physical_description",
"pr_group_membership",
"pr_image",
"hrm_training",
)
if OUSR in fields:
data[OUSR] = fields[OUSR]
elif not row[OUSR] or tablename in pi:
user_id = None
# Records in PI tables should be owned by the person
# they refer to (if that person has a user account)
if tablename == "pr_person":
user_id = self.s3_get_user_id(person_id = row[table._id])
elif PID in row and tablename in pi:
user_id = self.s3_get_user_id(person_id = row[PID])
elif EID in row and tablename in pi:
user_id = self.s3_get_user_id(pe_id = row[EID])
if not user_id and self.s3_logged_in() and self.user:
# Fallback to current user
user_id = self.user.id
if user_id:
data[OUSR] = user_id
# Find owned_by_group
if OGRP in fields_in_table:
# Check for type-specific handler to find the owner group
handler = s3db.get_config(tablename, "owner_group")
if handler:
if callable(handler):
data[OGRP] = handler(table, row)
else:
data[OGRP] = handler
# Otherwise, only set if explicitly specified
elif OGRP in fields:
data[OGRP] = fields[OGRP]
# Find realm entity
if REALM in fields_in_table:
if REALM in row and row[REALM] and not force_update:
pass
else:
if REALM in fields:
entity = fields[REALM]
else:
entity = 0
realm_entity = self.get_realm_entity(table, row,
entity=entity)
data[REALM] = realm_entity
self.s3_update_record_owner(table, row, update=force_update, **data)
# -------------------------------------------------------------------------
def set_realm_entity(self, table, records, entity=0, force_update=False):
"""
            Update the realm entity for records, will also update the
            realm entity in all configured realm components, see:
http://eden.sahanafoundation.org/wiki/S3AAA/OrgAuth#Realms1
To be called by CRUD and Importer during record update.
@param table: the Table (or tablename)
@param records: - a single record
- a single record ID
- a list of records, or a Rows object
- a list of record IDs
- a query to find records in table
            @param entity: - an entity ID
                           - a tuple (table, instance_id)
                           - 0 for default lookup
            @param force_update: update the realm entity even if it is
                                 already set
"""
db = current.db
s3db = current.s3db
REALM = "realm_entity"
EID = "pe_id"
OID = "organisation_id"
SID = "site_id"
GID = "group_id"
entity_fields = (EID, OID, SID, GID)
# Find the table
if hasattr(table, "_tablename"):
tablename = table._tablename
else:
tablename = table
table = s3db.table(tablename)
if not table or REALM not in table.fields:
return
# Find the available fields
fields_in_table = [table._id.name, REALM] + \
[f for f in entity_fields if f in table.fields]
fields_to_load = [table[f] for f in fields_in_table]
# Realm entity specified by call?
realm_entity = entity
if isinstance(realm_entity, tuple):
realm_entity = s3db.pr_get_pe_id(realm_entity)
if not realm_entity:
return
if isinstance(records, Query):
query = records
else:
query = None
# Bulk update?
if realm_entity != 0 and force_update and query is not None:
data = {REALM:realm_entity}
db(query).update(**data)
self.update_shared_fields(table, query, **data)
return
# Find the records
if query is not None:
if not force_update:
query &= (table[REALM] == None)
records = db(query).select(*fields_to_load)
elif not isinstance(records, (list, Rows)):
records = [records]
if not records:
return
# Update record by record
get_realm_entity = self.get_realm_entity
s3_update_record_owner = self.s3_update_record_owner
for record in records:
if not isinstance(record, (Row, Storage)):
record_id = record
row = Storage()
else:
row = record
if table._id.name not in record:
continue
record_id = row[table._id.name]
q = (table._id == record_id)
# Do we need to reload the record?
fields_missing = [f for f in fields_in_table if f not in row]
if fields_missing:
row = db(q).select(*fields_to_load, limitby = (0, 1)).first()
if not row:
continue
# Do we need to update the record at all?
if row[REALM] and not force_update:
continue
_realm_entity = get_realm_entity(table, row,
entity=realm_entity)
data = {REALM:_realm_entity}
s3_update_record_owner(table, row,
update=force_update, **data)
return
# -------------------------------------------------------------------------
def get_realm_entity(self, table, record, entity=0):
"""
Lookup the realm entity for a record
@param table: the Table
@param record: the record (as Row or dict)
@param entity: the entity (pe_id)
"""
if "realm_entity" not in table:
return None
s3db = current.s3db
# Entity specified by call?
if isinstance(entity, tuple):
realm_entity = s3db.pr_get_pe_id(entity)
else:
realm_entity = entity
# See if there is a deployment-global method to determine the realm entity
if realm_entity == 0:
handler = current.deployment_settings.get_auth_realm_entity()
if callable(handler):
realm_entity = handler(table, record)
# Fall back to table-specific method
if realm_entity == 0:
handler = s3db.get_config(table, "realm_entity")
if callable(handler):
realm_entity = handler(table, record)
# Fall back to standard lookup cascade
if realm_entity == 0:
if "pe_id" in record and \
table._tablename not in ("pr_person", "dvi_body"):
realm_entity = record["pe_id"]
elif "organisation_id" in record:
realm_entity = s3db.pr_get_pe_id("org_organisation",
record["organisation_id"])
elif "site_id" in record:
realm_entity = s3db.pr_get_pe_id("org_site",
record["site_id"])
elif "group_id" in record:
realm_entity = s3db.pr_get_pe_id("pr_group",
record["group_id"])
else:
realm_entity = None
return realm_entity
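    # Illustrative cascade (sketch; table and IDs are assumed example
    # values) - a record without its own pe_id falls back to the
    # organisation's pe_id as realm entity:
    #
    #   record = Storage(id=1, organisation_id=3)
    #   pe_id = auth.get_realm_entity(current.s3db.org_office, record)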
# -------------------------------------------------------------------------
def update_shared_fields(self, table, record, **data):
"""
Update the shared fields in data in all super-entity rows linked
with this record.
@param table: the table
@param record: a record, record ID or a query
@param data: the field/value pairs to update
"""
db = current.db
s3db = current.s3db
super_entities = s3db.get_config(table, "super_entity")
if not super_entities:
return
if not isinstance(super_entities, (list, tuple)):
super_entities = [super_entities]
tables = dict()
load = s3db.table
super_key = s3db.super_key
for se in super_entities:
supertable = load(se)
if not supertable or \
not any([f in supertable.fields for f in data]):
continue
tables[super_key(supertable)] = supertable
if not isinstance(record, (Row, dict)) or \
any([f not in record for f in tables]):
if isinstance(record, Query):
query = record
limitby = None
elif isinstance(record, (Row, dict)):
query = table._id == record[table._id.name]
limitby = (0, 1)
else:
query = table._id == record
limitby = (0, 1)
fields = [table[f] for f in tables]
records = db(query).select(limitby=limitby, *fields)
else:
records = [record]
if not records:
return
for record in records:
for skey in tables:
supertable = tables[skey]
if skey in record:
query = (supertable[skey] == record[skey])
else:
continue
updates = dict((f, data[f])
for f in data if f in supertable.fields)
if not updates:
continue
db(query).update(**updates)
return
# -------------------------------------------------------------------------
def permitted_facilities(self,
table=None,
error_msg=None,
redirect_on_error=True,
facility_type=None):
"""
If there are no facilities that the user has permission for,
prevents create & update of records in table & gives a
warning if the user tries to.
@param table: the table or table name
@param error_msg: error message
@param redirect_on_error: whether to redirect on error
@param facility_type: restrict to this particular type of
facilities (a tablename)
"""
T = current.T
ERROR = T("You do not have permission for any facility to perform this action.")
HINT = T("Create a new facility or ensure that you have permissions for an existing facility.")
if not error_msg:
error_msg = ERROR
s3db = current.s3db
site_ids = []
if facility_type is None:
site_types = self.org_site_types
else:
if facility_type not in self.org_site_types:
return
site_types = [s3db[facility_type]]
for site_type in site_types:
try:
ftable = s3db[site_type]
if not "site_id" in ftable.fields:
continue
query = self.s3_accessible_query("update", ftable)
if "deleted" in ftable:
query &= (ftable.deleted != True)
rows = current.db(query).select(ftable.site_id)
site_ids += [row.site_id for row in rows]
except:
# Module disabled
pass
if site_ids:
return site_ids
args = current.request.args
if "update" in args or "create" in args:
if redirect_on_error:
# Trying to create or update
                # but they do not have permission for any facility
current.session.error = "%s %s" % (error_msg, HINT)
redirect(URL(c="default", f="index"))
elif table is not None:
if hasattr(table, "_tablename"):
tablename = table._tablename
else:
tablename = table
s3db.configure(tablename,
insertable = False)
return site_ids # Will be []
# -------------------------------------------------------------------------
def permitted_organisations(self,
table=None,
error_msg=None,
redirect_on_error=True):
"""
            If there are no organisations that the user has update
            permission for, prevents create & update of a record in
            table & gives a warning if the user tries to.
@param table: the table or table name
@param error_msg: error message
@param redirect_on_error: whether to redirect on error
"""
T = current.T
ERROR = T("You do not have permission for any organization to perform this action.")
HINT = T("Create a new organization or ensure that you have permissions for an existing organization.")
if not error_msg:
error_msg = ERROR
s3db = current.s3db
org_table = s3db.org_organisation
query = self.s3_accessible_query("update", org_table)
query &= (org_table.deleted == False)
rows = current.db(query).select(org_table.id)
if rows:
return [org.id for org in rows]
request = current.request
if "update" in request.args or "create" in request.args:
if redirect_on_error:
current.session.error = error_msg + " " + HINT
redirect(URL(c="default", f="index"))
elif table is not None:
if hasattr(table, "_tablename"):
tablename = table._tablename
else:
tablename = table
s3db.configure(tablename, insertable = False)
return []
# -------------------------------------------------------------------------
def root_org(self):
"""
Return the current user's root organisation ID or None
"""
if not self.user:
return None
org_id = self.user.organisation_id
if not org_id:
return None
if not current.deployment_settings.get_org_branches():
return org_id
return current.cache.ram(
# Common key for all users of this org & vol_service_record() & hrm_training_event_realm_entity()
"root_org_%s" % org_id,
lambda: current.s3db.org_root_organisation(org_id),
time_expire=120
)
# -------------------------------------------------------------------------
def root_org_name(self):
"""
Return the current user's root organisation name or None
"""
if not self.user:
return None
org_id = self.user.organisation_id
if not org_id:
return None
if not current.deployment_settings.get_org_branches():
s3db = current.s3db
table = s3db.org_organisation
row = current.db(table.id == org_id).select(table.name,
cache = s3db.cache,
limitby=(0, 1)).first()
try:
return row.name
except:
# Org not found!
return None
return current.cache.ram(
# Common key for all users of this org
"root_org_name_%s" % org_id,
lambda: current.s3db.org_root_organisation_name(org_id),
time_expire=120
)
# -------------------------------------------------------------------------
def filter_by_root_org(self, table):
"""
Function to return a query to filter a table to only display results
for the user's root org OR record with no root org
@ToDo: Restore Realms and add a role/functionality support for Master Data
Then this function is redundant
"""
root_org = self.root_org()
if root_org:
return (table.organisation_id == root_org) | (table.organisation_id == None)
else:
return (table.organisation_id == None)
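    # Illustrative usage (sketch; assumes a table with an organisation_id
    # field) - limit a lookup to the user's root org plus unowned records:
    #
    #   query = current.auth.filter_by_root_org(table)
    #   rows = current.db(query).select(table.id, table.name)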
# =============================================================================
class S3Permission(object):
""" S3 Class to handle permissions """
TABLENAME = "s3_permission"
CREATE = 0x0001 # Permission to create new records
READ = 0x0002 # Permission to read records
UPDATE = 0x0004 # Permission to update records
DELETE = 0x0008 # Permission to delete records
REVIEW = 0x0010 # Permission to review unapproved records
APPROVE = 0x0020 # Permission to approve records
PUBLISH = 0x0040 # Permission to publish records outside of Eden
ALL = CREATE | READ | UPDATE | DELETE | REVIEW | APPROVE | PUBLISH
NONE = 0x0000 # must be 0!
PERMISSION_OPTS = OrderedDict([
#(NONE, "NONE"),
[CREATE, "CREATE"],
[READ, "READ"],
[UPDATE, "UPDATE"],
[DELETE, "DELETE"],
[REVIEW, "REVIEW"],
[APPROVE, "APPROVE"],
[PUBLISH, "PUBLISH"],
])
# Method <-> required permission
METHODS = Storage({
"create": CREATE,
"read": READ,
"update": UPDATE,
"delete": DELETE,
"map": READ,
"report": READ,
#"search": READ,
"timeplot": READ,
"import": CREATE,
"review": REVIEW,
"approve": APPROVE,
"reject": APPROVE,
"publish": PUBLISH,
})
# Lambda expressions for ACL handling
required_acl = lambda self, methods: \
reduce(lambda a, b: a | b,
[self.METHODS[m]
for m in methods if m in self.METHODS],
self.NONE)
most_permissive = lambda self, acl: \
reduce(lambda x, y: (x[0]|y[0], x[1]|y[1]),
acl, (self.NONE, self.NONE))
most_restrictive = lambda self, acl: \
reduce(lambda x, y: (x[0]&y[0], x[1]&y[1]),
acl, (self.ALL, self.ALL))
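    # Illustrative bitmask arithmetic (sketch): required_acl ORs the
    # permission bits for the requested methods, e.g.
    #
    #   self.required_acl(["create", "update"]) == CREATE | UPDATE == 0x0005
    #
    # most_permissive/most_restrictive fold (uacl, oacl) pairs with
    # | and & respectively.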
# -------------------------------------------------------------------------
def __init__(self, auth, tablename=None):
"""
Constructor, invoked by AuthS3.__init__
@param auth: the AuthS3 instance
@param tablename: the name for the permissions table
"""
db = current.db
# Instantiated once per request, but before Auth tables
# are defined and authentication is checked, thus no use
# to check permissions in the constructor
# Store auth reference in self because current.auth is not
# available at this point yet, but needed in define_table.
self.auth = auth
self.error = S3PermissionError
settings = current.deployment_settings
# Policy: which level of granularity do we want?
self.policy = settings.get_security_policy()
# ACLs to control access per controller:
        self.use_cacls = self.policy in (3, 4, 5, 6, 7, 8)
# ACLs to control access per function within controllers:
self.use_facls = self.policy in (4, 5, 6, 7, 8)
# ACLs to control access per table:
self.use_tacls = self.policy in (5, 6, 7, 8)
# Authorization takes realm entity into account:
self.entity_realm = self.policy in (6, 7, 8)
# Permissions shared along the hierarchy of entities:
self.entity_hierarchy = self.policy in (7, 8)
# Permission sets can be delegated:
self.delegations = self.policy == 8
# Permissions table
self.tablename = tablename or self.TABLENAME
if self.tablename in db:
self.table = db[self.tablename]
else:
self.table = None
# Error messages
T = current.T
self.INSUFFICIENT_PRIVILEGES = T("Insufficient Privileges")
self.AUTHENTICATION_REQUIRED = T("Authentication Required")
# Request information
request = current.request
self.controller = request.controller
self.function = request.function
# Request format
self.format = s3_get_extension()
# Page permission cache
self.page_acls = Storage()
self.table_acls = Storage()
# Pages which never require permission:
# Make sure that any data access via these pages uses
# accessible_query explicitly!
self.unrestricted_pages = ("default/index",
"default/user",
"default/contact",
"default/about")
# Default landing pages
_next = URL(args=request.args, vars=request.get_vars)
self.homepage = URL(c="default", f="index")
self.loginpage = URL(c="default", f="user", args="login",
vars=dict(_next=_next))
# -------------------------------------------------------------------------
def define_table(self, migrate=True, fake_migrate=False):
"""
Define permissions table, invoked by AuthS3.define_tables()
"""
table_group = self.auth.settings.table_group
if table_group is None:
table_group = "integer" # fallback (doesn't work with requires)
if not self.table:
db = current.db
db.define_table(self.tablename,
Field("group_id", table_group),
Field("controller", length=64),
Field("function", length=512),
Field("tablename", length=512),
Field("record", "integer"),
Field("oacl", "integer", default=self.ALL),
Field("uacl", "integer", default=self.READ),
# apply this ACL only to records owned
# by this entity
Field("entity", "integer"),
                            # apply this ACL to all records regardless
                            # of the realm entity
Field("unrestricted", "boolean",
default=False),
migrate=migrate,
fake_migrate=fake_migrate,
*(s3_uid()+s3_timestamp()+s3_deletion_status()))
self.table = db[self.tablename]
# -------------------------------------------------------------------------
# ACL Management
# -------------------------------------------------------------------------
def update_acl(self, group,
c=None,
f=None,
t=None,
record=None,
oacl=None,
uacl=None,
entity=None,
delete=False):
"""
Update an ACL
@param group: the ID or UID of the auth_group this ACL applies to
@param c: the controller
@param f: the function
@param t: the tablename
@param record: the record (as ID or Row with ID)
@param oacl: the ACL for the owners of the specified record(s)
@param uacl: the ACL for all other users
@param entity: restrict this ACL to the records owned by this
entity (pe_id), specify "any" for any entity
@param delete: delete the ACL instead of updating it
"""
ANY = "any"
unrestricted = entity == ANY
if unrestricted:
entity = None
table = self.table
if not table:
# ACLs not relevant to this security policy
return None
s3 = current.response.s3
if "permissions" in s3:
del s3["permissions"]
if "restricted_tables" in s3:
del s3["restricted_tables"]
if c is None and f is None and t is None:
return None
if t is not None:
c = f = None
else:
record = None
if uacl is None:
uacl = self.NONE
if oacl is None:
oacl = uacl
success = False
if group:
group_id = None
acl = dict(group_id=group_id,
deleted=False,
controller=c,
function=f,
tablename=t,
record=record,
oacl=oacl,
uacl=uacl,
unrestricted=unrestricted,
entity=entity)
if isinstance(group, basestring) and not group.isdigit():
gtable = self.auth.settings.table_group
query = (gtable.uuid == group) & \
(table.group_id == gtable.id)
else:
query = (table.group_id == group)
group_id = group
query &= ((table.controller == c) & \
(table.function == f) & \
(table.tablename == t) & \
(table.record == record) & \
(table.unrestricted == unrestricted) & \
(table.entity == entity))
record = current.db(query).select(table.id,
table.group_id,
limitby=(0, 1)).first()
if record:
if delete:
acl = dict(
group_id = None,
deleted = True,
deleted_fk = '{"group_id": %d}' % record.group_id
)
else:
acl["group_id"] = record.group_id
record.update_record(**acl)
success = record.id
elif group_id:
acl["group_id"] = group_id
success = table.insert(**acl)
else:
# Lookup the group_id
record = current.db(gtable.uuid == group).select(gtable.id,
limitby=(0, 1)
).first()
if record:
acl["group_id"] = group_id
success = table.insert(**acl)
return success
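    # Illustrative usage (sketch; role UID and table name are assumed
    # example values) - grant READ to all users and full access to owners:
    #
    #   permission = current.auth.permission
    #   permission.update_acl("ORG_ADMIN",
    #                         t="org_organisation",
    #                         uacl=permission.READ,
    #                         oacl=permission.ALL)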
# -------------------------------------------------------------------------
def delete_acl(self, group,
c=None,
f=None,
t=None,
record=None,
entity=None):
"""
Delete an ACL
@param group: the ID or UID of the auth_group this ACL applies to
@param c: the controller
@param f: the function
@param t: the tablename
@param record: the record (as ID or Row with ID)
@param entity: restrict this ACL to the records owned by this
entity (pe_id), specify "any" for any entity
"""
return self.update_acl(group,
c=c,
f=f,
t=t,
record=record,
entity=entity,
delete=True)
# -------------------------------------------------------------------------
# Record Ownership
# -------------------------------------------------------------------------
def get_owners(self, table, record):
"""
Get the entity/group/user owning a record
@param table: the table
@param record: the record ID (or the Row, if already loaded)
@note: if passing a Row, it must contain all available ownership
fields (id, owned_by_user, owned_by_group, realm_entity),
otherwise the record will be re-loaded by this function.
@return: tuple of (realm_entity, owner_group, owner_user)
"""
realm_entity = None
owner_group = None
owner_user = None
record_id = None
DEFAULT = (None, None, None)
# Load the table, if necessary
if table and not hasattr(table, "_tablename"):
table = current.s3db.table(table)
if not table:
return DEFAULT
# Check which ownership fields the table defines
ownership_fields = ("realm_entity",
"owned_by_group",
"owned_by_user")
fields = [f for f in ownership_fields if f in table.fields]
if not fields:
# Ownership is not defined for this table
return DEFAULT
if isinstance(record, Row):
# Check if all necessary fields are present
missing = [f for f in fields if f not in record]
if missing:
# Have to reload the record :(
if table._id.name in record:
record_id = record[table._id.name]
record = None
else:
# Record ID given, must load the record anyway
record_id = record
record = None
if not record and record_id:
# Get the record
fs = [table[f] for f in fields] + [table.id]
query = (table._id == record_id)
record = current.db(query).select(limitby=(0, 1), *fs).first()
if not record:
# Record does not exist
return DEFAULT
if "realm_entity" in record:
realm_entity = record["realm_entity"]
if "owned_by_group" in record:
owner_group = record["owned_by_group"]
if "owned_by_user" in record:
owner_user = record["owned_by_user"]
return (realm_entity, owner_group, owner_user)
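    # Illustrative usage (sketch; record ID is an assumed example value):
    #
    #   realm_entity, owner_group, owner_user = \
    #       auth.permission.get_owners(current.s3db.pr_person, 42)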
# -------------------------------------------------------------------------
def is_owner(self, table, record, owners=None, strict=False):
"""
Check whether the current user owns the record
@param table: the table or tablename
@param record: the record ID (or the Row if already loaded)
            @param owners: override the actual record owners by a tuple
                           (realm_entity, owner_group, owner_user)
            @param strict: do not assume ownership of records which have
                           no owner
            @return: True if the current user owns the record, else False
"""
user_id = None
sr = self.auth.get_system_roles()
if self.auth.user is not None:
user_id = self.auth.user.id
session = current.session
roles = [sr.ANONYMOUS]
if session.s3 is not None:
roles = session.s3.roles or roles
if sr.ADMIN in roles:
# Admin owns all records
return True
elif owners is not None:
realm_entity, owner_group, owner_user = owners
elif record:
realm_entity, owner_group, owner_user = \
self.get_owners(table, record)
        else:
            # No record specified => nothing to check, assume permitted
            return True
# Session ownership?
if not user_id:
if isinstance(record, (Row, dict)):
record_id = record[table._id.name]
else:
record_id = record
if self.auth.s3_session_owns(table, record_id):
# Session owns record
return True
else:
return False
# Individual record ownership
if owner_user and owner_user == user_id:
return True
# Public record?
if not any((realm_entity, owner_group, owner_user)) and not strict:
return True
elif strict:
return False
# OrgAuth: apply only group memberships within the realm
if self.entity_realm and realm_entity:
realms = self.auth.user.realms
roles = [sr.ANONYMOUS]
append = roles.append
for r in realms:
realm = realms[r]
if realm is None or realm_entity in realm:
append(r)
# Ownership based on user role
if owner_group and owner_group in roles:
return True
else:
return False
# -------------------------------------------------------------------------
def owner_query(self,
table,
user,
use_realm=True,
realm=None,
no_realm=None):
"""
Returns a query to select the records in table owned by user
@param table: the table
@param user: the current auth.user (None for not authenticated)
@param use_realm: use realms
@param realm: limit owner access to these realms
@param no_realm: don't include these entities in role realms
@return: a web2py Query instance, or None if no query can be
constructed
"""
OUSR = "owned_by_user"
OGRP = "owned_by_group"
OENT = "realm_entity"
if realm is None:
realm = []
if no_realm is None:
no_realm = []
query = None
if user is None:
# Session ownership?
if hasattr(table, "_tablename"):
tablename = table._tablename
else:
tablename = table
session = current.session
if "owned_records" in session and \
tablename in session.owned_records:
query = (table._id.belongs(session.owned_records[tablename]))
else:
use_realm = use_realm and \
OENT in table.fields and self.entity_realm
# Individual owner query
if OUSR in table.fields:
user_id = user.id
query = (table[OUSR] == user_id)
if use_realm:
# Limit owner access to permitted realms
if realm:
realm_query = self.realm_query(table, realm)
if realm_query:
query &= realm_query
else:
query = None
if not current.deployment_settings.get_security_strict_ownership():
# Any authenticated user owns all records with no owner
public = None
if OUSR in table.fields:
public = (table[OUSR] == None)
if OGRP in table.fields:
q = (table[OGRP] == None)
if public:
public &= q
else:
public = q
if use_realm:
q = (table[OENT] == None)
if public:
public &= q
else:
public = q
if public is not None:
if query is not None:
query |= public
else:
query = public
# Group ownerships
if OGRP in table.fields:
any_entity = []
g = None
for group_id in user.realms:
role_realm = user.realms[group_id]
if role_realm is None or not use_realm:
any_entity.append(group_id)
continue
role_realm = [e for e in role_realm if e not in no_realm]
if role_realm:
q = (table[OGRP] == group_id) & (table[OENT].belongs(role_realm))
if g is None:
g = q
else:
g |= q
if any_entity:
q = (table[OGRP].belongs(any_entity))
if g is None:
g = q
else:
g |= q
if g is not None:
if query is None:
query = g
else:
query |= g
return query
# -------------------------------------------------------------------------
def realm_query(self, table, entities):
"""
Returns a query to select the records owned by one of the entities.
@param table: the table
@param entities: list of entities
@return: a web2py Query instance, or None if no query can be
constructed
"""
ANY = "ANY"
OENT = "realm_entity"
if ANY in entities:
return None
elif not entities:
return None
elif OENT in table.fields:
public = (table[OENT] == None)
if len(entities) == 1:
return (table[OENT] == entities[0]) | public
else:
return (table[OENT].belongs(entities)) | public
return None
# -------------------------------------------------------------------------
def permitted_realms(self, tablename, method="read"):
"""
Returns a list of the realm entities which a user can access for
the given table.
@param tablename: the tablename
@param method: the method
@return: a list of pe_ids or None (for no restriction)
"""
if not self.entity_realm:
# Security Policy doesn't use Realms, so unrestricted
return None
auth = self.auth
sr = auth.get_system_roles()
user = auth.user
if auth.is_logged_in():
realms = user.realms
if sr.ADMIN in realms:
# ADMIN can see all Realms
return None
delegations = user.delegations
else:
realms = Storage({sr.ANONYMOUS:None})
delegations = Storage()
racl = self.required_acl([method])
request = current.request
acls = self.applicable_acls(racl,
realms=realms,
delegations=delegations,
c=request.controller,
f=request.function,
t=tablename)
if "ANY" in acls:
# User is permitted access for all Realms
return None
entities = []
for entity in acls:
acl = acls[entity]
if acl[0] & racl == racl:
entities.append(entity)
return entities
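    # Illustrative usage (sketch; tablename is an assumed example) -
    # restrict a realm filter to the entities the user may read:
    #
    #   realms = self.permitted_realms("org_organisation")
    #   if realms is not None:   # None means unrestricted
    #       query &= (table.realm_entity.belongs(realms))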
# -------------------------------------------------------------------------
# Record approval
# -------------------------------------------------------------------------
def approved(self, table, record, approved=True):
"""
Check whether a record has been approved or not
@param table: the table
@param record: the record or record ID
@param approved: True = check if approved,
False = check if unapproved
"""
if "approved_by" not in table.fields or \
not self.requires_approval(table):
return approved
if isinstance(record, (Row, dict)):
if "approved_by" not in record:
record_id = record[table._id]
record = None
else:
record_id = record
record = None
if record is None and record_id:
record = current.db(table._id == record_id).select(table.approved_by,
limitby=(0, 1)
).first()
if not record:
return False
if approved and record["approved_by"] is not None:
return True
elif not approved and record["approved_by"] is None:
return True
else:
return False
# -------------------------------------------------------------------------
def unapproved(self, table, record):
"""
Check whether a record has not been approved yet
@param table: the table
@param record: the record or record ID
"""
return self.approved(table, record, approved=False)
# -------------------------------------------------------------------------
@classmethod
def requires_approval(cls, table):
"""
Check whether record approval is required for a table
@param table: the table (or tablename)
"""
settings = current.deployment_settings
if settings.get_auth_record_approval():
tables = settings.get_auth_record_approval_required_for()
if tables is not None:
table = table._tablename if type(table) is Table else table
if table in tables:
return True
else:
return False
elif current.s3db.get_config(table, "requires_approval"):
return True
else:
return False
else:
return False
# -------------------------------------------------------------------------
@classmethod
def set_default_approver(cls, table, force=False):
"""
Set the default approver for new records in table
@param table: the table
@param force: whether to force approval for tables which require manual approval
"""
APPROVER = "approved_by"
if APPROVER in table and (force or table._tablename not in \
current.deployment_settings.get_auth_record_approval_manual()):
auth = current.auth
approver = table[APPROVER]
if auth.override:
approver.default = 0
elif auth.s3_logged_in() and \
auth.s3_has_permission("approve", table):
approver.default = auth.user.id
else:
approver.default = None
return
# -------------------------------------------------------------------------
# Authorization
# -------------------------------------------------------------------------
def has_permission(self, method, c=None, f=None, t=None, record=None):
"""
Check permission to access a record with method
@param method: the access method (string)
@param c: the controller name (falls back to current request)
@param f: the function name (falls back to current request)
@param t: the table or tablename
@param record: the record or record ID (None for any record)
"""
# Multiple methods?
if isinstance(method, (list, tuple)):
#query = None
for m in method:
if self.has_permission(m, c=c, f=f, t=t, record=record):
return True
return False
else:
method = [method]
if record == 0:
record = None
_debug("\nhas_permission('%s', c=%s, f=%s, t=%s, record=%s)" % \
("|".join(method),
c or current.request.controller,
f or current.request.function,
t, record))
# Auth override, system roles and login
auth = self.auth
if auth.override:
_debug("==> auth.override")
_debug("*** GRANTED ***")
return True
sr = auth.get_system_roles()
logged_in = auth.s3_logged_in()
# Required ACL
racl = self.required_acl(method)
_debug("==> required ACL: %04X" % racl)
# Get realms and delegations
if not logged_in:
realms = Storage({sr.ANONYMOUS:None})
delegations = Storage()
else:
realms = auth.user.realms
delegations = auth.user.delegations
# Administrators have all permissions
if sr.ADMIN in realms:
_debug("==> user is ADMIN")
_debug("*** GRANTED ***")
return True
if not self.use_cacls:
_debug("==> simple authorization")
# Fall back to simple authorization
if logged_in:
_debug("*** GRANTED ***")
return True
else:
if self.page_restricted(c=c, f=f):
permitted = racl == self.READ
else:
_debug("==> unrestricted page")
permitted = True
if permitted:
_debug("*** GRANTED ***")
else:
_debug("*** DENIED ***")
return permitted
# Do we need to check the owner role (i.e. table+record given)?
if t is not None and record is not None:
owners = self.get_owners(t, record)
is_owner = self.is_owner(t, record, owners=owners)
entity = owners[0]
else:
owners = []
is_owner = True
entity = None
# Fall back to current request
c = c or self.controller
f = f or self.function
response = current.response
key = "%s/%s/%s/%s/%s" % (method, c, f, t, record)
if "permissions" not in response.s3:
response.s3.permissions = Storage()
if key in response.s3.permissions:
permitted = response.s3.permissions[key]
if permitted is None:
pass
elif permitted:
_debug("*** GRANTED (cached) ***")
else:
_debug("*** DENIED (cached) ***")
return response.s3.permissions[key]
# Get the applicable ACLs
acls = self.applicable_acls(racl,
realms=realms,
delegations=delegations,
c=c,
f=f,
t=t,
entity=entity)
permitted = None
if acls is None:
_debug("==> no ACLs defined for this case")
permitted = True
elif not acls:
_debug("==> no applicable ACLs")
permitted = False
else:
if entity:
if entity in acls:
uacl, oacl = acls[entity]
elif "ANY" in acls:
uacl, oacl = acls["ANY"]
else:
_debug("==> Owner entity outside realm")
permitted = False
else:
uacl, oacl = self.most_permissive(acls.values())
_debug("==> uacl: %04X, oacl: %04X" % (uacl, oacl))
if permitted is None:
if uacl & racl == racl:
permitted = True
elif oacl & racl == racl:
if is_owner and record:
_debug("==> User owns the record")
elif record:
_debug("==> User does not own the record")
permitted = is_owner
else:
permitted = False
if permitted is None:
raise self.error("Cannot determine permission.")
elif permitted and \
t is not None and record is not None and \
self.requires_approval(t):
# Approval possible for this table?
if not hasattr(t, "_tablename"):
table = current.s3db.table(t)
if not table:
raise AttributeError("undefined table %s" % t)
else:
table = t
if "approved_by" in table.fields:
approval_methods = ("approve", "review", "reject")
access_approved = not all([m in approval_methods for m in method])
access_unapproved = any([m in method for m in approval_methods])
if access_unapproved:
if not access_approved:
permitted = self.unapproved(table, record)
if not permitted:
_debug("==> Record already approved")
else:
permitted = self.approved(table, record) or \
self.is_owner(table, record, owners, strict=True) or \
self.has_permission("review", t=table, record=record)
if not permitted:
_debug("==> Record not approved")
_debug("==> is owner: %s" % is_owner)
else:
# Approval not possible for this table => no change
pass
if permitted:
_debug("*** GRANTED ***")
else:
_debug("*** DENIED ***")
response.s3.permissions[key] = permitted
return permitted
# -------------------------------------------------------------------------
def accessible_query(self, method, table, c=None, f=None, deny=True):
"""
Returns a query to select the accessible records for method
in table.
@param method: the method as string or a list of methods (AND)
@param table: the database table or table name
@param c: controller name (falls back to current request)
            @param f: function name (falls back to current request)
            @param deny: return a query matching no records rather than
                         None when access is denied
"""
# Get the table
if not hasattr(table, "_tablename"):
tablename = table
error = AttributeError("undefined table %s" % tablename)
table = current.s3db.table(tablename,
db_only = True,
default = error)
if not isinstance(method, (list, tuple)):
method = [method]
_debug("\naccessible_query(%s, '%s')" % (table, ",".join(method)))
# Defaults
ALL_RECORDS = (table._id > 0)
NO_RECORDS = (table._id == 0) if deny else None
# Record approval required?
if self.requires_approval(table) and \
"approved_by" in table.fields:
requires_approval = True
APPROVED = (table.approved_by != None)
UNAPPROVED = (table.approved_by == None)
else:
requires_approval = False
APPROVED = ALL_RECORDS
UNAPPROVED = NO_RECORDS
# Approval method?
approval_methods = ("review", "approve", "reject")
unapproved = any([m in method for m in approval_methods])
approved = not all([m in approval_methods for m in method])
# What does ALL RECORDS mean?
ALL_RECORDS = ALL_RECORDS if approved and unapproved \
else UNAPPROVED if unapproved \
else APPROVED
# Auth override, system roles and login
auth = self.auth
if auth.override:
_debug("==> auth.override")
_debug("*** ALL RECORDS ***")
return ALL_RECORDS
sr = auth.get_system_roles()
logged_in = auth.s3_logged_in()
# Get realms and delegations
user = auth.user
if not logged_in:
realms = Storage({sr.ANONYMOUS:None})
delegations = Storage()
else:
realms = user.realms
delegations = user.delegations
# Don't filter out unapproved records owned by the user
if requires_approval and not unapproved and \
"owned_by_user" in table.fields:
ALL_RECORDS = (table.approved_by != None)
if user:
owner_query = (table.owned_by_user == user.id)
else:
owner_query = self.owner_query(table, None)
if owner_query is not None:
ALL_RECORDS |= owner_query
# Administrators have all permissions
if sr.ADMIN in realms:
_debug("==> user is ADMIN")
_debug("*** ALL RECORDS ***")
return ALL_RECORDS
# Multiple methods?
if len(method) > 1:
query = None
for m in method:
q = self.accessible_query(m, table, c=c, f=f, deny=False)
if q is not None:
if query is None:
query = q
else:
query |= q
if query is None:
query = NO_RECORDS
return query
# Required ACL
racl = self.required_acl(method)
_debug("==> required permissions: %04X" % racl)
# Use ACLs?
if not self.use_cacls:
_debug("==> simple authorization")
# Fall back to simple authorization
if logged_in:
_debug("*** ALL RECORDS ***")
return ALL_RECORDS
else:
permitted = racl == self.READ
if permitted:
_debug("*** ALL RECORDS ***")
return ALL_RECORDS
else:
_debug("*** ACCESS DENIED ***")
return NO_RECORDS
# Fall back to current request
c = c or self.controller
f = f or self.function
# Get the applicable ACLs
acls = self.applicable_acls(racl,
realms=realms,
delegations=delegations,
c=c,
f=f,
t=table)
if acls is None:
_debug("==> no ACLs defined for this case")
_debug("*** ALL RECORDS ***")
return ALL_RECORDS
elif not acls:
_debug("==> no applicable ACLs")
_debug("*** ACCESS DENIED ***")
return NO_RECORDS
oacls = []
uacls = []
for entity in acls:
acl = acls[entity]
if acl[0] & racl == racl:
uacls.append(entity)
elif acl[1] & racl == racl and entity not in uacls:
oacls.append(entity)
query = None
no_realm = []
check_owner_acls = True
#OENT = "realm_entity"
if "ANY" in uacls:
_debug("==> permitted for any records")
query = ALL_RECORDS
check_owner_acls = False
elif uacls:
query = self.realm_query(table, uacls)
if query is None:
_debug("==> permitted for any records")
query = ALL_RECORDS
check_owner_acls = False
else:
_debug("==> permitted for records owned by entities %s" % str(uacls))
no_realm = uacls
if check_owner_acls:
use_realm = "ANY" not in oacls
owner_query = self.owner_query(table,
user,
use_realm=use_realm,
realm=oacls,
no_realm=no_realm,
)
if owner_query is not None:
_debug("==> permitted for owned records (limit to realms=%s)" % use_realm)
if query is not None:
query |= owner_query
else:
query = owner_query
elif use_realm:
_debug("==> permitted for any records owned by entities %s" % str(uacls+oacls))
query = self.realm_query(table, uacls+oacls)
if query is not None and requires_approval:
base_filter = None if approved and unapproved else \
UNAPPROVED if unapproved else APPROVED
if base_filter is not None:
query = base_filter & query
# Fallback
if query is None:
query = NO_RECORDS
_debug("*** Accessible Query ***")
_debug(str(query))
return query
# -------------------------------------------------------------------------
def accessible_url(self,
c=None,
f=None,
p=None,
t=None,
a=None,
args=[],
vars={},
anchor="",
extension=None,
env=None):
"""
Return a URL only if accessible by the user, otherwise False
- used for Navigation Items
@param c: the controller
@param f: the function
@param p: the permission (defaults to READ)
@param t: the tablename (defaults to <c>_<f>)
@param a: the application name
@param args: the URL arguments
@param vars: the URL variables
@param anchor: the anchor (#) of the URL
@param extension: the request format extension
@param env: the environment
"""
if c != "static":
# Hide disabled modules
settings = current.deployment_settings
if not settings.has_module(c):
return False
if t is None:
t = "%s_%s" % (c, f)
table = current.s3db.table(t)
if not table:
t = None
if not p:
p = "read"
permitted = self.has_permission(p, c=c, f=f, t=t)
if permitted:
return URL(a=a,
c=c,
f=f,
args=args,
vars=vars,
anchor=anchor,
extension=extension,
env=env)
else:
return False
# -------------------------------------------------------------------------
def fail(self):
""" Action upon insufficient permissions """
if self.format == "html":
# HTML interactive request => flash message + redirect
if self.auth.s3_logged_in():
current.session.error = self.INSUFFICIENT_PRIVILEGES
redirect(self.homepage)
else:
current.session.error = self.AUTHENTICATION_REQUIRED
redirect(self.loginpage)
else:
# non-HTML request => raise proper HTTP error
if self.auth.s3_logged_in():
raise HTTP(403, body=self.INSUFFICIENT_PRIVILEGES)
else:
# RFC1945/2617 compliance:
# Must raise an HTTP Auth challenge with status 401
challenge = {"WWW-Authenticate":
u"Basic realm=%s" % current.request.application}
raise HTTP(401, body=self.AUTHENTICATION_REQUIRED, **challenge)
# -------------------------------------------------------------------------
# ACL Lookup
# -------------------------------------------------------------------------
def applicable_acls(self, racl,
realms=None,
delegations=None,
c=None,
f=None,
t=None,
entity=None):
"""
Find all applicable ACLs for the specified situation for
the specified realms and delegations
@param racl: the required ACL
@param realms: the realms
@param delegations: the delegations
@param c: the controller name, falls back to current request
@param f: the function name, falls back to current request
@param t: the tablename
@param entity: the realm entity
@return: None for no ACLs defined (allow),
[] for no ACLs applicable (deny),
or list of applicable ACLs
"""
if not self.use_cacls:
# We do not use ACLs at all (allow all)
return None
else:
acls = {}
db = current.db
table = self.table
c = c or self.controller
f = f or self.function
page_restricted = self.page_restricted(c=c, f=f)
# Get all roles
if realms:
roles = set(realms.keys())
if delegations:
for role in delegations:
roles.add(role)
else:
# No roles available (deny all)
return acls
# Base query
query = (table.deleted != True) & \
(table.group_id.belongs(roles))
# Page ACLs
if page_restricted:
q = (table.function == None)
if f and self.use_facls:
q = (q | (table.function == f))
q &= (table.controller == c)
else:
q = None
# Table ACLs
table_restricted = False
if t and self.use_tacls:
tq = (table.controller == None) & \
(table.function == None) & \
(table.tablename == t)
if q:
q = q | tq
else:
q = tq
table_restricted = self.table_restricted(t)
# Retrieve the ACLs
if q:
query &= q
rows = db(query).select(table.group_id,
table.controller,
table.function,
table.tablename,
table.unrestricted,
table.entity,
table.uacl,
table.oacl,
cacheable=True)
else:
rows = []
# Cascade ACLs
ANY = "ANY"
ALL = (self.ALL, self.ALL)
NONE = (self.NONE, self.NONE)
use_facls = self.use_facls
def rule_type(r):
if r.controller is not None:
if r.function is None:
return "c"
elif use_facls:
return "f"
elif r.tablename is not None:
return "t"
return None
most_permissive = lambda x, y: (x[0] | y[0], x[1] | y[1])
most_restrictive = lambda x, y: (x[0] & y[0], x[1] & y[1])
# Realms
delegation_rows = []
append_delegation = delegation_rows.append
for row in rows:
# Get the assigning entities
group_id = row.group_id
if group_id in delegations:
append_delegation(row)
if group_id not in realms:
continue
elif self.entity_realm:
entities = realms[group_id]
else:
entities = None
# Get the rule type
rtype = rule_type(row)
if rtype is None:
continue
# Resolve the realm
if row.unrestricted:
entities = [ANY]
elif entities is None:
if row.entity is not None:
entities = [row.entity]
else:
entities = [ANY]
# Merge the ACL
acl = (row["uacl"], row["oacl"])
for e in entities:
if e not in acls:
acls[e] = {rtype: acl}
elif rtype in acls[e]:
acls[e][rtype] = most_permissive(acls[e][rtype], acl)
else:
acls[e][rtype] = acl
if ANY in acls:
default = dict(acls[ANY])
else:
default = None
# Delegations
if self.delegations:
for row in delegation_rows:
# Get the rule type
rtype = rule_type(row)
if rtype is None:
continue
# Get the delegation realms
group_id = row.group_id
if group_id not in delegations:
continue
else:
drealms = delegations[group_id]
acl = (row["uacl"], row["oacl"])
# Resolve the delegation realms
# @todo: optimize
for receiver in drealms:
drealm = drealms[receiver]
# Skip irrelevant delegations
if entity:
if entity not in drealm:
continue
else:
drealm = [entity]
# What ACLs do we have for the receiver?
if receiver in acls:
dacls = dict(acls[receiver])
elif default is not None:
dacls = default
else:
continue
# Filter the delegated ACLs
if rtype in dacls:
dacls[rtype] = most_restrictive(dacls[rtype], acl)
else:
dacls[rtype] = acl
# Add/extend the new realms (e=entity, t=rule type)
# @todo: optimize
for e in drealm:
if e in acls:
for t in ("c", "f", "t"):
if t in acls[e]:
if t in dacls:
dacls[t] = most_restrictive(dacls[t], acls[e][t])
else:
dacls[t] = acls[e][t]
acls[e] = dacls
acl = acls.get(ANY, {})
# Default page ACL
if "c" in acl:
if "f" in acl:
default_page_acl = acl["f"]
else:
default_page_acl = acl["c"]
elif page_restricted:
default_page_acl = NONE
else:
default_page_acl = ALL
# Default table ACL
if "t" in acl:
default_table_acl = acl["t"]
elif table_restricted:
default_table_acl = default_page_acl
else:
default_table_acl = ALL
# Fall back to default page acl
if not acls and not (t and self.use_tacls):
acls[ANY] = {"c": default_page_acl}
# Order by precedence
s3db = current.s3db
ancestors = set()
if entity and self.entity_hierarchy and \
s3db.pr_instance_type(entity) == "pr_person":
# If the realm entity is a person, then we apply the ACLs
# for the immediate OU ancestors, for two reasons:
# a) it is not possible to assign roles for personal realms anyway
# b) looking up OU ancestors of a person (=a few) is much more
# efficient than looking up pr_person OU descendants of the
# role realm (=could be tens or hundreds of thousands)
ancestors = set(s3db.pr_realm(entity))
result = {}
for e in acls:
# Skip irrelevant ACLs
if entity and e != entity and e != ANY:
if e in ancestors:
key = entity
else:
continue
else:
key = e
acl = acls[e]
# Get the page ACL
if "f" in acl:
page_acl = acl["f"]
elif "c" in acl:
page_acl = acl["c"]
elif page_restricted:
page_acl = NONE
else:
page_acl = ALL
page_acl = most_permissive(default_page_acl, page_acl)
# Get the table ACL
if "t" in acl:
table_acl = acl["t"]
elif table_restricted:
table_acl = NONE
else:
table_acl = ALL
table_acl = most_permissive(default_table_acl, table_acl)
# Merge
acl = most_restrictive(page_acl, table_acl)
# Include ACL if relevant
if acl[0] & racl == racl or acl[1] & racl == racl:
result[key] = acl
#for pe in result:
#print "ACL for PE %s: %04X %04X" % (pe, result[pe][0], result[pe][1])
return result
# -------------------------------------------------------------------------
# Utilities
# -------------------------------------------------------------------------
def page_restricted(self, c=None, f=None):
"""
Checks whether a page is restricted (=whether ACLs
are to be applied)
@param c: controller name
@param f: function name
"""
modules = current.deployment_settings.modules
page = "%s/%s" % (c, f)
if page in self.unrestricted_pages:
return False
elif c not in modules or not modules[c].restricted:
return False
return True
# -------------------------------------------------------------------------
def table_restricted(self, t=None):
"""
Check whether access to a table is restricted
@param t: the table name or Table
"""
s3 = current.response.s3
if not "restricted_tables" in s3:
table = self.table
query = (table.deleted != True) & \
(table.controller == None) & \
(table.function == None)
rows = current.db(query).select(table.tablename,
groupby=table.tablename)
s3.restricted_tables = [row.tablename for row in rows]
return str(t) in s3.restricted_tables
# -------------------------------------------------------------------------
def hidden_modules(self):
""" List of modules to hide from the main menu """
hidden_modules = []
if self.use_cacls:
sr = self.auth.get_system_roles()
modules = current.deployment_settings.modules
restricted_modules = [m for m in modules
if modules[m].restricted]
roles = []
if current.session.s3 is not None:
roles = current.session.s3.roles or []
if sr.ADMIN in roles or sr.EDITOR in roles:
return []
if not roles:
hidden_modules = restricted_modules
else:
t = self.table
query = (t.deleted != True) & \
(t.controller.belongs(restricted_modules)) & \
(t.tablename == None)
if roles:
query = query & (t.group_id.belongs(roles))
else:
query = query & (t.group_id == None)
rows = current.db(query).select()
acls = dict()
for acl in rows:
if acl.controller not in acls:
acls[acl.controller] = self.NONE
acls[acl.controller] |= acl.oacl | acl.uacl
hidden_modules = [m for m in restricted_modules
if m not in acls or not acls[m]]
return hidden_modules
# -------------------------------------------------------------------------
def ownership_required(self, method, table, c=None, f=None):
"""
Checks whether ownership can be required to access records in
this table (this may not apply to every record in this table).
@param method: the method as string or a list of methods (AND)
@param table: the database table or table name
@param c: controller name (falls back to current request)
@param f: function name (falls back to current request)
"""
if not self.use_cacls:
if self.policy in (1, 2):
return False
else:
return True
if not hasattr(table, "_tablename"):
tablename = table
table = current.s3db.table(tablename)
if not table:
raise AttributeError("undefined table %s" % tablename)
# If the table doesn't have any ownership fields, then no
if "owned_by_user" not in table.fields and \
"owned_by_group" not in table.fields and \
"realm_entity" not in table.fields:
return False
if not isinstance(method, (list, tuple)):
method = [method]
# Auth override, system roles and login
auth = self.auth
if auth.override:
return False
sr = auth.get_system_roles()
logged_in = auth.s3_logged_in()
# Required ACL
racl = self.required_acl(method)
# Get realms and delegations
user = auth.user
if not logged_in:
realms = Storage({sr.ANONYMOUS:None})
delegations = Storage()
else:
realms = user.realms
delegations = user.delegations
# Admin always owns all records
if sr.ADMIN in realms:
return False
# Fall back to current request
c = c or self.controller
f = f or self.function
# Get the applicable ACLs
acls = self.applicable_acls(racl,
realms=realms,
delegations=delegations,
c=c,
f=f,
t=table)
acls = [entity for entity in acls if acls[entity][0] & racl == racl]
# If we have a UACL and it is not limited to any realm, then no
if "ANY" in acls or acls and "realm_entity" not in table.fields:
return False
# In all other cases: yes
return True
# -------------------------------------------------------------------------
def forget(self, table=None, record_id=None):
"""
Remove any cached permissions for a record. This can be
necessary in methods which change the status of the record
(e.g. approval).
@param table: the table
@param record_id: the record ID
"""
if table is None:
current.response.s3.permissions = Storage()
return
try:
permissions = current.response.s3.permissions
except AttributeError:
return
if not permissions:
return
if hasattr(table, "_tablename"):
tablename = table._tablename
else:
tablename = table
for key in list(permissions.keys()):
r = key.split("/")
if len(r) > 1 and r[-2] == tablename:
if record_id is None or r[-1] == str(record_id):
del permissions[key]
return
# =============================================================================
class S3Audit(object):
""" S3 Audit Trail Writer Class """
def __init__(self,
tablename="s3_audit",
migrate=True,
fake_migrate=False):
"""
Constructor
@param tablename: the name of the audit table
@param migrate: migration setting
@param fake_migrate: fake-migration setting (passed to define_table)
@note: this defines the audit table
"""
settings = current.deployment_settings
audit_read = settings.get_security_audit_read()
audit_write = settings.get_security_audit_write()
if not audit_read and not audit_write:
# Auditing is Disabled
self.table = None
return
db = current.db
if tablename not in db:
db.define_table(tablename,
Field("timestmp", "datetime",
represent = S3DateTime.datetime_represent,
),
Field("user_id", db.auth_user),
Field("method"),
Field("tablename"),
Field("record_id", "integer"),
Field("representation"),
# List of Key:Values
Field("old_value", "text"),
# List of Key:Values
Field("new_value", "text"),
migrate=migrate,
fake_migrate=fake_migrate,
)
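# Note: old_value/new_value hold lists of "field:value" strings,
# e.g. ["name:Example", "status:2"] (illustrative values)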
self.table = db[tablename]
user = current.auth.user
if user:
self.user_id = user.id
else:
self.user_id = None
# -------------------------------------------------------------------------
def __call__(self, method, prefix, name,
form=None,
record=None,
representation="unknown"):
"""
Audit
@param method: Method to log, one of
"create", "update", "read", "list" or "delete"
@param prefix: the module prefix of the resource
@param name: the name of the resource (without prefix)
@param form: the form
@param record: the record ID
@param representation: the representation format
"""
table = self.table
if not table:
# Don't Audit
return True
#if DEBUG:
# _debug("Audit %s: %s_%s record=%s representation=%s" % \
# (method, prefix, name, record, representation))
if method in ("list", "read"):
audit = current.deployment_settings.get_security_audit_read()
elif method in ("create", "update", "delete"):
audit = current.deployment_settings.get_security_audit_write()
else:
# Don't Audit
return True
if not audit:
# Don't Audit
return True
tablename = "%s_%s" % (prefix, name)
if record:
if isinstance(record, Row):
record = record.get("id", None)
if not record:
return True
try:
record = int(record)
except ValueError:
record = None
elif form:
try:
record = form.vars["id"]
except Exception:
try:
record = form["id"]
except Exception:
record = None
if record:
try:
record = int(record)
except ValueError:
record = None
else:
record = None
if callable(audit):
audit = audit(method, tablename, form, record, representation)
if not audit:
# Don't Audit
return True
if method in ("list", "read"):
table.insert(timestmp = datetime.datetime.utcnow(),
user_id = self.user_id,
method = method,
tablename = tablename,
record_id = record,
representation = representation,
)
elif method == "create":
if form:
form_vars = form.vars
if not record:
record = form_vars["id"]
new_value = ["%s:%s" % (var, str(form_vars[var]))
for var in form_vars if form_vars[var]]
else:
new_value = []
table.insert(timestmp = datetime.datetime.utcnow(),
user_id = self.user_id,
method = method,
tablename = tablename,
record_id = record,
representation = representation,
new_value = new_value,
)
elif method == "update":
if form:
rvars = form.record
if rvars:
old_value = ["%s:%s" % (var, str(rvars[var]))
for var in rvars]
else:
old_value = []
fvars = form.vars
if not record:
record = fvars["id"]
new_value = ["%s:%s" % (var, str(fvars[var]))
for var in fvars]
else:
new_value = []
old_value = []
table.insert(timestmp = datetime.datetime.utcnow(),
user_id = self.user_id,
method = method,
tablename = tablename,
record_id = record,
representation = representation,
old_value = old_value,
new_value = new_value,
)
elif method == "delete":
db = current.db
query = (db[tablename].id == record)
row = db(query).select(limitby=(0, 1)).first()
old_value = []
if row:
old_value = ["%s:%s" % (field, row[field])
for field in row]
table.insert(timestmp = datetime.datetime.utcnow(),
user_id = self.user_id,
method = method,
tablename = tablename,
record_id = record,
representation = representation,
old_value = old_value,
)
return True
# -------------------------------------------------------------------------
def represent(self, records):
"""
Provide a Human-readable representation of Audit records
- currently unused
@param records: the record ID or list of record IDs
"""
table = self.table
# Retrieve the records
if isinstance(records, int):
limit = 1
query = (table.id == records)
else:
limit = len(records)
query = (table.id.belongs(records))
records = current.db(query).select(table.tablename,
table.method,
table.user_id,
table.old_value,
table.new_value,
limitby=(0, limit)
)
# Convert to Human-readable form
s3db = current.s3db
output = []
oappend = output.append
for record in records:
table = s3db[record.tablename]
method = record.method
if method == "create":
new_value = record.new_value
if not new_value:
continue
diff = []
dappend = diff.append
for v in new_value:
fieldname, value = v.split(":", 1)
represent = table[fieldname].represent
if represent:
value = represent(value)
label = table[fieldname].label or fieldname
dappend("%s is %s" % (label, value))
elif method == "update":
old_values = record.old_value
new_values = record.new_value
if not new_values:
continue
# Convert the old values into a dict for lookup by field name
old_values = dict(v.split(":", 1) for v in old_values) \
if old_values else {}
changed = {}
for v in new_values:
fieldname, new_value = v.split(":", 1)
old_value = old_values.get(fieldname, None)
if new_value != old_value:
ftype = table[fieldname].type
if ftype == "integer" or \
ftype.startswith("reference"):
if new_value:
new_value = int(new_value)
if new_value == old_value:
continue
represent = table[fieldname].represent
if represent:
new_value = represent(new_value)
label = table[fieldname].label or fieldname
if old_value:
if represent:
old_value = represent(old_value)
changed[fieldname] = "%s changed from %s to %s" % \
(label, old_value, new_value)
else:
changed[fieldname] = "%s changed to %s" % \
(label, new_value)
diff = []
dappend = diff.append
for fieldname in changed:
dappend(changed[fieldname])
elif method == "delete":
old_value = record.old_value
if not old_value:
continue
diff = []
dappend = diff.append
for v in old_value:
fieldname, value = v.split(":", 1)
represent = table[fieldname].represent
if represent:
value = represent(value)
label = table[fieldname].label or fieldname
dappend("%s was %s" % (label, value))
oappend("\n".join(diff))
return output
# =============================================================================
class S3RoleManager(S3Method):
""" REST Method to manage ACLs (Role Manager UI for administrators) """
# @ToDo: Support settings.L10n.translate_org_organisation
# Controllers to hide from the permissions matrix
HIDE_CONTROLLER = ("admin", "default")
# Roles to hide from the permissions matrix
# @todo: deprecate
HIDE_ROLES = []
controllers = Storage()
# -------------------------------------------------------------------------
def apply_method(self, r, **attr):
"""
Apply role manager
"""
method = self.method
if method == "list":
output = self._list(r, **attr)
elif method in ("read", "create", "update"):
output = self._edit(r, **attr)
elif method == "delete":
output = self._delete(r, **attr)
elif method == "roles" and r.name == "user":
output = self._roles(r, **attr)
elif method == "users":
output = self._users(r, **attr)
else:
r.error(405, current.ERROR.BAD_METHOD)
if r.http == "GET" and method not in ("create", "update", "delete"):
current.session.s3.cancel = r.url()
return output
# -------------------------------------------------------------------------
def _list(self, r, **attr):
"""
List roles/permissions
"""
if r.id:
return self._edit(r, **attr)
output = dict()
if r.interactive:
T = current.T
db = current.db
response = current.response
resource = self.resource
auth = current.auth
options = auth.permission.PERMISSION_OPTS
NONE = auth.permission.NONE
get_vars = self.request.get_vars
table = self.table
# Show permission matrix?
# (convert value to a boolean)
show_matrix = get_vars.get("matrix", False) and True
# Title and subtitle
output.update(title = T("List of Roles"))
# Undeletable roles (these shall never have a delete button)
sr = auth.get_system_roles()
undeletable = [sr.ADMIN, sr.ANONYMOUS, sr.AUTHENTICATED]
# Filter out hidden roles
resource.add_filter((~(table.id.belongs(self.HIDE_ROLES))) &
(table.hidden != True))
resource.load(orderby=table.role,
fields=("id", "role", "description", "protected"))
# Get active controllers
controllers = [c for c in self.controllers.keys()
if c not in self.HIDE_CONTROLLER]
# ACLs
acl_table = auth.permission.table
query = resource.get_query()
query = query & \
(acl_table.group_id == self.table.id) & \
(acl_table.deleted != True)
records = db(query).select(acl_table.ALL)
any = "ANY"
acls = Storage({any: Storage()})
for acl in records:
c = acl.controller
f = acl.function
if not f:
f = any
role_id = acl.group_id
if f not in acls:
acls[f] = Storage()
if c not in acls[f]:
acls[f][c] = Storage()
acls[f][c][str(role_id)] = Storage(oacl = acl.oacl,
uacl = acl.uacl)
for c in controllers:
if c not in acls[any]:
acls[any][c] = Storage()
if any not in acls[any][c]:
acls[any][c][any] = Storage(oacl = NONE,
uacl = NONE)
# Table header
columns = []
headers = [TH("ID"), TH(T("Role"))]
if show_matrix:
for c in controllers:
if c in acls[any]:
headers.append(TH(self.controllers[c].name_nice))
columns.append((c, any))
for f in acls:
if f != any and c in acls[f]:
headers.append(TH(self.controllers[c].name_nice,
BR(), f))
columns.append((c, f))
else:
headers += [TH(T("Description"))]
thead = THEAD(TR(headers))
# Table body
trows = []
for i, role in enumerate(resource):
role_id = role.id
role_name = role.role
role_desc = role.description
actions = []
# Edit button to edit permissions of the role
if role_id != sr.ADMIN:
edit_btn = A(T("Edit"),
_href=URL(c="admin", f="role",
args=[role_id],
vars=get_vars,
),
_class="action-btn")
actions.append(edit_btn)
# Users button to manage users for this role
users_btn = A(T("Users"),
_href=URL(c="admin", f="role",
args=[role_id, "users"],
),
_class="action-btn")
actions.append(users_btn)
# Delete button to delete this role
if not role.protected and role_id not in undeletable:
delete_btn = A(T("Delete"),
_href=URL(c="admin", f="role",
args=[role_id, "delete"],
vars=get_vars,
),
_class="delete-btn")
actions.append(delete_btn)
tdata = [TD(actions), TD(role_name)]
if show_matrix:
# Display the permission matrix
for c, f in columns:
if f in acls and c in acls[f] and \
str(role_id) in acls[f][c]:
oacl = acls[f][c][str(role_id)].oacl
uacl = acls[f][c][str(role_id)].uacl
else:
oacl = acls[any][c][any].oacl
uacl = acls[any][c][any].uacl
oaclstr = ""
uaclstr = ""
for o in options:
if o == NONE and oacl == NONE:
oaclstr = "%s%s" % (oaclstr, options[o][0])
elif oacl and oacl & o:
oaclstr = "%s%s" % (oaclstr, options[o][0])
else:
oaclstr = "%s-" % oaclstr
if o == NONE and uacl == NONE:
uaclstr = "%s%s" % (uaclstr, options[o][0])
elif uacl and uacl & o:
uaclstr = "%s%s" % (uaclstr, options[o][0])
else:
uaclstr = "%s-" % uaclstr
values = "%s (%s)" % (uaclstr, oaclstr)
tdata += [TD(values, _nowrap="nowrap")]
else:
# Display role descriptions
tdata += [TD(role_desc)]
_class = i % 2 and "even" or "odd"
trows.append(TR(tdata, _class=_class))
tbody = TBODY(trows)
# Create datatable
items = TABLE(thead,
tbody,
_class="dataTable display",
_id="datatable",
)
s3 = response.s3
s3.no_formats = True
s3.actions = []
s3.no_sspag = True
from s3data import S3DataTable
dt = S3DataTable.htmlConfig(items, "datatable", [[1, "asc"]],
dt_pagination=False)
output["items"] = dt
# Add-button
add_btn = A(T("Create Role"),
_href=URL(c="admin", f="role", args=["create"]),
_class="action-btn")
output["add_btn"] = add_btn
response.view = "admin/role_list.html"
elif r.representation == "xls":
# Not implemented yet
r.error(501, current.ERROR.BAD_FORMAT)
else:
r.error(501, current.ERROR.BAD_FORMAT)
return output
# -------------------------------------------------------------------------
def _edit(self, r, **attr):
"""
Create/update role
"""
output = dict()
request = self.request
session = current.session
db = current.db
T = current.T
CACL = T("Application Permissions")
FACL = T("Function Permissions")
TACL = T("Table Permissions")
CANCEL = T("Cancel")
auth = current.auth
permission = auth.permission
acl_table = permission.table
NONE = permission.NONE
if r.interactive:
# Get the current record (if any)
if r.record:
output.update(title=T("Edit Role"))
role_id = r.record.id
role_name = r.record.role
role_desc = r.record.description
else:
output.update(title=T("New Role"))
role_id = None
role_name = None
role_desc = None
sr = auth.get_system_roles()
if role_id == sr.ADMIN:
# Pointless attempt
r.error(400, T("ADMIN Permissions can not be changed."),
next = r.url(method="", id=0))
# Form helpers ----------------------------------------------------
mandatory = lambda l: DIV(l, XML("&nbsp;"),
SPAN("*", _class="req"))
from s3validators import IS_ACL
acl_table.oacl.requires = IS_ACL(permission.PERMISSION_OPTS)
acl_table.uacl.requires = IS_ACL(permission.PERMISSION_OPTS)
from s3widgets import S3ACLWidget
acl_widget = lambda f, n, v: \
S3ACLWidget.widget(acl_table[f], v, _id=n, _name=n,
_class="acl-widget")
using_default = SPAN(T("using default"), _class="using-default")
delete_acl = lambda _id: _id is not None and \
A(T("Delete"),
_href = URL(c="admin", f="acl",
args=[_id, "delete"],
vars=dict(_next=r.url())),
_class = "delete-btn") or using_default
new_acl = SPAN(T("new ACL"), _class="new-acl")
form = FORM()
# Role form -------------------------------------------------------
formstyle = current.deployment_settings.get_ui_formstyle()
id1 = "role_name"
label1 = LABEL(mandatory("%s:" % T("Role Name")))
widget1 = INPUT(value=role_name,
_name="role_name",
_type="text",
requires=IS_NOT_IN_DB(db, "auth_group.role",
allowed_override=[role_name]
),
)
id2 = "role_desc"
label2 = LABEL("%s:" % T("Description"))
widget2 = TEXTAREA(value=role_desc,
_name="role_desc",
_rows="4")
if callable(formstyle):
form_rows = formstyle(form, [[id1, label1, widget1, ""],
[id2, label2, widget2, ""],
]
)
form_rows.update(_id="role_form")
else:
# Fallback to DIVs
form_rows = DIV(label1, widget1, _id=id1) + \
DIV(label2, widget2, _id=id2)
key_row = DIV(T("* Required Fields"), _class="req")
role_form = DIV(key_row, form_rows, _id="role-form")
form.append(role_form)
# Prepare ACL forms -----------------------------------------------
ANY = "ANY"
controllers = [c for c in self.controllers.keys()
if c not in self.HIDE_CONTROLLER]
ptables = []
query = (acl_table.deleted != True) & \
(acl_table.group_id == role_id)
records = db(query).select()
acl_forms = []
# Relevant ACLs
acls = Storage()
for acl in records:
if acl.controller in controllers:
if acl.controller not in acls:
acls[acl.controller] = Storage()
if not acl.function:
f = ANY
else:
if permission.use_facls:
f = acl.function
else:
continue
acls[acl.controller][f] = acl
# Controller ACL table --------------------------------------------
# Table header
thead = THEAD(TR(TH(T("Application")),
TH(T("All Records")),
TH(T("Owned Records")),
TH()))
# Rows for existing ACLs
form_rows = []
i = 0
for c in controllers:
default = Storage(id = None,
controller = c,
function = ANY,
tablename = None,
uacl = NONE,
oacl = NONE)
if c in acls:
acl_list = acls[c]
if ANY not in acl_list:
acl_list[ANY] = default
else:
acl_list = Storage(ANY=default)
acl = acl_list[ANY]
_class = i % 2 and "even" or "odd"
i += 1
uacl = NONE
oacl = NONE
if acl.oacl is not None:
oacl = acl.oacl
if acl.uacl is not None:
uacl = acl.uacl
_id = acl.id
delete_btn = delete_acl(_id)
n = "%s-%s-ANY-ANY" % (_id, c)
uacl = acl_widget("uacl", "acl_u_%s" % n, uacl)
oacl = acl_widget("oacl", "acl_o_%s" % n, oacl)
cn = self.controllers[c].name_nice
form_rows.append(TR(TD(cn),
TD(uacl),
TD(oacl),
TD(delete_btn),
_class=_class))
# Tabs
tabs = [SPAN(A(CACL), _class="tab_here")]
if permission.use_facls:
_class = permission.use_tacls and \
"tab_other" or "tab_last"
tabs.append(SPAN(A(FACL, _class="facl-tab"), _class=_class))
if permission.use_tacls:
tabs.append(SPAN(A(TACL, _class="tacl-tab"),
_class="tab_last"))
acl_forms.append(DIV(DIV(tabs, _class="tabs"),
TABLE(thead, TBODY(form_rows)),
_id="controller-acls"))
# Function ACL table ----------------------------------------------
if permission.use_facls:
# Table header
thead = THEAD(TR(TH(T("Application")),
TH(T("Function")),
TH(T("All Records")),
TH(T("Owned Records")),
TH()))
# Rows for existing ACLs
form_rows = []
i = 0
for c in controllers:
if c in acls:
acl_list = acls[c]
else:
continue
keys = sorted(acl_list.keys())
for f in keys:
if f == ANY:
continue
acl = acl_list[f]
_class = i % 2 and "even" or "odd"
i += 1
uacl = NONE
oacl = NONE
if acl.oacl is not None:
oacl = acl.oacl
if acl.uacl is not None:
uacl = acl.uacl
_id = acl.id
delete_btn = delete_acl(_id)
n = "%s-%s-%s-ANY" % (_id, c, f)
uacl = acl_widget("uacl", "acl_u_%s" % n, uacl)
oacl = acl_widget("oacl", "acl_o_%s" % n, oacl)
cn = self.controllers[c].name_nice
form_rows.append(TR(TD(cn),
TD(f),
TD(uacl),
TD(oacl),
TD(delete_btn),
_class=_class))
# Row to enter a new controller ACL
_class = i % 2 and "even" or "odd"
c_opts = [OPTION("", _value=None, _selected="selected")] + \
[OPTION(self.controllers[c].name_nice,
_value=c) for c in controllers]
c_select = SELECT(_name="new_controller", *c_opts)
form_rows.append(TR(
TD(c_select),
TD(INPUT(_type="text", _name="new_function")),
TD(acl_widget("uacl", "new_c_uacl", NONE)),
TD(acl_widget("oacl", "new_c_oacl", NONE)),
TD(new_acl), _class=_class))
# Tabs to change to the other view
tabs = [SPAN(A(CACL, _class="cacl-tab"),
_class="tab_other"),
SPAN(A(FACL), _class="tab_here")]
if permission.use_tacls:
tabs.append(SPAN(A(TACL, _class="tacl-tab"),
_class="tab_last"))
acl_forms.append(DIV(DIV(tabs, _class="tabs"),
TABLE(thead, TBODY(form_rows)),
_id="function-acls"))
# Table ACL table -------------------------------------------------
if permission.use_tacls:
query = (acl_table.deleted != True) & \
(acl_table.tablename != None)
tacls = db(query).select(acl_table.tablename, distinct=True)
if tacls:
ptables = [acl.tablename for acl in tacls]
# Relevant ACLs
acls = dict((acl.tablename, acl) for acl in records
if acl.tablename in ptables)
# Table header
thead = THEAD(TR(TH(T("Tablename")),
TH(T("All Records")),
TH(T("Owned Records")),
TH()))
# Rows for existing table ACLs
form_rows = []
i = 0
for t in ptables:
_class = i % 2 and "even" or "odd"
i += 1
uacl = NONE
oacl = NONE
_id = None
if t in acls:
acl = acls[t]
if acl.uacl is not None:
uacl = acl.uacl
if acl.oacl is not None:
oacl = acl.oacl
_id = acl.id
delete_btn = delete_acl(_id)
n = "%s-ANY-ANY-%s" % (_id, t)
uacl = acl_widget("uacl", "acl_u_%s" % n, uacl)
oacl = acl_widget("oacl", "acl_o_%s" % n, oacl)
form_rows.append(TR(TD(t),
TD(uacl),
TD(oacl),
TD(delete_btn),
_class=_class))
# Row to enter a new table ACL
_class = i % 2 and "even" or "odd"
# @todo: find a better way to provide a selection of tables
#all_tables = [t._tablename for t in current.db]
form_rows.append(TR(
TD(INPUT(_type="text", _name="new_table")),
# @todo: doesn't work with conditional models
#requires=IS_EMPTY_OR(IS_IN_SET(all_tables,
#zero=None,
#error_message=T("Undefined Table"))))),
TD(acl_widget("uacl", "new_t_uacl", NONE)),
TD(acl_widget("oacl", "new_t_oacl", NONE)),
TD(new_acl), _class=_class))
# Tabs
tabs = [SPAN(A(CACL, _class="cacl-tab"),
_class="tab_other")]
if permission.use_facls:
tabs.append(SPAN(A(FACL, _class="facl-tab"),
_class="tab_other"))
tabs.append(SPAN(A(TACL), _class="tab_here"))
acl_forms.append(DIV(DIV(tabs, _class="tabs"),
TABLE(thead, TBODY(form_rows)),
_id="table-acls"))
# Append to form
acl_form = DIV(acl_forms, _id="table-container")
form.append(acl_form)
# Action row ------------------------------------------------------
if session.s3.cancel:
cancel = session.s3.cancel
else:
cancel = URL(c="admin", f="role",
vars=request.get_vars)
action_row = DIV(INPUT(_type="submit",
_value=T("Save"),
_class="small primary button",
),
A(CANCEL,
_href=cancel,
_class="action-lnk",
),
_id="action-row")
# Append to form
form.append(action_row)
# Append role_id
if role_id:
form.append(INPUT(_type="hidden",
_name="role_id",
value=role_id))
# Process the form ------------------------------------------------
if form.accepts(request.post_vars, session):
vars = form.vars
# Update the role
role = Storage(role=vars.role_name, description=vars.role_desc)
if r.record:
r.record.update_record(**role)
role_id = form.vars.role_id
session.confirmation = '%s "%s" %s' % (T("Role"),
role.role,
T("updated"))
else:
role.uuid = uuid4()
role_id = self.table.insert(**role)
session.confirmation = '%s "%s" %s' % (T("Role"),
role.role,
T("created"))
if role_id:
# Collect the ACLs
acls = Storage()
for v in vars:
if v[:4] == "acl_":
acl_type, name = v[4:].split("_", 1)
n = name.split("-", 3)
i, c, f, t = [item if item and item != ANY else None
for item in n]
if i.isdigit():
i = int(i)
else:
i = None
name = "%s-%s-%s" % (c, f, t)
if name not in acls:
acls[name] = Storage()
acls[name].update({"id": i,
"group_id": role_id,
"controller": c,
"function": f,
"tablename": t,
"%sacl" % acl_type: vars[v]})
for v in ("new_controller", "new_table"):
if v in vars and vars[v]:
c = v == "new_controller" and \
vars.new_controller or None
f = v == "new_controller" and \
vars.new_function or None
t = v == "new_table" and vars.new_table or None
name = "%s-%s-%s" % (c, f, t)
x = v == "new_table" and "t" or "c"
uacl = vars["new_%s_uacl" % x]
oacl = vars["new_%s_oacl" % x]
if name not in acls:
acls[name] = Storage()
acls[name].update(group_id=role_id,
controller=c,
function=f,
tablename=t,
oacl=oacl,
uacl=uacl)
# Save the ACLs
for acl in acls.values():
_id = acl.pop("id", None)
if _id:
query = (acl_table.deleted != True) & \
(acl_table.id == _id)
db(query).update(**acl)
elif acl.oacl or acl.uacl:
_id = acl_table.insert(**acl)
redirect(URL(f="role", vars=request.get_vars))
output.update(form=form)
if form.errors:
if "new_table" in form.errors:
output.update(acl="table")
elif "new_controller" in form.errors:
output.update(acl="function")
current.response.view = "admin/role_edit.html"
else:
r.error(501, current.ERROR.BAD_FORMAT)
return output
# -------------------------------------------------------------------------
def _delete(self, r, **attr):
"""
Delete role
"""
session = current.session
request = self.request
T = current.T
auth = current.auth
if r.interactive:
if r.record:
role = r.record
role_id = role.id
role_name = role.role
if role.protected or role.system:
session.error = '%s "%s" %s' % (T("Role"),
role_name,
T("cannot be deleted."))
redirect(URL(c="admin", f="role",
vars=request.get_vars))
else:
db = current.db
# Delete all ACLs for this role:
acl_table = auth.permission.table
query = (acl_table.deleted != True) & \
(acl_table.group_id == role_id)
db(query).update(deleted=True)
# Remove all memberships:
membership_table = db.auth_membership
query = (membership_table.deleted != True) & \
(membership_table.group_id == role_id)
db(query).update(deleted=True)
# Update roles in session:
session.s3.roles = [gid for gid in session.s3.roles
if gid != role_id]
# Remove role:
query = (self.table.deleted != True) & \
(self.table.id == role_id)
db(query).update(role=None,
deleted=True)
# Confirmation:
session.confirmation = '%s "%s" %s' % (T("Role"),
role_name,
T("deleted"))
else:
session.error = T("No role to delete")
else:
r.error(501, current.ERROR.BAD_FORMAT)
redirect(URL(c="admin", f="role", vars=request.get_vars))
# -------------------------------------------------------------------------
def _roles(self, r, **attr):
T = current.T
db = current.db
auth = current.auth
request = current.request
session = current.session
settings = auth.settings
userfield = settings.login_userfield
output = dict()
# Unrestrictable roles
sr = auth.get_system_roles()
unrestrictable = [sr.ADMIN, sr.ANONYMOUS, sr.AUTHENTICATED]
if r.record:
user = r.record
user_id = r.id
user_name = user[userfield]
use_realms = auth.permission.entity_realm
# These roles are assigned by the system:
unassignable = set((sr.ANONYMOUS, sr.AUTHENTICATED))
has_role = auth.s3_has_role
for role in (sr.ADMIN, sr.ORG_ADMIN, sr.ORG_GROUP_ADMIN):
if not has_role(role):
# Users must have the role themselves in order to
# assign it to others
unassignable.add(role)
elif role == sr.ADMIN and user_id == auth.user_id:
# Admins cannot remove their own ADMIN role (to prevent
# them from locking themselves out)
unassignable.add(role)
# Catch incomplete system roles setups (legacy databases)
unassignable.discard(None)
if r.representation == "html":
arrow = TD(IMG(_src="/%s/static/img/arrow-turn.png" % request.application),
_style="text-align:center; vertical-align:middle; width:48px;")
# Get current memberships
mtable = settings.table_membership
gtable = settings.table_group
query = (mtable.deleted != True) & \
(mtable.user_id == user_id) & \
(gtable.deleted != True) & \
(mtable.group_id == gtable.id)
rows = db(query).select(mtable.id,
mtable.pe_id,
gtable.id,
gtable.role)
entities = [row[mtable.pe_id] for row in rows]
entity_repr = self._entity_represent(entities)
assigned = [row[gtable.id] for row in rows]
# Page Title
title = "%s: %s" % (T("Roles of User"), user_name)
# Remove-Form -------------------------------------------------
# Subtitle
rmvtitle = T("Roles currently assigned")
trow = TR(TH(), TH(T("Role")))
if use_realms:
trow.append(TH(T("For Entity")))
thead = THEAD(trow)
# Rows
if rows:
i = 0
trows = []
remove = False
for row in rows:
group_id = row[gtable.id]
_class = i % 2 and "even" or "odd"
i += 1
trow = TR(_class=_class)
# Row selector
if group_id in unassignable:
trow.append(TD())
else:
trow.append(TD(INPUT(_type="checkbox",
_name="d_%s" % row[mtable.id],
_class="remove_item")))
remove = True
# Role
name = row[gtable.role]
trow.append(TD(name))
# Entity
if use_realms:
if row[gtable.id] in unrestrictable:
pe_id = 0
else:
pe_id = row[mtable.pe_id]
pe_repr = entity_repr[pe_id] or T("unknown")
trow.append(TD(pe_repr))
trows.append(trow)
# Remove button
if remove:
submit_row = TR(arrow,
TD(INPUT(_id="submit_delete_button",
_type="submit",
_class="tiny alert button",
_value=T("Remove"),
)
),
)
if use_realms:
submit_row.append(TD())
trows.append(submit_row)
# Assemble form
tbody = TBODY(trows)
rmvform = FORM(DIV(TABLE(thead, tbody,
_class="dataTable display"),
_id="table-container"))
else:
rmvform = FORM(DIV(T("No roles currently assigned to this user.")))
# Process Remove-Form
if rmvform.accepts(request.post_vars, session,
formname="rmv_user_%s_roles" % user_id):
removed = 0
for opt in rmvform.vars:
if rmvform.vars[opt] == "on" and opt.startswith("d_"):
membership_id = opt[2:]
query = (mtable.id == membership_id)
row = db(query).select(mtable.user_id,
mtable.group_id,
mtable.pe_id,
limitby=(0, 1)).first()
if row:
if use_realms:
pe_id = row.pe_id
else:
pe_id = []
auth.s3_withdraw_role(row.user_id,
row.group_id,
for_pe=pe_id)
removed += 1
if removed:
session.confirmation = T("%(count)s Roles of the user removed") % \
dict(count=removed)
redirect(r.url())
# Add form ----------------------------------------------------
# Subtitle
addtitle = T("Assign another Role")
if use_realms:
help_txt = "(%s)" % T("Default Realm = All Entities the User is a Staff Member of")
else:
help_txt = ""
trow = TR(TH(T("Role"), _colspan="2"))
if use_realms:
trow.append(TH(T("For Entity")))
thead = THEAD(trow)
# Roles selector
gtable = settings.table_group
query = (gtable.deleted != True) & \
(~(gtable.id.belongs(unassignable)))
rows = db(query).select(gtable.id, gtable.role)
select_grp = SELECT(OPTION(_value=None, _selected="selected"),
_name="group_id")
options = [(row.role, row.id)
for row in rows
if row.id not in unrestrictable or \
row.id not in assigned]
options.sort()
for role, gid in options:
select_grp.append(OPTION(role, _value=gid))
# Add button
submit_btn = INPUT(_id="submit_add_button",
_type="submit",
_class="tiny primary button",
_value=T("Add"),
)
# Assemble form
trow = TR(TD(select_grp, _colspan="2"), _class="odd")
srow = TR(arrow, TD(submit_btn))
if use_realms:
# Entity Selector
trow.append(TD(self._entity_select()))
srow.append(TD())
addform = FORM(DIV(TABLE(thead, TBODY(trow, srow),
_class="dataTable display")))
# Process Add-Form
if addform.accepts(request.post_vars, session,
formname="add_user_%s_roles" % user_id):
try:
group_id = int(addform.vars.group_id)
except ValueError:
group_id = None
pe_id = addform.vars.pe_id
if pe_id == "__NONE__" or not use_realms:
pe_id = None
if group_id in unrestrictable:
pe_id = 0
if group_id:
auth.s3_assign_role(user_id, group_id, for_pe=pe_id)
session.confirmation = T("Role assigned to User")
redirect(r.url())
# Action links
list_btn = A(T("Back to Users List"),
_href=URL(c="admin", f="user"),
_class="action-btn")
add_btn = A(T("Create Role"),
_href=URL(c="admin", f="role",
args="create"),
_class="action-lnk")
output = dict(title=title,
rmvtitle=rmvtitle,
rmvform=rmvform,
addtitle=addtitle,
help_txt=help_txt,
addform=addform,
list_btn=list_btn,
add_btn=add_btn)
current.response.view = "admin/membership_manage.html"
else:
r.error(501, current.ERROR.BAD_FORMAT)
else:
r.error(404, current.ERROR.BAD_RECORD)
return output
# -------------------------------------------------------------------------
def _users(self, r, **attr):
T = current.T
db = current.db
auth = current.auth
request = current.request
session = current.session
settings = auth.settings
userfield = settings.login_userfield
output = dict()
# Unrestrictable roles
sr = auth.get_system_roles()
unrestrictable = [sr.ADMIN, sr.ANONYMOUS, sr.AUTHENTICATED]
if r.record:
group = r.record
group_id = r.id
group_role = group.role
use_realms = auth.permission.entity_realm and \
group_id not in unrestrictable
assignable = group_id not in [sr.ANONYMOUS, sr.AUTHENTICATED]
if r.representation == "html":
arrow = TD(IMG(_src="/%s/static/img/arrow-turn.png" % request.application),
_style="text-align:center; vertical-align:middle; width:48px;")
# Get current memberships
mtable = settings.table_membership
utable = settings.table_user
query = (mtable.deleted != True) & \
(mtable.group_id == group_id) & \
(utable.deleted != True) & \
(mtable.user_id == utable.id)
if not use_realms:
query &= ((mtable.pe_id == None) | (mtable.pe_id == 0))
rows = db(query).select(mtable.id,
mtable.pe_id,
utable.id,
utable.first_name,
utable.last_name,
utable[userfield],
orderby=utable.first_name)
entities = [row[mtable.pe_id] for row in rows]
if use_realms:
entity_repr = self._entity_represent(entities)
else:
entity_repr = Storage()
assigned = [row[utable.id] for row in rows]
# Page title
title = "%s: %s" % (T("User with Role"), group_role)
# Remove-Form -------------------------------------------------
rmvtitle = T("Users with this Role")
if assigned:
# Table Header
trow = TR()
if assignable:
trow.append(TH())
trow.append(TH(T("Name")))
trow.append(TH(T("Username")))
if use_realms:
trow.append(TH(T("For Entity")))
thead = THEAD(trow)
# Rows
i = 0
trows = []
remove = False
for row in rows:
_class = i % 2 and "even" or "odd"
i += 1
trow = TR(_class=_class)
# User cannot remove themselves from the ADMIN role
if row[utable.id] == auth.user.id and \
group_id == sr.ADMIN:
removable = False
else:
removable = True
# Row selector
if assignable and removable:
remove = True
trow.append(TD(INPUT(_type="checkbox",
_name="d_%s" % row[mtable.id],
_class="remove_item")))
else:
trow.append(TD())
# Name
name = "%s %s" % (row[utable.first_name],
row[utable.last_name])
trow.append(TD(name))
# Username
uname = row[utable[userfield]]
trow.append(TD(uname))
# Entity
if use_realms:
pe_id = row[mtable.pe_id]
pe_repr = entity_repr[pe_id] or T("unknown")
trow.append(TD(pe_repr))
trows.append(trow)
# Remove button
if assignable and remove:
submit_row = TR(arrow,
TD(INPUT(_id="submit_delete_button",
_type="submit",
_class="tiny alert button",
_value=T("Remove"),
),
),
TD())
if use_realms:
submit_row.append(TD())
trows.append(submit_row)
# Assemble form
tbody = TBODY(trows)
rmvform = FORM(DIV(TABLE(thead, tbody,
_class="dataTable display")))
else:
rmvform = FORM(DIV(T("No users with this role at the moment.")))
# Process Remove-Form
if rmvform.accepts(request.post_vars, session,
formname="rmv_role_%s_users" % group_id):
removed = 0
for opt in rmvform.vars:
if rmvform.vars[opt] == "on" and opt.startswith("d_"):
membership_id = opt[2:]
query = mtable.id == membership_id
row = db(query).select(mtable.user_id,
mtable.group_id,
mtable.pe_id,
limitby=(0, 1)).first()
if row:
auth.s3_withdraw_role(row.user_id,
row.group_id,
for_pe=row.pe_id)
removed += 1
if removed:
session.confirmation = T("%(count)s Users removed from Role") % \
dict(count=removed)
redirect(r.url())
# Add-Form ----------------------------------------------------
# Subtitle and help text
addtitle = T("Assign Role to a User")
if use_realms and assignable:
help_txt = "(%s)" % T("Default Realm = All Entities the User is a Staff Member of")
else:
help_txt = ""
# Form header
trow = TR(TH(T("User"), _colspan="2"))
if use_realms:
trow.append(TH(T("For Entity")))
thead = THEAD(trow)
# User selector
utable = settings.table_user
query = (utable.deleted != True)
if group_id in unrestrictable and assigned:
query &= (~(utable.id.belongs(assigned)))
rows = db(query).select(utable.id,
utable.first_name,
utable.last_name,
utable[userfield])
if rows and assignable:
select_usr = SELECT(OPTION("",
_value=None,
_selected="selected"),
_name="user_id")
options = [("%s (%s %s)" % (row[userfield],
row.first_name,
row.last_name),
row.id) for row in rows]
options.sort()
for name, uid in options:
select_usr.append(OPTION(name, _value=uid))
# Add button
submit_btn = INPUT(_id="submit_add_button",
_type="submit",
_class="tiny primary button",
_value=T("Add"))
# Assemble form
trow = TR(TD(select_usr, _colspan="2"), _class="odd")
srow = TR(arrow,
TD(submit_btn))
if use_realms:
# Entity Selector
trow.append(TD(self._entity_select()))
srow.append(TD())
addform = FORM(DIV(TABLE(thead, TBODY(trow, srow),
_class="dataTable display")))
elif not assignable:
addform = FORM(DIV(T("This role can not be assigned to users.")))
else:
addform = FORM(DIV(T("No further users can be assigned.")))
# Process Add-form
if addform.accepts(request.post_vars, session,
formname="add_role_%s_users" % group_id):
pe_id = addform.vars.pe_id
if pe_id == "__NONE__":
pe_id = None
if group_id in unrestrictable:
pe_id = 0
user_id = addform.vars.user_id
if user_id:
auth.s3_assign_role(user_id, group_id, for_pe=pe_id)
session.confirmation = T("User added to Role")
redirect(r.url())
# Action links
list_btn = A(T("Back to Roles List"),
_href=URL(c="admin", f="role"),
_class="action-btn")
if group_id != sr.ADMIN:
edit_btn = A(T("Edit Permissions for %(role)s") % dict(role=group_role),
_href=URL(c="admin", f="role",
args=[group_id]),
_class="action-lnk")
else:
edit_btn = ""
add_btn = A(T("Create User"),
_href=URL(c="admin", f="user",
args="create"),
_class="action-lnk")
# Assemble output
output = dict(title=title,
rmvtitle=rmvtitle,
rmvform=rmvform,
addtitle=addtitle,
help_txt=help_txt,
addform=addform,
list_btn=list_btn,
edit_btn=edit_btn,
add_btn=add_btn)
current.response.view = "admin/membership_manage.html"
else:
r.error(501, current.ERROR.BAD_FORMAT)
else:
r.error(404, current.ERROR.BAD_RECORD)
return output
# -------------------------------------------------------------------------
def _entity_select(self):
""" Get a SELECT of person entities for realm assignment """
T = current.T
s3db = current.s3db
auth = current.auth
system_roles = auth.get_system_roles()
has_role = auth.s3_has_role
is_admin = has_role(system_roles.ADMIN)
if is_admin:
all_entities = OPTION(T("All Entities"), _value=0)
else:
all_entities = ""
select = SELECT(
OPTGROUP(
OPTION(T("Default Realm"), _value="__NONE__", _selected="selected"),
all_entities,
_label=T("Multiple")),
_name="pe_id")
table = s3db.table("pr_pentity")
if table is None:
return select
instance_type_nice = table.instance_type.represent
types = current.deployment_settings.get_auth_realm_entity_types()
pe_ids = []
if not is_admin:
# Limit selection to the realms of the role
if has_role(system_roles.ORG_GROUP_ADMIN):
realms = auth.user.realms[system_roles.ORG_GROUP_ADMIN]
if realms:
pe_ids.extend(realms)
if has_role(system_roles.ORG_ADMIN):
realms = auth.user.realms[system_roles.ORG_ADMIN]
if realms:
pe_ids.extend(realms)
# Retrieve all entities, grouped by type
entities = s3db.pr_get_entities(pe_ids=pe_ids, types=types, group=True)
for instance_type in types:
if instance_type in entities:
optgroup = OPTGROUP(_label=instance_type_nice(instance_type))
items = [(n, i) for i, n in entities[instance_type].items()]
if not items:
continue
items.sort()
for name, pe_id in items:
optgroup.append(OPTION(name, _value=pe_id))
select.append(optgroup)
return select
# -------------------------------------------------------------------------
def _entity_represent(self, entities):
"""
Get a representation dict for a list of pe_ids
@param entities: the pe_ids of the entities
"""
T = current.T
pe_ids = [e for e in entities if e is not None and e != 0]
if pe_ids:
representation = current.s3db.pr_get_entities(pe_ids=pe_ids)
else:
representation = Storage()
representation[None] = T("Default Realm")
representation[0] = T("All Entities")
return representation
# =============================================================================
class S3GroupedOptionsWidget(OptionsWidget):
"""
A custom Field widget to create a SELECT element with grouped options.
"""
@classmethod
def widget(cls, field, value, options, **attributes):
"""
Generates a SELECT tag, with OPTIONs grouped by OPTGROUPs
@param field: the field needing the widget
@param value: the currently selected value
@param options: a list of tuples, each either (label, value)
or (group label, {value: label, ...}) for an OPTGROUP
@param attributes: any other attributes to be applied
@return: SELECT object
"""
default = dict(value=value)
attr = cls._attributes(field, default, **attributes)
select_items = []
for option in options:
if isinstance(option[1], dict):
items = [(v, k) for k, v in option[1].items()]
if not items:
continue
items.sort()
opts = [OPTION(v, _value=k) for v, k in items]
select_items.append(OPTGROUP(*opts, _label=option[0]))
else:
select_items.append(OPTION(option[1], _label=option[0]))
return SELECT(select_items, **attr)
# =============================================================================
class S3EntityRoleManager(S3Method):
""" Entity/User role manager """
ENTITY_TYPES = ["org_organisation",
"org_office",
"inv_warehouse",
"hms_hospital",
"pr_group",
]
def __init__(self, *args, **kwargs):
""" Constructor """
super(S3EntityRoleManager, self).__init__(*args, **kwargs)
# Dictionary of pentities this admin can manage
self.realm = self.get_realm()
# The list of user accounts linked to pentities in this realm
self.realm_users = current.s3db.pr_realm_users(self.realm)
# Create the dictionary of roles
self.roles = {}
self.modules = self.get_modules()
self.acls = self.get_access_levels()
for module_uid, module_label in self.modules.items():
for acl_uid, acl_label in self.acls.items():
role_uid = "%s_%s" % (module_uid, acl_uid)
self.roles[role_uid] = {
"module": {
"uid": module_uid,
"label": module_label
},
"acl": {
"uid": acl_uid,
"label": acl_label
}
}
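# e.g. module "staff" combined with access level "reader"
# yields the role_uid "staff_reader"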
# -------------------------------------------------------------------------
@classmethod
def set_method(cls, r, entity=None, record_id=None):
"""
Plug-in OrgAdmin Role Managers when appropriate
@param r: the S3Request
@param entity: override target entity (default: r.tablename)
@param record_id: specify target record ID (only for OU's)
"""
s3db = current.s3db
auth = current.auth
if not current.deployment_settings.get_auth_entity_role_manager() or \
auth.user is None:
return False
sr = auth.get_system_roles()
realms = auth.user.realms or Storage()
ORG_ADMIN = sr.ORG_ADMIN
admin = sr.ADMIN in realms
org_admin = ORG_ADMIN in realms
if admin or org_admin:
if entity is not None:
tablename = entity
record = None
else:
tablename = r.tablename
record = r.record
all_entities = admin or org_admin and realms[ORG_ADMIN] is None
if not all_entities and tablename in cls.ENTITY_TYPES:
if not record and record_id is not None:
# Try to load the record and check pe_id
table = s3db.table(tablename)
if table and "pe_id" in table.fields:
record = current.db(table._id==record_id).select(table.pe_id,
limitby = (0, 1)).first()
if record and record.pe_id not in realms[ORG_ADMIN]:
return False
if entity is not None:
# Configure as custom method for this resource
prefix, name = tablename.split("_", 1)
s3db.set_method(prefix, name, method="roles", action=cls)
elif tablename in cls.ENTITY_TYPES:
# Configure as method handler for this request
r.set_handler("roles", cls)
else:
# Unsupported entity
return False
return True
# -------------------------------------------------------------------------
def apply_method(self, r, **attr):
"""
"""
if self.method == "roles" and \
(r.tablename in self.ENTITY_TYPES + ["pr_person"]):
context = self.get_context_data(r, **attr)
else:
r.error(405, current.ERROR.BAD_METHOD)
# Set the default view
current.response.view = "admin/manage_roles.html"
return context
# -------------------------------------------------------------------------
def get_context_data(self, r, **attr):
"""
Collect the context data (roles, assigned roles, pagination)
for the role manager view
@return: dictionary for the view
{
# All the possible roles
"roles": {
"staff_reader": {
"module": {
"uid": "staff",
"label": "Staff"
},
...
},
...
},
# The roles currently assigned to users for the entity/entities
"assigned_roles": {
"1": [
"staff_reader",
"project_editor",
...
],
...
},
"pagination_list": [
(
"User One",
"1"
),
...
],
# The object (user/entity) we are assigning roles for
"foreign_object": {
"id": "1",
"name": "User One"
}
or
"foreign_object": {
"id": "70",
"name": "Organisation Seventy"
}
}
"""
T = current.T
# organisation or office entity
self.entity = self.get_entity()
# user account to assign roles to
self.user = self.get_user()
# roles already assigned to a user or users
self.assigned_roles = self.get_assigned_roles()
# The foreign object is the one selected in the role form
# for a person this is the entity
# for an entity (organisation or office) this is a user
self.foreign_object = self.get_foreign_object()
form = self.get_form()
# if we are editing roles, set those assigned roles as initial values
# for the form
form.vars.update(self.get_form_vars())
if form.accepts(r.post_vars, current.session):
before = self.assigned_roles[self.foreign_object["id"]] if self.foreign_object else []
after = ["%s_%s" % (mod_uid, acl_uid) for mod_uid, acl_uid
in form.vars.items()
if mod_uid in self.modules.keys()
and acl_uid in self.acls.keys()]
# either both values will have been specified or one will
# be supplied by the form (for roles on new objects)
user_id = self.user["id"] if self.user else form.vars.foreign_object
entity_id = self.entity["id"] if self.entity else form.vars.foreign_object
self.update_roles(user_id, entity_id, before, after)
current.session.confirmation = T("Roles updated")
redirect(r.url(vars={}))
context = {"roles": self.roles,
"foreign_object": self.foreign_object,
"form": form,
"title": T("Roles"),
}
if not self.foreign_object:
# how many assigned roles to show per page
pagination_size = int(r.get_vars.get("page_size", 4))
# what page of assigned roles to view
pagination_offset = int(r.get_vars.get("page_offset", 0))
# the number of pages of assigned roles
import math
pagination_pages = int(math.ceil(len(self.assigned_roles) / float(pagination_size)))
# the list of objects to show on this page sorted by name
pagination_list = [(self.objects[id], id) for id in self.assigned_roles]
pagination_list = sorted(pagination_list)[pagination_offset * pagination_size:pagination_offset * pagination_size + pagination_size]
context.update({"assigned_roles": self.assigned_roles,
"pagination_size": pagination_size,
"pagination_offset": pagination_offset,
"pagination_list": pagination_list,
"pagination_pages": pagination_pages,
})
return context
# -------------------------------------------------------------------------
def get_realm(self):
"""
Returns the realm (list of pe_ids) that this user can manage
or raises a permission error if the user is not logged in
"""
auth = current.auth
system_roles = auth.get_system_roles()
ORG_ADMIN = system_roles.ORG_ADMIN
ADMIN = system_roles.ADMIN
if auth.user:
realms = auth.user.realms
else:
# User is not logged in
auth.permission.fail()
# Get the realm from the current realms
if ADMIN in realms:
return realms[ADMIN]
elif ORG_ADMIN in realms:
return realms[ORG_ADMIN]
else:
# raise an error here - user is not permitted
# to access the role matrix
auth.permission.fail()
# -------------------------------------------------------------------------
def get_modules(self):
"""
This returns an OrderedDict of modules with their uid as the key,
e.g., {hrm: "Human Resources",}
@return: OrderedDict
"""
return current.deployment_settings.get_auth_role_modules()
# -------------------------------------------------------------------------
def get_access_levels(self):
"""
            This returns an OrderedDict of access levels with their uid as
            the key, e.g., {reader: "Reader",}
@return: OrderedDict
"""
return current.deployment_settings.get_auth_access_levels()
# -------------------------------------------------------------------------
def get_assigned_roles(self, entity_id=None, user_id=None):
"""
If an entity ID is provided, the dict will be the users
with roles assigned to that entity. The key will be the user IDs.
If a user ID is provided, the dict will be the entities the
user has roles for. The key will be the entity pe_ids.
If both an entity and user ID is provided, the dict will be
the roles assigned to that user for that entity. The key will be
the user ID.
@type entity_id: int
@param entity_id: the pe_id of the entity
@type user_id: int
@param user_id: id of the user account
@return: dict
{
1: [
"staff_reader",
"project_reader",
...
]
2: [
...
],
...
}
"""
if not entity_id and not user_id:
raise RuntimeError("Not enough arguments")
mtable = current.auth.settings.table_membership
gtable = current.auth.settings.table_group
utable = current.auth.settings.table_user
query = (mtable.deleted != True) & \
(gtable.deleted != True) & \
(gtable.id == mtable.group_id) & \
(utable.deleted != True) & \
(utable.id == mtable.user_id)
if user_id:
field = mtable.pe_id
query &= (mtable.user_id == user_id) & \
(mtable.pe_id != None)
if entity_id:
field = utable.id
query &= (mtable.pe_id == entity_id)
rows = current.db(query).select(utable.id,
gtable.uuid,
mtable.pe_id)
assigned_roles = OrderedDict()
roles = self.roles
for row in rows:
object_id = row[field]
role_uid = row[gtable.uuid]
if role_uid in roles:
if object_id not in assigned_roles:
assigned_roles[object_id] = []
assigned_roles[object_id].append(role_uid)
return assigned_roles
# -------------------------------------------------------------------------
def get_form(self):
"""
            Constructs the role form
@return: SQLFORM
"""
fields = self.get_form_fields()
form = SQLFORM.factory(*fields,
table_name="roles",
_id="role-form",
_action="",
_method="POST")
return form
# -------------------------------------------------------------------------
def get_form_fields(self):
"""
@todo: description?
@return: list of Fields
"""
fields = []
requires = IS_EMPTY_OR(IS_IN_SET(self.acls.keys(),
labels=self.acls.values()))
for module_uid, module_label in self.modules.items():
field = Field(module_uid,
label=module_label,
requires=requires)
fields.append(field)
return fields
# -------------------------------------------------------------------------
def get_form_vars(self):
"""
            Get the roles currently assigned to a user/entity and put them
            into a Storage object for the form
@return: Storage() to pre-populate the role form
"""
form_vars = Storage()
fo = self.foreign_object
roles = self.roles
if fo and fo["id"] in self.assigned_roles:
for role in self.assigned_roles[fo["id"]]:
mod_uid = roles[role]["module"]["uid"]
acl_uid = roles[role]["acl"]["uid"]
form_vars[mod_uid] = acl_uid
return form_vars
# -------------------------------------------------------------------------
def update_roles(self, user_id, entity_id, before, after):
"""
            Update the user's roles on the entity, based on the
            before/after role lists
@param user_id: id (pk) of the user account to modify
@param entity_id: id of the pentity to modify roles for
@param before: list of role_uids (current values for the user)
@param after: list of role_uids (new values from the admin)
"""
auth = current.auth
assign_role = auth.s3_assign_role
withdraw_role = auth.s3_withdraw_role
for role_uid in before:
# If role_uid is not in after,
# the access level has changed.
if role_uid not in after:
withdraw_role(user_id, role_uid, entity_id)
for role_uid in after:
# If the role_uid is not in before,
# the access level has changed
if role_uid != "None" and role_uid not in before:
assign_role(user_id, role_uid, entity_id)
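# Editor's sketch (not part of the original module): the diff logic of
# update_roles() above, reduced to plain Python. Role names are hypothetical.
def _example_role_diff(before, after):
    withdraw = [uid for uid in before if uid not in after]
    assign = [uid for uid in after if uid != "None" and uid not in before]
    return withdraw, assign
# _example_role_diff(["staff_reader"], ["staff_editor", "project_editor"])
# -> (["staff_reader"], ["staff_editor", "project_editor"])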
# =============================================================================
class S3OrgRoleManager(S3EntityRoleManager):
def __init__(self, *args, **kwargs):
super(S3OrgRoleManager, self).__init__(*args, **kwargs)
# dictionary {id: name, ...} of user accounts
self.objects = current.s3db.pr_realm_users(None)
# -------------------------------------------------------------------------
def get_context_data(self, r, **attr):
"""
Override to set the context from the perspective of an entity
@return: dictionary for view
"""
context = super(S3OrgRoleManager, self).get_context_data(r, **attr)
context["foreign_object_label"] = current.T("Users")
return context
# -------------------------------------------------------------------------
def get_entity(self):
"""
We are on an entity (org/office) so we can fetch the entity
details from the request record.
@return: dictionary containing the ID and name of the entity
"""
entity = dict(id=int(self.request.record.pe_id))
entity["name"] = current.s3db.pr_get_entities(pe_ids=[entity["id"]],
types=self.ENTITY_TYPES)[entity["id"]]
return entity
# -------------------------------------------------------------------------
def get_user(self):
"""
            Look up the user account specified by the "edit" request parameter
@return: dictionary containing the ID and username/email of
the user account.
"""
user = self.request.get_vars.get("edit", None)
if user:
user = dict(id=int(user), name=self.objects.get(int(user), None))
return user
# -------------------------------------------------------------------------
def get_foreign_object(self):
"""
We are on an entity so our target is a user account.
@return: dictionary with ID and username/email of user account
"""
return self.user
# -------------------------------------------------------------------------
def get_assigned_roles(self):
"""
Override to get assigned roles for this entity
@return: dictionary with user IDs as the keys.
"""
assigned_roles = super(S3OrgRoleManager, self).get_assigned_roles
return assigned_roles(entity_id=self.entity["id"])
# -------------------------------------------------------------------------
def get_form_fields(self):
"""
Override the standard method so we can add the user-selection
field to the list.
@return: list of Fields
"""
T = current.T
fields = super(S3OrgRoleManager, self).get_form_fields()
if not self.user:
assigned_roles = self.assigned_roles
realm_users = Storage([(k, v)
for k, v in self.realm_users.items()
if k not in assigned_roles])
nonrealm_users = Storage([(k, v)
for k, v in self.objects.items()
if k not in assigned_roles and \
k not in self.realm_users])
options = [("", ""),
(T("Users in my Organizations"), realm_users),
(T("Other Users"), nonrealm_users)]
object_field = Field("foreign_object",
T("User"),
requires=IS_IN_SET(self.objects),
widget=lambda field, value:
S3GroupedOptionsWidget.widget(field,
value,
options=options))
fields.insert(0, object_field)
return fields
# =============================================================================
class S3PersonRoleManager(S3EntityRoleManager):
""" Role Manager for Person Records """
def __init__(self, *args, **kwargs):
""" Constructor """
super(S3PersonRoleManager, self).__init__(*args, **kwargs)
# dictionary {id: name, ...} of pentities
self.objects = current.s3db.pr_get_entities(types=self.ENTITY_TYPES)
# -------------------------------------------------------------------------
def get_context_data(self, r, **attr):
"""
Override to set the context from the perspective of a person
@return: dictionary for view
"""
context = super(S3PersonRoleManager, self).get_context_data(r, **attr)
context["foreign_object_label"] = current.T("Organizations / Teams / Facilities")
return context
# -------------------------------------------------------------------------
def get_entity(self):
"""
An entity needs to be specified with the "edit" query string
parameter.
@return: dictionary with pe_id and name of the org/office.
"""
entity = self.request.get_vars.get("edit", None)
if entity:
entity = dict(id=int(entity),
name=self.objects.get(int(entity), None))
return entity
# -------------------------------------------------------------------------
def get_user(self):
"""
We are on a person account so we need to find the associated user
account.
@return: dictionary with ID and username/email of the user account
"""
settings = current.auth.settings
utable = settings.table_user
ptable = current.s3db.pr_person_user
pe_id = int(self.request.record.pe_id)
userfield = settings.login_userfield
query = (ptable.pe_id == pe_id) & \
(ptable.user_id == utable.id)
record = current.db(query).select(utable.id,
utable[userfield],
limitby=(0, 1)).first()
return dict(id=record.id,
name=record[utable[userfield]]) if record else None
# -------------------------------------------------------------------------
def get_foreign_object(self):
"""
We are on a user/person so we want to target an entity (org/office)
"""
return self.entity
# -------------------------------------------------------------------------
def get_assigned_roles(self):
"""
@todo: description?
@return: dictionary of assigned roles with entity pe_id as the keys
"""
user_id = self.user["id"]
return super(S3PersonRoleManager, self).get_assigned_roles(user_id=user_id)
# -------------------------------------------------------------------------
def get_form_fields(self):
"""
Return a list of fields, including a field for selecting
an organisation or office.
@return: list of Fields
"""
s3db = current.s3db
fields = super(S3PersonRoleManager, self).get_form_fields()
if not self.entity:
options = s3db.pr_get_entities(pe_ids=self.realm,
types=self.ENTITY_TYPES,
group=True)
nice_name = s3db.table("pr_pentity").instance_type.represent
# filter out options that already have roles assigned
filtered_options = []
for entity_type, entities in options.items():
entities = Storage([(entity_id, entity_name)
for entity_id, entity_name
in entities.items()
if entity_id not in self.assigned_roles])
filtered_options.append((nice_name(entity_type), entities))
object_field = Field("foreign_object",
current.T("Entity"),
requires=IS_IN_SET(self.objects),
widget=lambda field, value:
S3GroupedOptionsWidget.widget(field,
value,
options=filtered_options))
fields.insert(0, object_field)
return fields
# END =========================================================================
| bobrock/eden | modules/s3/s3aaa.py | Python | mit | 358,316 |
#!/usr/bin/env python
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import logging
import subprocess
import platform
import time
def ShouldStartXvfb():
return platform.system() == 'Linux'
def StartXvfb():
display = ':99'
xvfb_command = ['Xvfb', display, '-screen', '0', '1024x769x24', '-ac']
xvfb_process = subprocess.Popen(
xvfb_command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
time.sleep(0.2)
returncode = xvfb_process.poll()
if returncode is None:
os.environ['DISPLAY'] = display
else:
logging.error('Xvfb did not start, returncode: %s, stdout:\n%s',
returncode, xvfb_process.stdout.read())
xvfb_process = None
return xvfb_process
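# Editor's sketch (not part of the original file): typical usage is to start
# Xvfb when headless, do the display-dependent work, then clean the process
# up. The work itself is elided here.
if __name__ == '__main__':
  xvfb_process = StartXvfb() if ShouldStartXvfb() else None
  try:
    pass  # run display-dependent code here
  finally:
    if xvfb_process:
      xvfb_process.kill()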
| mrtnrdl/.macdots | scripts/bin/platform-tools/systrace/catapult/common/py_utils/py_utils/xvfb.py | Python | unlicense | 828 |
import os
import twisted
import six
from twisted.trial import unittest
from twisted.protocols.policies import WrappingFactory
from twisted.python.filepath import FilePath
from twisted.internet import reactor, defer, error
from twisted.web import server, static, util, resource
from twisted.web.test.test_webclient import ForeverTakingResource, \
NoLengthResource, HostHeaderResource, \
PayloadResource, BrokenDownloadResource
from twisted.protocols.ftp import FTPRealm, FTPFactory
from twisted.cred import portal, checkers, credentials
from twisted.protocols.ftp import FTPClient, ConnectionLost
from w3lib.url import path_to_file_uri
from scrapy import twisted_version
from scrapy.core.downloader.handlers import DownloadHandlers
from scrapy.core.downloader.handlers.file import FileDownloadHandler
from scrapy.core.downloader.handlers.http import HTTPDownloadHandler, HttpDownloadHandler
from scrapy.core.downloader.handlers.http10 import HTTP10DownloadHandler
from scrapy.core.downloader.handlers.http11 import HTTP11DownloadHandler
from scrapy.core.downloader.handlers.s3 import S3DownloadHandler
from scrapy.core.downloader.handlers.ftp import FTPDownloadHandler
from scrapy.spiders import Spider
from scrapy.http import Request
from scrapy.settings import Settings
from scrapy import optional_features
from scrapy.utils.test import get_crawler
from scrapy.exceptions import NotConfigured
from tests.mockserver import MockServer
from tests.spiders import SingleRequestSpider
class DummyDH(object):
def __init__(self, crawler):
pass
class OffDH(object):
def __init__(self, crawler):
raise NotConfigured
class LoadTestCase(unittest.TestCase):
def test_enabled_handler(self):
handlers = {'scheme': 'tests.test_downloader_handlers.DummyDH'}
crawler = get_crawler(settings_dict={'DOWNLOAD_HANDLERS': handlers})
dh = DownloadHandlers(crawler)
self.assertIn('scheme', dh._schemes)
for scheme in handlers: # force load handlers
dh._get_handler(scheme)
self.assertIn('scheme', dh._handlers)
self.assertNotIn('scheme', dh._notconfigured)
def test_not_configured_handler(self):
handlers = {'scheme': 'tests.test_downloader_handlers.OffDH'}
crawler = get_crawler(settings_dict={'DOWNLOAD_HANDLERS': handlers})
dh = DownloadHandlers(crawler)
self.assertIn('scheme', dh._schemes)
for scheme in handlers: # force load handlers
dh._get_handler(scheme)
self.assertNotIn('scheme', dh._handlers)
self.assertIn('scheme', dh._notconfigured)
def test_disabled_handler(self):
handlers = {'scheme': None}
crawler = get_crawler(settings_dict={'DOWNLOAD_HANDLERS': handlers})
dh = DownloadHandlers(crawler)
self.assertNotIn('scheme', dh._schemes)
for scheme in handlers: # force load handlers
dh._get_handler(scheme)
self.assertNotIn('scheme', dh._handlers)
self.assertIn('scheme', dh._notconfigured)
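# Editor's sketch: the DOWNLOAD_HANDLERS setting exercised above maps a URL
# scheme to a handler class path, and None disables the scheme. The values
# below are illustrative, mirroring one real handler path from the imports.
EXAMPLE_DOWNLOAD_HANDLERS = {
    'ftp': 'scrapy.core.downloader.handlers.ftp.FTPDownloadHandler',
    's3': None,  # disable s3:// downloads entirely
}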
class FileTestCase(unittest.TestCase):
def setUp(self):
self.tmpname = self.mktemp()
fd = open(self.tmpname + '^', 'w')
fd.write('0123456789')
fd.close()
self.download_request = FileDownloadHandler(Settings()).download_request
def test_download(self):
def _test(response):
self.assertEquals(response.url, request.url)
self.assertEquals(response.status, 200)
self.assertEquals(response.body, '0123456789')
request = Request(path_to_file_uri(self.tmpname + '^'))
assert request.url.upper().endswith('%5E')
return self.download_request(request, Spider('foo')).addCallback(_test)
def test_non_existent(self):
request = Request('file://%s' % self.mktemp())
d = self.download_request(request, Spider('foo'))
return self.assertFailure(d, IOError)
class HttpTestCase(unittest.TestCase):
download_handler_cls = HTTPDownloadHandler
def setUp(self):
name = self.mktemp()
os.mkdir(name)
FilePath(name).child("file").setContent("0123456789")
r = static.File(name)
r.putChild("redirect", util.Redirect("/file"))
r.putChild("wait", ForeverTakingResource())
r.putChild("hang-after-headers", ForeverTakingResource(write=True))
r.putChild("nolength", NoLengthResource())
r.putChild("host", HostHeaderResource())
r.putChild("payload", PayloadResource())
r.putChild("broken", BrokenDownloadResource())
self.site = server.Site(r, timeout=None)
self.wrapper = WrappingFactory(self.site)
self.port = reactor.listenTCP(0, self.wrapper, interface='127.0.0.1')
self.portno = self.port.getHost().port
self.download_handler = self.download_handler_cls(Settings())
self.download_request = self.download_handler.download_request
@defer.inlineCallbacks
def tearDown(self):
yield self.port.stopListening()
if hasattr(self.download_handler, 'close'):
yield self.download_handler.close()
def getURL(self, path):
return "http://127.0.0.1:%d/%s" % (self.portno, path)
def test_download(self):
request = Request(self.getURL('file'))
d = self.download_request(request, Spider('foo'))
d.addCallback(lambda r: r.body)
d.addCallback(self.assertEquals, "0123456789")
return d
def test_download_head(self):
request = Request(self.getURL('file'), method='HEAD')
d = self.download_request(request, Spider('foo'))
d.addCallback(lambda r: r.body)
d.addCallback(self.assertEquals, '')
return d
def test_redirect_status(self):
request = Request(self.getURL('redirect'))
d = self.download_request(request, Spider('foo'))
d.addCallback(lambda r: r.status)
d.addCallback(self.assertEquals, 302)
return d
def test_redirect_status_head(self):
request = Request(self.getURL('redirect'), method='HEAD')
d = self.download_request(request, Spider('foo'))
d.addCallback(lambda r: r.status)
d.addCallback(self.assertEquals, 302)
return d
@defer.inlineCallbacks
def test_timeout_download_from_spider(self):
spider = Spider('foo')
meta = {'download_timeout': 0.2}
# client connects but no data is received
request = Request(self.getURL('wait'), meta=meta)
d = self.download_request(request, spider)
yield self.assertFailure(d, defer.TimeoutError, error.TimeoutError)
        # client connects, server sends headers and some body bytes but then hangs
request = Request(self.getURL('hang-after-headers'), meta=meta)
d = self.download_request(request, spider)
yield self.assertFailure(d, defer.TimeoutError, error.TimeoutError)
def test_host_header_not_in_request_headers(self):
def _test(response):
self.assertEquals(response.body, '127.0.0.1:%d' % self.portno)
self.assertEquals(request.headers, {})
request = Request(self.getURL('host'))
return self.download_request(request, Spider('foo')).addCallback(_test)
    def test_host_header_set_in_request_headers(self):
        def _test(response):
            self.assertEquals(response.body, 'example.com')
            self.assertEquals(request.headers.get('Host'), 'example.com')
        request = Request(self.getURL('host'), headers={'Host': 'example.com'})
        return self.download_request(request, Spider('foo')).addCallback(_test)
def test_payload(self):
body = '1'*100 # PayloadResource requires body length to be 100
request = Request(self.getURL('payload'), method='POST', body=body)
d = self.download_request(request, Spider('foo'))
d.addCallback(lambda r: r.body)
d.addCallback(self.assertEquals, body)
return d
class DeprecatedHttpTestCase(HttpTestCase):
"""HTTP 1.0 test case"""
download_handler_cls = HttpDownloadHandler
class Http10TestCase(HttpTestCase):
"""HTTP 1.0 test case"""
download_handler_cls = HTTP10DownloadHandler
class Http11TestCase(HttpTestCase):
"""HTTP 1.1 test case"""
download_handler_cls = HTTP11DownloadHandler
if 'http11' not in optional_features:
skip = 'HTTP1.1 not supported in twisted < 11.1.0'
def test_download_without_maxsize_limit(self):
request = Request(self.getURL('file'))
d = self.download_request(request, Spider('foo'))
d.addCallback(lambda r: r.body)
d.addCallback(self.assertEquals, "0123456789")
return d
@defer.inlineCallbacks
def test_download_with_maxsize(self):
request = Request(self.getURL('file'))
        # 10 is the minimal size for this request; the limit only counts
        # the response body (regardless of headers)
d = self.download_request(request, Spider('foo', download_maxsize=10))
d.addCallback(lambda r: r.body)
d.addCallback(self.assertEquals, "0123456789")
yield d
d = self.download_request(request, Spider('foo', download_maxsize=9))
yield self.assertFailure(d, defer.CancelledError, error.ConnectionAborted)
@defer.inlineCallbacks
def test_download_with_maxsize_per_req(self):
meta = {'download_maxsize': 2}
request = Request(self.getURL('file'), meta=meta)
d = self.download_request(request, Spider('foo'))
yield self.assertFailure(d, defer.CancelledError, error.ConnectionAborted)
@defer.inlineCallbacks
def test_download_with_small_maxsize_per_spider(self):
request = Request(self.getURL('file'))
d = self.download_request(request, Spider('foo', download_maxsize=2))
yield self.assertFailure(d, defer.CancelledError, error.ConnectionAborted)
def test_download_with_large_maxsize_per_spider(self):
request = Request(self.getURL('file'))
d = self.download_request(request, Spider('foo', download_maxsize=100))
d.addCallback(lambda r: r.body)
d.addCallback(self.assertEquals, "0123456789")
return d
class Http11MockServerTestCase(unittest.TestCase):
"""HTTP 1.1 test case with MockServer"""
if 'http11' not in optional_features:
skip = 'HTTP1.1 not supported in twisted < 11.1.0'
def setUp(self):
self.mockserver = MockServer()
self.mockserver.__enter__()
def tearDown(self):
self.mockserver.__exit__(None, None, None)
@defer.inlineCallbacks
def test_download_with_content_length(self):
crawler = get_crawler(SingleRequestSpider)
        # http://localhost:8998/partial sets Content-Length to 1024; use
        # download_maxsize=1000 to avoid downloading it
yield crawler.crawl(seed=Request(url='http://localhost:8998/partial', meta={'download_maxsize': 1000}))
failure = crawler.spider.meta['failure']
self.assertIsInstance(failure.value, defer.CancelledError)
@defer.inlineCallbacks
def test_download(self):
crawler = get_crawler(SingleRequestSpider)
yield crawler.crawl(seed=Request(url='http://localhost:8998'))
failure = crawler.spider.meta.get('failure')
self.assertTrue(failure == None)
reason = crawler.spider.meta['close_reason']
self.assertTrue(reason, 'finished')
@defer.inlineCallbacks
def test_download_gzip_response(self):
if six.PY2 and twisted_version > (12, 3, 0):
crawler = get_crawler(SingleRequestSpider)
body = '1'*100 # PayloadResource requires body length to be 100
request = Request('http://localhost:8998/payload', method='POST', body=body, meta={'download_maxsize': 50})
yield crawler.crawl(seed=request)
failure = crawler.spider.meta['failure']
# download_maxsize < 100, hence the CancelledError
self.assertIsInstance(failure.value, defer.CancelledError)
request.headers.setdefault('Accept-Encoding', 'gzip,deflate')
request = request.replace(url='http://localhost:8998/xpayload')
yield crawler.crawl(seed=request)
# download_maxsize = 50 is enough for the gzipped response
failure = crawler.spider.meta.get('failure')
self.assertTrue(failure == None)
reason = crawler.spider.meta['close_reason']
self.assertTrue(reason, 'finished')
else:
raise unittest.SkipTest("xpayload and payload endpoint only enabled for twisted > 12.3.0 and python 2.x")
class UriResource(resource.Resource):
"""Return the full uri that was requested"""
def getChild(self, path, request):
return self
def render(self, request):
return request.uri
class HttpProxyTestCase(unittest.TestCase):
download_handler_cls = HTTPDownloadHandler
def setUp(self):
site = server.Site(UriResource(), timeout=None)
wrapper = WrappingFactory(site)
self.port = reactor.listenTCP(0, wrapper, interface='127.0.0.1')
self.portno = self.port.getHost().port
self.download_handler = self.download_handler_cls(Settings())
self.download_request = self.download_handler.download_request
@defer.inlineCallbacks
def tearDown(self):
yield self.port.stopListening()
if hasattr(self.download_handler, 'close'):
yield self.download_handler.close()
def getURL(self, path):
return "http://127.0.0.1:%d/%s" % (self.portno, path)
def test_download_with_proxy(self):
def _test(response):
self.assertEquals(response.status, 200)
self.assertEquals(response.url, request.url)
self.assertEquals(response.body, 'http://example.com')
http_proxy = self.getURL('')
request = Request('http://example.com', meta={'proxy': http_proxy})
return self.download_request(request, Spider('foo')).addCallback(_test)
def test_download_with_proxy_https_noconnect(self):
def _test(response):
self.assertEquals(response.status, 200)
self.assertEquals(response.url, request.url)
self.assertEquals(response.body, 'https://example.com')
http_proxy = '%s?noconnect' % self.getURL('')
request = Request('https://example.com', meta={'proxy': http_proxy})
return self.download_request(request, Spider('foo')).addCallback(_test)
def test_download_without_proxy(self):
def _test(response):
self.assertEquals(response.status, 200)
self.assertEquals(response.url, request.url)
self.assertEquals(response.body, '/path/to/resource')
request = Request(self.getURL('path/to/resource'))
return self.download_request(request, Spider('foo')).addCallback(_test)
class DeprecatedHttpProxyTestCase(unittest.TestCase):
"""Old deprecated reference to http10 downloader handler"""
download_handler_cls = HttpDownloadHandler
class Http10ProxyTestCase(HttpProxyTestCase):
download_handler_cls = HTTP10DownloadHandler
class Http11ProxyTestCase(HttpProxyTestCase):
download_handler_cls = HTTP11DownloadHandler
if 'http11' not in optional_features:
skip = 'HTTP1.1 not supported in twisted < 11.1.0'
class HttpDownloadHandlerMock(object):
def __init__(self, settings):
pass
def download_request(self, request, spider):
return request
class S3TestCase(unittest.TestCase):
download_handler_cls = S3DownloadHandler
try:
        # can't instantiate without settings, but ignore that
download_handler_cls({})
except NotConfigured:
skip = 'missing boto library'
except KeyError: pass
    # These tests use the same example keys as the Amazon developer guide
    # http://s3.amazonaws.com/awsdocs/S3/20060301/s3-dg-20060301.pdf
    # and are the examples described in that manual
AWS_ACCESS_KEY_ID = '0PN5J17HBGZHT7JJ3X82'
AWS_SECRET_ACCESS_KEY = 'uV3F3YluFJax1cknvbcGwgjvx4QpvB+leU8dUj2o'
def setUp(self):
s3reqh = S3DownloadHandler(Settings(), self.AWS_ACCESS_KEY_ID, \
self.AWS_SECRET_ACCESS_KEY, \
httpdownloadhandler=HttpDownloadHandlerMock)
self.download_request = s3reqh.download_request
self.spider = Spider('foo')
def test_request_signing1(self):
# gets an object from the johnsmith bucket.
req = Request('s3://johnsmith/photos/puppy.jpg',
headers={'Date': 'Tue, 27 Mar 2007 19:36:42 +0000'})
httpreq = self.download_request(req, self.spider)
self.assertEqual(httpreq.headers['Authorization'], \
'AWS 0PN5J17HBGZHT7JJ3X82:xXjDGYUmKxnwqr5KXNPGldn5LbA=')
def test_request_signing2(self):
# puts an object into the johnsmith bucket.
req = Request('s3://johnsmith/photos/puppy.jpg', method='PUT', headers={
'Content-Type': 'image/jpeg',
'Date': 'Tue, 27 Mar 2007 21:15:45 +0000',
'Content-Length': '94328',
})
httpreq = self.download_request(req, self.spider)
self.assertEqual(httpreq.headers['Authorization'], \
'AWS 0PN5J17HBGZHT7JJ3X82:hcicpDDvL9SsO6AkvxqmIWkmOuQ=')
def test_request_signing3(self):
# lists the content of the johnsmith bucket.
req = Request('s3://johnsmith/?prefix=photos&max-keys=50&marker=puppy', \
method='GET', headers={
'User-Agent': 'Mozilla/5.0',
'Date': 'Tue, 27 Mar 2007 19:42:41 +0000',
})
httpreq = self.download_request(req, self.spider)
self.assertEqual(httpreq.headers['Authorization'], \
'AWS 0PN5J17HBGZHT7JJ3X82:jsRt/rhG+Vtp88HrYL706QhE4w4=')
def test_request_signing4(self):
# fetches the access control policy sub-resource for the 'johnsmith' bucket.
req = Request('s3://johnsmith/?acl', \
method='GET', headers={'Date': 'Tue, 27 Mar 2007 19:44:46 +0000'})
httpreq = self.download_request(req, self.spider)
self.assertEqual(httpreq.headers['Authorization'], \
'AWS 0PN5J17HBGZHT7JJ3X82:thdUi9VAkzhkniLj96JIrOPGi0g=')
def test_request_signing5(self):
# deletes an object from the 'johnsmith' bucket using the
# path-style and Date alternative.
req = Request('s3://johnsmith/photos/puppy.jpg', \
method='DELETE', headers={
'Date': 'Tue, 27 Mar 2007 21:20:27 +0000',
'x-amz-date': 'Tue, 27 Mar 2007 21:20:26 +0000',
})
httpreq = self.download_request(req, self.spider)
self.assertEqual(httpreq.headers['Authorization'], \
'AWS 0PN5J17HBGZHT7JJ3X82:k3nL7gH3+PadhTEVn5Ip83xlYzk=')
def test_request_signing6(self):
# uploads an object to a CNAME style virtual hosted bucket with metadata.
req = Request('s3://static.johnsmith.net:8080/db-backup.dat.gz', \
method='PUT', headers={
'User-Agent': 'curl/7.15.5',
'Host': 'static.johnsmith.net:8080',
'Date': 'Tue, 27 Mar 2007 21:06:08 +0000',
'x-amz-acl': 'public-read',
'content-type': 'application/x-download',
'Content-MD5': '4gJE4saaMU4BqNR0kLY+lw==',
'X-Amz-Meta-ReviewedBy': 'joe@johnsmith.net,jane@johnsmith.net',
'X-Amz-Meta-FileChecksum': '0x02661779',
'X-Amz-Meta-ChecksumAlgorithm': 'crc32',
'Content-Disposition': 'attachment; filename=database.dat',
'Content-Encoding': 'gzip',
'Content-Length': '5913339',
})
httpreq = self.download_request(req, self.spider)
self.assertEqual(httpreq.headers['Authorization'], \
'AWS 0PN5J17HBGZHT7JJ3X82:C0FlOtU8Ylb9KDTpZqYkZPX91iI=')
def test_request_signing7(self):
# ensure that spaces are quoted properly before signing
req = Request(
("s3://johnsmith/photos/my puppy.jpg"
"?response-content-disposition=my puppy.jpg"),
method='GET',
headers={
'Date': 'Tue, 27 Mar 2007 19:42:41 +0000',
})
httpreq = self.download_request(req, self.spider)
self.assertEqual(
httpreq.headers['Authorization'],
'AWS 0PN5J17HBGZHT7JJ3X82:+CfvG8EZ3YccOrRVMXNaK2eKZmM=')
class FTPTestCase(unittest.TestCase):
username = "scrapy"
password = "passwd"
if twisted_version < (10, 2, 0):
skip = "Twisted pre 10.2.0 doesn't allow to set home path other than /home"
def setUp(self):
# setup dirs and test file
self.directory = self.mktemp()
os.mkdir(self.directory)
userdir = os.path.join(self.directory, self.username)
os.mkdir(userdir)
fp = FilePath(userdir)
fp.child('file.txt').setContent("I have the power!")
fp.child('file with spaces.txt').setContent("Moooooooooo power!")
# setup server
realm = FTPRealm(anonymousRoot=self.directory, userHome=self.directory)
p = portal.Portal(realm)
users_checker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
users_checker.addUser(self.username, self.password)
p.registerChecker(users_checker, credentials.IUsernamePassword)
self.factory = FTPFactory(portal=p)
self.port = reactor.listenTCP(0, self.factory, interface="127.0.0.1")
self.portNum = self.port.getHost().port
self.download_handler = FTPDownloadHandler(Settings())
self.addCleanup(self.port.stopListening)
def _add_test_callbacks(self, deferred, callback=None, errback=None):
def _clean(data):
self.download_handler.client.transport.loseConnection()
return data
deferred.addCallback(_clean)
if callback:
deferred.addCallback(callback)
if errback:
deferred.addErrback(errback)
return deferred
def test_ftp_download_success(self):
request = Request(url="ftp://127.0.0.1:%s/file.txt" % self.portNum,
meta={"ftp_user": self.username, "ftp_password": self.password})
d = self.download_handler.download_request(request, None)
def _test(r):
self.assertEqual(r.status, 200)
self.assertEqual(r.body, 'I have the power!')
self.assertEqual(r.headers, {'Local Filename': [''], 'Size': ['17']})
return self._add_test_callbacks(d, _test)
def test_ftp_download_path_with_spaces(self):
request = Request(
url="ftp://127.0.0.1:%s/file with spaces.txt" % self.portNum,
meta={"ftp_user": self.username, "ftp_password": self.password}
)
d = self.download_handler.download_request(request, None)
def _test(r):
self.assertEqual(r.status, 200)
self.assertEqual(r.body, 'Moooooooooo power!')
self.assertEqual(r.headers, {'Local Filename': [''], 'Size': ['18']})
return self._add_test_callbacks(d, _test)
def test_ftp_download_notexist(self):
request = Request(url="ftp://127.0.0.1:%s/notexist.txt" % self.portNum,
meta={"ftp_user": self.username, "ftp_password": self.password})
d = self.download_handler.download_request(request, None)
def _test(r):
self.assertEqual(r.status, 404)
return self._add_test_callbacks(d, _test)
def test_ftp_local_filename(self):
local_fname = "/tmp/file.txt"
request = Request(url="ftp://127.0.0.1:%s/file.txt" % self.portNum,
meta={"ftp_user": self.username, "ftp_password": self.password, "ftp_local_filename": local_fname})
d = self.download_handler.download_request(request, None)
def _test(r):
self.assertEqual(r.body, local_fname)
self.assertEqual(r.headers, {'Local Filename': ['/tmp/file.txt'], 'Size': ['17']})
self.assertTrue(os.path.exists(local_fname))
with open(local_fname) as f:
self.assertEqual(f.read(), "I have the power!")
os.remove(local_fname)
return self._add_test_callbacks(d, _test)
def test_invalid_credentials(self):
request = Request(url="ftp://127.0.0.1:%s/file.txt" % self.portNum,
meta={"ftp_user": self.username, "ftp_password": 'invalid'})
d = self.download_handler.download_request(request, None)
def _test(r):
self.assertEqual(r.type, ConnectionLost)
return self._add_test_callbacks(d, errback=_test)
| agreen/scrapy | tests/test_downloader_handlers.py | Python | bsd-3-clause | 25,048 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2016-09-19 17:39
from __future__ import unicode_literals
import applications.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('applications', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='application',
name='group_choice',
field=models.CharField(choices=[('DEVOPS', 'DEVOPS'), ('PR', 'PR'), ('LABOPS', 'LABOPS')], max_length=255, verbose_name='Ønsket gruppe'),
),
migrations.AlterField(
model_name='application',
name='phone',
field=models.CharField(max_length=8, validators=[applications.validators.validate_phone_number], verbose_name='Telefon'),
),
]
| hackerspace-ntnu/website | applications/migrations/0002_auto_20160919_1939.py | Python | mit | 826 |
# coding: utf-8
f = open("Sentiment140-Lexicon-v0.1/unigrams-pmilexicon.txt","r")
unigrams = dict()
for line in f:
word,score,poscount,negcount = line.rstrip().split("\t")
unigrams[word] = score
f.close()
# len(unigrams)
f = open("Sentiment140-Lexicon-v0.1/bigrams-pmilexicon.txt","r")
bigrams = dict()
for line in f:
word,score,poscount,negcount = line.rstrip().split("\t")
bigrams[word] = score
f.close()
# len(bigrams)
f = open("Sentiment140-Lexicon-v0.1/pairs-pmilexicon.txt","r")
pairs = dict()
for line in f:
word,score,poscount,negcount = line.rstrip().split("\t")
pairs[word] = score
f.close()
# len(pairs)
# len(pairs)+len(bigrams)+len(unigrams)
# all = unigrams.copy()
# len(all)
# all.update(bigrams)
# all.update(pairs)
# len(all)
# pos_words = [word for word in all if all[word] > 0]
# neg_words = [word for word in all if all[word] < 0]
# neu_words = [word for word in all if all[word] == 0]
# len(pos_words)
# len(neg_words)
# all['happy']
# pos_words = [word for word in all if float(all[word]) > 0]
# len(neg_words)
# len(pos_words)
# neg_words = [word for word in all if float(all[word]) < 0]
# neu_words = [word for word in all if float(all[word]) == 0]
# len(neg_words)
# len(neu_words)
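# Editor's sketch: as the exploration above shows, scores load as strings and
# need casting before numeric comparison, e.g.:
# pos_words = [word for word in unigrams if float(unigrams[word]) > 0]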
| TransientObject/labMTComparison | labMT-simple/labMTsimple/data/NRC/load_NRC.py | Python | apache-2.0 | 1,238 |
"""
Tools to read and write georeferenced pointclouds such as output from Pix4D,
based on the plyfile module.
"""
import collections
import itertools
import json
import tempfile
import warnings
import numpy as np
import plyfile
UTM_COORD = collections.namedtuple(
'UTMCoord', ['easting', 'northing', 'zone', 'northern'])
def get_tmpfile():
"""Create a temporary file, for easy use of np.memmap"""
# TODO: allow user to specify where this is, eg. $JOBFS
return tempfile.SpooledTemporaryFile(max_size=2**20)
class GeoPly(plyfile.PlyData):
"""A pointcloud, with the UTM georeference for the origin coordinate.
Spatial units are meters east (X) and north (Y) of the origin.
Pointcloud data typically has a substantial XY offset from the UTM
origin, which is retained due to the precision limits of 32-bit floats.
"""
# Marker for special UTM coord comments
_COORD_MARKER = 'UTM_COORD='
#pylint:disable=too-many-arguments
def __init__(self, elements=None, text=False, byte_order='=',
comments=None, obj_info=None, *, utm_coord, memmap=None):
"""Create a GeoPly instance. utm_coord is a required keyword arg."""
# Validate utm_coord a little
self.utm_coord = utm_coord
if not isinstance(self.utm_coord, UTM_COORD):
raise ValueError('Must include the UTM coords of the local origin')
# Handle the more flexible argument types allowed here
if isinstance(elements, np.ndarray):
elements = [plyfile.PlyElement.describe(elements, 'vertex')]
# Call parent __init__, avoiding mutable default arguments
elements = elements or []
comments = comments or []
obj_info = obj_info or []
super().__init__(elements, text, byte_order, comments, obj_info)
# Memmap if requested, or autodetecting and many vertices
if memmap is None:
memmap = self['vertex'].data.size >= 10**7
if memmap and not isinstance(self['vertex'].data, np.memmap):
mmap = np.memmap(get_tmpfile(), dtype=self['vertex'].data.dtype,
shape=self['vertex'].data.shape)
mmap[:] = self['vertex'].data[:]
self['vertex'].data = mmap
@staticmethod
def read(stream):
"""Reads vertices from ``stream``, with UTM offset and data cleaning.
stream may be a filename, or a file-like object.
The UTM coordinate (self.utm_coord) is read from
- comments in the file header, if the pointcloud was created
by this class.
- the corresponding '_ply_offset.xyz' file, if the pointcloud was
created by Pix4D. In this case, the Z-offset is added to vertex
coordinates.
Data cleaning consists of:
- discarding non-"vertex" elements (if present)
- removing the marker comment if written by Meshlab, and if a uniform
alpha channel was added removing that too
"""
data = plyfile.PlyData.read(stream)
verts = data['vertex']
# Remove meshlab cruft
if 'VCGLIB generated' in data.comments:
names = verts.data.dtype.names # field names of each vertex
if 'alpha' in names and len(np.unique(verts['alpha'])) == 1:
# properties of the PlyElement instance are manually updated
verts.properties = [p for p in verts.properties
if p.name != 'alpha']
# removal of a vertex field is via fancy indexing
verts.data = verts.data[[n for n in names if n != 'alpha']]
data.comments.remove('VCGLIB generated')
# Add UTM coordinates if known or discoverable
utm_coord = None
coords = []
for c in data.comments:
if c.startswith(GeoPly._COORD_MARKER):
data.comments.remove(c)
                # Slice off the marker; str.lstrip would strip any of the
                # marker's characters, not the prefix as a whole
                serialised = c[len(GeoPly._COORD_MARKER):]
coords.append(UTM_COORD(**json.loads(serialised)))
if coords:
utm_coord = coords[0]
if len(coords) > 1:
msg = RuntimeWarning('Found multiple coordinates in comments:'
'{}, using first...'.format(coords))
warnings.warn(msg)
else:
# Try to find and apply the Pix4D offset, which may raise...
z_offset, utm_coord = GeoPly._offset_from_pix4d(stream)
with warnings.catch_warnings():
warnings.simplefilter('ignore', FutureWarning)
                # Numpy worries about writing to multiple columns here
verts['z'] += z_offset
# Return as GeoPly instance with only vertex elements
return GeoPly([verts], data.text, data.byte_order,
data.comments, data.obj_info, utm_coord=utm_coord)
def write(self, stream):
"""Write to a file, serialising utm_coord as a special comment."""
assert not any(c.startswith(self._COORD_MARKER) for c in self.comments)
# Serialise as JSON dict following the marker string
serialised = self._COORD_MARKER + json.dumps(self.utm_coord._asdict())
# Insert, write, pop - keeps comments in correct state
self.comments.insert(0, serialised)
super().write(stream)
self.comments.pop(0)
@staticmethod
def _offset_from_pix4d(ply_filename, utm_zone=55,
in_northern_hemisphere=False):
"""Return a (offset_z, UTM coord) for a .ply file from the
corresponding Pix4D offset file and provided zone.
Raises FileNotFoundError if the offset file is invalid or nonexistent.
"""
assert ply_filename.endswith('.ply')
offset_file = ply_filename[:-4] + '_ply_offset.xyz'
try:
with open(offset_file) as f:
line = f.readline().strip()
x, y, z = (float(n) for n in line.split(' '))
except Exception:
raise FileNotFoundError
return z, UTM_COORD(x, y, utm_zone, in_northern_hemisphere)
@property
def vertices(self):
"""Return a read-only view of the vertex data."""
vertices = np.ndarray.view(self['vertex'].data)
vertices.flags.writeable = False
return vertices
@staticmethod
def from_iterable(iterable, utm_coord, **kwargs):
"""Create a GeoPly from an iterable of vertices and a UTM offset.
The iterable must contain numpy scalars with a consistent dtype.
"""
it = iter(iterable)
first = next(it)
assert isinstance(first, np.void)
array = np.fromiter(itertools.chain([first], it), first.dtype)
return GeoPly(array, utm_coord=utm_coord, **kwargs)
@staticmethod
def from_array(array, utm_coord, **kwargs):
"""Create a GeoPly from a Numpy array of vertices and a UTM offset."""
assert isinstance(array, np.ndarray)
assert all(dim in array.dtype.names for dim in 'xyz')
return GeoPly(array, utm_coord=utm_coord, **kwargs)
@classmethod
def from_geoplys(cls, *geoplys):
"""Create a new geoply by combining two or more GeoPly instances.
All inputs must have compatible georeferences and datatypes.
        The output GeoPly uses the base georeference and concatenates all
input vertices, applying relative offsets. If any of the inputs
stored vertices in a np.memmap, so will the output.
"""
assert len(geoplys) >= 2
assert all(isinstance(p, cls) for p in geoplys)
assert all(p.utm_coord is not None for p in geoplys)
assert len(set(p.utm_coord.zone for p in geoplys)) == 1
assert len(set(p.utm_coord.northern for p in geoplys)) == 1
assert len(set(p['vertex'].data.dtype for p in geoplys)) == 1
# flatten and deduplicate comments
comments = [c for pf in geoplys for c in pf.comments]
comments = sorted(set(comments), key=comments.index)
# paste arrays into single memmap, handling UTM offsets
        using_memmap = any(isinstance(p['vertex'].data, np.memmap)
                           for p in geoplys)
        out_dtype = geoplys[0]['vertex'].data.dtype
        out_shape = (sum(p['vertex'].data.size for p in geoplys),)
        if using_memmap:
            to_arr = np.memmap(get_tmpfile(), dtype=out_dtype, shape=out_shape)
        else:
            # np.array has no shape argument; allocate an empty array instead
            to_arr = np.empty(out_shape, dtype=out_dtype)
base, *other_files = geoplys
start = base['vertex'].data.size
to_arr[:start] = base['vertex'].data
for pf in other_files:
arr = np.copy(pf['vertex'].data)
arr['x'] += (pf.utm_coord.easting - base.utm_coord.easting)
arr['y'] += (pf.utm_coord.northing - base.utm_coord.northing)
to_arr[start:start+arr.size] = arr
start += arr.size
# Load data back into the complete structure and return
return cls(to_arr, comments=comments, utm_coord=base.utm_coord)
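# Editor's usage sketch (file names hypothetical): read two Pix4D tiles and
# merge them onto the first tile's origin.
#
#     tile_a = GeoPly.read('tile_a.ply')
#     tile_b = GeoPly.read('tile_b.ply')
#     merged = GeoPly.from_geoplys(tile_a, tile_b)
#     merged.write('merged.ply')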
| Zac-HD/3D-tools | src/geoply.py | Python | gpl-3.0 | 8,980 |
class Solution(object):
def romanToInt(self, s):
"""
:type s: str
:rtype: int
"""
        # special cases are the subtractive pairs (IV, IX, XL, XC, CD, CM)
roman_map={"M":1000,"CM":900,"D":500,"CD":400,"C":100,"XC":90,"L":50,"XL":40,"X":10,"IX":9,"V":5,"IV":4,"I":1}
n=len(s)
if n==0:
return 0
i=0
res=0
while i<n:
if i+1<n and s[i:i+2] in roman_map:
res+=roman_map[s[i:i+2]]
i+=2
continue
if s[i] in roman_map:
res+=roman_map[s[i]]
i+=1
continue
return res
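# Editor's quick check (well-known numerals):
# Solution().romanToInt("MCMXCIV") -> 1994
# Solution().romanToInt("LVIII")   -> 58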
| Tanych/CodeTracking | 13-Roman-to-Integer/solution.py | Python | mit | 692 |
def ans():
return sum(
x for
x in range(1000) if
x % 3 == 0 or x % 5 == 0
)
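# Editor's note (sketch): the same total in O(1) via inclusion-exclusion,
# where s(k) sums the multiples of k below 1000.
def ans_closed_form():
    def s(k):
        m = 999 // k
        return k * m * (m + 1) // 2
    return s(3) + s(5) - s(15)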
if __name__ == '__main__':
print(ans())
| mackorone/euler | src/001.py | Python | mit | 154 |
#!/usr/bin/env python3
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "serveurlibre.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| frafra/serveurlibre | manage.py | Python | lgpl-3.0 | 256 |
'''
Script to send FireEye alerts saved as json files to FireStic for testing.
Option to send a single file or to read a directory and send all .json files.
'''
import requests
import json
import sys
import getopt
import glob
import time
# parameters
# -f --file = a specific json file to send
# -d --dir = all the json files in a directory
# -t --timeout = delay in seconds between sends. Default 1 second
# -u --url = url/ip address of server
# -p --port = port server is listening on
def processFile(inputfile,serverurl):
headers = {'content-type': 'application/json'}
try:
with open(inputfile) as json_file:
file_data = json_file.read()
try:
r = requests.post(serverurl, data=file_data, headers=headers, timeout=5)
except Exception, e:
print " "
print "COMMUNICATION ERROR : " + str(e)
print " "
sys.exit(2)
    except Exception, e:
        print " "
        print "FILE ERROR : " + str(e)
        print " "
        sys.exit(2)
print inputfile + " sent to " + serverurl + ". Status code: " + str(r.status_code) + "."
return
def printopts():
print '''
USAGE:
-f --file a specific json file to send
-d --dir directory of json files to send. Use ./ for current directory
** must include either -f or -d but not both **
-t --timeout (optional) seconds delay between multiple sends. Default = 1
-u --url url/ip address to send to
-p --port port server is listening on
EXAMPLES:
fstest.py -f ./testalert.json -u localhost -p 8080
fstest.py -d ./alerts -t 2 -u 192.168.1.2 -p 8080
fstest.py -d ./ -u localhost -p 8888
'''
def main(argv):
inputfile = ''
inputdir = ''
timeout = 1
url = ''
port = ''
mode = ''
try:
opts, args = getopt.getopt(argv,"hf:d:t:u:p:",["help=","file=","dir=","timeout=","url=","port="])
except getopt.GetoptError:
printopts()
sys.exit(2)
if not len(opts):
print 'No options specified:'
printopts()
sys.exit(2)
for opt, arg in opts:
if opt in ("-h","--help"):
printopts()
sys.exit()
elif opt in ("-f", "--file"):
inputfile = arg
mode = 'file'
elif opt in ("-d", "--dir"):
inputdir = arg
mode = 'directory'
elif opt in ("-t", "--timeout"):
timeout = arg
elif opt in ("-u", "--url"):
url = arg
elif opt in ("-p", "--port"):
port = arg
# if no url or port --> error
if (url == '') or (port == ''):
print "ERROR: url and port are required"
printopts()
sys.exit(2)
serverurl = 'http://' + url + ':' + port
# go try to read file and send
if (mode == 'file'):
processFile(inputfile,serverurl)
elif (mode == 'directory'):
filelist = glob.glob(inputdir + '*.json')
if len(filelist):
for afile in filelist:
processFile(afile,serverurl)
time.sleep(float(timeout))
else:
print "No files of type .json found in directory: " + inputdir
else:
print "unknown mode"
if __name__ == "__main__":
main(sys.argv[1:])
| SergeyBondarenko/FireStic | testing/fstest.py | Python | mit | 3,365 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
import collections
import os
import posixpath
import six
import tarfile
from .buffer import DockerStringBuffer
from .. import DEFAULT_BASEIMAGE
def prepare_path(path, replace_space, replace_sep, expandvars, expanduser):
"""
Performs `os.path` replacement operations on a path string.
:param path: Path string
:type path: unicode | str
:param replace_space: Mask spaces with backslash.
:param replace_sep: Replace potentially different path separators with POSIX path notation (use :const:`posixpath.sep`).
:type replace_sep: bool
:param expandvars: Expand environment variables (:func:`~os.path.expandvars`).
:type expandvars: bool
:param expanduser: Expand user variables (:func:`~os.path.expanduser`).
:type expanduser: bool
:return: Path string from `path` with aforementioned replacements.
:rtype: unicode | str
"""
r_path = path
if expandvars:
r_path = os.path.expandvars(r_path)
if expanduser:
r_path = os.path.expanduser(r_path)
if replace_sep and os.sep != posixpath.sep:
r_path = r_path.replace(os.path.sep, posixpath.sep)
if replace_space:
r_path = r_path.replace(' ', '\\ ')
return r_path
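# Editor's sketch of prepare_path (result shown for a POSIX host, where
# replace_sep is a no-op):
# prepare_path('/data/my files', True, True, False, False)
#   -> '/data/my\\ files'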
def format_command(cmd, shell=False):
"""
Converts a command line to the notation as used in a Dockerfile ``CMD`` and ``ENTRYPOINT`` command. In shell
notation, this returns a simple string, whereas by default it returns a JSON-list format with the command and
arguments.
:param cmd: Command line as a string or tuple.
:type cmd: unicode | str | tuple | list
:param shell: Use the notation so that Docker runs the command in a shell. Default is ``False``.
:type shell: bool
:return: The command string.
:rtype: unicode | str
"""
def _split_cmd():
line = None
for part in cmd.split(' '):
line = part if line is None else '{0} {1}'.format(line, part)
if part[-1] != '\\':
yield line
line = None
if line is not None:
yield line
if cmd in ([], ''):
return '[]'
if shell:
if isinstance(cmd, (list, tuple)):
return ' '.join(cmd)
elif isinstance(cmd, six.string_types):
return cmd
else:
if isinstance(cmd, (list, tuple)):
return json.dumps(list(map(six.text_type, cmd)))
elif isinstance(cmd, six.string_types):
return json.dumps(list(_split_cmd()))
raise ValueError("Invalid type of command string or sequence: {0}".format(cmd))
def format_expose(expose):
"""
Converts a port number or multiple port numbers, as used in the Dockerfile ``EXPOSE`` command, to a tuple.
:param: Port numbers, can be as integer, string, or a list/tuple of those.
:type expose: int | unicode | str | list | tuple
:return: A tuple, to be separated by spaces before inserting in a Dockerfile.
:rtype: tuple
"""
if isinstance(expose, six.string_types):
return expose,
elif isinstance(expose, collections.Iterable):
return map(six.text_type, expose)
return six.text_type(expose),
def format_labels(labels):
if isinstance(labels, six.string_types):
return labels,
elif isinstance(labels, dict):
return ['"{0}"="{1}"'.format(k, v.replace('\n', '\\\n'))
for k, v in six.iteritems(labels)]
elif isinstance(labels, collections.Iterable):
return ['"{0}"="{1}"'.format(k, v.replace('\n', '\\\n'))
for k, v in labels]
raise ValueError("Invalid format for labels.", labels)
class DockerFile(DockerStringBuffer):
"""
Class for constructing Dockerfiles; can be saved or used in a :class:`DockerContext`. For :class:`DockerContext`, it
keeps track of ``ADD`` operations, so that all files can easily be added to the context tarball.
:param baseimage: Base image to use for the new image. Set this to ``None`` if you want to explicitly write out the
``FROM`` Dockerfile command.
:type baseimage: unicode | str
:param maintainer: Optional maintainer, to be used for the ``MAINTAINER`` Dockerfile command.
:type maintainer: unicode | str
:param initial: Optional initial Dockerfile contents. Should only include header comments, ``FROM``, or
``MAINTAINER``, if those are not set in aforementioned parameters.
:type initial: unicode | str
"""
def __init__(self, baseimage=DEFAULT_BASEIMAGE, maintainer=None, initial=None, **kwargs):
super(DockerFile, self).__init__()
self._files = []
self._remove_files = set()
self._archives = []
self._volumes = kwargs.pop('volumes', None)
self._entrypoint = kwargs.pop('entrypoint', None)
self._command = kwargs.pop('command', None)
self._command_shell = kwargs.pop('command_shell', False)
self._cmd_user = kwargs.pop('cmd_user', None)
self._cmd_workdir = kwargs.pop('cmd_workdir', None)
self._expose = kwargs.pop('expose', None)
self._labels = kwargs.pop('labels', None)
self._shell = kwargs.pop('shell', None)
self._stopsignal = kwargs.pop('stopsignal', None)
self._healthcheck = kwargs.pop('healthcheck', None)
if baseimage:
self.prefix('FROM', baseimage)
self.blank()
if maintainer:
self.prefix('MAINTAINER', maintainer)
self.blank()
if isinstance(initial, six.string_types):
self.writeline(initial)
elif isinstance(initial, collections.Iterable):
self.writelines(initial)
def prefix(self, prefix='#', *args):
"""
Prefix one or multiple arguments with a Dockerfile command. The default is ``#``, for comments. Multiple args will
be separated by a space.
:param prefix: Dockerfile command to use, e.g. ``ENV`` or ``RUN``.
:type prefix: unicode | str
:param args: Arguments to be prefixed.
"""
self.write(prefix)
if args:
self.write(' ')
self.writeline(' '.join(map(six.text_type, args)))
def prefix_all(self, prefix='#', *lines):
"""
Same as :func:`~prefix`, for multiple lines.
:param prefix: Dockerfile command to use, e.g. ``ENV`` or ``RUN``.
:type prefix: unicode | str
:param lines: Lines with arguments to be prefixed.
:type lines: collections.Iterable[unicode | str]
"""
for line in lines:
if isinstance(line, (tuple, list)):
self.prefix(prefix, *line)
elif line:
self.prefix(prefix, line)
else:
self.blank()
def run(self, *args):
"""
Insert a `RUN` command in a Dockerfile, with arguments.
:param args: Command to be inserted after `RUN`.
"""
self.prefix('RUN', *args)
def run_all(self, *lines):
"""
Insert a series of commands in a Dockerfile, all prefixed with ``RUN``.
:param lines: Command lines to be inserted.
:type: collections.Iterable[unicode | str]
"""
self.prefix_all('RUN', *lines)
def run_join(self, *lines):
"""
Insert a series of commands in a Dockerfile joined with '&&' and prefixed with ``RUN``.
:param lines: Command lines to be inserted.
:type: collections.Iterable[unicode | str]
"""
self.prefix('RUN', ' && '.join(lines))
def add_file(self, src_path, dst_path=None, ctx_path=None, replace_space=True, expandvars=False, expanduser=False,
remove_final=False):
"""
Adds a file to the Docker build. An ``ADD`` command is inserted, and the path is stored for later packaging of
the context tarball.
:param src_path: Path to the file or directory.
:type src_path: unicode | str
:param dst_path: Destination path during the Docker build. By default uses the last element of `src_path`.
:type dst_path: unicode | str
:param ctx_path: Path inside the context tarball. Can be set in order to avoid name clashes. By default
identical to the destination path.
:type ctx_path: unicode | str
:param replace_space: Mask spaces in path names with a backslash. Default is ``True``.
:type replace_space: bool
:param expandvars: Expand local environment variables. Default is ``False``.
:type expandvars: bool
:param expanduser: Expand local user variables. Default is ``False``.
:type expanduser: bool
:param remove_final: Remove the file after the build operation has completed. Can be useful e.g. for source code
archives, which are no longer needed after building the binaries. Note that this will not reduce the size of
the resulting image (actually may increase instead) unless the image is squashed.
:type remove_final: bool
:return: The path of the file in the Dockerfile context.
:rtype: unicode | str
"""
if dst_path is None:
head, tail = os.path.split(src_path)
if not tail:
                # Handle trailing path separators.
tail = os.path.split(head)[1]
if not tail:
ValueError("Could not generate target path from input '{0}'; needs to be specified explicitly.")
target_path = tail
else:
target_path = dst_path
source_path = prepare_path(src_path, False, False, expandvars, expanduser)
target_path = prepare_path(target_path, replace_space, True, expandvars, expanduser)
if ctx_path:
context_path = prepare_path(ctx_path, replace_space, True, expandvars, expanduser)
else:
context_path = target_path
self.prefix('ADD', context_path, target_path)
self._files.append((source_path, context_path))
if remove_final:
self._remove_files.add(target_path)
return context_path
def add_archive(self, src_file, remove_final=False):
"""
Adds the contents of another tarfile to the build. It will be repackaged during context generation, and added
        to the root level of the file system. Therefore, tar (or compression utilities) need not be
present in the base image.
:param src_file: Tar archive to add.
:type src_file: unicode | str
:param remove_final: Remove the contents after the build operation has completed. Note that this will remove all
top-level components of the tar archive recursively. Therefore, you should not use this on standard unix
        folders. This will also not reduce the size of the resulting image (it may actually increase it) unless the
image is squashed.
:type remove_final: bool
:return: Name of the root files / directories added to the Dockerfile.
:rtype: list[unicode | str]
"""
with tarfile.open(src_file, 'r') as tf:
member_names = [member.name
for member in tf.getmembers()
if posixpath.sep not in member.name]
self.prefix_all('ADD', *zip(member_names, member_names))
if remove_final:
self._remove_files.update(member_names)
self._archives.append(src_file)
return member_names
def add_volume(self, path):
"""
Add a shared volume (i.e. with the ``VOLUME`` command). Not actually written until finalized.
:param path: Path to the shared volume.
"""
self.check_not_finalized()
if self.volumes is None:
self.volumes = [path]
else:
self.volumes.append(path)
def comment(self, input_str=None):
"""
Adds a comment to the Dockerfile. If not defined, adds an empty comment line.
:param input_str: Comment.
:type input_str: unicode | str
"""
if input_str:
self.prefix('#', input_str)
else:
self.write('#\n')
def blank(self):
"""
Adds a blank line to the Dockerfile.
"""
self.write('\n')
def write(self, input_str):
"""
Adds content to the Dockerfile.
:param input_str: Content.
:type input_str: unicode | str
"""
self.check_not_finalized()
if isinstance(input_str, six.binary_type):
self.fileobj.write(input_str)
else:
self.fileobj.write(input_str.encode('utf-8'))
def writelines(self, sequence):
"""
Adds a sequence of content to the Dockerfile.
:param sequence: Content sequence.
:type sequence: collections.Iterable[unicode | str]
"""
for s in sequence:
self.writeline(s)
def writeline(self, input_str):
self.check_not_finalized()
if isinstance(input_str, six.binary_type):
self.fileobj.write(input_str)
else:
self.fileobj.write(input_str.encode('utf-8'))
self.fileobj.write(b'\n')
@property
def volumes(self):
"""
Sets the list of shared volumes to be set in the Dockerfile ``VOLUME`` command. Not written before finalization.
:return: Shared volumes.
:rtype: list
"""
return self._volumes
@volumes.setter
def volumes(self, value):
self.check_not_finalized()
self._volumes = value
@property
def entrypoint(self):
"""
Sets the entry point for the Dockerfile ``ENTRYPOINT`` command. Not written before finalization.
:return: Entry point.
:rtype: unicode | str | list | tuple
"""
return self._entrypoint
@entrypoint.setter
def entrypoint(self, value):
self.check_not_finalized()
self._entrypoint = value
@property
def command(self):
"""
Sets the default command for the Dockerfile ``CMD`` command. Not written before finalization.
:return: Command.
:rtype: unicode | str | list | tuple
"""
return self._command
@command.setter
def command(self, value):
self.check_not_finalized()
self._command = value
@property
def command_shell(self):
"""
Sets if entry point and command should be formatted as a shell, or as an exec command upon finalization.
:return: ``True``, if Docker should use a shell, ``False`` if exec is used.
:rtype: bool
"""
return self._command_shell
@command_shell.setter
def command_shell(self, value):
self.check_not_finalized()
self._command_shell = value
@property
def command_user(self):
"""
Sets the default user that should be used for the default entry point and command. Upon finalization, this will
        insert a ``USER`` command right before ``ENTRYPOINT`` or ``CMD`` if applicable. For applying this to ``RUN``
commands, insert the ``USER`` command manually.
:return: Default user name or id.
:rtype: unicode | str
"""
return self._cmd_user
@command_user.setter
def command_user(self, value):
self.check_not_finalized()
self._cmd_user = value
@property
def command_workdir(self):
"""
Sets the working directory that should be used for the default entry point and command. Upon finalization, this
        will insert a ``WORKDIR`` command right before ``ENTRYPOINT`` or ``CMD`` if applicable. For applying this to
        other commands, insert the ``WORKDIR`` command manually.
        :return: Working directory. Must be a valid path inside the docker image.
:rtype: unicode | str
"""
return self._cmd_workdir
@command_workdir.setter
def command_workdir(self, value):
self.check_not_finalized()
self._cmd_workdir = value
@property
def expose(self):
"""
Sets the ports to be inserted with the ``EXPOSE`` command in the Dockerfile. Not written before finalization.
:return: Ports.
:rtype: unicode | str | int | tuple | list
"""
return self._expose
@expose.setter
def expose(self, value):
self.check_not_finalized()
self._expose = value
@property
def labels(self):
return self._labels
@labels.setter
def labels(self, value):
self.check_not_finalized()
self._labels = value
@property
def shell(self):
return self._shell
@shell.setter
def shell(self, value):
self.check_not_finalized()
self._shell = value
@property
def stopsignal(self):
return self._stopsignal
@stopsignal.setter
def stopsignal(self, value):
self.check_not_finalized()
self._stopsignal = value
@property
def healthcheck(self):
return self._healthcheck
@healthcheck.setter
def healthcheck(self, value):
self.check_not_finalized()
self._healthcheck = value
def finalize(self):
"""
        Finalizes the Dockerfile. Before the buffer is marked as read-only, the following Dockerfile
        commands are written:
        * ``RUN rm -Rf`` on each file marked for automatic removal;
* ``VOLUME`` for shared volumes;
* ``USER`` as the default user for following commands;
* ``WORKDIR`` as the working directory for following commands;
* ``SHELL`` if the default shell is to be changed;
* ``ENTRYPOINT`` and ``CMD``, each formatted as a shell or exec command according to :attr:`command_shell`;
* ``EXPOSE`` for exposed ports;
* ``LABEL``, ``STOPSIGNAL``, and ``HEALTHCHECK`` instructions for the image;
An attempt to finalize an already-finalized instance has no effect.
"""
if self._finalized:
return
if self._remove_files:
for filename in self._remove_files:
self.prefix('RUN', 'rm -Rf', filename)
self.blank()
if self._volumes is not None:
self.prefix('VOLUME', json.dumps(self._volumes))
if self._cmd_user:
self.prefix('USER', self._cmd_user)
if self._cmd_workdir:
self.prefix('WORKDIR', self._cmd_workdir)
if self._shell:
self.prefix('SHELL', self._shell)
if self._entrypoint is not None:
self.prefix('ENTRYPOINT', format_command(self._entrypoint, self._command_shell))
if self._command is not None:
self.prefix('CMD', format_command(self._command, self._command_shell))
if self._expose is not None:
self.prefix('EXPOSE', *format_expose(self._expose))
if self._labels:
self.prefix('LABEL', *format_labels(self._labels))
if self._stopsignal:
self.prefix('STOPSIGNAL', self._stopsignal)
if self._healthcheck:
self.prefix('HEALTHCHECK', self._healthcheck)
super(DockerFile, self).finalize()
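# A hypothetical usage sketch (illustrative only; the constructor signature is
# inferred from the MAINTAINER / `initial` handling above and from the
# documented methods, and may differ from the actual one):
#
#     df = DockerFile('ubuntu:20.04', maintainer='me@example.com')
#     df.run('apt-get update')
#     df.add_file('app.tar.gz', '/tmp/app.tar.gz', remove_final=True)
#     df.entrypoint = ['/usr/bin/app']
#     df.finalize()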
|
merll/docker-map
|
dockermap/build/dockerfile.py
|
Python
|
mit
| 19,295
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
This figure is meant to represent an event-type with Faces presented
at times [0,4,8,12,16] and Objects presented at [2,6,10,14,18].
There are two values for Y: one for 'Face' and one for 'Object'
"""
import pylab
import numpy as np
pylab.scatter([0,4,8,12,16], [1,1,1,1,1], c='r', marker='o', label='Face')
pylab.scatter([2,6,10,14,18], [0,0,0,0,0], c='b', marker='o', label='Object')
a = pylab.gca()
a.set_ylim([-0.1,1.1])
a.set_yticks([0,1])
a.set_yticklabels(['Object', 'Face'])
a.set_xlim([-0.5,10])
a.set_xlabel('Time')
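# The scatter calls above attach label= values; rendering them would take one
# more call (an assumption for interactive use; the doc build may handle the
# legend elsewhere):
#     a.legend(loc='center right')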
|
bthirion/nipy
|
doc/users/plots/event.py
|
Python
|
bsd-3-clause
| 649
|
# We can re-initialize an object by calling its __init__ method explicitly
class a:
def __init__(self,x,y):
self.x = x
self.y = y
if __name__ == "__main__":
    abc = a(1, 2)           # abc.x == 1, abc.y == 2
    abc.__init__(122, 123)  # re-running __init__ re-initializes: abc.x == 122
# abc = a(1,2)
# abc.x returns 1
# abc.y returns 2
# if we do something like abc.__init__(122,123)
# it works perfectly fine
# i know that we can obviously do something like abc.x = 122
# but here i was just checking if this works
|
PankeshGupta/pynotes
|
reinit.py
|
Python
|
mit
| 411
|
#!/usr/bin/env python
import getopt, sys, re, urllib2, urllib, BaseHTTPServer
from urllib2 import Request, urlopen, URLError, HTTPError
################## HEADER ###################################
#
# Traceroute-like HTTP scanner
# Using the "Max-Forwards" header
# RFC 2616 - HTTP/1.1 - Section 14.31
# RFC 3261 - SIP - Section 8.1.1.6
#
#
# By Nicolas Gregoire (nicolas.gregoire@agarri.fr)
#
# 0.5 : First public release
# 0.4 : Private release, looking for bugs - More heuristics
# 0.3 : A lot more options - More verbosity levels - Some heuristics
#
# By Julien Cayssol (tools@aqwz.com)
#
# 0.2 : Add extract of headers
# 0.1 : Initial version
#
#
# Heuristics :
# - Status Codes :
# - HTTP Status Code == 502
# - HTTP Status Code == 483
# - Specific data in body or headers :
# - X-Forwarded-For in body when using TRACE
# - Via or X-Via in headers
# - Differences between hops :
# - HTTP Status Codes
# - Server headers
# - Content-Type headers
# - Via headers
# - HTML titles
# - HTML <address> tags
# - X-Forwarded-For values when using TRACE
#
############## GLOBAL VARIABLES ###################################
global_data = { 'StatusCode':{}, 'Server':{} , 'Content-Type':{}, 'Title':{}, 'Address':{}, 'X-Fwd':{}, 'Via':{} }
score = 0
verbosity = 0
scheme = 'http'
host = '127.0.0.1'
port = '80'
path = '/'
method = 'TRACE'
body_content = None
max_fwds = 3
userAgent = 'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.0.2) Gecko/20060502 Firefox/1.5.0.2'
contentType = 'text/html'
############## FUNCTIONS ###################################
# Pretty printing
def zprint(string, flag = '=='):
print '[' + flag + '] ' + string
# Increment the heuristic score
def inc_score():
global score
score = score + 1
if verbosity:
zprint('Score : ' + str(score), '!!')
# Help
def showUsage():
print 'Usage : ' + sys.argv[0] + ' [-h] [-m method] [-s scheme] [-t target] [-p port] [-P path] [-v 0|1|2] [-f forwards]'
print '\t[-h] Help (this text)'
print '\t[-m] HTTP Method : default is "TRACE"'
print '\t[-s] Scheme : default is "http"'
print '\t[-t] Target host : default is "127.0.0.1"'
print '\t[-p] Port : default is "80"'
print '\t[-P] Path : default is "/"'
print '\t[-f] Max # of forwards : default is "3"'
print '\t[-v] Verbosity : 0 = default, 1 = verbose, 2 = debug'
print 'Examples :'
print sys.argv[0] + ' -t www.example.org'
print ' => TRACE /'
print sys.argv[0] + ' -t www.example.org -m GET -s https -p 443 -v 1'
print ' => GET / on a SSL host'
print sys.argv[0] + ' -t www.example.org -m POST -P /axis2/checkacc -v 2 -f 5'
print ' => Debug mode on a specific end-point'
sys.exit(1)
# Parse CLI args
def getArguments():
try:
if len(sys.argv) < 2:
zprint('No arguments ? Probably a bad choice. Use "-h" ...', '!!')
sys.exit(1)
        optlist, args = getopt.getopt(sys.argv[1:], 'hm:s:t:p:P:v:f:')
except getopt.GetoptError:
showUsage()
for opt in optlist:
if opt[0] == '-h':
showUsage()
if opt[0] == '-m':
global method
method = opt[1]
if opt[0] == '-s':
global scheme
scheme = opt[1]
if opt[0] == '-t':
global host
host = opt[1]
if opt[0] == '-p':
global port
port = opt[1]
if opt[0] == '-P':
global path
path = opt[1]
if opt[0] == '-v':
global verbosity
verbosity = int(opt[1])
if opt[0] == '-f':
global max_fwds
max_fwds = int(opt[1])
# Extract some interesting data from the headers
def analyse_headers(data):
if verbosity:
print
zprint('Analyzing headers', '**')
wanted_headers = [
'Server',
'Via',
'X-Via',
'Set-Cookie',
'X-Forwarded-For',
'Content-Type',
'Content-Length',
'Last-Modified',
'Location',
'Date',
]
for h_name in wanted_headers:
h_value = data.getheader(h_name)
if h_value != None:
# Print the value
if verbosity:
zprint(h_value, h_name)
# Add it to the global structure if needed
if h_name == 'Server' or h_name == 'Content-Type':
global_data[h_name][hop] = h_value
# Some heuristics
if h_name == 'Via' or h_name == 'X-Via':
zprint('"Via" header : Probably a reverse proxy', '++')
global_data['Via'][hop] = h_value
inc_score()
# Extract some interesting data from the body
def analyse_body(data):
if verbosity:
print
zprint('Analyzing body', '**')
wanted_patterns = [
'<title>(.*)</title>',
'<address>(.*)</address>',
'Reason: <strong>(.*)</strong>',
'X-Forwarded-For: (.*)',
]
for p_name in wanted_patterns:
# Case insensitive search
p_value = re.search(p_name, data, re.IGNORECASE)
if p_value != None:
# Only the 1st group, without newlines
value = p_value.groups()[0].strip('\r\n')
if verbosity:
zprint(value, p_name)
# Add it to the global structure if needed
if p_name == '<title>(.*)</title>':
global_data['Title'][hop] = value
if p_name == '<address>(.*)</address>':
global_data['Address'][hop] = value
# Some heuristics
if re.search('X-Forwarded-For:' , p_name):
global_data['X-Fwd'][hop] = value
if method == 'TRACE':
zprint('"X-Forwarded-For" in body when using TRACE : Probably a reverse proxy', '++')
inc_score()
# Analyse the data returned by urllib2.*open()
def debug_and_parse(data):
# Get data
headers = data.info()
body = data.read()
# Debug
if verbosity == 2:
zprint(str(headers), 'DEBUG HEADERS')
zprint(str(body), 'DEBUG BODY')
    # Extract some interesting info
codes = BaseHTTPServer.BaseHTTPRequestHandler.responses
global_data['StatusCode'][hop] = str(data.code) + ' ' + codes[data.code][0]
analyse_headers(headers)
analyse_body(body)
############## SCAN ###################################
# Init
getArguments()
# Current target
url = scheme + '://' + host + ':' + port + path
zprint('Target URL : ' + url)
zprint('Used method : ' + method)
zprint('Max number of hops : ' + str(max_fwds))
# Scan
for hop in range(0, max_fwds):
# Create the request object
request = urllib2.Request(url)
request.get_method = lambda: method
request.add_data(body_content)
request.add_header('Content-Type', contentType)
request.add_header('User-agent', userAgent)
# Add the 'Max-Forwards' header
request.add_header('Max-Forwards', hop)
if verbosity:
print('-' * 80)
zprint('Current value of "Max-Forwards" = ' + str(hop) + ' [' + '-' * 20 + ']', '-' * 19)
print('-' * 80)
try:
# Do the HTTP request
opener = urllib2.build_opener(urllib2.HTTPHandler)
result = opener.open(request)
# Found something
if verbosity:
zprint('Status Code => HTTP 200: OK', '**')
# Analyse it
debug_and_parse(result)
# Not a 200 OK
except HTTPError, e:
if verbosity:
zprint('Status Code => ' + str(e), '**')
# Some heuristics
if e.code == 502:
zprint('HTTP 502 : Probably a reverse proxy', '++')
inc_score()
if e.code == 483:
zprint('HTTP 483 : Probably a reverse proxy (SIP ?)', '++')
inc_score()
# Analyse it
debug_and_parse(e)
# Network problem
except URLError, e:
zprint('Network problem !', '!!')
zprint('Reason : ' + str(e.reason), '!!')
break
############## REPORT ###################################
print('-' * 80)
zprint('Heuristic Report [' + '-' * 31 + ']', '-' * 27)
print('-' * 80)
# For each key
for k in global_data.keys():
string = k + ':\n'
previous = 'Undef'
# For each hop
ok = 0
for i in range(0, max_fwds):
# Try this key
try:
current = global_data[k][i]
# We got a value !
ok = 1
except KeyError:
current = 'Undef'
# Some heuristics
if previous != current and i > 0:
inc_score()
# Then add it to the current string
string = string + '\tHop #' + str(i) + " : " + current + '\n'
previous = current
# Display this key only if values were found
if ok:
print string
# Final score
if score == 0:
zprint('No reverse proxy', '--')
else:
zprint('Found a reverse proxy, score is ' + str(score), '++')
|
sharad1126/owtf
|
tools/discovery/web/traceroute/HTTP-Traceroute.py
|
Python
|
bsd-3-clause
| 8,095
|
# Screen dimensions
import threading
X_MAX = 320
Y_MAX = 240
OFFSET = 30
# TFT configuration
DC = 18
RST = 23
SPI_PORT = 0
SPI_DEVICE = 0
# for PIL
import Image, ImageFont, ImageDraw, textwrap, os
# TFT libraries
import Adafruit_ILI9341 as TFT
import Adafruit_GPIO as GPIO
import Adafruit_GPIO.SPI as SPI
class Lcd(object):
def __init__(self):
# Initialize display.
self.disp = TFT.ILI9341(DC, rst=RST, spi=SPI.SpiDev(SPI_PORT, SPI_DEVICE, max_speed_hz=64000000))
self.disp.begin()
self.current_dir = os.path.dirname(os.path.realpath(__file__))
self.message_timer = None
# load the fonts
self.font = ImageFont.truetype(self.current_dir + "/arial.ttf", 18)
self.font_large = ImageFont.truetype(self.current_dir + "/ttwpgott.ttf", 34)
def draw_menu(self, title, current_item, total_items):
image = Image.new("RGBA", (X_MAX, Y_MAX))
text_image, width, height = self.draw_centered_text(title, self.font_large)
image.paste(text_image, (0, Y_MAX / 2 - height / 2 - OFFSET), text_image)
text_image, width, height = self.draw_centered_text(
"(" + str(current_item + 1) + "/" + str(total_items) + ")", self.font)
image.paste(text_image, (X_MAX / 2 - width / 2, Y_MAX - OFFSET), text_image)
self.disp.display(image.rotate(90))
def draw_centered_text(self, text, font, fill=(255, 255, 255)):
# Create a new image with transparent background to store the text.
textimage = Image.new('RGBA', (X_MAX, Y_MAX), (0, 0, 0, 0))
textdraw = ImageDraw.Draw(textimage)
lines = textwrap.wrap(text, width=25)
y_text = 0
for line in lines:
text_width, text_height = font.getsize(line)
textdraw.text((X_MAX / 2 - text_width / 2, y_text), line, font=font, fill=fill)
y_text += text_height
return (textimage, X_MAX, y_text)
def message(self, message_string):
image = self._create_message(message_string)
self.disp.display(image.rotate(90))
def message2(self, primary_text, secondary_text):
image = Image.new("RGBA", (X_MAX, Y_MAX))
text_image, width, height = self.draw_centered_text(primary_text, self.font_large)
image.paste(text_image, (0, Y_MAX / 2 - height / 2 - OFFSET), text_image)
text_image, width, height = self.draw_centered_text(secondary_text, self.font)
image.paste(text_image, (0, Y_MAX - 70 - height/2), text_image)
self.disp.display(image.rotate(90))
def flash(self, message_string, callback, interval=3):
if self.message_timer is not None:
self.message_timer.cancel()
self.message_timer = None
image = self._create_message(message_string)
self.disp.display(image.rotate(90))
self.message_timer = threading.Timer(interval, callback)
self.message_timer.start()
def _create_message(self, message_string):
image = Image.new("RGBA", (X_MAX, Y_MAX))
text_image, width, height = self.draw_centered_text(message_string, self.font_large)
image.paste(text_image, (0, Y_MAX / 2 - height / 2 - OFFSET), text_image)
return image
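# A hypothetical smoke test (assumes the TFT is wired as configured above;
# the message text is illustrative):
if __name__ == "__main__":
    lcd = Lcd()
    lcd.message("Hello")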
|
andrewderekjackson/python_lcd_menu
|
lcd.py
|
Python
|
mit
| 3,241
|
import urllib2
import contextlib
# based on http://codereview.stackexchange.com/questions/23364/get-metadata-from-an-icecast-radio-stream
def parse_headers(response):
headers = {}
while True:
line = response.readline()
if line == '\r\n':
break # end of headers
if ':' in line:
key, value = line.split(':', 1)
headers[key] = value
return headers
def poll_radio():
request = urllib2.Request("http://stream.srg-ssr.ch/couleur3/mp3_128.m3u", headers={
'User-Agent': 'User-Agent: VLC/2.0.5 LibVLC/2.0.5',
'Icy-MetaData': '1',
'Range': 'bytes=0-',
})
    # the connection will be closed on exit from the with block
with contextlib.closing(urllib2.urlopen(request)) as response:
headers = parse_headers(response)
meta_interval = int(headers['icy-metaint'])
response.read(meta_interval) # throw away the data until the meta interval
length = ord(response.read(1)) * 16 # length is encoded in the stream
metadata = response.read(length)
print metadata
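# Minimal invocation sketch: running the module directly polls the stream
# once and prints the current metadata block.
if __name__ == '__main__':
    poll_radio()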
|
ebu/radiodns-plugit
|
RadioDns-PlugIt/channels/webstreamutils.py
|
Python
|
bsd-3-clause
| 1,103
|
import discord
import asyncio
import json
import logging
import sys
import commands
print(sys.version)
logger = logging.getLogger('discord')
logger.setLevel(logging.DEBUG)
handler = logging.FileHandler(filename='discord.log', encoding='utf-8', mode='w')
handler.setFormatter(logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s'))
logger.addHandler(handler)
fp_logininfo = open("logininfo.txt", "r")
logininfo = fp_logininfo.read().split("~") #reading login info from logininfo.txt formatted (email@email.com~password)
fp_logininfo.close()
client = discord.Client()
@client.event
@asyncio.coroutine #notifying console that bot is logged in
def on_ready():
print("Logged in as user: 「{}」".format(client.user.name))
default_status = "with herself"
print("Setting status default: {}".format(default_status))
yield from client.change_status(game=discord.Game(name=default_status))
@client.event
@asyncio.coroutine #on message received, execute this block
def on_message(msg):
# if msg.content.startswith("|") or msg.content.startswith("=") or msg.content.find("osu.ppy.sh/") != -1:
# yield from commands.runComm(msg, client)
if msg.author.id != "128878733074366464":
yield from commands.runComm(msg, client)
else:
return
print("Logging in...")
client.run(logininfo[0], logininfo[1])
|
SpiderNight/Aeos
|
UniChan/unichan.py
|
Python
|
mit
| 1,336
|
_DEFAULT_ALPHABET = 'acgturykmswbdhvnx-'
class _FastaEntry:
def __init__(self, position):
self.position = position
self.length = 0
def __str__(self):
return '({0},{1})'.format(self.position, self.length)
class FastaReader:
"""
Implementation of a reader of FASTA files.
"""
def __init__(self, path):
"""
Initialize a new instance of the FastaReader class by reading the file
with the specified path. Raise an IOError if the file does not parse
successfully.
path -- The path to the FASTA file.
"""
self.__current = None
self.__handle = open(path, 'r')
self.__entries = dict()
self.__path = path
self.__parse()
self.__names = tuple(sorted(self.__entries.keys()))
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.__handle.close()
return False
def __str__(self):
return str(self.__names)
@property
def current_sequence(self):
""" The name of the current sequence being read, or None. """
return self.__current
@property
def names(self):
""" The tuple of names for the sequences of the FASTA file. """
return self.__names
@property
def path(self):
""" The path to the FASTA file. """
return self.__path
def get_length(self, name):
"""
Return the number of symbols in the specified sequence. Raise a
LookupError if the sequence is not found.
name -- The name of the sequence.
"""
return self.__find(name).length
def read_symbol(self):
"""
Read and return the next symbol from the current sequence. If no more
symbols are available, return None.
"""
while True:
ch = self.__handle.read(1)
if not ch:
return None
if ch == '>':
self.__handle.seek(0, 2)
return None
if not ch.isspace():
return ch
def seek(self, name):
"""
Seek to the start of the specified sequence. Raise a LookupError if
the sequence does not exist.
name -- The name of the sequence.
"""
self.__seek(self.__find(name))
self.__current = name
def validate(self, name, alphabet=None):
"""
Validate the specified sequence contains only symbols in the specified
alphabet. If unspecified, use a default alphabet. If any symbol is
invalid, raise a RuntimeError. If the sequence is not found, raise a
LookupError.
name -- The name of the sequence to validate.
alphabet -- The alphabet, or None to use a default alphabet.
"""
entry = self.__find(name)
self.__validate(entry, alphabet)
def validate_all(self, alphabet=None):
"""
Validate all sequences contain only symbols in the specified alphabet.
If unspecified, use a default alphabet. If any symbol is invalid,
raise a RuntimeError.
alphabet -- The alphabet, or None to use a default alphabet.
"""
for name in self.__names:
self.validate(name, alphabet)
def __find(self, name):
if name not in self.__entries:
raise LookupError((
'The sequence name "{0}" does not exist in the FASTA file ' +
'"{1}"').format(name, self.__path))
return self.__entries[name]
def __parse(self):
entry = None
while True:
ch = self.__handle.read(1)
if not ch:
break
if ch.isspace():
continue
if ch == '>':
name = self.__handle.readline().strip()
if name in self.__entries:
                    raise IOError((
                        'Duplicate sequence name "{0}" in FASTA file ' +
                        '"{1}"').format(name, self.__path))
entry = _FastaEntry(self.__handle.tell())
self.__entries[name] = entry
continue
if entry is None:
raise IOError((
'Encountered symbol "{0}" before ">" in FASTA file ' +
'"{1}"').format(ch, self.__path))
entry.length += 1
self.__handle.seek(0, 2)
def __seek(self, entry):
self.__handle.seek(entry.position)
def __validate(self, entry, alphabet):
if alphabet is None:
alphabet = _DEFAULT_ALPHABET
flags = [False for _ in xrange(255)]
for ch in alphabet:
flags[ord(ch.lower())] = True
flags[ord(ch.upper())] = True
self.__seek(entry)
while True:
ch = self.read_symbol()
if not ch:
break
if not flags[ord(ch)]:
raise RuntimeError((
'Invalid symbol "{0}" found at offset {1} of FASTA ' +
'file "{2}" with alphabet "{3}"').format(
ch,
self.__handle.tell() - 1,
self.__path,
alphabet))
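# A hypothetical usage sketch (file and sequence names are illustrative, not
# from the source; the print statement follows the module's Python 2 style,
# matching the xrange call above):
if __name__ == '__main__':
    with FastaReader('example.fasta') as reader:
        name = reader.names[0]
        reader.validate(name)
        reader.seek(name)
        print name, reader.get_length(name)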
|
jade-cheng/Jocx
|
src/ziphmm/_fasta_reader.py
|
Python
|
gpl-2.0
| 5,277
|
from setuptools import setup, find_packages
setup(
name="workflowy.automation",
packages=find_packages(),
author="Luke Merrett",
description="Scripts for automating Workflowy tasks using Selenium",
license="MIT",
url="https://github.com/lukemerrett/Workflowy-Automation",
install_requires=['selenium']
)
|
lukemerrett/Workflowy-Automation
|
setup.py
|
Python
|
mit
| 333
|
import synapse.tests.utils as s_t_utils
import synapse.tools.cryo.list as s_cryolist
class CryoListTest(s_t_utils.SynTest):
async def test_cryolist(self):
async with self.getTestCryo() as cryo:
items = [(None, {'key': i}) for i in range(20)]
tank = await cryo.init('hehe')
await tank.puts(items)
cryourl = cryo.getLocalUrl()
argv = [cryourl]
retn, outp = await self.execToolMain(s_cryolist.main, argv)
self.eq(0, retn)
outp.expect(cryourl)
outp.expect('hehe: ')
outp.expect("'indx': 20,")
outp.expect("'entries': 20}")
|
vertexproject/synapse
|
synapse/tests/test_tools_cryo_list.py
|
Python
|
apache-2.0
| 671
|
import json
import urllib
from sqlalchemy import and_
from bottle import response, jinja2_template
from db.orm import Files, Hosts
from controllers.helpers import data_strap
from findex_common.utils import ArgValidate
from findex_common.bytes2human import bytes2human
class Documentation():
def __init__(self, cfg, db):
self.cfg = cfg
self.db = db
self.arg_validate = ArgValidate()
@data_strap
def docu(self, env):
return jinja2_template('main/documentation', env=env)
|
iksteen/findex-gui
|
controllers/views/documentation.py
|
Python
|
mit
| 518
|
class Register:
@property
def value(self):
raise NotImplementedError
@value.setter
def value(self, value):
raise NotImplementedError
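# A minimal concrete register sketch (not from the source): it satisfies the
# interface above by storing its value in a plain attribute. The 8-bit mask
# is an assumption, matching typical NES registers.
class Accumulator(Register):
    def __init__(self, initial=0):
        self._value = initial & 0xFF

    @property
    def value(self):
        return self._value

    @value.setter
    def value(self, value):
        self._value = value & 0xFF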
|
Hexadorsimal/pynes
|
nes/processors/registers/register.py
|
Python
|
mit
| 166
|
#!/usr/bin/env python
"""
================================================
ABElectronics ServoPi 16-Channel PWM Servo Driver
Requires smbus2 or python smbus to be installed
================================================
"""
try:
from smbus2 import SMBus
except ImportError:
try:
from smbus import SMBus
except ImportError:
raise ImportError("python-smbus or smbus2 not found")
import re
import time
import math
import platform
import RPi.GPIO as GPIO
class PWM(object):
"""
PWM class for controlling the PCA9685 PWM IC
"""
# define registers values from datasheet
__MODE1 = 0x00
__MODE2 = 0x01
__SUBADR1 = 0x02
__SUBADR2 = 0x03
__SUBADR3 = 0x04
__ALLCALLADR = 0x05
__LED0_ON_L = 0x06
__LED0_ON_H = 0x07
__LED0_OFF_L = 0x08
__LED0_OFF_H = 0x09
__ALL_LED_ON_L = 0xFA
__ALL_LED_ON_H = 0xFB
__ALL_LED_OFF_L = 0xFC
__ALL_LED_OFF_H = 0xFD
__PRE_SCALE = 0xFE
# define mode bits
__MODE1_EXTCLK = 6 # use external clock
__MODE1_SLEEP = 4 # sleep mode
__MODE1_ALLCALL = 0 # all call address
__MODE2_INVRT = 4 # invert output
__MODE2_OCH = 3 # output type
__MODE2_OUTDRV = 2 # output type
__MODE2_OUTNE1 = 0 # output mode when not enabled
# local variables
__mode1_default = 0x00
__mode2_default = 0x0C
__oe_pin = 7
__address = 0x40
__bus = None
# local methods
@staticmethod
def __get_smbus(bus):
"""
Internal method for getting an instance of the i2c bus
:param bus: I2C bus number. If value is None the class will try to
find the i2c bus automatically using the device name
:type bus: int
:return: i2c bus for target device
:rtype: SMBus
:raises IOError: Could not open the i2c bus
"""
i2c__bus = 1
if bus is not None:
i2c__bus = bus
else:
# detect the device that is being used
device = platform.uname()[1]
if device == "orangepione": # orange pi one
i2c__bus = 0
elif device == "orangepiplus": # orange pi plus
i2c__bus = 0
elif device == "orangepipcplus": # orange pi pc plus
i2c__bus = 0
elif device == "linaro-alip": # Asus Tinker Board
i2c__bus = 1
elif device == "bpi-m2z": # Banana Pi BPI M2 Zero Ubuntu
i2c__bus = 0
elif device == "bpi-iot-ros-ai": # Banana Pi BPI M2 Zero Raspbian
i2c__bus = 0
elif device == "raspberrypi": # running on raspberry pi
# detect i2C port number and assign to i2c__bus
for line in open('/proc/cpuinfo').readlines():
model = re.match('(.*?)\\s*:\\s*(.*)', line)
if model:
(name, value) = (model.group(1), model.group(2))
if name == "Revision":
if value[-4:] in ('0002', '0003'):
i2c__bus = 0 # original model A or B
else:
i2c__bus = 1 # later models
break
try:
return SMBus(i2c__bus)
except IOError:
            raise IOError('Could not open the i2c bus')
@staticmethod
def __checkbit(byte, bit):
"""
Internal method for reading the value of a single bit within a byte
:param byte: input value
:type byte: int
:param bit: location within value to check
:type bit: int
:return: value of selected bit, 0 or 1
:rtype: int
"""
value = 0
if byte & (1 << bit):
value = 1
return value
def __write(self, reg, value):
"""
Internal method to write data to I2C bus
:param value: value to write
:type value: int
:return: IOError
:rtype: IOError
"""
try:
self.__bus.write_byte_data(self.__address, reg, value)
except IOError as err:
return err
def __read(self, reg):
"""
Internal method to read data from I2C bus
:return: IOError
:rtype: IOError
"""
try:
result = self.__bus.read_byte_data(self.__address, reg)
return result
except IOError as err:
return err
# public methods
def __init__(self, address=0x40, bus=None):
"""
init object with i2c address, default is 0x40 for ServoPi board
:param address: device i2c address, defaults to 0x40
:type address: int, optional
:param bus: I2C bus number. If no value is set the class will try to
find the i2c bus automatically using the device name
:type bus: int, optional
"""
self.__address = address
self.__bus = self.__get_smbus(bus)
self.__write(self.__MODE1, self.__mode1_default)
self.__write(self.__MODE2, self.__mode2_default)
GPIO.setwarnings(False)
mode = GPIO.getmode() # check if the GPIO mode has been set
if (mode == 10): # Mode set to GPIO.BOARD
self.__oe_pin = 7
elif (mode == 11): # Mode set to GPIO.BCM
self.__oe_pin = 4
else: # Mode not set
GPIO.setmode(GPIO.BOARD)
self.__oe_pin = 7
GPIO.setup(self.__oe_pin, GPIO.OUT)
def set_pwm_freq(self, freq, calibration=0):
"""
Set the PWM frequency
:param freq: 40 to 1000
:type freq: int
:param calibration: optional integer value to offset oscillator errors.
defaults to 0
:type calibration: int, optional
:raises ValueError: set_pwm_freq: freq out of range
"""
if freq < 40 or freq > 1000:
raise ValueError('set_pwm_freq: freq out of range')
scaleval = 25000000.0 # 25MHz
scaleval /= 4096.0 # 12-bit
scaleval /= float(freq)
scaleval -= 1.0
prescale = math.floor(scaleval + 0.5)
prescale = prescale + calibration
oldmode = self.__read(self.__MODE1)
newmode = (oldmode & 0x7F) | 0x10
self.__write(self.__MODE1, newmode)
self.__write(self.__PRE_SCALE, int(prescale))
self.__write(self.__MODE1, oldmode)
time.sleep(0.005)
self.__write(self.__MODE1, oldmode | 0x80)
def set_pwm(self, channel, on_time, off_time):
"""
Set the output on a single channel
:param channel: 1 to 16
:type channel: int
:param on_time: 0 to 4095
:type on_time: int
:param off_time: 0 to 4095
:type off_time: int
:raises ValueError: set_pwm: channel out of range
:raises ValueError: set_pwm: on_time out of range
:raises ValueError: set_pwm: off_time out of range
:raises ValueError: set_pwm: on_time greater than off_time
"""
if channel < 1 or channel > 16:
raise ValueError('set_pwm: channel out of range')
if on_time < 0 or on_time > 4095:
raise ValueError('set_pwm: on_time out of range')
if off_time < 0 or off_time > 4095:
raise ValueError('set_pwm: off_time out of range')
if on_time > off_time:
raise ValueError('set_pwm: on_time greater than off_time')
channel = channel - 1
self.__write(self.__LED0_ON_L + 4 * channel,
on_time & 0xFF)
self.__write(self.__LED0_ON_H + 4 * channel, on_time >> 8)
self.__write(self.__LED0_OFF_L + 4 * channel,
off_time & 0xFF)
self.__write(self.__LED0_OFF_H + 4 * channel,
off_time >> 8)
def set_pwm_on_time(self, channel, on_time):
"""
Set the output on time on a single channel
:param channel: 1 to 16
:type channel: int
:param on_time: 0 to 4095
:type on_time: int
:raises ValueError: set_pwm_on_time: channel out of range
:raises ValueError: set_pwm_on_time: on_time out of range
"""
if channel < 1 or channel > 16:
raise ValueError('set_pwm_on_time: channel out of range')
if on_time < 0 or on_time > 4095:
raise ValueError('set_pwm_on_time: on_time out of range')
channel = channel - 1
self.__write(self.__LED0_ON_L + 4 * channel,
on_time & 0xFF)
self.__write(self.__LED0_ON_H + 4 * channel, on_time >> 8)
def set_pwm_off_time(self, channel, off_time):
"""
Set the output off time on a single channel
:param channel: 1 to 16
:type channel: int
:param off_time: 0 to 4095
:type off_time: int
:raises ValueError: set_pwm_off_time: channel out of range
:raises ValueError: set_pwm_off_time: off_time out of range
"""
if channel < 1 or channel > 16:
raise ValueError('set_pwm_off_time: channel out of range')
if off_time < 0 or off_time > 4095:
raise ValueError('set_pwm_off_time: off_time out of range')
channel = channel - 1
self.__write(self.__LED0_OFF_L + 4 * channel,
off_time & 0xFF)
self.__write(self.__LED0_OFF_H + 4 * channel,
off_time >> 8)
def get_pwm_on_time(self, channel):
"""
Get the on time for the selected channel
:param channel: 1 to 16
:type channel: int
:raises ValueError: get_pwm_on_time: channel out of range
:return: 0 to 4095
:rtype: int
"""
if channel < 1 or channel > 16:
raise ValueError('get_pwm_on_time: channel out of range')
channel = channel - 1
lowbyte = self.__read(self.__LED0_ON_L + 4 * channel)
highbyte = self.__read(self.__LED0_ON_H + 4 * channel)
value = lowbyte | highbyte << 8
return value
def get_pwm_off_time(self, channel):
"""
Get the on time for the selected channel
:param channel: 1 to 16
:type channel: int
:raises ValueError: get_pwm_off_time: channel out of range
:return: 0 to 4095
:rtype: int
"""
if channel < 1 or channel > 16:
raise ValueError('get_pwm_off_time: channel out of range')
channel = channel - 1
lowbyte = self.__read(self.__LED0_OFF_L + 4 * channel)
highbyte = self.__read(self.__LED0_OFF_H + 4 * channel)
value = lowbyte | highbyte << 8
return value
def set_all_pwm(self, on_time, off_time):
"""
Set the output on all channels
:param on_time: 0 to 4095
:type on_time: int
:param off_time: 0 to 4095
:type off_time: int
:raises ValueError: set_all_pwm: on_time out of range
:raises ValueError: set_all_pwm: off_time out of range
:raises ValueError: set_all_pwm: on_time + off_time
must not exceed 4095
"""
if on_time < 0 or on_time > 4095:
raise ValueError('set_all_pwm: on_time out of range')
if off_time < 0 or off_time > 4095:
raise ValueError('set_all_pwm: off_time out of range')
if (on_time + off_time) > 4095:
raise ValueError('set_all_pwm: on_time + off_time must not \
exceed 4095')
self.__write(self.__ALL_LED_ON_L, on_time & 0xFF)
self.__write(self.__ALL_LED_ON_H, on_time >> 8)
self.__write(self.__ALL_LED_OFF_L, off_time & 0xFF)
self.__write(self.__ALL_LED_OFF_H, off_time >> 8)
def output_disable(self):
"""
Disable output via OE pin
:raises IOError: Failed to write to GPIO pin
"""
try:
GPIO.output(self.__oe_pin, True)
except IOError:
raise IOError("Failed to write to GPIO pin")
def output_enable(self):
"""
Enable output via OE pin
:raises IOError: Failed to write to GPIO pin
"""
try:
GPIO.output(self.__oe_pin, False)
except IOError:
raise IOError("Failed to write to GPIO pin")
def set_allcall_address(self, i2caddress):
"""
Set the I2C address for the All Call function
:param i2caddress: I2C address for the All Call function
:type i2caddress: int
"""
oldmode = self.__read(self.__MODE1)
newmode = oldmode | (1 << self.__MODE1_ALLCALL)
self.__write(self.__MODE1, newmode)
self.__write(self.__ALLCALLADR, i2caddress << 1)
def enable_allcall_address(self):
"""
Enable the I2C address for the All Call function
"""
oldmode = self.__read(self.__MODE1)
newmode = oldmode | (1 << self.__MODE1_ALLCALL)
self.__write(self.__MODE1, newmode)
def disable_allcall_address(self):
"""
Disable the I2C address for the All Call function
"""
oldmode = self.__read(self.__MODE1)
newmode = oldmode & ~(1 << self.__MODE1_ALLCALL)
self.__write(self.__MODE1, newmode)
def sleep(self):
"""
Put the device into a sleep state
"""
oldmode = self.__read(self.__MODE1)
newmode = oldmode | (1 << self.__MODE1_SLEEP)
self.__write(self.__MODE1, newmode)
def wake(self):
"""
Wake the device from its sleep state
"""
oldmode = self.__read(self.__MODE1)
newmode = oldmode & ~(1 << self.__MODE1_SLEEP)
self.__write(self.__MODE1, newmode)
def is_sleeping(self):
"""
Check the sleep status of the device
:return: True or False
:rtype: bool
"""
regval = self.__read(self.__MODE1)
if (self.__checkbit(regval, self.__MODE1_SLEEP)):
return True
else:
return False
def invert_output(self, state):
"""
Invert the PWM output on all channels
:param state: True = inverted, False = non-inverted
:type state: bool
"""
if state is True:
oldmode = self.__read(self.__MODE2)
newmode = oldmode | (1 << self.__MODE2_INVRT)
self.__write(self.__MODE2, newmode)
else:
oldmode = self.__read(self.__MODE2)
newmode = oldmode & ~(1 << self.__MODE2_INVRT)
self.__write(self.__MODE2, newmode)
class Servo(object):
"""
Servo class for controlling RC servos with the Servo PWM Pi Zero
"""
__pwm = None
__position = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
__lowpos = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
__highpos = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
__useoffset = False
__offset = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
__frequency = 50
# local methods
def __refresh_channels(self):
"""
Internal method for refreshing the servo positions
"""
for i in range(0, 16):
            if self.__position[i] == 0:
self.__pwm.set_pwm(i+1, 0, 0)
else:
if self.__useoffset is True:
self.__pwm.set_pwm(i+1, self.__offset[i],
self.__position[i] + self.__offset[i])
else:
self.__pwm.set_pwm(i+1, 0, self.__position[i])
def __calculate_offsets(self):
"""
Internal method for calculating the start positions
to stagger the servo position pulses
"""
x = 0
for i in range(0, 16):
x = x + self.__highpos[i]
if x > 4095 - self.__highpos[i]:
x = self.__highpos[0] / 2
self.__offset[i] = x
self.__refresh_channels()
# public methods
def __init__(self, address=0x40, low_limit=1.0,
high_limit=2.0, reset=True, bus=None):
"""
Initialise the Servo object
:param address: i2c address for the ServoPi board, defaults to 0x40
:type address: int, optional
:param low_limit: Pulse length in milliseconds for the
lower servo limit, defaults to 1.0
:type low_limit: float, optional
:param high_limit: Pulse length in milliseconds for the
upper servo limit, defaults to 2.0
:type high_limit: float, optional
:param reset: True = reset the controller and turn off all channels.
False = keep existing servo positions and frequency.
defaults to True
:type reset: bool, optional
:param bus: I2C bus number. If no value is set the class will try to
find the i2c bus automatically using the device name
:type bus: int, optional
"""
self.__pwm = PWM(address, bus)
self.set_low_limit(low_limit)
self.set_high_limit(high_limit)
if reset is True:
self.set_frequency(50)
self.__calculate_offsets() # reset the offset values
else:
# get the on and off times from the pwm controller
for i in range(0, 16):
self.__offset[i] = self.__pwm.get_pwm_on_time(i + 1)
self.__position[i] = self.__pwm.get_pwm_off_time(i + 1) - self.__offset[i]
def move(self, channel, position, steps=250):
"""
Set the servo position
:param channel: 1 to 16
:type channel: int
:param position: value between 0 and the maximum number of steps.
:type position: int
        :param steps: The number of steps between the low and high limits.
This can be any number between 0 and 4095.
On a typical RC servo a step value of 250 is recommended.
defaults to 250
:type steps: int, optional
:raises ValueError: move: channel out of range
:raises ValueError: move: steps out of range
:raises ValueError: move: position out of range
"""
if channel < 1 or channel > 16:
raise ValueError('move: channel out of range')
if steps < 0 or steps > 4095:
raise ValueError('move: steps out of range')
if position >= 0 and position <= steps:
high = float(self.__highpos[channel - 1])
low = float(self.__lowpos[channel - 1])
pwm_value = int((((high - low) / float(steps)) *
float(position)) + low)
self.__position[channel - 1] = pwm_value
if self.__useoffset:
self.__pwm.set_pwm(channel, self.__offset[channel - 1],
pwm_value + self.__offset[channel - 1])
else:
self.__pwm.set_pwm(channel, 0, pwm_value)
else:
raise ValueError('move: position out of range')
def get_position(self, channel, steps=250):
"""
Get the servo position
:param channel: 1 to 16
:type channel: int
        :param steps: The number of steps between the low and high limits.
This can be any number between 0 and 4095.
On a typical RC servo a step value of 250 is recommended.
defaults to 250
:type steps: int, optional
:raises ValueError: get_position: channel out of range
:return: position - value between 0 and the maximum number of steps.
Due to rounding errors when calculating the position, the
returned value may not be exactly the same as the set value.
:rtype: int
"""
if channel < 1 or channel > 16:
raise ValueError('get_position: channel out of range')
pwm_value = float(self.__pwm.get_pwm_off_time(channel))
if self.__useoffset:
pwm_value = pwm_value - self.__offset[channel - 1]
steps = float(steps)
high = float(self.__highpos[channel - 1])
low = float(self.__lowpos[channel - 1])
position = int(math.ceil((steps * (pwm_value - low)) / (high - low)))
return position
def set_low_limit(self, low_limit, channel=0):
"""
Set the pulse length for the lower servo limits. Typically around 1ms.
Warning: Setting the pulse limit below 1ms may damage your servo.
:param low_limit: Pulse length in milliseconds for the lower limit.
:type low_limit: float
:param channel: The channel for which the low limit will be set.
If this value is omitted the low limit will be
set for all channels., defaults to 0
:type channel: int, optional
:raises ValueError: set_low_limit: channel out of range
:raises ValueError: set_low_limit: low limit out of range
"""
if channel < 0 or channel > 16:
raise ValueError('set_low_limit: channel out of range')
lowpos = int(4096.0 * (low_limit / 1000.0) * self.__frequency)
if (lowpos < 0) or (lowpos > 4095):
raise ValueError('set_low_limit: low limit out of range')
if channel >= 1 and channel <= 16:
# update the specified channel
self.__lowpos[channel - 1] = lowpos
else:
# no channel specified so update all channels
for i in range(16):
self.__lowpos[i] = lowpos
def set_high_limit(self, high_limit, channel=0):
"""
Set the pulse length for the upper servo limits. Typically around 2ms.
Warning: Setting the pulse limit above 2ms may damage your servo.
:param high_limit: Pulse length in milliseconds for the upper limit.
:type high_limit: float
:param channel: The channel for which the upper limit will be set.
If this value is omitted the upper limit will be
set for all channels., defaults to 0
:type channel: int, optional
:raises ValueError: set_high_limit: channel out of range
:raises ValueError: set_high_limit: high limit out of range
"""
if channel < 0 or channel > 16:
raise ValueError('set_high_limit: channel out of range')
highpos = int(4096.0 * (high_limit / 1000.0) * self.__frequency)
if (highpos < 0) or (highpos > 4095):
raise ValueError('set_high_limit: high limit out of range')
if channel >= 1 and channel <= 16:
# update the specified channel
self.__highpos[channel - 1] = highpos
else:
# no channel specified so update all channels
for i in range(16):
self.__highpos[i] = highpos
def set_frequency(self, freq, calibration=0):
"""
Set the PWM frequency
:param freq: 40 to 1000
:type freq: int
:param calibration: optional integer value to offset oscillator errors.
defaults to 0
:type calibration: int, optional
"""
self.__pwm.set_pwm_freq(freq, calibration)
self.__frequency = freq
def output_disable(self):
"""
Disable output via OE pin
:raises IOError: Failed to write to GPIO pin
"""
try:
self.__pwm.output_disable()
except IOError:
raise IOError("Failed to write to GPIO pin")
def output_enable(self):
"""
Enable output via OE pin
:raises IOError: Failed to write to GPIO pin
"""
try:
self.__pwm.output_enable()
self.__calculate_offsets() # update the offset values
except IOError:
raise IOError("Failed to write to GPIO pin")
def offset_enable(self):
"""
Enable pulse offsets.
This will set servo pulses to be staggered across the channels
to reduce surges in current draw
"""
self.__useoffset = True
self.__calculate_offsets() # update the offset values
def offset_disable(self):
"""
Disable pulse offsets.
This will set all servo pulses to start at the same time.
"""
self.__useoffset = False
self.__refresh_channels() # refresh the channel locations
def sleep(self):
"""
Put the device into a sleep state
"""
self.__pwm.sleep()
def wake(self):
"""
Wake the device from its sleep state
"""
self.__pwm.wake()
def is_sleeping(self):
"""
Check the sleep status of the device
"""
return self.__pwm.is_sleeping()
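# A hypothetical usage sketch (channel and position values are illustrative;
# assumes a ServoPi board at the default 0x40 address):
if __name__ == "__main__":
    servo = Servo(0x40)
    servo.output_enable()
    servo.move(1, 125)   # centre of the default 250-step range on channel 1
    time.sleep(1)
    servo.output_disable()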
|
abelectronicsuk/ABElectronics_Python_Libraries
|
ServoPi/ServoPi.py
|
Python
|
gpl-2.0
| 25,007
|
import json
import os
import avasdk
from zipfile import ZipFile, BadZipFile
from avasdk.plugins.manifest import validate_manifest
from avasdk.plugins.hasher import hash_plugin
from django import forms
from django.core.validators import ValidationError
from .validators import ZipArchiveValidator
class PluginArchiveField(forms.FileField):
default_validators = [ZipArchiveValidator()]
label = 'Plugin .zip'
def get_prefix(self, archive):
files = archive.namelist()
return os.path.commonpath(files)
def get_manifest(self, archive):
try:
with ZipFile(archive.temporary_file_path()) as plugin:
prefix = self.get_prefix(plugin)
prefix = prefix + '/' if len(prefix) else ''
with plugin.open('{}manifest.json'.format(prefix)) as myfile:
manifest = json.loads(myfile.read())
validate_manifest(manifest)
return manifest
except BadZipFile:
raise ValidationError('Bad .zip format')
except FileNotFoundError:
raise ValidationError('Error with upload, please try again')
except KeyError:
raise ValidationError('No manifest.json found in archive')
except json.JSONDecodeError:
raise ValidationError('Error with manifest.json, bad Json Format')
except avasdk.exceptions.ValidationError as e:
raise ValidationError('Error in manifest.json ({})'.format(e))
def get_readme(self, archive):
try:
with ZipFile(archive.temporary_file_path()) as plugin:
prefix = self.get_prefix(plugin)
prefix = prefix + '/' if len(prefix) else ''
                with plugin.open('{}README.md'.format(prefix)) as myfile:
readme = myfile.read()
return readme
except FileNotFoundError:
raise ValidationError('Error with upload, please try again')
except KeyError:
return None
def clean(self, data, initial=None):
f = super().clean(data, initial)
manifest = self.get_manifest(f)
readme = self.get_readme(f)
return {
'zipfile': f,
'manifest': manifest,
'readme': readme,
'checksum': hash_plugin(f.temporary_file_path()),
}
class UploadPluginForm(forms.Form):
archive = PluginArchiveField()
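# A hypothetical view-side sketch (the request handling is illustrative, not
# from the source):
#
#     form = UploadPluginForm(request.POST, request.FILES)
#     if form.is_valid():
#         data = form.cleaned_data['archive']
#         # data carries 'zipfile', 'manifest', 'readme' and 'checksum'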
|
ava-project/ava-website
|
website/apps/plugins/forms.py
|
Python
|
mit
| 2,440
|
"""
This script shows how to get all tickets for a project and write ticket
data to a CSV file. For each ticket, the CSV also includes the initial ticket note.
The username and key are saved in an INI file in ~/.codebase_secrets.ini:
[api]
username = example/alice
key = 123abc456def789ghi
Use the script like this:
$ python docs/example/export_tickets_with_notes.py my-project > tickets.csv
"""
import csv
import datetime
import sys
import codebase
import codebase.utils
def main(project_slug):
client = codebase.Client.with_secrets('~/.codebase_secrets.ini')
columns = [
'ticket_id',
'summary',
'assignee',
'status',
'priority',
'created_at',
'updated_at',
'note',
]
writer = csv.DictWriter(sys.stdout, columns, extrasaction='ignore')
writer.writeheader()
for idx, ticket in enumerate(client.get_tickets(project_slug), start=1):
row = dict(ticket)
m = '{idx: 4} - {row[ticket_id]} {row[summary]}\n'.format(idx=idx, row=row)
sys.stderr.write(m)
row['status'] = ticket['status']['name']
row['priority'] = ticket['priority']['name']
notes = client.get_ticket_notes(project_slug, ticket['ticket_id'])
first_note = next(notes, {'content': u''})
row['note'] = first_note['content']
row = codebase.utils.encode_dict(row)
writer.writerow(row)
if __name__ == '__main__':
project_slug = sys.argv[1]
main(project_slug)
|
davidwtbuxton/pycodebase
|
docs/examples/export_tickets_with_notes.py
|
Python
|
mit
| 1,521
|
#!/usr/bin/env python3
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import ipaddress
import struct
from binascii import hexlify
from enum import IntEnum
import common
class TlvType(IntEnum):
TARGET_EID = 0
MAC_EXTENDED_ADDRESS = 1
RLOC16 = 2
ML_EID = 3
STATUS = 4
TIME_SINCE_LAST_TRANSACTION = 6
ROUTER_MASK = 7
ND_OPTION = 8
ND_DATA = 9
THREAD_NETWORK_DATA = 10
MLE_ROUTING = 11
IPv6_ADDRESSES = 14
XTAL_ACCURACY = 254
class StatusValues(IntEnum):
SUCCESS = 0
NO_ADDRESS_AVAILABLE = 1
TOO_FEW_ROUTERS = 2
HAVE_CHILD_ID_REQUEST = 3
PARENT_PARTITION_CHANGE = 4
class TargetEid(object):
def __init__(self, eid):
self._eid = eid
@property
def eid(self):
return self._eid
def __eq__(self, other):
common.expect_the_same_class(self, other)
return self.eid == other.eid
def __repr__(self):
return "TargetEid(eid={})".format(hexlify(self.eid))
class TargetEidFactory(object):
def parse(self, data, message_info):
eid = bytearray(data.read(16))
return TargetEid(eid)
class MacExtendedAddress(object):
def __init__(self, mac_address):
self._mac_address = mac_address
@property
def mac_address(self):
return self._mac_address
def __eq__(self, other):
common.expect_the_same_class(self, other)
return self.mac_address == other.mac_address
def __repr__(self):
return "MacExtendedAddress(mac_address={})".format(hexlify(self.mac_address))
class MacExtendedAddressFactory(object):
def parse(self, data, message_info):
mac_address = bytearray(data.read(8))
return MacExtendedAddress(mac_address)
class Rloc16(object):
def __init__(self, rloc16):
self._rloc16 = rloc16
@property
def rloc16(self):
return self._rloc16
def __eq__(self, other):
common.expect_the_same_class(self, other)
return self.rloc16 == other.rloc16
def __repr__(self):
return "Rloc16(rloc16={})".format(hex(self.rloc16))
class Rloc16Factory(object):
def parse(self, data, message_info):
rloc16 = struct.unpack(">H", data.read(2))[0]
return Rloc16(rloc16)
class MlEid(object):
def __init__(self, ml_eid):
self._ml_eid = ml_eid
@property
def ml_eid(self):
return self._ml_eid
def __eq__(self, other):
common.expect_the_same_class(self, other)
return self.ml_eid == other.ml_eid
def __repr__(self):
return "MlEid(ml_eid={})".format(hexlify(self.ml_eid))
class MlEidFactory(object):
def parse(self, data, message_info):
ml_eid = bytearray(data.read(8))
return MlEid(ml_eid)
class Status(object):
def __init__(self, status):
self._status = status
@property
def status(self):
return self._status
def __eq__(self, other):
common.expect_the_same_class(self, other)
return self.status == other.status
def __repr__(self):
return "Status(status={})".format(self.status)
class StatusFactory(object):
def parse(self, data, message_info):
status = StatusValues(ord(data.read(1)))
return Status(status)
class TimeSinceLastTransaction(object):
def __init__(self, seconds):
self._seconds = seconds
@property
def seconds(self):
return self._seconds
def __eq__(self, other):
common.expect_the_same_class(self, other)
return self.seconds == other.seconds
def __repr__(self):
return "TimeSinceLastTransaction(seconds={})".format(self.seconds)
class TimeSinceLastTransactionFactory(object):
def parse(self, data, message_info):
seconds = struct.unpack(">L", data.read(4))[0]
return TimeSinceLastTransaction(seconds)
class RouterMask(object):
def __init__(self, id_sequence, router_id_mask):
self._id_sequence = id_sequence
self._router_id_mask = router_id_mask
@property
def id_sequence(self):
return self._id_sequence
@property
def router_id_mask(self):
return self._router_id_mask
def __eq__(self, other):
common.expect_the_same_class(self, other)
return (self.id_sequence == other.id_sequence and self.router_id_mask == other.router_id_mask)
def __repr__(self):
return "RouterMask(id_sequence={}, router_id_mask={})".format(self.id_sequence, hex(self.router_id_mask))
class RouterMaskFactory(object):
def parse(self, data, message_info):
id_sequence = ord(data.read(1))
router_id_mask = struct.unpack(">Q", data.read(8))[0]
return RouterMask(id_sequence, router_id_mask)
class NdOption(object):
def __init__(self, options):
self._options = options
@property
def options(self):
return self._options
def __eq__(self, other):
common.expect_the_same_class(self, other)
return self.options == other.options
def __repr__(self):
return "NdOption(options=[{}])".format(", ".join([str(opt) for opt in self.options]))
class NdOptionFactory(object):
def parse(self, data, message_info):
options = [opt for opt in bytearray(data.read())]
return NdOption(options)
class NdData(object):
# TODO: Not implemented yet
pass
class NdDataFactory(object):
# TODO: Not implemented yet
def parse(self, data, message_info):
raise NotImplementedError("TODO: Not implemented yet")
class XtalAccuracy:
# TODO: Not implemented yet
def __init__(self):
print("XtalAccuracy is not implemented yet.")
class XtalAccuracyFactory:
def parse(self, data, message_info):
return XtalAccuracy()
class ThreadNetworkData(object):
def __init__(self, tlvs):
self._tlvs = tlvs
@property
def tlvs(self):
return self._tlvs
def __eq__(self, other):
common.expect_the_same_class(self, other)
return self.tlvs == other.tlvs
def __repr__(self):
return "ThreadNetworkData(tlvs=[{}])".format(", ".join([str(tlv) for tlv in self.tlvs]))
class ThreadNetworkDataFactory(object):
def __init__(self, network_data_tlvs_factory):
self._network_data_tlvs_factory = network_data_tlvs_factory
def parse(self, data, message_info):
tlvs = self._network_data_tlvs_factory.parse(data, message_info)
return ThreadNetworkData(tlvs)
class IPv6Addresses(list):
pass
class IPv6AddressesFactory(object):
def parse(self, data, message_info):
data = bytes(data.read())
assert len(data) % 16 == 0, data
addrs = IPv6Addresses()
for i in range(0, len(data), 16):
addr = ipaddress.IPv6Address(data[i:i + 16])
addrs.append(addr)
return addrs
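# Illustrative usage sketch (not part of the original module): the factory
# consumes the remaining stream as concatenated 16-byte addresses.
#
#   import io
#   raw = (ipaddress.IPv6Address('fd00::1').packed +
#          ipaddress.IPv6Address('fd00::2').packed)
#   addrs = IPv6AddressesFactory().parse(io.BytesIO(raw), None)
#   assert addrs == [ipaddress.IPv6Address('fd00::1'),
#                    ipaddress.IPv6Address('fd00::2')]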
|
bukepo/openthread
|
tests/scripts/thread-cert/network_layer.py
|
Python
|
bsd-3-clause
| 8,406
|
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Copyright 2012 Jens Hoffmann (hoffmaje)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
from django.conf.urls.defaults import patterns, include, url
urlpatterns = patterns('',
url(r'^$', 'layla.main.views.home'),
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', 'django.contrib.auth.views.logout'),
)
|
hoffmaje/layla
|
layla/main/urls.py
|
Python
|
agpl-3.0
| 413
|
import pkg_resources
__version__ = pkg_resources.get_distribution("pinax-testimonials").version
|
pinax/pinax-testimonials
|
pinax/testimonials/__init__.py
|
Python
|
mit
| 97
|
# -*- encoding: utf-8 -*-
"""
Reference: https://dev.twitch.tv/docs/api/reference
Copyright (C) 2016-2019 script.module.python.twitch
This file is part of script.module.python.twitch
SPDX-License-Identifier: GPL-3.0-only
See LICENSES/GPL-3.0-only for more information.
"""
from ... import keys
from ...api.parameters import Cursor, Language, IntRange, ItemCount
from ... import methods
from ...queries import HelixQuery as Qry
from ...queries import query
# required scope: none
@query
def get_streams(game_id=list(), user_id=list(),
user_login=list(), language=list(), after='MA==',
before='MA==', first=20, use_app_token=False):
q = Qry('streams', use_app_token=use_app_token)
q.add_param(keys.AFTER, Cursor.validate(after), 'MA==')
q.add_param(keys.BEFORE, Cursor.validate(before), 'MA==')
q.add_param(keys.FIRST, IntRange(1, 100).validate(first), 20)
q.add_param(keys.GAME_ID, ItemCount().validate(game_id), list())
q.add_param(keys.USER_ID, ItemCount().validate(user_id), list())
q.add_param(keys.USER_LOGIN, ItemCount().validate(user_login), list())
if isinstance(language, list):
_language = [lang for lang in language if lang in Language.valid()]
q.add_param(keys.LANGUAGE, ItemCount().validate(_language), list())
else:
q.add_param(keys.LANGUAGE, Language.validate(language), '')
return q
# required scope: none
@query
def get_metadata(game_id=list(), user_id=list(),
user_login=list(), language=list(), after='MA==',
before='MA==', first=20, use_app_token=False):
q = Qry('streams/metadata', use_app_token=use_app_token)
q.add_param(keys.AFTER, Cursor.validate(after), 'MA==')
q.add_param(keys.BEFORE, Cursor.validate(before), 'MA==')
q.add_param(keys.FIRST, IntRange(1, 100).validate(first), 20)
q.add_param(keys.GAME_ID, ItemCount().validate(game_id), list())
q.add_param(keys.USER_ID, ItemCount().validate(user_id), list())
q.add_param(keys.USER_LOGIN, ItemCount().validate(user_login), list())
if isinstance(language, list):
_language = [lang for lang in language if lang in Language.valid()]
q.add_param(keys.LANGUAGE, ItemCount().validate(_language), list())
else:
q.add_param(keys.LANGUAGE, Language.validate(language), '')
return q
# required scope: user:edit:broadcast
@query
def create_stream_marker(user_id, description=''):
q = Qry('streams/markers', use_app_token=False, method=methods.POST)
q.add_param(keys.USER_ID, user_id)
q.add_param(keys.DESCRIPTION, description, '')
return q
# required scope: user:read:broadcast
@query
def get_stream_markers(user_id, video_id, after='MA==', before='MA==', first=20):
q = Qry('streams/markers', use_app_token=False, method=methods.GET)
q.add_param(keys.USER_ID, user_id)
q.add_param(keys.VIDEO_ID, video_id)
q.add_param(keys.AFTER, Cursor.validate(after), 'MA==')
q.add_param(keys.BEFORE, Cursor.validate(before), 'MA==')
q.add_param(keys.FIRST, IntRange(1, 100).validate(first), 20)
return q
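# Illustrative call sketch (not part of the original module), assuming the
# surrounding library is configured with valid Twitch credentials. Cursor
# parameters default to 'MA==' (base64 for "0"), and 'first' is validated
# into the 1-100 range by IntRange before being added to the query.
#
#   get_streams(game_id=['33214'], language='en', first=50)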
|
MrSprigster/script.module.python.twitch
|
resources/lib/twitch/api/helix/streams.py
|
Python
|
gpl-3.0
| 3,132
|
"""
Responsible for rendering the main in game menu.
"""
import tcod as libtcod
import CreatureRogue.settings as settings
class GameMenuRenderer:
width = 30
height = settings.SCREEN_HEIGHT
def __init__(self, game):
self.game = game
self.console = libtcod.console_new(GameMenuRenderer.width, GameMenuRenderer.height)
def render(self, keys):
"""
Returns the completed menu console ready to be blitted onto another
existing console.
"""
libtcod.console_clear(self.console)
libtcod.console_set_default_background(self.console, settings.MENU_BG_COLOR)
self._render_lines()
self._render_menu(keys)
return self.console
def _render_lines(self):
libtcod.console_print_frame(self.console, 0, 1, GameMenuRenderer.width - 1, GameMenuRenderer.height - 2)
def _render_menu(self, keys):
"""
Render the menu strings, these are passed in as dictionaries of the
form:
{row: <the row on which to place the menu item>,
char: <the character which activates the menu item>,
str: <The display string for the menu item>}
If the row is negative it is taken as a reverse index from the
bottom of the menu.
"""
for key in keys:
row = key["row"] + 1 if key["row"] >= 0 else GameMenuRenderer.height + key["row"] - 2
libtcod.console_print(self.console, 2, row, "{0}. {1}".format(key["char"], key["str"]))
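# Illustrative sketch (not part of the original module): menu items are plain
# dicts, and a negative row indexes from the bottom of the menu frame.
#
#   renderer = GameMenuRenderer(game)  # 'game' is assumed to exist
#   console = renderer.render([
#       {"row": 0, "char": "s", "str": "Save"},
#       {"row": -1, "char": "q", "str": "Quit"},  # last visible row
#   ])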
|
DaveTCode/CreatureRogue
|
CreatureRogue/renderer/game_menu_renderer.py
|
Python
|
mit
| 1,565
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.linalg import linalg as linalg_lib
from tensorflow.python.ops.linalg import linear_operator_test_util
from tensorflow.python.platform import test
linalg = linalg_lib
class SquareLinearOperatorFullMatrixTest(
linear_operator_test_util.SquareLinearOperatorDerivedClassTest):
"""Most tests done in the base class LinearOperatorDerivedClassTest."""
def _operator_and_matrix(self, build_info, dtype, use_placeholder):
shape = list(build_info.shape)
matrix = linear_operator_test_util.random_positive_definite_matrix(
shape, dtype)
lin_op_matrix = matrix
if use_placeholder:
lin_op_matrix = array_ops.placeholder_with_default(matrix, shape=None)
operator = linalg.LinearOperatorFullMatrix(lin_op_matrix, is_square=True)
return operator, matrix
def test_is_x_flags(self):
# Matrix with two positive eigenvalues.
matrix = [[1., 0.], [1., 11.]]
operator = linalg.LinearOperatorFullMatrix(
matrix,
is_positive_definite=True,
is_non_singular=True,
is_self_adjoint=False)
self.assertTrue(operator.is_positive_definite)
self.assertTrue(operator.is_non_singular)
self.assertFalse(operator.is_self_adjoint)
# Auto-detected.
self.assertTrue(operator.is_square)
def test_assert_non_singular_raises_if_cond_too_big_but_finite(self):
with self.cached_session():
tril = linear_operator_test_util.random_tril_matrix(
shape=(50, 50), dtype=np.float32)
diag = np.logspace(-2, 2, 50).astype(np.float32)
tril = array_ops.matrix_set_diag(tril, diag)
matrix = math_ops.matmul(tril, tril, transpose_b=True).eval()
operator = linalg.LinearOperatorFullMatrix(matrix)
with self.assertRaisesOpError("Singular matrix"):
# Ensure that we have finite condition number...just HUGE.
cond = np.linalg.cond(matrix)
self.assertTrue(np.isfinite(cond))
self.assertGreater(cond, 1e12)
operator.assert_non_singular().run()
def test_assert_non_singular_raises_if_cond_infinite(self):
with self.cached_session():
matrix = [[1., 1.], [1., 1.]]
# We don't pass the is_self_adjoint hint here, which means we take the
# generic code path.
operator = linalg.LinearOperatorFullMatrix(matrix)
with self.assertRaisesOpError("Singular matrix"):
operator.assert_non_singular().run()
def test_assert_self_adjoint(self):
matrix = [[0., 1.], [0., 1.]]
operator = linalg.LinearOperatorFullMatrix(matrix)
with self.cached_session():
with self.assertRaisesOpError("not equal to its adjoint"):
operator.assert_self_adjoint().run()
def test_assert_positive_definite(self):
matrix = [[1., 1.], [1., 1.]]
operator = linalg.LinearOperatorFullMatrix(matrix, is_self_adjoint=True)
with self.cached_session():
with self.assertRaisesOpError("Cholesky decomposition was not success"):
operator.assert_positive_definite().run()
class SquareLinearOperatorFullMatrixSymmetricPositiveDefiniteTest(
linear_operator_test_util.SquareLinearOperatorDerivedClassTest):
"""Most tests done in the base class LinearOperatorDerivedClassTest.
In this test, the operator is constructed with hints that invoke the use of
a Cholesky decomposition for solves/determinant.
"""
def setUp(self):
    # Increase atol/rtol from 1e-6 to 1e-5. This loss of accuracy happens,
    # presumably, because we are taking a different code path in the operator
    # and the matrix: the operator uses a Cholesky, the matrix uses a standard
    # solve.
self._atol[dtypes.float32] = 1e-5
self._rtol[dtypes.float32] = 1e-5
self._atol[dtypes.float64] = 1e-10
self._rtol[dtypes.float64] = 1e-10
@property
def _dtypes_to_test(self):
return [dtypes.float32, dtypes.float64]
def _operator_and_matrix(self, build_info, dtype, use_placeholder):
shape = list(build_info.shape)
matrix = linear_operator_test_util.random_positive_definite_matrix(
shape, dtype, force_well_conditioned=True)
lin_op_matrix = matrix
if use_placeholder:
lin_op_matrix = array_ops.placeholder_with_default(matrix, shape=None)
operator = linalg.LinearOperatorFullMatrix(lin_op_matrix, is_square=True)
return operator, matrix
def test_is_x_flags(self):
# Matrix with two positive eigenvalues.
matrix = [[1., 0.], [0., 7.]]
operator = linalg.LinearOperatorFullMatrix(
matrix, is_positive_definite=True, is_self_adjoint=True)
self.assertTrue(operator.is_positive_definite)
self.assertTrue(operator.is_self_adjoint)
# Should be auto-set
self.assertTrue(operator.is_non_singular)
self.assertTrue(operator._can_use_cholesky)
self.assertTrue(operator.is_square)
def test_assert_non_singular(self):
matrix = [[1., 1.], [1., 1.]]
operator = linalg.LinearOperatorFullMatrix(
matrix, is_self_adjoint=True, is_positive_definite=True)
with self.cached_session():
# Cholesky decomposition may fail, so the error is not specific to
# non-singular.
with self.assertRaisesOpError(""):
operator.assert_non_singular().run()
def test_assert_self_adjoint(self):
matrix = [[0., 1.], [0., 1.]]
operator = linalg.LinearOperatorFullMatrix(
matrix, is_self_adjoint=True, is_positive_definite=True)
with self.cached_session():
with self.assertRaisesOpError("not equal to its adjoint"):
operator.assert_self_adjoint().run()
def test_assert_positive_definite(self):
matrix = [[1., 1.], [1., 1.]]
operator = linalg.LinearOperatorFullMatrix(
matrix, is_self_adjoint=True, is_positive_definite=True)
with self.cached_session():
# Cholesky decomposition may fail, so the error is not specific to
# non-singular.
with self.assertRaisesOpError(""):
operator.assert_positive_definite().run()
class NonSquareLinearOperatorFullMatrixTest(
linear_operator_test_util.NonSquareLinearOperatorDerivedClassTest):
"""Most tests done in the base class LinearOperatorDerivedClassTest."""
def _operator_and_matrix(self, build_info, dtype, use_placeholder):
shape = list(build_info.shape)
matrix = linear_operator_test_util.random_normal(shape, dtype=dtype)
lin_op_matrix = matrix
if use_placeholder:
lin_op_matrix = array_ops.placeholder_with_default(matrix, shape=None)
operator = linalg.LinearOperatorFullMatrix(lin_op_matrix, is_square=True)
return operator, matrix
def test_is_x_flags(self):
matrix = [[3., 2., 1.], [1., 1., 1.]]
operator = linalg.LinearOperatorFullMatrix(
matrix,
is_self_adjoint=False)
self.assertEqual(operator.is_positive_definite, None)
self.assertEqual(operator.is_non_singular, None)
self.assertFalse(operator.is_self_adjoint)
self.assertFalse(operator.is_square)
def test_matrix_must_have_at_least_two_dims_or_raises(self):
with self.assertRaisesRegexp(ValueError, "at least 2 dimensions"):
linalg.LinearOperatorFullMatrix([1.])
if __name__ == "__main__":
test.main()
|
alshedivat/tensorflow
|
tensorflow/python/kernel_tests/linalg/linear_operator_full_matrix_test.py
|
Python
|
apache-2.0
| 8,081
|
# Copyright (c) 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Handles all processes within the Guest VM, considering it as a Platform
The :py:class:`GuestManager` class is a :py:class:`nova.manager.Manager` that
handles RPC calls relating to Platform specific operations.
**Related Flags**
"""
from itertools import chain
import os
from oslo_log import log as logging
from trove.common import cfg
from trove.common.i18n import _
LOG = logging.getLogger(__name__)
defaults = {
'mysql':
'trove.guestagent.datastore.mysql.manager.Manager',
'percona':
'trove.guestagent.datastore.experimental.percona.manager.Manager',
'pxc':
'trove.guestagent.datastore.experimental.pxc.manager.Manager',
'redis':
'trove.guestagent.datastore.experimental.redis.manager.Manager',
'cassandra':
'trove.guestagent.datastore.experimental.cassandra.manager.Manager',
'couchbase':
'trove.guestagent.datastore.experimental.couchbase.manager.Manager',
'mongodb':
'trove.guestagent.datastore.experimental.mongodb.manager.Manager',
'postgresql':
'trove.guestagent.datastore.experimental.postgresql.manager.Manager',
'couchdb':
'trove.guestagent.datastore.experimental.couchdb.manager.Manager',
'vertica':
'trove.guestagent.datastore.experimental.vertica.manager.Manager',
'db2':
'trove.guestagent.datastore.experimental.db2.manager.Manager',
'mariadb':
'trove.guestagent.datastore.experimental.mariadb.manager.Manager'
}
CONF = cfg.CONF
def get_custom_managers():
return CONF.datastore_registry_ext
def datastore_registry():
return dict(chain(defaults.items(),
get_custom_managers().items()))
def to_gb(bytes):
if bytes == 0:
return 0.0
size = bytes / 1024.0 ** 3
# Make sure we don't return 0.0 if the size is greater than 0
return max(round(size, 2), 0.01)
def to_mb(bytes):
if bytes == 0:
return 0.0
size = bytes / 1024.0 ** 2
# Make sure we don't return 0.0 if the size is greater than 0
return max(round(size, 2), 0.01)
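# Illustrative arithmetic (not part of the original module):
#   to_gb(1610612736) == 1.5   # 1.5 GiB in bytes / 1024.0 ** 3
#   to_gb(1) == 0.01           # clamped so non-zero sizes never round to 0.0
#   to_gb(0) == 0.0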
def get_filesystem_volume_stats(fs_path):
try:
stats = os.statvfs(fs_path)
except OSError:
LOG.exception(_("Error getting volume stats."))
raise RuntimeError("Filesystem not found (%s)" % fs_path)
total = stats.f_blocks * stats.f_bsize
free = stats.f_bfree * stats.f_bsize
    # total and used are converted to GB; free is left in bytes
used_gb = to_gb(total - free)
total_gb = to_gb(total)
output = {
'block_size': stats.f_bsize,
'total_blocks': stats.f_blocks,
'free_blocks': stats.f_bfree,
'total': total_gb,
'free': free,
'used': used_gb
}
return output
|
mmasaki/trove
|
trove/guestagent/dbaas.py
|
Python
|
apache-2.0
| 3,294
|
import tests.periodicities.period_test as per
per.buildModel((5, 'W', 1600))
|
antoinecarme/pyaf
|
tests/periodicities/Week/Cycle_Week_1600_W_5.py
|
Python
|
bsd-3-clause
| 82
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# termineter/modules/get_info.py
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the project nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
from __future__ import unicode_literals
from c1218.errors import C1218ReadTableError
from c1219.access.general import C1219GeneralAccess
from termineter.module import TermineterModuleOptical
STATUS_FLAGS = (
'Unprogrammed',
'Configuration Error',
'Self Check Error',
'RAM Failure',
'ROM Failure',
'Non Volatile Memory Failure',
'Clock Error',
'Measurement Error',
'Low Battery',
'Low Loss Potential',
'Demand Overload',
'Power Failure',
'Tamper Detect',
'Reverse Rotation'
)
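# Illustrative example (not part of the original module): std_status is a
# bitmask that run() below decodes by bit position against STATUS_FLAGS, e.g.
#
#   status = 0b100000001  # bits 0 and 8 set
#   [flag for i, flag in enumerate(STATUS_FLAGS) if status & (2 ** i)]
#   # -> ['Unprogrammed', 'Low Battery']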
class Module(TermineterModuleOptical):
def __init__(self, *args, **kwargs):
TermineterModuleOptical.__init__(self, *args, **kwargs)
self.author = ['Spencer McIntyre']
self.description = 'Get Basic Meter Information By Reading Tables'
        self.detailed_description = 'This module retrieves some basic meter information and displays it in a human-readable way.'
def run(self):
conn = self.frmwk.serial_connection
try:
general_ctl = C1219GeneralAccess(conn)
except C1218ReadTableError:
self.frmwk.print_error('Could not read the necessary tables')
return
meter_info = {}
meter_info['Character Encoding'] = general_ctl.char_format
meter_info['Device Type'] = general_ctl.nameplate_type
meter_info['C12.19 Version'] = {0: 'Pre-release', 1: 'C12.19-1997', 2: 'C12.19-2008'}.get(general_ctl.std_version_no) or 'Unknown'
meter_info['Manufacturer'] = general_ctl.manufacturer
meter_info['Model'] = general_ctl.ed_model
meter_info['Hardware Version'] = str(general_ctl.hw_version_no) + '.' + str(general_ctl.hw_revision_no)
meter_info['Firmware Version'] = str(general_ctl.fw_version_no) + '.' + str(general_ctl.fw_revision_no)
meter_info['Serial Number'] = general_ctl.mfg_serial_no
if general_ctl.ed_mode is not None:
modes = []
flags = ['Metering', 'Test Mode', 'Meter Shop Mode', 'Factory']
for i in range(len(flags)):
if general_ctl.ed_mode & (2 ** i):
modes.append(flags[i])
if len(modes):
meter_info['Mode Flags'] = ', '.join(modes)
if general_ctl.std_status is not None:
status = []
for i, flag in enumerate(STATUS_FLAGS):
if general_ctl.std_status & (2 ** i):
status.append(flag)
if len(status):
meter_info['Status Flags'] = ', '.join(status)
if general_ctl.device_id is not None:
meter_info['Device ID'] = general_ctl.device_id
self.frmwk.print_status('General Information:')
fmt_string = " {0:.<38}.{1}"
keys = sorted(list(meter_info.keys()))
for key in keys:
self.frmwk.print_status(fmt_string.format(key, meter_info[key]))
|
securestate/termineter
|
lib/termineter/modules/get_info.py
|
Python
|
bsd-3-clause
| 4,124
|
#!/usr/bin/python
from django.conf import settings
from django.conf.urls import url, include
from django.conf.urls.static import static
from game import views
from rest_framework.routers import DefaultRouter
from rest_framework.schemas import get_schema_view
app_name = 'game'
router = DefaultRouter()
router.register(r'user', views.UserViewSet)
router.register(r'enemy', views.EnemyViewSet)
router.register(r'item', views.ItemViewSet)
router.register(r'player', views.PlayerViewSet)
router.register(r'board', views.BoardViewSet)
router.register(r'game', views.GameViewSet)
router.register(r'score', views.ScoreViewSet)
router.register(r'mine', views.MineViewSet)
router.register(r'market', views.MarketViewSet)
schema_view = get_schema_view(title='Game')
urlpatterns = [
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'), name='login'),
url(r'^schema/$', schema_view),
url(r'^game/now_playing/$', views.now_playing),
url(r'^', include(router.urls), name='game'),
url(r'^new/(?P<player_code>[0-9A-Z]{15})/$', views.DungeonMasterView.as_view()),
url(r'^play/(?P<game_code>[0-9A-Z]{15})/$', views.DungeonMasterView.as_view())
]
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
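# Illustrative note (not part of the original module): DefaultRouter derives
# list and detail routes from each registered ViewSet, so the registrations
# above expose e.g. /user/ and /user/{pk}/ alongside the explicit url() entries.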
|
casparluc/VikingDoom
|
game/urls.py
|
Python
|
gpl-3.0
| 1,261
|
import numpy as np
import pandas as pd
from pyspark.sql import SparkSession
from pyspark.ml.linalg import Vectors
from pyspark.ml.regression import LinearRegression
def generate_data():
np.random.seed(1) # set the seed
x = np.arange(100)
error = np.random.normal(0, size=(100,))
y = 0.5 + 0.3 * x + error
return x, y
def convert_to_df(spark_session, input_data):
x, y = input_data
data = pd.DataFrame([(i, j) for i, j in zip(x, y)], columns=["x", "y"])
data_spark = spark_session.createDataFrame(data)
df = spark_session.createDataFrame((data_spark
.rdd
.map(lambda row: (row[1], 0.5, Vectors.dense(row[0])))
), ["label", "weight", "features"])
return df
def fit_model(df):
lr = LinearRegression(maxIter=5, regParam=0.0, solver="normal", weightCol="weight")
model = lr.fit(df)
return model
def save_model(model):
model.write().overwrite().save("model")
if __name__ == "__main__":
spark_session = SparkSession.builder.getOrCreate()
input_data = generate_data()
df = convert_to_df(spark_session, input_data)
model = fit_model(df)
save_model(model)
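# Illustrative follow-up sketch (not part of the original script): the fitted
# model's parameters should sit near the true values used in generate_data().
#
#   model = fit_model(df)
#   print(model.intercept)      # ~0.5
#   print(model.coefficients)   # ~[0.3]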
|
datitran/PySpark-App-CF
|
linear_regression.py
|
Python
|
mit
| 1,254
|
import numpy as np
import matplotlib.pyplot as pl
import Image
import scipy.signal as sg
#some variable initializations
#resolution of gabor filter
resolution = 1.
#size of gabor filter
gsize = 30
#Number of gabor filter orientations with cosine in the gabor bank
N_Greal = 8
#Number of gabor filter orientations with sine in the gabor bank
N_Gimag = 0
#number of different wave vectors in the gabor bank
N_Size = 8
#total number of gabor filters
N_Gabor = N_Greal*N_Size+N_Gimag*N_Size
# return 2D Gabor Filter with cosine. Uses multivariate Gaussian with standard deviations "sigmax" and "sigmay" and has a mean of 0. Cosine has wave vector "k", phase "phi" and is rotated around angle "theta". Filter has "size" as size with resolution "res".
def Gabor_real(size, sigmax, sigmay, k, phi, theta, res):
x,y = np.mgrid[-size/2:size/2:res,-size/2:size/2:res]
xrot = x*np.cos(theta) + y*np.sin(theta)
return (1/(2.*np.pi*sigmax*sigmay))*np.exp(-(x**2/(2.0*sigmax**2))-(y**2/(2.0*sigmay**2)))*np.cos((k*xrot)-phi)
# return 2D Gabor Filter with sine. Uses multivariate Gaussian with standard deviations "sigmax" and "sigmay" and has a mean of 0. Sine has wave vector "k", phase "phi" and is rotated around angle "theta". Filter has "size" as size with resolution "res".
def Gabor_imag(size, sigmax, sigmay, k, phi, theta, res):
# return 2D Gabor Filter
x,y = np.mgrid[-size/2:size/2:res,-size/2:size/2:res]
xrot = x*np.cos(theta) + y*np.sin(theta)
return (1/(2.*np.pi*sigmax*sigmay))*np.exp(-(x**2/(2.0*sigmax**2))-(y**2/(2.0*sigmay**2)))*np.sin((k*xrot)-phi)
# return gabor bank of "n_real" cosine gabor filters and "n_imag" sine gabor
# filters with "n_size" wave vectors, size "size" and resolution "res".
# Returns an array of gabor filters with shape (N_Gabor, int(size/res), int(size/res))
# such that gabor_bank[i] is the i-th gabor filter. gabor_bank[0:n_size*n_real]
# contains the real gabor filters, where gabor_bank[0:n_size] holds n_size
# differently sized filters of the same orientation, and so on.
# gabor_bank[n_size*n_real:n_size*(n_real+n_imag)] contains the imaginary gabor filters.
def Gabor_Bank(n_real, n_imag, n_size, size, res):
#total number of gabor filters in the gabor bank
N_Gabor = n_real*n_size+n_imag*n_size
gabor_bank = np.zeros((N_Gabor,int(size/res),int(size/res)))
for i in range(n_real):
for j in range(n_size):
gabor_bank[i*n_size+j] = Gabor_real(size,j/4.+1/2.,j/4.+1/2.,n_size/2.+1-j/2.,0,i*np.pi/n_real,res)
for i in range(n_imag):
for j in range(n_size):
gabor_bank[i*n_size+j+n_real*n_size] = Gabor_imag(size,j/4.+1/4.,j/4.+1/4.,n_size/2.+1-j/2.,0,i*2*np.pi/n_imag,res)
return gabor_bank
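# Illustrative sketch (not part of the original script): with the module
# defaults above, the bank holds N_Gabor filters of gsize/resolution pixels
# per side.
#
#   bank = Gabor_Bank(N_Greal, N_Gimag, N_Size, gsize, resolution)
#   assert bank.shape == (N_Gabor, int(gsize/resolution), int(gsize/resolution))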
#nice gabor filter plot function for the "N"-th gabor filter. for my 4 different sizes though.
def Gabor_Plot(gabor_bank,N):
f,ar = pl.subplots(2,2)
ar[0,0].imshow(gabor_bank[N+0])
ar[0,1].imshow(gabor_bank[N+1])
ar[1,0].imshow(gabor_bank[N+2])
ar[1,1].imshow(gabor_bank[N+3])
f.show()
#reads png image with name "img_name". returns a numpy array (luminance + alpha channels)
def Read_Image(img_name):
img = Image.open(img_name).convert('LA')
img = np.array(img)
#img = img[:,:,0]
#img = np.dot(img[:,:,:3], [0.299, 0.587, 0.144])
return img
#plots image after reading. also plots convolved image if given cimg[i] as argument
def Plot_Image(img):
pl.figure()
pl.imshow(img,cmap='gray')
pl.show()
#convolve data
def Convolve_Data(img,gabor_bank):
cimg = np.zeros((gabor_bank.shape[0],gabor_bank.shape[1]+img.shape[0]-1,gabor_bank.shape[2]+img.shape[1]-1))
for i in range(gabor_bank.shape[0]):
cimg[i]=sg.convolve2d(img, gabor_bank[i])
        #print progress of the convolution (since it is so slow)
print N_Gabor, i
return cimg
#write "data" into "filename". checks data after writing with assertion.
def Write_Data(data,filename):
with file(filename, 'w') as outfile:
for i in range(data.shape[0]):
np.savetxt(outfile, data[i])
new_data = np.loadtxt(filename)
new_data = new_data.reshape((data.shape[0],data.shape[1],data.shape[2]))
assert np.all(new_data == data)
def Read_Img_Database():
for i in range(1,101):
for j in range(356):
filename="coil-100/obj"+str(i)+"__"+str(j)+".png"
            img=Read_Image(filename)
Plot_Image(img)
|
shiina/invariant-object-recognition
|
gabor.py
|
Python
|
lgpl-3.0
| 4,221
|
import os
import astrodash
directoryPath = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../templates/OzDES_data/')
atels = [
('ATEL_9504_Run24/DES16E1de_E1_combined_160825_v10_b00.dat', 0.292),
('ATEL_9504_Run24/DES16E2dd_E2_combined_160826_v10_b00.dat', 0.0746),
('ATEL_9504_Run24/DES16X3km_X3_combined_160827_v10_b00.dat', 0.06),
('ATEL_9504_Run24/DES16X3er_X3_combined_160827_v10_b00.dat', 0.167),
('ATEL_9504_Run24/DES16X3hj_X3_combined_160827_v10_b00.dat', 0.308),
('ATEL_9504_Run24/DES16X3es_X3_combined_160827_v10_b00.dat', 0.554),
('ATEL_9504_Run24/DES16X3jj_X3_combined_160827_v10_b00.dat', 0.238),
('ATEL_9504_Run24/DES16C3fv_C3_combined_160829_v10_b00.dat', 0.322),
('ATEL_9504_Run24/DES16C3bq_C3_combined_160829_v10_b00.dat', 0.241),
('ATEL_9504_Run24/DES16E1md_E1_combined_160829_v10_b00.dat', 0.178),
('ATEL_9504_Run24/DES16E1ah_E1_combined_160829_v10_b00.dat', 0.149),
('ATEL_9504_Run24/DES16C3ea_C3_combined_160829_v10_b00.dat', 0.217),
('ATEL_9504_Run24/DES16X1ey_X1_combined_160829_v10_b00.dat', 0.076),
('ATEL_9570_Run25/DES16C3bq_C3_combined_160925_v10_b00.dat', 0.237),
('ATEL_9570_Run25/DES16E2aoh_E2_combined_160925_v10_b00.dat', 0.403),
('ATEL_9570_Run25/DES16X3aqd_X3_combined_160925_v10_b00.dat', 0.033),
('ATEL_9570_Run25/DES16X3biz_X3_combined_160925_v10_b00.dat', 0.24),
('ATEL_9570_Run25/DES16C2aiy_C2_combined_160926_v10_b00.dat', 0.182),
('ATEL_9570_Run25/DES16C2ma_C2_combined_160926_v10_b00.dat', 0.24),
('ATEL_9570_Run25/DES16X1ge_X1_combined_160926_v10_b00.dat', 0.25),
('ATEL_9570_Run25/DES16X2auj_X2_combined_160927_v10_b00.dat', 0.144),
('ATEL_9570_Run25/DES16E2bkg_E2_combined_161005_v10_b00.dat', 0.478),
('ATEL_9570_Run25/DES16E2bht_E2_combined_161005_v10_b00.dat', 0.392),
('ATEL_9742_Run26/DES16E1ciy_E1_combined_161101_v10_b00.dat', 0.174),
('ATEL_9742_Run26/DES16S1cps_S1_combined_161101_v10_b00.dat', 0.274),
('ATEL_9742_Run26/DES16E2crb_E2_combined_161102_v10_b00.dat', 0.229),
('ATEL_9742_Run26/DES16E2clk_E2_combined_161102_v10_b00.dat', 0.367),
('ATEL_9742_Run26/DES16E2cqq_E2_combined_161102_v10_b00.dat', 0.426),
('ATEL_9742_Run26/DES16X2ceg_X2_combined_161103_v10_b00.dat', 0.335),
('ATEL_9742_Run26/DES16X2bkr_X2_combined_161103_v10_b00.dat', 0.159),
('ATEL_9742_Run26/DES16X2crr_X2_combined_161103_v10_b00.dat', 0.312),
('ATEL_9742_Run26/DES16X2cpn_X2_combined_161103_v10_b00.dat', 0.28),
('ATEL_9742_Run26/DES16X2bvf_X2_combined_161103_v10_b00.dat', 0.135),
('ATEL_9742_Run26/DES16C1cbg_C1_combined_161103_v10_b00.dat', 0.111),
('ATEL_9742_Run26/DES16C2cbv_C2_combined_161103_v10_b00.dat', 0.109),
('ATEL_9742_Run26/DES16C1bnt_C1_combined_161103_v10_b00.dat', 0.351),
('ATEL_9742_Run26/DES16C3at_C3_combined_161031_v10_b00.dat', 0.217),
('ATEL_9742_Run26/DES16X3cpl_X3_combined_161031_v10_b00.dat', 0.205),
('ATEL_9742_Run26/DES16E2cjg_E2_combined_161102_v10_b00.dat', 0.48),
('ATEL_9742_Run26/DES16X2crt_X2_combined_161103_v10_b00.dat', 0.57),
('ATEL_9855_Run27/DES16E1dcx_E1_combined_161125_v10_b00.dat', 0.453),
('ATEL_9855_Run27/DES16E1dcx_E2_combined_161126_v10_b00.dat', 0.453),
('ATEL_9855_Run27/DES16E1dic_E1_combined_161125_v10_b00.dat', 0.207),
('ATEL_9855_Run27/DES16X3dfk_X3_combined_161125_v10_b00.dat', 0.1495),
('ATEL_9855_Run27/DES16C3dhv_C3_combined_161125_v10_b00.dat', 0.300),
('ATEL_9855_Run27/DES16E2cxw_E2_combined_161126_v10_b00.dat', 0.293),
('ATEL_9855_Run27/DES16E2drd_E2_combined_161126_v10_b00.dat', 0.270),
('ATEL_9855_Run27/DES16X1drk_X1_combined_161127_v10_b00.dat', 0.463),
('ATEL_9855_Run27/DES16X1dbw_X1_combined_161127_v10_b00.dat', 0.336),
('ATEL_9855_Run27/DES16S2ean_S2_combined_161127_v10_b00.dat', 0.161),
('ATEL_9855_Run27/DES16S2dfm_S2_combined_161127_v10_b00.dat', 0.30),
('ATEL_9855_Run27/DES16X1dbx_X1_combined_161127_v10_b00.dat', 0.345),
('ATEL_9855_Run27/DES16E1eae_E1_combined_161129_v10_b00.dat', 0.534),
('ATEL_9855_Run27/DES16E1eef_E1_combined_161129_v10_b00.dat', 0.32),
('ATEL_9855_Run27/DES16S2drt_S2_combined_161127_v10_b00.dat', 0.331),
('ATEL_9855_Run27/DES16X1der_X1_combined_161127_v10_b00.dat', 0.453),
('ATEL_9855_Run27/DES16C3dhy_C3_combined_161128_v10_b00.dat', 0.276),
('ATEL_9855_Run27/DES16X2dqz_X2_combined_161128_v10_b00.dat', 0.204),
('ATEL_9961_Run28/DES16C3elb_C3_combined_161225_v10_b00.dat', 0.429),
('ATEL_9961_Run28/DES16X3dvb_X3_combined_161225_v10_b00.dat', 0.329),
('ATEL_9961_Run28/DES16C2ege_C2_combined_161225_v10_b00.dat', 0.348),
('ATEL_9961_Run28/DES16X3eww_X3_combined_161225_v10_b00.dat', 0.445),
('ATEL_9961_Run28/DES16X3enk_X3_combined_161225_v10_b00.dat', 0.331),
('ATEL_9961_Run28/DES16S1ffb_S1_combined_161226_v10_b00.dat', 0.164),
('ATEL_9961_Run28/DES16C1fgm_C1_combined_161226_v10_b00.dat', 0.361),
('ATEL_9961_Run28/DES16X2dzz_X2_combined_161226_v10_b00.dat', 0.325),
('ATEL_9961_Run28/DES16X1few_X1_combined_161227_v10_b00.dat', 0.311),
('ATEL_9961_Run28/DES16X1chc_X1_combined_161227_v10_b00.dat', 0.043),
('ATEL_9961_Run28/DES16S2ffk_S2_combined_161227_v10_b00.dat', 0.373)]
filenames = [os.path.join(directoryPath, i[0]) for i in atels]
knownRedshifts = [i[1] for i in atels]
classification = astrodash.Classify(filenames, knownRedshifts, classifyHost=False, smooth=5, knownZ=True)
bestFits, redshifts, bestTypes, rejectionLabels, reliableFlags, redshiftErrs = classification.list_best_matches(n=5)
# SAVE BEST MATCHES
print(bestFits)
f = open('classification_results.txt', 'w')
for i in range(len(filenames)):
    # note: str.strip() removes characters, not a prefix; use a relative path instead
    relName = os.path.relpath(filenames[i], directoryPath)
    f.write("%s z=%s %s %s %s\n %s\n\n" % (relName, redshifts[i], bestTypes[i], reliableFlags[i], rejectionLabels[i], bestFits[i]))
f.close()
print("Finished classifying %d spectra!" % len(filenames))
# PLOT SPECTRUM ON GUI
classification.plot_with_gui(indexToPlot=18)
|
daniel-muthukrishna/DASH
|
astrodash/classify_OzDES_ATELs.py
|
Python
|
mit
| 5,978
|
from __future__ import absolute_import
from __future__ import print_function
import datetime
from boto.s3.key import Key
from boto.s3.connection import S3Connection
from django.conf import settings
from django.db import connection
from django.forms.models import model_to_dict
from django.utils.timezone import make_aware as timezone_make_aware
from django.utils.timezone import utc as timezone_utc
from django.utils.timezone import is_naive as timezone_is_naive
from django.db.models.query import QuerySet
import glob
import logging
import os
import ujson
import shutil
import subprocess
import tempfile
from zerver.lib.avatar_hash import user_avatar_hash
from zerver.lib.create_user import random_api_key
from zerver.models import UserProfile, Realm, Client, Huddle, Stream, \
UserMessage, Subscription, Message, RealmEmoji, RealmFilter, \
RealmDomain, Recipient, DefaultStream, get_user_profile_by_id, \
UserPresence, UserActivity, UserActivityInterval, \
get_user_profile_by_email, \
get_display_recipient, Attachment, get_system_bot
from zerver.lib.parallel import run_parallel
from zerver.lib.utils import mkdir_p
from six.moves import range
from typing import Any, Callable, Dict, List, Optional, Set, Tuple
# Custom mypy types follow:
Record = Dict[str, Any]
TableName = str
TableData = Dict[TableName, List[Record]]
Field = str
Path = str
Context = Dict[str, Any]
FilterArgs = Dict[str, Any]
IdSource = Tuple[TableName, Field]
SourceFilter = Callable[[Record], bool]
# These next two types are callbacks, which mypy does not
# support well, because PEP 484 says "using callbacks
# with keyword arguments is not perceived as a common use case."
# CustomFetch = Callable[[TableData, Config, Context], None]
# PostProcessData = Callable[[TableData, Config, Context], None]
CustomFetch = Any # TODO: make more specific, see above
PostProcessData = Any # TODO: make more specific
# The keys of our MessageOutput variables are normally
# List[Record], but when we write partials, we can get
# lists of integers or a single integer.
# TODO: tighten this up with a union.
MessageOutput = Dict[str, Any]
realm_tables = [("zerver_defaultstream", DefaultStream),
("zerver_realmemoji", RealmEmoji),
("zerver_realmdomain", RealmDomain),
("zerver_realmfilter", RealmFilter)] # List[Tuple[TableName, Any]]
ALL_ZERVER_TABLES = [
# TODO: get a linter to ensure that this list is actually complete.
'zerver_attachment',
'zerver_attachment_messages',
'zerver_client',
'zerver_defaultstream',
'zerver_huddle',
'zerver_message',
'zerver_preregistrationuser',
'zerver_preregistrationuser_streams',
'zerver_pushdevicetoken',
'zerver_realm',
'zerver_realmdomain',
'zerver_realmemoji',
'zerver_realmfilter',
'zerver_recipient',
'zerver_scheduledjob',
'zerver_stream',
'zerver_subscription',
'zerver_useractivity',
'zerver_useractivityinterval',
'zerver_usermessage',
'zerver_userpresence',
'zerver_userprofile',
'zerver_userprofile_groups',
'zerver_userprofile_user_permissions',
]
NON_EXPORTED_TABLES = [
# These are known to either be altogether obsolete or
# simply inappropriate for exporting (e.g. contains transient
# data).
'zerver_preregistrationuser',
'zerver_preregistrationuser_streams',
'zerver_pushdevicetoken',
'zerver_scheduledjob',
'zerver_userprofile_groups',
'zerver_userprofile_user_permissions',
]
assert set(NON_EXPORTED_TABLES).issubset(set(ALL_ZERVER_TABLES))
IMPLICIT_TABLES = [
# ManyToMany relationships are exported implicitly.
'zerver_attachment_messages',
]
assert set(IMPLICIT_TABLES).issubset(set(ALL_ZERVER_TABLES))
ATTACHMENT_TABLES = [
'zerver_attachment',
]
assert set(ATTACHMENT_TABLES).issubset(set(ALL_ZERVER_TABLES))
MESSAGE_TABLES = [
# message tables get special treatment, because they're so big
'zerver_message',
'zerver_usermessage',
]
DATE_FIELDS = {
'zerver_attachment': ['create_time'],
'zerver_message': ['last_edit_time', 'pub_date'],
'zerver_realm': ['date_created'],
'zerver_stream': ['date_created'],
'zerver_useractivity': ['last_visit'],
'zerver_useractivityinterval': ['start', 'end'],
'zerver_userpresence': ['timestamp'],
'zerver_userprofile': ['date_joined', 'last_login', 'last_reminder'],
} # type: Dict[TableName, List[Field]]
def sanity_check_output(data):
# type: (TableData) -> None
tables = set(ALL_ZERVER_TABLES)
tables -= set(NON_EXPORTED_TABLES)
tables -= set(IMPLICIT_TABLES)
tables -= set(MESSAGE_TABLES)
tables -= set(ATTACHMENT_TABLES)
for table in tables:
if table not in data:
logging.warn('??? NO DATA EXPORTED FOR TABLE %s!!!' % (table,))
def write_data_to_file(output_file, data):
# type: (Path, Any) -> None
with open(output_file, "w") as f:
f.write(ujson.dumps(data, indent=4))
def make_raw(query, exclude=None):
# type: (Any, List[Field]) -> List[Record]
'''
Takes a Django query and returns a JSONable list
of dictionaries corresponding to the database rows.
'''
rows = []
for instance in query:
data = model_to_dict(instance, exclude=exclude)
"""
In Django 1.10, model_to_dict resolves ManyToManyField as a QuerySet.
Previously, we used to get primary keys. Following code converts the
QuerySet into primary keys.
For reference: https://www.mail-archive.com/django-updates@googlegroups.com/msg163020.html
"""
for field in instance._meta.many_to_many:
value = data[field.name]
if isinstance(value, QuerySet):
data[field.name] = [row.pk for row in value]
rows.append(data)
return rows
def floatify_datetime_fields(data, table):
# type: (TableData, TableName) -> None
for item in data[table]:
for field in DATE_FIELDS[table]:
orig_dt = item[field]
if orig_dt is None:
continue
if timezone_is_naive(orig_dt):
                logging.warning("Naive datetime: %s" % (item,))
dt = timezone_make_aware(orig_dt)
else:
dt = orig_dt
utc_naive = dt.replace(tzinfo=None) - dt.utcoffset()
item[field] = (utc_naive - datetime.datetime(1970, 1, 1)).total_seconds()
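# Illustrative arithmetic (not part of the original module): an aware
# datetime of 1970-01-01 00:00:10 UTC floatifies to 10.0, since each value
# is normalized to naive UTC and diffed against the Unix epoch.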
class Config(object):
'''
    A Config object configures a single table for exporting (and,
    maybe some day, importing as well).
You should never mutate Config objects as part of the export;
instead use the data to determine how you populate other
data structures.
There are parent/children relationships between Config objects.
The parent should be instantiated first. The child will
append itself to the parent's list of children.
'''
def __init__(self, table=None, model=None,
normal_parent=None, virtual_parent=None,
filter_args=None, custom_fetch=None, custom_tables=None,
post_process_data=None,
concat_and_destroy=None, id_source=None, source_filter=None,
parent_key=None, use_all=False, is_seeded=False, exclude=None):
# type: (str, Any, Config, Config, FilterArgs, CustomFetch, List[TableName], PostProcessData, List[TableName], IdSource, SourceFilter, Field, bool, bool, List[Field]) -> None
assert table or custom_tables
self.table = table
self.model = model
self.normal_parent = normal_parent
self.virtual_parent = virtual_parent
self.filter_args = filter_args
self.parent_key = parent_key
self.use_all = use_all
self.is_seeded = is_seeded
self.exclude = exclude
self.custom_fetch = custom_fetch
self.custom_tables = custom_tables
self.post_process_data = post_process_data
self.concat_and_destroy = concat_and_destroy
self.id_source = id_source
self.source_filter = source_filter
self.children = [] # type: List[Config]
if normal_parent is not None:
self.parent = normal_parent # type: Optional[Config]
else:
self.parent = None
if virtual_parent is not None and normal_parent is not None:
raise ValueError('''
If you specify a normal_parent, please
do not create a virtual_parent.
''')
if normal_parent is not None:
normal_parent.children.append(self)
elif virtual_parent is not None:
virtual_parent.children.append(self)
elif is_seeded is None:
raise ValueError('''
You must specify a parent if you are
not using is_seeded.
''')
if self.id_source is not None:
if self.virtual_parent is None:
raise ValueError('''
You must specify a virtual_parent if you are
using id_source.''')
if self.id_source[0] != self.virtual_parent.table:
raise ValueError('''
Configuration error. To populate %s, you
want data from %s, but that differs from
the table name of your virtual parent (%s),
                    which suggests you may not have set up
the ordering correctly. You may simply
need to assign a virtual_parent, or there
may be deeper issues going on.''' % (
self.table,
self.id_source[0],
self.virtual_parent.table))
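# Illustrative sketch (not part of the original module), mirroring
# get_realm_config() below: a child constructed with normal_parent appends
# itself to the parent's children list, which drives the export walk order.
#
#   realm_config = Config(table='zerver_realm', is_seeded=True)
#   Config(table='zerver_defaultstream', model=DefaultStream,
#          normal_parent=realm_config, parent_key='realm_id__in')
#   assert realm_config.children[0].table == 'zerver_defaultstream'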
def export_from_config(response, config, seed_object=None, context=None):
# type: (TableData, Config, Any, Context) -> None
table = config.table
parent = config.parent
model = config.model
if context is None:
context = {}
if table:
exported_tables = [table]
else:
if config.custom_tables is None:
raise ValueError('''
You must specify config.custom_tables if you
are not specifying config.table''')
exported_tables = config.custom_tables
for t in exported_tables:
logging.info('Exporting via export_from_config: %s' % (t,))
rows = None
if config.is_seeded:
rows = [seed_object]
elif config.custom_fetch:
config.custom_fetch(
response=response,
config=config,
context=context
)
if config.custom_tables:
for t in config.custom_tables:
if t not in response:
raise Exception('Custom fetch failed to populate %s' % (t,))
elif config.concat_and_destroy:
# When we concat_and_destroy, we are working with
# temporary "tables" that are lists of records that
# should already be ready to export.
data = [] # type: List[Record]
for t in config.concat_and_destroy:
data += response[t]
del response[t]
logging.info('Deleted temporary %s' % (t,))
assert table is not None
response[table] = data
elif config.use_all:
assert model is not None
query = model.objects.all()
rows = list(query)
elif config.normal_parent:
# In this mode, our current model is figuratively Article,
# and normal_parent is figuratively Blog, and
# now we just need to get all the articles
# contained by the blogs.
model = config.model
assert parent is not None
assert parent.table is not None
assert config.parent_key is not None
parent_ids = [r['id'] for r in response[parent.table]]
filter_parms = {config.parent_key: parent_ids} # type: Dict[str, Any]
if config.filter_args is not None:
filter_parms.update(config.filter_args)
assert model is not None
query = model.objects.filter(**filter_parms)
rows = list(query)
elif config.id_source:
# In this mode, we are the figurative Blog, and we now
# need to look at the current response to get all the
# blog ids from the Article rows we fetched previously.
model = config.model
assert model is not None
# This will be a tuple of the form ('zerver_article', 'blog').
(child_table, field) = config.id_source
child_rows = response[child_table]
if config.source_filter:
child_rows = [r for r in child_rows if config.source_filter(r)]
lookup_ids = [r[field] for r in child_rows]
filter_parms = dict(id__in=lookup_ids)
if config.filter_args:
filter_parms.update(config.filter_args)
query = model.objects.filter(**filter_parms)
rows = list(query)
# Post-process rows (which won't apply to custom fetches/concats)
if rows is not None:
assert table is not None # Hint for mypy
response[table] = make_raw(rows, exclude=config.exclude)
if table in DATE_FIELDS:
floatify_datetime_fields(response, table)
if config.post_process_data:
config.post_process_data(
response=response,
config=config,
context=context
)
# Now walk our children. It's extremely important to respect
# the order of children here.
for child_config in config.children:
export_from_config(
response=response,
config=child_config,
context=context,
)
def get_realm_config():
# type: () -> Config
# This is common, public information about the realm that we can share
# with all realm users.
realm_config = Config(
table='zerver_realm',
is_seeded=True
)
Config(
table='zerver_defaultstream',
model=DefaultStream,
normal_parent=realm_config,
parent_key='realm_id__in',
)
Config(
table='zerver_realmemoji',
model=RealmEmoji,
normal_parent=realm_config,
parent_key='realm_id__in',
)
Config(
table='zerver_realmdomain',
model=RealmDomain,
normal_parent=realm_config,
parent_key='realm_id__in',
)
Config(
table='zerver_realmfilter',
model=RealmFilter,
normal_parent=realm_config,
parent_key='realm_id__in',
)
Config(
table='zerver_client',
model=Client,
virtual_parent=realm_config,
use_all=True
)
user_profile_config = Config(
custom_tables=[
'zerver_userprofile',
'zerver_userprofile_mirrordummy',
],
# set table for children who treat us as normal parent
table='zerver_userprofile',
virtual_parent=realm_config,
custom_fetch=fetch_user_profile,
)
Config(
custom_tables=[
'zerver_userprofile_crossrealm',
],
virtual_parent=user_profile_config,
custom_fetch=fetch_user_profile_cross_realm,
)
Config(
table='zerver_userpresence',
model=UserPresence,
normal_parent=user_profile_config,
parent_key='user_profile__in',
)
Config(
table='zerver_useractivity',
model=UserActivity,
normal_parent=user_profile_config,
parent_key='user_profile__in',
)
Config(
table='zerver_useractivityinterval',
model=UserActivityInterval,
normal_parent=user_profile_config,
parent_key='user_profile__in',
)
# Some of these tables are intermediate "tables" that we
# create only for the export. Think of them as similar to views.
user_subscription_config = Config(
table='_user_subscription',
model=Subscription,
normal_parent=user_profile_config,
filter_args={'recipient__type': Recipient.PERSONAL},
parent_key='user_profile__in',
)
Config(
table='_user_recipient',
model=Recipient,
virtual_parent=user_subscription_config,
id_source=('_user_subscription', 'recipient'),
)
#
stream_subscription_config = Config(
table='_stream_subscription',
model=Subscription,
normal_parent=user_profile_config,
filter_args={'recipient__type': Recipient.STREAM},
parent_key='user_profile__in',
)
stream_recipient_config = Config(
table='_stream_recipient',
model=Recipient,
virtual_parent=stream_subscription_config,
id_source=('_stream_subscription', 'recipient'),
)
Config(
table='zerver_stream',
model=Stream,
virtual_parent=stream_recipient_config,
id_source=('_stream_recipient', 'type_id'),
source_filter=lambda r: r['type'] == Recipient.STREAM,
exclude=['email_token'],
post_process_data=sanity_check_stream_data
)
#
Config(
custom_tables=[
'_huddle_recipient',
'_huddle_subscription',
'zerver_huddle',
],
normal_parent=user_profile_config,
custom_fetch=fetch_huddle_objects,
)
# Now build permanent tables from our temp tables.
Config(
table='zerver_recipient',
virtual_parent=user_profile_config,
concat_and_destroy=[
'_user_recipient',
'_stream_recipient',
'_huddle_recipient',
],
)
Config(
table='zerver_subscription',
virtual_parent=user_profile_config,
concat_and_destroy=[
'_user_subscription',
'_stream_subscription',
'_huddle_subscription',
]
)
return realm_config
def sanity_check_stream_data(response, config, context):
# type: (TableData, Config, Context) -> None
if context['exportable_user_ids'] is not None:
# If we restrict which user ids are exportable,
# the way that we find # streams is a little too
# complex to have a sanity check.
return
actual_streams = set([stream.name for stream in Stream.objects.filter(realm=response["zerver_realm"][0]['id'])])
streams_in_response = set([stream['name'] for stream in response['zerver_stream']])
if streams_in_response != actual_streams:
print(streams_in_response - actual_streams)
print(actual_streams - streams_in_response)
raise Exception('''
zerver_stream data does not match
Stream.objects.all().
Please investigate!
''')
def fetch_user_profile(response, config, context):
# type: (TableData, Config, Context) -> None
realm = context['realm']
exportable_user_ids = context['exportable_user_ids']
query = UserProfile.objects.filter(realm_id=realm.id)
exclude = ['password', 'api_key']
rows = make_raw(list(query), exclude=exclude)
normal_rows = [] # type: List[Record]
dummy_rows = [] # type: List[Record]
for row in rows:
if exportable_user_ids is not None:
if row['id'] in exportable_user_ids:
assert not row['is_mirror_dummy']
else:
# Convert non-exportable users to
# inactive is_mirror_dummy users.
row['is_mirror_dummy'] = True
row['is_active'] = False
if row['is_mirror_dummy']:
dummy_rows.append(row)
else:
normal_rows.append(row)
response['zerver_userprofile'] = normal_rows
response['zerver_userprofile_mirrordummy'] = dummy_rows
def fetch_user_profile_cross_realm(response, config, context):
# type: (TableData, Config, Context) -> None
realm = context['realm']
if realm.string_id == "zulip":
response['zerver_userprofile_crossrealm'] = []
else:
response['zerver_userprofile_crossrealm'] = [dict(email=x.email, id=x.id) for x in [
get_system_bot(settings.NOTIFICATION_BOT),
get_system_bot(settings.EMAIL_GATEWAY_BOT),
get_system_bot(settings.WELCOME_BOT),
]]
def fetch_attachment_data(response, realm_id, message_ids):
# type: (TableData, int, Set[int]) -> None
filter_args = {'realm_id': realm_id}
query = Attachment.objects.filter(**filter_args)
response['zerver_attachment'] = make_raw(list(query))
floatify_datetime_fields(response, 'zerver_attachment')
'''
We usually export most messages for the realm, but not
quite ALL messages for the realm. So, we need to
clean up our attachment data to have correct
values for response['zerver_attachment'][<n>]['messages'].
'''
for row in response['zerver_attachment']:
        filtered_message_ids = set(row['messages']).intersection(message_ids)
        row['messages'] = sorted(list(filtered_message_ids))
'''
Attachments can be connected to multiple messages, although
it's most common to have just one message. Regardless,
if none of those message(s) survived the filtering above
for a particular attachment, then we won't export the
attachment row.
'''
response['zerver_attachment'] = [
row for row in response['zerver_attachment']
if row['messages']]
def fetch_huddle_objects(response, config, context):
# type: (TableData, Config, Context) -> None
realm = context['realm']
assert config.parent is not None
assert config.parent.table is not None
user_profile_ids = set(r['id'] for r in response[config.parent.table])
# First we get all huddles involving someone in the realm.
realm_huddle_subs = Subscription.objects.select_related("recipient").filter(recipient__type=Recipient.HUDDLE,
user_profile__in=user_profile_ids)
realm_huddle_recipient_ids = set(sub.recipient_id for sub in realm_huddle_subs)
# Mark all Huddles whose recipient ID contains a cross-realm user.
unsafe_huddle_recipient_ids = set()
for sub in Subscription.objects.select_related().filter(recipient__in=realm_huddle_recipient_ids):
if sub.user_profile.realm != realm:
# In almost every case the other realm will be zulip.com
unsafe_huddle_recipient_ids.add(sub.recipient_id)
# Now filter down to just those huddles that are entirely within the realm.
#
# This is important for ensuring that the User objects needed
# to import it on the other end exist (since we're only
# exporting the users from this realm), at the cost of losing
# some of these cross-realm messages.
huddle_subs = [sub for sub in realm_huddle_subs if sub.recipient_id not in unsafe_huddle_recipient_ids]
huddle_recipient_ids = set(sub.recipient_id for sub in huddle_subs)
huddle_ids = set(sub.recipient.type_id for sub in huddle_subs)
huddle_subscription_dicts = make_raw(huddle_subs)
huddle_recipients = make_raw(Recipient.objects.filter(id__in=huddle_recipient_ids))
response['_huddle_recipient'] = huddle_recipients
response['_huddle_subscription'] = huddle_subscription_dicts
response['zerver_huddle'] = make_raw(Huddle.objects.filter(id__in=huddle_ids))
def fetch_usermessages(realm, message_ids, user_profile_ids, message_filename):
# type: (Realm, Set[int], Set[int], Path) -> List[Record]
# UserMessage export security rule: You can export UserMessages
# for the messages you exported for the users in your realm.
user_message_query = UserMessage.objects.filter(user_profile__realm=realm,
message_id__in=message_ids)
user_message_chunk = []
for user_message in user_message_query:
if user_message.user_profile_id not in user_profile_ids:
continue
user_message_obj = model_to_dict(user_message)
user_message_obj['flags_mask'] = user_message.flags.mask
del user_message_obj['flags']
user_message_chunk.append(user_message_obj)
logging.info("Fetched UserMessages for %s" % (message_filename,))
return user_message_chunk
def export_usermessages_batch(input_path, output_path):
# type: (Path, Path) -> None
"""As part of the system for doing parallel exports, this runs on one
batch of Message objects and adds the corresponding UserMessage
objects. (This is called by the export_usermessage_batch
management command)."""
with open(input_path, "r") as input_file:
output = ujson.loads(input_file.read())
message_ids = [item['id'] for item in output['zerver_message']]
user_profile_ids = set(output['zerver_userprofile_ids'])
del output['zerver_userprofile_ids']
realm = Realm.objects.get(id=output['realm_id'])
del output['realm_id']
output['zerver_usermessage'] = fetch_usermessages(realm, set(message_ids), user_profile_ids, output_path)
write_message_export(output_path, output)
os.unlink(input_path)
def write_message_export(message_filename, output):
# type: (Path, MessageOutput) -> None
write_data_to_file(output_file=message_filename, data=output)
logging.info("Dumped to %s" % (message_filename,))
def export_partial_message_files(realm, response, chunk_size=1000, output_dir=None):
# type: (Realm, TableData, int, Path) -> Set[int]
if output_dir is None:
output_dir = tempfile.mkdtemp(prefix="zulip-export")
def get_ids(records):
# type: (List[Record]) -> Set[int]
return set(x['id'] for x in records)
# Basic security rule: You can export everything either...
# - sent by someone in your exportable_user_ids
# OR
# - received by someone in your exportable_user_ids (which
# equates to a recipient object we are exporting)
#
# TODO: In theory, you should be able to export messages in
# cross-realm PM threads; currently, this only exports cross-realm
# messages received by your realm that were sent by Zulip system
# bots (e.g. emailgateway, notification-bot).
# Here, "we" and "us" refers to the inner circle of users who
# were specified as being allowed to be exported. "Them"
# refers to other users.
user_ids_for_us = get_ids(
response['zerver_userprofile']
)
recipient_ids_for_us = get_ids(response['zerver_recipient'])
ids_of_our_possible_senders = get_ids(
response['zerver_userprofile'] +
response['zerver_userprofile_mirrordummy'] +
response['zerver_userprofile_crossrealm'])
ids_of_non_exported_possible_recipients = ids_of_our_possible_senders - user_ids_for_us
recipients_for_them = Recipient.objects.filter(
type=Recipient.PERSONAL,
type_id__in=ids_of_non_exported_possible_recipients).values("id")
recipient_ids_for_them = get_ids(recipients_for_them)
# We capture most messages here, since the
# recipients we subscribe to are also the
# recipients of most messages we send.
messages_we_received = Message.objects.filter(
sender__in=ids_of_our_possible_senders,
recipient__in=recipient_ids_for_us,
).order_by('id')
# This should pick up stragglers; messages we sent
    # where the recipient wasn't subscribed to by any of
# us (such as PMs to "them").
messages_we_sent_to_them = Message.objects.filter(
sender__in=user_ids_for_us,
recipient__in=recipient_ids_for_them,
).order_by('id')
message_queries = [
messages_we_received,
messages_we_sent_to_them
]
all_message_ids = set() # type: Set[int]
dump_file_id = 1
for message_query in message_queries:
dump_file_id = write_message_partial_for_query(
realm=realm,
message_query=message_query,
dump_file_id=dump_file_id,
all_message_ids=all_message_ids,
output_dir=output_dir,
chunk_size=chunk_size,
user_profile_ids=user_ids_for_us,
)
return all_message_ids
def write_message_partial_for_query(realm, message_query, dump_file_id,
all_message_ids, output_dir,
chunk_size, user_profile_ids):
# type: (Realm, Any, int, Set[int], Path, int, Set[int]) -> int
min_id = -1
while True:
actual_query = message_query.filter(id__gt=min_id)[0:chunk_size]
message_chunk = make_raw(actual_query)
message_ids = set(m['id'] for m in message_chunk)
assert len(message_ids.intersection(all_message_ids)) == 0
all_message_ids.update(message_ids)
if len(message_chunk) == 0:
break
# Figure out the name of our shard file.
message_filename = os.path.join(output_dir, "messages-%06d.json" % (dump_file_id,))
message_filename += '.partial'
logging.info("Fetched Messages for %s" % (message_filename,))
# Clean up our messages.
table_data = {} # type: TableData
table_data['zerver_message'] = message_chunk
floatify_datetime_fields(table_data, 'zerver_message')
# Build up our output for the .partial file, which needs
# a list of user_profile_ids to search for (as well as
# the realm id).
output = {} # type: MessageOutput
output['zerver_message'] = table_data['zerver_message']
output['zerver_userprofile_ids'] = list(user_profile_ids)
output['realm_id'] = realm.id
# And write the data.
write_message_export(message_filename, output)
min_id = max(message_ids)
dump_file_id += 1
return dump_file_id
def export_uploads_and_avatars(realm, output_dir):
# type: (Realm, Path) -> None
uploads_output_dir = os.path.join(output_dir, 'uploads')
avatars_output_dir = os.path.join(output_dir, 'avatars')
    for dir_path in (uploads_output_dir, avatars_output_dir):
        if not os.path.exists(dir_path):
            os.makedirs(dir_path)
if settings.LOCAL_UPLOADS_DIR:
# Small installations and developers will usually just store files locally.
export_uploads_from_local(realm,
local_dir=os.path.join(settings.LOCAL_UPLOADS_DIR, "files"),
output_dir=uploads_output_dir)
export_avatars_from_local(realm,
local_dir=os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars"),
output_dir=avatars_output_dir)
else:
# Some bigger installations will have their data stored on S3.
export_files_from_s3(realm,
settings.S3_AVATAR_BUCKET,
output_dir=avatars_output_dir,
processing_avatars=True)
export_files_from_s3(realm,
settings.S3_AUTH_UPLOADS_BUCKET,
output_dir=uploads_output_dir)
def export_files_from_s3(realm, bucket_name, output_dir, processing_avatars=False):
# type: (Realm, str, Path, bool) -> None
conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
bucket = conn.get_bucket(bucket_name, validate=True)
records = []
logging.info("Downloading uploaded files from %s" % (bucket_name))
avatar_hash_values = set()
user_ids = set()
if processing_avatars:
bucket_list = bucket.list()
for user_profile in UserProfile.objects.filter(realm=realm):
avatar_hash = user_avatar_hash(user_profile.email)
avatar_hash_values.add(avatar_hash)
avatar_hash_values.add(avatar_hash + ".original")
user_ids.add(user_profile.id)
else:
bucket_list = bucket.list(prefix="%s/" % (realm.id,))
if settings.EMAIL_GATEWAY_BOT is not None:
email_gateway_bot = get_system_bot(settings.EMAIL_GATEWAY_BOT)
else:
email_gateway_bot = None
count = 0
for bkey in bucket_list:
if processing_avatars and bkey.name not in avatar_hash_values:
continue
key = bucket.get_key(bkey.name)
# This can happen if an email address has moved realms
if 'realm_id' in key.metadata and key.metadata['realm_id'] != str(realm.id):
if email_gateway_bot is None or key.metadata['user_profile_id'] != str(email_gateway_bot.id):
raise Exception("Key metadata problem: %s %s / %s" % (key.name, key.metadata, realm.id))
# Email gateway bot sends messages, potentially including attachments, cross-realm.
print("File uploaded by email gateway bot: %s / %s" % (key.name, key.metadata))
elif processing_avatars:
if 'user_profile_id' not in key.metadata:
raise Exception("Missing user_profile_id in key metadata: %s" % (key.metadata,))
if int(key.metadata['user_profile_id']) not in user_ids:
raise Exception("Wrong user_profile_id in key metadata: %s" % (key.metadata,))
elif 'realm_id' not in key.metadata:
raise Exception("Missing realm_id in key metadata: %s" % (key.metadata,))
record = dict(s3_path=key.name, bucket=bucket_name,
size=key.size, last_modified=key.last_modified,
content_type=key.content_type, md5=key.md5)
record.update(key.metadata)
# A few early avatars don't have 'realm_id' on the object; fix their metadata
user_profile = get_user_profile_by_id(record['user_profile_id'])
if 'realm_id' not in record:
record['realm_id'] = user_profile.realm_id
record['user_profile_email'] = user_profile.email
if processing_avatars:
dirname = output_dir
filename = os.path.join(dirname, key.name)
record['path'] = key.name
else:
fields = key.name.split('/')
if len(fields) != 3:
raise Exception("Suspicious key %s" % (key.name))
dirname = os.path.join(output_dir, fields[1])
filename = os.path.join(dirname, fields[2])
record['path'] = os.path.join(fields[1], fields[2])
if not os.path.exists(dirname):
os.makedirs(dirname)
key.get_contents_to_filename(filename)
records.append(record)
count += 1
        if count % 100 == 0:
logging.info("Finished %s" % (count,))
with open(os.path.join(output_dir, "records.json"), "w") as records_file:
ujson.dump(records, records_file, indent=4)
def export_uploads_from_local(realm, local_dir, output_dir):
# type: (Realm, Path, Path) -> None
count = 0
records = []
for attachment in Attachment.objects.filter(realm_id=realm.id):
local_path = os.path.join(local_dir, attachment.path_id)
output_path = os.path.join(output_dir, attachment.path_id)
mkdir_p(os.path.dirname(output_path))
subprocess.check_call(["cp", "-a", local_path, output_path])
stat = os.stat(local_path)
record = dict(realm_id=attachment.realm_id,
user_profile_id=attachment.owner.id,
user_profile_email=attachment.owner.email,
s3_path=attachment.path_id,
path=attachment.path_id,
size=stat.st_size,
last_modified=stat.st_mtime,
content_type=None)
records.append(record)
count += 1
        if count % 100 == 0:
logging.info("Finished %s" % (count,))
with open(os.path.join(output_dir, "records.json"), "w") as records_file:
ujson.dump(records, records_file, indent=4)
def export_avatars_from_local(realm, local_dir, output_dir):
# type: (Realm, Path, Path) -> None
count = 0
records = []
users = list(UserProfile.objects.filter(realm=realm))
users += [
get_system_bot(settings.NOTIFICATION_BOT),
get_system_bot(settings.EMAIL_GATEWAY_BOT),
get_system_bot(settings.WELCOME_BOT),
]
for user in users:
if user.avatar_source == UserProfile.AVATAR_FROM_GRAVATAR:
continue
avatar_hash = user_avatar_hash(user.email)
wildcard = os.path.join(local_dir, avatar_hash + '.*')
for local_path in glob.glob(wildcard):
logging.info('Copying avatar file for user %s from %s' % (
user.email, local_path))
fn = os.path.basename(local_path)
output_path = os.path.join(output_dir, fn)
mkdir_p(str(os.path.dirname(output_path)))
subprocess.check_call(["cp", "-a", str(local_path), str(output_path)])
stat = os.stat(local_path)
record = dict(realm_id=realm.id,
user_profile_id=user.id,
user_profile_email=user.email,
s3_path=fn,
path=fn,
size=stat.st_size,
last_modified=stat.st_mtime,
content_type=None)
records.append(record)
count += 1
            if count % 100 == 0:
logging.info("Finished %s" % (count,))
with open(os.path.join(output_dir, "records.json"), "w") as records_file:
ujson.dump(records, records_file, indent=4)
def do_write_stats_file_for_realm_export(output_dir):
# type: (Path) -> None
stats_file = os.path.join(output_dir, 'stats.txt')
realm_file = os.path.join(output_dir, 'realm.json')
attachment_file = os.path.join(output_dir, 'attachment.json')
message_files = glob.glob(os.path.join(output_dir, 'messages-*.json'))
fns = sorted([attachment_file] + message_files + [realm_file])
logging.info('Writing stats file: %s\n' % (stats_file,))
with open(stats_file, 'w') as f:
for fn in fns:
f.write(os.path.basename(fn) + '\n')
            with open(fn) as data_file:
                data = ujson.load(data_file)
for k in sorted(data):
f.write('%5d %s\n' % (len(data[k]), k))
f.write('\n')
avatar_file = os.path.join(output_dir, 'avatars/records.json')
uploads_file = os.path.join(output_dir, 'uploads/records.json')
for fn in [avatar_file, uploads_file]:
f.write(fn+'\n')
            with open(fn) as data_file:
                data = ujson.load(data_file)
f.write('%5d records\n' % len(data))
f.write('\n')
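# Hypothetical usage sketch (normally this is driven by a management
# command; the realm name and path below are made up):
#
#     realm = Realm.objects.get(string_id='example')
#     do_export_realm(realm, output_dir='/tmp/zulip-export', threads=4)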
def do_export_realm(realm, output_dir, threads, exportable_user_ids=None):
# type: (Realm, Path, int, Set[int]) -> None
response = {} # type: TableData
# We need at least one thread running to export
# UserMessage rows. The management command should
# enforce this for us.
if not settings.TEST_SUITE:
assert threads >= 1
assert os.path.exists("./manage.py")
realm_config = get_realm_config()
create_soft_link(source=output_dir, in_progress=True)
logging.info("Exporting data from get_realm_config()...")
export_from_config(
response=response,
config=realm_config,
seed_object=realm,
context=dict(realm=realm, exportable_user_ids=exportable_user_ids)
)
logging.info('...DONE with get_realm_config() data')
export_file = os.path.join(output_dir, "realm.json")
write_data_to_file(output_file=export_file, data=response)
sanity_check_output(response)
logging.info("Exporting uploaded files and avatars")
export_uploads_and_avatars(realm, output_dir)
# We (sort of) export zerver_message rows here. We write
# them to .partial files that are subsequently fleshed out
# by parallel processes to add in zerver_usermessage data.
# This is for performance reasons, of course. Some installations
# have millions of messages.
logging.info("Exporting .partial files messages")
message_ids = export_partial_message_files(realm, response, output_dir=output_dir)
logging.info('%d messages were exported' % (len(message_ids)))
# zerver_attachment
export_attachment_table(realm=realm, output_dir=output_dir, message_ids=message_ids)
# Start parallel jobs to export the UserMessage objects.
launch_user_message_subprocesses(threads=threads, output_dir=output_dir)
logging.info("Finished exporting %s" % (realm.string_id))
create_soft_link(source=output_dir, in_progress=False)
def export_attachment_table(realm, output_dir, message_ids):
# type: (Realm, Path, Set[int]) -> None
response = {} # type: TableData
fetch_attachment_data(response=response, realm_id=realm.id, message_ids=message_ids)
output_file = os.path.join(output_dir, "attachment.json")
logging.info('Writing attachment table data to %s' % (output_file,))
write_data_to_file(output_file=output_file, data=response)
def create_soft_link(source, in_progress=True):
# type: (Path, bool) -> None
is_done = not in_progress
in_progress_link = '/tmp/zulip-export-in-progress'
done_link = '/tmp/zulip-export-most-recent'
if in_progress:
new_target = in_progress_link
else:
subprocess.check_call(['rm', '-f', in_progress_link])
new_target = done_link
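    # ln -nsf: -f replaces an existing link, and -n keeps ln from
    # following an existing symlink and creating the new link *inside*
    # the directory it points at.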
subprocess.check_call(["ln", "-nsf", source, new_target])
if is_done:
logging.info('See %s for output files' % (new_target,))
def launch_user_message_subprocesses(threads, output_dir):
# type: (int, Path) -> None
logging.info('Launching %d PARALLEL subprocesses to export UserMessage rows' % (threads,))
def run_job(shard):
# type: (str) -> int
subprocess.call(["./manage.py", 'export_usermessage_batch', '--path',
str(output_dir), '--thread', shard])
return 0
for (status, job) in run_parallel(run_job,
[str(x) for x in range(0, threads)],
threads=threads):
print("Shard %s finished, status %s" % (job, status))
def do_export_user(user_profile, output_dir):
# type: (UserProfile, Path) -> None
response = {} # type: TableData
export_single_user(user_profile, response)
export_file = os.path.join(output_dir, "user.json")
write_data_to_file(output_file=export_file, data=response)
logging.info("Exporting messages")
export_messages_single_user(user_profile, output_dir)
def export_single_user(user_profile, response):
# type: (UserProfile, TableData) -> None
config = get_single_user_config()
export_from_config(
response=response,
config=config,
seed_object=user_profile,
)
def get_single_user_config():
# type: () -> Config
# zerver_userprofile
user_profile_config = Config(
table='zerver_userprofile',
is_seeded=True,
exclude=['password', 'api_key'],
)
# zerver_subscription
subscription_config = Config(
table='zerver_subscription',
model=Subscription,
normal_parent=user_profile_config,
parent_key='user_profile__in',
)
# zerver_recipient
recipient_config = Config(
table='zerver_recipient',
model=Recipient,
virtual_parent=subscription_config,
id_source=('zerver_subscription', 'recipient'),
)
# zerver_stream
Config(
table='zerver_stream',
model=Stream,
virtual_parent=recipient_config,
id_source=('zerver_recipient', 'type_id'),
source_filter=lambda r: r['type'] == Recipient.STREAM,
exclude=['email_token'],
)
return user_profile_config
def export_messages_single_user(user_profile, output_dir, chunk_size=1000):
# type: (UserProfile, Path, int) -> None
user_message_query = UserMessage.objects.filter(user_profile=user_profile).order_by("id")
min_id = -1
dump_file_id = 1
while True:
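        # select_related() pulls each Message and its sending Client in
        # the same query, avoiding N+1 lookups while we page by id.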
actual_query = user_message_query.select_related("message", "message__sending_client").filter(id__gt=min_id)[0:chunk_size]
user_message_chunk = [um for um in actual_query]
user_message_ids = set(um.id for um in user_message_chunk)
if len(user_message_chunk) == 0:
break
message_chunk = []
for user_message in user_message_chunk:
item = model_to_dict(user_message.message)
item['flags'] = user_message.flags_list()
item['flags_mask'] = user_message.flags.mask
# Add a few nice, human-readable details
item['sending_client_name'] = user_message.message.sending_client.name
item['display_recipient'] = get_display_recipient(user_message.message.recipient)
message_chunk.append(item)
message_filename = os.path.join(output_dir, "messages-%06d.json" % (dump_file_id,))
logging.info("Fetched Messages for %s" % (message_filename,))
output = {'zerver_message': message_chunk}
floatify_datetime_fields(output, 'zerver_message')
write_message_export(message_filename, output)
min_id = max(user_message_ids)
dump_file_id += 1
# Code from here is the realm import code path
# id_maps is a dictionary that maps table names to dictionaries
# that map old ids to new ids. We use this in
# re_map_foreign_keys and other places.
#
# We explicitly initialize id_maps with the tables that support
# id re-mapping.
#
# Code reviewers: give these tables extra scrutiny, as we need to
# make sure to reload related tables AFTER we re-map the ids.
id_maps = {
'client': {},
'user_profile': {},
} # type: Dict[str, Dict[int, int]]
def update_id_map(table, old_id, new_id):
# type: (TableName, int, int) -> None
if table not in id_maps:
raise Exception('''
Table %s is not initialized in id_maps, which could
mean that we have not thought through circular
dependencies.
''' % (table,))
id_maps[table][old_id] = new_id
def fix_datetime_fields(data, table):
# type: (TableData, TableName) -> None
for item in data[table]:
for field_name in DATE_FIELDS[table]:
if item[field_name] is not None:
item[field_name] = datetime.datetime.fromtimestamp(item[field_name], tz=timezone_utc)
def convert_to_id_fields(data, table, field_name):
# type: (TableData, TableName, Field) -> None
'''
When Django gives us dict objects via model_to_dict, the foreign
key fields are `foo`, but we want `foo_id` for the bulk insert.
This function handles the simple case where we simply rename
the fields. For cases where we need to munge ids in the
database, see re_map_foreign_keys.
'''
for item in data[table]:
item[field_name + "_id"] = item[field_name]
del item[field_name]
def re_map_foreign_keys(data, table, field_name, related_table, verbose=False):
# type: (TableData, TableName, Field, TableName, bool) -> None
'''
We occasionally need to assign new ids to rows during the
import/export process, to accommodate things like existing rows
already being in tables. See bulk_import_client for more context.
The tricky part is making sure that foreign key references
are in sync with the new ids, and this fixer function does
the re-mapping. (It also appends `_id` to the field.)
'''
lookup_table = id_maps[related_table]
for item in data[table]:
old_id = item[field_name]
if old_id in lookup_table:
new_id = lookup_table[old_id]
if verbose:
logging.info('Remapping %s%s from %s to %s' % (table,
field_name + '_id',
old_id,
new_id))
else:
new_id = old_id
item[field_name + "_id"] = new_id
del item[field_name]
def fix_bitfield_keys(data, table, field_name):
# type: (TableData, TableName, Field) -> None
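    # The export writes both a human-readable flags list and the raw
    # integer mask (cf. export_messages_single_user); the database
    # column wants the integer, so restore it under the plain field name.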
for item in data[table]:
item[field_name] = item[field_name + '_mask']
del item[field_name + '_mask']
def bulk_import_model(data, model, table, dump_file_id=None):
# type: (TableData, Any, TableName, str) -> None
# TODO, deprecate dump_file_id
model.objects.bulk_create(model(**item) for item in data[table])
if dump_file_id is None:
logging.info("Successfully imported %s from %s." % (model, table))
else:
logging.info("Successfully imported %s from %s[%s]." % (model, table, dump_file_id))
# Client is a table shared by multiple realms, so in order to
# correctly import multiple realms into the same server, we need to
# check if a Client object already exists, and if so, remap all
# Client IDs to the values in the new DB.
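# Example with made-up ids: if the export had Client 7 named 'website'
# and this server already has 'website' as Client 3, id_maps['client']
# will map 7 -> 3, and re_map_foreign_keys() then rewrites every
# sending_client reference accordingly.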
def bulk_import_client(data, model, table):
# type: (TableData, Any, TableName) -> None
for item in data[table]:
try:
client = Client.objects.get(name=item['name'])
except Client.DoesNotExist:
client = Client.objects.create(name=item['name'])
update_id_map(table='client', old_id=item['id'], new_id=client.id)
def import_uploads_local(import_dir, processing_avatars=False):
# type: (Path, bool) -> None
records_filename = os.path.join(import_dir, "records.json")
with open(records_filename) as records_file:
records = ujson.loads(records_file.read())
for record in records:
if processing_avatars:
# For avatars, we need to rehash the user's email with the
# new server's avatar salt
avatar_hash = user_avatar_hash(record['user_profile_email'])
file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars", avatar_hash)
if record['s3_path'].endswith('.original'):
file_path += '.original'
else:
file_path += '.png'
else:
file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, "files", record['s3_path'])
orig_file_path = os.path.join(import_dir, record['path'])
if not os.path.exists(os.path.dirname(file_path)):
subprocess.check_call(["mkdir", "-p", os.path.dirname(file_path)])
shutil.copy(orig_file_path, file_path)
def import_uploads_s3(bucket_name, import_dir, processing_avatars=False):
# type: (str, Path, bool) -> None
conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
bucket = conn.get_bucket(bucket_name, validate=True)
records_filename = os.path.join(import_dir, "records.json")
with open(records_filename) as records_file:
records = ujson.loads(records_file.read())
for record in records:
key = Key(bucket)
if processing_avatars:
# For avatars, we need to rehash the user's email with the
# new server's avatar salt
avatar_hash = user_avatar_hash(record['user_profile_email'])
key.key = avatar_hash
if record['s3_path'].endswith('.original'):
key.key += '.original'
else:
key.key = record['s3_path']
user_profile_id = int(record['user_profile_id'])
# Support email gateway bot and other cross-realm messages
if user_profile_id in id_maps["user_profile"]:
logging.info("Uploaded by ID mapped user: %s!" % (user_profile_id,))
user_profile_id = id_maps["user_profile"][user_profile_id]
user_profile = get_user_profile_by_id(user_profile_id)
key.set_metadata("user_profile_id", str(user_profile.id))
key.set_metadata("realm_id", str(user_profile.realm_id))
key.set_metadata("orig_last_modified", record['last_modified'])
headers = {u'Content-Type': record['content_type']}
key.set_contents_from_filename(os.path.join(import_dir, record['path']), headers=headers)
def import_uploads(import_dir, processing_avatars=False):
# type: (Path, bool) -> None
if processing_avatars:
logging.info("Importing avatars")
else:
logging.info("Importing uploaded files")
if settings.LOCAL_UPLOADS_DIR:
import_uploads_local(import_dir, processing_avatars=processing_avatars)
else:
if processing_avatars:
bucket_name = settings.S3_AVATAR_BUCKET
else:
bucket_name = settings.S3_AUTH_UPLOADS_BUCKET
import_uploads_s3(bucket_name, import_dir, processing_avatars=processing_avatars)
# Importing data suffers from a difficult ordering problem because of
# models that reference each other circularly. Here is a correct order.
#
# * Client [no deps]
# * Realm [-notifications_stream]
# * Stream [only depends on realm]
# * Realm's notifications_stream
# * Now can do all realm_tables
# * UserProfile, in order by ID to avoid bot loop issues
# * Huddle
# * Recipient
# * Subscription
# * Message
# * UserMessage
#
# Because the Python object => JSON conversion process is not fully
# faithful, we have to use a set of fixers (e.g. on DateTime objects
# and Foreign Keys) to do the import correctly.
def do_import_realm(import_dir):
# type: (Path) -> None
logging.info("Importing realm dump %s" % (import_dir,))
if not os.path.exists(import_dir):
raise Exception("Missing import directory!")
realm_data_filename = os.path.join(import_dir, "realm.json")
if not os.path.exists(realm_data_filename):
raise Exception("Missing realm.json file!")
logging.info("Importing realm data from %s" % (realm_data_filename,))
with open(realm_data_filename) as f:
data = ujson.load(f)
convert_to_id_fields(data, 'zerver_realm', 'notifications_stream')
fix_datetime_fields(data, 'zerver_realm')
realm = Realm(**data['zerver_realm'][0])
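    # Realm.notifications_stream circularly references Stream, which in
    # turn references Realm, so import the realm with the field nulled
    # and restore the id once the streams exist (a few lines below).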
if realm.notifications_stream_id is not None:
notifications_stream_id = int(realm.notifications_stream_id) # type: Optional[int]
else:
notifications_stream_id = None
realm.notifications_stream_id = None
realm.save()
bulk_import_client(data, Client, 'zerver_client')
# Email tokens will automatically be randomly generated when the
# Stream objects are created by Django.
fix_datetime_fields(data, 'zerver_stream')
convert_to_id_fields(data, 'zerver_stream', 'realm')
bulk_import_model(data, Stream, 'zerver_stream')
realm.notifications_stream_id = notifications_stream_id
realm.save()
convert_to_id_fields(data, "zerver_defaultstream", 'stream')
for (table, model) in realm_tables:
convert_to_id_fields(data, table, 'realm')
bulk_import_model(data, model, table)
# Remap the user IDs for notification_bot and friends to their
# appropriate IDs on this server
for item in data['zerver_userprofile_crossrealm']:
logging.info("Adding to ID map: %s %s" % (item['id'], get_user_profile_by_email(item['email']).id))
new_user_id = get_user_profile_by_email(item['email']).id
update_id_map(table='user_profile', old_id=item['id'], new_id=new_user_id)
# Merge in zerver_userprofile_mirrordummy
data['zerver_userprofile'] = data['zerver_userprofile'] + data['zerver_userprofile_mirrordummy']
del data['zerver_userprofile_mirrordummy']
data['zerver_userprofile'].sort(key=lambda r: r['id'])
fix_datetime_fields(data, 'zerver_userprofile')
convert_to_id_fields(data, 'zerver_userprofile', 'realm')
re_map_foreign_keys(data, 'zerver_userprofile', 'bot_owner', related_table="user_profile")
convert_to_id_fields(data, 'zerver_userprofile', 'default_sending_stream')
convert_to_id_fields(data, 'zerver_userprofile', 'default_events_register_stream')
for user_profile_dict in data['zerver_userprofile']:
user_profile_dict['password'] = None
user_profile_dict['api_key'] = random_api_key()
# Since Zulip doesn't use these permissions, drop them
del user_profile_dict['user_permissions']
del user_profile_dict['groups']
user_profiles = [UserProfile(**item) for item in data['zerver_userprofile']]
for user_profile in user_profiles:
user_profile.set_unusable_password()
UserProfile.objects.bulk_create(user_profiles)
if 'zerver_huddle' in data:
bulk_import_model(data, Huddle, 'zerver_huddle')
bulk_import_model(data, Recipient, 'zerver_recipient')
re_map_foreign_keys(data, 'zerver_subscription', 'user_profile', related_table="user_profile")
convert_to_id_fields(data, 'zerver_subscription', 'recipient')
bulk_import_model(data, Subscription, 'zerver_subscription')
fix_datetime_fields(data, 'zerver_userpresence')
re_map_foreign_keys(data, 'zerver_userpresence', 'user_profile', related_table="user_profile")
re_map_foreign_keys(data, 'zerver_userpresence', 'client', related_table='client')
bulk_import_model(data, UserPresence, 'zerver_userpresence')
fix_datetime_fields(data, 'zerver_useractivity')
re_map_foreign_keys(data, 'zerver_useractivity', 'user_profile', related_table="user_profile")
re_map_foreign_keys(data, 'zerver_useractivity', 'client', related_table='client')
bulk_import_model(data, UserActivity, 'zerver_useractivity')
fix_datetime_fields(data, 'zerver_useractivityinterval')
re_map_foreign_keys(data, 'zerver_useractivityinterval', 'user_profile', related_table="user_profile")
bulk_import_model(data, UserActivityInterval, 'zerver_useractivityinterval')
# Import uploaded files and avatars
import_uploads(os.path.join(import_dir, "avatars"), processing_avatars=True)
import_uploads(os.path.join(import_dir, "uploads"))
# Import zerver_message and zerver_usermessage
import_message_data(import_dir)
# Do attachments AFTER message data is loaded.
# TODO: de-dup how we read these json files.
fn = os.path.join(import_dir, "attachment.json")
if not os.path.exists(fn):
raise Exception("Missing attachment.json file!")
logging.info("Importing attachment data from %s" % (fn,))
with open(fn) as f:
data = ujson.load(f)
import_attachments(data)
def import_message_data(import_dir):
# type: (Path) -> None
dump_file_id = 1
while True:
message_filename = os.path.join(import_dir, "messages-%06d.json" % (dump_file_id,))
if not os.path.exists(message_filename):
break
with open(message_filename) as f:
data = ujson.load(f)
logging.info("Importing message dump %s" % (message_filename,))
re_map_foreign_keys(data, 'zerver_message', 'sender', related_table="user_profile")
convert_to_id_fields(data, 'zerver_message', 'recipient')
re_map_foreign_keys(data, 'zerver_message', 'sending_client', related_table='client')
fix_datetime_fields(data, 'zerver_message')
bulk_import_model(data, Message, 'zerver_message')
# Due to the structure of these message chunks, we're
# guaranteed to have already imported all the Message objects
# for this batch of UserMessage objects.
convert_to_id_fields(data, 'zerver_usermessage', 'message')
re_map_foreign_keys(data, 'zerver_usermessage', 'user_profile', related_table="user_profile")
fix_bitfield_keys(data, 'zerver_usermessage', 'flags')
bulk_import_model(data, UserMessage, 'zerver_usermessage')
dump_file_id += 1
def import_attachments(data):
# type: (TableData) -> None
# Clean up the data in zerver_attachment that is not
# relevant to our many-to-many import.
fix_datetime_fields(data, 'zerver_attachment')
re_map_foreign_keys(data, 'zerver_attachment', 'owner', related_table="user_profile")
convert_to_id_fields(data, 'zerver_attachment', 'realm')
# Configure ourselves. Django models many-to-many (m2m)
# relations asymmetrically. The parent here refers to the
# Model that has the ManyToManyField. It is assumed here
# the child models have been loaded, but we are in turn
# responsible for loading the parents and the m2m rows.
parent_model = Attachment
parent_db_table_name = 'zerver_attachment'
parent_singular = 'attachment'
child_singular = 'message'
child_plural = 'messages'
m2m_table_name = 'zerver_attachment_messages'
parent_id = 'attachment_id'
child_id = 'message_id'
# First, build our list of many-to-many (m2m) rows.
# We do this in a slightly convoluted way to anticipate
# a future where we may need to call re_map_foreign_keys.
m2m_rows = [] # type: List[Record]
for parent_row in data[parent_db_table_name]:
for fk_id in parent_row[child_plural]:
m2m_row = {} # type: Record
m2m_row[parent_singular] = parent_row['id']
m2m_row[child_singular] = fk_id
m2m_rows.append(m2m_row)
# Create our table data for insert.
m2m_data = {m2m_table_name: m2m_rows} # type: TableData
convert_to_id_fields(m2m_data, m2m_table_name, parent_singular)
convert_to_id_fields(m2m_data, m2m_table_name, child_singular)
m2m_rows = m2m_data[m2m_table_name]
# Next, delete out our child data from the parent rows.
for parent_row in data[parent_db_table_name]:
del parent_row[child_plural]
# Next, load the parent rows.
bulk_import_model(data, parent_model, parent_db_table_name)
# Now, go back to our m2m rows.
# TODO: Do this the kosher Django way. We may find a
# better way to do this in Django 1.9 particularly.
with connection.cursor() as cursor:
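        # The single %s slots are filled with identifiers now; the
        # doubled %%s survive the string formatting as the placeholders
        # that executemany() binds per row below.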
sql_template = '''
insert into %s (%s, %s) values(%%s, %%s);''' % (m2m_table_name,
parent_id,
child_id)
tups = [(row[parent_id], row[child_id]) for row in m2m_rows]
cursor.executemany(sql_template, tups)
logging.info('Successfully imported M2M table %s' % (m2m_table_name,))
|
jrowan/zulip
|
zerver/lib/export.py
|
Python
|
apache-2.0
| 62,244
|
import direct.directbase.DirectStart
from pandac.PandaModules import *
from direct.gui.DirectGui import *
from direct.interval.IntervalGlobal import *
from direct.showbase.DirectObject import DirectObject
from direct.interval.MetaInterval import Sequence
import random, math, sys, os
from direct.task import Task
from direct.particles.Particles import Particles
from direct.particles.ParticleEffect import ParticleEffect
from direct.particles.ForceGroup import ForceGroup
from types import *
from Explosion import Explode
class Projectile(DirectObject):
def __init__(self, player, startPos, myNode,AL):
## self.accept("space", self.incrementSideWindowNumber)
self.worldNode = myNode
self.AL = AL
self.bombSound = self.AL.getAudio(3)
self.happySound = self.AL.getAudio(0)
self.startPos = startPos
self.player = player
self.exploded = 0
self.fired = 0
        self.newPos = Vec3(0, 0, 0)
self.mainWindow=base.win
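        # Offscreen 256x256 render target; sideWindow() later attaches a
        # camera to it and bufferViewer displays it as a
        # picture-in-picture card.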
self.altBuffer=self.mainWindow.makeTextureBuffer("hello", 256, 256)
def fire(self, gravity, angle, velocity, wind, timeflow, explosionSize,
explosionBursts,sideWindowNum):
self.number = sideWindowNum
self.removeAll()
self.fired = 1
self.gravity = gravity
self.angle = angle
self.velocity = velocity*5
self.wind = wind
##### Crockets speed #####
self.timeflow = timeflow*8
##### Crockets size #####
self.explosionSize = explosionSize
self.explosionBursts = explosionBursts
if self.player==2:
self.angle = 180 - self.angle
self.proj = loader.loadModel('models/banana')
self.proj.setR(0)
## self.accept("arrow_up", self.proj.setR(90))
## self.accept("arrow_down", self.proj.setR(0))
self.projTex = loader.loadTexture('models/building_yellow.bmp')
self.proj.setTexture(self.projTex)
self.proj.setPos(self.startPos)
self.projNode = render.attachNewNode('proj')
self.projNode.reparentTo(self.worldNode)
self.proj.setScale(10)
self.proj.reparentTo(self.projNode)
taskMgr.add(self.update, "updater")
self.sideWindow()
self.explosionStarted = 1
## def createSideWindow(self):
## mainWindow=base.win
## altBuffer=mainWindow.makeTextureBuffer("texBuffer", 256, 256)
## altRender=NodePath("new render")
## self.altCam=base.makeCamera(altBuffer)
## self.altCam.reparentTo(altRender)
## self.altCam.setPos(0,-401,105)
## altRender.reparentTo(render)
## self.altCam.lookAt(self.proj)
## base.bufferViewer.setPosition("llcorner")
## base.bufferViewer.setCardSize(.75, .75)
## base.bufferViewer.enable(1)
## base.bufferViewer.setLayout('cycle')
## base.bufferViewer.selectCard(self.number)
def sideWindow(self):
altCam=base.makeCamera(self.altBuffer)
self.camRoot = self.proj.attachNewNode('cam')
self.camRoot.setPos(Vec3(-15,-5,10))
self.camRoot.lookAt(self.proj)
altCam.reparentTo(self.camRoot)
base.bufferViewer.setPosition("ulcorner")
base.bufferViewer.setCardSize(.50,.50)
base.bufferViewer.setLayout('cycle')
base.bufferViewer.selectCard(self.number)
def update(self, task):
self.newPos = self.projectPoint(self.angle,self.velocity,
self.gravity,task.time*
self.timeflow,self.startPos,
self.wind)
if self.proj.getX()>175 or self.proj.getX()<-175 or self.proj.getZ()<0:
self.hit("building")
## self.proj.setR(0)
## self.proj.setR(task.time*360*3)
self.proj.setPos(self.newPos)
return Task.cont
def projectPoint(self,angle, velocity, gravity, time, location, wind):
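        # Closed-form ballistic arc in the XZ plane:
        #   x(t) = v*cos(angle)*t + wind_x*t + x0   (sin(90deg - a) == cos(a))
        #   z(t) = v*sin(angle)*t - g*t^2 + wind_z*t + z0
        # Note the conventional 1/2 factor on gravity is folded into the
        # gravity value passed in.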
return Vec3(math.sin(math.radians(90.0 - angle)) *
velocity * time + wind[0] * time + location.getX(),
location.getY(),math.cos(math.radians(90.0 - angle)) *
velocity * time - gravity * time * time + wind[1] *
time + location.getZ())
def hit(self, target):
if self.explosionStarted:
self.explosionStarted = 0
self.fired = 0
taskMgr.remove("updater")
self.explode = Explode(self.explosionSize, self.explosionBursts, .75,self.proj.getPos(), target)
taskMgr.add( self.updateExplode, "BOOM")
self.bombSound.play()
def updateExplode(self, task):
if task.time > self.explode.exlength * 2:
self.explode.sparkoff()
if self.explode.target=="gorilla":
self.explode.mushroom.start(self.explode.mushroomNode)
self.explode.pe4.softStop()
self.explode.pe3.start(self.explode.boomnode)
if task.time > self.explode.exlength * 7.5 and self.explode.target=="building":
self.explode.pe3.softStop()
taskMgr.remove("BOOM")
self.exploded = 1
self.explode.remove()
return Task.done
if task.time > 5 and self.explode.target=="gorilla":
self.explode.pe3.softStop()
taskMgr.remove("BOOM")
self.exploded = 1
self.explode.mushroom.softStop()
self.explode.remove()
return Task.done
return Task.cont
def removeAll(self):
base.bufferViewer.toggleEnable()
        print('being removed')
if taskMgr.hasTaskNamed("updater"):
taskMgr.remove("updater")
if taskMgr.hasTaskNamed("BOOM"):
taskMgr.remove("BOOM")
try:
self.projNode.detachNode()
        except Exception:
            pass
self.exploded = 0
self.ignoreAll()
try:
self.explode.remove()
        except Exception:
            pass
|
davidnarciso/PyGorillas
|
Projectile.py
|
Python
|
mit
| 6,696
|