code
stringlengths 3
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 3
1.05M
|
|---|---|---|---|---|---|
class Solution(object):
    def findBlackPixel(self, picture, N):
        """
        :type picture: List[List[str]]
        :type N: int
        :rtype: int

        Count lonely black pixels: 'B' cells whose row and column each hold
        exactly N black pixels, and whose column's black rows are all
        identical to the pixel's own row.
        """
        if not picture or not picture[0]:
            return 0
        height, width = len(picture), len(picture[0])
        # Rows / columns containing exactly N black pixels.
        candidate_rows = [r for r in range(height)
                          if picture[r].count('B') == N]
        candidate_cols = [c for c in range(width)
                          if sum(1 for row in picture if row[c] == 'B') == N]
        lonely = 0
        for r in candidate_rows:
            for c in candidate_cols:
                if picture[r][c] != 'B':
                    continue
                # Every row with a black pixel in column c must equal row r.
                black_rows = [row for row in picture if row[c] == 'B']
                if all(row == picture[r] for row in black_rows):
                    lonely += 1
        return lonely
|
Mlieou/oj_solutions
|
leetcode/python/ex_533.py
|
Python
|
mit
| 678
|
#!/usr/bin/env python3
#
# Generate Serpent deck for FastCube Serpent deck
# Ondrej Chvala, ochvala@utk.edu
# 2016-07-30
import materials
import cells
import surfaces
def write_deck(N, r, refl):
    '''Function to write the FastCube Serpent input deck.
    Inputs:
        N:    size of the N x N checkerboard lattice
        r:    radius of the fuel in the fuel pin [cm]
        refl: reflector thickness [cm]
    Outputs:
        output: String containing the FastCube deck'''
    # Header -- the {N}/{r}/{refl} placeholders are substituted by the
    # .format(**locals()) call at the end of this function.
    output = '''\
set title "Fast Flux Research Reactor, N {N}, rfuel {r}, reflector thickness {refl}"
'''
    # Surfaces (delegated to the sibling surfaces module)
    output += surfaces.write_surfaces(N, r, refl)
    # Cells
    output += cells.write_cells()
    # Materials
    output += materials.write_materials()
    # Data cards
    # NOTE(review): the in-deck comment says "100 cycles" but `set pop`
    # actually requests 200 cycles -- confirm which is intended.
    data_cards = '''
%______________data cards___________________________________________
% Power [W]
set power 100
set cpd 2
% Boundary condition
set bc 1
% Neutron population and criticality cycles
set pop 10000 200 40 % 10000 neutrons, 100 cycles, 40 of them inactive
% Data Libraries
set acelib "sss_endfb7u.sssdir"
set declib "sss_endfb7.dec"
set nfylib "sss_endfb7.nfy"
% Analog reaction rate
set arr 2
'''
    output += data_cards
    # Plots
    plot_cards = '''
% Plots
plot 1 3000 3000
plot 2 3000 3000
plot 3 3000 3000
mesh 1 3000 3000
mesh 2 3000 3000
mesh 3 3000 3000
'''
    output += plot_cards
    # Fill in the {N}/{r}/{refl} placeholders from the local variables.
    # NOTE(review): renaming any local in this function would silently
    # break this substitution -- keep names in sync with the template.
    output = output.format(**locals())
    return output
if __name__ == '__main__':
    # Manual smoke test: prompt, then print an 11x11 deck with r=1.25 cm
    # fuel pins and a 25 cm reflector.
    print("This module writes the deck for FastCube Serpent deck.")
    input("Press Ctrl+C to quit, or enter else to test it. ")
    print(write_deck(11, 1.25, 25))
|
ondrejch/FSM
|
scripts/mk0/cubedeck.py
|
Python
|
gpl-3.0
| 1,663
|
import rospy
import actionlib
from bitbots_msgs.msg import KickAction, KickFeedback, KickActionResult, KickGoal
from actionlib_msgs.msg import GoalStatus
class KickCapsule():
    """Blackboard capsule wrapping the bitbots_dynamic_kick action client.

    Records the last goal, feedback and result (plus timestamps) so other
    behaviour modules can inspect the kick state through the blackboard.
    """
    last_feedback = None  # type: KickFeedback
    last_feedback_received = None  # type: rospy.Time
    last_goal = None  # type: KickGoal
    last_goal_sent = None  # type: rospy.Time
    last_result = None  # type: KickActionResult
    last_result_received = None  # type: rospy.Time
    is_currently_kicking = False  # type: bool

    __connected = False  # type: bool
    __action_client = None  # type: actionlib.SimpleActionClient

    def __init__(self, blackboard):
        """
        :param blackboard: Global blackboard instance
        """
        self.__blackboard = blackboard
        # Connect eagerly so the first kick() does not pay the server wait.
        self.connect()

    def connect(self):
        """Create the action client and wait (bounded) for the kick server."""
        topic = self.__blackboard.config["dynamic_kick"]["topic"]
        rospy.loginfo("Connecting {}.KickCapsule to bitbots_dynamic_kick ({})"
                      .format(str(self.__blackboard.__class__).split(".")[-1], topic))
        self.__action_client = actionlib.SimpleActionClient(topic, KickAction)
        # wait_for_server returns False on timeout (wait_time seconds).
        self.__connected = self.__action_client.wait_for_server(
            rospy.Duration(self.__blackboard.config["dynamic_kick"]["wait_time"]))
        if not self.__connected:
            rospy.logerr("No dynamic_kick server running on {}".format(topic))

    def kick(self, goal):
        """
        Send a kick goal to the dynamic_kick server.

        :param goal: Goal to kick to
        :type goal: KickGoal
        :raises RuntimeError: when not connected to dynamic_kick server
        """
        if not self.__connected:
            # try to connect again
            self.__connected = self.__action_client.wait_for_server(
                rospy.Duration(self.__blackboard.config["dynamic_kick"]["wait_time"]))
            if not self.__connected:
                raise RuntimeError("Not connected to any dynamic_kick server")
        self.__action_client.send_goal(goal, self.__done_cb, self.__active_cb, self.__feedback_cb)
        self.last_goal = goal
        self.last_goal_sent = rospy.Time.now()

    def __done_cb(self, state, result):
        # Action finished (succeeded, aborted or preempted).
        self.last_result = KickActionResult(status=state, result=result)
        self.last_result_received = rospy.Time.now()
        self.is_currently_kicking = False

    def __feedback_cb(self, feedback):
        self.last_feedback = feedback
        self.last_feedback_received = rospy.Time.now()

    def __active_cb(self):
        # Server accepted the goal and started executing.
        self.is_currently_kicking = True
|
bit-bots/bitbots_behaviour
|
bitbots_blackboard/src/bitbots_blackboard/capsules/kick_capsule.py
|
Python
|
bsd-3-clause
| 2,507
|
# Copyright (c) 2010-2014 openpyxl
#
# stdlib imports
from io import BytesIO
import zipfile
import pytest
# package imports
from openpyxl.workbook import Workbook
from openpyxl.writer.excel import ExcelWriter
@pytest.mark.pil_required
def test_write_images(datadir):
    """Round-trip a single image through ExcelWriter._write_images and
    check it lands at the expected archive path."""
    datadir.chdir()
    workbook = Workbook()
    writer = ExcelWriter(workbook=workbook)
    from openpyxl.drawing import Image
    image = Image("plain.png")
    buf = BytesIO()
    archive = zipfile.ZipFile(buf, 'w')
    writer._write_images([image], archive, 1)
    archive.close()
    buf.seek(0)
    entries = zipfile.ZipFile(buf, 'r').infolist()
    assert len(entries) == 1
    assert entries[0].filename == 'xl/media/image1.png'
|
Hitachi-Data-Systems/org-chart-builder
|
openpyxl/writer/tests/test_drawing.py
|
Python
|
apache-2.0
| 711
|
import urllib
from .oauth import OAuthSharer
class TwitterSharer(OAuthSharer):
    """Sharer posting a status update via the Twitter 1.1 REST API."""

    def send(self, message, hashtag='', **kw):
        """Post *message* (suffixed with *hashtag* when given); return
        True when Twitter answers 200."""
        status = message + ' ' + hashtag if hashtag else message
        response = self.client.request(
            'https://api.twitter.com/1.1/statuses/update.json',
            method='POST',
            body='status=%s' % urllib.quote_plus(status)
        )[0]
        return response.status == 200
|
FelixLoether/python-sharer
|
sharer/twitter.py
|
Python
|
mit
| 424
|
from nose.plugins.attrib import attr
from test.integration.base import DBTIntegrationTest
class TestAdapterDDL(DBTIntegrationTest):
    """Integration test: sort/dist key hints must be no-ops on Postgres."""

    def setUp(self):
        DBTIntegrationTest.setUp(self)
        # Load the seed table the models select from.
        self.run_sql_file("test/integration/018_adapter_ddl_tests/seed.sql")

    @property
    def schema(self):
        # NOTE(review): "adaper" looks like a typo for "adapter", but the
        # name is shared with fixtures -- do not rename casually.
        return "adaper_ddl_018"

    @property
    def models(self):
        return "test/integration/018_adapter_ddl_tests/models"

    @attr(type='postgres')
    def test_sort_and_dist_keys_are_nops_on_postgres(self):
        results = self.run_dbt(['run'])
        self.assertEqual(len(results), 1)
        # The materialized model must match the seed exactly.
        self.assertTablesEqual("seed","materialized")
|
nave91/dbt
|
test/integration/018_adapter_ddl_tests/test_adapter_ddl.py
|
Python
|
apache-2.0
| 666
|
# Copyright (c) 2021 Ultimaker B.V.
from typing import Optional
class PaginationMetadata:
    """Class representing the metadata related to pagination."""

    def __init__(self,
                 total_count: Optional[int] = None,
                 total_pages: Optional[int] = None,
                 **kwargs) -> None:
        """
        Creates a new digital factory project response object

        :param total_count: The total count of items.
        :param total_pages: The total number of pages when pagination is applied.
        :param kwargs: Any extra response fields, absorbed as attributes.
        """
        self.total_count = total_count
        self.total_pages = total_pages
        # Keep unknown response fields accessible as plain attributes.
        for key, value in kwargs.items():
            setattr(self, key, value)

    def __str__(self) -> str:
        return "PaginationMetadata | Total Count: {}, Total Pages: {}".format(self.total_count, self.total_pages)
|
Ultimaker/Cura
|
plugins/DigitalLibrary/src/PaginationMetadata.py
|
Python
|
lgpl-3.0
| 828
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls.defaults import patterns, url, include
from .views import IndexView, CreateView, EditView
from .extras import urls as extras_urls
# Admin flavors URLconf. Uses the legacy patterns()/defaults API
# (django.conf.urls.defaults was removed in Django 1.6).
urlpatterns = patterns('openstack_dashboard.dashboards.admin.flavors.views',
    url(r'^$', IndexView.as_view(), name='index'),
    url(r'^create/$', CreateView.as_view(), name='create'),
    # <id> is the flavor identifier (anything but a slash).
    url(r'^(?P<id>[^/]+)/edit/$', EditView.as_view(), name='edit'),
    url(r'^(?P<id>[^/]+)/extras/', include(extras_urls, namespace='extras')),
)
|
trunglq7/horizon
|
openstack_dashboard/dashboards/admin/flavors/urls.py
|
Python
|
apache-2.0
| 1,298
|
import obd,time
import json, os
from log import Log
from Logger import Logger
class car:
    """Asynchronous OBD-II logger.

    Watches a fixed set of PIDs on an obd.Async connection; each callback
    appends to the current log record, and after one full round of
    callbacks the record is flushed to the logger.
    """

    def __init__(self, logger):
        self.connection = obd.Async()
        self.log = Log()
        self.logger = logger
        # Number of callbacks since the last flush; see count().
        self.counter = 0

    def getSpeed(self,r):
        self.log.add("SPEED", str(r.value))
        self.count()

    def getFuelLevel(self,r):
        print(r.value)
        self.log.add("FUEL_LEVEL", str(r.value))
        self.count()

    def getThrottlePosition(self,r):
        self.log.add("THROTTLE_POSITION", str(r.value))
        self.count()

    def getCoolantTemp(self,r):
        self.log.add("COOLANT_TEMP", str(r.value))
        self.count()

    def getOilPressure(self,r):
        # NOTE(review): currently wired to RPM in setupCallbacks(); the
        # comment there says it should eventually become oil pressure.
        self.log.add("RPM", str(r.value))
        self.count()

    def getLoad(self,r):
        self.log.add("ENGINE_LOAD",str(r.value))
        self.count()

    def getFuelRate(self,r):
        self.log.add("FUEL_RATE", str(r.value))
        self.count()

    def count(self):
        # Flush once every watched PID has reported.
        self.counter += 1
        if self.counter == 7 : #number of callbacks
            self.logger.write(self.log.getLog())
            self.counter = 0

    def setupCallbacks(self):
        self.connection.watch(obd.commands.SPEED, callback=self.getSpeed)
        self.connection.watch(obd.commands.FUEL_LEVEL, callback=self.getFuelLevel)
        self.connection.watch(obd.commands.THROTTLE_POS, callback=self.getThrottlePosition)
        self.connection.watch(obd.commands.COOLANT_TEMP, callback=self.getCoolantTemp)
        self.connection.watch(obd.commands.RPM, callback=self.getOilPressure) # change to oil pressure
        self.connection.watch(obd.commands.ENGINE_LOAD, callback=self.getLoad)
        self.connection.watch(obd.commands.FUEL_RATE, callback=self.getFuelRate)

    def startLogging(self):
        self.connection.start()

    def stopLogging(self):
        self.connection.stop()

    def getStatus(self):
        return self.connection.status()
# --- script entry point ----------------------------------------------------
# NOTE(review): binding the instance to the name ``Logger`` shadows the
# imported Logger class; kept as-is for backward compatibility.
Logger = Logger()
tahoe = car(Logger)
tahoe.setupCallbacks()
# Fix: ``OBDStatus`` was referenced unqualified, which raises NameError at
# runtime -- it lives in the ``obd`` package namespace.
while tahoe.getStatus() == obd.OBDStatus.CAR_CONNECTED:
    tahoe.startLogging()
tahoe.stopLogging()
|
rahutchinson/PyTahoeLog
|
mainLogger.py
|
Python
|
mit
| 2,045
|
from django.contrib import admin
from taxonomy.models import *
from ajax_select import make_ajax_form
from ajax_select.admin import AjaxSelectAdmin
from django.contrib.admin import SimpleListFilter
from django.contrib.admin.views import main
from django.http import HttpResponseRedirect
class OrderFilter(SimpleListFilter):
    """Sidebar filter narrowing taxa to the descendants of a chosen order."""
    title = 'order'
    parameter_name = 'rank'

    def lookups(self, request, model_admin):
        # Distinct orders among all taxa; taxa with no order yield None.
        orders = set([t.order() for t in model_admin.model.objects.all()])
        return [(t.id, t.name) for t in orders if t is not None]

    def queryset(self, request, queryset):
        if self.value():
            # Fix: SimpleListFilter.value is a method -- the original passed
            # the bound method object instead of the selected value.
            return Taxon.objects.childrenOf(self.value())
        else:
            return queryset
class FamilyFilter(SimpleListFilter):
    """Sidebar filter narrowing taxa to the descendants of a chosen family."""
    title = 'family'
    parameter_name = 'family'

    def lookups(self, request, model_admin):
        # Distinct families among all taxa; taxa with no family yield None.
        families = set([t.family() for t in model_admin.model.objects.all()])
        return [(t.id, t.name) for t in families if t is not None]

    def queryset(self, request, queryset):
        if self.value():
            # Fix: SimpleListFilter.value is a method -- the original passed
            # the bound method object instead of the selected value.
            return Taxon.objects.childrenOf(self.value())
        else:
            return queryset
class RankAdmin(admin.ModelAdmin):
    """Admin changelist for taxonomic ranks, editable inline."""
    list_display = ["id", "name", "sortOrder", "parent"]
    list_editable = ["name", "sortOrder", "parent"]
def bulk_update(modeladmin, request, queryset):
    # Admin action: redirect to the custom bulk-update view, passing the
    # selected primary keys as a comma-separated query parameter.
    selected = request.POST.getlist(admin.ACTION_CHECKBOX_NAME)
    return HttpResponseRedirect("/bulk_update/?ids=%s" % (",".join(selected)))
bulk_update.short_description = "Bulk update selected records"
class TaxonAdmin(AjaxSelectAdmin):
    """Admin for Taxon with ajax parent lookup and rank-based filters."""
    search_fields = ('name','commonName')
    list_editable = ["extant",]
    list_display = ["order", "family", "subfamily", "tribe", "genus", "species", "extant"]
    list_filter = ['rank',OrderFilter,FamilyFilter]
    form = make_ajax_form(Taxon, {"parent":"taxonLookup"})
    actions = [bulk_update]

    def __init__(self,*args,**kwargs):
        super(TaxonAdmin, self).__init__(*args, **kwargs)
        # NOTE(review): mutates a module-level Django changelist default,
        # which affects every admin in the process, not just this one.
        main.EMPTY_CHANGELIST_VALUE = '-'
# Register both models with the default admin site.
admin.site.register(Taxon, TaxonAdmin)
admin.site.register(Rank, RankAdmin)
|
wabarr/taxonomy
|
taxonomy/admin.py
|
Python
|
gpl-2.0
| 2,141
|
"""Plot to test line collections"""
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.collections import LineCollection
def main():
    """Build a spiral rendered as a colormapped LineCollection; return the figure."""
    t = np.linspace(0, 10, 100)
    # Spiral: radius 0.1*t, angle pi*t.
    x = 0.1 * t * np.cos(np.pi * t)
    y = 0.1 * t * np.sin(np.pi * t)
    # Consecutive point pairs -> (99, 2, 2) array of line segments.
    points = np.array([x, y]).T.reshape(100, 1, 2)
    segments = np.hstack([points[:-1], points[1:]])
    # Color each segment by its parameter value t over the jet colormap.
    lc = LineCollection(segments, cmap=plt.cm.jet,
                        norm=plt.Normalize(0, 10),
                        array=t, linewidths=3)
    fig, ax = plt.subplots()
    ax.add_collection(lc)
    plt.xlim(-1, 1)
    plt.ylim(-1, 1)
    ax.set_title("Line Collections", size=18)
    # Hide tick labels; only the collection itself is under test.
    ax.xaxis.set_major_formatter(plt.NullFormatter())
    ax.yaxis.set_major_formatter(plt.NullFormatter())
    return fig
if __name__ == '__main__':
    # Render the test plot interactively when run as a script.
    main()
    plt.show()
|
mpld3/mpld3_rewrite
|
test_plots/test_line_collections.py
|
Python
|
bsd-3-clause
| 829
|
# -*- coding: utf-8 -*-
'''
some tools that help you define operator grammars to preparse python expression.
see dinpy.py for a real sample.
>>> from dao.solve import set_run_mode, noninteractive
>>> set_run_mode(noninteractive)
>>> from dao.term import Var
>>> from dao.builtins.terminal import eoi
>>> from dao.builtins.container import pytuple, first
>>> bindings, body = Var('bindings'), Var('body')
>>> do = word('do')
>>> let = element('let', call(bindings)+do+getitem(body)+eoi+pytuple(first(bindings), body))
>>> preparse(let({'x':1}).do[1,2])
({'x': 1}, (1, 2))
'''
__all__ = ['element', 'preparse', 'lead',
'lt', 'le', 'eq', 'ne', 'gt', 'ge', 'bitor', 'xor', 'bitand',
'lshift', 'rshift', 'add', 'sub', 'mul', 'div', 'floordiv', 'mod',
'pos', 'neg', 'invert', 'abs', 'pow',
'getattr', 'call', 'getitem', 'iterator',
'attr_item', 'attr_call', 'word', 'words', 'getitem_to_list',
'DinpySyntaxError', 'syntax_error']
from dao.solve import run_mode, interactive, to_sexpression
from dao.solve import interactive_solver, interactive_tagger, interactive_parser
from dao.term import deref, unify, DummyVar
from dao.solve import eval, preparse, dao_repr
from dao import special
from dao.builtins.matcher import matcher
from dao.builtins.parser import parse_sequence
from dao.builtins.container import to_list
from dao.builtins.control import and_p, or_p
##from dao.dinpy.dexpr import DinpySyntaxError
# The form most recently handed to parsing; used only for error reporting.
_current_form = None

class DinpySyntaxError(Exception):
    """Raised when a dinpy form does not match its grammar."""
    def __init__(self): pass
    def __repr__(self):
        return 'DinpySyntaxError: '+dao_repr(_current_form)
def element(name, grammar):
    ''' name = element(grammar)

    Create a named grammar element: a lead-wrapped FormTraveller that
    records operator applications and parses them with *grammar*.'''
    return _lead_element_class(FormTraveller)(name, preparse(grammar))
def lead(klass):
    '''Return a reusable "lead" instance of *klass* (see lead_class).

    # use case:
    # var.a.b.c
    class VarForm(object):
        def __init__(self):
            self.__vars__ = []
        def __getattr__(self, var):
            self.__vars__.append(Var(var))
            return self
        def __len__(self): return len(self.__vars__)
        def __iter__(self): return iter(self.__vars__)
    var = lead(VarForm)
    '''
    return lead_class(klass)()
def lead_class(klass):
    """Build a type whose plain methods delegate to a fresh *klass* instance.

    This keeps the lead object itself stateless across uses.
    NOTE(review): the wrappers assume __form_name__/__form_grammar__
    attributes exist on the lead instance -- confirm for non-form classes.
    """
    attrs = {}
    for a, value in klass.__dict__.items():
        # type(lead_class) is the plain function type; __init__ is kept as-is.
        if not a.startswith('__init__') and isinstance(value, type(lead_class)):
            attrs[a] = _lead_function(klass, value)
        else: attrs[a] = value
    return type('Lead'+klass.__name__, klass.__bases__, attrs)
def _lead_function(klass, function):
    # Delegate *function* to a fresh klass instance rebuilt from the lead
    # object's form name/grammar, leaving the lead object untouched.
    return lambda self, *args, **kw: function(
        klass(self.__form_name__, self.__form_grammar__), *args, **kw)
def _lead_element_function(klass, function):
    # Same delegation as _lead_function, kept separate for element classes.
    return lambda self, *args, **kw: function(
        klass(self.__form_name__, self.__form_grammar__), *args, **kw)
def _lead_element_class(klass):
    """Element-class twin of lead_class: wrap plain methods so each call
    operates on a fresh *klass* instance built from the element's
    form name/grammar."""
    attrs = {}
    for a, value in klass.__dict__.items():
        # type(lead_class) is the plain function type; __init__ is kept as-is.
        if not a.startswith('__init__') and isinstance(value, type(lead_class)):
            attrs[a] = _lead_element_function(klass, value)
        else: attrs[a] = value
    return type('Lead'+klass.__name__, klass.__bases__, attrs)
@matcher('syntax_error')
def syntax_error(solver):
    # Report the form that failed to parse, then abort with an exception.
    # (Python 2 module: print statement.)
    print dao_repr(_current_form)
    raise DinpySyntaxError()
# Instantiate once so it can be dropped into grammars as a ready element.
syntax_error = syntax_error()
# Opcode indices for recorded operator applications. FormTraveller appends
# these to __operator_data__ -- paired with operands for binary operators,
# bare ints for unary operators and __iter__.
(__lt__, __le__, __eq__, __ne__, __gt__, __ge__,
 __getattr__, __call__, __getitem__, __iter__,
 __add__, __sub__, __mul__, __floordiv__, __div__, __truediv__,
 __mod__, __pow__, __lshift__, __rshift__, __and__, __xor__, __or__,
 __neg__, __pos__, __abs__, __invert__) = range(27)
# Matcher names, index-parallel to the opcode tuple above.
names = (
    '__lt__, __le__, __eq__, __ne__, __gt__, __ge__, '
    '__getattr__, __call__, __getitem__, __iter__, '
    '__add__, __sub__, __mul__, __floordiv__, __div__, __truediv__, '
    '__mod__, __pow__, __lshift__, __rshift__, __and__, __xor__, __or__, '
    '__neg__, __pos__, __abs__, __invert__'.split(', '))
class FormTraveller(object):
    """Records the chain of operators applied to a DSL form for later parsing.

    Binary operators append ``(opcode, operand)`` tuples to
    ``__operator_data__``; unary operators (and ``__iter__``) append the bare
    opcode int. ``___parse___`` then matches the recorded sequence against
    the form's grammar.
    """
    def __init__(self, name, grammar):
        self.__form_name__ = name
        if grammar is None:
            self.__form_grammar__ = None
        else:
            # On grammar failure, fall through to the syntax_error matcher.
            self.__form_grammar__ = to_sexpression(preparse(or_p(grammar, syntax_error)))
        self.__operator_data__ = []
    def __lt__(self, other):
        self.__operator_data__.append((__lt__, other)); return self
    def __le__(self, other):
        self.__operator_data__.append((__le__, other)); return self
    def __eq__(self, other):
        self.__operator_data__.append((__eq__, other)); return self
    def __ne__(self, other):
        self.__operator_data__.append((__ne__, other)); return self
    def __gt__(self, other):
        self.__operator_data__.append((__gt__, other)); return self
    def __ge__(self, other):
        self.__operator_data__.append((__ge__, other)); return self
    def __getattr__(self, name):
        self.__operator_data__.append((__getattr__, name)); return self
    def __call__(self, *args, **kwargs):
        # Calls record both positional and keyword arguments.
        self.__operator_data__.append((__call__, args, kwargs))
        return self
    def __getitem__(self, key):
        self.__operator_data__.append((__getitem__, key)); return self
    def __add__(self, other):
        self.__operator_data__.append((__add__, other)); return self
    def __sub__(self, other):
        self.__operator_data__.append((__sub__, other)); return self
    def __mul__(self, other):
        self.__operator_data__.append((__mul__, other)); return self
    def __floordiv__(self, other):
        self.__operator_data__.append((__floordiv__, other)); return self
    def __div__(self, other):
        self.__operator_data__.append((__div__, other)); return self
    def __truediv__(self, other):
        # Fix: previously appended the __lt__ opcode (copy/paste bug), so
        # "/" under true division was recorded as "<".
        self.__operator_data__.append((__truediv__, other)); return self
    def __mod__(self, other):
        self.__operator_data__.append((__mod__, other)); return self
    def __pow__(self, other):
        self.__operator_data__.append((__pow__, other)); return self
    def __lshift__(self, other):
        self.__operator_data__.append((__lshift__, other)); return self
    def __rshift__(self, other):
        self.__operator_data__.append((__rshift__, other)); return self
    def __and__(self, other):
        self.__operator_data__.append((__and__, other)); return self
    def __xor__(self, other):
        self.__operator_data__.append((__xor__, other)); return self
    def __or__(self, other):
        self.__operator_data__.append((__or__, other)); return self
    def __iter__(self):
        # Recorded as a bare opcode int, like the unary operators below.
        self.__operator_data__.append(__iter__); return self
    def __neg__(self):
        self.__operator_data__.append(__neg__); return self
    def __pos__(self):
        self.__operator_data__.append(__pos__); return self
    def __abs__(self):
        self.__operator_data__.append(__abs__); return self
    def __invert__(self):
        self.__operator_data__.append(__invert__); return self
    def ___parse___(self, parser):
        """Match the recorded operator chain against this form's grammar."""
        global _current_form
        _current_form = self
        return eval(parse_sequence(self.__form_grammar__, self.__operator_data__))
    def __nonzero__(self): return False
    # Behave as a ground term for the solver's term protocol.
    # prevent __getattr__
    def closure(self, env): return self
    def deref(self, env): return self
    def getvalue(self, env, memo): return self
    def __repr__(self):
        # In interactive mode, repr() triggers parsing and evaluation of the
        # form; otherwise fall back to the plain textual rendering.
        if run_mode() is interactive:
            code = interactive_parser().parse(self)
            code = interactive_tagger().tag_loop_label(code)
            code = to_sexpression(code)
            result = interactive_solver().eval(code)
            return repr(result) if result is not None else ''
        else: return self.____repr____()
    def ____repr____(self):
        """Render the recorded operator chain as Python-like source text."""
        result = self.__form_name__
        for x in self.__operator_data__:
            if x== __neg__: result = '-%s'%result
            elif x== __pos__: result = '+%s'%result
            elif x== __abs__: result = 'abs(%s)'%result
            elif x== __invert__: result = '~%s'%result
            # Fix: __iter__ entries are bare ints; the original compared
            # x[0]==__iter__ below, which raised TypeError on the int entry.
            elif x== __iter__: result = 'iter(%s)'%result
            elif x[0]==__lt__: result += '<%s'%dao_repr(x[1])
            elif x[0]==__le__: result += '<=%s'%dao_repr(x[1])
            elif x[0]==__eq__: result += '==%s'%dao_repr(x[1])
            elif x[0]== __ne__: result += '!=%s'%dao_repr(x[1])
            elif x[0]== __gt__: result += '>%s'%dao_repr(x[1])
            elif x[0]== __ge__: result += '>=%s'%dao_repr(x[1])
            elif x[0]== __getattr__: result += '.%s'%dao_repr(x[1])
            elif x[0]== __call__: result += '(%s)'%dao_repr(x[1])
            elif x[0]== __getitem__: result += '[%s]'%dao_repr(x[1])
            elif x[0]== __add__: result += '+%s'%dao_repr(x[1])
            elif x[0]== __sub__: result += '-%s'%dao_repr(x[1])
            elif x[0]== __mul__: result += '*%s'%dao_repr(x[1])
            elif x[0]== __floordiv__: result += '//%s'%dao_repr(x[1])
            elif x[0]== __div__: result += '/%s'%dao_repr(x[1])
            elif x[0]== __truediv__: result += '/%s'%dao_repr(x[1])
            elif x[0]== __mod__: result += '%%%s'%dao_repr(x[1])
            elif x[0]== __pow__: result += '**%s'%dao_repr(x[1])
            elif x[0]== __lshift__: result += '<<%s'%dao_repr(x[1])
            elif x[0]== __rshift__: result += '>>%s'%dao_repr(x[1])
            elif x[0]== __and__: result += '&%s'%dao_repr(x[1])
            elif x[0]== __xor__: result += '^%s'%dao_repr(x[1])
            elif x[0]== __or__: result += '|%s'%dao_repr(x[1])
        return result
def binary(attr):
    """Build a matcher consuming one recorded binary-operator entry.

    *attr* is the opcode index into ``names``; the matcher succeeds when the
    current parse position holds ``(attr, operand)``, optionally unifying
    *argument* with the operand, and restores the position on backtracking.
    NOTE(review): ``cont`` appears to be injected by the matcher framework
    -- confirm before refactoring.
    """
    @matcher(names[attr])
    def func(solver, argument=None):
        argument = deref(argument, solver.env)
        syntax_result, pos = solver.parse_state
        if pos==len(syntax_result): return
        try:
            # Unary entries are bare ints; indexing them raises -> no match.
            if syntax_result[pos][0]!=attr: return
        except: return
        if argument is not None:
            for _ in unify(argument, syntax_result[pos][1], solver.env):
                solver.parse_state = syntax_result, pos+1
                yield cont, True
        else:
            solver.parse_state = syntax_result, pos+1
            yield cont, True
        # Restore the parse position when backtracked over.
        solver.parse_state = syntax_result, pos
    return func
@matcher('__call__')
def call(solver, args=None, kwargs=None):
    """Matcher consuming one recorded __call__ entry ``(opcode, args, kwargs)``.

    Unifies *args*/*kwargs* with the recorded ones when given; restores the
    parse position on backtracking.
    """
    args = deref(args, solver.env)
    kwargs = deref(kwargs, solver.env)
    syntax_result, pos = solver.parse_state
    if pos==len(syntax_result): return
    try:
        # Unary entries are bare ints; indexing them raises -> no match.
        if syntax_result[pos][0]!=__call__: return
    except: return
    if args is not None:
        for _ in unify(args, syntax_result[pos][1], solver.env):
            if kwargs is not None:
                for _ in unify(kwargs, syntax_result[pos][2], solver.env):
                    solver.parse_state = syntax_result, pos+1
                    yield cont, True
            else:
                solver.parse_state = syntax_result, pos+1
                yield cont, True
    else:
        solver.parse_state = syntax_result, pos+1
        yield cont, True
    # Restore the parse position when backtracked over.
    solver.parse_state = syntax_result, pos
def unary(attr):
    """Build a matcher consuming one recorded unary-operator entry.

    Unary entries are bare opcode ints (no operand), so the comparison is a
    direct equality rather than a tuple-head test.
    """
    @matcher(names[attr])
    def func(solver):
        syntax_result, pos = solver.parse_state
        if pos==len(syntax_result): return
        if syntax_result[pos]!=attr: return
        solver.parse_state = syntax_result, pos+1
        yield cont, True
        # Restore the parse position when backtracked over.
        solver.parse_state = syntax_result, pos
    return func
'''
lambda Lambda expression
or Boolean OR
and Boolean AND
not x Boolean NOT
in, not in Membership tests
is, is not Identity tests
<, <=, >, >=, <>, !=, == Comparisons
| Bitwise OR
^ Bitwise XOR
& Bitwise AND
<<, >> Shifts
+, - Addition and subtraction
*, /, % Multiplication, division, remainder
+x, -x Positive, negative
~x Bitwise not
** Exponentiation
x.attribute Attribute reference
x[index] Subscription
x[index:index] Slicing
f(arguments...) Function call
(expressions...) Binding or tuple display
[expressions...] List display
{key:datum...} Dictionary display
`expressions...` String conversion
'''
# Ready-made matcher instances, one per supported operator.
lt = binary(__lt__)              # <
le = binary(__le__)              # <=
eq = binary(__eq__)              # ==
ne = binary(__ne__)              # !=, <>
gt = binary(__gt__)              # >
ge = binary(__ge__)              # >=
bitor = binary(__or__)           # |
xor = binary(__xor__)            # ^
bitand = binary(__and__)         # &
lshift = binary(__lshift__)      # <<
rshift = binary(__rshift__)      # >>
add = binary(__add__)            # +
sub = binary(__sub__)            # -
mul = binary(__mul__)            # *
div = binary(__div__)            # /
floordiv = binary(__floordiv__)  # //
mod = binary(__mod__)            # %
pos = unary(__pos__)()           # +x, positive
neg = unary(__neg__)()           # -x, negative
invert = unary(__invert__)()     # ~x, bitwise not
abs = unary(__abs__)()           # abs(x)
pow = binary(__pow__)            # ** Exponentiation
getattr = binary(__getattr__)    # attribute access
getitem = binary(__getitem__)    # object[index]
iterator = unary(__iter__)       # iter(object)
# A bare keyword: matches an attribute access of *word* (e.g. ``.do``).
def word(word): return getattr(word)
# Several comma-separated keywords at once.
def words(text): return [getattr(w.strip()) for w in text.split(',')]
# ``.name[arg]`` -- attribute access followed by a subscription.
def attr_item(name): return lambda arg: and_p(getattr(name),getitem(arg))
# ``.name(args...)`` -- attribute access followed by a call.
def attr_call(name): return lambda *args: and_p(getattr(name), call(*args))
def getitem_to_list(argument=None):
    """Match a subscription; when *argument* is given, bind it to the
    subscript contents converted via to_list."""
    if argument is not None:
        _x = DummyVar('_x')
        return and_p(getitem(_x), special.set(argument, to_list(_x)))
    else: return getitem()
if __name__ == "__main__":
import doctest
doctest.testmod()
|
chaosim/dao
|
dao/dinpy/pysyntax.py
|
Python
|
gpl-3.0
| 13,132
|
"""
Copyright 2008 Serge Matveenko
This file is part of Picket.
Picket is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Picket is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Picket. If not, see <http://www.gnu.org/licenses/>.
"""
from distutils import version as v
"""
dictionary for storing copyrights and other project stuff
"""
# Project metadata: home page, version, authors, development years and the
# GPL warning text shown to users.
COPYING = {
    # Picket home page url.
    'URL': 'http://picket.nophp.ru/',
    # Picket version, possibly with branch name.
    'VERSION': v.LooseVersion('0.3-master'),
    # Picket authors in order of their code appearance.
    'AUTHORS': ['Serge Matveenko', 'TrashNRoll'],
    # Years of project development.
    'YEARS': [2008, 2009, 2010],
    # GPL warning text as of 2008-10-10.
    'WARNING': """Picket is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Picket is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Picket. If not, see <http://www.gnu.org/licenses/>.""",
}
version = COPYING['VERSION']
|
lig/picket_deadend
|
apps/picket/__init__.py
|
Python
|
gpl-3.0
| 1,859
|
from urlparse import urlparse
from api_tests.nodes.views.test_node_contributors_list import NodeCRUDTestCase
from nose.tools import * # flake8: noqa
from api.base.settings.defaults import API_BASE
from framework.auth.core import Auth
from tests.base import fake
from tests.factories import (
ProjectFactory,
CommentFactory,
RegistrationFactory,
WithdrawnRegistrationFactory,
)
class TestWithdrawnRegistrations(NodeCRUDTestCase):
def setUp(self):
super(TestWithdrawnRegistrations, self).setUp()
self.registration = RegistrationFactory(creator=self.user, project=self.public_project)
self.withdrawn_registration = WithdrawnRegistrationFactory(registration=self.registration, user=self.registration.creator)
self.public_pointer_project = ProjectFactory(is_public=True)
self.public_pointer = self.public_project.add_pointer(self.public_pointer_project,
auth=Auth(self.user),
save=True)
self.withdrawn_url = '/{}registrations/{}/'.format(API_BASE, self.registration._id)
self.withdrawn_registration.justification = 'We made a major error.'
self.withdrawn_registration.save()
def test_can_access_withdrawn_contributors(self):
url = '/{}registrations/{}/contributors/'.format(API_BASE, self.registration._id)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 200)
def test_cannot_access_withdrawn_children(self):
url = '/{}registrations/{}/children/'.format(API_BASE, self.registration._id)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_cannot_access_withdrawn_comments(self):
self.public_project = ProjectFactory(is_public=True, creator=self.user)
self.public_comment = CommentFactory(node=self.public_project, user=self.user)
url = '/{}registrations/{}/comments/'.format(API_BASE, self.registration._id)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_can_access_withdrawn_contributor_detail(self):
url = '/{}registrations/{}/contributors/{}/'.format(API_BASE, self.registration._id, self.user._id)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 200)
def test_cannot_return_a_withdrawn_registration_at_node_detail_endpoint(self):
url = '/{}nodes/{}/'.format(API_BASE, self.registration._id)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 404)
def test_cannot_delete_a_withdrawn_registration(self):
url = '/{}registrations/{}/'.format(API_BASE, self.registration._id)
res = self.app.delete_json_api(url, auth=self.user.auth, expect_errors=True)
self.registration.reload()
assert_equal(res.status_code, 405)
def test_cannot_access_withdrawn_files_list(self):
url = '/{}registrations/{}/files/'.format(API_BASE, self.registration._id)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_cannot_access_withdrawn_node_links_detail(self):
url = '/{}registrations/{}/node_links/{}/'.format(API_BASE, self.registration._id, self.public_pointer._id)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_cannot_access_withdrawn_node_links_list(self):
url = '/{}registrations/{}/node_links/'.format(API_BASE, self.registration._id)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_cannot_access_withdrawn_node_logs(self):
self.public_project = ProjectFactory(is_public=True, creator=self.user)
url = '/{}registrations/{}/logs/'.format(API_BASE, self.registration._id)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_cannot_access_withdrawn_registrations_list(self):
self.registration.save()
url = '/{}registrations/{}/registrations/'.format(API_BASE, self.registration._id)
res = self.app.get(url, auth=self.user.auth, expect_errors=True)
assert_equal(res.status_code, 403)
def test_withdrawn_registrations_display_limited_fields(self):
registration = self.registration
res = self.app.get(self.withdrawn_url, auth=self.user.auth)
assert_equal(res.status_code, 200)
attributes = res.json['data']['attributes']
expected_attributes = {
'title': registration.title,
'description': registration.description,
'date_created': registration.date_created.isoformat(),
'date_registered': registration.registered_date.isoformat(),
'withdrawal_justification': registration.retraction.justification,
'public': None,
'category': None,
'date_modified': None,
'registration': True,
'fork': None,
'collection': None,
'tags': None,
'withdrawn': True,
'pending_withdrawal': None,
'pending_registration_approval': None,
'pending_embargo_approval': None,
'embargo_end_date': None,
'registered_meta': None,
'current_user_permissions': None,
'registration_supplement': registration.registered_schema[0].name
}
for attribute in expected_attributes:
assert_equal(expected_attributes[attribute], attributes[attribute])
contributors = urlparse(res.json['data']['relationships']['contributors']['links']['related']['href']).path
assert_equal(contributors, '/{}registrations/{}/contributors/'.format(API_BASE, registration._id))
assert_not_in('children', res.json['data']['relationships'])
assert_not_in('comments', res.json['data']['relationships'])
assert_not_in('node_links', res.json['data']['relationships'])
assert_not_in('registrations', res.json['data']['relationships'])
assert_not_in('parent', res.json['data']['relationships'])
assert_not_in('forked_from', res.json['data']['relationships'])
assert_not_in('files', res.json['data']['relationships'])
assert_not_in('logs', res.json['data']['relationships'])
assert_not_in('primary_institution', res.json['data']['relationships'])
assert_not_in('registered_by', res.json['data']['relationships'])
assert_not_in('registered_from', res.json['data']['relationships'])
assert_not_in('root', res.json['data']['relationships'])
    def test_field_specific_related_counts_ignored_if_hidden_field_on_withdrawn_registration(self):
        """related_counts=<hidden field> must not resurrect a relationship that
        withdrawal hides: 'children' stays absent, 'contributors' stays present.
        """
        url = '/{}registrations/{}/?related_counts=children'.format(API_BASE, self.registration._id)
        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_not_in('children', res.json['data']['relationships'])
        assert_in('contributors', res.json['data']['relationships'])
    def test_field_specific_related_counts_retrieved_if_visible_field_on_withdrawn_registration(self):
        """related_counts works normally for a field that remains visible on a
        withdrawn registration (the single contributor is counted).
        """
        url = '/{}registrations/{}/?related_counts=contributors'.format(API_BASE, self.registration._id)
        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_equal(res.json['data']['relationships']['contributors']['links']['related']['meta']['count'], 1)
|
abought/osf.io
|
api_tests/registrations/views/test_withdrawn_registrations.py
|
Python
|
apache-2.0
| 7,791
|
#! /usr/bin/env python
import random
import utilities
def read_nodes_from_training(file_name):
    """
    Returns a list of all the nodes in the graph
    """
    seen = set()
    for edge in utilities.edges_generator(file_name):
        # Each yielded item is an iterable of node ids; fold them all into
        # one deduplicated set.
        seen.update(edge)
    return list(seen)
def random_benchmark(train_file, test_file, submission_file, num_predictions):
    """
    Runs the random benchmark.
    """
    candidate_nodes = read_nodes_from_training(train_file)
    test_nodes = utilities.read_nodes_list(test_file)
    # For every test node, draw num_predictions uniformly random candidates
    # (with replacement, exactly as the baseline intends).
    test_predictions = []
    for _ in test_nodes:
        picks = [random.choice(candidate_nodes) for _ in range(num_predictions)]
        test_predictions.append(picks)
    utilities.write_submission_file(submission_file,
                                    test_nodes,
                                    test_predictions)
if __name__=="__main__":
    # Script entry point: generate 10 random predictions per test node from
    # the training graph and write the submission file.
    random_benchmark("../Data/train.csv",
                     "../Data/test.csv",
                     "../Submissions/random_benchmark.csv",
                     10)
|
ameyavilankar/social-network-recommendation
|
preprocessing/random_benchmark.py
|
Python
|
bsd-2-clause
| 1,033
|
# Auto-generated pyaf model-control case: build one forecasting model with the
# Anscombe transformation, MovingAverage trend, Seasonal_Second cycle and an
# MLP autoregressive component on the shared ozone dataset harness.
import tests.model_control.test_ozone_custom_models_enabled as testmod
testmod.build_model( ['Anscombe'] , ['MovingAverage'] , ['Seasonal_Second'] , ['MLP'] );
|
antoinecarme/pyaf
|
tests/model_control/detailed/transf_Anscombe/model_control_one_enabled_Anscombe_MovingAverage_Seasonal_Second_MLP.py
|
Python
|
bsd-3-clause
| 161
|
#!/usr/bin/env python
#
# Copyright 2018 by Ruben Undheim
#
# This is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
import sys,os
import curses
import threading
import time
#import queue
import yaml
from gnuradio import gr, blocks, audio
import grdab
import locale
locale.setlocale(locale.LC_ALL, '')
# Per-user channel list written by the grdab scan tooling.
channel_list_filename = os.path.join(os.getenv("HOME"), ".grdab", "channels.yaml")
channel_list = []
if os.path.isfile(channel_list_filename):
    with open(channel_list_filename, "rb") as fp:
        filecontent = fp.read().decode('utf-8')
    # safe_load: the channel file is plain data, and yaml.load() without an
    # explicit Loader is deprecated and can instantiate arbitrary objects.
    channel_list = yaml.safe_load(filecontent)
# DAB transmission mode I baseband sample rate in Hz.
# (The original had a duplicated "samp_rate = samp_rate = 2048000".)
samp_rate = 2048000
def draw_menu(stdscr):
    """Curses UI main loop for the DAB receiver.

    Renders the channel list, moves the selection with arrow/paging keys,
    retunes and rebuilds the decoder chain on Enter, and exits on 'q'.
    Operates on the module-level flowgraph objects created by main()
    (src, decoder, fg, ...), hence the long list of globals.
    """
    global src
    global decoder
    global dab_ofdm_demod_0
    global c2f
    global f2c
    global audio_sink_0
    global xrun_monitor
    global fg
    global use_zeromq
    global rpc_mgr_server
    global dab_ofdm_demod_0
    global ppm_shared
    global from_file_global
    k = 0
    cursor_x = 0
    cursor_y = 0
    # Clear and refresh the screen for a blank canvas
    stdscr.clear()
    stdscr.refresh()
    curses.curs_set(0)
    # NOTE(review): this thread object is created but never started or used
    # below -- presumably leftover from an earlier input design; confirm.
    kThread = KeyDetecThread(stdscr)
    # Start colors in curses
    curses.start_color()
    curses.init_pair(1, curses.COLOR_CYAN, curses.COLOR_BLACK)
    curses.init_pair(2, curses.COLOR_RED, curses.COLOR_BLACK)
    curses.init_pair(3, curses.COLOR_BLACK, curses.COLOR_WHITE)
    # selected: row highlighted by the cursor; active: row currently playing;
    # scroll: index of the first visible row.
    selected = 0
    active = 0
    scroll = 0
    nelem = len(channel_list)
    # Loop where k is the last character pressed
    while (k != ord('q')):
        # Initialization
        stdscr.clear()
        height, width = stdscr.getmaxyx()
        center_x = int((width // 2) - 2)
        center_y = int((height // 2) - 2)
        if k == curses.KEY_DOWN:
            cursor_y = cursor_y + 1
        elif k == curses.KEY_UP:
            cursor_y = cursor_y - 1
        elif k == curses.KEY_RIGHT:
            cursor_x = cursor_x + 1
        elif k == curses.KEY_LEFT:
            cursor_x = cursor_x - 1
        previous_active = active
        # Raw keycodes below duplicate some curses.KEY_* constants; the
        # selection/scroll logic keeps the highlighted row on screen.
        if k == 259: # key up
            if selected > 0:
                selected -= 1
            if selected <= scroll:
                if scroll > 1:
                    scroll -= 1
                else:
                    scroll = 0
        elif k == 258: # key down
            if selected < (nelem-1):
                selected += 1
            if selected == height - 1 + scroll:
                scroll += 1
        elif k == 339: # page up
            if selected >= 15:
                selected -= 15
            else:
                selected = 0
            if selected <= height - 1 + scroll:
                if scroll >= 15:
                    scroll -= 15
                else:
                    scroll = 0
        elif k == 338: # page down
            if selected < (nelem-15):
                selected += 15
            else:
                selected = nelem-1
            if selected >= height - 1 + scroll:
                scroll += 15
        elif k == 360: # end
            selected = nelem-1
            if selected >= height - 1 + scroll:
                scroll = nelem - height + 1
        elif k == 262: # home
            selected = 0
            scroll = 0
        elif k == 10: # enter
            active = selected
        if k == 10:
            # Retune: stop the flowgraph, swap the audio decoder for one
            # configured for the newly selected sub-channel, and restart.
            stdscr.move(center_y, center_x)
            ch = channel_list[active]
            freq = float(ch['frequency'])*1e6
            if from_file_global != None:
                # File playback: the capture is at a fixed frequency, so
                # there is nothing to retune.
                pass
            elif use_zeromq:
                rpc_mgr_server.request("set_frequency",[freq])
            else:
                src.set_center_freq(freq, 0)
            # 'classic' selects plain DAB (MP2) instead of DAB+.
            if 'classic' in ch and ch['classic'] == True:
                dabplus = False
            else:
                dabplus = True
            if dabplus:
                new = grdab.dabplus_audio_decoder_ff(grdab.parameters.dab_parameters(mode=1, sample_rate=samp_rate, verbose=False), ch['bit_rate'], ch['address'], ch['subch_size'], ch['protect_level'], True)
            else:
                new = grdab.dab_audio_decoder_ff(grdab.parameters.dab_parameters(mode=1, sample_rate=samp_rate, verbose=False), ch['bit_rate'], ch['address'], ch['subch_size'], ch['protect_level'], True)
            newaudio = audio.sink(48000, '', True)
            fg.stop()
            fg.wait()
            xrun_monitor.stop_until_tag()
            # Disconnect the old decoder/audio sink from the graph before
            # replacing them -- connections must be torn down explicitly.
            fg.disconnect(src, dab_ofdm_demod_0, decoder)
            fg.disconnect((decoder, 0), (f2c, 0))
            fg.disconnect((decoder, 1), (f2c, 1))
            fg.disconnect((c2f, 0), (audio_sink_0, 0))
            fg.disconnect((c2f, 1), (audio_sink_0, 1))
            del decoder
            del audio_sink_0
            decoder = new
            audio_sink_0 = newaudio
            fg.connect(src, dab_ofdm_demod_0, decoder)
            fg.connect((decoder, 0), (f2c, 0))
            fg.connect((decoder, 1), (f2c, 1))
            fg.connect((c2f, 0), (audio_sink_0, 0))
            fg.connect((c2f, 1), (audio_sink_0, 1))
            time.sleep(1)
            fg.start()
        stdscr.move(cursor_y, cursor_x)
        # Clamp the cursor to the window.
        cursor_x = max(0, cursor_x)
        cursor_x = min(width-1, cursor_x)
        cursor_y = max(0, cursor_y)
        cursor_y = min(height-1, cursor_y)
        statusbarstr = "Press 'q' to exit. Select channel and push Enter to change | Selected: {}. Listening to: {}".format(cursor_y, active)
        start_y = int((height // 2) - 2)
        # Rendering some text
        whstr = "Width: {}, Height: {}".format(width, height)
        ntorender = len(channel_list)-scroll
        if ntorender >= height - 1:
            ntorender = height - 1;
        # Draw the visible slice of the channel list; color 3 = selected,
        # color 2 = currently playing, color 1 = everything else.
        for i in range(scroll, ntorender+scroll):
            channel_name = channel_list[i]['name'].encode('utf-8')
            if i == selected:
                stdscr.addstr(i-scroll, 0, channel_name, curses.color_pair(3))
            elif i == active:
                stdscr.addstr(i-scroll, 0, channel_name, curses.color_pair(2))
            else:
                stdscr.addstr(i-scroll, 0, channel_name, curses.color_pair(1))
        # Render status bar
        stdscr.attron(curses.color_pair(3))
        stdscr.addstr(height-1, 0, statusbarstr)
        stdscr.addstr(height-1, len(statusbarstr), " " * (width - len(statusbarstr) - 1))
        stdscr.attroff(curses.color_pair(3))
        # Turning on attributes for title
        stdscr.attron(curses.color_pair(2))
        stdscr.attron(curses.A_BOLD)
        # Rendering title
        #stdscr.addstr(start_y, start_x_title, title)
        # Turning off attributes for title
        stdscr.attroff(curses.color_pair(2))
        stdscr.attroff(curses.A_BOLD)
        stdscr.move(cursor_y, cursor_x)
        # Refresh the screen
        stdscr.refresh()
        # Non-blocking poll for the next key; -1 means no key was pressed.
        stdscr.timeout(0)
        kn = stdscr.getch()
        k = kn
        stdscr.timeout(-1)
        # Wait for next input
        #queue.
        if k == -1:
            stdscr.move(center_y, center_x)
            time.sleep(0.1)
            stdscr.move(cursor_y, cursor_x)
class KeyDetecThread(threading.Thread):
    """Background thread that repeatedly polls a curses window for keys."""

    def __init__(self, stdscr):
        """Remember the curses window and mark the thread as running."""
        super(KeyDetecThread, self).__init__()
        self.running = 1
        self.stdscr = stdscr

    def run(self):
        """Poll getch() in a loop until self.running is cleared."""
        while self.running:
            key = self.stdscr.getch()
def main(rf_gain, if_gain, bb_gain, ppm, use_zeromq_in=False, server="tcp://127.0.0.1:10444", server_control="tcp://127.0.0.1:10445", from_file=None, from_file_repeat=False, skip_xrun_monitor=False):
    """Build the DAB receive flowgraph and launch the curses UI.

    rf_gain/if_gain/bb_gain -- receiver gain settings (presumably dB; passed
        straight to osmosdr or the zeromq server -- confirm units there).
    ppm -- frequency-correction value, applied as a sample-rate correction
        factor rather than as hardware freq_corr.
    use_zeromq_in -- take samples from a remote zeromq source instead of a
        local osmosdr device.
    server/server_control -- zeromq sample and RPC endpoints.
    from_file -- optional path to a raw complex capture to play instead of
        live hardware; from_file_repeat loops it.
    skip_xrun_monitor -- bypass both the throttle (file input) and the
        xrun_monitor block.

    Side effect: populates the module-level globals (src, decoder, fg, ...)
    that draw_menu() manipulates, then blocks in curses.wrapper().
    """
    global src
    global decoder
    global dab_ofdm_demod_0
    global c2f
    global f2c
    global audio_sink_0
    global fg
    global xrun_monitor
    global use_zeromq
    global rpc_mgr_server
    global dab_ofdm_demod_0
    global ppm_shared
    global from_file_global
    frequency=220.352e6
    audio_sample_rate=48000
    ppm_shared = ppm
    # Defaults for a typical DAB+ sub-channel; overridden from channel_list
    # below when a channels.yaml is present.
    dab_bit_rate=64
    dab_address=304
    dab_subch_size=64
    dab_protect_level=1
    use_zeromq=use_zeromq_in
    from_file_global = from_file
    # Import the sample-source backend lazily so the unused one is not
    # required at runtime.
    if use_zeromq:
        from gnuradio import zeromq
    else:
        import osmosdr
    import time
    if len(channel_list) > 0:
        ch = channel_list[0]
        frequency = float(ch['frequency'])*1e6
    else:
        ch = {"bit_rate" : 64, "address" : 304, "subch_size" : 64, "protect_level" : 1}
    print("Setting frequency: %0.3f MHz" % (frequency/1e6))
    fg = gr.top_block()
    if from_file != None:
        file_input = blocks.file_source(gr.sizeof_gr_complex, from_file, from_file_repeat)
        if skip_xrun_monitor:
            # No throttle: let the file feed the graph as fast as possible.
            src = file_input
        else:
            # Throttle file playback to the nominal sample rate.
            fthrottle = blocks.throttle(gr.sizeof_gr_complex, samp_rate)
            fg.connect(file_input, fthrottle)
            src = fthrottle
        print("Run from file %s" % from_file)
    elif not use_zeromq:
        osmosdr_source_0 = osmosdr.source( args="numchan=" + str(1) + " " + '' )
        osmosdr_source_0.set_sample_rate(samp_rate)
        osmosdr_source_0.set_center_freq(frequency, 0)
        osmosdr_source_0.set_freq_corr(0, 0)
        osmosdr_source_0.set_dc_offset_mode(0, 0)
        osmosdr_source_0.set_iq_balance_mode(0, 0)
        osmosdr_source_0.set_gain_mode(False, 0)
        osmosdr_source_0.set_gain(rf_gain, 0)
        osmosdr_source_0.set_if_gain(if_gain, 0)
        osmosdr_source_0.set_bb_gain(bb_gain, 0)
        osmosdr_source_0.set_antenna('RX2', 0)
        osmosdr_source_0.set_bandwidth(2000000, 0)
        src = osmosdr_source_0
    else:
        zeromq_source = zeromq.sub_source(gr.sizeof_gr_complex, 1, server, 100, False, -1)
        rpc_mgr_server = zeromq.rpc_manager()
        rpc_mgr_server.set_request_socket(server_control)
        rpc_mgr_server.request("set_sample_rate",[samp_rate])
        rpc_mgr_server.request("set_rf_gain",[rf_gain])
        rpc_mgr_server.request("set_if_gain",[if_gain])
        rpc_mgr_server.request("set_bb_gain",[bb_gain])
        rpc_mgr_server.request("set_ppm",[0]) # Not using hardware correction since it behaves differently on different hardware
        rpc_mgr_server.request("set_frequency",[frequency])
        time.sleep(0.7)
        src = zeromq_source
    # ppm is applied in software, as a resampling correction factor.
    sample_rate_correction_factor = 1 + float(ppm_shared)*1e-6
    dab_ofdm_demod_0 = grdab.ofdm_demod(
        grdab.parameters.dab_parameters(
            mode=1,
            sample_rate=samp_rate,
            verbose=False
        ),
        grdab.parameters.receiver_parameters(
            mode=1,
            softbits=True,
            input_fft_filter=True,
            autocorrect_sample_rate=False,
            sample_rate_correction_factor=sample_rate_correction_factor,
            always_include_resample=True,
            verbose=False,
            correct_ffe=True,
            equalize_magnitude=True
        )
    )
    # 'classic' selects plain DAB (MP2) instead of DAB+.
    if 'classic' in ch and ch['classic'] == True:
        dabplus = False
    else:
        dabplus = True
    if dabplus:
        decoder = grdab.dabplus_audio_decoder_ff(grdab.parameters.dab_parameters(mode=1, sample_rate=samp_rate, verbose=False), ch['bit_rate'], ch['address'], ch['subch_size'], ch['protect_level'], True)
    else:
        decoder = grdab.dab_audio_decoder_ff(grdab.parameters.dab_parameters(mode=1, sample_rate=samp_rate, verbose=False), ch['bit_rate'], ch['address'], ch['subch_size'], ch['protect_level'], True)
    xrun_monitor = grdab.xrun_monitor_cc(100000)
    xrun_monitor.set_report_fill(False)
    # The stereo float pair is packed into a complex stream so the xrun
    # monitor (a complex block) can sit between decoder and audio sink.
    f2c = blocks.float_to_complex()
    c2f = blocks.complex_to_float()
    audio_sink_0 = audio.sink(audio_sample_rate, '', True)
    fg.connect(src, dab_ofdm_demod_0, decoder)
    fg.connect((decoder, 0), (f2c, 0))
    fg.connect((decoder, 1), (f2c, 1))
    if skip_xrun_monitor:
        fg.connect(f2c, c2f)
    else:
        fg.connect(f2c, xrun_monitor)
        fg.connect(xrun_monitor, c2f)
    fg.connect((c2f, 0), (audio_sink_0, 0))
    fg.connect((c2f, 1), (audio_sink_0, 1))
    fg.start()
    curses.wrapper(draw_menu)
if __name__ == "__main__":
    # NOTE(review): main() requires rf_gain, if_gain, bb_gain and ppm with no
    # defaults, so running this module directly raises TypeError. Presumably a
    # separate CLI entry point supplies these -- confirm intended usage.
    main()
|
andrmuel/gr-dab
|
python/app/curses_app.py
|
Python
|
gpl-3.0
| 12,694
|
# -*- encoding: utf8 -*-
#
# The Qubes OS Project, http://www.qubes-os.org
#
# Copyright (C) 2017 Marek Marczykowski-Górecki
# <marmarek@invisiblethingslab.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program; if not, see <http://www.gnu.org/licenses/>.
import qubesadmin.tests
import qubesadmin.tests.tools
import qubesadmin.tools.qvm_pause
class TC_00_qvm_pause(qubesadmin.tests.QubesTestCase):
    """Tests for the qvm-pause CLI tool against a mocked Admin API.

    Each test primes self.app.expected_calls with the exact wire-format
    responses ('0\\x00...' = success, '2\\x00<ExcName>...' = error) and then
    asserts every expected call was consumed.
    """
    def test_000_with_vm(self):
        """Pausing one running VM issues a single admin.vm.Pause call."""
        self.app.expected_calls[
            ('dom0', 'admin.vm.List', None, None)] = \
            b'0\x00some-vm class=AppVM state=Running\n'
        self.app.expected_calls[
            ('some-vm', 'admin.vm.Pause', None, None)] = b'0\x00'
        qubesadmin.tools.qvm_pause.main(['some-vm'], app=self.app)
        self.assertAllCalled()
    def test_001_missing_vm(self):
        """No VM argument exits with a usage error mentioning --all/VMNAME."""
        with self.assertRaises(SystemExit):
            with qubesadmin.tests.tools.StderrBuffer() as stderr:
                qubesadmin.tools.qvm_pause.main([], app=self.app)
        self.assertIn('one of the arguments --all VMNAME is required',
            stderr.getvalue())
        self.assertAllCalled()
    def test_002_invalid_vm(self):
        """An unknown VM name exits with a 'no such domain' error."""
        self.app.expected_calls[
            ('dom0', 'admin.vm.List', None, None)] = \
            b'0\x00some-vm class=AppVM state=Running\n'
        with self.assertRaises(SystemExit):
            with qubesadmin.tests.tools.StderrBuffer() as stderr:
                qubesadmin.tools.qvm_pause.main(['no-such-vm'], app=self.app)
        self.assertIn('no such domain', stderr.getvalue())
        self.assertAllCalled()
    def test_003_not_running(self):
        """Pausing a halted VM returns exit code 1 (tool reports the error)."""
        # TODO: some option to ignore this error?
        self.app.expected_calls[
            ('some-vm', 'admin.vm.Pause', None, None)] = \
            b'2\x00QubesVMNotStartedError\x00\x00Domain is powered off: ' \
            b'some-vm\x00'
        self.app.expected_calls[
            ('dom0', 'admin.vm.List', None, None)] = \
            b'0\x00some-vm class=AppVM state=Halted\n'
        self.assertEqual(
            qubesadmin.tools.qvm_pause.main(['some-vm'], app=self.app),
            1)
        self.assertAllCalled()
    def test_004_multiple_vms(self):
        """Multiple VM names pause each one and exit 0."""
        self.app.expected_calls[
            ('some-vm', 'admin.vm.Pause', None, None)] = \
            b'0\x00'
        self.app.expected_calls[
            ('other-vm', 'admin.vm.Pause', None, None)] = \
            b'0\x00'
        self.app.expected_calls[
            ('dom0', 'admin.vm.List', None, None)] = \
            b'0\x00some-vm class=AppVM state=Running\n' \
            b'other-vm class=AppVM state=Running\n'
        self.assertEqual(
            qubesadmin.tools.qvm_pause.main(['some-vm', 'other-vm'],
                app=self.app),
            0)
        self.assertAllCalled()
|
marmarek/qubes-core-mgmt-client
|
qubesadmin/tests/tools/qvm_pause.py
|
Python
|
lgpl-2.1
| 3,376
|
# -*- coding: utf-8 -*-
from hello import mod
def register_hello_module(app):
    """Register the hello blueprint (``mod``) on the given Flask *app*."""
    app.register_blueprint(mod)
|
liufan/cornerstone
|
src/op_site/domain/hello/__init__.py
|
Python
|
apache-2.0
| 110
|
from layers.models import *
import TileStache
from django.core.cache import cache
import logging
import json
from django.conf import settings
# Django cache key under which the serialized TileStache config is stored.
CACHE_KEY = "config.json"
def get_config(force=False):
    """
    Get TileStache confiuration.

    Returns a TileStache Configuration built from the cached JSON when
    available; otherwise regenerates it from the Layer models, caches it
    for 5 minutes, optionally mirrors it to TILESTACHE_CONFIG_PATH, and
    returns the freshly built configuration. Pass force=True to skip the
    cache.
    """
    cached = cache.get(CACHE_KEY, None)
    if not force and cached is not None:
        return TileStache.Config.buildConfiguration(json.loads(cached))
    logging.debug("generating config")
    config_dict = get_base_config_dict()
    # One entry per Layer model, keyed by its layerName.
    for layer in Layer.objects.all():
        config_dict['layers'][layer.layerName] = layer.get_layer_config()
    #logging.debug(json.dumps(config_dict, indent=4, separators=(',', ': ')))
    cache.set(CACHE_KEY, json.dumps(config_dict), 60*5)
    # Optionally persist the config to disk so external tools can read it.
    if settings.TILESTACHE_CONFIG_PATH is not None:
        with open(settings.TILESTACHE_CONFIG_PATH, 'w') as f:
            f.write(json.dumps(config_dict, indent=4, separators=(',', ': ')))
    return TileStache.Config.buildConfiguration(config_dict)
def get_base_config_dict():
    """
    Get the base tile stache configuration dictionary, layers get added to this to build a config.
    """
    return {
        'cache': settings.TILESTACHE_CACHE,
        'layers': {},
    }
|
trailbehind/EasyTileServer
|
webApp/layers/config.py
|
Python
|
bsd-3-clause
| 1,239
|
import math
def points_form_square(points):
    """Return True if the four 2-D points form a (non-degenerate) square.

    All comparisons use squared Euclidean distances, so the test is exact
    for integer coordinates (the original compared math.sqrt() results,
    which is fragile for rotated squares).

    Fixes a false positive in the original: it only checked the two sides
    adjacent to one corner plus one diagonal pair, so e.g.
    [(0,0), (5,0), (-3,4), (sqrt(80),0)] was accepted. We now require all
    four sides equal and the two diagonals equal (an equilateral
    quadrilateral with equal diagonals is a square).
    """
    def dist_sq(a, b):
        # Squared distance: strictly monotone in true distance, so all the
        # equality/ordering tests below match the sqrt-based versions.
        return (a[0] - b[0]) ** 2 + (a[1] - b[1]) ** 2

    p0 = points[0]
    # Sort so pts[1], pts[2] are the corners nearest p0 (the adjacent ones
    # in a square) and pts[3] is the opposite corner.
    pts = sorted(points, key=lambda p: dist_sq(p0, p))
    side = dist_sq(pts[0], pts[1])
    if side == 0 or side != dist_sq(pts[0], pts[2]):
        return False  # sides at p0 must be equal and non-degenerate
    if dist_sq(pts[1], pts[3]) != side or dist_sq(pts[2], pts[3]) != side:
        return False  # sides at the opposite corner must match too
    if dist_sq(pts[0], pts[3]) != dist_sq(pts[1], pts[2]):
        return False  # both diagonals must be equal
    return True
|
frasertweedale/drill
|
py/geom.py
|
Python
|
mit
| 501
|
#####################################################################
# vec3 - 3-dimensional vector
#
# Copyright (C) 2002, Matthias Baas (baas@ira.uka.de)
#
# You may distribute under the terms of the BSD license, as
# specified in the file license.txt.
####################################################################
import types, math
# vec3
# vec3
class vec3:
    """Three-dimensional vector.

    This class can be used to represent points, vectors, normals
    or even colors. The usual vector operations are available.

    Ported to Python 3: the original used Python-2-only syntax
    (``raise TypeError, "..."``, backticks, ``types.FloatType``,
    ``__div__``), which is a SyntaxError on Python 3. The public
    interface is unchanged; ``__div__``/``__idiv__`` are kept as
    aliases for old callers.
    """

    def __init__(self, *args):
        """Constructor.

        There are several possibilities how to initialize a vector:

        v = vec3()       -> v = <0,0,0>
        v = vec3(a)      -> v = <a,a,a>
        v = vec3(x,y)    -> v = <x,y,0>
        v = vec3(x,y,z)  -> v = <x,y,z>

        Note that specifying just one value sets all three components to
        that value (except when that single value is a another vec3, then
        that vector is copied).

        Additionally you can wrap those values in a list or a tuple or
        specify them as a string:

        v = vec3([1,2,3]) -> v = <1,2,3>
        v = vec3("4,5")   -> v = <4,5,0>
        """
        if len(args) == 0:
            self.x, self.y, self.z = (0.0, 0.0, 0.0)
        elif len(args) == 1:
            a = args[0]
            # scalar -> all three components
            if isinstance(a, (int, float)):
                self.x, self.y, self.z = (a, a, a)
            # vec3 -> copy
            elif isinstance(a, vec3):
                self.x, self.y, self.z = (a.x, a.y, a.z)
            # tuple/list
            elif isinstance(a, (tuple, list)):
                if len(a) == 0:
                    self.x = self.y = self.z = 0.0
                elif len(a) == 1:
                    self.x = self.y = self.z = a[0]
                elif len(a) == 2:
                    self.x, self.y = a
                    self.z = 0.0
                elif len(a) == 3:
                    self.x, self.y, self.z = a
                else:
                    raise TypeError("vec3() takes at most 3 arguments")
            # string such as "1, 2, 3" or "4 5"
            elif isinstance(a, str):
                components = a.replace(",", " ").split()
                values = [float(c) for c in components]
                self.x, self.y, self.z = vec3(values)
            # error
            else:
                raise TypeError("vec3() arg can't be converted to vec3")
        elif len(args) == 2:
            self.x, self.y, self.z = (args[0], args[1], 0.0)
        elif len(args) == 3:
            self.x, self.y, self.z = args
        else:
            raise TypeError("vec3() takes at most 3 arguments")

    def __repr__(self):
        return 'vec3(%r, %r, %r)' % (self.x, self.y, self.z)

    def __str__(self):
        fmt = "%1.4f"
        return '(' + fmt % self.x + ', ' + fmt % self.y + ', ' + fmt % self.z + ')'

    def __eq__(self, other):
        """== operator

        >>> a=vec3(1.0, 0.5, -1.8)
        >>> b=vec3(-0.3, 0.75, 0.5)
        >>> c=vec3(-0.3, 0.75, 0.5)
        >>> print(a==b)
        False
        >>> print(b==c)
        True
        >>> print(a==None)
        False
        """
        if isinstance(other, vec3):
            return self.x == other.x and self.y == other.y and self.z == other.z
        return False

    def __ne__(self, other):
        """!= operator

        >>> a=vec3(1.0, 0.5, -1.8)
        >>> b=vec3(-0.3, 0.75, 0.5)
        >>> c=vec3(-0.3, 0.75, 0.5)
        >>> print(a!=b)
        True
        >>> print(b!=c)
        False
        >>> print(a!=None)
        True
        """
        if isinstance(other, vec3):
            return self.x != other.x or self.y != other.y or self.z != other.z
        return True

    # The Python 2 original kept default identity hashing alongside __eq__;
    # preserve that (Python 3 would otherwise make the class unhashable).
    __hash__ = object.__hash__

    def __add__(self, other):
        """Vector addition.

        >>> a=vec3(1.0, 0.5, -1.8)
        >>> b=vec3(-0.3, 0.75, 0.5)
        >>> print(a+b)
        (0.7000, 1.2500, -1.3000)
        """
        if isinstance(other, vec3):
            return vec3(self.x + other.x, self.y + other.y, self.z + other.z)
        raise TypeError("unsupported operand type for +")

    def __sub__(self, other):
        """Vector subtraction.

        >>> a=vec3(1.0, 0.5, -1.8)
        >>> b=vec3(-0.3, 0.75, 0.5)
        >>> print(a-b)
        (1.3000, -0.2500, -2.3000)
        """
        if isinstance(other, vec3):
            return vec3(self.x - other.x, self.y - other.y, self.z - other.z)
        raise TypeError("unsupported operand type for -")

    def __mul__(self, other):
        """Multiplication with a scalar or dot product.

        >>> a=vec3(1.0, 0.5, -1.8)
        >>> b=vec3(-0.3, 0.75, 0.5)
        >>> print(a*2.0)
        (2.0000, 1.0000, -3.6000)
        >>> print(2.0*a)
        (2.0000, 1.0000, -3.6000)
        >>> print(a*b)
        -0.825
        """
        # vec3 * scalar
        if isinstance(other, (int, float)):
            return vec3(self.x * other, self.y * other, self.z * other)
        # vec3 * vec3 -> dot product
        if isinstance(other, vec3):
            return self.x * other.x + self.y * other.y + self.z * other.z
        # Try to delegate the operation to the other operand
        if getattr(other, "__rmul__", None) is not None:
            return other.__rmul__(self)
        raise TypeError("unsupported operand type for *")

    __rmul__ = __mul__

    def __truediv__(self, other):
        """Division by scalar

        >>> a=vec3(1.0, 0.5, -1.8)
        >>> print(a/2.0)
        (0.5000, 0.2500, -0.9000)
        """
        if isinstance(other, (int, float)):
            return vec3(self.x / other, self.y / other, self.z / other)
        raise TypeError("unsupported operand type for /")

    # Backwards-compatible alias for code that calls __div__ directly.
    __div__ = __truediv__

    def __mod__(self, other):
        """Modulo (component wise)

        >>> a=vec3(3.0, 2.5, -1.8)
        >>> print(a%2.0)
        (1.0000, 0.5000, 0.2000)
        """
        if isinstance(other, (int, float)):
            return vec3(self.x % other, self.y % other, self.z % other)
        raise TypeError("unsupported operand type for %")

    def __iadd__(self, other):
        """Inline vector addition.

        >>> a=vec3(1.0, 0.5, -1.8)
        >>> b=vec3(-0.3, 0.75, 0.5)
        >>> a+=b
        >>> print(a)
        (0.7000, 1.2500, -1.3000)
        """
        if isinstance(other, vec3):
            self.x += other.x
            self.y += other.y
            self.z += other.z
            return self
        raise TypeError("unsupported operand type for +=")

    def __isub__(self, other):
        """Inline vector subtraction.

        >>> a=vec3(1.0, 0.5, -1.8)
        >>> b=vec3(-0.3, 0.75, 0.5)
        >>> a-=b
        >>> print(a)
        (1.3000, -0.2500, -2.3000)
        """
        if isinstance(other, vec3):
            self.x -= other.x
            self.y -= other.y
            self.z -= other.z
            return self
        raise TypeError("unsupported operand type for -=")

    def __imul__(self, other):
        """Inline multiplication (only with scalar)

        >>> a=vec3(1.0, 0.5, -1.8)
        >>> a*=2.0
        >>> print(a)
        (2.0000, 1.0000, -3.6000)
        """
        if isinstance(other, (int, float)):
            self.x *= other
            self.y *= other
            self.z *= other
            return self
        raise TypeError("unsupported operand type for *=")

    def __itruediv__(self, other):
        """Inline division with scalar

        >>> a=vec3(1.0, 0.5, -1.8)
        >>> a/=2.0
        >>> print(a)
        (0.5000, 0.2500, -0.9000)
        """
        if isinstance(other, (int, float)):
            self.x /= other
            self.y /= other
            self.z /= other
            return self
        raise TypeError("unsupported operand type for /=")

    # Backwards-compatible alias for code that calls __idiv__ directly.
    __idiv__ = __itruediv__

    def __imod__(self, other):
        """Inline modulo

        >>> a=vec3(3.0, 2.5, -1.8)
        >>> a%=2.0
        >>> print(a)
        (1.0000, 0.5000, 0.2000)
        """
        if isinstance(other, (int, float)):
            self.x %= other
            self.y %= other
            self.z %= other
            return self
        raise TypeError("unsupported operand type for %=")

    def __neg__(self):
        """Negation

        >>> a=vec3(3.0, 2.5, -1.8)
        >>> print(-a)
        (-3.0000, -2.5000, 1.8000)
        """
        return vec3(-self.x, -self.y, -self.z)

    def __pos__(self):
        """
        >>> a=vec3(3.0, 2.5, -1.8)
        >>> print(+a)
        (3.0000, 2.5000, -1.8000)
        """
        return vec3(+self.x, +self.y, +self.z)

    def __abs__(self):
        """Return the length of the vector.

        abs(v) is equivalent to v.length().

        >>> a=vec3(1.0, 0.5, -1.8)
        >>> print(round(abs(a), 6))
        2.118962
        """
        return math.sqrt(self * self)

    def __len__(self):
        """Length of the sequence (always 3)"""
        return 3

    def __getitem__(self, key):
        """Return a component by index (0-based)

        >>> a=vec3(1.0, 0.5, -1.8)
        >>> print(a[0])
        1.0
        >>> print(a[1])
        0.5
        >>> print(a[2])
        -1.8
        """
        if not isinstance(key, int):
            raise TypeError("index must be integer")
        if key == 0:
            return self.x
        elif key == 1:
            return self.y
        elif key == 2:
            return self.z
        raise IndexError("index out of range")

    def __setitem__(self, key, value):
        """Set a component by index (0-based)

        >>> a=vec3()
        >>> a[0]=1.5; a[1]=0.7; a[2]=-0.3
        >>> print(a)
        (1.5000, 0.7000, -0.3000)
        """
        if not isinstance(key, int):
            raise TypeError("index must be integer")
        if key == 0:
            self.x = value
        elif key == 1:
            self.y = value
        elif key == 2:
            self.z = value
        else:
            raise IndexError("index out of range")

    def cross(self, other):
        """Cross product.

        >>> a=vec3(1.0, 0.5, -1.8)
        >>> b=vec3(-0.3, 0.75, 0.5)
        >>> c=a.cross(b)
        >>> print(c)
        (1.6000, 0.0400, 0.9000)
        """
        if isinstance(other, vec3):
            return vec3(self.y * other.z - self.z * other.y,
                        self.z * other.x - self.x * other.z,
                        self.x * other.y - self.y * other.x)
        raise TypeError("unsupported operand type for cross()")

    def length(self):
        """Return the length of the vector.

        v.length() is equivalent to abs(v).

        >>> a=vec3(1.0, 0.5, -1.8)
        >>> print(round(a.length(), 6))
        2.118962
        """
        return math.sqrt(self * self)

    def normalize(self):
        """Return normalized vector.

        >>> a=vec3(1.0, 0.5, -1.8)
        >>> print(a.normalize())
        (0.4719, 0.2360, -0.8495)
        """
        nlen = 1.0 / math.sqrt(self * self)
        return vec3(self.x * nlen, self.y * nlen, self.z * nlen)

    def angle(self, other):
        """Return angle (in radians) between self and other.

        >>> a=vec3(1.0, 0.5, -1.8)
        >>> b=vec3(-0.3, 0.75, 0.5)
        >>> print(round(a.angle(b), 6))
        1.993068
        """
        if isinstance(other, vec3):
            return math.acos((self * other) / (abs(self) * abs(other)))
        raise TypeError("unsupported operand type for angle()")

    def reflect(self, N):
        """Return the reflection vector.

        N is the surface normal which has to be of unit length.

        >>> a=vec3(1.0, 0.5, -1.8)
        >>> print(a.reflect(vec3(1,0,1)))
        (2.6000, 0.5000, -0.2000)
        """
        return self - 2.0 * (self * N) * N

    def refract(self, N, eta):
        """Return the transmitted vector.

        N is the surface normal which has to be of unit length.
        eta is the relative index of refraction. If the returned
        vector is zero then there is no transmitted light because
        of total internal reflection.

        >>> a=vec3(1.0, -1.5, 0.8)
        >>> print(a.refract(vec3(0,1,0), 1.33))
        (1.3300, -1.7920, 1.0640)
        """
        dot = self * N
        k = 1.0 - eta * eta * (1.0 - dot * dot)
        if k < 0:
            return vec3(0.0, 0.0, 0.0)
        return eta * self - (eta * dot + math.sqrt(k)) * N

    def ortho(self):
        """Returns an orthogonal vector.

        Returns a vector that is orthogonal to self (where
        self*self.ortho()==0).

        >>> a=vec3(1.0, -1.5, 0.8)
        >>> print(round(a*a.ortho(),8))
        0.0
        """
        x = abs(self.x)
        y = abs(self.y)
        z = abs(self.z)
        # Zero out the smallest component and swap the other two so the dot
        # product with self vanishes exactly.
        # Is z the smallest element? Then use x and y
        if z <= x and z <= y:
            return vec3(-self.y, self.x, 0.0)
        # Is y smallest element? Then use x and z
        elif y <= x and y <= z:
            return vec3(-self.z, 0.0, self.x)
        # x is smallest
        else:
            return vec3(0.0, -self.z, self.y)
######################################################################
def _test():
    """Run this module's doctests and print a "<failed>/<attempted>" summary."""
    import doctest, vec3
    failed, total = doctest.testmod(vec3)
    # Ported from the Python 2 print statement.
    print("%d/%d failed" % (failed, total))
if __name__=="__main__":
    # Running the module directly executes the doctest suite.
    _test()

#    a = vec3(1,2,3.03)
#    b = vec3("-2,0.5,1E10")
#    print a.angle(b)
|
RAPD/RAPD
|
src/plugins/subcontractors/xdsme/pycgtypes/vec3.py
|
Python
|
agpl-3.0
| 13,964
|
# Copyright(c) 2013 Intel Corporation.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms and conditions of the GNU General Public License,
# version 2, as published by the Free Software Foundation.
#
# This program is distributed in the hope it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
#
# The full GNU General Public License is included in this distribution in
# the file called "COPYING".
from subprocess import call
import lndir
def make_it(tmpdir):
    """Build the source tree fixture under tmpdir/sub and return its path.

    Layout exercises every lndir case: plain files (a/*, c/1-3), a relative
    file symlink (b/2), a relative dir symlink (c/A), a top-level dir
    symlink (d -> c), and absolute file/dir symlinks (e/3, e/A).
    tmpdir is pytest's py.path.local fixture; chdir() makes later relative
    links resolvable.
    """
    t = tmpdir.mkdir("sub")
    t.chdir()
    a_d = t.mkdir('a')
    a_d.join('1').write('1')
    a_d.join('2').write('2')
    a_d.join('3').write('3')
    a_d.mkdir('A')
    b_d = t.mkdir('b')
    b_d.join('1').write('1')
    # relative symlink to a file in a sibling directory
    b_d.join('2').mksymlinkto(a_d.join('2'), absolute=0)
    b_d.join('3').write('3')
    c_d = t.mkdir('c')
    c_d.join('1').write('1')
    c_d.join('2').write('2')
    c_d.join('3').write('3')
    # relative symlink to a directory
    c_d.join('A').mksymlinkto(a_d.join('A'), absolute=0)
    t.join('d').mksymlinkto('c')
    e_d = t.mkdir('e')
    e_d.join('1').write('1')
    e_d.join('2').write('2')
    # absolute symlinks (file and directory)
    e_d.join('3').mksymlinkto(a_d.join('3'), absolute=1)
    e_d.join('A').mksymlinkto(a_d.join('A'), absolute=1)
    return t
def test_create_abs(tmpdir):
    """Mirror the fixture tree into dest/ passing an absolute source path.

    NOTE(review): `tree` and `diff` output goes to stdout only -- their exit
    codes are not asserted, so this is a smoke test plus visual inspection
    (expected layout is in the comment block below). Confirm if a real
    assertion was intended.
    """
    src_dir = make_it(tmpdir)
    tmpdir.chdir()
    dst = tmpdir.mkdir("dest")
    lndir.lndir(str(src_dir), str(dst))
    call(["tree", "-Ffin"])
    call(["diff", "--brief", "-r", str(src_dir), str(dst)])
# ./dest/
# ./dest/a/
# ./dest/a/1 -> /tmp/pytest-60/test_create_abs0/sub/a/1
# ./dest/a/2 -> /tmp/pytest-60/test_create_abs0/sub/a/2
# ./dest/a/3 -> /tmp/pytest-60/test_create_abs0/sub/a/3
# ./dest/a/A/
# ./dest/b/
# ./dest/b/1 -> /tmp/pytest-60/test_create_abs0/sub/b/1
# ./dest/b/2 -> ../a/2
# ./dest/b/3 -> /tmp/pytest-60/test_create_abs0/sub/b/3
# ./dest/c/
# ./dest/c/1 -> /tmp/pytest-60/test_create_abs0/sub/c/1
# ./dest/c/2 -> /tmp/pytest-60/test_create_abs0/sub/c/2
# ./dest/c/3 -> /tmp/pytest-60/test_create_abs0/sub/c/3
# ./dest/c/A -> ../a/A/
# ./dest/d -> c/
# ./dest/e/
# ./dest/e/1 -> /tmp/pytest-60/test_create_abs0/sub/e/1
# ./dest/e/2 -> /tmp/pytest-60/test_create_abs0/sub/e/2
# ./dest/e/3 -> /tmp/pytest-60/test_create_abs0/sub/a/3
# ./dest/e/A -> /tmp/pytest-60/test_create_abs0/sub/a/A/
# ./sub/
# ./sub/a/
# ./sub/a/1
# ./sub/a/2
# ./sub/a/3
# ./sub/a/A/
# ./sub/b/
# ./sub/b/1
# ./sub/b/2 -> ../a/2
# ./sub/b/3
# ./sub/c/
# ./sub/c/1
# ./sub/c/2
# ./sub/c/3
# ./sub/c/A -> ../a/A/
# ./sub/d -> c/
# ./sub/e/
# ./sub/e/1
# ./sub/e/2
# ./sub/e/3 -> /tmp/pytest-60/test_create_abs0/sub/a/3
# ./sub/e/A -> /tmp/pytest-60/test_create_abs0/sub/a/A/
def test_create_rel(tmpdir):
    """Mirror the fixture tree into dest/ passing a relative source path.

    Same smoke-test caveat as test_create_abs: `tree`/`diff` results are
    printed, not asserted. The relative source makes lndir create relative
    links for regular files (see the expected layout below).
    """
    src_dir = make_it(tmpdir)
    tmpdir.chdir()
    dst = tmpdir.mkdir("dest")
    lndir.lndir("../sub", str(dst))
    call(["tree", "-Ffin"])
    call(["diff", "--brief", "-r", str(src_dir), str(dst)])
# ./dest/
# ./dest/a/
# ./dest/a/1 -> ../../sub/a/1
# ./dest/a/2 -> ../../sub/a/2
# ./dest/a/3 -> ../../sub/a/3
# ./dest/a/A/
# ./dest/b/
# ./dest/b/1 -> ../../sub/b/1
# ./dest/b/2 -> ../a/2
# ./dest/b/3 -> ../../sub/b/3
# ./dest/c/
# ./dest/c/1 -> ../../sub/c/1
# ./dest/c/2 -> ../../sub/c/2
# ./dest/c/3 -> ../../sub/c/3
# ./dest/c/A -> ../a/A/
# ./dest/d -> c/
# ./dest/e/
# ./dest/e/1 -> ../../sub/e/1
# ./dest/e/2 -> ../../sub/e/2
# ./dest/e/3 -> /tmp/pytest-60/test_create_rel0/sub/a/3
# ./dest/e/A -> /tmp/pytest-60/test_create_rel0/sub/a/A/
# ./sub/
# ./sub/a/
# ./sub/a/1
# ./sub/a/2
# ./sub/a/3
# ./sub/a/A/
# ./sub/b/
# ./sub/b/1
# ./sub/b/2 -> ../a/2
# ./sub/b/3
# ./sub/c/
# ./sub/c/1
# ./sub/c/2
# ./sub/c/3
# ./sub/c/A -> ../a/A/
# ./sub/d -> c/
# ./sub/e/
# ./sub/e/1
# ./sub/e/2
# ./sub/e/3 -> /tmp/pytest-60/test_create_rel0/sub/a/3
# ./sub/e/A -> /tmp/pytest-60/test_create_rel0/sub/a/A/
|
rbbratta/site_lib
|
lndir_test.py
|
Python
|
gpl-2.0
| 4,142
|
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2016, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Test file created by XlsxWriter against a file created by Excel.
    """

    def setUp(self):
        # Point the comparison harness at the Excel-generated reference file
        # and the path where the XlsxWriter output will be written.
        self.maxDiff = None
        filename = 'chartsheet06.xlsx'
        test_dir = 'xlsxwriter/test/comparison/'
        self.got_filename = test_dir + '_test_' + filename
        self.exp_filename = test_dir + 'xlsx_files/' + filename
        self.ignore_files = []
        self.ignore_elements = {}

    def test_create_file(self):
        """Test the worksheet properties of an XlsxWriter chartsheet file."""
        workbook = Workbook(self.got_filename)
        worksheet = workbook.add_worksheet()
        chartsheet = workbook.add_chartsheet()
        chart = workbook.add_chart({'type': 'bar'})
        # Fixed axis ids so the generated XML matches the reference file.
        chart.axis_ids = [43778432, 43780352]
        data = [
            [1, 2, 3, 4, 5],
            [2, 4, 6, 8, 10],
            [3, 6, 9, 12, 15],
        ]
        worksheet.write_column('A1', data[0])
        worksheet.write_column('B1', data[1])
        worksheet.write_column('C1', data[2])
        chart.add_series({'values': '=Sheet1!$A$1:$A$5'})
        chart.add_series({'values': '=Sheet1!$B$1:$B$5'})
        chart.add_series({'values': '=Sheet1!$C$1:$C$5'})
        # The chartsheet tab color is the property under test here.
        chartsheet.set_tab_color('red')
        chartsheet.set_chart(chart)
        workbook.close()
        self.assertExcelEqual()
|
jkyeung/XlsxWriter
|
xlsxwriter/test/comparison/test_chartsheet06.py
|
Python
|
bsd-2-clause
| 1,649
|
from insights.parsers import qpid_stat
from insights.tests import context_wrap
QPID_STAT_Q = """
COMMAND> qpid-stat -q --ssl-certificate=/etc/pki/katello/qpid_client_striped.crt -b amqps://localhost:5671
Queues
queue dur autoDel excl msg msgIn msgOut bytes bytesIn bytesOut cons bind
==========================================================================================================================================================
00d6cc19-15fc-4b7c-af3c-6a38e7bb386d:1.0 Y Y 0 2 2 0 486 486 1 2
prrhss001058.infra.novamedia.com:event Y Y 0 2.62k 2.62k 0 45.5m 45.5m 1 2
celery Y 4 41 37 4.12k 37.5k 33.4k 8 2
pulp.agent.836a7366-4790-482d-b3bc-efee9d42b3cd Y 1 1 0 463 463 0 0 1
reserved_resource_worker-7@prrhss001058.infra.novamedia.com.celery.pidbox Y 0 0 0 0 0 0 1 2
reserved_resource_worker-7@prrhss001058.infra.novamedia.com.dq Y Y 0 182 182 0 229k 229k 1 2
""".strip()
QPID_STAT_U = """
COMMAND> qpid-stat -u --ssl-certificate=/etc/pki/katello/qpid_client_striped.crt -b amqps://localhost:5671
Subscriptions
subscr queue conn procName procId browse acked excl creditMode delivered sessUnacked
===========================================================================================================================================================================================================================
0 00d6cc19-15fc-4b7c-af3c-6a38e7bb386d:1.0 qpid.10.20.1.10:5671-10.20.1.10:33787 celery 21409 CREDIT 2 0
0 pulp.agent.c6a430bc-5ec7-42f8-99ce-f320ed0b9113 qpid.10.20.1.10:5671-10.30.0.148:57423 goferd 32227 Y CREDIT 0 0
1 prrhss001058.infra.novamedia.com:event qpid.10.20.1.10:5671-10.20.1.10:33848 Qpid Java Client 21066 Y Y WINDOW 2,623 0
0 celeryev.4c77bd03-1cde-49eb-bdc0-b7c38f9ff93d qpid.10.20.1.10:5671-10.20.1.10:33777 celery 21356 Y CREDIT 363,228 0
1 celery qpid.10.20.1.10:5671-10.20.1.10:33786 celery 21409 Y CREDIT 5 0
katello_event_queue katello_event_queue qpid.10.20.1.10:5671-10.20.1.10:33911 ruby 21801 Y CREDIT 7,642 0
""".strip()
def test_qpid_stat_q():
    """Parse 'qpid-stat -q' output and check selected per-queue fields."""
    queues = qpid_stat.QpidStatQ(context_wrap(QPID_STAT_Q))

    # row index -> expected subset of parsed fields for that row.
    expected_fields = {
        0: {'queue': '00d6cc19-15fc-4b7c-af3c-6a38e7bb386d:1.0', 'dur': ''},
        1: {'queue': 'prrhss001058.infra.novamedia.com:event', 'dur': 'Y',
            'autoDel': '', 'excl': 'Y', 'msg': '0', 'msgIn': '2.62k',
            'msgOut': '2.62k', 'bytes': '0', 'bytesIn': '45.5m',
            'bytesOut': '45.5m', 'cons': '1', 'bind': '2'},
        2: {'msg': '4'},
        3: {'cons': '0'},
        4: {'bytesIn': '0'},
        5: {'queue': 'reserved_resource_worker-7@prrhss001058.infra.novamedia.com.dq',
            'dur': 'Y', 'autoDel': 'Y', 'excl': '', 'msg': '0',
            'msgIn': '182', 'msgOut': '182', 'bytes': '0',
            'bytesIn': '229k', 'bytesOut': '229k', 'cons': '1', 'bind': '2'},
    }
    for row, fields in expected_fields.items():
        for key, value in fields.items():
            assert queues.data[row].get(key) == value

    # test iteration
    assert [entry['queue'] for entry in queues] == [
        '00d6cc19-15fc-4b7c-af3c-6a38e7bb386d:1.0',
        'prrhss001058.infra.novamedia.com:event',
        'celery',
        'pulp.agent.836a7366-4790-482d-b3bc-efee9d42b3cd',
        'reserved_resource_worker-7@prrhss001058.infra.novamedia.com.celery.pidbox',
        'reserved_resource_worker-7@prrhss001058.infra.novamedia.com.dq',
    ]
def test_qpid_stat_u():
    """Parse 'qpid-stat -u' output and check selected per-subscription fields."""
    subscriptions = qpid_stat.QpidStatU(context_wrap(QPID_STAT_U))

    # row index -> expected subset of parsed fields for that row.
    expected_fields = {
        0: {'subscr': '0',
            'queue': '00d6cc19-15fc-4b7c-af3c-6a38e7bb386d:1.0',
            'conn': 'qpid.10.20.1.10:5671-10.20.1.10:33787',
            'procName': 'celery', 'procId': '21409', 'browse': '',
            'acked': '', 'excl': '', 'creditMode': 'CREDIT',
            'delivered': '2', 'sessUnacked': '0'},
        1: {'queue': 'pulp.agent.c6a430bc-5ec7-42f8-99ce-f320ed0b9113',
            'conn': 'qpid.10.20.1.10:5671-10.30.0.148:57423',
            'acked': 'Y', 'procName': 'goferd'},
        2: {'subscr': '1',
            'queue': 'prrhss001058.infra.novamedia.com:event',
            'conn': 'qpid.10.20.1.10:5671-10.20.1.10:33848',
            'procName': 'Qpid Java Client', 'procId': '21066',
            'browse': '', 'acked': 'Y', 'excl': 'Y',
            'creditMode': 'WINDOW', 'delivered': '2,623',
            'sessUnacked': '0'},
        3: {'delivered': '363,228'},
        5: {'subscr': 'katello_event_queue'},
    }
    for row, fields in expected_fields.items():
        for key, value in fields.items():
            assert subscriptions.data[row].get(key) == value

    # test iteration
    assert [entry['queue'] for entry in subscriptions] == [
        '00d6cc19-15fc-4b7c-af3c-6a38e7bb386d:1.0',
        'pulp.agent.c6a430bc-5ec7-42f8-99ce-f320ed0b9113',
        'prrhss001058.infra.novamedia.com:event',
        'celeryev.4c77bd03-1cde-49eb-bdc0-b7c38f9ff93d',
        'celery',
        'katello_event_queue',
    ]
|
PaulWay/insights-core
|
insights/parsers/tests/test_qpid_stat.py
|
Python
|
apache-2.0
| 7,553
|
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
"""Tests for the Hello World"""
import os
from autopilot.matchers import Eventually
from testtools.matchers import Equals
import Piano
class MainViewTestCase(Piano.ClickAppTestCase):
    """Generic tests for the Hello World"""

    def test_initial_label(self):
        # Before any interaction the label shows the initial greeting.
        greeting = self.main_view.select_single(objectName='label')
        self.assertThat(greeting.text, Equals('Hello..'))

    def test_click_button_should_update_label(self):
        # Clicking the button should eventually update the label text.
        update_button = self.main_view.select_single(objectName='button')
        self.pointing_device.click_object(update_button)
        greeting = self.main_view.select_single(objectName='label')
        self.assertThat(greeting.text, Eventually(Equals('..world!')))
|
ZacharyIgielman/uPiano
|
Piano/tests/autopilot/Piano/test_main.py
|
Python
|
cc0-1.0
| 768
|
# -*- coding: utf-8 -*-
# Copyright (C) 2013 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
    'name': 'Brazilian Localization Account Product',
    'summary': "Brazilian Localization Account Product",
    'category': 'Localisation',
    'license': 'AGPL-3',
    'author': 'Akretion, Odoo Community Association (OCA)',
    'website': 'http://odoo-brasil.org',
    'version': '8.0.2.0.1',
    'depends': [
        'l10n_br_data_account',
        'account_product_fiscal_classification',
    ],
    'data': [
        'l10n_br_account_product_sequence.xml',
        'account_invoice_workflow.xml',
        'data/l10n_br_account_product.cfop.csv',
        'data/l10n_br_account.fiscal.document.csv',
        'data/l10n_br_account_data.xml',
        'data/l10n_br_account_product_data.xml',
        'data/l10n_br_tax.icms_partition.csv',
        'data/ir_cron.xml',
        # NOTE: 'views/l10n_br_account_product_view.xml' was previously listed
        # twice in this list, which made Odoo load the same view file twice.
        # The duplicate entry was removed; the file is loaded once, here.
        'views/l10n_br_account_product_view.xml',
        'views/l10n_br_account_view.xml',
        'views/account_view.xml',
        'wizard/l10n_br_account_invoice_costs_ratio_view.xml',
        'views/account_invoice_view.xml',
        'views/res_partner_view.xml',
        'views/res_company_view.xml',
        'views/account_product_fiscal_classification_view.xml',
        'views/product_view.xml',
        'views/res_country_view.xml',
        'wizard/l10n_br_account_nfe_export_invoice_view.xml',
        'wizard/l10n_br_account_nfe_export_view.xml',
        'wizard/l10n_br_account_document_status_sefaz_view.xml',
        'wizard/account_invoice_refund_view.xml',
        'security/l10n_br_account_product_security.xml',
        'security/ir.model.access.csv',
        'report/account_invoice_report_view.xml',
    ],
    'demo': [
        'demo/account_tax_code_demo.xml',
        'demo/account_tax_demo.xml',
        'demo/base_demo.xml',
        'demo/product_demo.xml',
        'demo/l10n_br_account_product_demo.xml',
        'demo/account_fiscal_position_rule_demo.xml',
        'demo/product_taxes.yml',
    ],
    'test': [
        'test/account_customer_invoice.yml',
        'test/account_supplier_invoice.yml',
        'test/account_invoice_refund.yml',
        'test/nfe_export.yml',
    ],
    'installable': True,
    'auto_install': False,
}
|
rvalyi/l10n-brazil
|
l10n_br_account_product/__openerp__.py
|
Python
|
agpl-3.0
| 2,324
|
import unittest
from book_store import calculate_total
# Tests adapted from `problem-specifications//canonical-data.json` @ v1.0.1
class BookStoreTests(unittest.TestCase):
    """Price checks for the book-store discount exercise."""

    def _assert_total(self, basket, expected):
        # All totals are compared to two decimal places.
        self.assertAlmostEqual(calculate_total(basket), expected, places=2)

    def test_only_a_single_book(self):
        self._assert_total([1], 8.00)

    def test_two_of_the_same_book(self):
        self._assert_total([2, 2], 16.00)

    def test_empty_basket(self):
        self._assert_total([], 0.00)

    def test_two_different_books(self):
        self._assert_total([1, 2], 15.20)

    def test_three_different_books(self):
        self._assert_total([1, 2, 3], 21.60)

    def test_four_different_books(self):
        self._assert_total([1, 2, 3, 4], 25.60)

    def test_five_different_books(self):
        self._assert_total([1, 2, 3, 4, 5], 30.00)

    def test_two_groups_of_4_is_cheaper_than_group_of_5_plus_group_of_3(self):
        self._assert_total([1, 1, 2, 2, 3, 3, 4, 5], 51.20)

    def test_group_of_4_plus_group_of_2_is_cheaper_than_2_groups_of_3(self):
        self._assert_total([1, 1, 2, 2, 3, 4], 40.80)

    def test_two_each_of_first_4_books_and_1_copy_each_of_rest(self):
        self._assert_total([1, 1, 2, 2, 3, 3, 4, 4, 5], 55.60)

    def test_two_copies_of_each_book(self):
        self._assert_total([1, 1, 2, 2, 3, 3, 4, 4, 5, 5], 60.00)

    def test_three_copies_of_first_book_and_2_each_of_remaining(self):
        self._assert_total([1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 1], 68.00)

    def test_three_each_of_first_2_books_and_2_each_of_remaining_books(self):
        self._assert_total([1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 1, 2], 75.20)


if __name__ == '__main__':
    unittest.main()
|
mweb/python
|
exercises/book-store/book_store_test.py
|
Python
|
mit
| 2,408
|
##########################################################################
#
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import gc
import sys
import math
import unittest
import imath
import IECore
import IECoreScene
class SceneInterfaceTest(unittest.TestCase):
    """Tests for SceneInterface creation and SharedSceneInterfaces caching."""

    __testFile = "/tmp/test.scc"
    __testFileUpper = "/tmp/test.SCC"

    def writeSCC(self):
        """Write a small scene cache: root attribute, child "t", grandchild "s"."""
        root = IECoreScene.SceneCache(SceneInterfaceTest.__testFile, IECore.IndexedIO.OpenMode.Write)
        root.writeAttribute("w", IECore.BoolData(True), 1.0)

        translated = root.createChild("t")
        translated.writeTransform(IECore.M44dData(imath.M44d().translate(imath.V3d(1, 0, 0))), 1.0)
        translated.writeAttribute("wuh", IECore.BoolData(True), 1.0)

        sphere = translated.createChild("s")
        sphere.writeObject(IECoreScene.SpherePrimitive(1), 1.0)
        sphere.writeAttribute("glah", IECore.BoolData(True), 1.0)

    def testGet(self):
        """get() shares one instance per file (path case ignored); create() never shares."""
        self.writeSCC()

        shared_a = IECoreScene.SharedSceneInterfaces.get(SceneInterfaceTest.__testFile)
        shared_b = IECoreScene.SharedSceneInterfaces.get(SceneInterfaceTest.__testFile)
        shared_upper = IECoreScene.SharedSceneInterfaces.get(SceneInterfaceTest.__testFileUpper)
        self.assertTrue(shared_a.isSame(shared_b))
        self.assertTrue(shared_a.isSame(shared_upper))

        created_a = IECoreScene.SceneInterface.create(SceneInterfaceTest.__testFile, IECore.IndexedIO.OpenMode.Read)
        created_b = IECoreScene.SceneInterface.create(SceneInterfaceTest.__testFile, IECore.IndexedIO.OpenMode.Read)
        created_upper = IECoreScene.SceneInterface.create(SceneInterfaceTest.__testFileUpper, IECore.IndexedIO.OpenMode.Read)
        self.assertFalse(created_a.isSame(created_b))
        self.assertFalse(created_a.isSame(shared_a))
        self.assertFalse(created_a.isSame(shared_b))
        self.assertFalse(created_a.isSame(created_upper))
        self.assertFalse(created_upper.isSame(shared_a))
        self.assertFalse(created_upper.isSame(shared_b))
        self.assertFalse(created_upper.isSame(shared_upper))

    def testErase(self):
        """erase() drops the cached instance so subsequent get() builds a new one."""
        self.writeSCC()

        cached_before = IECoreScene.SharedSceneInterfaces.get(SceneInterfaceTest.__testFile)
        cached_again = IECoreScene.SharedSceneInterfaces.get(SceneInterfaceTest.__testFile)
        self.assertTrue(cached_before.isSame(cached_again))

        IECoreScene.SharedSceneInterfaces.erase(SceneInterfaceTest.__testFile)

        cached_after = IECoreScene.SharedSceneInterfaces.get(SceneInterfaceTest.__testFile)
        self.assertFalse(cached_after.isSame(cached_before))
        cached_final = IECoreScene.SharedSceneInterfaces.get(SceneInterfaceTest.__testFile)
        self.assertFalse(cached_final.isSame(cached_before))
        self.assertTrue(cached_final.isSame(cached_after))

    def testClear(self):
        """clear() empties the whole cache so subsequent get() builds a new one."""
        self.writeSCC()

        cached_before = IECoreScene.SharedSceneInterfaces.get(SceneInterfaceTest.__testFile)
        cached_again = IECoreScene.SharedSceneInterfaces.get(SceneInterfaceTest.__testFile)
        self.assertTrue(cached_before.isSame(cached_again))

        IECoreScene.SharedSceneInterfaces.clear()

        cached_after = IECoreScene.SharedSceneInterfaces.get(SceneInterfaceTest.__testFile)
        self.assertFalse(cached_after.isSame(cached_before))
        cached_final = IECoreScene.SharedSceneInterfaces.get(SceneInterfaceTest.__testFile)
        self.assertFalse(cached_final.isSame(cached_before))
        self.assertTrue(cached_final.isSame(cached_after))

    def testVisibilityName(self):
        """The visibility attribute name is the fixed constant "scene:visible"."""
        self.assertEqual(IECoreScene.SceneInterface.visibilityName, "scene:visible")


if __name__ == "__main__":
    unittest.main()
|
appleseedhq/cortex
|
test/IECoreScene/SceneInterfaceTest.py
|
Python
|
bsd-3-clause
| 5,128
|
"""
Utility Mixins for unit tests
"""
import json
import sys
from mock import patch
from django.conf import settings
from django.core.urlresolvers import clear_url_caches, resolve
from django.test import TestCase
from util.db import OuterAtomic, CommitOnSuccessManager
class UrlResetMixin(object):
    """Mixin to reset urls.py before and after a test

    Django memoizes the function that reads the urls module (whatever module
    urlconf names). The module itself is also stored by python in sys.modules.
    To fully reload it, we need to reload the python module, and also clear django's
    cache of the parsed urls.

    However, the order in which we do this doesn't matter, because neither one will
    get reloaded until the next request

    Doing this is expensive, so it should only be added to tests that modify settings
    that affect the contents of urls.py
    """

    def _reset_urls(self, urlconf_modules):
        """Reset `urls.py` for a set of Django apps."""
        for urlconf in urlconf_modules:
            if urlconf in sys.modules:
                # NOTE(review): `reload` is the Python 2 builtin here; on
                # Python 3 this would need importlib.reload — presumably this
                # module targets Python 2 (it also uses the `mock` package).
                reload(sys.modules[urlconf])
        clear_url_caches()

        # Resolve a URL so that the new urlconf gets loaded
        resolve('/')

    def setUp(self, *args, **kwargs):
        """Reset Django urls before tests and after tests

        If you need to reset `urls.py` from a particular Django app (or apps),
        specify these modules in *args.

        Examples:

        # Reload only the root urls.py
        super(MyTestCase, self).setUp()

        # Reload urls from my_app
        super(MyTestCase, self).setUp("my_app.urls")

        # Reload urls from my_app and another_app
        super(MyTestCase, self).setUp("my_app.urls", "another_app.urls")
        """
        super(UrlResetMixin, self).setUp(**kwargs)

        # Always reset the root urlconf; any extra app urlconfs come from *args.
        urlconf_modules = [settings.ROOT_URLCONF]
        if args:
            urlconf_modules.extend(args)

        self._reset_urls(urlconf_modules)
        # Restore the urlconfs again when the test finishes.
        self.addCleanup(lambda: self._reset_urls(urlconf_modules))
class EventTestMixin(object):
    """
    Generic mixin for verifying that events were emitted during a test.
    """

    def setUp(self, tracker):
        """Patch the given tracker import path and keep the resulting mock."""
        super(EventTestMixin, self).setUp()
        self.tracker = tracker
        tracker_patcher = patch(self.tracker)
        self.mock_tracker = tracker_patcher.start()
        self.addCleanup(tracker_patcher.stop)

    def assert_no_events_were_emitted(self):
        """
        Ensures no events were emitted since the last event related assertion.
        """
        emit_called = self.mock_tracker.emit.called  # pylint: disable=maybe-no-member
        self.assertFalse(emit_called)

    def assert_event_emitted(self, event_name, **kwargs):
        """
        Verify that an event was emitted with the given parameters.
        """
        emit_mock = self.mock_tracker.emit  # pylint: disable=maybe-no-member
        emit_mock.assert_any_call(event_name, kwargs)

    def reset_tracker(self):
        """
        Reset the mock tracker in order to forget about old events.
        """
        self.mock_tracker.reset_mock()
class PatchMediaTypeMixin(object):
    """
    Generic mixin for verifying unsupported media type in PATCH
    """

    def test_patch_unsupported_media_type(self):
        # A PATCH with an unsupported content type must yield HTTP 415.
        empty_payload = json.dumps({})
        response = self.client.patch(  # pylint: disable=no-member
            self.url,
            empty_payload,
            content_type=self.unsupported_media_type,
        )
        self.assertEqual(response.status_code, 415)
def patch_testcase():
    """
    Disable commit_on_success decorators for tests in TestCase subclasses.

    Since tests in TestCase classes are wrapped in an atomic block, we
    cannot use transaction.commit() or transaction.rollback().
    https://docs.djangoproject.com/en/1.8/topics/testing/tools/#django.test.TransactionTestCase
    """

    def enter_atomics_wrapper(wrapped_func):
        """
        Wrapper for TestCase._enter_atomics
        """
        # Unwrap the classmethod so we can re-wrap the raw function below.
        wrapped_func = wrapped_func.__func__

        def _wrapper(*args, **kwargs):
            """
            Method that performs atomic-entering accounting.
            """
            # While inside a TestCase atomic block, turn off commit-on-success
            # behaviour and allow nested outer atomics.
            CommitOnSuccessManager.ENABLED = False
            OuterAtomic.ALLOW_NESTED = True

            # Count how many test-level atomics have been entered; presumably
            # consumed by OuterAtomic elsewhere — not visible in this file.
            if not hasattr(OuterAtomic, 'atomic_for_testcase_calls'):
                OuterAtomic.atomic_for_testcase_calls = 0
            OuterAtomic.atomic_for_testcase_calls += 1

            return wrapped_func(*args, **kwargs)
        return classmethod(_wrapper)

    def rollback_atomics_wrapper(wrapped_func):
        """
        Wrapper for TestCase._rollback_atomics
        """
        # Unwrap the classmethod so we can re-wrap the raw function below.
        wrapped_func = wrapped_func.__func__

        def _wrapper(*args, **kwargs):
            """
            Method that performs atomic-rollback accounting.
            """
            # Leaving the test-level atomic block: restore normal behaviour
            # and decrement the entry counter.
            CommitOnSuccessManager.ENABLED = True
            OuterAtomic.ALLOW_NESTED = False
            OuterAtomic.atomic_for_testcase_calls -= 1

            return wrapped_func(*args, **kwargs)
        return classmethod(_wrapper)

    # pylint: disable=protected-access
    # Replace TestCase's atomic management hooks with the accounting wrappers.
    TestCase._enter_atomics = enter_atomics_wrapper(TestCase._enter_atomics)
    TestCase._rollback_atomics = rollback_atomics_wrapper(TestCase._rollback_atomics)
|
hamzehd/edx-platform
|
common/djangoapps/util/testing.py
|
Python
|
agpl-3.0
| 5,229
|
from __future__ import division, print_function, unicode_literals
# This code is so you can run the samples without installing the package
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
#
testinfo = "s, t 0.33, s, t 0.66, s, t 1.1, s, q"
tags = "MoveCornerUp"
import pyglet
import cocos
from cocos.director import director
from cocos.actions import *
from cocos.layer import *
class BackgroundLayer(cocos.layer.Layer):
    """Layer that draws a static background image at the layer origin."""

    def __init__(self):
        super(BackgroundLayer, self).__init__()
        # Loaded through pyglet's resource system (searched on its resource path).
        self.img = pyglet.resource.image('background_image.png')

    def draw( self ):
        # Opaque white so the texture is drawn untinted.
        glColor4ub(255, 255, 255, 255)
        glPushMatrix()
        # Apply this layer's transform before blitting at the origin;
        # the matrix push/pop keeps the change local to this draw call.
        self.transform()
        self.img.blit(0,0)
        glPopMatrix()
def main():
    """Build a scene with the background layer and run the MoveCornerUp action."""
    director.init(resizable=True)

    scene = cocos.scene.Scene()
    scene.add(BackgroundLayer(), z=0)
    scene.do(MoveCornerUp(duration=1))

    director.run(scene)
|
vyscond/cocos
|
test/test_move_corner_up.py
|
Python
|
bsd-3-clause
| 1,021
|
#!/usr/bin/env python3
import os
import re
import json
import plistlib
import argparse
from typing import List
# region Global sets
# A set of category folder names in current sample viewer.
categories = {
'Maps',
'Layers',
'Features',
'Display information',
'Search',
'Edit data',
'Geometry',
'Route and directions',
'Analysis',
'Cloud and portal',
'Scenes',
'Utility network',
'Augmented reality'
}
# endregion
# region Static functions
# Mapping from plist display categories to the categories used in
# `README.metadata.json` files and on the website. Defined once at module
# level instead of being rebuilt on every call.
_PLIST_TO_JSON_CATEGORIES = {
    'Maps': 'Maps',
    'Layers': 'Layers',
    'Features': 'Features',
    'Display Information': 'Display information',
    'Search': 'Search',
    'Edit Data': 'Edit data',
    'Geometry': 'Geometry',
    'Route & Directions': 'Route and directions',
    'Analysis': 'Analysis',
    'Cloud & Portal': 'Cloud and portal',
    'Scenes': 'Scenes',
    'Utility Network': 'Utility network',
    'Augmented Reality': 'Augmented reality'
}


def get_plist_cat_mapping(plist_category: str) -> str:
    """
    Get the mapping between plist categories and the ones on website.

    :param plist_category: The category in `ContentPlist.plist`.
    :return: The category in `README.metadata.json` files, which also defines
        the online categories on
        https://developers.arcgis.com/ios/latest/swift/sample-code/
        Returns None for an unknown plist category.
    """
    return _PLIST_TO_JSON_CATEGORIES.get(plist_category)
def load_plist(plist_path: str) -> List[dict]:
    """
    Read and deserialize a plist file.

    :param plist_path: The path to plist file.
    :return: The plist content. In our particular case, a list of dicts.
    """
    with open(plist_path, 'rb') as plist_file:
        return plistlib.load(plist_file)
def get_plist_categories(plist: List[dict]) -> List[str]:
    """
    Collect the display names of all categories in the plist.

    :param plist: The plist dictionary.
    :return: A list of category display names (None for entries lacking one).
    """
    return [entry.get('displayName') for entry in plist]
def get_folder_name_from_path(path: str, index: int = -1) -> str:
    """
    Get one path component from a full path.

    :param path: A string of a full/absolute path to a folder.
    :param index: Which component to return; defaults to -1 for the most
        trailing folder in the path.
    :return: The folder name.
    """
    components = os.path.normpath(path).split(os.path.sep)
    return components[index]
def get_readme_title(head_string: str) -> str:
    """
    Parse the head of README and get title.

    :param head_string: A string containing title, description and images.
    :return: Stripped title string.
    :raises Exception: If the head has fewer than three non-empty lines.
    """
    non_empty_lines = [line for line in head_string.splitlines() if line]
    if len(non_empty_lines) < 3:
        raise Exception('README should contain title, description and image.')
    # Drop the leading '#'/space markdown heading characters and any
    # trailing whitespace.
    return non_empty_lines[0].lstrip('# ').rstrip()
# endregion
class SampleNames:
    """Bundle of the names that must agree for one sample (see __init__)."""

    def __init__(self, folder_path: str):
        """
        Given a folder path of a sample, get everything we need to compare.

        - Titles
        - sample’s folder name
        - sample’s `README.md` title
        - `README.metadata.json` title
        - sample's plist title
        - Category names
        - sample’s enclosing category folder name
        - `README.metadata.json` category

        :param folder_path: The path to a sample's folder.
        """
        self.folder_path = folder_path
        # Last path component is the sample folder; the one above it is the
        # category folder.
        self.folder_name = get_folder_name_from_path(folder_path)
        self.folder_category = get_folder_name_from_path(folder_path, -2)
        self.json_title, self.json_category = self.get_json_title_category()
        self.readme_title = self.get_readme_title()

    def get_json_title_category(self) -> (str, str):
        """Read `README.metadata.json` and return its (title, category) pair."""
        json_path = os.path.join(self.folder_path, 'README.metadata.json')
        try:
            json_file = open(json_path, 'r')
            json_data = json.load(json_file)
        except Exception as err:
            print(f'Error reading JSON - {self.folder_name} - {err}')
            raise err
        else:
            json_file.close()
        return json_data['title'], json_data['category']

    def get_readme_title(self):
        """Read `README.md` and return the stripped H1 title of its head section."""
        readme_path = os.path.join(self.folder_path, 'README.md')
        try:
            readme_file = open(readme_path, 'r')
            # read the readme content into a string
            readme_contents = readme_file.read()
        except Exception as err:
            print(f'Error reading README - {self.folder_name} - {err}.')
            raise err
        else:
            readme_file.close()
        # Split on H2 ('##') headings; part 0 is the title/description/image head.
        pattern = re.compile(r'^#{2}(?!#)\s(.*)', re.MULTILINE)
        readme_parts = re.split(pattern, readme_contents)
        return get_readme_title(readme_parts[0])
# region Main wrapper functions
def single_sample_check_diff(folder_path: str, plist: List[dict]):
    """
    Compare one sample's names across the plist, its folder, README and JSON.

    :param folder_path: The path to a sample's folder.
    :param plist: The plist dictionary. In our case is a list of dicts.
    :return: None. Raises if any name mismatch is found; non-fatal mismatches
        are counted and printed first, then raised as a single exception.
    """
    sample_names = SampleNames(folder_path)
    # 1. Check if plist category matches category folder name.
    # If they don't match, no need to go further.
    plist_cats = list(
        filter(lambda d: get_plist_cat_mapping(d.get('displayName')) ==
               sample_names.folder_category, plist))
    if len(plist_cats) != 1:
        # Nearly impossible.
        # This happens when the category name in plist does
        # not match any of the category folder names.
        # Currently there are only 13 categories and it is easy to tell.
        raise Exception(f'Error plist category does not match category folder.')
    err_count = 0
    # 2. Check if plist category matches json.category.
    plist_category = plist_cats[0].get('displayName')
    matched_category = get_plist_cat_mapping(plist_category)
    if matched_category != sample_names.json_category:
        err_count += 1
        print(f'    {err_count}. plist category {matched_category} '
              f'does not match json.category.')
    # Get the children, a list of sample dicts.
    plist_children: List[dict] = plist_cats[0].get('children')
    # 3. Check if plist title matches sample folder name.
    plist_names = list(
        filter(lambda d: d.get('displayName') == sample_names.folder_name,
               plist_children))
    if len(plist_names) != 1:
        err_count += 1
        print(f'    {err_count}. plist title does not match folder name.')
    else:
        # Title checks 4 and 5 only make sense if the plist entry was found.
        plist_name = plist_names[0].get('displayName')
        # 4. Check if plist title matches sample README title.
        if plist_name != sample_names.readme_title:
            err_count += 1
            print(f'    {err_count}. plist title "{plist_name}" does not match '
                  f'README title "{sample_names.readme_title}".')
        # 5. Check if plist title matches sample json.title.
        if plist_name != sample_names.json_title:
            err_count += 1
            print(f'    {err_count}. plist title "{plist_name}" does not match '
                  f'json.title "{sample_names.json_title}".')
    if err_count > 0:
        raise Exception(f'{err_count} error(s) occurred during checking '
                        f'/{sample_names.folder_category}'
                        f'/{sample_names.folder_name}.')
def all_samples(path: str, plist: List[dict]):
    """
    Run the check on all samples.

    :param path: The path to 'arcgis-ios-sdk-samples' folder.
    :param plist: The plist dictionary. In our case is a list of dicts.
    :return: None. Throws if exception occurs.
    """
    exception_count = 0
    for root, dirs, files in os.walk(path):
        # Get parent folder name.
        parent_folder_name = get_folder_name_from_path(root)
        # If parent folder name is a valid category name.
        if parent_folder_name in categories:
            for dir_name in dirs:
                sample_path = os.path.join(root, dir_name)
                # Omit empty folders - they are omitted by Git.
                if len([f for f in os.listdir(sample_path)
                        if not f.startswith('.DS_Store')]) == 0:
                    continue
                try:
                    single_sample(sample_path, plist)
                except Exception as err:
                    # Keep checking the remaining samples so that one bad
                    # sample does not hide the rest; count and report each.
                    exception_count += 1
                    print(f'{exception_count}. {err}')
    # Throw once if there are exceptions.
    if exception_count > 0:
        raise Exception('Error(s) occurred during checking all samples.')
def single_sample(path: str, plist: List[dict]):
    """
    Run the check on a single sample.

    :param path: The path to a sample's folder.
    :param plist: The plist dictionary. In our case is a list of dicts.
    :return: None. Raises if a name inconsistency is found.
    """
    # The previous try/except wrapper only re-raised the exception unchanged,
    # so the check is now called directly; any exception propagates as before.
    single_sample_check_diff(path, plist)
def main():
    """
    Parse the command line and run the title check.

    Exactly one of -a/--all (check every sample under the SDK samples folder)
    or -s/--single (check one sample folder) must be supplied. Raises on
    invalid arguments, an unreadable plist, or any title inconsistency.
    """
    msg = 'Title checker script. Run it against the /arcgis-ios-sdk-samples ' \
          'folder or a single sample folder. ' \
          'On success: Script will exit with zero. ' \
          'On failure: Title inconsistency will print to console and the ' \
          'script will exit with non-zero code.'
    parser = argparse.ArgumentParser(description=msg)
    parser.add_argument('-a', '--all', help='path to arcgis-ios-sdk-samples '
                                            'folder')
    parser.add_argument('-s', '--single', help='path to single sample folder.')
    args = parser.parse_args()

    # Choose the checker and locate the shared ContentPList.plist. The former
    # per-branch try/except blocks only re-raised unchanged and the plist
    # loading was duplicated, so both branches are consolidated here.
    if args.all:
        plist_path = os.path.normpath(
            args.all + '/Content Display Logic/ContentPList.plist')
        checker, target = all_samples, args.all
    elif args.single:
        plist_path = os.path.normpath(
            args.single + '/../../Content Display Logic/ContentPList.plist')
        checker, target = single_sample, args.single
    else:
        raise Exception('Invalid arguments, abort.')

    plist = load_plist(plist_path)
    if not plist:
        raise Exception('Error loading plist.')
    checker(target, plist)
# endregion
# Script entry point: print any failure message and exit non-zero so CI can
# detect title inconsistencies from the exit code.
if __name__ == '__main__':
    try:
        main()
    except Exception as error:
        print(f'{error}')
        exit(1)
|
Esri/arcgis-runtime-samples-ios
|
Scripts/CI/README_Metadata_StyleCheck/title_differ.py
|
Python
|
apache-2.0
| 10,499
|
"""
This is your project's main settings file that can be committed to your
repo. If you need to override a setting locally, use local.py
"""
import os
import logging
# Normally you should not import ANYTHING from Django directly
# into your settings, but ImproperlyConfigured is an exception.
from django.core.exceptions import ImproperlyConfigured
def get_env_setting(setting):
    """Return the value of the environment variable *setting*.

    Raises ImproperlyConfigured when the variable is not set, so a
    misconfigured deployment fails loudly at startup.
    """
    value = os.environ.get(setting)
    if value is None:
        raise ImproperlyConfigured("Set the %s env variable" % setting)
    return value
# Your project root
# NOTE(review): this concatenates without a path separator, producing a
# component like ".../settings../../../"; os.path.abspath still normalizes
# it, but the "<dir>.." segment silently consumes one "..". Confirm the
# resolved directory is really the intended project root before changing it.
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__) + "../../../")
# Presumably URL prefixes exempt from locale handling — verify against the
# middleware/urlconf that consumes this custom setting.
SUPPORTED_NONLOCALES = ['media', 'admin', 'static']
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# Defines the views served for root URLs.
ROOT_URLCONF = 'mail_sender.urls'
# Application definition
INSTALLED_APPS = (
    # Django contrib apps
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.admin',
    'django.contrib.humanize',
    'django.contrib.syndication',
    'django.contrib.staticfiles',
    # Third-party apps, patches, fixes
    'djcelery',
    'debug_toolbar',
    'compressor',
    # Application base, containing global templates.
    'base',
    # Local apps, referenced via appname
)
# Place bcrypt first in the list, so it will be the default password hashing
# mechanism
PASSWORD_HASHERS = (
    'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
    'django.contrib.auth.hashers.BCryptPasswordHasher',
    'django.contrib.auth.hashers.PBKDF2PasswordHasher',
    'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
    'django.contrib.auth.hashers.SHA1PasswordHasher',
    'django.contrib.auth.hashers.MD5PasswordHasher',
    'django.contrib.auth.hashers.CryptPasswordHasher',
)
# Sessions
#
# By default, be at least somewhat secure with our session cookies.
SESSION_COOKIE_HTTPONLY = True
# Set this to true if you are using https
SESSION_COOKIE_SECURE = False
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.example.com/media/"
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'media')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.example.com/media/", "http://example.com/media/"
MEDIA_URL = '/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.example.com/static/"
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
# URL prefix for static files.
# Example: "http://media.example.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Los_Angeles'
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    'compressor.finders.CompressorFinder',
)
MIDDLEWARE_CLASSES = [
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'debug_toolbar.middleware.DebugToolbarMiddleware',
]
TEMPLATE_CONTEXT_PROCESSORS = [
    'django.contrib.auth.context_processors.auth',
    'django.core.context_processors.debug',
    'django.core.context_processors.media',
    'django.core.context_processors.request',
    'django.core.context_processors.i18n',
    'django.core.context_processors.static',
    'django.core.context_processors.csrf',
    'django.core.context_processors.tz',
    'django.contrib.messages.context_processors.messages',
]
TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or
    # "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    os.path.join(PROJECT_ROOT, 'templates'),
)
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
)
# Standard unittest-discovery test runner.
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
def custom_show_toolbar(request):
    """Show the debug toolbar only to superusers, and only when DEBUG is on."""
    # Imported lazily so this settings module can be loaded before Django's
    # settings machinery is fully configured.
    from django.conf import settings
    return request.user.is_superuser if settings.DEBUG else False
# Debug-toolbar behaviour; SHOW_TOOLBAR_CALLBACK is the dotted path to
# custom_show_toolbar() defined above (superusers only, DEBUG only).
DEBUG_TOOLBAR_CONFIG = {
    'INTERCEPT_REDIRECTS': False,
    'SHOW_TOOLBAR_CALLBACK': 'mail_sender.settings.base.custom_show_toolbar',
    'HIDE_DJANGO_SQL': True,
    'TAG': 'body',
    'SHOW_TEMPLATE_CONTEXT': True,
    'ENABLE_STACKTRACES': True,
}
# Uncomment the following setting if you get an ImportError such as:
# ImproperlyConfigured: The included urlconf projectname.urls doesn't have any patterns in it
# See:
# http://stackoverflow.com/questions/20963856/improperlyconfigured-the-included-urlconf-project-urls-doesnt-have-any-patte/21005346#21005346
# http://django-debug-toolbar.readthedocs.org/en/1.0/installation.html#explicit-setup
#DEBUG_TOOLBAR_PATCH_SETTINGS = False
# DEBUG_TOOLBAR_PANELS = (
#     #'debug_toolbar_user_panel.panels.UserPanel',
#     'debug_toolbar.panels.version.VersionDebugPanel',
#     'debug_toolbar.panels.timer.TimerDebugPanel',
#     'debug_toolbar.panels.settings_vars.SettingsVarsDebugPanel',
#     'debug_toolbar.panels.headers.HeaderDebugPanel',
#     'debug_toolbar.panels.request_vars.RequestVarsDebugPanel',
#     'debug_toolbar.panels.template.TemplateDebugPanel',
#     'debug_toolbar.panels.sql.SQLDebugPanel',
#     'debug_toolbar.panels.signals.SignalDebugPanel',
#     'debug_toolbar.panels.logger.LoggingPanel',
# )
# Specify a custom user model to use
#AUTH_USER_MODEL = 'accounts.MyUser'
FILE_UPLOAD_PERMISSIONS = 0o0664
# The WSGI Application to use for runserver
WSGI_APPLICATION = 'mail_sender.wsgi.application'
# Define your database connections
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.',
        'NAME': '',
        'USER': '',
        'PASSWORD': '',
        'HOST': '',
        'PORT': '',
        #'OPTIONS': {
        #    'init_command': 'SET storage_engine=InnoDB',
        #    'charset' : 'utf8',
        #    'use_unicode' : True,
        #},
        #'TEST_CHARSET': 'utf8',
        #'TEST_COLLATION': 'utf8_general_ci',
    },
    # 'slave': {
    #     ...
    # },
}
# Uncomment this and set to all slave DBs in use on the site.
# SLAVE_DATABASES = ['slave']
# Recipients of traceback emails and other notifications.
ADMINS = (
    #('name', 'mail@xxx.com'),
)
MANAGERS = ADMINS
# SECURITY WARNING: don't run with debug turned on in production!
# Debugging displays nice error messages, but leaks memory. Set this to False
# on all server instances and True only for development.
DEBUG = TEMPLATE_DEBUG = False
# Is this a development instance? Set this to True on development/master
# instances and False on stage/prod.
DEV = False
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# SECURITY WARNING: keep the secret key used in production secret!
# Hardcoded values can leak through source control.
# This is an example method of getting the value from an environment setting.
# Uncomment to use, and then make sure you set the SECRET_KEY environment variable.
# This is good to use in production, and on services that support it such as Heroku.
#SECRET_KEY = get_env_setting('SECRET_KEY')
# Uncomment these to activate and customize Celery:
# CELERY_ALWAYS_EAGER = False  # required to activate celeryd
# BROKER_HOST = 'localhost'
# BROKER_PORT = 5672
# BROKER_USER = 'django'
# BROKER_PASSWORD = 'django'
# BROKER_VHOST = 'django'
# CELERY_RESULT_BACKEND = 'amqp'
# BUG FIX: this was ('127.0.0.1') — a parenthesized *string*, not a tuple
# (missing trailing comma). Membership tests against a string do substring
# matching (e.g. '7.0.0.1' in '127.0.0.1' is True), so the debug toolbar /
# INTERNAL_IPS check behaved incorrectly. Make it a proper 1-tuple.
INTERNAL_IPS = ('127.0.0.1',)
# Enable this option for memcached
#CACHE_BACKEND= "memcached://127.0.0.1:11211/"
# Set this to true if you use a proxy that sets X-Forwarded-Host
#USE_X_FORWARDED_HOST = False
SERVER_EMAIL = "webmaster@example.com"
DEFAULT_FROM_EMAIL = "webmaster@example.com"
SYSTEM_EMAIL_PREFIX = "[mail_sender]"
## Log settings
LOG_LEVEL = logging.INFO
HAS_SYSLOG = True
SYSLOG_TAG = "http_app_mail_sender"  # Make this unique to your project.
# Common Event Format logging parameters
#CEF_PRODUCT = 'mail_sender'
#CEF_VENDOR = 'Your Company'
#CEF_VERSION = '0'
#CEF_DEVICE_VERSION = '0'
|
lxdiyun/mail_sender
|
mail_sender/settings/base.py
|
Python
|
bsd-3-clause
| 9,917
|
from airflow.models import DAG
from airflow.operators.dummy_operator import DummyOperator
from datetime import datetime, timedelta
from airflow.operators import PythonOperator
from airflow.hooks import RedisHook
from airflow.models import Variable
from airflow.hooks import MemcacheHook
from etl_tasks_functions import get_time
from etl_tasks_functions import subtract_time
from subdags.interference_utility import calculate_cambium_all_ss_dl_interference
from subdags.interference_utility import calculate_cambium_ss_ul_interference
from subdags.interference_utility import calculate_cambium_i_and_m_ss_ul_interference
from subdags.interference_utility import calculate_cambium_bs_interference
from subdags.interference_utility import calculate_cambium_bs_dl_interference
from subdags.interference_utility import calculate_cambiumi_bs_interference
from subdags.interference_utility import calculate_cambiumm_bs_interference
from subdags.interference_utility import backtrack_x_min
from subdags.interference_utility import get_severity_values
from subdags.interference_utility import calculate_age
from subdags.interference_utility import calculate_severity
from airflow.operators import MySqlLoaderOperator
import logging
import itertools
import socket
import random
import traceback
import time
from pprint import pprint
# Default task arguments inherited by the operators in this subdag.
default_args = {
    'owner': 'wireless',
    'depends_on_past': False,
    'start_date': datetime.now() - timedelta(minutes=2),
    'email': ['vipulsharma144@gmail.com'],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': timedelta(minutes=1),
    'provide_context': True,
    'catchup': False,
    # 'queue': 'bash_queue',
    # 'pool': 'backfill',
    # 'priority_weight': 10,
    # 'end_date': datetime(2016, 1, 1),
}
# Shared redis connection used as the scratch store between tasks.
redis_hook_6 = RedisHook(redis_conn_id="redis_hook_10")
# When True, MySQL insert/update tasks are not created (dry-run mode).
DEBUG_MODE = False
down_devices = []
# When True, every BS calc task waits on every SS calc task (all machines);
# when False, each BS task only waits on its own machine's SS task.
set_dependency_for_ss_on_all_machines = False
# Memcache connections: default cluster plus the vrfprv/pub network islands.
memc_con_cluster = MemcacheHook(memc_cnx_id = 'memc_cnx')
vrfprv_memc_con = MemcacheHook(memc_cnx_id = 'vrfprv_memc_cnx')
pub_memc_con = MemcacheHook(memc_cnx_id = 'pub_memc_cnx')
# SQL templates: header gets the per-machine schema ("nocout_<machine>")
# interpolated; the tail carries the column list and named-parameter values.
INSERT_HEADER = "INSERT INTO %s.performance_utilization"
INSERT_TAIL = """
(machine_name,current_value,service_name,avg_value,max_value,age,min_value,site_name,data_source,critical_threshold,device_name,severity,sys_timestamp,ip_address,warning_threshold,check_timestamp,refer )
values
(%(machine_name)s,%(current_value)s,%(service_name)s,%(avg_value)s,%(max_value)s,%(age)s,%(min_value)s,%(site_name)s,%(data_source)s,%(critical_threshold)s,%(device_name)s,%(severity)s,%(sys_timestamp)s,%(ip_address)s,%(warning_threshold)s,%(check_timestamp)s,%(refer)s)
"""
UPDATE_HEADER = "INSERT INTO %s.performance_utilizationstatus"
UPDATE_TAIL = """
(machine_name,current_value,service_name,avg_value,max_value,age,min_value,site_name,data_source,critical_threshold,device_name,severity,sys_timestamp,ip_address,warning_threshold,check_timestamp,refer )
values
(%(machine_name)s,%(current_value)s,%(service_name)s,%(avg_value)s,%(max_value)s,%(age)s,%(min_value)s,%(site_name)s,%(data_source)s,%(critical_threshold)s,%(device_name)s,%(severity)s,%(sys_timestamp)s,%(ip_address)s,%(warning_threshold)s,%(check_timestamp)s,%(refer)s)
ON DUPLICATE KEY UPDATE machine_name = VALUES(machine_name),current_value = VALUES(current_value),age=VALUES(age),site_name=VALUES(site_name),critical_threshold=VALUES(critical_threshold),severity=VALUES(severity),sys_timestamp=VALUES(sys_timestamp),ip_address=VALUES(ip_address),warning_threshold=VALUES(warning_threshold),check_timestamp=VALUES(check_timestamp),refer=VALUES(refer)
"""
# Sentinel KPI values pushed by collectors; 404 rows are dropped entirely.
ERROR_DICT ={404:'Device not found yet',405:'No SS Connected to BS-BS is not skipped'}
ERROR_FOR_DEVICE_OMITTED = [404]
# SECURITY NOTE(review): eval() of Airflow Variable contents executes
# arbitrary code from the metadata DB; anyone with Variable write access can
# run code in the scheduler/workers. Prefer json/ast.literal_eval — confirm
# the stored format before changing.
interference_service_mapping = eval(Variable.get("interference_services_mapping"))
kpi_rules = eval(Variable.get("kpi_rules"))
device_to_service_mapper = eval(Variable.get("interference_kpi_to_formula_mapping"))
def process_interference_kpi(
    parent_dag_name,
    child_dag_name,
    start_date,
    schedule_interval,
    celery_queue,
    bs_tech_sites,
    ss_tech_sites,
    hostnames_per_site,
    hostnames_ss_per_site,
    bs_name,
    ss_name,
    config_sites): #here config site is list of all sites in system_config var
    """Build and return the interference-KPI SubDAG.

    Per site: fetch raw BS/SS service values from memcache into redis.
    Per machine: compute KPIs via the eval()'d formulas from ``kpi_rules``,
    format the rows for the performance_utilization tables, aggregate them,
    and (unless DEBUG_MODE) load them into MySQL via MySqlLoaderOperator.

    :param parent_dag_name: name of the enclosing DAG (used for the dag_id).
    :param child_dag_name: name of this subdag.
    :param start_date, schedule_interval: standard DAG scheduling fields.
    :param celery_queue: celery queue all tasks are routed to.
    :param bs_tech_sites / ss_tech_sites: site lists for this BS/SS technology.
    :param hostnames_per_site / hostnames_ss_per_site: site -> [{hostname,
        ip_address}, ...] mappings for BS and SS devices respectively.
    :param bs_name / ss_name: technology keys into the Variable-driven
        service/formula mappings.
    :param config_sites: all sites present in the system_config Variable;
        sites/machines outside it are skipped.
    :return: the configured airflow.models.DAG instance.
    """
    try:
        sites = bs_tech_sites
        ss_sites = ss_tech_sites
        union_sites = set(bs_tech_sites).union(set(ss_tech_sites))
    except Exception:
        logging.info("Missed Data for the relevant device type ")
        traceback.print_exc()
    interference_kpi_subdag_dag = DAG(
        dag_id="%s.%s"%(parent_dag_name, child_dag_name),
        schedule_interval=schedule_interval,
        start_date=start_date,
        )
    # Merge the per-machine SS KPI dicts (pushed by the SS calc tasks) into
    # one {ip_address: device_data} mapping used by the BS formulas.
    def get_calculated_ss_data():
        ss_data = redis_hook_6.rget("calculated_ss_interference_kpi")
        combined_site_data = {}
        for site_data in ss_data:
            # NOTE(review): eval() of redis payloads — trusted-store assumption.
            site_data = eval(site_data)
            combined_site_data.update(site_data)
        return combined_site_data
    # Shape the calculated BS KPI dicts into performance_utilization row
    # dicts (one row per service per sector) and push them to redis.
    def format_bs_data(**kwargs):
        machine_name = kwargs.get("params").get("machine_name")
        device_type = kwargs.get("params").get("technology")
        bs_data = redis_hook_6.rget("calculated_bs_interference_%s_%s"%(device_type,machine_name))
        # Template row; fields are overwritten per device/service below and
        # a .copy() is appended so rows don't alias each other.
        bs_kpi_dict = {
            'site_name': 'unknown' ,
            'device_name': 'unknown',
            'service_name': 'unknown',
            'ip_address': 'unknown',
            'severity': 'unknown',
            'age': 'unknown',
            'data_source': 'unknown',
            'current_value': 'unknown',
            'warning_threshold': 'unknown',
            'critical_threshold': 'unknown',
            'check_timestamp': 'unknown',
            'sys_timestamp': 'unknown' ,
            'refer':'unknown',
            'min_value':'unknown',
            'max_value':'unknown',
            'avg_value':'unknown',
            'machine_name':'unknown'
        }
        cur_processing_time = backtrack_x_min(time.time(),300) +120 # this is used to rewind the time to previous multiple of 5 value so that kpi can be shown accordingly
        bs_devices_list = []
        #{'wimax_bs_interference_kpi': '404', 'connectedss': {1: ['10.170.72.33', '10.170.72.39'], 2: ['10.170.72.40', '10.170.72.11', '10.170.72.31', '10.170.72.58', '10.170.72.20', '10.170.72.47', '10.170.72.52', '10.170.72.56', '10.170.72.61']}, 'device_type': 'StarmaxIDU', 'services': ['wimax_bs_interference_kpi'], 'pmp1_sec': '00:0a:10:08:02:41', 'hostname': '28455', 'ipaddress': '10.170.72.2', 'pmp2_sec': '00:0a:10:08:02:43', 'site': 'ospf1_slave_1'}
        for bs_device in bs_data:
            bs_device= eval(bs_device)
            hostname = bs_device.get('hostname')
            bs_kpi_dict['machine_name']= machine_name
            bs_kpi_dict['check_timestamp']=cur_processing_time
            bs_kpi_dict['sys_timestamp']=cur_processing_time
            bs_kpi_dict['device_name']=bs_device.get('hostname')
            bs_kpi_dict['site_name']= bs_device.get('site')
            bs_kpi_dict['ip_address']=bs_device.get('ipaddress')
            for service in bs_device.get('services'):
                print bs_device.get('services')
                thresholds = get_severity_values(service)
                bs_kpi_dict['critical_threshold']=thresholds[0]
                bs_kpi_dict['warning_threshold']= thresholds[1]
                bs_kpi_dict['service_name']= service
                # One row per sector; the sector id keys the per-sector value.
                for data_source_sec in bs_device.get(service):
                    print 1 , data_source_sec,service
                    #data_source = 'interference'
                    data_source = "ul_interference" if "ul_interference" in service else "dl_interference"
                    bs_kpi_dict['data_source']=data_source
                    bs_kpi_dict['current_value']= bs_device.get(service).get(data_source_sec)
                    bs_kpi_dict['refer']=bs_device.get(data_source+'_sec')
                    bs_kpi_dict['severity']= calculate_severity(service,bs_kpi_dict['current_value'])
                    bs_kpi_dict['age']= calculate_age(hostname,bs_kpi_dict['severity'],bs_device.get('device_type'),cur_processing_time)
                    bs_kpi_dict['min_value']= bs_kpi_dict['current_value']
                    bs_kpi_dict['max_value']=bs_kpi_dict['current_value']
                    bs_kpi_dict['avg_value']=bs_kpi_dict['current_value']
                    # Drop sentinel rows (404 = device not found yet).
                    if bs_kpi_dict['current_value'] not in ERROR_FOR_DEVICE_OMITTED:
                        bs_devices_list.append(bs_kpi_dict.copy())
        try:
            redis_hook_6.rpush("formatted_bs_%s_%s"%(device_type,machine_name),bs_devices_list)
        except Exception:
            logging.error("Unable to push Formatted BS Data")
    #To create SS dict
    def format_ss_data(**kwargs):
        machine_name = kwargs.get("params").get("machine_name")
        device_type = kwargs.get("params").get("technology")
        # Template row, same scheme as format_bs_data above.
        ss_kpi_dict = {
            'site_name': 'unknown' ,
            'device_name': 'unknown',
            'service_name': 'unknown',
            'ip_address': 'unknown',
            'severity': 'unknown',
            'age': 'unknown',
            'data_source': 'unknown',
            'current_value': 'unknown',
            'warning_threshold': 'unknown',
            'critical_threshold': 'unknown',
            'check_timestamp': 'unknown',
            'sys_timestamp': 'unknown' ,
            'refer':'unknown',
            'min_value':'unknown',
            'max_value':'unknown',
            'avg_value':'unknown',
            'machine_name':'unknown'
        }
        ss_data =redis_hook_6.rget("calculated_ss_interference_%s_%s"%(device_type,machine_name))
        cur_processing_time = backtrack_x_min(time.time(),300) + 120 # this is used to rewind the time to previous multiple of 5 value so that kpi can be shown accordingly
        ss_devices_list = []
        for ss_device in ss_data:
            ss_device = eval(ss_device)
            hostname = ss_device.get('hostname')
            for service in ss_device.get('services'):
                if int(ss_device.get(service)) != 404:
                    current_value=ss_device.get(service)
                    data_source = "ul_interference" if "ul_interference" in service else "dl_interference"
                    thresholds = get_severity_values(service)
                    ss_kpi_dict['critical_threshold']=thresholds[0]
                    ss_kpi_dict['data_source']=data_source
                    ss_kpi_dict['site_name']=ss_device.get('site')
                    ss_kpi_dict['severity']= 'ok' if ss_device.get(service) == 0 or ss_device.get(service) == 1 else 'unknown' #TODO: ok and unknown are only 2 sev for ss we can incluudethis in rules later
                    ss_kpi_dict['avg_value']=current_value
                    ss_kpi_dict['service_name']= service
                    ss_kpi_dict['age']= calculate_age(hostname,ss_kpi_dict['severity'],ss_device.get('device_type'),cur_processing_time)
                    ss_kpi_dict['min_value']= current_value
                    ss_kpi_dict['machine_name']= machine_name
                    ss_kpi_dict['check_timestamp']=cur_processing_time
                    ss_kpi_dict['device_name']=ss_device.get('hostname')
                    ss_kpi_dict['sys_timestamp']=cur_processing_time
                    ss_kpi_dict['max_value']=current_value
                    ss_kpi_dict['current_value']=current_value
                    ss_kpi_dict['refer']=''
                    ss_kpi_dict['ip_address']=ss_device.get('ipaddress')
                    ss_kpi_dict['warning_threshold']= thresholds[1]
                    ss_devices_list.append(ss_kpi_dict.copy())
                else:
                    logging.warning("Unable to get the kpi value for device %s for %s"%(hostname,service))
                    continue
        try:
            if len(ss_devices_list) > 0:
                redis_hook_6.rpush("formatted_ss_%s_%s"%(device_type,machine_name),ss_devices_list)
            else:
                logging.info("No %s device found in %s after formatting "%(device_type,machine_name))
        except Exception:
            logging.error("Unable to push formatted SS data to redis")
    # Pull raw SS service values from memcache for every SS host on a site
    # and stage them in redis keyed "<device_type>_<machine>".
    def get_required_data_ss(**kwargs):
        site_name = kwargs.get("params").get("site_name")
        device_type = kwargs.get("params").get("technology")
        machine_name = site_name.split("_")[0]
        ss_data_dict = {}
        all_ss_data = []
        # Pick the memcache cluster matching the site's network island.
        if "vrfprv" in site_name:
            memc_con = vrfprv_memc_con
        elif "pub" in site_name:
            memc_con = pub_memc_con
        else:
            memc_con = memc_con_cluster
        if site_name not in hostnames_ss_per_site.keys():
            logging.warning("No SS devices found for %s"%(site_name))
            return 1
        for hostnames_dict in hostnames_ss_per_site.get(site_name):
            host_name = hostnames_dict.get("hostname")
            ip_address = hostnames_dict.get("ip_address")
            ss_data_dict['hostname'] = host_name
            ss_data_dict['ipaddress'] = ip_address
            ss_data_dict['site_name'] = site_name
            for service in interference_service_mapping.get(ss_name):
                ss_data_dict[service] = memc_con.get("%s_%s"%(ip_address,service))
            all_ss_data.append(ss_data_dict.copy())
        print 3
        if len(all_ss_data) == 0:
            logging.error("No data Fetched ! Aborting Successfully")
            return 0
        try:
            print "Success %s_%s"%(device_type,machine_name)
            redis_hook_6.rpush("%s_%s"%(device_type,machine_name),all_ss_data)
            print "Success"
        except Exception:
            logging.error("Unable to insert ss data into redis")
    # Same as get_required_data_ss but for BS hosts; BS values are keyed by
    # hostname (not IP) in memcache and include the connected-SS list.
    def get_required_data_bs(**kwargs):
        site_name = kwargs.get("params").get("site_name")
        device_type = kwargs.get("params").get("technology")
        machine_name = site_name.split("_")[0]
        bs_data_dict = {}
        all_bs_data = []
        if "vrfprv" in site_name:
            memc_con = vrfprv_memc_con
        elif "pub" in site_name:
            memc_con = pub_memc_con
        else:
            memc_con = memc_con_cluster
        try:
            for hostnames_dict in hostnames_per_site.get(site_name):
                host_name = hostnames_dict.get("hostname")
                ip_address = hostnames_dict.get("ip_address")
                connected_ss = memc_con.get("%s_active_ss"%(host_name))
                bs_data_dict['hostname'] = host_name
                bs_data_dict['ipaddress'] = ip_address
                bs_data_dict['connectedss'] = connected_ss
                bs_data_dict['device_type'] = device_type
                bs_data_dict['site_name'] = site_name
                for service in interference_service_mapping.get(bs_name):
                    bs_data_dict[service] = memc_con.get("%s_%s"%(host_name,service))
                all_bs_data.append(bs_data_dict.copy())
        except TypeError:
            logging.info("Unable to get site in the hostnames_per_site variable")
        if len(all_bs_data) > 0 :
            redis_hook_6.rpush("%s_%s"%(device_type,machine_name),all_bs_data)
        else:
            logging.info("No Host Found at site")
    # Evaluate the Variable-driven KPI formulas for every BS on a machine.
    # NOTE(review): eval() of formulas fetched from Airflow Variables — the
    # formulas reference locals like `devices` and `ss_data` by convention.
    def calculate_interference_data_bs(**kwargs):
        machine_name = kwargs.get("params").get("machine_name")
        device_type = kwargs.get("params").get("technology")
        services = device_to_service_mapper.get(device_type)
        ss_services = device_to_service_mapper.get(ss_name)
        ss_data = get_calculated_ss_data()
        bs_data = redis_hook_6.rget("%s_%s"%(device_type,machine_name))
        all_bs_calculated_data = []
        bs_services = interference_service_mapping.get(bs_name)
        count =0
        for base_statations in bs_data:
            devices = eval(base_statations)
            devices['site'] = devices.get('site_name')
            devices['device_type'] = device_type
            for service in services:
                if 'services' in devices.keys():
                    devices['services'].append(service)
                else:
                    devices['services'] = [service]
                # Both branches are currently identical — the isFunction flag
                # presumably exists for formulas needing different handling.
                if kpi_rules.get(service).get('isFunction'):
                    devices[service] = eval(kpi_rules.get(service).get('formula'))
                else:
                    devices[service] = eval(kpi_rules.get(service).get('formula'))
            #IGNORING ERRORED DEVICES
            if devices.get('services'):
                all_bs_calculated_data.append(devices.copy())
        if len(all_bs_calculated_data) > 0:
            try:
                redis_hook_6.rpush("calculated_bs_interference_%s_%s"%(device_type,machine_name),all_bs_calculated_data)
            except Exception:
                logging.error("Unable to insert data in redis")
        else:
            logging.info("No Data found for site %s"%(machine_name))
    #here we will only calculate the interference for the bs which is dependent on the SS
    def calculate_interference_data_ss(**kwargs):
        machine_name = kwargs.get("params").get("machine_name")
        device_type = kwargs.get("params").get("technology")
        devices_data_dict = redis_hook_6.rget("%s_%s"%(device_type,machine_name))
        if len(devices_data_dict) == 0:
            logging.info("No Data found for ss %s "%(machine_name))
            return 1
        services = device_to_service_mapper.get(device_type)
        ip_ul_mapper = {}
        ss_interference_list= []
        ss_data = []
        for devices in devices_data_dict:
            devices = eval(devices)
            devices['site'] = devices.get('site_name')
            devices['device_type'] = device_type
            for service in services: #loop for the all the configured services
                if 'services' in devices.keys():
                    devices['services'].append(service)
                else:
                    devices['services'] = []
                    devices.get('services').append(service)
                if kpi_rules.get(service).get('isFunction'):
                    devices[service] = eval(kpi_rules.get(service).get('formula'))
                else:
                    devices[service] = eval(kpi_rules.get(service).get('formula'))
            ss_data.append(devices.copy())
            ip_ul_mapper[devices.get('ipaddress')] = devices.copy()
            ss_interference_list.append(ip_ul_mapper.copy())
        redis_hook_6.rpush("calculated_ss_interference_%s_%s"%(device_type,machine_name),ss_data)
        redis_hook_6.rpush("calculated_ss_interference_kpi",ss_interference_list)
    # Collect the formatted rows for one machine into a single redis key that
    # the MySqlLoaderOperator tasks read from.
    def aggregate_interference_data(*args,**kwargs):
        machine_name = kwargs.get("params").get("machine_name")
        bs_or_ss = kwargs.get("params").get("type")
        if bs_or_ss == "bs":
            device_type = bs_name
        else:
            device_type = ss_name
        #device_type = kwargs.get("params").get("device_type")
        formatted_data=redis_hook_6.rget("formatted_%s_%s_%s"%(bs_or_ss,device_type,machine_name))
        machine_data = []
        for site_data in formatted_data:
            machine_data.append(eval(site_data))
        redis_hook_6.set("aggregated_interference_%s_%s_%s"%(machine_name,bs_or_ss,device_type),str(machine_data))
    machine_names = set([site.split("_")[0] for site in union_sites])
    config_machines = set([site.split("_")[0] for site in config_sites])
    aggregate_dependency_ss = {}
    aggregate_dependency_bs = {}
    ss_calc_task_list = {}
    bs_calc_task_list = {}
    #TODo Remove this if ss >> bs task
    # calculate_interference_lost_ss_bs_task = PythonOperator(
    # task_id = "calculate_bs_interference_lost_ss",
    # provide_context=True,
    # python_callable=calculate_interference_data_bs,
    # params={"lost_n_found":True},
    # dag=interference_kpi_subdag_dag
    # )
    # Per machine: calc -> format -> aggregate -> (insert + update in MySQL).
    for each_machine_name in machine_names:
        if each_machine_name in config_machines:
            aggregate_interference_data_ss_task = PythonOperator(
                task_id = "aggregate_interference_ss_%s"%each_machine_name,
                provide_context=True,
                python_callable=aggregate_interference_data,
                params={"machine_name":each_machine_name,'type':'ss'},
                dag=interference_kpi_subdag_dag,
                queue = celery_queue
                )
            aggregate_interference_data_bs_task = PythonOperator(
                task_id = "aggregate_interference_bs_%s"%each_machine_name,
                provide_context=True,
                python_callable=aggregate_interference_data,
                params={"machine_name":each_machine_name,'type':'bs'},
                dag=interference_kpi_subdag_dag,
                queue = celery_queue
                )
            calculate_utilization_data_ss_task = PythonOperator(
                task_id = "calculate_ss_interference_kpi_of_%s"%each_machine_name,
                provide_context=True,
                trigger_rule = 'all_done',
                python_callable=calculate_interference_data_ss,
                params={"machine_name":each_machine_name,"technology":ss_name},
                dag=interference_kpi_subdag_dag,
                queue = celery_queue,
                )
            calculate_utilization_data_bs_task = PythonOperator(
                task_id = "calculate_bs_interference_kpi_of_%s"%each_machine_name,
                provide_context=True,
                python_callable=calculate_interference_data_bs,
                trigger_rule = 'all_done',
                params={"machine_name":each_machine_name,"technology":bs_name,"lost_n_found":False},
                dag=interference_kpi_subdag_dag,
                queue = celery_queue,
                )
            format_data_ss_task = PythonOperator(
                task_id = "format_data_of_ss_%s"%each_machine_name,
                provide_context=True,
                python_callable=format_ss_data,
                trigger_rule = 'all_done',
                params={"machine_name":each_machine_name,"technology":ss_name},
                dag=interference_kpi_subdag_dag,
                queue = celery_queue,
                )
            format_data_bs_task = PythonOperator(
                task_id = "format_data_of_bs_%s"%each_machine_name,
                provide_context=True,
                python_callable=format_bs_data,
                trigger_rule = 'all_done',
                params={"machine_name":each_machine_name,"technology":bs_name},
                dag=interference_kpi_subdag_dag,
                queue = celery_queue,
                )
            format_data_ss_task >> aggregate_interference_data_ss_task
            format_data_bs_task >> aggregate_interference_data_bs_task
            ss_calc_task_list[each_machine_name] = calculate_utilization_data_ss_task
            bs_calc_task_list[each_machine_name] = calculate_utilization_data_bs_task
            calculate_utilization_data_ss_task >> format_data_ss_task
            calculate_utilization_data_bs_task >> format_data_bs_task
            device_tech = {'ss':ss_name,'bs':bs_name}
            #we gotta create teh crazy queries WTF this is so unsafe
            INSERT_QUERY = INSERT_HEADER%("nocout_"+each_machine_name) + INSERT_TAIL
            UPDATE_QUERY = UPDATE_HEADER%("nocout_"+each_machine_name) + UPDATE_TAIL
            INSERT_QUERY = INSERT_QUERY.replace('\n','')
            UPDATE_QUERY = UPDATE_QUERY.replace('\n','')
            for bs_or_ss in device_tech.keys():
                if not DEBUG_MODE:
                    insert_data_in_mysql = MySqlLoaderOperator(
                        task_id ="upload_%s_data_%s"%(bs_or_ss,each_machine_name),
                        dag=interference_kpi_subdag_dag,
                        query=INSERT_QUERY,
                        #data="",
                        redis_key="aggregated_interference_%s_%s_%s"%(each_machine_name,bs_or_ss,device_tech.get(bs_or_ss)),
                        redis_conn_id = "redis_hook_10",
                        mysql_conn_id='mysql_uat',
                        queue = celery_queue,
                        trigger_rule = 'all_done'
                        )
                    update_data_in_mysql = MySqlLoaderOperator(
                        task_id ="update_%s_data_%s"%(bs_or_ss,each_machine_name),
                        query=UPDATE_QUERY ,
                        #data="",
                        redis_key="aggregated_interference_%s_%s_%s"%(each_machine_name,bs_or_ss,device_tech.get(bs_or_ss)),
                        redis_conn_id = "redis_hook_10",
                        mysql_conn_id='mysql_uat',
                        dag=interference_kpi_subdag_dag,
                        queue = celery_queue,
                        trigger_rule = 'all_done'
                        )
                    if bs_or_ss == "ss":
                        update_data_in_mysql << aggregate_interference_data_ss_task
                        insert_data_in_mysql << aggregate_interference_data_ss_task
                    else:
                        update_data_in_mysql << aggregate_interference_data_bs_task
                        insert_data_in_mysql << aggregate_interference_data_bs_task
                else:
                    logging.info("Not inserting data Debug mode active")
    # Per site: data-fetch tasks feed the owning machine's calc tasks.
    for each_site_name in union_sites:
        if each_site_name in config_sites:
            get_required_data_ss_task = PythonOperator(
                task_id = "get_interference_data_of_ss_%s"%each_site_name,
                provide_context=True,
                trigger_rule = 'all_done',
                python_callable=get_required_data_ss,
                params={"site_name":each_site_name,"technology":ss_name},
                dag=interference_kpi_subdag_dag,
                queue = celery_queue
                )
            get_required_data_bs_task = PythonOperator(
                task_id = "get_interference_data_of_bs_%s"%each_site_name,
                provide_context=True,
                trigger_rule = 'all_done',
                python_callable=get_required_data_bs,
                params={"site_name":each_site_name,"technology":bs_name},
                dag=interference_kpi_subdag_dag,
                queue = celery_queue,
                )
            machine_name = each_site_name.split("_")[0]
            get_required_data_ss_task >> ss_calc_task_list.get(machine_name)
            get_required_data_bs_task >> bs_calc_task_list.get(machine_name)
        else:
            logging.info("Skipping %s"%(each_site_name))
    # BS KPI formulas consume SS results, so order SS calc before BS calc —
    # either across all machines or pairwise per machine (see flag above).
    if set_dependency_for_ss_on_all_machines:
        for bs in bs_calc_task_list:
            for ss in ss_calc_task_list:
                try:
                    bs_task = bs_calc_task_list.get(bs)
                    ss_task =ss_calc_task_list.get(ss)
                    #print "%s << %s"%(bs,ss)
                    bs_task << ss_task
                except:
                    #print "EXCEPTION %s << %s"%(bs,ss)
                    print "Exception"
                    pass
    else:
        for bs in bs_calc_task_list:
            try:
                bs_task = bs_calc_task_list.get(bs)
                ss_task =ss_calc_task_list.get(bs)
                #print "%s << %s"%(bs,ss)
                bs_task << ss_task
            except:
                #print "EXCEPTION %s << %s"%(bs,ss)
                print "Exception"
                pass
    return interference_kpi_subdag_dag
|
vipul-tm/DAG
|
dags-ttpl/subdags/interference_kpi_subdag.py
|
Python
|
bsd-3-clause
| 23,550
|
from __future__ import division
import time
from conseval.params import ParamDef, Params, WithParams
from conseval.utils.general import norm_scores, window_scores
################################################################################
# Fetch tools
################################################################################
def get_scorer(name, **params):
    """Instantiate the Scorer found in module ``scorers.<name>``.

    Keyword arguments are forwarded to the scorer's constructor.
    """
    cls = get_scorer_cls(name)
    return cls(**params)
def get_scorer_cls(name):
    """
    Get the appropriate Scorer class in the module scorers.`name`.

    The class name is derived from the last dotted component of `name` by
    CamelCasing its underscore-separated parts.

    >>> cls = get_scorer_cls('caprasingh07.js_divergence')
    >>> cls.__module__
    'scorers.caprasingh07.js_divergence'
    >>> cls.__name__
    'JsDivergence'
    """
    try:
        # fromlist forces __import__ to return the leaf submodule, not the
        # top-level 'scorers' package.
        scorer_module = __import__('scorers.'+name, fromlist=['x'])
        scorer_clsname = "".join(s.capitalize() for s in name.split('.')[-1].split('_'))
        # NOTE(review): a truthy IS_BASE_SCORER makes this return None
        # implicitly — callers (e.g. get_scorer) will then fail on None(...).
        # Presumably intentional to skip abstract base modules; confirm.
        if hasattr(scorer_module, 'IS_BASE_SCORER'):
            if getattr(scorer_module, 'IS_BASE_SCORER'):
                return
        scorer_cls = getattr(scorer_module, scorer_clsname)
    except (ImportError, AttributeError), e:
        # Missing module or missing class both mean "not a valid scorer".
        raise ImportError("%s: %s is not a valid scorer." % (e, name))
    return scorer_cls
################################################################################
# Scorer class
################################################################################
class Scorer(WithParams):
    """
    Abstract base for all scorers.  Concrete scorers live in
    scorers/[scorer_name].py with class name ScorerName and must
    override _score().
    """

    # Tunable scoring parameters.  Subclasses inherit these (with these
    # defaults) and may override defaults or append further ParamDefs;
    # see scorers/rate4site_eb.py for an example.
    params = Params(
        ParamDef('window_size', 2, int, lambda x: x>=0,
            help="Number of residues on either side included in the window"),
        ParamDef('window_lambda', .5, float, lambda x: 0<=x<=1,
            help="lambda for window heuristic linear combination. Meaningful only if window_size != 0."),
        ParamDef('normalize', False, bool,
            help="return z-scores (over the alignment) of each column, instead of original scores"),
    )

    def __init__(self, **params):
        super(Scorer, self).__init__(**params)
        # e.g. module scorers.foo.bar -> name "foo.bar"
        self.name = ".".join(type(self).__module__.split('.')[1:])

    def score(self, alignment):
        """
        Score each site in the first sequence of `alignment`.

        Delegates the alignment-wide computation to _score(), then applies
        the optional window smoothing and z-score normalization configured
        through this scorer's params.

        @param alignment:
            Alignment object
        @return:
            List of scores for each site
        """
        start = time.time()
        site_scores = self._score(alignment)
        # Blend each site's score with its neighbors when a window is set.
        if self.window_size:
            site_scores = window_scores(site_scores, self.window_size,
                                        self.window_lambda)
        # Optionally convert to z-scores computed over the whole alignment.
        if self.normalize:
            site_scores = list(norm_scores(site_scores, filter=5))
        elapsed = time.time() - start  # timing retained for debugging parity
        return site_scores

    def _score(self, alignment):
        """
        Hook for subclasses: compute the scores for the whole alignment.

        @param alignment:
            Alignment object
        @return:
            List of scores for each site
        """
        raise NotImplementedError()

    def set_output_id(self, output_id):
        """Record an identifier used when naming this scorer's output."""
        self.output_id = output_id

    def set_output_dir(self, out_dir):
        """Record the directory this scorer's output should be written to."""
        self.output_dir = out_dir
|
jwayne/conseval
|
conseval/scorer.py
|
Python
|
gpl-2.0
| 3,882
|
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import json
import os
import waffle
from django.conf import settings
from django.http import HttpResponse
from django.test.client import Client, RequestFactory
from django.test.utils import override_settings
from bedrock.base.helpers import static
from bedrock.base.urlresolvers import reverse
from mock import ANY, call, Mock, patch
from nose.tools import eq_, ok_
from pyquery import PyQuery as pq
from bedrock.firefox import views as fx_views
from bedrock.firefox.firefox_details import FirefoxDesktop, FirefoxAndroid, FirefoxIOS
from bedrock.firefox.utils import product_details
from bedrock.mozorg.tests import TestCase
# Fixture JSON used to build product-details objects without hitting the
# real product-details service.
TEST_DATA_DIR = os.path.join(os.path.dirname(__file__), 'test_data')
PROD_DETAILS_DIR = os.path.join(TEST_DATA_DIR, 'product_details_json')
# Minimal platform dict accepted by latest_builds mocks below.
GOOD_PLATS = {'Windows': {}, 'OS X': {}, 'Linux': {}}
# Product objects backed by the fixture data; patched into views under test.
firefox_desktop = FirefoxDesktop(json_dir=PROD_DETAILS_DIR)
firefox_android = FirefoxAndroid(json_dir=PROD_DETAILS_DIR)
firefox_ios = FirefoxIOS(json_dir=PROD_DETAILS_DIR)
class TestInstallerHelp(TestCase):
    """Tests for the firefox.installer-help view's download buttons."""
    def setUp(self):
        # Replace the download_firefox template helper with a Mock so each
        # test can assert on the exact arguments the view passes to it.
        self.button_mock = Mock()
        self.patcher = patch.dict('jingo.env.globals',
                                  download_firefox=self.button_mock)
        self.patcher.start()
        self.view_name = 'firefox.installer-help'
        with self.activate('en-US'):
            self.url = reverse(self.view_name)
    def tearDown(self):
        self.patcher.stop()
    def test_buttons_use_lang(self):
        """
        The buttons should use the lang from the query parameter.
        """
        self.client.get(self.url, {
            'installer_lang': 'fr'
        })
        self.button_mock.assert_has_calls([
            call(force_direct=True, force_full_installer=True, locale='fr'),
            call('beta', small=ANY, force_direct=True,
                 force_full_installer=True, icon=ANY, locale='fr'),
            call('alpha', small=ANY, force_direct=True,
                 force_full_installer=True, icon=ANY, locale='fr'),
        ])
    def test_buttons_ignore_non_lang(self):
        """
        The buttons should ignore an invalid lang.
        """
        self.client.get(self.url, {
            'installer_lang': 'not-a-locale'
        })
        # locale=None means the helper falls back to its own default.
        self.button_mock.assert_has_calls([
            call(force_direct=True, force_full_installer=True, locale=None),
            call('beta', small=ANY, force_direct=True,
                 force_full_installer=True, icon=ANY, locale=None),
            call('alpha', small=ANY, force_direct=True,
                 force_full_installer=True, icon=ANY, locale=None),
        ])
    def test_invalid_channel_specified(self):
        """
        All buttons should show when channel is invalid.
        """
        self.client.get(self.url, {
            'channel': 'dude',
        })
        self.button_mock.assert_has_calls([
            call(force_direct=True, force_full_installer=True, locale=None),
            call('beta', small=ANY, force_direct=True,
                 force_full_installer=True, icon=ANY, locale=None),
            call('alpha', small=ANY, force_direct=True,
                 force_full_installer=True, icon=ANY, locale=None),
        ])
    def test_one_button_when_channel_specified(self):
        """
        There should be only one button when the channel is given.
        """
        self.client.get(self.url, {
            'channel': 'beta',
        })
        self.button_mock.assert_called_once_with('beta', force_direct=True,
                                                 force_full_installer=True,
                                                 locale=None)
@patch.object(fx_views, 'firefox_desktop', firefox_desktop)
class TestFirefoxAll(TestCase):
    """Tests for the firefox.all download-listing page."""
    def setUp(self):
        with self.activate('en-US'):
            self.url = reverse('firefox.all')
    def test_no_search_results(self):
        """
        Tables should be gone and not-found message should be shown when there
        are no search results.
        """
        resp = self.client.get(self.url + '?q=DOES_NOT_EXIST')
        doc = pq(resp.content)
        ok_(not doc('table.build-table'))
        ok_(not doc('.not-found.hide'))
    def test_no_search_query(self):
        """
        When not searching all builds should show.
        """
        resp = self.client.get(self.url)
        doc = pq(resp.content)
        eq_(len(doc('.build-table')), 2)
        eq_(len(doc('.not-found.hide')), 2)
        # One table row per build, full and test builds combined.
        num_builds = len(firefox_desktop.get_filtered_full_builds('release'))
        num_builds += len(firefox_desktop.get_filtered_test_builds('release'))
        eq_(len(doc('tr[data-search]')), num_builds)
    def test_no_locale_details(self):
        """
        When a localized build has been added to the Firefox details while the
        locale details are not updated yet, the filtered build list should not
        include the localized build.
        """
        builds = firefox_desktop.get_filtered_full_builds('release')
        # 'uz' exists in the fixture's primary builds but not in languages.
        ok_('uz' in firefox_desktop.firefox_primary_builds)
        ok_('uz' not in firefox_desktop.languages)
        eq_(len([build for build in builds if build['locale'] == 'uz']), 0)
class TestFirefoxPartners(TestCase):
    """Tests for the partners JS bundles and the Salesforce form proxy."""
    @patch('bedrock.firefox.views.settings.DEBUG', True)
    def test_js_bundle_files_debug_true(self):
        """
        When DEBUG is on the bundle should return the individual files
        with the STATIC_URL.
        """
        bundle = 'partners_desktop'
        files = settings.PIPELINE_JS[bundle]['source_filenames']
        files = [static(f) for f in files]
        self.assertEqual(files,
                         json.loads(fx_views.get_js_bundle_files(bundle)))
    @patch('bedrock.firefox.views.settings.DEBUG', False)
    def test_js_bundle_files_debug_false(self):
        """
        When DEBUG is off the bundle should return a single minified filename.
        """
        bundle = 'partners_desktop'
        filename = static('js/%s-bundle.js' % bundle)
        bundle_file = json.loads(fx_views.get_js_bundle_files(bundle))
        self.assertEqual(len(bundle_file), 1)
        self.assertEqual(bundle_file[0], filename)
    @patch('bedrock.mozorg.views.requests.post')
    def test_sf_form_proxy_error_response(self, post_patch):
        """An error response from SF should be returned."""
        new_mock = Mock()
        new_mock.status_code = 400
        post_patch.return_value = new_mock
        with self.activate('en-US'):
            url = reverse('mozorg.partnerships')
        resp = self.client.post(url, {
            'first_name': 'The',
            'last_name': 'Dude',
            'company': 'Urban Achievers',
            'email': 'thedude@mozilla.com',
        }, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertEqual(resp.status_code, 400)
        # decode JSON response
        resp_data = json.loads(resp.content)
        self.assertEqual(resp_data['msg'], 'bad_request')
        self.assertTrue(post_patch.called)
    @patch('bedrock.mozorg.views.requests.post')
    def test_sf_form_proxy_invalid_form(self, post_patch):
        """A form error should result in a 400 response."""
        with self.activate('en-US'):
            url = reverse('mozorg.partnerships')
        # 'Dude' * 20 overflows the field's max length, making the form invalid.
        resp = self.client.post(url, {
            'first_name': 'Dude' * 20,
        }, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertEqual(resp.status_code, 400)
        resp_data = json.loads(resp.content)
        self.assertEqual(resp_data['msg'], 'Form invalid')
        # Invalid forms must never be forwarded to Salesforce.
        self.assertFalse(post_patch.called)
    @patch('bedrock.mozorg.views.requests.post')
    def test_sf_form_proxy(self, post_patch):
        """A valid form should be forwarded to Salesforce with all fields."""
        new_mock = Mock()
        new_mock.status_code = 200
        post_patch.return_value = new_mock
        with self.activate('en-US'):
            url = reverse('mozorg.partnerships')
        resp = self.client.post(url, {
            'first_name': 'The',
            'last_name': 'Dude',
            'title': 'Abider of things',
            'company': 'Urban Achievers',
            'email': 'thedude@mozilla.com',
        }, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertEqual(resp.status_code, 200)
        resp_data = json.loads(resp.content)
        self.assertEqual(resp_data['msg'], 'ok')
        post_patch.assert_called_once_with(ANY, {
            'first_name': u'The',
            'last_name': u'Dude',
            'description': u'',
            'retURL': 'http://www.mozilla.org/en-US/about/'
                      'partnerships?success=1',
            'title': u'Abider of things',
            'URL': u'',
            'company': u'Urban Achievers',
            'oid': '00DU0000000IrgO',
            'phone': u'',
            'street': u'',
            'zip': u'',
            'city': u'',
            'state': u'',
            'country': u'',
            'mobile': u'',
            '00NU0000002pDJr': [],  # interest (multi-select)
            '00NU00000053D4G': u'',  # interested_countries
            '00NU00000053D4L': u'',  # interested_languages
            '00NU00000053D4a': u'',  # campaign_type
            'industry': u'',
            'email': u'thedude@mozilla.com',
            'lead_source': 'www.mozilla.org/about/partnerships/',
        })
    def test_sf_form_csrf_status(self):
        """Test that CSRF checks return 200 with token and 403 without."""
        csrf_client = Client(enforce_csrf_checks=True)
        # GET first to obtain a csrftoken cookie to submit with the POST.
        response = csrf_client.get(reverse('firefox.partners.index'))
        post_url = reverse('mozorg.partnerships')
        response = csrf_client.post(post_url, {
            'first_name': "Partner",
            'csrfmiddlewaretoken': response.cookies['csrftoken'].value,
        })
        self.assertEqual(response.status_code, 200)
        response = csrf_client.post(post_url, {'first_name': "Partner"})
        self.assertEqual(response.status_code, 403)
# Shared Mock returning None; patched in as redirect_to on the class-based
# views below so they skip redirecting and actually call render().
none_mock = Mock()
none_mock.return_value = None
@patch.object(fx_views.WhatsnewView, 'redirect_to', none_mock)
@patch('bedrock.firefox.views.l10n_utils.render', return_value=HttpResponse())
class TestWhatsNew(TestCase):
    """Template-selection tests for the whatsnew page, by Firefox version."""
    def setUp(self):
        self.view = fx_views.WhatsnewView.as_view()
        self.rf = RequestFactory(HTTP_USER_AGENT='Firefox')
    @override_settings(DEV=True)
    def test_can_post(self, render_mock):
        """Home page must accept post for newsletter signup."""
        req = self.rf.post('/en-US/firefox/whatsnew/')
        self.view(req)
        # would return 405 before calling render otherwise
        render_mock.assert_called_once_with(req, ['firefox/australis/whatsnew-no-tour.html'], ANY)
    # begin 36.0 hello tour tests
    @override_settings(DEV=True)
    def test_fx_36_0(self, render_mock):
        """Should use no tour template for 36.0 with no old version"""
        req = self.rf.get('/en-US/firefox/whatsnew/')
        self.view(req, version='36.0')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/australis/fx36/whatsnew-no-tour.html'])
    @override_settings(DEV=True)
    def test_fx_36_0_with_oldversion(self, render_mock):
        """Should use hello whatsnew tour template for 36.0 with old version"""
        req = self.rf.get('/en-US/firefox/whatsnew/?oldversion=35.0')
        self.view(req, version='36.0')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/australis/fx36/whatsnew-tour.html'])
    @override_settings(DEV=True)
    def test_fx_36_0_with_wrong_oldversion(self, render_mock):
        """Should use no tour template for 36.0 with old version that is greater"""
        req = self.rf.get('/en-US/firefox/whatsnew/?oldversion=36.1')
        self.view(req, version='36.0')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/australis/fx36/whatsnew-no-tour.html'])
    # end 36.0 hello tour tests
    @override_settings(DEV=True)
    def test_fx_37_0_whatsnew(self, render_mock):
        """Should show Android SMS template for 37.0"""
        req = self.rf.get('/en-US/firefox/whatsnew/?oldversion=36.0')
        self.view(req, version='37.0')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/whatsnew-fx37.html'])
    # begin 38.0.5 whatsnew tests
    @override_settings(DEV=True)
    def test_fx_dev_browser_35_0_a2_whatsnew(self, render_mock):
        """Should show dev browser whatsnew template"""
        req = self.rf.get('/en-US/firefox/whatsnew/')
        self.view(req, version='35.0a2')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/dev-whatsnew.html'])
    @override_settings(DEV=True)
    def test_fx_38_0_5_whatsnew_en_us(self, render_mock):
        """Should show Pocket + Video template for en-US"""
        req = self.rf.get('/en-US/firefox/whatsnew/?oldversion=38.0')
        self.view(req, version='38.0.5')
        template = render_mock.call_args[0][1]
        ctx = render_mock.call_args[0][2]
        ok_('video_url' in ctx)
        eq_(template, ['firefox/whatsnew_38/whatsnew-pocket-video.html'])
    @override_settings(DEV=True)
    def test_fx_38_0_5_whatsnew_fr(self, render_mock):
        """Should show Video template for fr"""
        req = self.rf.get('/fr/firefox/whatsnew/?oldversion=38.0')
        req.locale = 'fr'
        self.view(req, version='38.0.5')
        template = render_mock.call_args[0][1]
        ctx = render_mock.call_args[0][2]
        ok_('video_url' in ctx)
        eq_(template, ['firefox/whatsnew_38/whatsnew-video.html'])
    @override_settings(DEV=True)
    def test_fx_38_0_5_whatsnew_ja(self, render_mock):
        """Should show Pocket template for ja"""
        req = self.rf.get('/ja/firefox/whatsnew/?oldversion=38.0')
        req.locale = 'ja'
        self.view(req, version='38.0.5')
        template = render_mock.call_args[0][1]
        ctx = render_mock.call_args[0][2]
        ok_('video_url' not in ctx)
        eq_(template, ['firefox/whatsnew_38/whatsnew-pocket.html'])
    # end 38.0.5 whatsnew tests
    # begin 42.0 whatsnew tests
    @override_settings(DEV=True)
    def test_fx_42_0(self, render_mock):
        """Should use tracking protection whatsnew template for 42.0"""
        req = self.rf.get('/en-US/firefox/whatsnew/')
        self.view(req, version='42.0')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/whatsnew_42/variant-a.html'])
    # end 42.0 whatsnew tests
    @override_settings(DEV=True)
    def test_older_whatsnew(self, render_mock):
        """Should show default no tour template for 35 and below"""
        req = self.rf.get('/en-US/firefox/whatsnew/?oldversion=34.0')
        self.view(req, version='35.0')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/australis/whatsnew-no-tour.html'])
    @override_settings(DEV=True)
    def test_rv_prefix(self, render_mock):
        """Prefixed oldversion shouldn't impact version sniffing."""
        req = self.rf.get('/en-US/firefox/whatsnew/?oldversion=rv:10.0')
        self.view(req, version='36.0')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/australis/fx36/whatsnew-tour.html'])
    @override_settings(DEV=False)
    def test_fx_australis_secure_redirect(self, render_mock):
        """Should redirect to https: for 29.0."""
        url = '/en-US/firefox/whatsnew/'
        req = self.rf.get(url)
        with patch.object(req, 'is_secure', return_value=False):
            resp = self.view(req, version='29.0')
        eq_(resp['location'], 'https://testserver' + url)
    @override_settings(DEV=True)
    def test_fx_australis_secure_redirect_not_dev(self, render_mock):
        """Should not redirect to https: in DEV mode."""
        url = '/en-US/firefox/whatsnew/'
        req = self.rf.get(url)
        with patch.object(req, 'is_secure', return_value=False):
            resp = self.view(req, version='29.0')
        eq_(resp.status_code, 200)
    @override_settings(DEV=True)
    def test_fx_australis_secure_redirect_secure(self, render_mock):
        """Should not redirect to https: when already secure."""
        url = '/en-US/firefox/whatsnew/'
        req = self.rf.get(url)
        with patch.object(req, 'is_secure', return_value=True):
            resp = self.view(req, version='29.0')
        eq_(resp.status_code, 200)
@patch.object(fx_views.TourView, 'redirect_to', none_mock)
@patch('bedrock.firefox.views.l10n_utils.render', return_value=HttpResponse())
class TestTourView(TestCase):
    """Template-selection tests for the tour page, by Firefox version."""
    def setUp(self):
        self.view = fx_views.TourView.as_view()
        self.rf = RequestFactory(HTTP_USER_AGENT='Firefox')
    @override_settings(DEV=True)
    def test_fx_tour_template(self, render_mock):
        """Should use firstrun tour template"""
        req = self.rf.get('/en-US/firefox/tour/')
        self.view(req, version='29.0')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/australis/help-menu-tour.html'])
    @override_settings(DEV=True)
    def test_fx_dev_browser_35_0_a2(self, render_mock):
        """Should use dev browser firstrun template"""
        req = self.rf.get('/en-US/firefox/tour/')
        self.view(req, version='35.0a2')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/dev-firstrun.html'])
    @override_settings(DEV=True)
    def test_fx_dev_browser_34_0_a2(self, render_mock):
        """Should use standard firstrun template for older aurora"""
        req = self.rf.get('/en-US/firefox/tour/')
        self.view(req, version='34.0a2')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/australis/help-menu-tour.html'])
    @override_settings(DEV=True)
    def test_fx_search_tour_34_0(self, render_mock):
        """Should use search tour template for 34.0"""
        req = self.rf.get('/en-US/firefox/tour/')
        self.view(req, version='34.0')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/australis/help-menu-34-tour.html'])
    @override_settings(DEV=True)
    def test_fx_search_tour_34_0_5(self, render_mock):
        """Should use search tour template for 34.0.5"""
        req = self.rf.get('/en-US/firefox/tour/')
        self.view(req, version='34.0.5')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/australis/help-menu-34-tour.html'])
    @override_settings(DEV=True)
    def test_fx_search_tour_34_0_locales(self, render_mock):
        """Should use australis template for 34.0 non en-US locales"""
        req = self.rf.get('/en-US/firefox/tour/')
        req.locale = 'de'
        self.view(req, version='34.0')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/australis/help-menu-tour.html'])
    @override_settings(DEV=True)
    def test_fx_firstrun_tour_36_0(self, render_mock):
        """Should use fx36 tour template for 36.0"""
        req = self.rf.get('/en-US/firefox/tour/')
        self.view(req, version='36.0')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/australis/fx36/help-menu-36-tour.html'])
    @override_settings(DEV=False)
    def test_fx_australis_secure_redirect(self, render_mock):
        """Should redirect to https"""
        url = '/en-US/firefox/tour/'
        req = self.rf.get(url)
        with patch.object(req, 'is_secure', return_value=False):
            resp = self.view(req, version='29.0')
        eq_(resp['location'], 'https://testserver' + url)
    @override_settings(DEV=True)
    def test_fx_australis_secure_redirect_not_dev(self, render_mock):
        """Should not redirect to https: in DEV mode."""
        url = '/en-US/firefox/tour/'
        req = self.rf.get(url)
        with patch.object(req, 'is_secure', return_value=False):
            resp = self.view(req, version='29.0')
        eq_(resp.status_code, 200)
    @override_settings(DEV=True)
    def test_fx_australis_secure_redirect_secure(self, render_mock):
        """Should not redirect to https: when already secure."""
        url = '/en-US/firefox/tour/'
        req = self.rf.get(url)
        with patch.object(req, 'is_secure', return_value=True):
            resp = self.view(req, version='29.0')
        eq_(resp.status_code, 200)
@patch.object(fx_views.FirstrunView, 'redirect_to', none_mock)
@patch('bedrock.firefox.views.l10n_utils.render', return_value=HttpResponse())
class TestFirstRun(TestCase):
    """Template-selection tests for the firstrun page, by Firefox version."""
    def setUp(self):
        self.view = fx_views.FirstrunView.as_view()
        self.rf = RequestFactory()
    @override_settings(DEV=True)
    def test_can_post(self, render_mock):
        """Home page must accept post for newsletter signup."""
        req = self.rf.post('/en-US/firefox/firstrun/')
        self.view(req)
        # would return 405 before calling render otherwise
        render_mock.assert_called_once_with(req,
            ['firefox/australis/firstrun-tour.html'], ANY)
    @override_settings(DEV=True)
    def test_fx_australis_29(self, render_mock):
        """Should use firstrun tour template"""
        req = self.rf.get('/en-US/firefox/firstrun/')
        self.view(req, version='29.0')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/australis/firstrun-tour.html'])
    @override_settings(DEV=True)
    def test_fx_dev_browser(self, render_mock):
        """Should use dev browser firstrun template"""
        req = self.rf.get('/en-US/firefox/firstrun/')
        self.view(req, version='35.0a2')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/dev-firstrun.html'])
    @override_settings(DEV=True)
    def test_fx_dev_browser_34_0_a2(self, render_mock):
        """Should use standard firstrun template for older aurora"""
        req = self.rf.get('/en-US/firefox/firstrun/')
        self.view(req, version='34.0a2')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/australis/firstrun-tour.html'])
    @override_settings(DEV=True)
    def test_fx_search_tour_34_0(self, render_mock):
        """Should use search tour template for 34.0"""
        req = self.rf.get('/en-US/firefox/firstrun/')
        self.view(req, version='34.0')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/australis/firstrun-34-tour.html'])
    @override_settings(DEV=True)
    def test_fx_search_tour_34_0_5(self, render_mock):
        """Should use search tour template for 34.0.5"""
        req = self.rf.get('/en-US/firefox/firstrun/')
        self.view(req, version='34.0.5')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/australis/firstrun-34-tour.html'])
    @override_settings(DEV=True)
    def test_fx_search_tour_34_0_locales(self, render_mock):
        """Should use australis template for 34.0 non en-US locales"""
        req = self.rf.get('/en-US/firefox/firstrun/')
        req.locale = 'de'
        self.view(req, version='34.0')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/australis/firstrun-tour.html'])
    @override_settings(DEV=True)
    def test_fx_search_tour_35_0_1(self, render_mock):
        """Should use search tour template for 35.0.1"""
        req = self.rf.get('/en-US/firefox/firstrun/')
        self.view(req, version='35.0.1')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/australis/firstrun-34-tour.html'])
    @override_settings(DEV=True)
    def test_fx_firstrun_tour_36_0(self, render_mock):
        """Should use fx36 tour template for 36.0"""
        req = self.rf.get('/en-US/firefox/firstrun/')
        self.view(req, version='36.0')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/australis/fx36/firstrun-tour.html'])
    @override_settings(DEV=True)
    def test_fx_firstrun_38_0_5(self, render_mock):
        """Should use fx38.0.5 firstrun template for 38.0.5"""
        req = self.rf.get('/en-US/firefox/firstrun/')
        self.view(req, version='38.0.5')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/australis/fx38_0_5/firstrun.html'])
    @override_settings(DEV=True)
    @patch.object(waffle, 'switch_is_active', Mock(return_value=True))
    def test_fx_firstrun_40_0(self, render_mock):
        """Should use fx40.0 firstrun template for 40.0"""
        req = self.rf.get('/en-US/firefox/firstrun/')
        self.view(req, version='40.0')
        template = render_mock.call_args[0][1]
        eq_(template, ['firefox/firstrun/firstrun.html'])
    @override_settings(DEV=False)
    def test_fx_australis_secure_redirect(self, render_mock):
        """Should redirect to https:"""
        url = '/en-US/firefox/firstrun/'
        req = self.rf.get(url)
        with patch.object(req, 'is_secure', return_value=False):
            resp = self.view(req, version='29.0')
        eq_(resp['location'], 'https://testserver' + url)
    @override_settings(DEV=True)
    def test_fx_australis_secure_redirect_not_dev(self, render_mock):
        """Should not redirect to https: in DEV mode."""
        url = '/en-US/firefox/firstrun/'
        req = self.rf.get(url)
        with patch.object(req, 'is_secure', return_value=False):
            resp = self.view(req, version='29.0')
        eq_(resp.status_code, 200)
    @override_settings(DEV=True)
    def test_fx_australis_secure_redirect_secure(self, render_mock):
        """Should not redirect to https: when already secure."""
        url = '/en-US/firefox/firstrun/'
        req = self.rf.get(url)
        with patch.object(req, 'is_secure', return_value=True):
            resp = self.view(req, version='29.0')
        eq_(resp.status_code, 200)
@patch.object(fx_views, 'firefox_desktop', firefox_desktop)
class FxVersionRedirectsMixin(object):
    """
    Shared user-agent-sniffing redirect tests.  Subclasses must set
    self.url in their setUp().
    """
    @override_settings(DEV=True)  # avoid https redirects
    def assert_ua_redirects_to(self, ua, url_name, status_code=301):
        # Helper: GET self.url with the given User-Agent and assert the
        # response redirects to url_name, preserving any query string.
        response = self.client.get(self.url, HTTP_USER_AGENT=ua)
        eq_(response.status_code, status_code)
        eq_(response['Vary'], 'User-Agent')
        eq_(response['Location'],
            'http://testserver%s' % reverse(url_name))
        # An additional redirect test with a query string
        query = '?ref=getfirefox'
        response = self.client.get(self.url + query, HTTP_USER_AGENT=ua)
        eq_(response.status_code, status_code)
        eq_(response['Vary'], 'User-Agent')
        eq_(response['Location'],
            'http://testserver%s' % reverse(url_name) + query)
    def test_non_firefox(self):
        """
        Any non-Firefox user agents should be permanently redirected to
        /firefox/new/.
        """
        user_agent = 'random'
        self.assert_ua_redirects_to(user_agent, 'firefox.new')
    @override_settings(DEV=True)
    @patch.dict(product_details.firefox_versions,
                LATEST_FIREFOX_VERSION='13.0.5')
    @patch('bedrock.firefox.firefox_details.firefox_desktop.latest_builds',
           return_value=('13.0.5', GOOD_PLATS))
    def test_current_minor_version_firefox(self, latest_mock):
        """
        Should show current even if behind by a patch version
        """
        user_agent = ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:13.0) '
                      'Gecko/20100101 Firefox/13.0')
        response = self.client.get(self.url, HTTP_USER_AGENT=user_agent)
        eq_(response.status_code, 200)
        eq_(response['Vary'], 'User-Agent')
    @override_settings(DEV=True)
    @patch.dict(product_details.firefox_versions,
                LATEST_FIREFOX_VERSION='25.0',
                FIREFOX_ESR='24.1')
    @patch('bedrock.firefox.firefox_details.firefox_desktop.latest_builds',
           return_value=('25.0', GOOD_PLATS))
    def test_esr_firefox(self, latest_mock):
        """
        Currently released ESR firefoxen should not redirect. At present
        that is 24.0.x.
        """
        user_agent = ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:24.0) '
                      'Gecko/20100101 Firefox/24.0')
        response = self.client.get(self.url, HTTP_USER_AGENT=user_agent)
        eq_(response.status_code, 200)
        eq_(response['Vary'], 'User-Agent')
    @override_settings(DEV=True)
    @patch.dict(product_details.firefox_versions,
                LATEST_FIREFOX_VERSION='16.0')
    @patch('bedrock.firefox.firefox_details.firefox_desktop.latest_builds',
           return_value=('16.0', GOOD_PLATS))
    def test_current_firefox(self, latest_mock):
        """
        Currently released firefoxen should not redirect.
        """
        user_agent = ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:16.0) '
                      'Gecko/20100101 Firefox/16.0')
        response = self.client.get(self.url, HTTP_USER_AGENT=user_agent)
        eq_(response.status_code, 200)
        eq_(response['Vary'], 'User-Agent')
    @override_settings(DEV=True)
    @patch.dict(product_details.firefox_versions,
                LATEST_FIREFOX_VERSION='16.0')
    @patch('bedrock.firefox.firefox_details.firefox_desktop.latest_builds',
           return_value=('16.0', GOOD_PLATS))
    def test_future_firefox(self, latest_mock):
        """
        Pre-release firefoxen should not redirect.
        """
        user_agent = ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; rv:18.0) '
                      'Gecko/20100101 Firefox/18.0')
        response = self.client.get(self.url, HTTP_USER_AGENT=user_agent)
        eq_(response.status_code, 200)
        eq_(response['Vary'], 'User-Agent')
class TestWhatsnewRedirect(FxVersionRedirectsMixin, TestCase):
    """Whatsnew-specific redirect and tour-flag tests."""
    def setUp(self):
        self.user_agent = ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:29.0) '
                           'Gecko/20100101 Firefox/29.0')
        # Marker emitted in the page markup when the tour is enabled.
        self.expected = 'data-has-tour="True"'
        self.url = reverse('firefox.whatsnew', args=['36.0'])
    @override_settings(DEV=True)
    @patch.dict(product_details.firefox_versions,
                LATEST_FIREFOX_VERSION='16.0')
    @patch('bedrock.mozorg.helpers.misc.find_static', return_value=True)
    def test_whatsnew_tour_oldversion(self, find_static):
        """Should not show tour if upgrading from 36.0 onwards."""
        # sanity check that it should show for other values of "oldversion"
        response = self.client.get(self.url + '?oldversion=28.0', HTTP_USER_AGENT=self.user_agent)
        self.assertIn(self.expected, response.content)
        response = self.client.get(self.url + '?oldversion=27.0.1', HTTP_USER_AGENT=self.user_agent)
        self.assertIn(self.expected, response.content)
        response = self.client.get(self.url + '?oldversion=4.0', HTTP_USER_AGENT=self.user_agent)
        self.assertIn(self.expected, response.content)
        response = self.client.get(self.url + '?oldversion=rv:10.0', HTTP_USER_AGENT=self.user_agent)
        self.assertIn(self.expected, response.content)
        response = self.client.get(self.url + '?oldversion=33.0', HTTP_USER_AGENT=self.user_agent)
        self.assertIn(self.expected, response.content)
        response = self.client.get(self.url + '?oldversion=33.0.1', HTTP_USER_AGENT=self.user_agent)
        self.assertIn(self.expected, response.content)
        # 36.0 and later old versions must not trigger the tour.
        response = self.client.get(self.url + '?oldversion=36.1', HTTP_USER_AGENT=self.user_agent)
        self.assertNotIn(self.expected, response.content)
        response = self.client.get(self.url + '?oldversion=36.1.1', HTTP_USER_AGENT=self.user_agent)
        self.assertNotIn(self.expected, response.content)
        response = self.client.get(self.url + '?oldversion=36.0', HTTP_USER_AGENT=self.user_agent)
        self.assertNotIn(self.expected, response.content)
        response = self.client.get(self.url + '?oldversion=37.0', HTTP_USER_AGENT=self.user_agent)
        self.assertNotIn(self.expected, response.content)
        # if there's no oldversion parameter, show no tour
        response = self.client.get(self.url, HTTP_USER_AGENT=self.user_agent)
        self.assertNotIn(self.expected, response.content)
class TestHelloStartRedirect(TestCase):
    """Tests redirecting non-Firefox browsers away from the Hello start page."""
    def setUp(self):
        self.user_agent = ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10.8; rv:35.0) '
                           'Gecko/20100101 Firefox/35.0')
        self.url = reverse('firefox.hello.start', args=['35.0'])
    def test_fx_hello_redirect_non_firefox(self):
        """Should redirect to /firefox/hello if not on Firefox"""
        self.user_agent = ('Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, '
                           'like Gecko) Chrome/41.0.2228.0 Safari/537.36')
        self.url = reverse('firefox.hello.start', args=['35.0'])
        response = self.client.get(self.url, HTTP_USER_AGENT=self.user_agent)
        eq_(response.status_code, 301)
        eq_(response.get('Vary'), 'User-Agent')
        eq_('http://testserver%s' % reverse('firefox.hello'),
            response.get('Location'))
    def test_fx_hello_no_redirect(self):
        """Should not redirect to /firefox/hello if on Firefox"""
        response = self.client.get(self.url, HTTP_USER_AGENT=self.user_agent)
        eq_(response.status_code, 200)
        eq_(response.get('Vary'), 'User-Agent')
|
davehunt/bedrock
|
bedrock/firefox/tests/test_base.py
|
Python
|
mpl-2.0
| 33,460
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Models for scheduled execution of jobs"""
import json
import textwrap
from datetime import datetime
from typing import Any, Optional
from flask_appbuilder import Model
from sqlalchemy import (
Boolean,
Column,
DateTime,
Float,
ForeignKey,
Integer,
String,
Table,
Text,
)
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import backref, relationship, RelationshipProperty
from superset import db, security_manager
from superset.models.helpers import AuditMixinNullable
metadata = Model.metadata  # pylint: disable=no-member
# Association table implementing the many-to-many Alert.owners relationship.
alert_owner = Table(
    "alert_owner",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("user_id", Integer, ForeignKey("ab_user.id")),
    Column("alert_id", Integer, ForeignKey("alerts.id")),
)
class Alert(Model, AuditMixinNullable):
    """Schedules for emailing slices / dashboards"""

    __tablename__ = "alerts"
    id = Column(Integer, primary_key=True)
    # Human-readable name shown for the alert.
    label = Column(String(150), nullable=False)
    # Indexed boolean toggle; presumably gates scheduling -- confirm with the
    # scheduler code that queries this table.
    active = Column(Boolean, default=True, index=True)
    # TODO(bkyryliuk): enforce minimal supported frequency
    # Cron expression describing when the alert is evaluated.
    crontab = Column(String(50), nullable=False)
    alert_type = Column(String(50))
    # Many-to-many to users via the alert_owner association table.
    owners = relationship(security_manager.user_model, secondary=alert_owner)
    recipients = Column(Text)
    slack_channel = Column(Text)
    # TODO(bkyryliuk): implement log_retention
    log_retention = Column(Integer, default=90)
    # Default is 60 * 60 * 24, i.e. one day if the unit is seconds -- TODO
    # confirm the unit where grace_period is consumed.
    grace_period = Column(Integer, default=60 * 60 * 24)
    # An alert may reference a slice and/or a dashboard (both FKs nullable).
    slice_id = Column(Integer, ForeignKey("slices.id"))
    slice = relationship("Slice", backref="alerts", foreign_keys=[slice_id])
    dashboard_id = Column(Integer, ForeignKey("dashboards.id"))
    dashboard = relationship("Dashboard", backref="alert", foreign_keys=[dashboard_id])
    last_eval_dttm = Column(DateTime, default=datetime.utcnow)
    last_state = Column(String(10))

    def __str__(self) -> str:
        """Short repr-style identifier used in logs: <id:label>."""
        return f"<{self.id}:{self.label}>"
class AlertLog(Model):
    """Keeps track of alert-related operations"""

    __tablename__ = "alert_logs"
    id = Column(Integer, primary_key=True)
    # When this evaluation was scheduled to run (vs. when it actually ran).
    scheduled_dttm = Column(DateTime)
    dttm_start = Column(DateTime, default=datetime.utcnow)
    dttm_end = Column(DateTime, default=datetime.utcnow)
    alert_id = Column(Integer, ForeignKey("alerts.id"))
    alert = relationship("Alert", backref="logs", foreign_keys=[alert_id])
    # Short outcome string for the run (max 10 chars).
    state = Column(String(10))

    @property
    def duration(self) -> float:
        """Elapsed wall-clock time of the run, in seconds.

        Fixed the return annotation: ``timedelta.total_seconds()`` returns a
        ``float``, not an ``int``.
        """
        # NOTE(review): assumes both dttm_start and dttm_end are set; if
        # either is None this raises TypeError.
        return (self.dttm_end - self.dttm_start).total_seconds()
# TODO: Currently SQLObservation table will constantly grow with no limit,
# add some retention restriction or more to a more scalable db e.g.
# https://github.com/apache/incubator-superset/blob/master/superset/utils/log.py#L32
class SQLObserver(Model, AuditMixinNullable):
    """Runs SQL-based queries for alerts"""

    __tablename__ = "sql_observers"
    id = Column(Integer, primary_key=True)
    # The SQL statement this observer runs; its result is recorded as a
    # SQLObservation row.
    sql = Column(Text, nullable=False)

    @declared_attr
    def alert_id(self) -> int:
        # Each observer belongs to exactly one alert.
        return Column(Integer, ForeignKey("alerts.id"), nullable=False)

    @declared_attr
    def alert(self) -> RelationshipProperty:
        # One-to-one style backref: Alert.sql_observer; deleting the alert
        # deletes the observer (delete-orphan cascade).
        return relationship(
            "Alert",
            foreign_keys=[self.alert_id],
            backref=backref("sql_observer", cascade="all, delete-orphan"),
        )

    @declared_attr
    def database_id(self) -> int:
        # The database the observer's SQL runs against.
        return Column(Integer, ForeignKey("dbs.id"), nullable=False)

    @declared_attr
    def database(self) -> RelationshipProperty:
        return relationship(
            "Database",
            foreign_keys=[self.database_id],
            backref=backref("sql_observers", cascade="all, delete-orphan"),
        )

    def get_last_observation(self) -> Optional[Any]:
        """Return the most recent SQLObservation for this observer, or None."""
        observations = list(
            db.session.query(SQLObservation)
            .filter_by(observer_id=self.id)
            .order_by(SQLObservation.dttm.desc())
            .limit(1)
        )

        if observations:
            return observations[0]

        return None
class SQLObservation(Model):  # pylint: disable=too-few-public-methods
    """Keeps track of values retrieved from SQLObservers"""

    __tablename__ = "sql_observations"
    id = Column(Integer, primary_key=True)
    # When the observation was recorded; indexed because
    # SQLObserver.get_last_observation orders by it descending.
    dttm = Column(DateTime, default=datetime.utcnow, index=True)
    observer_id = Column(Integer, ForeignKey("sql_observers.id"), nullable=False)
    observer = relationship(
        "SQLObserver",
        foreign_keys=[observer_id],
        backref=backref("observations", cascade="all, delete-orphan"),
    )
    alert_id = Column(Integer, ForeignKey("alerts.id"))
    alert = relationship(
        "Alert",
        foreign_keys=[alert_id],
        backref=backref("observations", cascade="all, delete-orphan"),
    )
    # Observed numeric value; error_msg presumably records why value is
    # missing when the observer's query failed -- confirm with the writer.
    value = Column(Float)
    error_msg = Column(String(500))
class Validator(Model, AuditMixinNullable):
    """Used to determine how an alert and its observations should be validated"""

    __tablename__ = "alert_validators"
    id = Column(Integer, primary_key=True)
    # Discriminator string, e.g. "operator" or "not null" (compared
    # case-insensitively in pretty_config below).
    validator_type = Column(String(100), nullable=False)
    # JSON-encoded validator settings; defaults to an empty JSON object.
    config = Column(
        Text,
        default=textwrap.dedent(
            """
            {
            }
            """
        ),
    )

    @declared_attr
    def alert_id(self) -> int:
        return Column(Integer, ForeignKey("alerts.id"), nullable=False)

    @declared_attr
    def alert(self) -> RelationshipProperty:
        # Alert.validators backref; validators are deleted with their alert.
        return relationship(
            "Alert",
            foreign_keys=[self.alert_id],
            backref=backref("validators", cascade="all, delete-orphan"),
        )

    @property
    def pretty_config(self) -> str:
        """ String representing the comparison that will trigger a validator """
        config = json.loads(self.config)

        if self.validator_type.lower() == "operator":
            # NOTE(review): raises KeyError if "op"/"threshold" are absent
            # from the stored config -- assumed to be validated upstream.
            return f"{config['op']} {config['threshold']}"

        if self.validator_type.lower() == "not null":
            return "!= Null or 0"

        return ""
|
airbnb/superset
|
superset/models/alerts.py
|
Python
|
apache-2.0
| 6,835
|
# Load shared defaults and override with production-specific settings below.
# (The star import also provides `os`, `BASE_DIR` and the base DATABASES
# dict used in this file -- confirm against defaults.py.)
from defaults import *
DEBUG = False
WSGI_APPLICATION = 'bucketlist_django.wsgi.application'
# Parse database configuration from $DATABASE_URL
import dj_database_url
DATABASES['default'] = dj_database_url.config()
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# Allow all host headers
# NOTE(review): ALLOWED_HOSTS = ['*'] disables Django's Host-header
# validation; pin this to the real domain(s) if possible.
ALLOWED_HOSTS = ['*']
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
STATICFILES_DIRS = (
    os.path.join(BASE_DIR, '..', 'bucketlist', 'static'),
)
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# Serve gzipped, cache-busted static files via WhiteNoise.
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
|
andela-tadesanya/django-bucketlist-application
|
bucketlist_django/bucketlist_django/settings/production.py
|
Python
|
mit
| 932
|
"""
Centralized location for useful control associated functions and variables
2013
"""
from control.thrusters import thrusters, desires
def set_all_motors_from_seq(pwms, got_thrusters):
    """Apply pwms[i] to got_thrusters[i] in a single shm transaction.

    Builds one fresh desires group, writes every thruster's PWM into it,
    then commits the group once at the end.
    """
    group = desires.group()
    for position, thruster in enumerate(got_thrusters):
        thruster.update_shm_group(group, pwms[position])
    desires.set(group)
def set_all_motors(pwm):
    """Command every thruster to the same PWM value."""
    motors = thrusters()
    set_all_motors_from_seq([pwm for _ in motors], motors)
def zero_motors():
    """Command all thrusters to zero PWM (i.e. stop every motor)."""
    set_all_motors(0)
def set_shm_wrench(shm_group, wrench):
    """Write a 6-DOF wrench [f_x, f_y, f_z, t_x, t_y, t_z] into a shm group.

    `wrench` is indexed 0..5 in force-then-torque order; the populated group
    is committed with a single set() call.
    """
    group = shm_group.group()
    for index, field in enumerate(("f_x", "f_y", "f_z", "t_x", "t_y", "t_z")):
        setattr(group, field, wrench[index])
    shm_group.set(group)
class DOFSet(object):
    """ Ensures consistent ordering of our 6 DOFs """

    # Canonical position of each DOF within a 6-element sequence.
    forward_i = 0
    sway_i = 1
    depth_i = 2
    roll_i = 3
    pitch_i = 4
    yaw_i = 5
    # Marks which DOFs are rotational (torque) rather than translational.
    torque = [False, False, False, True, True, True]

    def __init__(self, l=None, f=0, s=0, d=0, r=0, p=0, y=0):
        """Build either from a 6-element sequence `l` (indexed with the
        *_i constants) or from the individual keyword values."""
        if l is not None:
            f = l[self.forward_i]
            s = l[self.sway_i]
            d = l[self.depth_i]
            r = l[self.roll_i]
            p = l[self.pitch_i]
            y = l[self.yaw_i]
        self.forward = f
        self.sway = s
        self.depth = d
        self.roll = r
        self.pitch = p
        self.yaw = y

    def __iter__(self):
        """Iterate the DOFs in canonical forward..yaw order."""
        ordered = [self.forward, self.sway, self.depth,
                   self.roll, self.pitch, self.yaw]
        return iter(ordered)
|
cuauv/software
|
control/util.py
|
Python
|
bsd-3-clause
| 1,581
|
"""Dummy module to create rax security groups"""
#!/usr/bin/env python
import pyrax
from ansible.module_utils.basic import *
uri_sgs = 'https://dfw.networks.api.rackspacecloud.com/v2.0/security-groups'
def get_sg(cnw, name):
    """Look up security groups by exact name.

    :param cnw: pyrax cloud_networks client used to issue the API call.
    :param name: security-group name to match exactly.
    :return: tuple ``(status_code, payload)`` where payload is a list of
        matching security-group dicts on success, an empty list on a non-200
        response, or an error dict when the request raises.
    """
    try:
        result, sgs = cnw.identity.method_get(uri_sgs)
        if result.status_code == 200:
            # list() keeps the payload a real list on Python 3 too, where
            # bare filter() would return a lazy, always-truthy iterator.
            sg_list = list(filter(lambda sg: sg['name'] == name,
                                  sgs['security_groups']))
            return result.status_code, sg_list
        # Bug fix: a non-200 response previously fell through and implicitly
        # returned None, crashing callers that unpack the result tuple.
        return result.status_code, []
    except Exception as e:
        # Not every exception type carries pyrax's .code/.message attributes;
        # fall back safely instead of raising AttributeError in the handler.
        code = getattr(e, 'code', 'ERROR')
        return code, {'status': code, 'message': getattr(e, 'message', str(e))}
def rax_security_group_present(data):
    """Ensure a security group named data['name'] exists.

    Returns the Ansible-style triple (is_error, has_changed, result).
    """
    name = data['name']
    description = data['description']
    cnw = pyrax.cloud_networks
    # If already exists, just return the first matching id
    result, sg_list = get_sg(cnw, name)
    # NOTE(review): on an API error get_sg returns a (truthy) error dict
    # here, so sg_list[0] below would raise KeyError -- worth hardening.
    if sg_list:
        return False, False, sg_list[0]
    data_json = {
        'security_group': {
            'name': name,
            'description' : description
        }
    }
    try:
        # 201 Created -> new group made; 422 -> validation failure, nothing
        # changed; anything else is reported as an error.
        result, sg = cnw.identity.method_post(uri_sgs, data=data_json)
        if result.status_code == 201:
            return False, True, result.json()['security_group']
        elif result.status_code == 422:
            return False, False, result.json()
        else:
            return True, False, {'status': result.status_code, 'data':
                result.json()}
    except Exception as e:
        return True, False, {'status': 'ERROR', 'data': e.message}
def rax_security_group_absent(data=None):
    """Delete every security group whose name matches data['name'].

    Returns the Ansible-style triple (is_error, has_changed, result).
    """
    cnw = pyrax.cloud_networks
    name = data['name']
    status_code, sg_list = get_sg(cnw, name)
    result = None
    for sg in sg_list:
        sg_id = sg['id']
        try:
            # NOTE(review): `result` only reflects the most recent delete
            # call; non-200 responses fall through and the loop continues.
            result, _ = cnw.identity.method_delete(uri_sgs + '/' + sg_id)
            if result.status_code == 200:
                continue
        except pyrax.exceptions.ClientException as e:
            # 409 Conflict: the group is attached to something and cannot be
            # removed. Other client errors are silently ignored and the loop
            # moves on to the next group.
            if e.code == 409:
                return True, False, {'status': 'ERROR',
                                     'security_group_id': sg_id,
                                     'data': 'Security group in use'
                                    }
        except Exception as e:
            return True, False, {'status': 'ERROR',
                                 'security_group_id': sg_id}
    if result:
        return False, True, {'status': 'deleted', 'deleted_security_groups':
            [sg['id'] for sg in sg_list]}
    else:
        return False, False, {'status': 'security group not found', 'security_groups':
            sg_list}
def main():
    """Entry point: parse Ansible arguments, configure pyrax, and dispatch
    on the requested state ('present' creates, 'absent' deletes)."""
    fields = {
        'name': {'required': True, 'type': 'str'},
        'description': {'required': False, 'type': 'str'},
        'region': {'required': True, 'type': 'str'},
        'state': {
            'default': 'present',
            'choices': ['present', 'absent'],
            'type': 'str'
        }
    }
    module = AnsibleModule(argument_spec=fields)

    # Authenticate against Rackspace before dispatching.
    pyrax.set_setting('identity_type', 'rackspace')
    pyrax.set_credential_file('rax.py')
    pyrax.set_setting('region', module.params['region'])

    handler = {
        'present': rax_security_group_present,
        'absent': rax_security_group_absent
    }.get(module.params['state'])
    is_error, has_changed, result = handler(module.params)

    if is_error:
        module.fail_json(msg='Error', security_group=result)
    else:
        module.exit_json(changed=has_changed, security_group=result)
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
|
xroot88/rax_ansible
|
library/rax_security_group.py
|
Python
|
apache-2.0
| 3,554
|
#!/bin/python
#Copyright (c) 2013, Regents of the University of California
#All rights reserved.
#
#Redistribution and use in source and binary forms, with or without
#modification, are permitted provided that the following conditions are met:
#
#1. Redistributions of source code must retain the above copyright notice,
#this list of conditions and the following disclaimer.
#
#2. Redistributions in binary form must reproduce the above copyright notice,
#this list of conditions and the following disclaimer in the documentation
#and/or other materials provided with the distribution.
#
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
#AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
#IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
#DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
#FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
#DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
#SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
#CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
#OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
#OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import vcf
import unittest
import StringIO
from test_helper import MAX_INDEL_LEN
sys.path.insert(0,'..')
from smashbenchmarking.vcf_eval.chrom_variants import Variant,VARIANT_TYPE,GENOTYPE_TYPE
from smashbenchmarking.vcf_eval.chrom_variants import *
from smashbenchmarking.vcf_eval.chrom_variants import _getOverlaps, _getRestOfPath
from smashbenchmarking.vcf_eval.variants import Variants
# Tests for Variant, the class that holds info from each VCF record
class VariantTestCase(unittest.TestCase):
    """Tests for Variant: gain/loss accounting and position-overlap queries."""
    def testSnpVariant(self):
        """A SNP substitutes one base, so it neither gains nor loses length."""
        snp = Variant(10,'A',['C'],VARIANT_TYPE.SNP,GENOTYPE_TYPE.HET)
        self.assertEqual(snp.gains,[0])
        self.assertEqual(snp.losses,[0])
    def testInsertionVariant(self):
        """An insertion gains bases and occupies only its anchor position."""
        ins = Variant(10,'A',['AAAA'],VARIANT_TYPE.INDEL_INS,GENOTYPE_TYPE.HET)
        self.assertEqual(ins.gains,[3])
        self.assertEqual(ins.losses,[0])
        self.assertFalse(ins.overlaps_allele(9))
        self.assertTrue(ins.overlaps_allele(10))
        self.assertFalse(ins.overlaps_allele(11))
    def testDeletionVariant(self):
        """A deletion loses bases and spans every deleted reference position."""
        deletion = Variant(10,'AAAA',['A'],VARIANT_TYPE.INDEL_DEL,GENOTYPE_TYPE.HET)
        self.assertEqual(deletion.gains,[-3])
        self.assertEqual(deletion.losses,[3])
        self.assertFalse(deletion.overlaps_allele(9))
        for inside in (10, 13):
            self.assertTrue(deletion.overlaps_allele(inside))
        self.assertFalse(deletion.overlaps_allele(14))
    def testOverlapsVariant(self):
        """strictly_overlaps_var is symmetric between two variants."""
        snp_inside = Variant(10,'A',['C'],VARIANT_TYPE.SNP,GENOTYPE_TYPE.HET)
        deletion = Variant(7,'AAAAAAAA',['A'],VARIANT_TYPE.INDEL_DEL,GENOTYPE_TYPE.HET)
        self.assertTrue(snp_inside.strictly_overlaps_var(deletion))
        self.assertTrue(deletion.strictly_overlaps_var(snp_inside))
        snp_outside = Variant(2,'C',['T'],VARIANT_TYPE.SNP,GENOTYPE_TYPE.HET)
        self.assertFalse(snp_outside.strictly_overlaps_var(deletion))
        self.assertFalse(deletion.strictly_overlaps_var(snp_outside))
#test ChromVariants class
class ChromVariantsTestCase(unittest.TestCase):
    """Tests for ChromVariants: adding/removing records and known-FP mode."""
    def testAddRecord(self):
        """Each added record lands in the right per-type location/dict bucket."""
        pred_str = """##fileformat=VCFv4.0\n
##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">\n
##source=TVsim\n
#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT NA00001
chr19 2 . A T 20 PASS . GT 0/1\n
chr19 5 . AT A 20 PASS . GT 1/1\n
chr19 10 . C CG 20 PASS . GT 0/1\n
chr19 20 . ATGC ACGT 20 PASS . GT 0/1\n
chr19 30 . AAAAAGAAAGGCATGACCTATCCACCCATGCCACCTGGATGGACCTCACAGGCACACTGCTTCATGAGAGAG A 20 PASS . GT 0/1
"""
        newChromVar = ChromVariants('chr19',MAX_INDEL_LEN)
        pred_io = StringIO.StringIO(pred_str)
        pred_vcf = vcf.Reader(pred_io)
        newChromVar.add_record(pred_vcf.next())
        self.assertTrue(2 in newChromVar.all_locations)
        self.assertEqual(newChromVar.snp_pos_dict[2].var_type,VARIANT_TYPE.SNP)
        self.assertFalse(newChromVar.indel_pos_dict)
        self.assertFalse(newChromVar.sv_pos_dict)
        newChromVar.add_record(pred_vcf.next())
        self.assertTrue(5 in newChromVar.all_locations)
        self.assertEqual(newChromVar.indel_pos_dict[5].var_type, VARIANT_TYPE.INDEL_DEL)
        newChromVar.add_record(pred_vcf.next())
        self.assertTrue(10 in newChromVar.all_locations)
        self.assertEqual(newChromVar.indel_pos_dict[10].var_type, VARIANT_TYPE.INDEL_INS)
        newChromVar.add_record(pred_vcf.next())
        self.assertTrue(20 in newChromVar.all_locations)
        self.assertEqual(newChromVar.indel_pos_dict[20].var_type, VARIANT_TYPE.INDEL_INV)
        newChromVar.add_record(pred_vcf.next())
        self.assertTrue(30 in newChromVar.all_locations)
        self.assertEqual(newChromVar.sv_pos_dict[30].var_type, VARIANT_TYPE.SV_DEL)
        #all indels/sv (del,ins,oth) live in one bucket
        self.assertEqual(len(newChromVar._var_dict(VARIANT_TYPE.INDEL_DEL)),len(newChromVar._var_dict(VARIANT_TYPE.INDEL_INS)))
        self.assertEqual(len(newChromVar._var_dict(VARIANT_TYPE.SV_DEL)),len(newChromVar._var_dict(VARIANT_TYPE.SV_INS)))
    def testAddRecordNoSample(self):
        """A record with no sample/genotype column is ignored entirely."""
        vcf_str = """##fileformat=VCFv4.0\n
#CHROM POS ID REF ALT QUAL FILTER INFO \n
chr19 2 . A T 20 PASS . \n
"""
        newChromVar = ChromVariants('chr19',MAX_INDEL_LEN)
        test_vcf = vcf.Reader(StringIO.StringIO(vcf_str))
        newChromVar.add_record(test_vcf.next())
        self.assertEqual(newChromVar.all_locations,[])
    def testRemoveRecord(self):
        """_remove_variant drops a location from every index it appears in."""
        pred_str = """##fileformat=VCFv4.0\n
##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">\n
##source=TVsim\n
#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT NA00001
chr19 1 . C G 20 PASS . GT 1/1\n
chr19 2 . A T 20 PASS . GT 0/1\n
chr19 5 . AT A 20 PASS . GT 1/1\n
chr19 10 . C CG 20 PASS . GT 0/1\n
chr19 20 . ATGC ACGT 20 PASS . GT 0/1\n
chr19 30 . AAAAAGAAAGGCATGACCTATCCACCCATGCCACCTGGATGGACCTCACAGGCACACTGCTTCATGAGAGAG A 20 PASS . GT 0/1
"""
        newChromVar = ChromVariants('chr19',MAX_INDEL_LEN)
        pred_io = StringIO.StringIO(pred_str)
        pred_vcf = vcf.Reader(pred_io)
        for r in pred_vcf:
            newChromVar.add_record(r)
        self.assertEqual(len(newChromVar.all_variants),6)
        self.assertEqual(len(newChromVar.all_locations),6)
        self.assertEqual(len(newChromVar._var_locations[VARIANT_TYPE.SNP]),2)
        self.assertEqual(len(newChromVar._var_dict(VARIANT_TYPE.SNP)),2)
        self.assertEqual(len(newChromVar._var_locations[VARIANT_TYPE.INDEL_DEL]),1)
        self.assertEqual(len(newChromVar._var_locations[VARIANT_TYPE.INDEL_INS]),1)
        # note that _var_dict holds all types of indels in one bucket, etc
        self.assertEqual(len(newChromVar._var_dict(VARIANT_TYPE.INDEL_INS)),3)
        self.assertEqual(len(newChromVar._var_locations[VARIANT_TYPE.SV_DEL]),1)
        self.assertEqual(len(newChromVar._var_dict(VARIANT_TYPE.SV_DEL)),1)
        newChromVar._remove_variant(10)
        self.assertEqual(len(newChromVar.all_variants),5)
        self.assertEqual(len(newChromVar.all_locations),5)
        self.assertEqual(len(newChromVar._var_locations[VARIANT_TYPE.SNP]),2)
        self.assertEqual(len(newChromVar._var_dict(VARIANT_TYPE.SNP)),2)
        self.assertEqual(len(newChromVar._var_locations[VARIANT_TYPE.INDEL_DEL]),1)
        self.assertEqual(len(newChromVar._var_locations[VARIANT_TYPE.INDEL_INS]),0)
        self.assertEqual(len(newChromVar._var_dict(VARIANT_TYPE.INDEL_INS)),2)
        self.assertEqual(len(newChromVar._var_locations[VARIANT_TYPE.SV_DEL]),1)
        self.assertEqual(len(newChromVar._var_dict(VARIANT_TYPE.SV_DEL)),1)
    def testKnownFalsePositives(self):
        """With knownFP=True a homozygous-ref record is kept (ALT is None)."""
        vcf_str = """##fileformat=VCFv4.0\n
##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">\n
#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT NA00001\n
chr1 7 . G . 20 PASS . GT 0/0\n
"""
        newChromVar = ChromVariants('chr1',MAX_INDEL_LEN,knownFP=True)
        vcf_io = StringIO.StringIO(vcf_str)
        vcfr = vcf.Reader(vcf_io)
        for r in vcfr:
            newChromVar.add_record(r)
        self.assertEqual(newChromVar.all_locations,[7])
        var = newChromVar.all_variants[7]
        self.assertEqual(var.ref,'G')
        self.assertEqual(var.alt[0], None)
# Tests for miscellaneous module-level helper methods
class ChromVariantHelperMethodsTestCase(unittest.TestCase):
    """Tests for helpers: range extraction, overlap grouping, path
    enumeration and inversion detection."""
    def testExtractRange(self):
        """extract_range keeps sorted locations within [low, high)."""
        new_range = extract_range([8,9,10,11,20,80],10,20)
        self.assertEqual(new_range[0],10)
        self.assertEqual(new_range[-1],11)
    def testExtractRangeAndFilter(self):
        """SVs and variants overlapping the location of interest are dropped."""
        pred_str = """##fileformat=VCFv4.0\n
##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">\n
##source=TVsim\n
#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT NA00001\n
chr19 2 . T G 20 PASS . GT 0/1\n
chr19 10 . A C 20 PASS . GT 1/1\n
chr19 13 . A ACT 20 PASS . GT 1/1\n
chr19 15 . A T 20 PASS . GT 0/1\n
chr19 18 . AAAAAGAAAGGCATGACCTATCCACCCATGCCACCTGGATGGACCTCACAGGCACACTGCTTCATGAGAGAG A 20 PASS . GT 0/1
"""
        pred_io = StringIO.StringIO(pred_str)
        pred_vcf = vcf.Reader(pred_io)
        pred_vars = Variants(pred_vcf, MAX_INDEL_LEN)
        variants_in_window = extract_range_and_filter(pred_vars.on_chrom('chr19'),10,20,13)
        self.assertEqual(len(variants_in_window),3)
        #SV is removed
        self.assertFalse(any(map(lambda v: v.var_type.startswith("SV"), variants_in_window)))
        #variant overlapping with variant at location of interest is removed
        self.assertFalse(any(map(lambda v: v.pos == 2,variants_in_window)))
    def testGetOverlaps(self):
        """_getOverlaps groups mutually overlapping variants together."""
        pred_str = """##fileformat=VCFv4.0\n
##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">\n
##source=TVsim\n
#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT NA00001\n
chr19 10 . ACT A 20 PASS . GT 1/1\n
chr19 13 . AC A 20 PASS . GT 1/1\n
chr19 14 . TAGG TA 20 PASS . GT 1/1\n
chr19 15 . AGG A 20 PASS . GT 0/1\n
chr19 19 . T TAAAC 20 PASS . GT 0/1
"""
        pred_io = StringIO.StringIO(pred_str)
        pred_vcf = vcf.Reader(pred_io)
        pred_vars = Variants(pred_vcf,MAX_INDEL_LEN)
        variants_in_window = extract_range_and_filter(pred_vars.on_chrom('chr19'),10,20,10)
        #the three overlapping variants should be in same group
        overlaps = _getOverlaps([], variants_in_window)
        self.assertEqual(len(overlaps),3)
        self.assertEqual(map(lambda o: len(o),overlaps),[1,3,1])
    def testGetRestOfPaths(self):
        """_getRestOfPath enumerates alternative non-overlapping variant paths."""
        pred_str = """##fileformat=VCFv4.0\n
##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">\n
##source=TVsim\n
#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT NA00001\n
chr19 11 . ACT A 20 PASS . GT 1/1\n
chr19 15 . ACGATT AA 20 PASS . GT 1/1\n
chr19 16 . ACG A 20 PASS . GT 1/1\n
chr19 22 . ATT A 20 PASS . GT 0/1\n
"""
        pred_io = StringIO.StringIO(pred_str)
        pred_vcf = vcf.Reader(pred_io)
        pred_vars = Variants(pred_vcf,MAX_INDEL_LEN)
        viw = extract_range_and_filter(pred_vars.on_chrom('chr19'),10,25,11)
        paths = _getRestOfPath([], _getOverlaps([],viw))
        #all paths take variants at pos 11 and 22; one takes pos 15, one pos 16
        self.assertEqual(len(paths),2)
        self.assertEqual(len(paths[0]),3)
        self.assertEqual(len(paths[1]),3)
        self.assertTrue(all(map(lambda e: any(map(lambda x: x.pos == 11, e)), paths)))
        self.assertTrue(all(map(lambda e: any(map(lambda x: x.pos == 22, e)), paths)))
        self.assertTrue(any(map(lambda x: x.pos == 15, paths[0])))
        self.assertFalse(any(map(lambda x: x.pos == 16, paths[0])))
        self.assertFalse(any(map(lambda x: x.pos == 15, paths[1])))
        self.assertTrue(any(map(lambda x: x.pos == 16, paths[1])))
    def testIsInversion(self):
        """is_inversion detects reversed-sequence ALTs, with or without a
        leading anchor base; a plain deletion is not an inversion."""
        pred_str = """##fileformat=VCFv4.0\n
##FORMAT=<ID=GT,Number=1,Type=String,Description="Genotype">\n
#CHROM POS ID REF ALT QUAL FILTER INFO FORMAT NA00001\n
chr19 11 . ACT TCA 20 PASS . GT 1/1\n
chr19 15 . ACGATT ATTAGC 20 PASS . GT 1/1\n
chr19 16 . ACG A 20 PASS . GT 1/1\n
"""
        # NOTE(review): the double assignment is redundant; vcf_file is never
        # used again.
        vcfr = vcf_file = vcf.Reader(StringIO.StringIO(pred_str))
        self.assertTrue(is_inversion(vcfr.next(),MAX_INDEL_LEN)) # inversion with no leading base
        self.assertTrue(is_inversion(vcfr.next(),MAX_INDEL_LEN)) # inversion with leading base
        self.assertFalse(is_inversion(vcfr.next(),MAX_INDEL_LEN)) # deletions are not inversions
# Allow running this test module directly: python chrom_variants.py
if __name__ == '__main__':
    unittest.main()
|
amplab/smash
|
test/chrom_variants.py
|
Python
|
bsd-2-clause
| 14,153
|
# -*- coding: utf-'8' "-*-"
import base64
try:
import simplejson as json
except ImportError:
import json
import logging
import urlparse
import werkzeug.urls
import urllib2
from openerp.addons.payment.models.payment_acquirer import ValidationError
from openerp.addons.payment_mercadopago.controllers.main import MercadoPagoController
from openerp.osv import osv, fields
from openerp.tools.float_utils import float_compare
from openerp import SUPERUSER_ID
_logger = logging.getLogger(__name__)
from openerp.addons.payment_mercadopago.mercadopago import mercadopago
class AcquirerMercadopago(osv.Model):
    # Extends payment.acquirer with the MercadoPago provider: builds a
    # checkout "preference" per transaction and points the payment form at
    # the MercadoPago-hosted pay page.
    _inherit = 'payment.acquirer'

    def _get_mercadopago_urls(self, cr, uid, environment, context=None):
        """ MercadoPago URLS """
        # 'prod' -> live checkout/OAuth endpoints; anything else -> sandbox.
        if environment == 'prod':
            return {
                #https://www.mercadopago.com/mla/checkout/pay?pref_id=153438434-6eb25e49-1bb8-4553-95b2-36033be216ad
                #'mercadopago_form_url': 'https://www.paypal.com/cgi-bin/webscr',
                'mercadopago_form_url': 'https://www.mercadopago.com/mla/checkout/pay',
                'mercadopago_rest_url': 'https://api.mercadolibre.com/oauth/token',
            }
        else:
            return {
                #'mercadopago_form_url': 'https://www.sandbox.paypal.com/cgi-bin/webscr',
                #https://api.mercadolibre.com/oauth/token
                'mercadopago_form_url': 'https://sandbox.mercadopago.com/mla/checkout/pay',
                'mercadopago_rest_url': 'https://api.sandbox.mercadolibre.com/oauth/token',
            }
    def _get_providers(self, cr, uid, context=None):
        # Advertise 'mercadopago' in the acquirer provider selection list.
        providers = super(AcquirerMercadopago, self)._get_providers(cr, uid, context=context)
        providers.append(['mercadopago', 'MercadoPago'])
        # NOTE(review): stray debug print; prefer _logger.debug here.
        print "_get_providers: ", providers
        return providers
    _columns = {
        'mercadopago_client_id': fields.char('MercadoPago Client Id',256,required_if_provider='mercadopago'),
        'mercadopago_secret_key': fields.char('MercadoPago Secret Key',256,required_if_provider='mercadopago'),
        'mercadopago_email_account': fields.char('MercadoPago Email ID', required_if_provider='mercadopago'),
        'mercadopago_seller_account': fields.char(
            'MercadoPago Merchant ID',
            help='The Merchant ID is used to ensure communications coming from MercadoPago are valid and secured.'),
        'mercadopago_use_ipn': fields.boolean('Use IPN', help='MercadoPago Instant Payment Notification'),
        # Server 2 server
        'mercadopago_api_enabled': fields.boolean('Use Rest API'),
        'mercadopago_api_username': fields.char('Rest API Username'),
        'mercadopago_api_password': fields.char('Rest API Password'),
        'mercadopago_api_access_token': fields.char('Access Token'),
        'mercadopago_api_access_token_validity': fields.datetime('Access Token Validity'),
    }
    _defaults = {
        'mercadopago_use_ipn': True,
        'fees_active': False,
        'fees_dom_fixed': 0.35,
        'fees_dom_var': 3.4,
        'fees_int_fixed': 0.35,
        'fees_int_var': 3.9,
        'mercadopago_api_enabled': False,
    }
    def _migrate_mercadopago_account(self, cr, uid, context=None):
        """ COMPLETE ME """
        # Copies the legacy res.company.mercadopago_account value onto an
        # existing 'mercadopago' acquirer for each company, or creates a new
        # acquirer when the company has none.
        #cr.execute('SELECT id, mercadopago_account FROM res_company')
        #res = cr.fetchall()
        company_ids = self.pool.get( "res.company" ).search(cr,uid,[])
        for company in self.pool.get('res.company').browse(cr,uid,company_ids):
            company_id = company.id
            company_mercadopago_account = company.mercadopago_account
            #for (company_id, company_mercadopago_account) in res:
            if company_mercadopago_account:
                company_mercadopago_ids = self.search(cr, uid, [('company_id', '=', company_id), ('provider', '=', 'mercadopago')], limit=1, context=context)
                if company_mercadopago_ids:
                    self.write(cr, uid, company_mercadopago_ids, {'mercadopago_email_account': company_mercadopago_account}, context=context)
                else:
                    mercadopago_view = self.pool['ir.model.data'].get_object(cr, uid, 'payment_mercadopago', 'mercadopago_acquirer_button')
                    self.create(cr, uid, {
                        'name': 'MercadoPago',
                        'provider': 'mercadopago',
                        'mercadopago_email_account': company_mercadopago_account,
                        'view_template_id': mercadopago_view.id,
                    }, context=context)
        return True
    def mercadopago_compute_fees(self, cr, uid, id, amount, currency_id, country_id, context=None):
        """ Compute mercadopago fees.
        :param float amount: the amount to pay
        :param integer country_id: an ID of a res.country, or None. This is
            the customer's country, to be compared to
            the acquirer company country.
        :return float fees: computed fees
        """
        acquirer = self.browse(cr, uid, id, context=context)
        if not acquirer.fees_active:
            return 0.0
        # Domestic rates apply when the customer and the acquirer's company
        # share a country; international rates otherwise.
        country = self.pool['res.country'].browse(cr, uid, country_id, context=context)
        if country and acquirer.company_id.country_id.id == country.id:
            percentage = acquirer.fees_dom_var
            fixed = acquirer.fees_dom_fixed
        else:
            percentage = acquirer.fees_int_var
            fixed = acquirer.fees_int_fixed
        # Gross-up formula: the fee percentage is charged on (amount + fee).
        fees = (percentage / 100.0 * amount + fixed ) / (1 - percentage / 100.0)
        return fees
    def mercadopago_form_generate_values(self, cr, uid, id, partner_values, tx_values, context=None):
        """Create a MercadoPago checkout preference for this transaction and
        return (partner_values, tx_values extended with its 'pref_id')."""
        base_url = self.pool['ir.config_parameter'].get_param(cr, SUPERUSER_ID, 'web.base.url')
        acquirer = self.browse(cr, uid, id, context=context)
        # NOTE(review): debug prints throughout this method should be
        # _logger.debug calls.
        print "mercadopago_form_generate_values: tx_values: ", tx_values
        print "partner_values:", partner_values
        MPago = False
        MPagoPrefId = False
        if acquirer.mercadopago_client_id and acquirer.mercadopago_secret_key:
            MPago = mercadopago.MP( acquirer.mercadopago_client_id, acquirer.mercadopago_secret_key )
            print "MPago: ", MPago
        else:
            error_msg = 'YOU MUST COMPLETE acquirer.mercadopago_client_id and acquirer.mercadopago_secret_key'
            _logger.error(error_msg)
            raise ValidationError(error_msg)
        jsondump = ""
        if MPago:
            if acquirer.environment=="prod":
                MPago.sandbox_mode(False)
            else:
                MPago.sandbox_mode(True)
            MPagoToken = MPago.get_access_token()
            preference = {
                "items": [
                    {
                        "title": "Orden Ecommerce "+ tx_values["reference"] ,
                        #"picture_url": "https://www.mercadopago.com/org-img/MP3/home/logomp3.gif",
                        "quantity": 1,
                        "currency_id": tx_values['currency'] and tx_values['currency'].name or '',
                        "unit_price": tx_values["amount"],
                        #"category_id": "Categoría",
                    }
                ]
                ,
                "payer": {
                    # NOTE(review): "surname" is filled from first_name while
                    # "name" gets the full name -- looks swapped; confirm
                    # against the partner_values producer.
                    "name": partner_values["name"],
                    "surname": partner_values["first_name"],
                    "email": partner_values["email"],
#                    "date_created": "2015-01-29T11:51:49.570-04:00",
#                    "phone": {
#                        "area_code": "+5411",
#                        "number": partner_values["phone"]
#                    },
#                    "identification": {
#                        "type": "DNI",
#                        "number": "12345678"
#                    },
#                    "address": {
#                        "street_name": partner_values["address"],
#                        "street_number": "",
#                        "zip_code": partner_values["zip"]
#                    } contni
                },
                "back_urls": {
                    "success": '%s' % urlparse.urljoin( base_url, MercadoPagoController._return_url),
                    "failure": '%s' % urlparse.urljoin( base_url, MercadoPagoController._cancel_url),
                    "pending": '%s' % urlparse.urljoin( base_url, MercadoPagoController._return_url)
                },
                "auto_return": "approved",
#                "payment_methods": {
#                    "excluded_payment_methods": [
#                        {
#                            "id": "amex"
#                        }
#                    ],
#                    "excluded_payment_types": [
#                        {
#                            "id": "ticket"
#                        }
#                    ],
#                    "installments": 24,
#                    "default_payment_method_id": '',
#                    "default_installments": '',
#                },
#                "shipments": {
#                    "receiver_address":
#                    {
#                        "zip_code": "1430",
#                        "street_number": 123,
#                        "street_name": "Calle Trece",
#                        "floor": 4,
#                        "apartment": "C"
#                    }
#                },
                "notification_url": '%s' % urlparse.urljoin( base_url, MercadoPagoController._notify_url),
                "external_reference": tx_values["reference"],
                # NOTE(review): the expiration window below is hard-coded to
                # Jan/Feb 2015 while "expires" is True, so preferences created
                # today are already expired; these dates should be derived
                # from the current date instead.
                "expires": True,
                "expiration_date_from": "2015-01-29T11:51:49.570-04:00",
                "expiration_date_to": "2015-02-28T11:51:49.570-04:00"
            }
            print "preference:", preference
            preferenceResult = MPago.create_preference(preference)
            print "preferenceResult: ", preferenceResult
            if 'response' in preferenceResult:
                if 'id' in preferenceResult['response']:
                    MPagoPrefId = preferenceResult['response']['id']
                else:
                    error_msg = 'Returning response is:'
                    error_msg+= json.dumps(preferenceResult, indent=2)
                    _logger.error(error_msg)
                    raise ValidationError(error_msg)
                if acquirer.environment=="prod":
                    linkpay = preferenceResult['response']['init_point']
                else:
                    linkpay = preferenceResult['response']['sandbox_init_point']
                jsondump = json.dumps( preferenceResult, indent=2 )
        # NOTE(review): if the API reply had no 'response' key, linkpay is
        # never assigned and the print below raises NameError -- confirm.
        print "linkpay:", linkpay
        print "jsondump:", jsondump
        print "MPagoPrefId: ", MPagoPrefId
        print "MPagoToken: ", MPagoToken
        mercadopago_tx_values = dict(tx_values)
        if MPagoPrefId:
            mercadopago_tx_values.update({
                'pref_id': MPagoPrefId,
#                'cmd': '_xclick',
#                'business': acquirer.mercadopago_email_account,
#                'item_name': tx_values['reference'],
#                'item_number': tx_values['reference'],
#                'amount': tx_values['amount'],
#                'currency_code': tx_values['currency'] and tx_values['currency'].name or '',
#                'address1': partner_values['address'],
#                'city': partner_values['city'],
#                'country': partner_values['country'] and partner_values['country'].name or '',
#                'state': partner_values['state'] and partner_values['state'].name or '',
#                'email': partner_values['email'],
#                'zip': partner_values['zip'],
#                'first_name': partner_values['first_name'],
#                'last_name': partner_values['last_name'],
#                'return': '%s' % urlparse.urljoin(base_url, MercadoPagoController._return_url),
#                'notify_url': '%s' % urlparse.urljoin(base_url, MercadoPagoController._notify_url),
#                'cancel_return': '%s' % urlparse.urljoin(base_url, MercadoPagoController._cancel_url),
            })
#        if acquirer.fees_active:
#            mercadopago_tx_values['handling'] = '%.2f' % mercadopago_tx_values.pop('fees', 0.0)
#        if mercadopago_tx_values.get('return_url'):
#            mercadopago_tx_values['custom'] = json.dumps({'return_url': '%s' % mercadopago_tx_values.pop('return_url')})
        return partner_values, mercadopago_tx_values
    def mercadopago_get_form_action_url(self, cr, uid, id, context=None):
        """Return the checkout URL the payment form should redirect to."""
        acquirer = self.browse(cr, uid, id, context=context)
        mercadopago_urls = self._get_mercadopago_urls(cr, uid, acquirer.environment, context=context)['mercadopago_form_url']
#        mercadopago_urls = mercadopago_urls + "?pref_id=" +
        print "mercadopago_get_form_action_url: ", mercadopago_urls
        return mercadopago_urls
    def _mercadopago_s2s_get_access_token(self, cr, uid, ids, context=None):
        """
        Note: see # see http://stackoverflow.com/questions/2407126/python-urllib2-basic-auth-problem
        for explanation why we use Authorization header instead of urllib2
        password manager
        """
        res = dict.fromkeys(ids, False)
        # NOTE(review): the file imports werkzeug.urls; `werkzeug.url_encode`
        # only exists as a top-level alias on some Werkzeug versions --
        # confirm, or use werkzeug.urls.url_encode.
        parameters = werkzeug.url_encode({'grant_type': 'client_credentials'})
        for acquirer in self.browse(cr, uid, ids, context=context):
            tx_url = self._get_mercadopago_urls(cr, uid, acquirer.environment)['mercadopago_rest_url']
            request = urllib2.Request(tx_url, parameters)
            # add other headers (https://developer.paypal.com/webapps/developer/docs/integration/direct/make-your-first-call/)
            request.add_header('Accept', 'application/json')
            request.add_header('Accept-Language', 'en_US')
            # add authorization header
            base64string = base64.encodestring('%s:%s' % (
                acquirer.mercadopago_api_username,
                acquirer.mercadopago_api_password)
            ).replace('\n', '')
            request.add_header("Authorization", "Basic %s" % base64string)
            request = urllib2.urlopen(request)
            result = request.read()
            res[acquirer.id] = json.loads(result).get('access_token')
            request.close()
        return res
class TxMercadoPago(osv.Model):
    """``payment.transaction`` extension implementing the MercadoPago flows.

    Covers the form/notification feedback handlers used by the website
    checkout, plus the experimental (PayPal-derived) server-to-server
    helpers.
    """
    _inherit = 'payment.transaction'

    _columns = {
        # Remote payment id assigned by MercadoPago (s2s flow).
        'mercadopago_txn_id': fields.char('Transaction ID'),
        # 'payment_type' reported in the form feedback.
        'mercadopago_txn_type': fields.char('Transaction type'),
    }

    # --------------------------------------------------
    # FORM RELATED METHODS
    # --------------------------------------------------

    def _mercadopago_form_get_tx_from_data(self, cr, uid, data, context=None):
        """Return the single transaction matching the MercadoPago feedback.

        :param dict data: feedback parameters; must contain
            ``external_reference`` (our transaction reference) and
            ``collection_id``.
        :raise ValidationError: when either value is missing, or when zero
            or several transactions match the reference.
        """
        reference, collection_id = data.get('external_reference'), data.get('collection_id')
        if not reference or not collection_id:
            error_msg = 'MercadoPago: received data with missing reference (%s) or collection_id (%s)' % (reference, collection_id)
            _logger.error(error_msg)
            raise ValidationError(error_msg)
        # find tx -> @TDENOTE use txn_id ?
        tx_ids = self.pool['payment.transaction'].search(cr, uid, [('reference', '=', reference)], context=context)
        if not tx_ids or len(tx_ids) > 1:
            error_msg = 'MercadoPago: received data for reference %s' % (reference)
            if not tx_ids:
                error_msg += '; no order found'
            else:
                error_msg += '; multiple order found'
            _logger.error(error_msg)
            raise ValidationError(error_msg)
        return self.browse(cr, uid, tx_ids[0], context=context)

    def _mercadopago_form_get_invalid_parameters(self, cr, uid, tx, data, context=None):
        """Return (name, received, expected) tuples for feedback values
        that contradict the stored transaction.

        NOTE(review): the amount/currency/payer/receiver cross-checks are
        not implemented yet (the PayPal acquirer shows the intended ones),
        so any notification for a known reference is currently accepted.
        """
        invalid_parameters = []
        _logger.warning('Received a notification from MercadoLibre.')
        # TODO: validate txn_id, amount (+ fees), currency, payer and
        # receiver against the transaction, as the PayPal acquirer does.
        return invalid_parameters

    # MercadoPago payment statuses, from
    # https://developers.mercadopago.com/documentacion/notificaciones-de-pago
    #   approved      payment approved and credited
    #   pending       the user did not complete the payment process
    #   in_process    the payment is under review
    #   rejected      payment rejected; the user may retry
    #   refunded      (terminal) payment returned to the user
    #   cancelled     (terminal) cancelled after timeout or by one party
    #   in_mediation  a dispute was opened for the payment
    #   charged_back  (terminal) chargeback on the credit card
    def _mercadopago_form_validate(self, cr, uid, tx, data, context=None):
        """Update ``tx`` state from the MercadoPago form feedback.

        Called by ``PaymentTransaction.form_feedback()`` as
        ``%s_form_validate``; returns the result of ``tx.write()``.
        """
        status = data.get('collection_status')
        # Build the write-values in a separate dict. The previous code
        # rebound `data` and then read 'payment_date'/'pending_reason'/
        # 'cancel_reason' from the *new* dict, where they can never exist.
        values = {
            'acquirer_reference': data.get('external_reference'),
            'mercadopago_txn_type': data.get('payment_type'),
        }
        if status in ['approved', 'processed']:
            _logger.info('Validated MercadoPago payment for tx %s: set as done' % (tx.reference))
            values.update(state='done', date_validate=data.get('payment_date', fields.datetime.now()))
            return tx.write(values)
        elif status in ['pending', 'in_process', 'in_mediation']:
            _logger.info('Received notification for MercadoPago payment %s: set as pending' % (tx.reference))
            values.update(state='pending', state_message=data.get('pending_reason', ''))
            return tx.write(values)
        elif status in ['cancelled', 'refunded', 'charged_back', 'rejected']:
            _logger.info('Received notification for MercadoPago payment %s: set as cancelled' % (tx.reference))
            values.update(state='cancel', state_message=data.get('cancel_reason', ''))
            return tx.write(values)
        else:
            error = 'Received unrecognized status for MercadoPago payment %s: %s, set as error' % (tx.reference, status)
            _logger.info(error)
            values.update(state='error', state_message=error)
            return tx.write(values)

    # --------------------------------------------------
    # SERVER2SERVER RELATED METHODS
    # --------------------------------------------------

    def _mercadopago_try_url(self, request, tries=3, context=None):
        """Contact MercadoPago, retrying on internal service errors.

        :param request: a ``urllib2.Request`` ready to be opened.
        :param int tries: maximum number of attempts.
        :return: the response body string, or the last error body when all
            attempts failed.

        .. versionadded:: pre-v8 saas-3
        .. warning::
            Experimental code. You should not use it before OpenERP v8
            official release.
        """
        done, res = False, None
        while (not done and tries):
            try:
                res = urllib2.urlopen(request)
                done = True
            except urllib2.HTTPError as e:
                res = e.read()
                e.close()
                if tries and res and json.loads(res)['name'] == 'INTERNAL_SERVICE_ERROR':
                    _logger.warning('Failed contacting MercadoPago, retrying (%s remaining)' % tries)
                tries = tries - 1
        if done:
            result = res.read()
            res.close()
        else:
            # Every attempt failed: `res` holds the last HTTP error body (a
            # string, possibly None), not a response object. The previous
            # code crashed here calling .read() on it.
            result = res
        return result

    def _mercadopago_s2s_send(self, cr, uid, values, cc_values, context=None):
        """Create a transaction and submit it through the s2s API.

        :param dict values: values used to create the ``payment.transaction``.
        :param dict cc_values: optional credit card data (``number``,
            ``brand``, ``expiry_mm``, ``expiry_yy``, ``cvc``); when falsy, a
            redirect payment is requested instead.
        :return: ``(tx_id, raw response body)``.

        .. versionadded:: pre-v8 saas-3
        .. warning::
            Experimental code. You should not use it before OpenERP v8
            official release.
        """
        tx_id = self.create(cr, uid, values, context=context)
        tx = self.browse(cr, uid, tx_id, context=context)
        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Bearer %s' % tx.acquirer_id._mercadopago_s2s_get_access_token()[tx.acquirer_id.id],
        }
        data = {
            'intent': 'sale',
            'transactions': [{
                'amount': {
                    'total': '%.2f' % tx.amount,
                    'currency': tx.currency_id.name,
                },
                'description': tx.reference,
            }]
        }
        if cc_values:
            data['payer'] = {
                'payment_method': 'credit_card',
                'funding_instruments': [{
                    'credit_card': {
                        'number': cc_values['number'],
                        'type': cc_values['brand'],
                        'expire_month': cc_values['expiry_mm'],
                        'expire_year': cc_values['expiry_yy'],
                        'cvv2': cc_values['cvc'],
                        'first_name': tx.partner_name,
                        'last_name': tx.partner_name,
                        'billing_address': {
                            'line1': tx.partner_address,
                            'city': tx.partner_city,
                            'country_code': tx.partner_country_id.code,
                            'postal_code': tx.partner_zip,
                        }
                    }
                }]
            }
        else:
            # TODO: complete redirect URLs.
            # (A stray trailing comma previously made this a 1-tuple
            # containing the dict instead of a dict.)
            data['redirect_urls'] = {
                # 'return_url': 'http://example.com/your_redirect_url/',
                # 'cancel_url': 'http://example.com/your_cancel_url/',
            }
            data['payer'] = {
                'payment_method': 'mercadopago',
            }
        data = json.dumps(data)
        # NOTE(review): still points at the PayPal sandbox endpoint this code
        # was copied from -- must be switched to the MercadoPago API.
        request = urllib2.Request('https://api.sandbox.paypal.com/v1/payments/payment', data, headers)
        result = self._mercadopago_try_url(request, tries=3, context=context)
        return (tx_id, result)

    def _mercadopago_s2s_get_invalid_parameters(self, cr, uid, tx, data, context=None):
        """Return invalid parameters for an s2s feedback (none checked yet).

        .. versionadded:: pre-v8 saas-3
        .. warning::
            Experimental code. You should not use it before OpenERP v8
            official release.
        """
        invalid_parameters = []
        return invalid_parameters

    def _mercadopago_s2s_validate(self, cr, uid, tx, data, context=None):
        """Update ``tx`` from a raw s2s JSON payload.

        :param data: JSON string of the remote payment document.
        :return: True for done/pending states, False otherwise.

        .. versionadded:: pre-v8 saas-3
        .. warning::
            Experimental code. You should not use it before OpenERP v8
            official release.
        """
        values = json.loads(data)
        status = values.get('state')
        if status in ['approved']:
            _logger.info('Validated Mercadopago s2s payment for tx %s: set as done' % (tx.reference))
            tx.write({
                'state': 'done',
                # NOTE(review): 'udpate_time' looks like a typo for the API's
                # 'update_time' -- confirm against a real response before
                # changing, the fallback to now() masks it.
                'date_validate': values.get('udpate_time', fields.datetime.now()),
                'mercadopago_txn_id': values['id'],
            })
            return True
        elif status in ['pending', 'expired']:
            _logger.info('Received notification for MercadoPago s2s payment %s: set as pending' % (tx.reference))
            tx.write({
                'state': 'pending',
                'mercadopago_txn_id': values['id'],
            })
            return True
        else:
            error = 'Received unrecognized status for MercadoPago s2s payment %s: %s, set as error' % (tx.reference, status)
            _logger.info(error)
            tx.write({
                'state': 'error',
                'mercadopago_txn_id': values['id'],
            })
            return False

    def _mercadopago_s2s_get_tx_status(self, cr, uid, tx, context=None):
        """Fetch the remote payment document and feed it back to the tx.

        .. versionadded:: pre-v8 saas-3
        .. warning::
            Experimental code. You should not use it before OpenERP v8
            official release.
        """
        # TDETODO: check tx.mercadopago_txn_id is set
        headers = {
            'Content-Type': 'application/json',
            'Authorization': 'Bearer %s' % tx.acquirer_id._mercadopago_s2s_get_access_token()[tx.acquirer_id.id],
        }
        # NOTE(review): PayPal sandbox endpoint left over from the code this
        # was derived from -- must be switched to the MercadoPago API.
        url = 'https://api.sandbox.paypal.com/v1/payments/payment/%s' % (tx.mercadopago_txn_id)
        request = urllib2.Request(url, headers=headers)
        data = self._mercadopago_try_url(request, tries=3, context=context)
        return self.s2s_feedback(cr, uid, tx.id, data, context=context)
|
Trust-Code/payment_mercadopago
|
models/mercadopago.py
|
Python
|
gpl-2.0
| 25,619
|
from django.test import TestCase
from django.db import models, DEFAULT_DB_ALIAS
from django.db.models import signals
from django.core import management
from django.core.exceptions import FieldError
from django.contrib.contenttypes.models import ContentType
from models import MyPerson, Person, StatusPerson, LowerStatusPerson
from models import MyPersonProxy, Abstract, OtherPerson, User, UserProxy
from models import UserProxyProxy, Country, State, StateProxy, TrackerUser
from models import BaseUser, Bug, ProxyTrackerUser, Improvement, ProxyProxyBug
from models import ProxyBug, ProxyImprovement
class ProxyModelTests(TestCase):
    """Exercise Django proxy-model behaviour.

    Covers SQL equivalence with the proxied model, manager inheritance,
    proxy-of-proxy chains, exception ancestry, proxy-class construction
    constraints, signals, content types, select_related traversal and
    fixture loading. The models used here live in ``models.py``.
    """
    def test_same_manager_queries(self):
        """
        The MyPerson model should be generating the same database queries as
        the Person model (when the same manager is used in each case).
        """
        my_person_sql = MyPerson.other.all().query.get_compiler(
            DEFAULT_DB_ALIAS).as_sql()
        person_sql = Person.objects.order_by("name").query.get_compiler(
            DEFAULT_DB_ALIAS).as_sql()
        self.assertEqual(my_person_sql, person_sql)
    def test_inheretance_new_table(self):
        """
        The StatusPerson models should have its own table (it's using ORM-level
        inheritance).
        """
        sp_sql = StatusPerson.objects.all().query.get_compiler(
            DEFAULT_DB_ALIAS).as_sql()
        p_sql = Person.objects.all().query.get_compiler(
            DEFAULT_DB_ALIAS).as_sql()
        self.assertNotEqual(sp_sql, p_sql)
    def test_basic_proxy(self):
        """
        Creating a Person makes them accessible through the MyPerson proxy.
        """
        person = Person.objects.create(name="Foo McBar")
        self.assertEqual(len(Person.objects.all()), 1)
        self.assertEqual(len(MyPerson.objects.all()), 1)
        self.assertEqual(MyPerson.objects.get(name="Foo McBar").id, person.id)
        self.assertFalse(MyPerson.objects.get(id=person.id).has_special_name())
    def test_no_proxy(self):
        """
        Person is not proxied by StatusPerson subclass.
        """
        Person.objects.create(name="Foo McBar")
        self.assertEqual(list(StatusPerson.objects.all()), [])
    def test_basic_proxy_reverse(self):
        """
        A new MyPerson also shows up as a standard Person.
        """
        MyPerson.objects.create(name="Bazza del Frob")
        self.assertEqual(len(MyPerson.objects.all()), 1)
        self.assertEqual(len(Person.objects.all()), 1)
        LowerStatusPerson.objects.create(status="low", name="homer")
        lsps = [lsp.name for lsp in LowerStatusPerson.objects.all()]
        self.assertEqual(lsps, ["homer"])
    def test_correct_type_proxy_of_proxy(self):
        """
        Correct type when querying a proxy of proxy
        """
        Person.objects.create(name="Foo McBar")
        MyPerson.objects.create(name="Bazza del Frob")
        LowerStatusPerson.objects.create(status="low", name="homer")
        pp = sorted([mpp.name for mpp in MyPersonProxy.objects.all()])
        self.assertEqual(pp, ['Bazza del Frob', 'Foo McBar', 'homer'])
    def test_proxy_included_in_ancestors(self):
        """
        Proxy models are included in the ancestors for a model's DoesNotExist
        and MultipleObjectsReturned
        """
        Person.objects.create(name="Foo McBar")
        MyPerson.objects.create(name="Bazza del Frob")
        LowerStatusPerson.objects.create(status="low", name="homer")
        max_id = Person.objects.aggregate(max_id=models.Max('id'))['max_id']
        self.assertRaises(Person.DoesNotExist,
            MyPersonProxy.objects.get,
            name='Zathras'
        )
        # id__lt=max_id+1 matches every row, so .get() must complain about
        # multiple results with the *proxied* model's exception class.
        self.assertRaises(Person.MultipleObjectsReturned,
            MyPersonProxy.objects.get,
            id__lt=max_id+1
        )
        self.assertRaises(Person.DoesNotExist,
            StatusPerson.objects.get,
            name='Zathras'
        )
        sp1 = StatusPerson.objects.create(name='Bazza Jr.')
        sp2 = StatusPerson.objects.create(name='Foo Jr.')
        max_id = Person.objects.aggregate(max_id=models.Max('id'))['max_id']
        self.assertRaises(Person.MultipleObjectsReturned,
            StatusPerson.objects.get,
            id__lt=max_id+1
        )
    def test_abc(self):
        """
        All base classes must be non-abstract
        """
        def build_abc():
            class NoAbstract(Abstract):
                class Meta:
                    proxy = True
        self.assertRaises(TypeError, build_abc)
    def test_no_cbc(self):
        """
        The proxy must actually have one concrete base class
        """
        def build_no_cbc():
            class TooManyBases(Person, Abstract):
                class Meta:
                    proxy = True
        self.assertRaises(TypeError, build_no_cbc)
    def test_no_base_classes(self):
        """A proxy model with no base class at all is rejected."""
        def build_no_base_classes():
            class NoBaseClasses(models.Model):
                class Meta:
                    proxy = True
        self.assertRaises(TypeError, build_no_base_classes)
    def test_new_fields(self):
        """A proxy model may not declare new fields."""
        def build_new_fields():
            class NoNewFields(Person):
                newfield = models.BooleanField()
                class Meta:
                    proxy = True
        self.assertRaises(FieldError, build_new_fields)
    def test_myperson_manager(self):
        """MyPerson's custom default manager filters the queryset
        (it appears to exclude "wilma" -- see the manager in models.py)."""
        Person.objects.create(name="fred")
        Person.objects.create(name="wilma")
        Person.objects.create(name="barney")
        resp = [p.name for p in MyPerson.objects.all()]
        self.assertEqual(resp, ['barney', 'fred'])
        resp = [p.name for p in MyPerson._default_manager.all()]
        self.assertEqual(resp, ['barney', 'fred'])
    def test_otherperson_manager(self):
        """OtherPerson exposes two managers with different filters; the
        first declared one is the default manager."""
        Person.objects.create(name="fred")
        Person.objects.create(name="wilma")
        Person.objects.create(name="barney")
        resp = [p.name for p in OtherPerson.objects.all()]
        self.assertEqual(resp, ['barney', 'wilma'])
        resp = [p.name for p in OtherPerson.excluder.all()]
        self.assertEqual(resp, ['barney', 'fred'])
        resp = [p.name for p in OtherPerson._default_manager.all()]
        self.assertEqual(resp, ['barney', 'wilma'])
    def test_proxy_model_signals(self):
        """
        Test save signals for proxy models
        """
        output = []
        def make_handler(model, event):
            # Closure recording which (model, event) pair fired.
            def _handler(*args, **kwargs):
                output.append('%s %s save' % (model, event))
            return _handler
        h1 = make_handler('MyPerson', 'pre')
        h2 = make_handler('MyPerson', 'post')
        h3 = make_handler('Person', 'pre')
        h4 = make_handler('Person', 'post')
        signals.pre_save.connect(h1, sender=MyPerson)
        signals.post_save.connect(h2, sender=MyPerson)
        signals.pre_save.connect(h3, sender=Person)
        signals.post_save.connect(h4, sender=Person)
        # Saving through the proxy fires only the proxy's handlers, not the
        # concrete model's.
        dino = MyPerson.objects.create(name=u"dino")
        self.assertEqual(output, [
            'MyPerson pre save',
            'MyPerson post save'
        ])
        output = []
        h5 = make_handler('MyPersonProxy', 'pre')
        h6 = make_handler('MyPersonProxy', 'post')
        signals.pre_save.connect(h5, sender=MyPersonProxy)
        signals.post_save.connect(h6, sender=MyPersonProxy)
        dino = MyPersonProxy.objects.create(name=u"pebbles")
        self.assertEqual(output, [
            'MyPersonProxy pre save',
            'MyPersonProxy post save'
        ])
        signals.pre_save.disconnect(h1, sender=MyPerson)
        signals.post_save.disconnect(h2, sender=MyPerson)
        signals.pre_save.disconnect(h3, sender=Person)
        signals.post_save.disconnect(h4, sender=Person)
        signals.pre_save.disconnect(h5, sender=MyPersonProxy)
        signals.post_save.disconnect(h6, sender=MyPersonProxy)
    def test_content_type(self):
        """A proxy shares its concrete model's ContentType instance."""
        ctype = ContentType.objects.get_for_model
        self.assertTrue(ctype(Person) is ctype(OtherPerson))
    def test_user_userproxy_userproxyproxy(self):
        """Rows are visible through a proxy and a proxy-of-proxy alike."""
        User.objects.create(name='Bruce')
        resp = [u.name for u in User.objects.all()]
        self.assertEqual(resp, ['Bruce'])
        resp = [u.name for u in UserProxy.objects.all()]
        self.assertEqual(resp, ['Bruce'])
        resp = [u.name for u in UserProxyProxy.objects.all()]
        self.assertEqual(resp, ['Bruce'])
    def test_proxy_delete(self):
        """
        Proxy objects can be deleted
        """
        User.objects.create(name='Bruce')
        u2 = UserProxy.objects.create(name='George')
        resp = [u.name for u in UserProxy.objects.all()]
        self.assertEqual(resp, ['Bruce', 'George'])
        u2.delete()
        resp = [u.name for u in UserProxy.objects.all()]
        self.assertEqual(resp, ['Bruce'])
    def test_select_related(self):
        """
        We can still use `select_related()` to include related models in our
        querysets.
        """
        country = Country.objects.create(name='Australia')
        state = State.objects.create(name='New South Wales', country=country)
        resp = [s.name for s in State.objects.select_related()]
        self.assertEqual(resp, ['New South Wales'])
        resp = [s.name for s in StateProxy.objects.select_related()]
        self.assertEqual(resp, ['New South Wales'])
        self.assertEqual(StateProxy.objects.get(name='New South Wales').name,
            'New South Wales')
        resp = StateProxy.objects.select_related().get(name='New South Wales')
        self.assertEqual(resp.name, 'New South Wales')
    def test_proxy_bug(self):
        """Regression checks: filtering and select_related work through
        proxy FKs and proxy-of-proxy FKs."""
        contributor = TrackerUser.objects.create(name='Contributor',
            status='contrib')
        someone = BaseUser.objects.create(name='Someone')
        Bug.objects.create(summary='fix this', version='1.1beta',
            assignee=contributor, reporter=someone)
        pcontributor = ProxyTrackerUser.objects.create(name='OtherContributor',
            status='proxy')
        Improvement.objects.create(summary='improve that', version='1.1beta',
            assignee=contributor, reporter=pcontributor,
            associated_bug=ProxyProxyBug.objects.all()[0])
        # Related field filter on proxy
        resp = ProxyBug.objects.get(version__icontains='beta')
        self.assertEqual(repr(resp), '<ProxyBug: ProxyBug:fix this>')
        # Select related + filter on proxy
        resp = ProxyBug.objects.select_related().get(version__icontains='beta')
        self.assertEqual(repr(resp), '<ProxyBug: ProxyBug:fix this>')
        # Proxy of proxy, select_related + filter
        resp = ProxyProxyBug.objects.select_related().get(
            version__icontains='beta'
        )
        self.assertEqual(repr(resp), '<ProxyProxyBug: ProxyProxyBug:fix this>')
        # Select related + filter on a related proxy field
        resp = ProxyImprovement.objects.select_related().get(
            reporter__name__icontains='butor'
        )
        self.assertEqual(repr(resp),
            '<ProxyImprovement: ProxyImprovement:improve that>'
        )
        # Select related + filter on a related proxy of proxy field
        resp = ProxyImprovement.objects.select_related().get(
            associated_bug__summary__icontains='fix'
        )
        self.assertEqual(repr(resp),
            '<ProxyImprovement: ProxyImprovement:improve that>'
        )
    def test_proxy_load_from_fixture(self):
        """Fixture rows load correctly when queried through a proxy."""
        management.call_command('loaddata', 'mypeople.json', verbosity=0, commit=False)
        p = MyPerson.objects.get(pk=100)
        self.assertEqual(p.name, 'Elvis Presley')
|
mzdaniel/oh-mainline
|
vendor/packages/Django/tests/modeltests/proxy_models/tests.py
|
Python
|
agpl-3.0
| 11,673
|
"""
Module providing easy API for working with remote files and folders.
"""
from __future__ import with_statement
import hashlib
import tempfile
import re
import os
from six import string_types, BytesIO as StringIO
from fabric.api import *
from fabric.utils import apply_lcwd
def exists(path, use_sudo=False, verbose=False):
    """
    Return True if given path exists on the current remote host.

    If ``use_sudo`` is True, will use `sudo` instead of `run`.

    `exists` will, by default, hide all output (including the run line, stdout,
    stderr and any warning resulting from the file not existing) in order to
    avoid cluttering output. You may specify ``verbose=True`` to change this
    behavior.
    """
    func = sudo if use_sudo else run
    cmd = 'test -e %s' % _expand_path(path)
    # Single code path: hide everything unless the caller asked for verbose
    # output (the two branches previously duplicated the settings/run logic).
    managers = [] if verbose else [hide('everything')]
    with settings(*managers, warn_only=True):
        return not func(cmd).failed
def is_link(path, use_sudo=False, verbose=False):
    """
    Return True if the given path is a symlink on the current remote host.

    If ``use_sudo`` is True, will use `.sudo` instead of `.run`.

    `.is_link` will, by default, hide all output. Give ``verbose=True`` to
    change this.
    """
    func = sudo if use_sudo else run
    cmd = 'test -L "$(echo %s)"' % path
    args, kwargs = [], {'warn_only': True}
    if not verbose:
        # Bug fix: the hide() context was previously assigned to an unused
        # `opts` local and never passed to settings(), so output was always
        # shown and ``verbose`` had no effect.
        args = [hide('everything')]
    with settings(*args, **kwargs):
        return func(cmd).succeeded
def first(*args, **kwargs):
    """
    Given one or more file paths, returns first one found, or None if none
    exist. May specify ``use_sudo`` and ``verbose`` which are passed to
    `exists`.
    """
    for candidate in args:
        if exists(candidate, **kwargs):
            return candidate
    return None
def upload_template(filename, destination, context=None, use_jinja=False,
    template_dir=None, use_sudo=False, backup=True, mirror_local_mode=False,
    mode=None):
    """
    Render and upload a template text file to a remote host.

    Returns the result of the inner call to `~fabric.operations.put` -- see its
    documentation for details.

    ``filename`` should be the path to a text file, which may contain `Python
    string interpolation formatting
    <http://docs.python.org/library/stdtypes.html#string-formatting>`_ and will
    be rendered with the given context dictionary ``context`` (if given.)

    Alternately, if ``use_jinja`` is set to True and you have the Jinja2
    templating library available, Jinja will be used to render the template
    instead. Templates will be loaded from the invoking user's current working
    directory by default, or from ``template_dir`` if given.

    The resulting rendered file will be uploaded to the remote file path
    ``destination``.  If the destination file already exists, it will be
    renamed with a ``.bak`` extension unless ``backup=False`` is specified.

    By default, the file will be copied to ``destination`` as the logged-in
    user; specify ``use_sudo=True`` to use `sudo` instead.

    The ``mirror_local_mode`` and ``mode`` kwargs are passed directly to an
    internal `~fabric.operations.put` call; please see its documentation for
    details on these two options.

    .. versionchanged:: 1.1
        Added the ``backup``, ``mirror_local_mode`` and ``mode`` kwargs.
    """
    func = use_sudo and sudo or run
    # Normalize destination to be an actual filename, due to using StringIO
    with settings(hide('everything'), warn_only=True):
        if func('test -d %s' % _expand_path(destination)).succeeded:
            sep = "" if destination.endswith('/') else "/"
            destination += sep + os.path.basename(filename)
    # Use mode kwarg to implement mirror_local_mode, again due to using
    # StringIO
    if mirror_local_mode and mode is None:
        mode = os.stat(filename).st_mode
        # To prevent put() from trying to do this
        # logic itself
        mirror_local_mode = False
    # Process template
    text = None
    if use_jinja:
        try:
            # Jinja templates resolve relative to template_dir (or the local
            # cwd), honoring any fabric `lcd()` context via apply_lcwd.
            template_dir = template_dir or os.getcwd()
            template_dir = apply_lcwd(template_dir, env)
            from jinja2 import Environment, FileSystemLoader
            jenv = Environment(loader=FileSystemLoader(template_dir))
            text = jenv.get_template(filename).render(**context or {})
            # Force to a byte representation of Unicode, or str()ification
            # within Paramiko's SFTP machinery may cause decode issues for
            # truly non-ASCII characters.
            text = text.encode('utf-8')
        except ImportError:
            import traceback
            tb = traceback.format_exc()
            abort(tb + "\nUnable to import Jinja2 -- see above.")
    else:
        # Plain %-interpolation path: read the local file and substitute.
        filename = apply_lcwd(filename, env)
        with open(os.path.expanduser(filename)) as inputfile:
            text = inputfile.read()
        if context:
            text = text % context
    # Back up original file
    if backup and exists(destination):
        func("cp %s{,.bak}" % _expand_path(destination))
    if isinstance(text, string_types):
        text = text.encode('utf-8')
    # Upload the file.
    return put(
        local_path=StringIO(text),
        remote_path=destination,
        use_sudo=use_sudo,
        mirror_local_mode=mirror_local_mode,
        mode=mode
    )
def sed(filename, before, after, limit='', use_sudo=False, backup='.bak',
        flags='', shell=False):
    """
    Run a search-and-replace on ``filename`` with given regex patterns.

    Equivalent to ``sed -i<backup> -r -e "/<limit>/ s/<before>/<after>/<flags>g"
    <filename>``. Setting ``backup`` to an empty string will disable backup
    file creation.

    For convenience, ``before`` and ``after`` will automatically escape forward
    slashes, single quotes and parentheses for you, so you don't need to
    specify e.g. ``http:\/\/foo\.com``, instead just using ``http://foo\.com``
    is fine.

    If ``use_sudo`` is True, will use `sudo` instead of `run`.

    The ``shell`` argument will be eventually passed to `run`/`sudo`. It
    defaults to False in order to avoid problems with many nested levels of
    quotes and backslashes. However, setting it to True may help when using
    ``~fabric.operations.cd`` to wrap explicit or implicit ``sudo`` calls.
    (``cd`` by its nature is a shell built-in, not a standalone command, so it
    should be called within a shell.)

    Other options may be specified with sed-compatible regex flags -- for
    example, to make the search and replace case insensitive, specify
    ``flags="i"``. The ``g`` flag is always specified regardless, so you do not
    need to remember to include it when overriding this parameter.

    .. versionadded:: 1.1
        The ``flags`` parameter.
    .. versionadded:: 1.6
        Added the ``shell`` keyword argument.
    """
    func = use_sudo and sudo or run
    # Characters to be escaped in both
    for char in "/'":
        before = before.replace(char, r'\%s' % char)
        after = after.replace(char, r'\%s' % char)
    # Characters to be escaped in replacement only (they're useful in regexen
    # in the 'before' part)
    for char in "()":
        after = after.replace(char, r'\%s' % char)
    if limit:
        limit = r'/%s/ ' % limit
    # Values interpolated into the final sed command line below.
    context = {
        'script': r"'%ss/%s/%s/%sg'" % (limit, before, after, flags),
        'filename': _expand_path(filename),
        'backup': backup
    }
    # Test the OS because of differences between sed versions
    with hide('running', 'stdout'):
        platform = run("uname")
    if platform in ('NetBSD', 'OpenBSD', 'QNX'):
        # Attempt to protect against failures/collisions
        hasher = hashlib.sha1()
        hasher.update(env.host_string)
        hasher.update(filename)
        context['tmp'] = "/tmp/%s" % hasher.hexdigest()
        # Use temp file to work around lack of -i
        expr = r"""cp -p %(filename)s %(tmp)s \
&& sed -r -e %(script)s %(filename)s > %(tmp)s \
&& cp -p %(filename)s %(filename)s%(backup)s \
&& mv %(tmp)s %(filename)s"""
    else:
        # Extended-regex flag differs: -E on Darwin/BSD sed, -r elsewhere.
        context['extended_regex'] = '-E' if platform == 'Darwin' else '-r'
        expr = r"sed -i%(backup)s %(extended_regex)s -e %(script)s %(filename)s"
    command = expr % context
    return func(command, shell=shell)
def uncomment(filename, regex, use_sudo=False, char='#', backup='.bak',
        shell=False):
    """
    Attempt to uncomment all lines in ``filename`` matching ``regex``.

    The default comment delimiter is `#` and may be overridden by the ``char``
    argument.

    This function uses the `sed` function, and will accept the same
    ``use_sudo``, ``shell`` and ``backup`` keyword arguments that `sed` does.

    `uncomment` will remove a single whitespace character following the comment
    character, if it exists, but will preserve all preceding whitespace. For
    example, ``# foo`` would become ``foo`` (the single space is stripped) but
    ``    # foo`` would become ``    foo`` (the single space is still stripped,
    but the preceding 4 spaces are not.)

    .. versionchanged:: 1.6
        Added the ``shell`` keyword argument.
    """
    # Capture the indentation, then consume the comment char plus at most one
    # following whitespace character; the replacement keeps only the capture.
    pattern = r'^([[:space:]]*)%s[[:space:]]?' % char
    return sed(
        filename,
        before=pattern,
        after=r'\1',
        limit=regex,
        use_sudo=use_sudo,
        backup=backup,
        shell=shell
    )
def comment(filename, regex, use_sudo=False, char='#', backup='.bak',
        shell=False):
    """
    Attempt to comment out all lines in ``filename`` matching ``regex``.

    The default commenting character is `#` and may be overridden by the
    ``char`` argument.

    This function uses the `sed` function, and will accept the same
    ``use_sudo``, ``shell`` and ``backup`` keyword arguments that `sed` does.

    `comment` will prepend the comment character to the beginning of the line,
    so that lines end up looking like so::

        this line is uncommented
        #this line is commented
        #   this line is indented and commented

    In other words, comment characters will not "follow" indentation as they
    sometimes do when inserted by hand. Neither will they have a trailing space
    unless you specify e.g. ``char='# '``.

    .. note::
        In order to preserve the line being commented out, this function will
        wrap your ``regex`` argument in parentheses, so you don't need to. It
        will ensure that any preceding/trailing ``^`` or ``$`` characters are
        correctly moved outside the parentheses. For example, calling
        ``comment(filename, r'^foo$')`` will result in a `sed` call with the
        "before" regex of ``r'^(foo)$'`` (and the "after" regex, naturally, of
        ``r'#\\1'``.)

    .. versionadded:: 1.5
        Added the ``shell`` keyword argument.
    """
    # Peel off anchors so the capture group wraps only the line body; they
    # are re-attached outside the parentheses afterwards.
    caret = '^' if regex.startswith('^') else ''
    dollar = '$' if regex.endswith('$') else ''
    body = regex
    if caret:
        body = body[1:]
    if dollar:
        body = body[:-1]
    anchored = "%s(%s)%s" % (caret, body, dollar)
    return sed(
        filename,
        before=anchored,
        after=r'%s\1' % char,
        use_sudo=use_sudo,
        backup=backup,
        shell=shell
    )
def contains(filename, text, exact=False, use_sudo=False, escape=True,
        shell=False):
    """
    Return True if ``filename`` contains ``text`` (which may be a regex.)

    By default a partial line match counts (``text`` may make up only part of
    the line it's on). Specify ``exact=True`` so that only a line containing
    exactly ``text`` results in a True return value.

    This function leverages ``egrep`` on the remote end (so it may not follow
    Python regular expression syntax perfectly), and skips ``env.shell``
    wrapper by default.

    If ``use_sudo`` is True, will use `sudo` instead of `run`.

    If ``escape`` is False, no extra regular expression related escaping is
    performed (this includes overriding ``exact`` so that no ``^``/``$`` is
    added.)

    The ``shell`` argument will be eventually passed to ``run/sudo``. See
    description of the same argument in ``~fabric.contrib.sed`` for details.

    .. versionchanged:: 1.0
        Swapped the order of the ``filename`` and ``text`` arguments to be
        consistent with other functions in this module.
    .. versionchanged:: 1.4
        Updated the regular expression related escaping to try and solve
        various corner cases.
    .. versionchanged:: 1.4
        Added ``escape`` keyword argument.
    .. versionadded:: 1.6
        Added the ``shell`` keyword argument.
    """
    runner = sudo if use_sudo else run
    pattern = text
    if escape:
        pattern = _escape_for_regex(pattern)
        # `exact` anchoring only applies when escaping is enabled.
        if exact:
            pattern = "^%s$" % pattern
    with settings(hide('everything'), warn_only=True):
        egrep_cmd = 'egrep "%s" %s' % (pattern, _expand_path(filename))
        return runner(egrep_cmd, shell=shell).succeeded
def append(filename, text, use_sudo=False, partial=False, escape=True,
        shell=False):
    """
    Append string (or list of strings) ``text`` to ``filename``.

    When a list is given, each string inside is handled independently (but in
    the order given.)

    If ``text`` is already found in ``filename``, the append is not run, and
    None is returned immediately. Otherwise, the given text is appended to the
    end of the given ``filename`` via e.g. ``echo '$text' >> $filename``.

    The test for whether ``text`` already exists defaults to a full line match,
    e.g. ``^<text>$``, as this seems to be the most sensible approach for the
    "append lines to a file" use case. You may override this and force partial
    searching (e.g. ``^<text>``) by specifying ``partial=True``.

    Because ``text`` is single-quoted, single quotes will be transparently
    backslash-escaped. This can be disabled with ``escape=False``.

    If ``use_sudo`` is True, will use `sudo` instead of `run`.

    The ``shell`` argument will be eventually passed to ``run/sudo``. See
    description of the same argument in ``~fabric.contrib.sed`` for details.

    .. versionchanged:: 0.9.1
        Added the ``partial`` keyword argument.
    .. versionchanged:: 1.0
        Swapped the order of the ``filename`` and ``text`` arguments to be
        consistent with other functions in this module.
    .. versionchanged:: 1.0
        Changed default value of ``partial`` kwarg to be ``False``.
    .. versionchanged:: 1.4
        Updated the regular expression related escaping to try and solve
        various corner cases.
    .. versionadded:: 1.6
        Added the ``shell`` keyword argument.
    """
    func = use_sudo and sudo or run
    # Normalize non-list input to be a list
    if isinstance(text, string_types):
        text = [text]
    for line in text:
        # Anchor at line start; also anchor the end unless partial matching.
        regex = '^' + _escape_for_regex(line) + ('' if partial else '$')
        # Skip the echo when the file exists and already contains the line.
        if (exists(filename, use_sudo=use_sudo) and line
            and contains(filename, regex, use_sudo=use_sudo, escape=False,
                         shell=shell)):
            continue
        # Escape single quotes so the quoted echo argument stays intact.
        line = line.replace("'", r"'\\''") if escape else line
        func("echo '%s' >> %s" % (line, _expand_path(filename)))
def _escape_for_regex(text):
"""Escape ``text`` to allow literal matching using egrep"""
regex = re.escape(text)
# Seems like double escaping is needed for \
regex = regex.replace('\\\\', '\\\\\\')
# Triple-escaping seems to be required for $ signs
regex = regex.replace(r'\$', r'\\\$')
# Whereas single quotes should not be escaped
regex = regex.replace(r"\'", "'")
return regex
def _expand_path(path):
return '"$(echo %s)"' % path
|
pashinin/fabric
|
fabric/contrib/files.py
|
Python
|
bsd-2-clause
| 15,835
|
# pylint: disable=no-self-use,invalid-name
import numpy
from numpy.testing import assert_almost_equal
import keras.backend as K
from keras.layers import Input, Masking
from keras.models import Model
from deep_qa.layers.backend import BatchDot
from deep_qa.layers.wrappers import OutputMask
from deep_qa.testing.test_case import DeepQaTestCase
class TestBatchDotLayer(DeepQaTestCase):
    """Tests for the BatchDot backend layer.

    Covers mask propagation (``compute_mask``) and the actual dot products
    and output shapes for operand pairs of equal and unequal rank.
    """

    def test_compute_mask_basic(self):
        """compute_mask combines the two input masks for equal-rank and
        unequal-rank operand pairs."""
        batch_size = 2
        # test the case where the tensors are even
        # tensor_a has shape (2, 3, 2), so mask_a has shape (2, 3)
        tensor_a = K.variable(numpy.random.randint(7, size=(batch_size, 3, 2)))
        mask_a = K.variable(numpy.array([[1, 0, 1], [1, 1, 0]]))
        # tensor_b has shape (2, 4, 2), so mask_b has shape (2, 4)
        tensor_b = K.variable(numpy.random.randint(7, size=(batch_size, 4, 2)))
        mask_b = K.variable(numpy.array([[0, 1, 1, 1], [1, 0, 1, 1]]))
        # a_dot_b would have shape (2, 3, 4), so mask of a_dot_b has shape (2, 3, 4)
        calculated_mask = K.eval(BatchDot().compute_mask([tensor_a, tensor_b],
                                                         [mask_a, mask_b]))
        assert_almost_equal(calculated_mask, numpy.array([[[0.0, 1.0, 1.0, 1.0],
                                                           [0.0, 0.0, 0.0, 0.0],
                                                           [0.0, 1.0, 1.0, 1.0]],
                                                          [[1.0, 0.0, 1.0, 1.0],
                                                           [1.0, 0.0, 1.0, 1.0],
                                                           [0.0, 0.0, 0.0, 0.0]]]))
        # test the case where tensor_a has less dimensions than tensor_b
        # tensor_a has shape (2, 4, 2), so mask_a has shape (2, 4)
        tensor_a = K.variable(numpy.random.randint(7, size=(batch_size, 4, 2)))
        mask_a = K.variable(numpy.array([[1, 0, 1, 0], [1, 1, 0, 0]]))
        # tensor_b has shape (2, 4, 3, 2), so mask_b has shape (2, 4, 3)
        tensor_b = K.variable(numpy.random.randint(7, size=(batch_size, 4, 3, 2)))
        mask_b = K.variable(numpy.array([[[1, 1, 1],
                                          [1, 1, 1],
                                          [1, 1, 0],
                                          [1, 0, 0]],
                                         [[1, 1, 1],
                                          [1, 1, 0],
                                          [1, 0, 0],
                                          [0, 0, 0]]]))
        # a_dot_b would have shape (2, 4, 3), so mask of a_dot_b has shape (2, 4, 3)
        calculated_mask = K.eval(BatchDot().compute_mask([tensor_a, tensor_b],
                                                         [mask_a, mask_b]))
        assert calculated_mask.shape == (batch_size, 4, 3)
        assert_almost_equal(calculated_mask, numpy.array([[[1.0, 1.0, 1.0],
                                                           [0.0, 0.0, 0.0],
                                                           [1.0, 1.0, 0.0],
                                                           [0.0, 0.0, 0.0]],
                                                          [[1.0, 1.0, 1.0],
                                                           [1.0, 1.0, 0.0],
                                                           [0.0, 0.0, 0.0],
                                                           [0.0, 0.0, 0.0]]]))
        # test the case where tensor_a has more dimensions than tensor_b
        # tensor_a has shape (2, 3, 4, 2), so mask_a has shape (2, 3, 4)
        tensor_a = K.variable(numpy.random.randint(7, size=(batch_size, 3, 4, 2)))
        mask_a = K.variable(numpy.array([[[1, 1, 1, 1],
                                          [1, 1, 1, 1],
                                          [1, 1, 0, 1]],
                                         [[1, 1, 1, 1],
                                          [1, 1, 0, 1],
                                          [1, 0, 0, 1]]]))
        # tensor_b has shape (2, 3, 2), so mask_b has shape (2, 3)
        tensor_b = K.variable(numpy.random.randint(7, size=(batch_size, 3, 2)))
        mask_b = K.variable(numpy.array([[1, 0, 1], [1, 1, 0]]))
        # a_dot_b would have shape (2, 3, 4), so mask of a_dot_b has shape (2, 3, 4)
        calculated_mask = K.eval(BatchDot().compute_mask([tensor_a, tensor_b],
                                                         [mask_a, mask_b]))
        assert calculated_mask.shape == (batch_size, 3, 4)
        assert_almost_equal(calculated_mask, numpy.array([[[1.0, 1.0, 1.0, 1.0],
                                                           [0.0, 0.0, 0.0, 0.0],
                                                           [1.0, 1.0, 0.0, 1.0]],
                                                          [[1.0, 1.0, 1.0, 1.0],
                                                           [1.0, 1.0, 0.0, 1.0],
                                                           [0.0, 0.0, 0.0, 0.0]]]))

    def test_a_smaller_than_b(self):
        """Dot of a (batch, 5) tensor with a (batch, 2, 5) tensor."""
        batch_size = 3
        tensor_a = numpy.random.randint(7, size=(batch_size, 5))
        tensor_b = numpy.random.randint(7, size=(batch_size, 2, 5))
        # Zero out some entries so the Masking(mask_value=0) layers below
        # mask them (the previous comment incorrectly said "set to 1").
        tensor_a[0] = 0
        tensor_b[0][1] = 0
        input_tensor_a = Input(shape=(5,))
        masked_tensor_a = Masking(mask_value=0)(input_tensor_a)
        input_tensor_b = Input(shape=(2, 5))
        masked_tensor_b = Masking(mask_value=0)(input_tensor_b)
        a_dot_b = BatchDot()([masked_tensor_a, masked_tensor_b])
        a_dot_b_mask = OutputMask()(a_dot_b)
        model = Model(inputs=[input_tensor_a, input_tensor_b],
                      outputs=[a_dot_b, a_dot_b_mask])
        # a_dot_b and mask_tensor are of shape (3, 2).
        a_dot_b_tensor, mask_tensor = model.predict([tensor_a, tensor_b])
        # Test that the dot happened like we expected.
        for i in range(batch_size):
            # each dot product should be of shape (2,)
            assert_almost_equal(a_dot_b_tensor[i],
                                numpy.einsum("i,mi->m", tensor_a[i], tensor_b[i]))
        # Check that the values in the output mask are 0 where the
        # values were zeroed out above.
        assert mask_tensor[0][0] == 0
        assert mask_tensor[0][1] == 0

    def test_a_larger_than_b(self):
        """Dot of a (batch, 2, 5) tensor with a (batch, 5) tensor."""
        batch_size = 3
        tensor_a = numpy.random.randint(7, size=(batch_size, 2, 5))
        tensor_b = numpy.random.randint(7, size=(batch_size, 5))
        # Zero out some entries so the Masking(mask_value=0) layers below
        # mask them (the previous comment incorrectly said "set to 1").
        tensor_a[0][1] = 0
        tensor_b[0] = 0
        input_tensor_a = Input(shape=(2, 5))
        masked_tensor_a = Masking(mask_value=0)(input_tensor_a)
        input_tensor_b = Input(shape=(5,))
        masked_tensor_b = Masking(mask_value=0)(input_tensor_b)
        a_dot_b = BatchDot()([masked_tensor_a, masked_tensor_b])
        a_dot_b_mask = OutputMask()(a_dot_b)
        model = Model(inputs=[input_tensor_a, input_tensor_b],
                      outputs=[a_dot_b, a_dot_b_mask])
        # a_dot_b and mask_tensor are of shape (3, 2).
        a_dot_b_tensor, mask_tensor = model.predict([tensor_a, tensor_b])
        # Test that the dot happened like we expected.
        for i in range(batch_size):
            # each dot product should be of shape (2,)
            assert_almost_equal(a_dot_b_tensor[i],
                                numpy.einsum("mi,i->m", tensor_a[i], tensor_b[i]))
        # Check that the values in the output mask are 0 where the
        # values were zeroed out above.
        assert mask_tensor[0][0] == 0
        assert mask_tensor[0][1] == 0

    def test_a_smaller_than_b_higher_dimension(self):
        """Dot of a (batch, 4, 5) tensor with a (batch, 4, 2, 5) tensor."""
        batch_size = 3
        tensor_a = numpy.random.randint(7, size=(batch_size, 4, 5))
        tensor_b = numpy.random.randint(7, size=(batch_size, 4, 2, 5))
        # Zero out some entries so the Masking(mask_value=0) layers below
        # mask them (the previous comment incorrectly said "set to 1").
        tensor_a[0][1] = 0
        tensor_a[1][3] = 0
        tensor_b[0][1][1] = 0
        tensor_b[0][2][1] = 0
        input_tensor_a = Input(shape=(4, 5))
        masked_tensor_a = Masking(mask_value=0)(input_tensor_a)
        input_tensor_b = Input(shape=(4, 2, 5))
        masked_tensor_b = Masking(mask_value=0)(input_tensor_b)
        a_dot_b = BatchDot()([masked_tensor_a, masked_tensor_b])
        a_dot_b_mask = OutputMask()(a_dot_b)
        model = Model(inputs=[input_tensor_a, input_tensor_b],
                      outputs=[a_dot_b, a_dot_b_mask])
        # a_dot_b and mask_tensor are of shape (3, 4, 2).
        a_dot_b_tensor, mask_tensor = model.predict([tensor_a, tensor_b])
        # Test that the dot happened like we expected.
        for i in range(batch_size):
            # each dot product should be of shape (4, 2)
            assert_almost_equal(a_dot_b_tensor[i],
                                numpy.einsum("ij,imj->im", tensor_a[i], tensor_b[i]))
        # Check that the values in the output mask are 0 where the
        # values were zeroed out above.
        assert mask_tensor[0][1][0] == 0
        assert mask_tensor[0][1][1] == 0
        assert mask_tensor[0][2][1] == 0
        assert mask_tensor[1][3][0] == 0
        assert mask_tensor[1][3][1] == 0

    def test_a_larger_than_b_higher_dimension(self):
        """Dot of a (batch, 4, 2, 5) tensor with a (batch, 4, 5) tensor."""
        batch_size = 3
        tensor_a = numpy.random.randint(7, size=(batch_size, 4, 2, 5))
        tensor_b = numpy.random.randint(7, size=(batch_size, 4, 5))
        # Zero out some entries so the Masking(mask_value=0) layers below
        # mask them (the previous comment incorrectly said "set to 1").
        tensor_a[0][1][1] = 0
        tensor_a[0][2][1] = 0
        tensor_b[0][1] = 0
        tensor_b[1][3] = 0
        input_tensor_a = Input(shape=(4, 2, 5))
        masked_tensor_a = Masking(mask_value=0)(input_tensor_a)
        input_tensor_b = Input(shape=(4, 5))
        masked_tensor_b = Masking(mask_value=0)(input_tensor_b)
        a_dot_b = BatchDot()([masked_tensor_a, masked_tensor_b])
        a_dot_b_mask = OutputMask()(a_dot_b)
        model = Model(inputs=[input_tensor_a, input_tensor_b],
                      outputs=[a_dot_b, a_dot_b_mask])
        # a_dot_b and mask_tensor are of shape (3, 4, 2).
        a_dot_b_tensor, mask_tensor = model.predict([tensor_a, tensor_b])
        # Test that the dot happened like we expected.
        for i in range(batch_size):
            # each dot product should be of shape (4, 2)
            assert_almost_equal(a_dot_b_tensor[i],
                                numpy.einsum("imj,ij->im", tensor_a[i], tensor_b[i]))
        # Check that the values in the output mask are 0 where the
        # values were zeroed out above.
        assert mask_tensor[0][1][0] == 0
        assert mask_tensor[0][1][1] == 0
        assert mask_tensor[0][2][1] == 0
        assert mask_tensor[1][3][0] == 0
        assert mask_tensor[1][3][1] == 0

    def test_output_shapes(self):
        """compute_output_shape agrees with the shape actually produced."""
        bd = BatchDot()
        a_shapes = [(5, 10), (1, 1, 1), (1, 5, 3), (1, 5, 4, 3), (1, 5, 3)]
        b_shapes = [(5, 10), (1, 1, 1), (1, 2, 3), (1, 5, 3), (1, 5, 4, 3)]
        expected_shapes = [(5, 1), (1, 1, 1), (1, 5, 2), (1, 5, 4), (1, 5, 4)]
        for a_shape, b_shape, expected_shape in zip(a_shapes, b_shapes, expected_shapes):
            assert K.eval(bd([K.ones(shape=a_shape), K.ones(shape=b_shape)])).shape == expected_shape
            assert bd.compute_output_shape([a_shape, b_shape]) == expected_shape
|
allenai/deep_qa
|
tests/layers/backend/batch_dot_test.py
|
Python
|
apache-2.0
| 11,778
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# rdiffweb, A web interface to rdiff-backup repositories
# Copyright (C) 2014 rdiffweb contributors
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import cherrypy
import logging
import page_main
import rdw_spider_repos
import email_notification
# Define the logger
logger = logging.getLogger(__name__)
class rdiffPreferencesPage(page_main.rdiffPage):
    """User preferences page.

    Handles three form actions: changing the password, re-scanning the
    user's repositories, and updating email-notification settings.
    """

    # Placeholder shown in the email field; treated as "no email" on save.
    sampleEmail = 'joe@example.com'

    @cherrypy.expose
    def index(self, action=u"", current=u"", new=u"", confirm=u"", **parms):
        """Render the preferences page and dispatch the submitted action.

        ``**parms`` collects the remaining notification form fields
        (``userEmail`` and the per-repository ``<repo>numDays`` selects).
        """
        params = {}
        # Process the parameters.
        if self._is_submit():
            try:
                if action == "set_password":
                    params = self._set_password(current, new, confirm)
                elif action == "update_repos":
                    params = self._update_repos()
                elif action == 'set_notifications':
                    # Bug fix: _setNotifications requires the form-field
                    # dict; it used to be called with no argument, which
                    # raised a TypeError on every submit.
                    params = self._setNotifications(parms)
            except ValueError as e:
                params['error'] = unicode(e)
            except Exception as e:
                logger.exception("unknown error processing action")
                params['error'] = unicode(e)
        # Get page params
        try:
            params.update(self._get_parms_for_page())
        except Exception as e:
            params['error'] = unicode(e)
        return self._writePage("prefs.html", **params)

    def _set_password(self, old_password, new_password, confirm_password):
        """Change the user's password; return a status dict for the page."""
        # Check if current database support it.
        if not self.getUserDB().is_modifiable():
            return {'error': """Password changing is not
                             supported with the active user
                             database."""}
        # Check if confirmation is valid.
        if new_password != confirm_password:
            return {'error': "The passwords do not match."}
        self.getUserDB().set_password(self.getUsername(),
                                      old_password,
                                      new_password)
        return {'success': "Password updated successfully."}

    def _update_repos(self):
        """Re-scan the filesystem for the user's repositories."""
        rdw_spider_repos.findReposForUser(self.getUsername(), self.getUserDB())
        return {'success': """Successfully updated repositories."""}

    def _setNotifications(self, parms):
        """Persist the email address and per-repository notification delays.

        Returns a status dict, consistent with the other action handlers
        (previously this returned via an undefined ``_writePrefsPage``).
        """
        if not self.getUserDB().is_modifiable():
            return {'error': """Email notification is not
                             supported with the active user
                             database."""}
        repos = self.getUserDB().get_repos(self.getUsername())
        for parmName in parms.keys():
            if parmName == "userEmail":
                # The sample address is only a placeholder, not a value.
                if parms[parmName] == self.sampleEmail:
                    parms[parmName] = ''
                self.getUserDB().set_email(
                    self.getUsername(), parms[parmName])
            if parmName.endswith("numDays"):
                backupName = parmName[:-7]
                if backupName in repos:
                    if parms[parmName] == "Don't notify":
                        maxDays = 0
                    else:
                        # Option strings look like "3 days"; the first
                        # character carries the day count (1..7).
                        maxDays = int(parms[parmName][0])
                    self.getUserDB().set_repo_maxage(
                        self.getUsername(), backupName, maxDays)
        return {'success': """Successfully changed
                           notification settings."""}

    def _get_parms_for_page(self):
        """Build the template parameters: email, repositories and the
        notification-delay option lists for each repository."""
        email = self.getUserDB().get_email(self.getUsername())
        parms = {
            "userEmail": email,
            "notificationsEnabled": False,
            "backups": [],
            "sampleEmail": self.sampleEmail
        }
        if email_notification.emailNotifier().notificationsEnabled():
            repos = self.getUserDB().get_repos(self.getUsername())
            backups = []
            for repo in repos:
                maxAge = self.getUserDB().get_repo_maxage(
                    self.getUsername(), repo)
                notifyOptions = []
                for i in range(0, 8):
                    notifyStr = "Don't notify"
                    if i == 1:
                        notifyStr = "1 day"
                    elif i > 1:
                        notifyStr = str(i) + " days"
                    selectedStr = ""
                    if i == maxAge:
                        selectedStr = "selected"
                    notifyOptions.append(
                        {"optionStr": notifyStr, "selectedStr": selectedStr})
                backups.append(
                    {"backupName": repo, "notifyOptions": notifyOptions})
            parms.update({"notificationsEnabled": True, "backups": backups})
        return parms
|
sbellver/rdiffweb
|
rdiffweb/page_prefs.py
|
Python
|
gpl-3.0
| 5,423
|
from activitystreams import parse as as_parser
from dino.config import ApiActions
from dino.config import ErrorCodes
from dino.config import SessionKeys
from dino.validation import request
from test.base import BaseTest
class RequestListRoomsTest(BaseTest):
    """Validation tests for the ``list_rooms`` request: the basic success
    path, a missing actor id, missing channel id, and channel ACLs for
    gender and spoken language."""

    def test_list_rooms_status_code_true(self):
        self.assert_in_room(False)
        self.create_and_join_room()
        self.assert_in_room(True)
        response_data = request.on_list_rooms(as_parser(self.activity_for_list_rooms()))
        self.assertEqual(True, response_data[0])

    def test_list_rooms_no_actor_id_status_code_false(self):
        # NOTE(review): the test name says "false" but the assertion
        # expects True — validation apparently tolerates a missing actor
        # id; confirm whether the name or the assertion is the intent.
        self.assert_in_room(False)
        self.create_and_join_room()
        self.assert_in_room(True)
        activity = self.activity_for_list_rooms()
        del activity['actor']['id']
        response_data = request.on_list_rooms(as_parser(activity))
        self.assertEqual(True, response_data[0])

    def test_list_rooms_not_allowed(self):
        self.assert_in_room(False)
        self.set_channel_acl({ApiActions.LIST: {'gender': 'm'}})
        activity = self.activity_for_list_rooms()
        is_valid, code, msg = request.on_list_rooms(as_parser(activity))
        self.assertFalse(is_valid)
        self.assertEqual(code, ErrorCodes.NOT_ALLOWED)

    # The following cases drive _test_spoken_language with
    # (expected outcome, channel ACL languages, user's languages).

    def test_list_rooms_spoken_country_none(self):
        self._test_spoken_language(False, 'de', None)

    def test_list_rooms_spoken_country_empty(self):
        self._test_spoken_language(False, 'de', '')

    def test_list_rooms_spoken_country_wrong_single(self):
        self._test_spoken_language(False, 'de', 'en')

    def test_list_rooms_spoken_country_wrong_multi(self):
        self._test_spoken_language(False, 'de', 'en,es')

    def test_list_rooms_spoken_country_allows_multi_user_none(self):
        self._test_spoken_language(False, 'de,en', None)

    def test_list_rooms_spoken_country_allows_multi_user_empty(self):
        self._test_spoken_language(False, 'de,en', '')

    def test_list_rooms_spoken_country_allows_multi_user_not_matching(self):
        self._test_spoken_language(False, 'de,en', 'es')

    def test_list_rooms_spoken_country_allows_multi_user_multi_none_matching(self):
        self._test_spoken_language(False, 'de,en', 'es,sv')

    def test_list_rooms_spoken_country_allows_multi_user_multi_none_matching_trailing(self):
        self._test_spoken_language(False, 'de,en', 'es,sv,')

    def test_list_rooms_spoken_country_same_single(self):
        self._test_spoken_language(True, 'de', 'de')

    def test_list_rooms_spoken_country_same_multi(self):
        self._test_spoken_language(True, 'de', 'de,en')

    def test_list_rooms_spoken_country_allows_multi_user_matching(self):
        self._test_spoken_language(True, 'de,en', 'en')

    def test_list_rooms_spoken_country_allows_multi_user_multi_matching_single(self):
        self._test_spoken_language(True, 'de,en', 'es,en')

    def test_list_rooms_spoken_country_allows_multi_user_multi_matching_single_reverse(self):
        self._test_spoken_language(True, 'de,en', 'en,es')

    def test_list_rooms_spoken_country_allows_multi_user_multi_matching_single_reverse_trailing(self):
        self._test_spoken_language(True, 'de,en', 'en,es,')

    def test_list_rooms_no_channel_id_status_code_false(self):
        self.assert_in_room(False)
        activity = self.activity_for_list_rooms()
        del activity['object']['url']
        is_valid, code, msg = request.on_list_rooms(as_parser(activity))
        self.assertFalse(is_valid)
        self.assertEqual(code, ErrorCodes.MISSING_OBJECT_URL)

    def test_list_rooms_status_code_true_if_no_rooms(self):
        self.assert_in_room(False)
        response_data = request.on_list_rooms(as_parser(self.activity_for_list_rooms()))
        self.assertEqual(True, response_data[0])

    def _test_spoken_language(self, should_succeed: bool, channel_lang, user_lang):
        """Set a spoken_language ACL on the channel and the user's spoken
        language in the session, then check whether list_rooms is allowed."""
        self.assert_in_room(False)
        self.set_channel_acl({ApiActions.LIST: {'spoken_language': channel_lang}})
        self.set_session(SessionKeys.spoken_language.value, user_lang)
        activity = self.activity_for_list_rooms()
        is_valid, code, msg = request.on_list_rooms(as_parser(activity))
        if should_succeed:
            self.assertTrue(is_valid)
            self.assertIsNone(code)
        else:
            self.assertFalse(is_valid)
            self.assertEqual(code, ErrorCodes.NOT_ALLOWED)
|
thenetcircle/dino
|
test/validation/test_request_list_rooms.py
|
Python
|
apache-2.0
| 4,432
|
#!/usr/bin/env python3
"""Example YAML input:
geom:
fn: lib:h2o2_hf_321g_opt.xyz
calc1:
type: orca5
keywords: hf sto-3g
blocks: "%tddft nroots 2 iroot 1 end"
pal: 2
calc2:
type: orca5
keywords: hf sto-3g
blocks: "%tddft nroots 2 iroot 1 end"
pal: 2
# Either wf|tden
ovlp_type: wf
"""
import argparse
from pprint import pprint
import sys
import time
import numpy as np
import yaml
from pysisyphus.calculators import ORCA, ORCA5, Gaussian16
from pysisyphus.helpers import geom_loader
from pysisyphus.init_logging import init_logging
# Module-level setup: configure pysisyphus logging and compact numpy output.
init_logging()
np.set_printoptions(suppress=True, precision=6)
def parse_args(args):
    """Build the command-line parser for the overlap script and parse ``args``."""
    parser = argparse.ArgumentParser()
    # Positional YAML input describing geometry and the two calculators.
    parser.add_argument("yaml")
    # Output filename suffix for the overlap matrix.
    parser.add_argument("--ovlp-fn", dest="ovlp_fn", default="ovlp_mat.dat")
    # When given, skip the calculations and load dumped HDF5 data instead.
    parser.add_argument("--skip-calcs", dest="do_calc", action="store_false")
    parser.add_argument("--h5-fns", dest="h5_fns", nargs=2, default=None)
    parser.add_argument("--conf-thresh", dest="conf_thresh", type=float,
                        default=0.001)
    return parser.parse_args(args)
def run():
    """Run two excited-state calculations (or reload dumped data) and
    compute the wavefunction or transition-density overlaps between them."""
    args = parse_args(sys.argv[1:])

    with open(args.yaml) as handle:
        run_dict = yaml.load(handle.read(), Loader=yaml.SafeLoader)
    pprint(run_dict)
    print()

    geom = geom_loader(run_dict["geom"]["fn"])

    # Supported calculator backends, keyed by the YAML "type" field.
    CALCS = {"orca": ORCA, "gaussian16": Gaussian16, "orca5": ORCA5}

    # Either "wf" or "tden" (see module docstring).
    ovlp_type = run_dict["ovlp_type"]

    def get_calc(key):
        # Build a calculator from the YAML section named ``key``
        # ("calc1"/"calc2") and give it its own HDF5 dump file.
        calc_kwargs = run_dict[key]
        calc_kwargs["ovlp_type"] = ovlp_type
        calc_key = calc_kwargs.pop("type")
        dump_fn = f"overlap_data_{key}.h5"
        calc = CALCS[calc_key](**calc_kwargs, base_name=key, dump_fn=dump_fn)
        assert calc.root, "No 'root' set on calculator. Please specify an initial root."
        return calc

    calc1 = get_calc("calc1")
    calc2 = get_calc("calc2")
    calc_args = (geom.atoms, geom.coords)

    def calc_es(calc):
        # Run the excited-state calculation and persist overlap data.
        print(f"Calculating ES for {calc} ... ", end="")
        start = time.time()
        calc.get_energy(*calc_args)
        dur = time.time() - start
        print(f"finished in {dur:.1f} s.")
        calc.store_overlap_data(*calc_args)
        calc.dump_overlap_data()

    if args.do_calc:
        calc_es(calc1)
        calc_es(calc2)
    else:
        # --skip-calcs: restore previous results from HDF5 instead.
        try:
            h5_fn1, h5_fn2 = args.h5_fns
        except TypeError:
            # --h5-fns not given (None); fall back to the default dumps.
            h5_fn1 = calc1.dump_fn
            h5_fn2 = calc2.dump_fn
        print(f"Taking overlap_data from '{h5_fn1}' and '{h5_fn2}'.")
        calc1 = calc1.from_overlap_data(h5_fn1, set_wfow=True)
        calc2 = calc2.from_overlap_data(h5_fn2)

    conf_thresh = args.conf_thresh
    calc1.conf_thresh = conf_thresh
    if ovlp_type == "wf":
        # The wavefunction-overlap worker carries its own threshold.
        calc1.wfow.conf_thresh = conf_thresh

    # NOTE(review): S_AO is rebuilt from calc1's latest MO coefficients —
    # presumably both calculations share basis and geometry; confirm.
    ao_ovlp = calc1.get_sao_from_mo_coeffs(calc1.mo_coeff_list[-1])
    print("Recreate S_AO from MO coeffs at calc1")

    # Dispatch to the overlap method matching the requested type.
    ovlp_funcs = {
        "tden": "tden_overlap_with_calculator",
        "wf": "wf_overlap_with_calculator",
    }
    ovlp_func = ovlp_funcs[ovlp_type]
    print(f"Calculating {ovlp_type} overlaps")
    ovlp_mat = getattr(calc1, ovlp_func)(calc2, ao_ovlp=ao_ovlp)
    if ovlp_type == "wf":
        # The wf variant returns a tuple; keep only the overlap matrix.
        ovlp_mat = ovlp_mat[0]
    print("Rows along states of calc1, columns along states of calc2")
    print(ovlp_mat)
    ovlp_fn = f"{ovlp_type}_{args.ovlp_fn}"
    np.savetxt(ovlp_fn, ovlp_mat)
    print(f"Dumped overlap matrix to '{ovlp_fn}'.")
# Script entry point.
if __name__ == "__main__":
    run()
|
eljost/pysisyphus
|
scripts/overlaps.py
|
Python
|
gpl-3.0
| 3,442
|
# -*- coding: utf-8 -*-
# ***************************************************************************
# * Copyright (c) 2019 sliptonic <shopinthewoods@gmail.com> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
import FreeCAD
import FreeCADGui
import PathGui as PGui # ensure Path/Gui/Resources are loaded
import PathScripts
import PathScripts.PathGui as PathGui
import PathScripts.PathLog as PathLog
import PathScripts.PathToolBitGui as PathToolBitGui
import PathScripts.PathToolEdit as PathToolEdit
import PathScripts.PathUtil as PathUtil
from PySide import QtCore, QtGui
# lazily loaded modules
from lazy_loader.lazy_loader import LazyLoader
Part = LazyLoader('Part', globals(), 'Part')
# Qt translation handling
def translate(context, text, disambig=None):
    """Translate ``text`` via Qt's translation machinery."""
    return QtCore.QCoreApplication.translate(context, text, disambig)
class ViewProvider:
    """View provider for Path ToolController document objects.

    Hides the generic display properties (which are not meaningful for a
    tool controller) and opens the ToolController TaskPanel for editing.
    """

    def __init__(self, vobj):
        # Register this instance as the proxy of the view object.
        vobj.Proxy = self
        self.vobj = vobj

    def attach(self, vobj):
        """Hide all standard visual properties (editor mode 2 == hidden)."""
        mode = 2
        vobj.setEditorMode('LineWidth', mode)
        vobj.setEditorMode('MarkerColor', mode)
        vobj.setEditorMode('NormalColor', mode)
        vobj.setEditorMode('DisplayMode', mode)
        vobj.setEditorMode('BoundingBox', mode)
        vobj.setEditorMode('Selectable', mode)
        vobj.setEditorMode('ShapeColor', mode)
        vobj.setEditorMode('Transparency', mode)
        vobj.setEditorMode('Visibility', mode)
        self.vobj = vobj

    def __getstate__(self):
        # Nothing to serialize; the proxy is recreated on document load.
        return None

    def __setstate__(self, state):
        return None

    def getIcon(self):
        """Return the icon shown in the model tree."""
        return ":/icons/Path_ToolController.svg"

    def onChanged(self, vobj, prop):
        """Keep the display properties hidden whenever a property changes."""
        # pylint: disable=unused-argument
        mode = 2
        vobj.setEditorMode('LineWidth', mode)
        vobj.setEditorMode('MarkerColor', mode)
        vobj.setEditorMode('NormalColor', mode)
        vobj.setEditorMode('DisplayMode', mode)
        vobj.setEditorMode('BoundingBox', mode)
        vobj.setEditorMode('Selectable', mode)

    def onDelete(self, vobj, args=None):
        """Clear expressions and forward the deletion to the app proxy."""
        # pylint: disable=unused-argument
        PathUtil.clearExpressionEngine(vobj.Object)
        self.vobj.Object.Proxy.onDelete(vobj.Object, args)
        return True

    def updateData(self, vobj, prop):
        # this is executed when a property of the APP OBJECT changes
        # pylint: disable=unused-argument
        pass

    def setEdit(self, vobj=None, mode=0):
        """Open the TaskPanel editor for the default edit mode (0)."""
        if 0 == mode:
            if vobj is None:
                vobj = self.vobj
            FreeCADGui.Control.closeDialog()
            taskd = TaskPanel(vobj.Object)
            FreeCADGui.Control.showDialog(taskd)
            taskd.setupUi()
            FreeCAD.ActiveDocument.recompute()
            return True
        return False

    def unsetEdit(self, vobj, mode):
        # this is executed when the user cancels or terminates edit mode
        # pylint: disable=unused-argument
        return False

    def setupContextMenu(self, vobj, menu):
        """Replace the default context menu with a single 'Edit' action."""
        # pylint: disable=unused-argument
        PathLog.track()
        for action in menu.actions():
            menu.removeAction(action)
        action = QtGui.QAction(translate('Path', 'Edit'), menu)
        action.triggered.connect(self.setEdit)
        menu.addAction(action)

    def claimChildren(self):
        """Claim the ToolBit as a tree child (non-legacy tools only)."""
        obj = self.vobj.Object
        if obj and obj.Proxy and not obj.Proxy.usesLegacyTool(obj):
            return [obj.Tool]
        return []
def Create(name='Default Tool', tool=None, toolNumber=1):
    """Create a ToolController document object with an attached ViewProvider.

    Object creation is delegated to ``PathScripts.PathToolController.Create``.
    For ToolBit-based (non-legacy) tools the bit is hidden after creation.
    """
    PathLog.track(tool, toolNumber)
    obj = PathScripts.PathToolController.Create(name, tool, toolNumber)
    ViewProvider(obj.ViewObject)
    if not obj.Proxy.usesLegacyTool(obj):
        # ToolBits are visible by default, which is typically not what the user wants
        if tool and tool.ViewObject and tool.ViewObject.Visibility:
            tool.ViewObject.Visibility = False
    return obj
class CommandPathToolController(object):
    """GUI command that adds a ToolController to the selected Job."""
    # pylint: disable=no-init

    def GetResources(self):
        """Return icon, menu text and tooltip for the command."""
        return {'Pixmap': 'Path_LengthOffset',
                'MenuText': QtCore.QT_TRANSLATE_NOOP("Path_ToolController", "Add Tool Controller to the Job"),
                'ToolTip': QtCore.QT_TRANSLATE_NOOP("Path_ToolController", "Add Tool Controller")}

    def selectedJob(self):
        """Return the Job to operate on: the selected Job, or the only Job
        in the active document; None if none can be determined."""
        if FreeCAD.ActiveDocument:
            sel = FreeCADGui.Selection.getSelectionEx()
            if sel and sel[0].Object.Name[:3] == 'Job':
                return sel[0].Object
            jobs = [o for o in FreeCAD.ActiveDocument.Objects if o.Name[:3] == 'Job']
            if 1 == len(jobs):
                return jobs[0]
        return None

    def IsActive(self):
        """Command is available whenever a Job can be determined."""
        return self.selectedJob() is not None

    def Activated(self):
        """Let the user pick a tool bit and create a ToolController for it."""
        PathLog.track()
        job = self.selectedJob()
        if job:
            tool = PathToolBitGui.ToolBitSelector().getTool()
            if tool:
                toolNr = None
                # Re-use the number of an existing controller for this tool.
                for tc in job.Tools.Group:
                    if tc.Tool == tool:
                        toolNr = tc.ToolNumber
                        break
                if not toolNr:
                    # Bug fix: max() used to raise ValueError when the Job
                    # had no tool controllers yet; default=0 makes the
                    # first controller get tool number 1.
                    toolNr = max([tc.ToolNumber for tc in job.Tools.Group],
                                 default=0) + 1
                tc = Create("TC: {}".format(tool.Label), tool, toolNr)
                job.Proxy.addToolController(tc)
                FreeCAD.ActiveDocument.recompute()
class ToolControllerEditor(object):
    """Widget logic shared by the TaskPanel and the modal dialog for
    editing a ToolController's feeds, rapids, spindle and tool."""

    def __init__(self, obj, asDialog):
        self.form = FreeCADGui.PySideUic.loadUi(":/panels/DlgToolControllerEdit.ui")
        if not asDialog:
            # Embedded in a task panel: the panel provides OK/Cancel itself.
            self.form.buttonBox.hide()
        self.obj = obj

        # Quantity spin boxes keep the widgets and object properties in sync.
        self.vertFeed = PathGui.QuantitySpinBox(self.form.vertFeed, obj,
                                                'VertFeed')
        self.horizFeed = PathGui.QuantitySpinBox(self.form.horizFeed, obj,
                                                 'HorizFeed')
        self.vertRapid = PathGui.QuantitySpinBox(self.form.vertRapid, obj,
                                                 'VertRapid')
        self.horizRapid = PathGui.QuantitySpinBox(self.form.horizRapid, obj,
                                                  'HorizRapid')

        if obj.Proxy.usesLegacyTool(obj):
            self.editor = PathToolEdit.ToolEditor(obj.Tool,
                                                  self.form.toolEditor)
        else:
            # ToolBit-based controllers have no embedded tool editor tab.
            self.editor = None
            self.form.toolBox.widget(1).hide()
            self.form.toolBox.removeItem(1)

    def updateUi(self):
        """Copy the object's current values into the widgets."""
        tc = self.obj
        self.form.tcName.setText(tc.Label)
        self.form.tcNumber.setValue(tc.ToolNumber)
        self.horizFeed.updateSpinBox()
        self.horizRapid.updateSpinBox()
        self.vertFeed.updateSpinBox()
        self.vertRapid.updateSpinBox()
        self.form.spindleSpeed.setValue(tc.SpindleSpeed)
        index = self.form.spindleDirection.findText(tc.SpindleDir,
                                                    QtCore.Qt.MatchFixedString)
        if index >= 0:
            self.form.spindleDirection.setCurrentIndex(index)
        if self.editor:
            self.editor.updateUI()

    def updateToolController(self):
        """Copy the widget values back onto the object; errors are logged."""
        tc = self.obj
        try:
            tc.Label = self.form.tcName.text()
            tc.ToolNumber = self.form.tcNumber.value()
            self.horizFeed.updateProperty()
            self.vertFeed.updateProperty()
            self.horizRapid.updateProperty()
            self.vertRapid.updateProperty()
            tc.SpindleSpeed = self.form.spindleSpeed.value()
            tc.SpindleDir = self.form.spindleDirection.currentText()
            if self.editor:
                self.editor.updateTool()
                tc.Tool = self.editor.tool
        except Exception as e:
            PathLog.error(translate("PathToolController",
                                    "Error updating TC: %s") % e)

    def refresh(self):
        """Round-trip widgets -> object -> widgets without re-triggering
        the change signals."""
        self.form.blockSignals(True)
        self.updateToolController()
        self.updateUi()
        self.form.blockSignals(False)

    def setupUi(self):
        """Connect the editing-finished signals to refresh()."""
        if self.editor:
            self.editor.setupUI()
        self.form.tcName.editingFinished.connect(self.refresh)
        self.form.horizFeed.editingFinished.connect(self.refresh)
        self.form.vertFeed.editingFinished.connect(self.refresh)
        self.form.horizRapid.editingFinished.connect(self.refresh)
        self.form.vertRapid.editingFinished.connect(self.refresh)
        self.form.spindleSpeed.editingFinished.connect(self.refresh)
        self.form.spindleDirection.currentIndexChanged.connect(self.refresh)
class TaskPanel:
    """Task panel wrapper around ToolControllerEditor.

    While open it also maintains a temporary Part cylinder (``toolrep``)
    visualizing the legacy tool; the cylinder is removed on close.
    """

    def __init__(self, obj):
        self.editor = ToolControllerEditor(obj, False)
        self.form = self.editor.form
        self.updating = False
        self.toolrep = None
        self.obj = obj

    def accept(self):
        """Apply the edits, close the dialog and remove the preview."""
        self.getFields()

        FreeCADGui.ActiveDocument.resetEdit()
        FreeCADGui.Control.closeDialog()
        if self.toolrep:
            FreeCAD.ActiveDocument.removeObject(self.toolrep.Name)
        FreeCAD.ActiveDocument.recompute()

    def reject(self):
        """Discard the edits, close the dialog and remove the preview."""
        FreeCADGui.Control.closeDialog()
        if self.toolrep:
            FreeCAD.ActiveDocument.removeObject(self.toolrep.Name)
        FreeCAD.ActiveDocument.recompute()

    def getFields(self):
        """Transfer the widget values to the object and re-execute it."""
        self.editor.updateToolController()
        self.obj.Proxy.execute(self.obj)

    def setFields(self):
        """Transfer the object values to the widgets; resize the preview
        cylinder to the tool's diameter and cutting edge height."""
        self.editor.updateUi()

        if self.toolrep:
            tool = self.obj.Tool
            radius = float(tool.Diameter) / 2
            length = tool.CuttingEdgeHeight
            t = Part.makeCylinder(radius, length)
            self.toolrep.Shape = t

    def edit(self, item, column):
        # pylint: disable=unused-argument
        if not self.updating:
            self.resetObject()

    def resetObject(self, remove=None):
        # pylint: disable=unused-argument
        "transfers the values from the widget to the object"
        FreeCAD.ActiveDocument.recompute()

    def setupUi(self):
        """Create the preview cylinder (legacy tools only), populate the
        widgets and wire up the embedded editor."""
        if self.editor.editor:
            t = Part.makeCylinder(1, 1)
            self.toolrep = FreeCAD.ActiveDocument.addObject("Part::Feature",
                                                            "tool")
            self.toolrep.Shape = t
        self.setFields()
        self.editor.setupUi()
class DlgToolControllerEdit:
    """Modal dialog around ToolControllerEditor; restores the object's
    previous values if the user cancels the dialog."""

    def __init__(self, obj):
        self.editor = ToolControllerEditor(obj, True)
        self.editor.updateUi()
        self.editor.setupUi()
        self.obj = obj

    def exec_(self):
        """Run the dialog.

        Returns True when the dialog was rejected and the object restored
        from its template snapshot; False when the edits were accepted.
        """
        # Snapshot the current state so a cancel can revert it.
        restoreTC = self.obj.Proxy.templateAttrs(self.obj)
        rc = False
        if not self.editor.form.exec_():
            PathLog.info("revert")
            self.obj.Proxy.setFromTemplate(self.obj, restoreTC)
            rc = True
        return rc
# Register the GUI command only when FreeCAD runs with a GUI.
if FreeCAD.GuiUp:
    # register the FreeCAD command
    FreeCADGui.addCommand('Path_ToolController', CommandPathToolController())

FreeCAD.Console.PrintLog("Loading PathToolControllerGui... done\n")
|
sanguinariojoe/FreeCAD
|
src/Mod/Path/PathScripts/PathToolControllerGui.py
|
Python
|
lgpl-2.1
| 12,377
|
"""
Utilities for SUR and 3SLS estimation
"""
__author__= "Luc Anselin lanselin@gmail.com, \
Pedro V. Amaral pedrovma@gmail.com"
import numpy as np
import numpy.linalg as la
from .utils import spdot
__all__ = ['sur_dictxy','sur_dictZ','sur_mat2dict','sur_dict2mat',\
'sur_corr','sur_crossprod','sur_est','sur_resids',\
'filter_dict','check_k']
def sur_dictxy(db, y_vars, x_vars, space_id=None, time_id=None):
    """Stack X and y matrices and variable names as dictionaries by equation

    Parameters
    ----------
    db       : data object created by pysal.lib.io.open
    y_vars   : list of lists with variable name(s) for dependent var
               (Note must be a list of lists, even in splm case)
    x_vars   : list of lists with variable names for explanatory vars
    space_id : variable with space ID, used for splm format
    time_id  : variable with time ID, used for splm format

    Return
    ------
    (bigy, bigX, bigy_vars, bigX_vars)
             : tuple with dictionaries for y and X, one for
               each equation, bigy and bigX
               Note: bigX already includes the constant term
               dictionaries with y and X variables, by equation
               (includes constant for X)

    Raises
    ------
    Exception : when the x_vars list count does not match the number of
                equations (old format), or when time_id is missing
                (splm format).
    """
    c = "Constant"
    if len(y_vars) > 1:  # old format: one name per equation
        n_eq = len(y_vars)
        y = np.array([db.by_col(name) for name in y_vars]).T
        n = y.shape[0]
        bigy_vars = dict((r, y_vars[r]) for r in range(n_eq))
        bigy = dict((r, np.resize(y[:, r], (n, 1))) for r in range(n_eq))
        if not len(x_vars) == n_eq:
            # Consistency fix: raise (like sur_dictZ) instead of printing
            # and silently continuing with mismatched lists.
            raise Exception("Error: mismatch variable lists")
        bigX = {}
        bigX_vars = {}
        for r in range(n_eq):
            litx = np.array([db.by_col(name) for name in x_vars[r]]).T
            ic = c + "_" + str(r + 1)
            x_vars[r].insert(0, ic)
            # Prepend the constant column for this equation.
            litxc = np.hstack((np.ones((n, 1)), litx))
            bigX[r] = litxc
            bigX_vars[r] = x_vars[r]
        return (bigy, bigX, bigy_vars, bigX_vars)
    elif len(y_vars) == 1:  # splm format: long data split by time period
        if not time_id:
            raise Exception("Error: time id must be specified")
        y = np.array([db.by_col(name) for name in y_vars]).T
        bign = y.shape[0]
        tt = np.array([db.by_col(name) for name in time_id]).T
        # Distinct time periods define the equations.
        tt1 = set([val for sublist in tt.tolist() for val in sublist])
        n_eq = len(tt1)
        tt2 = list(tt1)
        tt2.sort()
        tt3 = [str(int(a) + 1) for a in tt2]
        # Bug fix: integer division — true division yields a float on
        # Python 3 and breaks the slice bounds computed below.
        n = bign // n_eq
        longx = np.array([db.by_col(name) for name in x_vars[0]]).T
        longxc = np.hstack((np.ones((bign, 1)), longx))
        xvars = x_vars[0][:]
        xvars.insert(0, c)
        bigy = {}
        bigX = {}
        bigy_vars = {}
        bigX_vars = {}
        for r in range(n_eq):
            k0 = r * n
            ke = r * n + n
            bigy[r] = y[k0:ke, :]
            bigy_vars[r] = y_vars[0] + "_" + tt3[r]
            bigX[r] = longxc[k0:ke, :]
            bxvars = [i + "_" + tt3[r] for i in xvars]
            bigX_vars[r] = bxvars
        return (bigy, bigX, bigy_vars, bigX_vars)
    else:
        # y_vars was empty; neither format applies.
        raise Exception("Error: y_vars must contain at least one variable name")
def sur_dictZ(db,z_vars,form="spreg",const=False,space_id=None,time_id=None):
    """Generic stack data matrices and variable names as dictionaries by equation

    Parameters
    ----------
    db       : data object created by pysal.lib.io.open
    z_vars   : list of lists with variable name(s)
               (Note must be a list of lists, even in splm case)
    form     : format used for data set
               default="spreg": cross-sectional format
               form="plm"     : plm (R) compatible using space and time id
    const    : boolean
               flag for constant term, default = "False"
    space_id : variable with space ID, used for plm format
    time_id  : variable with time ID, used for plm format

    Return
    ------
    (bigZ,bigZ_names) : tuple with dictionaries variables and variable
                        names, one for each equation
                        Note: bigZ already includes the constant term
                        when const=True

    Raises
    ------
    Exception : when form="plm" and no time_id is given
    KeyError  : when form is neither "spreg" nor "plm"
    """
    c = "Constant"
    if form == "spreg":  # cross-sectional: one variable list per equation
        n_eq = len(z_vars)
        bigZ = {}
        bigZ_names = {}
        for r in range(n_eq):
            litz = np.array([db.by_col(name) for name in z_vars[r]]).T
            if const:
                ic = c + "_" + str(r+1)
                # NOTE(review): insert mutates the caller's z_vars list in
                # place; kept as-is since downstream code may rely on it
                z_vars[r].insert(0,ic)
                litz = np.hstack((np.ones((litz.shape[0], 1)), litz))
            bigZ[r] = litz
            bigZ_names[r] = z_vars[r]
        return (bigZ,bigZ_names)
    elif (form == "plm"):  # long (plm) format, stacked by time period
        if not(time_id):
            raise Exception("Error: time id must be specified for plm format")
        tt = np.array([db.by_col(name) for name in time_id]).T
        bign = tt.shape[0]
        tt1 = set([val for sublist in tt.tolist() for val in sublist])
        n_eq = len(tt1)
        tt2 = list(tt1)
        tt2.sort()
        # 1-based time-period suffixes for the variable names
        tt3 = [str(int(a)+1) for a in tt2]
        # bug fix: integer division -- plain / yields a float under Python 3
        # and breaks the longz[k0:ke,:] slicing below
        n = bign // n_eq
        longz = np.array([db.by_col(name) for name in z_vars[0]]).T
        zvars = z_vars[0][:]
        if const:
            longz = np.hstack((np.ones((bign, 1)), longz))
            zvars.insert(0,c)
        bigZ = {}
        bigZ_names = {}
        for r in range(n_eq):
            # carve the r-th time-period block out of the long matrix
            k0 = r * n
            ke = r * n + n
            bigZ[r] = longz[k0:ke,:]
            bzvars = [i + "_" + tt3[r] for i in zvars]
            bigZ_names[r] = bzvars
        return (bigZ,bigZ_names)
    else:
        raise KeyError("Invalid format used for data set. form must be either "
                       " 'spreg' or 'plm', and {} was provided.".format(form))
def sur_mat2dict(mat,ndim):
    """ Utility to convert a vector or matrix to a dictionary with ndim keys,
    one for each equation

    Parameters
    ----------
    mat    : vector or matrix with elements to be converted
    ndim   : vector with number of elements (rows) to belong to each
             dict

    Returns
    -------
    dicts  : dictionary with len(ndim) keys, from 0 to len(ndim)-1,
             holding consecutive row-blocks of mat
    """
    # walk down the rows of mat, carving off ndim[i] rows for key i
    dicts = {}
    row = 0
    for idx, size in enumerate(np.asarray(ndim).flatten()):
        step = int(size)
        dicts[idx] = mat[row:row + step, :]
        row += step
    return dicts
def sur_dict2mat(dicts):
    """ Utility to stack the elements of a dictionary of vectors

    Parameters
    ----------
    dicts  : dictionary of vectors or matrices with same number
             of columns (no checks yet!)

    Returns
    -------
    mat    : a vector or matrix of vertically stacked vectors,
             ordered by the integer keys 0 .. len(dicts)-1
    """
    n_dicts = len(dicts)
    # bug fix: pass a list, not a generator -- np.vstack on a generator is
    # deprecated and rejected by recent numpy releases
    mat = np.vstack([dicts[t] for t in range(n_dicts)])
    return(mat)
def sur_corr(sig):
    """SUR error correlation matrix

    Parameters
    ----------
    sig  : Sigma cross-equation covariance matrix

    Returns
    -------
    corr : correlation matrix corresponding to sig
    """
    # standard deviations as a column vector ...
    stdev = np.sqrt(sig.diagonal()).reshape(-1, 1)
    # ... whose outer product gives the s_i * s_j normalizing terms
    corr = sig / np.dot(stdev, stdev.T)
    return corr
def sur_crossprod(bigZ,bigy):
    '''Creates dictionaries of cross products by time period for both SUR and 3SLS

    Parameters
    ----------
    bigZ     : dictionary with matrix of explanatory variables,
               including constant, exogenous and endogenous, one
               for each equation
    bigy     : dictionary with vectors of dependent variable, one
               for each equation

    Returns
    -------
    bigZy    : dictionary of all r,s cross-products
               of Z_r'y_s
    bigZZ    : dictionary of all r,s cross-products
               of Z_r'Z_s
    '''
    n_eq = len(bigy)
    bigZZ = {}
    bigZy = {}
    # a single pass over all (r, t) equation pairs fills both dictionaries
    for r in range(n_eq):
        zr_t = bigZ[r].T
        for t in range(n_eq):
            bigZZ[(r, t)] = spdot(zr_t, bigZ[t])
            bigZy[(r, t)] = spdot(zr_t, bigy[t])
    return bigZZ, bigZy
def sur_est(bigXX,bigXy,bigE,bigK):
    ''' Basic SUR estimation equations for both SUR and 3SLS

    Parameters
    ----------
    bigXX    : dictionary of cross-product matrices X_t'X_r
               (created by sur_crossprod)
    bigXy    : dictionary of cross-product matrices X_t'y_r
               (created by sur_crossprod)
    bigE     : n by n_eq array of residuals
    bigK     : vector with the number of variables (rows) per equation,
               used to split the stacked coefficient vector back into
               per-equation blocks via sur_mat2dict

    Returns
    -------
    bSUR     : dictionary with regression coefficients by equation
    varb     : variance-covariance matrix for the regression coefficients
    sig      : residual covariance matrix (using previous residuals)
    '''
    n = bigE.shape[0]
    n_eq = bigE.shape[1]
    # cross-equation residual covariance and its inverse
    sig = np.dot(bigE.T,bigE) / n
    sigi = la.inv(sig)
    # weight each X_r'X_t block by the matching element of Sigma^{-1}
    sigiXX = {}
    for r in range(n_eq):
        for t in range(n_eq):
            sigiXX[(r,t)] = bigXX[(r,t)]*sigi[r,t]
    sigiXy = {}
    for r in range(n_eq):
        sxy = 0.0
        for t in range(n_eq):
            sxy = sxy + sigi[r,t]*bigXy[(r,t)]
        sigiXy[r] = sxy
    # bug fix: build the stacked system from list comprehensions --
    # np.vstack/np.hstack reject generator arguments in recent numpy
    xsigy = np.vstack([sigiXy[t] for t in range(n_eq)])
    xsigx = np.vstack([np.hstack([sigiXX[(r,t)] for t in range(n_eq)])
                       for r in range(n_eq)])
    varb = la.inv(xsigx)
    beta = np.dot(varb,xsigy)
    bSUR = sur_mat2dict(beta,bigK)
    return bSUR, varb, sig
def sur_resids(bigy,bigX,beta):
    ''' Computation of a matrix with residuals by equation

    Parameters
    ----------
    bigy  : dictionary with vector of dependent variable, one for each equation
    bigX  : dictionary with matrix of explanatory variables, one for
            each equation
    beta  : dictionary with estimation coefficients by
            equation

    Returns
    -------
    bigE  : a n x n_eq matrix of vectors of residuals
    '''
    n_eq = len(bigy)
    # bug fix: pass a list, not a generator -- np.hstack on a generator is
    # deprecated and rejected by recent numpy releases
    bigE = np.hstack([bigy[r] - spdot(bigX[r], beta[r]) for r in range(n_eq)])
    return(bigE)
def sur_predict(bigy,bigX,beta):
    ''' Computation of a matrix with predicted values by equation

    Parameters
    ----------
    bigy  : dictionary with vector of dependent variable, one for each equation
    bigX  : dictionary with matrix of explanatory variables, one for
            each equation
    beta  : dictionary with estimation coefficients by
            equation

    Returns
    -------
    bigYP : a n x n_eq matrix of vectors of predicted values
    '''
    n_eq = len(bigy)
    # bug fix: pass a list, not a generator -- np.hstack on a generator is
    # deprecated and rejected by recent numpy releases
    bigYP = np.hstack([spdot(bigX[r], beta[r]) for r in range(n_eq)])
    return(bigYP)
def filter_dict(lam,bigZ,bigZlag):
    """Dictionary of spatially filtered variables for use in SUR

    Parameters
    ----------
    lam      : n_eq x 1 array of spatial autoregressive parameters
    bigZ     : dictionary of vector or matrix of variables, one for
               each equation
    bigZlag  : dictionary of vector or matrix of spatially lagged
               variables, one for each equation

    Returns
    -------
    Zfilt    : dictionary with spatially filtered variables
               Z - lam*WZ, one for each equation
    """
    n_eq = lam.shape[0]
    # both dictionaries must have exactly one entry per equation
    if len(bigZ) != n_eq or len(bigZlag) != n_eq:
        raise Exception("Error: incompatible dimensions")
    # spatial filter Z - lam*WZ, equation by equation
    return {r: bigZ[r] - lam[r][0] * bigZlag[r] for r in range(n_eq)}
def check_k(bigK):
    """Check on equality of number of variables by equation

    Parameter
    ---------
    bigK   : n_eq x 1 array of number of variables (includes constant)

    Returns
    -------
    True/False : result of test
    """
    flat = np.asarray(bigK).flatten()
    # every equation must have the same count as the first one
    return bool((flat == flat[0]).all())
|
lixun910/pysal
|
pysal/model/spreg/sur_utils.py
|
Python
|
bsd-3-clause
| 12,555
|
#!/usr/bin/env python3
from test_framework.authproxy import JSONRPCException
from test_framework.test_framework import ElysiumTestFramework
from test_framework.util import assert_equal, assert_raises_message
class ElysiumSendMintTest(ElysiumTestFramework):
    """Functional test for elysium_sendmint.

    Checks that minting is rejected for a property without Sigma enabled,
    rejected when the sender lacks firo to fund the transaction or lacks
    token balance, and that a successful mint appears as pending until the
    next block confirms it.
    """

    def run_test(self):
        super().run_test()
        # advance the chain to the height where sigma activates
        sigma_start_block = 260
        self.nodes[0].generatetoaddress(30, self.addrs[0])
        self.nodes[0].generate(sigma_start_block - self.nodes[0].getblockcount())
        assert_equal(sigma_start_block, self.nodes[0].getblockcount())
        # create non-sigma
        self.nodes[0].elysium_sendissuancefixed(
            self.addrs[0], 1, 1, 0, '', '', 'Non-Sigma', '', '', '1000000'
        )
        self.nodes[0].generate(1)
        nonSigmaProperty = 3
        # create sigma with denominations (1, 2)
        self.nodes[0].elysium_sendissuancefixed(
            self.addrs[0], 1, 1, 0, '', '', 'Sigma', '', '', '1000000', 1
        )
        self.nodes[0].generate(1)
        sigmaProperty = 4
        self.nodes[0].elysium_sendcreatedenomination(self.addrs[0], sigmaProperty, '1')
        self.nodes[0].generate(1)
        self.nodes[0].elysium_sendcreatedenomination(self.addrs[0], sigmaProperty, '2')
        self.nodes[0].generate(10)
        # non-sigma: minting must be rejected even with firo and tokens funded
        addr = self.nodes[0].getnewaddress()
        self.nodes[0].elysium_send(self.addrs[0], addr, nonSigmaProperty, "100")
        self.nodes[0].sendtoaddress(addr, 100)
        self.nodes[0].generate(10)
        assert_raises_message(
            JSONRPCException,
            'Property has not enabled Sigma',
            self.nodes[0].elysium_sendmint, addr, nonSigmaProperty, {"0": 1}
        )
        # the rejected mint must leave the token balance untouched
        assert_equal("100", self.nodes[0].elysium_getbalance(addr, nonSigmaProperty)['balance'])
        # sigma
        # mint without firo and token
        addr = self.nodes[0].getnewaddress()
        assert_raises_message(
            JSONRPCException,
            'Sender has insufficient balance',
            self.nodes[0].elysium_sendmint, addr, sigmaProperty, {"0": 1}
        )
        # mint without firo then fail
        addr = self.nodes[0].getnewaddress()
        self.nodes[0].elysium_send(self.addrs[0], addr, sigmaProperty, "100")
        self.nodes[0].generate(10)
        assert_raises_message(
            JSONRPCException,
            'Error choosing inputs for the send transaction',
            self.nodes[0].elysium_sendmint, addr, sigmaProperty, {"0": 1}
        )
        # failed mint: balance unchanged, nothing pending
        assert_equal("100", self.nodes[0].elysium_getbalance(addr, sigmaProperty)['balance'])
        assert_equal(0, len(self.nodes[0].elysium_listpendingmints()))
        # mint without token then fail
        addr = self.nodes[0].getnewaddress()
        self.nodes[0].sendtoaddress(addr, 100)
        self.nodes[0].generate(10)
        assert_raises_message(
            JSONRPCException,
            'Sender has insufficient balance',
            self.nodes[0].elysium_sendmint, addr, sigmaProperty, {"0":1}
        )
        assert_equal("0", self.nodes[0].elysium_getbalance(addr, sigmaProperty)['balance'])
        assert_equal(0, len(self.nodes[0].elysium_listpendingmints()))
        # success to mint should be shown on pending
        addr = self.nodes[0].getnewaddress()
        self.nodes[0].elysium_send(self.addrs[0], addr, sigmaProperty, "100")
        self.nodes[0].sendtoaddress(addr, 100)
        self.nodes[0].generate(10)
        self.nodes[0].elysium_sendmint(addr, sigmaProperty, {"0":1})
        assert_equal(1, len(self.nodes[0].elysium_listpendingmints()))
        assert_equal("99", self.nodes[0].elysium_getbalance(addr, sigmaProperty)['balance'])
        # confirming block moves the mint from pending to the mint list
        self.nodes[0].generate(1)
        assert_equal(0, len(self.nodes[0].elysium_listpendingmints()))
        assert_equal(1, len(self.nodes[0].elysium_listmints()))
# Allow running this functional test directly as a script.
if __name__ == '__main__':
    ElysiumSendMintTest().main()
|
zcoinofficial/zcoin
|
qa/rpc-tests/elysium_sendmint.py
|
Python
|
mit
| 3,905
|
# Generated by Django 3.0.5 on 2020-05-01 08:44
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema migration (Django 3.0.5, 2020-05-01).

    Alters the choice lists of Artifact.armor_type, Effect.style and
    PlayerArtifact.armor_type (armor_type now includes a 'Ring' option).
    Do not hand-edit the field definitions below; they mirror the models.
    """

    dependencies = [
        ('adventure', '0061_auto_20191213_0007'),
    ]

    operations = [
        migrations.AlterField(
            model_name='artifact',
            name='armor_type',
            field=models.IntegerField(blank=True, choices=[(0, 'Armor'), (1, 'Shield'), (2, 'Helmet'), (3, 'Gloves'), (4, 'Ring')], null=True),
        ),
        migrations.AlterField(
            model_name='effect',
            name='style',
            field=models.CharField(blank=True, choices=[('', 'Normal'), ('emphasis', 'Bold'), ('success', 'Success (green)'), ('special', 'Special 1 (blue)'), ('special2', 'Special 1 (purple)'), ('warning', 'Warning (orange)'), ('danger', 'Danger (red)')], max_length=20, null=True),
        ),
        migrations.AlterField(
            model_name='playerartifact',
            name='armor_type',
            field=models.IntegerField(choices=[(0, 'Armor'), (1, 'Shield'), (2, 'Helmet'), (3, 'Gloves'), (4, 'Ring')], default=0, null=True),
        ),
    ]
|
kdechant/eamon
|
adventure/migrations/0062_auto_20200501_0144.py
|
Python
|
mit
| 1,129
|
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import os
import mock
from gzip import GzipFile
from shutil import rmtree
import six.moves.cPickle as pickle
import time
import tempfile
from contextlib import contextmanager, closing
from collections import defaultdict
from errno import ENOENT, ENOTEMPTY, ENOTDIR
from eventlet.green import subprocess
from eventlet import Timeout, tpool
from test.unit import (debug_logger, patch_policies, make_timestamp_iter,
mocked_http_conn)
from swift.common import utils
from swift.common.utils import (hash_path, mkdirs, normalize_timestamp,
storage_directory)
from swift.common import ring
from swift.obj import diskfile, replicator as object_replicator
from swift.common.storage_policy import StoragePolicy, POLICIES
def _ips(*args, **kwargs):
return ['127.0.0.0']
def mock_http_connect(status):
    """Return a replacement for http_connect whose connections always
    answer with the given *status* and an empty pickled body."""

    class FakeConn(object):
        def __init__(self, status, *args, **kwargs):
            # positional layout mirrors the real http_connect signature:
            # (host, port, device, partition, method, path, ...)
            self.status = status
            self.reason = 'Fake'
            self.host = args[0]
            self.port = args[1]
            self.method = args[4]
            self.path = args[5]
            self.with_exc = False
            self.headers = kwargs.get('headers', {})

        def getresponse(self):
            if self.with_exc:
                raise Exception('test')
            return self

        def getheader(self, header):
            return self.headers[header]

        def read(self, amt=None):
            return pickle.dumps({})

        def close(self):
            return

    def _connect(*args, **kwargs):
        return FakeConn(status, *args, **kwargs)
    return _connect
# Shared collector for rsync-argument mismatches recorded by MockProcess;
# asserted empty in TestObjectReplicator.tearDown.
process_errors = []
class MockProcess(object):
    """Stand-in for subprocess.Popen that scripts rsync outcomes.

    The class-level generators are primed by _mock_process() from a list of
    (return_code, stdout_log, expected_args) tuples; each instantiation
    consumes one expected_args entry and records any mismatch with the
    actual command line in the module-level process_errors list.
    """
    ret_code = None      # generator of return codes consumed by wait()
    ret_log = None       # generator of stdout payloads consumed by Stream.read()
    check_args = None    # generator of expected command-line fragments
    captured_log = None  # per-call capture list, managed by _mock_process()

    class Stream(object):
        # minimal stdout stand-in
        def read(self):
            return next(MockProcess.ret_log)

    def __init__(self, *args, **kwargs):
        # args[0] is the command line (list) handed to Popen; every expected
        # fragment must appear in it, otherwise the mismatch is recorded
        targs = next(MockProcess.check_args)
        for targ in targs:
            # Allow more than 2 candidate targs
            # (e.g. a case that either node is fine when nodes shuffled)
            if isinstance(targ, tuple):
                allowed = False
                for target in targ:
                    if target in args[0]:
                        allowed = True
                if not allowed:
                    process_errors.append("Invalid: %s not in %s" % (targ,
                                                                     args))
            else:
                if targ not in args[0]:
                    process_errors.append("Invalid: %s not in %s" % (targ,
                                                                     args))
        self.captured_info = {
            'rsync_args': args[0],
        }
        self.stdout = self.Stream()

    def wait(self):
        # the _mock_process context manager assures this class attribute is a
        # mutable list and takes care of resetting it
        rv = next(self.ret_code)
        if self.captured_log is not None:
            self.captured_info['ret_code'] = rv
            self.captured_log.append(self.captured_info)
        return rv
@contextmanager
def _mock_process(ret):
    """Temporarily replace object_replicator's subprocess.Popen with
    MockProcess, scripted from *ret*.

    Parameters
    ----------
    ret : list of (return_code, stdout_log, expected_args) tuples, one per
          expected Popen call.

    Yields the list that MockProcess appends per-call capture info to.
    """
    captured_log = []
    MockProcess.captured_log = captured_log
    orig_process = subprocess.Popen
    MockProcess.ret_code = (i[0] for i in ret)
    MockProcess.ret_log = (i[1] for i in ret)
    MockProcess.check_args = (i[2] for i in ret)
    object_replicator.subprocess.Popen = MockProcess
    try:
        yield captured_log
    finally:
        # bug fix: restore even if the with-body raises, so a failing test
        # cannot leak the patched Popen into later tests
        MockProcess.captured_log = None
        object_replicator.subprocess.Popen = orig_process
def _create_test_rings(path, devs=None):
    """Write object.ring.gz and object-1.ring.gz under *path* with identical
    contents (7 partitions x 3 replicas), then force a policy ring reload."""
    intended_replica2part2dev_id = [
        [0, 1, 2, 3, 4, 5, 6],
        [1, 2, 3, 0, 5, 6, 4],
        [2, 3, 0, 1, 6, 4, 5],
    ]
    intended_devs = devs or [
        {'id': 0, 'device': 'sda', 'zone': 0,
         'region': 1, 'ip': '127.0.0.0', 'port': 6200},
        {'id': 1, 'device': 'sda', 'zone': 1,
         'region': 2, 'ip': '127.0.0.1', 'port': 6200},
        {'id': 2, 'device': 'sda', 'zone': 2,
         'region': 3, 'ip': '127.0.0.2', 'port': 6200},
        {'id': 3, 'device': 'sda', 'zone': 4,
         'region': 2, 'ip': '127.0.0.3', 'port': 6200},
        {'id': 4, 'device': 'sda', 'zone': 5,
         'region': 1, 'ip': '127.0.0.4', 'port': 6200,
         'replication_ip': '127.0.1.4'},
        {'id': 5, 'device': 'sda', 'zone': 6,
         'region': 3, 'ip': 'fe80::202:b3ff:fe1e:8329', 'port': 6200},
        {'id': 6, 'device': 'sda', 'zone': 7, 'region': 1,
         'ip': '2001:0db8:85a3:0000:0000:8a2e:0370:7334', 'port': 6200},
    ]
    intended_part_shift = 30
    ring_data = ring.RingData(intended_replica2part2dev_id,
                              intended_devs, intended_part_shift)
    # both storage policies get the same ring contents
    for ring_file in ('object.ring.gz', 'object-1.ring.gz'):
        with closing(GzipFile(os.path.join(path, ring_file), 'wb')) as f:
            pickle.dump(ring_data, f)
    for policy in POLICIES:
        policy.object_ring = None  # force reload
    return
@patch_policies([StoragePolicy(0, 'zero', False),
StoragePolicy(1, 'one', True)])
class TestObjectReplicator(unittest.TestCase):
    def setUp(self):
        """Build a throwaway devices tree, test rings, and a replicator."""
        utils.HASH_PATH_SUFFIX = 'endcap'
        utils.HASH_PATH_PREFIX = ''
        # recon cache path
        self.recon_cache = tempfile.mkdtemp()
        rmtree(self.recon_cache, ignore_errors=1)
        os.mkdir(self.recon_cache)
        # Setup a test ring (stolen from common/test_ring.py)
        self.testdir = tempfile.mkdtemp()
        self.devices = os.path.join(self.testdir, 'node')
        rmtree(self.testdir, ignore_errors=1)
        os.mkdir(self.testdir)
        os.mkdir(self.devices)
        # one local disk 'sda' populated with partitions for both policies
        self.objects, self.objects_1, self.parts, self.parts_1 = \
            self._write_disk_data('sda')
        _create_test_rings(self.testdir)
        self.logger = debug_logger('test-replicator')
        self.conf = dict(
            bind_ip=_ips()[0], bind_port=6200,
            swift_dir=self.testdir, devices=self.devices, mount_check='false',
            timeout='300', stats_interval='1', sync_method='rsync')
        self._create_replicator()
        self.ts = make_timestamp_iter()
def tearDown(self):
self.assertFalse(process_errors)
rmtree(self.testdir, ignore_errors=1)
rmtree(self.recon_cache, ignore_errors=1)
def test_handoff_replication_setting_warnings(self):
conf_tests = [
# (config, expected_warning)
({}, False),
({'handoff_delete': 'auto'}, False),
({'handoffs_first': 'no'}, False),
({'handoff_delete': '2'}, True),
({'handoffs_first': 'yes'}, True),
({'handoff_delete': '1', 'handoffs_first': 'yes'}, True),
]
log_message = 'Handoff only mode is not intended for normal ' \
'operation, please disable handoffs_first and ' \
'handoff_delete before the next normal rebalance'
for config, expected_warning in conf_tests:
self.logger.clear()
object_replicator.ObjectReplicator(config, logger=self.logger)
warning_log_lines = self.logger.get_lines_for_level('warning')
if expected_warning:
expected_log_lines = [log_message]
else:
expected_log_lines = []
self.assertEqual(expected_log_lines, warning_log_lines,
'expected %s != %s for config %r' % (
expected_log_lines,
warning_log_lines,
config,
))
    def _write_disk_data(self, disk_name, with_json=False):
        """Create objects/objects-1 trees with partitions 0-3 for disk_name.

        When with_json is True, also drop empty auditor status files in
        each policy's objects directory.  Returns the tuple
        (objects, objects_1, parts, parts_1) of created paths.
        """
        os.mkdir(os.path.join(self.devices, disk_name))
        objects = os.path.join(self.devices, disk_name,
                               diskfile.get_data_dir(POLICIES[0]))
        objects_1 = os.path.join(self.devices, disk_name,
                                 diskfile.get_data_dir(POLICIES[1]))
        os.mkdir(objects)
        os.mkdir(objects_1)
        parts = {}
        parts_1 = {}
        for part in ['0', '1', '2', '3']:
            parts[part] = os.path.join(objects, part)
            os.mkdir(parts[part])
            parts_1[part] = os.path.join(objects_1, part)
            os.mkdir(parts_1[part])
        if with_json:
            for json_file in ['auditor_status_ZBF.json',
                              'auditor_status_ALL.json']:
                for obj_dir in [objects, objects_1]:
                    with open(os.path.join(obj_dir, json_file), 'w'):
                        pass
        return objects, objects_1, parts, parts_1
    def _create_replicator(self):
        """Instantiate the replicator under test and a matching disk file
        manager from self.conf."""
        self.replicator = object_replicator.ObjectReplicator(self.conf)
        self.replicator.logger = self.logger
        self.replicator._zero_stats()
        self.replicator.all_devs_info = set()
        self.df_mgr = diskfile.DiskFileManager(self.conf, self.logger)
    def test_run_once_no_local_device_in_ring(self):
        """With a bind_ip absent from the ring, run_once logs one
        'not replicating' error per storage policy."""
        conf = dict(swift_dir=self.testdir, devices=self.devices,
                    bind_ip='1.1.1.1', recon_cache_path=self.recon_cache,
                    mount_check='false', timeout='300', stats_interval='1')
        replicator = object_replicator.ObjectReplicator(conf,
                                                        logger=self.logger)
        replicator.run_once()
        expected = [
            "Can't find itself in policy with index 0 with ips 1.1.1.1 and"
            " with port 6200 in ring file, not replicating",
            "Can't find itself in policy with index 1 with ips 1.1.1.1 and"
            " with port 6200 in ring file, not replicating",
        ]
        self.assertEqual(expected, self.logger.get_lines_for_level('error'))
    def test_run_once(self):
        """Happy path for policy 0: one local object is replicated to the
        remote nodes via mocked rsync, and replication_cycle advances by one
        per run_once call, wrapping modulo 10."""
        conf = dict(swift_dir=self.testdir, devices=self.devices,
                    bind_ip=_ips()[0], recon_cache_path=self.recon_cache,
                    mount_check='false', timeout='300', stats_interval='1')
        replicator = object_replicator.ObjectReplicator(conf,
                                                        logger=self.logger)
        was_connector = object_replicator.http_connect
        object_replicator.http_connect = mock_http_connect(200)
        # drop a single .data file into partition 0 of the local disk
        cur_part = '0'
        df = self.df_mgr.get_diskfile('sda', cur_part, 'a', 'c', 'o',
                                      policy=POLICIES[0])
        mkdirs(df._datadir)
        f = open(os.path.join(df._datadir,
                              normalize_timestamp(time.time()) + '.data'),
                 'wb')
        f.write('1234567890')
        f.close()
        ohash = hash_path('a', 'c', 'o')
        data_dir = ohash[-3:]
        whole_path_from = os.path.join(self.objects, cur_part, data_dir)
        # expect one successful rsync per remote node holding the partition
        process_arg_checker = []
        ring = replicator.load_object_ring(POLICIES[0])
        nodes = [node for node in
                 ring.get_part_nodes(int(cur_part))
                 if node['ip'] not in _ips()]
        rsync_mods = tuple(['%s::object/sda/objects/%s' %
                            (node['ip'], cur_part) for node in nodes])
        for node in nodes:
            process_arg_checker.append(
                (0, '', ['rsync', whole_path_from, rsync_mods]))
        start = replicator.replication_cycle
        self.assertGreaterEqual(start, 0)
        self.assertLess(start, 9)
        with _mock_process(process_arg_checker):
            replicator.run_once()
        self.assertEqual(start + 1, replicator.replication_cycle)
        self.assertFalse(process_errors)
        self.assertFalse(self.logger.get_lines_for_level('error'))
        object_replicator.http_connect = was_connector
        # the cycle counter keeps advancing and wraps at 10
        with _mock_process(process_arg_checker):
            for cycle in range(1, 10):
                replicator.run_once()
                self.assertEqual((start + 1 + cycle) % 10,
                                 replicator.replication_cycle)
# policy 1
    def test_run_once_1(self):
        """Same happy path as test_run_once but for storage policy 1
        (objects-1 tree), with whataremyips patched to the test address."""
        conf = dict(swift_dir=self.testdir, devices=self.devices,
                    recon_cache_path=self.recon_cache,
                    mount_check='false', timeout='300', stats_interval='1')
        replicator = object_replicator.ObjectReplicator(conf,
                                                        logger=self.logger)
        was_connector = object_replicator.http_connect
        object_replicator.http_connect = mock_http_connect(200)
        # drop a single .data file into partition 0 under policy 1
        cur_part = '0'
        df = self.df_mgr.get_diskfile('sda', cur_part, 'a', 'c', 'o',
                                      policy=POLICIES[1])
        mkdirs(df._datadir)
        f = open(os.path.join(df._datadir,
                              normalize_timestamp(time.time()) + '.data'),
                 'wb')
        f.write('1234567890')
        f.close()
        ohash = hash_path('a', 'c', 'o')
        data_dir = ohash[-3:]
        whole_path_from = os.path.join(self.objects_1, cur_part, data_dir)
        # expect one successful rsync per remote node, against objects-1
        process_arg_checker = []
        ring = replicator.load_object_ring(POLICIES[1])
        nodes = [node for node in
                 ring.get_part_nodes(int(cur_part))
                 if node['ip'] not in _ips()]
        rsync_mods = tuple(['%s::object/sda/objects-1/%s' %
                            (node['ip'], cur_part) for node in nodes])
        for node in nodes:
            process_arg_checker.append(
                (0, '', ['rsync', whole_path_from, rsync_mods]))
        with _mock_process(process_arg_checker):
            with mock.patch('swift.obj.replicator.whataremyips',
                            side_effect=_ips):
                replicator.run_once()
        self.assertFalse(process_errors)
        self.assertFalse(self.logger.get_lines_for_level('error'))
        object_replicator.http_connect = was_connector
    def test_check_ring(self):
        """check_ring() returns False only when both the next_check deadline
        has passed and the ring file's mtime has changed."""
        for pol in POLICIES:
            obj_ring = self.replicator.load_object_ring(pol)
            self.assertTrue(self.replicator.check_ring(obj_ring))
            # deadline passed, ring unchanged -> still True
            orig_check = self.replicator.next_check
            self.replicator.next_check = orig_check - 30
            self.assertTrue(self.replicator.check_ring(obj_ring))
            self.replicator.next_check = orig_check
            # ring changed, deadline not passed -> still True
            orig_ring_time = obj_ring._mtime
            obj_ring._mtime = orig_ring_time - 30
            self.assertTrue(self.replicator.check_ring(obj_ring))
            # both conditions met -> False
            self.replicator.next_check = orig_check - 30
            self.assertFalse(self.replicator.check_ring(obj_ring))
    def test_collect_jobs_mkdirs_error(self):
        """A failing mkdirs during job collection is logged, not raised."""
        non_local = {}

        def blowup_mkdirs(path):
            # record the path so it can be matched against the error log
            non_local['path'] = path
            raise OSError('Ow!')

        with mock.patch.object(object_replicator, 'mkdirs', blowup_mkdirs):
            rmtree(self.objects, ignore_errors=1)
            object_replicator.mkdirs = blowup_mkdirs
            self.replicator.collect_jobs()
            self.assertEqual(self.logger.get_lines_for_level('error'), [
                'ERROR creating %s: ' % non_local['path']])
            log_args, log_kwargs = self.logger.log_dict['error'][0]
            self.assertEqual(str(log_kwargs['exc_info'][1]), 'Ow!')
def test_collect_jobs(self):
jobs = self.replicator.collect_jobs()
jobs_to_delete = [j for j in jobs if j['delete']]
jobs_by_pol_part = {}
for job in jobs:
jobs_by_pol_part[str(int(job['policy'])) + job['partition']] = job
self.assertEqual(len(jobs_to_delete), 2)
self.assertTrue('1', jobs_to_delete[0]['partition'])
self.assertEqual(
[node['id'] for node in jobs_by_pol_part['00']['nodes']], [1, 2])
self.assertEqual(
[node['id'] for node in jobs_by_pol_part['01']['nodes']],
[1, 2, 3])
self.assertEqual(
[node['id'] for node in jobs_by_pol_part['02']['nodes']], [2, 3])
self.assertEqual(
[node['id'] for node in jobs_by_pol_part['03']['nodes']], [3, 1])
self.assertEqual(
[node['id'] for node in jobs_by_pol_part['10']['nodes']], [1, 2])
self.assertEqual(
[node['id'] for node in jobs_by_pol_part['11']['nodes']],
[1, 2, 3])
self.assertEqual(
[node['id'] for node in jobs_by_pol_part['12']['nodes']], [2, 3])
self.assertEqual(
[node['id'] for node in jobs_by_pol_part['13']['nodes']], [3, 1])
for part in ['00', '01', '02', '03']:
for node in jobs_by_pol_part[part]['nodes']:
self.assertEqual(node['device'], 'sda')
self.assertEqual(jobs_by_pol_part[part]['path'],
os.path.join(self.objects, part[1:]))
for part in ['10', '11', '12', '13']:
for node in jobs_by_pol_part[part]['nodes']:
self.assertEqual(node['device'], 'sda')
self.assertEqual(jobs_by_pol_part[part]['path'],
os.path.join(self.objects_1, part[1:]))
    def test_collect_jobs_failure_report_with_auditor_stats_json(self):
        """Leftover auditor status files in the objects directories must not
        be counted as failures during job collection."""
        devs = [
            {'id': 0, 'device': 'sda', 'zone': 0,
             'region': 1, 'ip': '1.1.1.1', 'port': 1111,
             'replication_ip': '127.0.0.0', 'replication_port': 6200},
            {'id': 1, 'device': 'sdb', 'zone': 1,
             'region': 1, 'ip': '1.1.1.1', 'port': 1111,
             'replication_ip': '127.0.0.0', 'replication_port': 6200},
            {'id': 2, 'device': 'sdc', 'zone': 2,
             'region': 1, 'ip': '1.1.1.1', 'port': 1111,
             'replication_ip': '127.0.0.1', 'replication_port': 6200},
            {'id': 3, 'device': 'sdd', 'zone': 3,
             'region': 1, 'ip': '1.1.1.1', 'port': 1111,
             'replication_ip': '127.0.0.1', 'replication_port': 6200},
        ]
        # every disk gets auditor_status_*.json files alongside its partitions
        objects_sdb, objects_1_sdb, _, _ = \
            self._write_disk_data('sdb', with_json=True)
        objects_sdc, objects_1_sdc, _, _ = \
            self._write_disk_data('sdc', with_json=True)
        objects_sdd, objects_1_sdd, _, _ = \
            self._write_disk_data('sdd', with_json=True)
        _create_test_rings(self.testdir, devs)
        self.replicator.collect_jobs()
        self.assertEqual(self.replicator.stats['failure'], 0)
    @mock.patch('swift.obj.replicator.random.shuffle', side_effect=lambda l: l)
    def test_collect_jobs_multi_disk(self, mock_shuffle):
        """With two local disks (sda/sdb share the local replication
        ip:port), collect_jobs emits jobs only for those disks, with one
        delete job per policy for each partition not assigned to a disk."""
        devs = [
            # Two disks on same IP/port
            {'id': 0, 'device': 'sda', 'zone': 0,
             'region': 1, 'ip': '1.1.1.1', 'port': 1111,
             'replication_ip': '127.0.0.0', 'replication_port': 6200},
            {'id': 1, 'device': 'sdb', 'zone': 1,
             'region': 1, 'ip': '1.1.1.1', 'port': 1111,
             'replication_ip': '127.0.0.0', 'replication_port': 6200},
            # Two disks on same server, different ports
            {'id': 2, 'device': 'sdc', 'zone': 2,
             'region': 2, 'ip': '1.1.1.2', 'port': 1112,
             'replication_ip': '127.0.0.1', 'replication_port': 6200},
            {'id': 3, 'device': 'sdd', 'zone': 4,
             'region': 2, 'ip': '1.1.1.2', 'port': 1112,
             'replication_ip': '127.0.0.1', 'replication_port': 6201},
        ]
        objects_sdb, objects_1_sdb, _, _ = self._write_disk_data('sdb')
        objects_sdc, objects_1_sdc, _, _ = self._write_disk_data('sdc')
        objects_sdd, objects_1_sdd, _, _ = self._write_disk_data('sdd')
        _create_test_rings(self.testdir, devs)
        jobs = self.replicator.collect_jobs()
        # shuffle is patched to identity, so job order is deterministic
        self.assertEqual([mock.call(jobs)], mock_shuffle.mock_calls)
        jobs_to_delete = [j for j in jobs if j['delete']]
        self.assertEqual(len(jobs_to_delete), 4)
        self.assertEqual([
            '1', '2', # policy 0; 1 not on sda, 2 not on sdb
            '1', '2', # policy 1; 1 not on sda, 2 not on sdb
        ], [j['partition'] for j in jobs_to_delete])
        jobs_by_pol_part_dev = {}
        for job in jobs:
            # There should be no jobs with a device not in just sda & sdb
            self.assertTrue(job['device'] in ('sda', 'sdb'))
            jobs_by_pol_part_dev[
                str(int(job['policy'])) + job['partition'] + job['device']
            ] = job
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['00sda']['nodes']],
                         [1, 2])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['00sdb']['nodes']],
                         [0, 2])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['01sda']['nodes']],
                         [1, 2, 3])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['01sdb']['nodes']],
                         [2, 3])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['02sda']['nodes']],
                         [2, 3])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['02sdb']['nodes']],
                         [2, 3, 0])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['03sda']['nodes']],
                         [3, 1])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['03sdb']['nodes']],
                         [3, 0])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['10sda']['nodes']],
                         [1, 2])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['10sdb']['nodes']],
                         [0, 2])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['11sda']['nodes']],
                         [1, 2, 3])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['11sdb']['nodes']],
                         [2, 3])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['12sda']['nodes']],
                         [2, 3])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['12sdb']['nodes']],
                         [2, 3, 0])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['13sda']['nodes']],
                         [3, 1])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['13sdb']['nodes']],
                         [3, 0])
        for part in ['00', '01', '02', '03']:
            self.assertEqual(jobs_by_pol_part_dev[part + 'sda']['path'],
                             os.path.join(self.objects, part[1:]))
            self.assertEqual(jobs_by_pol_part_dev[part + 'sdb']['path'],
                             os.path.join(objects_sdb, part[1:]))
        for part in ['10', '11', '12', '13']:
            self.assertEqual(jobs_by_pol_part_dev[part + 'sda']['path'],
                             os.path.join(self.objects_1, part[1:]))
            self.assertEqual(jobs_by_pol_part_dev[part + 'sdb']['path'],
                             os.path.join(objects_1_sdb, part[1:]))
    @mock.patch('swift.obj.replicator.random.shuffle', side_effect=lambda l: l)
    def test_collect_jobs_multi_disk_diff_ports_normal(self, mock_shuffle):
        """With servers_per_port unset, only sdc matches bind_ip 127.0.0.1
        and port 6200; all jobs are for sdc, and partition 3 (not on sdc)
        yields one delete per policy."""
        # Normally (servers_per_port=0), replication_ip AND replication_port
        # are used to determine local ring device entries. Here we show that
        # with bind_ip='127.0.0.1', bind_port=6200, only "sdc" is local.
        devs = [
            # Two disks on same IP/port
            {'id': 0, 'device': 'sda', 'zone': 0,
             'region': 1, 'ip': '1.1.1.1', 'port': 1111,
             'replication_ip': '127.0.0.0', 'replication_port': 6200},
            {'id': 1, 'device': 'sdb', 'zone': 1,
             'region': 1, 'ip': '1.1.1.1', 'port': 1111,
             'replication_ip': '127.0.0.0', 'replication_port': 6200},
            # Two disks on same server, different ports
            {'id': 2, 'device': 'sdc', 'zone': 2,
             'region': 2, 'ip': '1.1.1.2', 'port': 1112,
             'replication_ip': '127.0.0.1', 'replication_port': 6200},
            {'id': 3, 'device': 'sdd', 'zone': 4,
             'region': 2, 'ip': '1.1.1.2', 'port': 1112,
             'replication_ip': '127.0.0.1', 'replication_port': 6201},
        ]
        objects_sdb, objects_1_sdb, _, _ = self._write_disk_data('sdb')
        objects_sdc, objects_1_sdc, _, _ = self._write_disk_data('sdc')
        objects_sdd, objects_1_sdd, _, _ = self._write_disk_data('sdd')
        _create_test_rings(self.testdir, devs)
        # rebuild the replicator with the new bind_ip before collecting
        self.conf['bind_ip'] = '127.0.0.1'
        self._create_replicator()
        jobs = self.replicator.collect_jobs()
        self.assertEqual([mock.call(jobs)], mock_shuffle.mock_calls)
        jobs_to_delete = [j for j in jobs if j['delete']]
        self.assertEqual(len(jobs_to_delete), 2)
        self.assertEqual([
            '3', # policy 0; 3 not on sdc
            '3', # policy 1; 3 not on sdc
        ], [j['partition'] for j in jobs_to_delete])
        jobs_by_pol_part_dev = {}
        for job in jobs:
            # There should be no jobs with a device not sdc
            self.assertEqual(job['device'], 'sdc')
            jobs_by_pol_part_dev[
                str(int(job['policy'])) + job['partition'] + job['device']
            ] = job
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['00sdc']['nodes']],
                         [0, 1])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['01sdc']['nodes']],
                         [1, 3])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['02sdc']['nodes']],
                         [3, 0])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['03sdc']['nodes']],
                         [3, 0, 1])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['10sdc']['nodes']],
                         [0, 1])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['11sdc']['nodes']],
                         [1, 3])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['12sdc']['nodes']],
                         [3, 0])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['13sdc']['nodes']],
                         [3, 0, 1])
        for part in ['00', '01', '02', '03']:
            self.assertEqual(jobs_by_pol_part_dev[part + 'sdc']['path'],
                             os.path.join(objects_sdc, part[1:]))
        for part in ['10', '11', '12', '13']:
            self.assertEqual(jobs_by_pol_part_dev[part + 'sdc']['path'],
                             os.path.join(objects_1_sdc, part[1:]))
    @mock.patch('swift.obj.replicator.random.shuffle', side_effect=lambda l: l)
    def test_collect_jobs_multi_disk_servers_per_port(self, mock_shuffle):
        """Exercise local-device detection when servers_per_port is enabled.

        With servers_per_port > 0 only the replication_ip (not the port)
        has to match the replicator's bind_ip, so both sdc and sdd (same
        IP, different replication ports) are treated as local and both
        receive jobs.
        """
        # Normally (servers_per_port=0), replication_ip AND replication_port
        # are used to determine local ring device entries. Here we show that
        # with servers_per_port > 0 and bind_ip='127.0.0.1', bind_port=6200,
        # then both "sdc" and "sdd" are local.
        devs = [
            # Two disks on same IP/port
            {'id': 0, 'device': 'sda', 'zone': 0,
             'region': 1, 'ip': '1.1.1.1', 'port': 1111,
             'replication_ip': '127.0.0.0', 'replication_port': 6200},
            {'id': 1, 'device': 'sdb', 'zone': 1,
             'region': 1, 'ip': '1.1.1.1', 'port': 1111,
             'replication_ip': '127.0.0.0', 'replication_port': 6200},
            # Two disks on same server, different ports
            {'id': 2, 'device': 'sdc', 'zone': 2,
             'region': 2, 'ip': '1.1.1.2', 'port': 1112,
             'replication_ip': '127.0.0.1', 'replication_port': 6200},
            {'id': 3, 'device': 'sdd', 'zone': 4,
             'region': 2, 'ip': '1.1.1.2', 'port': 1112,
             'replication_ip': '127.0.0.1', 'replication_port': 6201},
        ]
        objects_sdb, objects_1_sdb, _, _ = self._write_disk_data('sdb')
        objects_sdc, objects_1_sdc, _, _ = self._write_disk_data('sdc')
        objects_sdd, objects_1_sdd, _, _ = self._write_disk_data('sdd')
        _create_test_rings(self.testdir, devs)
        self.conf['bind_ip'] = '127.0.0.1'
        self.conf['servers_per_port'] = 1  # diff port ok
        self._create_replicator()
        jobs = self.replicator.collect_jobs()
        self.assertEqual([mock.call(jobs)], mock_shuffle.mock_calls)
        jobs_to_delete = [j for j in jobs if j['delete']]
        self.assertEqual(len(jobs_to_delete), 4)
        self.assertEqual([
            '3', '0',  # policy 0; 3 not on sdc, 0 not on sdd
            '3', '0',  # policy 1; 3 not on sdc, 0 not on sdd
        ], [j['partition'] for j in jobs_to_delete])
        # index the jobs by "<policy><partition><device>" for spot checks
        jobs_by_pol_part_dev = {}
        for job in jobs:
            # There should be no jobs with a device not in just sdc & sdd
            self.assertTrue(job['device'] in ('sdc', 'sdd'))
            jobs_by_pol_part_dev[
                str(int(job['policy'])) + job['partition'] + job['device']
            ] = job
        # verify the sync-partner node lists for every policy/part/disk
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['00sdc']['nodes']],
                         [0, 1])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['00sdd']['nodes']],
                         [0, 1, 2])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['01sdc']['nodes']],
                         [1, 3])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['01sdd']['nodes']],
                         [1, 2])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['02sdc']['nodes']],
                         [3, 0])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['02sdd']['nodes']],
                         [2, 0])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['03sdc']['nodes']],
                         [3, 0, 1])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['03sdd']['nodes']],
                         [0, 1])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['10sdc']['nodes']],
                         [0, 1])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['10sdd']['nodes']],
                         [0, 1, 2])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['11sdc']['nodes']],
                         [1, 3])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['11sdd']['nodes']],
                         [1, 2])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['12sdc']['nodes']],
                         [3, 0])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['12sdd']['nodes']],
                         [2, 0])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['13sdc']['nodes']],
                         [3, 0, 1])
        self.assertEqual([node['id']
                          for node in jobs_by_pol_part_dev['13sdd']['nodes']],
                         [0, 1])
        # each job's path points into the right per-policy datadir
        for part in ['00', '01', '02', '03']:
            self.assertEqual(jobs_by_pol_part_dev[part + 'sdc']['path'],
                             os.path.join(objects_sdc, part[1:]))
            self.assertEqual(jobs_by_pol_part_dev[part + 'sdd']['path'],
                             os.path.join(objects_sdd, part[1:]))
        for part in ['10', '11', '12', '13']:
            self.assertEqual(jobs_by_pol_part_dev[part + 'sdc']['path'],
                             os.path.join(objects_1_sdc, part[1:]))
            self.assertEqual(jobs_by_pol_part_dev[part + 'sdd']['path'],
                             os.path.join(objects_1_sdd, part[1:]))
def test_collect_jobs_handoffs_first(self):
self.replicator.handoffs_first = True
jobs = self.replicator.collect_jobs()
self.assertTrue(jobs[0]['delete'])
self.assertEqual('1', jobs[0]['partition'])
    def test_handoffs_first_mode_will_process_all_jobs_after_handoffs(self):
        """With handoffs_first and no failures, the pass must run every
        handoff (delete) job first and then still process all update jobs,
        leaving nothing in handoffs_remaining."""
        # make a object in the handoff & primary partition
        expected_suffix_paths = []
        for policy in POLICIES:
            # primary
            ts = next(self.ts)
            df = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'o', policy)
            with df.create() as w:
                w.write('asdf')
                w.put({'X-Timestamp': ts.internal})
                w.commit(ts)
            expected_suffix_paths.append(os.path.dirname(df._datadir))
            # handoff
            ts = next(self.ts)
            df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o', policy)
            with df.create() as w:
                w.write('asdf')
                w.put({'X-Timestamp': ts.internal})
                w.commit(ts)
            expected_suffix_paths.append(os.path.dirname(df._datadir))
        # rsync will be called for all parts we created objects in
        process_arg_checker = [
            # (return_code, stdout, <each in capture rsync args>)
            (0, '', []),
            (0, '', []),
            (0, '', []),  # handoff job "first" policy
            (0, '', []),
            (0, '', []),
            (0, '', []),  # handoff job "second" policy
            (0, '', []),
            (0, '', []),  # update job "first" policy
            (0, '', []),
            (0, '', []),  # update job "second" policy
        ]
        # each handoff partition node gets one replicate request for after
        # rsync (2 * 3), each primary partition with objects gets two
        # replicate requests (pre-flight and post sync) to each of each
        # partners (2 * 2 * 2), the 2 remaining empty parts (2 & 3) get a
        # pre-flight replicate request per node for each storage policy
        # (2 * 2 * 2) - so 6 + 8 + 8 == 22
        replicate_responses = [200] * 22
        stub_body = pickle.dumps({})
        with _mock_process(process_arg_checker) as rsync_log, \
                mock.patch('swift.obj.replicator.whataremyips',
                           side_effect=_ips), \
                mocked_http_conn(*replicate_responses,
                                 body=stub_body) as conn_log:
            self.replicator.handoffs_first = True
            self.replicator.replicate()
        # all jobs processed!
        self.assertEqual(self.replicator.job_count,
                         self.replicator.replication_count)
        self.assertFalse(self.replicator.handoffs_remaining)
        # sanity, all the handoffs suffixes we filled in were rsync'd
        found_rsync_suffix_paths = set()
        for subprocess_info in rsync_log:
            local_path, remote_path = subprocess_info['rsync_args'][-2:]
            found_rsync_suffix_paths.add(local_path)
        self.assertEqual(set(expected_suffix_paths), found_rsync_suffix_paths)
        # sanity, all nodes got replicated
        found_replicate_calls = defaultdict(int)
        for req in conn_log.requests:
            self.assertEqual(req['method'], 'REPLICATE')
            found_replicate_key = (
                int(req['headers']['X-Backend-Storage-Policy-Index']),
                req['path'])
            found_replicate_calls[found_replicate_key] += 1
        expected_replicate_calls = {
            (0, '/sda/1/a83'): 3,
            (1, '/sda/1/a83'): 3,
            (0, '/sda/0'): 2,
            (0, '/sda/0/a83'): 2,
            (1, '/sda/0'): 2,
            (1, '/sda/0/a83'): 2,
            (0, '/sda/2'): 2,
            (1, '/sda/2'): 2,
            (0, '/sda/3'): 2,
            (1, '/sda/3'): 2,
        }
        self.assertEqual(dict(found_replicate_calls),
                         expected_replicate_calls)
    def test_handoffs_first_mode_will_abort_if_handoffs_remaining(self):
        """If a handoff job fails while handoffs_first is set, the pass must
        abort early with handoffs_remaining reported; a later pass with
        handoff_delete = 2 tolerates the single rsync failure and finishes
        rebalancing every handoff partition away."""
        # make an object in the handoff partition
        handoff_suffix_paths = []
        for policy in POLICIES:
            ts = next(self.ts)
            df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o', policy)
            with df.create() as w:
                w.write('asdf')
                w.put({'X-Timestamp': ts.internal})
                w.commit(ts)
            handoff_suffix_paths.append(os.path.dirname(df._datadir))
        process_arg_checker = [
            # (return_code, stdout, <each in capture rsync args>)
            (0, '', []),
            (1, '', []),
            (0, '', []),
            (0, '', []),
            (0, '', []),
            (0, '', []),
        ]
        stub_body = pickle.dumps({})
        with _mock_process(process_arg_checker) as rsync_log, \
                mock.patch('swift.obj.replicator.whataremyips',
                           side_effect=_ips), \
                mocked_http_conn(*[200] * 5, body=stub_body) as conn_log:
            self.replicator.handoffs_first = True
            self.replicator.replicate()
        # stopped after handoffs!
        self.assertEqual(1, self.replicator.handoffs_remaining)
        self.assertEqual(8, self.replicator.job_count)
        # in addition to the two update_deleted jobs as many as "concurrency"
        # jobs may have been spawned into the pool before the failed
        # update_deleted job incremented handoffs_remaining and caused the
        # handoffs_first check to abort the current pass
        self.assertLessEqual(self.replicator.replication_count,
                             2 + self.replicator.concurrency)
        # sanity, all the handoffs suffixes we filled in were rsync'd
        found_rsync_suffix_paths = set()
        expected_replicate_requests = set()
        for subprocess_info in rsync_log:
            local_path, remote_path = subprocess_info['rsync_args'][-2:]
            found_rsync_suffix_paths.add(local_path)
            if subprocess_info['ret_code'] == 0:
                node_ip = remote_path.split(':', 1)[0]
                expected_replicate_requests.add(node_ip)
        self.assertEqual(set(handoff_suffix_paths), found_rsync_suffix_paths)
        # sanity, all successful rsync nodes got REPLICATE requests
        found_replicate_requests = set()
        self.assertEqual(5, len(conn_log.requests))
        for req in conn_log.requests:
            self.assertEqual(req['method'], 'REPLICATE')
            found_replicate_requests.add(req['ip'])
        self.assertEqual(expected_replicate_requests,
                         found_replicate_requests)
        # and at least one partition got removed!
        remaining_policies = []
        for path in handoff_suffix_paths:
            if os.path.exists(path):
                policy = diskfile.extract_policy(path)
                remaining_policies.append(policy)
        self.assertEqual(len(remaining_policies), 1)
        remaining_policy = remaining_policies[0]
        # try again but with handoff_delete allowing for a single failure
        with _mock_process(process_arg_checker) as rsync_log, \
                mock.patch('swift.obj.replicator.whataremyips',
                           side_effect=_ips), \
                mocked_http_conn(*[200] * 14, body=stub_body) as conn_log:
            self.replicator.handoff_delete = 2
            self.replicator.replicate()
        # all jobs processed!
        self.assertEqual(self.replicator.job_count,
                         self.replicator.replication_count)
        self.assertFalse(self.replicator.handoffs_remaining)
        # sanity, all parts got replicated
        found_replicate_calls = defaultdict(int)
        for req in conn_log.requests:
            self.assertEqual(req['method'], 'REPLICATE')
            found_replicate_key = (
                int(req['headers']['X-Backend-Storage-Policy-Index']),
                req['path'])
            found_replicate_calls[found_replicate_key] += 1
        expected_replicate_calls = {
            (int(remaining_policy), '/sda/1/a83'): 2,
            (0, '/sda/0'): 2,
            (1, '/sda/0'): 2,
            (0, '/sda/2'): 2,
            (1, '/sda/2'): 2,
            (0, '/sda/3'): 2,
            (1, '/sda/3'): 2,
        }
        self.assertEqual(dict(found_replicate_calls),
                         expected_replicate_calls)
        # and now all handoff partitions have been rebalanced away!
        removed_paths = set()
        for path in handoff_suffix_paths:
            if not os.path.exists(path):
                removed_paths.add(path)
        self.assertEqual(removed_paths, set(handoff_suffix_paths))
def test_replicator_skips_bogus_partition_dirs(self):
# A directory in the wrong place shouldn't crash the replicator
rmtree(self.objects)
rmtree(self.objects_1)
os.mkdir(self.objects)
os.mkdir(self.objects_1)
os.mkdir(os.path.join(self.objects, "burrito"))
jobs = self.replicator.collect_jobs()
self.assertEqual(len(jobs), 0)
    def test_replicator_skips_rsync_temp_files(self):
        """Every rsync invocation must carry an --exclude pattern matching
        rsync's ".<name>.XXXXXX" temporary file names, so partially
        transferred files are never replicated onward."""
        # the empty pre-setup dirs aren't that useful to us
        device_path = os.path.join(self.devices, 'sda')
        rmtree(device_path, ignore_errors=1)
        os.mkdir(device_path)
        # create a real data file to trigger rsync
        df = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'o',
                                      policy=POLICIES.legacy)
        ts = next(self.ts)
        with df.create() as w:
            w.write('asdf')
            w.put({'X-Timestamp': ts.internal})
            w.commit(ts)
        # pre-flight and post sync request for both other primaries
        expected_replicate_requests = 4
        process_arg_checker = [
            # (return_code, stdout, <each in capture rsync args>)
            (0, '', []),
            (0, '', []),
        ]
        stub_body = pickle.dumps({})
        with _mock_process(process_arg_checker) as rsync_log, \
                mock.patch('swift.obj.replicator.whataremyips',
                           side_effect=_ips), \
                mocked_http_conn(*[200] * expected_replicate_requests,
                                 body=stub_body) as conn_log:
            self.replicator.replicate()
        self.assertEqual(['REPLICATE'] * expected_replicate_requests,
                         [r['method'] for r in conn_log.requests])
        # expect one rsync to each other primary node
        self.assertEqual(2, len(rsync_log))
        # six bracket groups == the six random chars rsync appends
        expected = '--exclude=.*.[0-9a-zA-Z][0-9a-zA-Z][0-9a-zA-Z]' \
                   '[0-9a-zA-Z][0-9a-zA-Z][0-9a-zA-Z]'
        for subprocess_info in rsync_log:
            rsync_args = subprocess_info['rsync_args']
            for arg in rsync_args:
                if arg.startswith('--exclude'):
                    self.assertEqual(arg, expected)
                    break
            else:
                self.fail('Did not find --exclude argument in %r' %
                          rsync_args)
    def test_replicator_removes_zbf(self):
        """A partition dir that has degraded to a zero-byte file must be
        removed during the replication pass (not in collect_jobs) with a
        warning logged per removal."""
        # After running xfs_repair, a partition directory could become a
        # zero-byte file. If this happens, the replicator should clean it
        # up, log something, and move on to the next partition.
        # Surprise! Partition dir 1 is actually a zero-byte file.
        pol_0_part_1_path = os.path.join(self.objects, '1')
        rmtree(pol_0_part_1_path)
        with open(pol_0_part_1_path, 'w'):
            pass
        self.assertTrue(os.path.isfile(pol_0_part_1_path))  # sanity check
        # Policy 1's partition dir 1 is also a zero-byte file.
        pol_1_part_1_path = os.path.join(self.objects_1, '1')
        rmtree(pol_1_part_1_path)
        with open(pol_1_part_1_path, 'w'):
            pass
        self.assertTrue(os.path.isfile(pol_1_part_1_path))  # sanity check
        # Don't delete things in collect_jobs(); all the stat() calls would
        # make replicator startup really slow.
        self.replicator.collect_jobs()
        self.assertTrue(os.path.exists(pol_0_part_1_path))
        self.assertTrue(os.path.exists(pol_1_part_1_path))
        # After a replication pass, the files should be gone
        with mock.patch('swift.obj.replicator.http_connect',
                        mock_http_connect(200)):
            self.replicator.run_once()
        self.assertFalse(os.path.exists(pol_0_part_1_path))
        self.assertFalse(os.path.exists(pol_1_part_1_path))
        self.assertEqual(
            sorted(self.logger.get_lines_for_level('warning')), [
                ('Removing partition directory which was a file: %s'
                 % pol_1_part_1_path),
                ('Removing partition directory which was a file: %s'
                 % pol_0_part_1_path),
            ])
def test_delete_partition(self):
with mock.patch('swift.obj.replicator.http_connect',
mock_http_connect(200)):
df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
policy=POLICIES.legacy)
mkdirs(df._datadir)
f = open(os.path.join(df._datadir,
normalize_timestamp(time.time()) + '.data'),
'wb')
f.write('1234567890')
f.close()
ohash = hash_path('a', 'c', 'o')
data_dir = ohash[-3:]
whole_path_from = os.path.join(self.objects, '1', data_dir)
part_path = os.path.join(self.objects, '1')
self.assertTrue(os.access(part_path, os.F_OK))
ring = self.replicator.load_object_ring(POLICIES[0])
nodes = [node for node in
ring.get_part_nodes(1)
if node['ip'] not in _ips()]
process_arg_checker = []
for node in nodes:
rsync_mod = '%s::object/sda/objects/%s' % (node['ip'], 1)
process_arg_checker.append(
(0, '', ['rsync', whole_path_from, rsync_mod]))
with _mock_process(process_arg_checker):
self.replicator.replicate()
self.assertFalse(os.access(part_path, os.F_OK))
    def test_delete_partition_default_sync_method(self):
        """With sync_method absent from the conf, the replicator still runs
        (rsync is the fallback) and removes a fully-synced handoff
        partition."""
        self.replicator.conf.pop('sync_method')
        with mock.patch('swift.obj.replicator.http_connect',
                        mock_http_connect(200)):
            df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
                                          policy=POLICIES.legacy)
            mkdirs(df._datadir)
            f = open(os.path.join(df._datadir,
                                  normalize_timestamp(time.time()) + '.data'),
                     'wb')
            f.write('1234567890')
            f.close()
            ohash = hash_path('a', 'c', 'o')
            data_dir = ohash[-3:]
            whole_path_from = os.path.join(self.objects, '1', data_dir)
            part_path = os.path.join(self.objects, '1')
            self.assertTrue(os.access(part_path, os.F_OK))
            ring = self.replicator.load_object_ring(POLICIES[0])
            nodes = [node for node in
                     ring.get_part_nodes(1)
                     if node['ip'] not in _ips()]
            process_arg_checker = []
            for node in nodes:
                rsync_mod = '%s::object/sda/objects/%s' % (node['ip'], 1)
                process_arg_checker.append(
                    (0, '', ['rsync', whole_path_from, rsync_mod]))
            with _mock_process(process_arg_checker):
                self.replicator.replicate()
            # every rsync succeeded, so the handoff partition is removed
            self.assertFalse(os.access(part_path, os.F_OK))
    def test_delete_partition_ssync_single_region(self):
        """With sync_method=ssync in a single-region ring (including IPv6
        nodes), a fully in-sync handoff has its hashdir, suffix dir, and
        partition dir all removed in one pass."""
        devs = [
            {'id': 0, 'device': 'sda', 'zone': 0,
             'region': 1, 'ip': '127.0.0.0', 'port': 6200},
            {'id': 1, 'device': 'sda', 'zone': 1,
             'region': 1, 'ip': '127.0.0.1', 'port': 6200},
            {'id': 2, 'device': 'sda', 'zone': 2,
             'region': 1, 'ip': '127.0.0.2', 'port': 6200},
            {'id': 3, 'device': 'sda', 'zone': 4,
             'region': 1, 'ip': '127.0.0.3', 'port': 6200},
            {'id': 4, 'device': 'sda', 'zone': 5,
             'region': 1, 'ip': '127.0.0.4', 'port': 6200},
            {'id': 5, 'device': 'sda', 'zone': 6,
             'region': 1, 'ip': 'fe80::202:b3ff:fe1e:8329', 'port': 6200},
            {'id': 6, 'device': 'sda', 'zone': 7, 'region': 1,
             'ip': '2001:0db8:85a3:0000:0000:8a2e:0370:7334', 'port': 6200},
        ]
        _create_test_rings(self.testdir, devs=devs)
        self.conf['sync_method'] = 'ssync'
        self.replicator = object_replicator.ObjectReplicator(self.conf)
        self.replicator.logger = debug_logger()
        self.replicator._zero_stats()
        with mock.patch('swift.obj.replicator.http_connect',
                        mock_http_connect(200)):
            df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
                                          policy=POLICIES.legacy)
            mkdirs(df._datadir)
            ts = normalize_timestamp(time.time())
            f = open(os.path.join(df._datadir, ts + '.data'),
                     'wb')
            f.write('1234567890')
            f.close()
            ohash = hash_path('a', 'c', 'o')
            whole_path_from = storage_directory(self.objects, 1, ohash)
            suffix_dir_path = os.path.dirname(whole_path_from)
            part_path = os.path.join(self.objects, '1')
            self.assertTrue(os.access(part_path, os.F_OK))
            def _fake_ssync(node, job, suffixes, **kwargs):
                # report every object as in sync on the remote node
                return True, {ohash: ts}
            self.replicator.sync_method = _fake_ssync
            self.replicator.replicate()
            self.assertFalse(os.access(whole_path_from, os.F_OK))
            self.assertFalse(os.access(suffix_dir_path, os.F_OK))
            self.assertFalse(os.access(part_path, os.F_OK))
def test_delete_partition_1(self):
with mock.patch('swift.obj.replicator.http_connect',
mock_http_connect(200)):
df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
policy=POLICIES[1])
mkdirs(df._datadir)
f = open(os.path.join(df._datadir,
normalize_timestamp(time.time()) + '.data'),
'wb')
f.write('1234567890')
f.close()
ohash = hash_path('a', 'c', 'o')
data_dir = ohash[-3:]
whole_path_from = os.path.join(self.objects_1, '1', data_dir)
part_path = os.path.join(self.objects_1, '1')
self.assertTrue(os.access(part_path, os.F_OK))
ring = self.replicator.load_object_ring(POLICIES[1])
nodes = [node for node in
ring.get_part_nodes(1)
if node['ip'] not in _ips()]
process_arg_checker = []
for node in nodes:
rsync_mod = '%s::object/sda/objects-1/%s' % (node['ip'], 1)
process_arg_checker.append(
(0, '', ['rsync', whole_path_from, rsync_mod]))
with _mock_process(process_arg_checker):
self.replicator.replicate()
self.assertFalse(os.access(part_path, os.F_OK))
    def test_delete_partition_with_failures(self):
        """With the default handoff_delete (all syncs must succeed), a
        single failed rsync must keep the handoff partition in place."""
        with mock.patch('swift.obj.replicator.http_connect',
                        mock_http_connect(200)):
            df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
                                          policy=POLICIES.legacy)
            mkdirs(df._datadir)
            f = open(os.path.join(df._datadir,
                                  normalize_timestamp(time.time()) + '.data'),
                     'wb')
            f.write('1234567890')
            f.close()
            ohash = hash_path('a', 'c', 'o')
            data_dir = ohash[-3:]
            whole_path_from = os.path.join(self.objects, '1', data_dir)
            part_path = os.path.join(self.objects, '1')
            self.assertTrue(os.access(part_path, os.F_OK))
            ring = self.replicator.load_object_ring(POLICIES[0])
            nodes = [node for node in
                     ring.get_part_nodes(1)
                     if node['ip'] not in _ips()]
            process_arg_checker = []
            for i, node in enumerate(nodes):
                rsync_mod = '%s::object/sda/objects/%s' % (node['ip'], 1)
                if i == 0:
                    # force one of the rsync calls to fail
                    ret_code = 1
                else:
                    ret_code = 0
                process_arg_checker.append(
                    (ret_code, '', ['rsync', whole_path_from, rsync_mod]))
            with _mock_process(process_arg_checker):
                self.replicator.replicate()
            # The path should still exist
            self.assertTrue(os.access(part_path, os.F_OK))
    def test_delete_partition_with_handoff_delete(self):
        """With handoff_delete = 2, one failed rsync still leaves enough
        successful copies to delete the handoff partition."""
        with mock.patch('swift.obj.replicator.http_connect',
                        mock_http_connect(200)):
            self.replicator.handoff_delete = 2
            df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
                                          policy=POLICIES.legacy)
            mkdirs(df._datadir)
            f = open(os.path.join(df._datadir,
                                  normalize_timestamp(time.time()) + '.data'),
                     'wb')
            f.write('1234567890')
            f.close()
            ohash = hash_path('a', 'c', 'o')
            data_dir = ohash[-3:]
            whole_path_from = os.path.join(self.objects, '1', data_dir)
            part_path = os.path.join(self.objects, '1')
            self.assertTrue(os.access(part_path, os.F_OK))
            ring = self.replicator.load_object_ring(POLICIES[0])
            nodes = [node for node in
                     ring.get_part_nodes(1)
                     if node['ip'] not in _ips()]
            process_arg_checker = []
            for i, node in enumerate(nodes):
                rsync_mod = '%s::object/sda/objects/%s' % (node['ip'], 1)
                if i == 0:
                    # force one of the rsync calls to fail
                    ret_code = 1
                else:
                    ret_code = 0
                process_arg_checker.append(
                    (ret_code, '', ['rsync', whole_path_from, rsync_mod]))
            with _mock_process(process_arg_checker):
                self.replicator.replicate()
            # two of three syncs succeeded, so the partition is removed
            self.assertFalse(os.access(part_path, os.F_OK))
    def test_delete_partition_with_handoff_delete_failures(self):
        """With handoff_delete = 2 and two failed rsyncs, too few copies
        succeeded and the handoff partition must be kept."""
        with mock.patch('swift.obj.replicator.http_connect',
                        mock_http_connect(200)):
            self.replicator.handoff_delete = 2
            df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
                                          policy=POLICIES.legacy)
            mkdirs(df._datadir)
            f = open(os.path.join(df._datadir,
                                  normalize_timestamp(time.time()) + '.data'),
                     'wb')
            f.write('1234567890')
            f.close()
            ohash = hash_path('a', 'c', 'o')
            data_dir = ohash[-3:]
            whole_path_from = os.path.join(self.objects, '1', data_dir)
            part_path = os.path.join(self.objects, '1')
            self.assertTrue(os.access(part_path, os.F_OK))
            ring = self.replicator.load_object_ring(POLICIES[0])
            nodes = [node for node in
                     ring.get_part_nodes(1)
                     if node['ip'] not in _ips()]
            process_arg_checker = []
            for i, node in enumerate(nodes):
                rsync_mod = '%s::object/sda/objects/%s' % (node['ip'], 1)
                if i in (0, 1):
                    # force two of the rsync calls to fail
                    ret_code = 1
                else:
                    ret_code = 0
                process_arg_checker.append(
                    (ret_code, '', ['rsync', whole_path_from, rsync_mod]))
            with _mock_process(process_arg_checker):
                self.replicator.replicate()
            # The file should still exist
            self.assertTrue(os.access(part_path, os.F_OK))
    def test_delete_partition_with_handoff_delete_fail_in_other_region(self):
        """A failed rsync to the other region's node keeps the handoff
        partition under the default (all-must-succeed) handoff_delete."""
        with mock.patch('swift.obj.replicator.http_connect',
                        mock_http_connect(200)):
            df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
                                          policy=POLICIES.legacy)
            mkdirs(df._datadir)
            f = open(os.path.join(df._datadir,
                                  normalize_timestamp(time.time()) + '.data'),
                     'wb')
            f.write('1234567890')
            f.close()
            ohash = hash_path('a', 'c', 'o')
            data_dir = ohash[-3:]
            whole_path_from = os.path.join(self.objects, '1', data_dir)
            part_path = os.path.join(self.objects, '1')
            self.assertTrue(os.access(part_path, os.F_OK))
            ring = self.replicator.load_object_ring(POLICIES[0])
            nodes = [node for node in
                     ring.get_part_nodes(1)
                     if node['ip'] not in _ips()]
            process_arg_checker = []
            for node in nodes:
                rsync_mod = '%s::object/sda/objects/%s' % (node['ip'], 1)
                if node['region'] != 1:
                    # the rsync calls for other region to fail
                    ret_code = 1
                else:
                    ret_code = 0
                process_arg_checker.append(
                    (ret_code, '', ['rsync', whole_path_from, rsync_mod]))
            with _mock_process(process_arg_checker):
                self.replicator.replicate()
            # The file should still exist
            self.assertTrue(os.access(part_path, os.F_OK))
def test_delete_partition_override_params(self):
df = self.df_mgr.get_diskfile('sda', '0', 'a', 'c', 'o',
policy=POLICIES.legacy)
mkdirs(df._datadir)
part_path = os.path.join(self.objects, '1')
self.assertTrue(os.access(part_path, os.F_OK))
self.replicator.replicate(override_devices=['sdb'])
self.assertTrue(os.access(part_path, os.F_OK))
self.replicator.replicate(override_partitions=['9'])
self.assertTrue(os.access(part_path, os.F_OK))
self.replicator.replicate(override_devices=['sda'],
override_partitions=['1'])
self.assertFalse(os.access(part_path, os.F_OK))
def test_delete_policy_override_params(self):
df0 = self.df_mgr.get_diskfile('sda', '99', 'a', 'c', 'o',
policy=POLICIES.legacy)
df1 = self.df_mgr.get_diskfile('sda', '99', 'a', 'c', 'o',
policy=POLICIES[1])
mkdirs(df0._datadir)
mkdirs(df1._datadir)
pol0_part_path = os.path.join(self.objects, '99')
pol1_part_path = os.path.join(self.objects_1, '99')
# sanity checks
self.assertTrue(os.access(pol0_part_path, os.F_OK))
self.assertTrue(os.access(pol1_part_path, os.F_OK))
# a bogus policy index doesn't bother the replicator any more than a
# bogus device or partition does
self.replicator.run_once(policies='1,2,5')
self.assertFalse(os.access(pol1_part_path, os.F_OK))
self.assertTrue(os.access(pol0_part_path, os.F_OK))
    def test_delete_partition_ssync(self):
        """Step through three ssync passes: a failure on the third stubbed
        call keeps everything; once all candidates are in sync the hashdir
        and suffix dir go; a final pass removes the empty partition."""
        with mock.patch('swift.obj.replicator.http_connect',
                        mock_http_connect(200)):
            df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
                                          policy=POLICIES.legacy)
            mkdirs(df._datadir)
            ts = normalize_timestamp(time.time())
            f = open(os.path.join(df._datadir, ts + '.data'),
                     'wb')
            f.write('0')
            f.close()
            ohash = hash_path('a', 'c', 'o')
            whole_path_from = storage_directory(self.objects, 1, ohash)
            suffix_dir_path = os.path.dirname(whole_path_from)
            part_path = os.path.join(self.objects, '1')
            self.assertTrue(os.access(part_path, os.F_OK))
            self.call_nums = 0
            self.conf['sync_method'] = 'ssync'
            def _fake_ssync(node, job, suffixes, **kwargs):
                success = True
                ret_val = {ohash: ts}
                if self.call_nums == 2:
                    # ssync should return (True, []) only when the second
                    # candidate node has not get the replica yet.
                    success = False
                    ret_val = {}
                self.call_nums += 1
                return success, ret_val
            self.replicator.sync_method = _fake_ssync
            self.replicator.replicate()
            # The file should still exist
            self.assertTrue(os.access(whole_path_from, os.F_OK))
            self.assertTrue(os.access(suffix_dir_path, os.F_OK))
            self.assertTrue(os.access(part_path, os.F_OK))
            self.replicator.replicate()
            # The file should be deleted at the second replicate call
            self.assertFalse(os.access(whole_path_from, os.F_OK))
            self.assertFalse(os.access(suffix_dir_path, os.F_OK))
            self.assertTrue(os.access(part_path, os.F_OK))
            self.replicator.replicate()
            # The partition should be deleted at the third replicate call
            self.assertFalse(os.access(whole_path_from, os.F_OK))
            self.assertFalse(os.access(suffix_dir_path, os.F_OK))
            self.assertFalse(os.access(part_path, os.F_OK))
            del self.call_nums
    def test_delete_partition_ssync_with_sync_failure(self):
        """When ssync keeps failing (only one stubbed call "succeeds"
        across three passes), nothing may be deleted on any pass."""
        with mock.patch('swift.obj.replicator.http_connect',
                        mock_http_connect(200)):
            df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
                                          policy=POLICIES.legacy)
            ts = normalize_timestamp(time.time())
            mkdirs(df._datadir)
            f = open(os.path.join(df._datadir, ts + '.data'), 'wb')
            f.write('0')
            f.close()
            ohash = hash_path('a', 'c', 'o')
            whole_path_from = storage_directory(self.objects, 1, ohash)
            suffix_dir_path = os.path.dirname(whole_path_from)
            part_path = os.path.join(self.objects, '1')
            self.assertTrue(os.access(part_path, os.F_OK))
            self.call_nums = 0
            self.conf['sync_method'] = 'ssync'
            def _fake_ssync(node, job, suffixes, **kwags):
                success = False
                ret_val = {}
                if self.call_nums == 2:
                    # ssync should return (True, []) only when the second
                    # candidate node has not get the replica yet.
                    success = True
                    ret_val = {ohash: ts}
                self.call_nums += 1
                return success, ret_val
            self.replicator.sync_method = _fake_ssync
            self.replicator.replicate()
            # The file should still exist
            self.assertTrue(os.access(whole_path_from, os.F_OK))
            self.assertTrue(os.access(suffix_dir_path, os.F_OK))
            self.assertTrue(os.access(part_path, os.F_OK))
            self.replicator.replicate()
            # The file should still exist
            self.assertTrue(os.access(whole_path_from, os.F_OK))
            self.assertTrue(os.access(suffix_dir_path, os.F_OK))
            self.assertTrue(os.access(part_path, os.F_OK))
            self.replicator.replicate()
            # The file should still exist
            self.assertTrue(os.access(whole_path_from, os.F_OK))
            self.assertTrue(os.access(suffix_dir_path, os.F_OK))
            self.assertTrue(os.access(part_path, os.F_OK))
            del self.call_nums
    def test_delete_objs_ssync_only_when_in_sync(self):
        """Deletion after ssync must be limited to the hashes the remote
        confirms via remote_check_objs; with an empty confirmation set,
        nothing is removed even though each sync call "succeeds"."""
        self.replicator.logger = debug_logger('test-replicator')
        with mock.patch('swift.obj.replicator.http_connect',
                        mock_http_connect(200)):
            df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
                                          policy=POLICIES.legacy)
            mkdirs(df._datadir)
            ts = normalize_timestamp(time.time())
            f = open(os.path.join(df._datadir, ts + '.data'), 'wb')
            f.write('0')
            f.close()
            ohash = hash_path('a', 'c', 'o')
            whole_path_from = storage_directory(self.objects, 1, ohash)
            suffix_dir_path = os.path.dirname(whole_path_from)
            part_path = os.path.join(self.objects, '1')
            self.assertTrue(os.access(part_path, os.F_OK))
            self.call_nums = 0
            self.conf['sync_method'] = 'ssync'
            in_sync_objs = {}
            def _fake_ssync(node, job, suffixes, remote_check_objs=None):
                self.call_nums += 1
                if remote_check_objs is None:
                    # sync job
                    ret_val = {ohash: ts}
                else:
                    ret_val = in_sync_objs
                return True, ret_val
            self.replicator.sync_method = _fake_ssync
            self.replicator.replicate()
            self.assertEqual(3, self.call_nums)
            # The file should still exist
            self.assertTrue(os.access(whole_path_from, os.F_OK))
            self.assertTrue(os.access(suffix_dir_path, os.F_OK))
            self.assertTrue(os.access(part_path, os.F_OK))
            del self.call_nums
    def test_delete_partition_ssync_with_cleanup_failure(self):
        """os.rmdir failures during post-ssync cleanup: ENOENT and ENOTEMPTY
        are tolerated without logging an error, ENOTDIR is logged as one
        error, and cleanup completes once rmdir works again."""
        with mock.patch('swift.obj.replicator.http_connect',
                        mock_http_connect(200)):
            self.replicator.logger = mock_logger = \
                debug_logger('test-replicator')
            df = self.df_mgr.get_diskfile('sda', '1', 'a', 'c', 'o',
                                          policy=POLICIES.legacy)
            mkdirs(df._datadir)
            ts = normalize_timestamp(time.time())
            f = open(os.path.join(df._datadir, ts + '.data'), 'wb')
            f.write('0')
            f.close()
            ohash = hash_path('a', 'c', 'o')
            whole_path_from = storage_directory(self.objects, 1, ohash)
            suffix_dir_path = os.path.dirname(whole_path_from)
            part_path = os.path.join(self.objects, '1')
            self.assertTrue(os.access(part_path, os.F_OK))
            self.call_nums = 0
            self.conf['sync_method'] = 'ssync'
            def _fake_ssync(node, job, suffixes, **kwargs):
                success = True
                ret_val = {ohash: ts}
                if self.call_nums == 2:
                    # ssync should return (True, []) only when the second
                    # candidate node has not get the replica yet.
                    success = False
                    ret_val = {}
                self.call_nums += 1
                return success, ret_val
            rmdir_func = os.rmdir
            def raise_exception_rmdir(exception_class, error_no):
                # build an os.rmdir replacement that raises the given errno
                # only for the suffix dir; everything else really rmdir's
                instance = exception_class()
                instance.errno = error_no
                def func(directory):
                    if directory == suffix_dir_path:
                        raise instance
                    else:
                        rmdir_func(directory)
                return func
            self.replicator.sync_method = _fake_ssync
            self.replicator.replicate()
            # The file should still exist
            self.assertTrue(os.access(whole_path_from, os.F_OK))
            self.assertTrue(os.access(suffix_dir_path, os.F_OK))
            self.assertTrue(os.access(part_path, os.F_OK))
            # Fail with ENOENT
            with mock.patch('os.rmdir',
                            raise_exception_rmdir(OSError, ENOENT)):
                self.replicator.replicate()
            self.assertFalse(mock_logger.get_lines_for_level('error'))
            self.assertFalse(os.access(whole_path_from, os.F_OK))
            self.assertTrue(os.access(suffix_dir_path, os.F_OK))
            self.assertTrue(os.access(part_path, os.F_OK))
            # Fail with ENOTEMPTY
            with mock.patch('os.rmdir',
                            raise_exception_rmdir(OSError, ENOTEMPTY)):
                self.replicator.replicate()
            self.assertFalse(mock_logger.get_lines_for_level('error'))
            self.assertFalse(os.access(whole_path_from, os.F_OK))
            self.assertTrue(os.access(suffix_dir_path, os.F_OK))
            self.assertTrue(os.access(part_path, os.F_OK))
            # Fail with ENOTDIR
            with mock.patch('os.rmdir',
                            raise_exception_rmdir(OSError, ENOTDIR)):
                self.replicator.replicate()
            self.assertEqual(len(mock_logger.get_lines_for_level('error')), 1)
            self.assertFalse(os.access(whole_path_from, os.F_OK))
            self.assertTrue(os.access(suffix_dir_path, os.F_OK))
            self.assertTrue(os.access(part_path, os.F_OK))
            # Finally we can cleanup everything
            self.replicator.replicate()
            self.assertFalse(os.access(whole_path_from, os.F_OK))
            self.assertFalse(os.access(suffix_dir_path, os.F_OK))
            self.assertTrue(os.access(part_path, os.F_OK))
            self.replicator.replicate()
            self.assertFalse(os.access(whole_path_from, os.F_OK))
            self.assertFalse(os.access(suffix_dir_path, os.F_OK))
            self.assertFalse(os.access(part_path, os.F_OK))
    def test_run_once_recover_from_failure(self):
        """run_once() pushes a partition that should not stay on this node
        to its remote primaries via rsync and then removes the local copy.

        An object is written into partition '1'; after run_once() the data
        must have been rsynced to every remote primary and the local
        partition's hashes file deleted, while partitions '0', '2' and '3'
        keep theirs (see the final loop of assertions).
        """
        conf = dict(swift_dir=self.testdir, devices=self.devices,
                    bind_ip=_ips()[0],
                    mount_check='false', timeout='300', stats_interval='1')
        replicator = object_replicator.ObjectReplicator(conf)
        was_connector = object_replicator.http_connect
        try:
            object_replicator.http_connect = mock_http_connect(200)
            # Write some files into '1' and run replicate- they should be moved
            # to the other partitions and then node should get deleted.
            cur_part = '1'
            df = self.df_mgr.get_diskfile('sda', cur_part, 'a', 'c', 'o',
                                          policy=POLICIES.legacy)
            mkdirs(df._datadir)
            f = open(os.path.join(df._datadir,
                                  normalize_timestamp(time.time()) + '.data'),
                     'wb')
            f.write('1234567890')
            f.close()
            ohash = hash_path('a', 'c', 'o')
            data_dir = ohash[-3:]
            whole_path_from = os.path.join(self.objects, cur_part, data_dir)
            ring = replicator.load_object_ring(POLICIES[0])
            process_arg_checker = []
            # Expect exactly one rsync invocation per remote primary of
            # the partition (nodes whose ip is not one of ours).
            nodes = [node for node in
                     ring.get_part_nodes(int(cur_part))
                     if node['ip'] not in _ips()]
            for node in nodes:
                rsync_mod = '%s::object/sda/objects/%s' % (node['ip'],
                                                           cur_part)
                process_arg_checker.append(
                    (0, '', ['rsync', whole_path_from, rsync_mod]))
            self.assertTrue(os.access(os.path.join(self.objects,
                                                   '1', data_dir, ohash),
                                      os.F_OK))
            with _mock_process(process_arg_checker):
                replicator.run_once()
            self.assertFalse(process_errors)
            # Partition '1' must be gone locally; the others untouched.
            for i, result in [('0', True), ('1', False),
                              ('2', True), ('3', True)]:
                self.assertEqual(os.access(
                    os.path.join(self.objects,
                                 i, diskfile.HASH_FILE),
                    os.F_OK), result)
        finally:
            object_replicator.http_connect = was_connector
def test_run_once_recover_from_timeout(self):
conf = dict(swift_dir=self.testdir, devices=self.devices,
bind_ips=_ips()[0],
mount_check='false', timeout='300', stats_interval='1')
replicator = object_replicator.ObjectReplicator(conf)
was_connector = object_replicator.http_connect
was_get_hashes = object_replicator.DiskFileManager._get_hashes
was_execute = tpool.execute
self.get_hash_count = 0
try:
def fake_get_hashes(*args, **kwargs):
self.get_hash_count += 1
if self.get_hash_count == 3:
# raise timeout on last call to get hashes
raise Timeout()
return 2, {'abc': 'def'}
def fake_exc(tester, *args, **kwargs):
if 'Error syncing partition timeout' in args[0]:
tester.i_failed = True
self.i_failed = False
object_replicator.http_connect = mock_http_connect(200)
object_replicator.DiskFileManager._get_hashes = fake_get_hashes
replicator.logger.exception = \
lambda *args, **kwargs: fake_exc(self, *args, **kwargs)
# Write some files into '1' and run replicate- they should be moved
# to the other partitions and then node should get deleted.
cur_part = '1'
df = self.df_mgr.get_diskfile('sda', cur_part, 'a', 'c', 'o',
policy=POLICIES.legacy)
mkdirs(df._datadir)
f = open(os.path.join(df._datadir,
normalize_timestamp(time.time()) + '.data'),
'wb')
f.write('1234567890')
f.close()
ohash = hash_path('a', 'c', 'o')
data_dir = ohash[-3:]
whole_path_from = os.path.join(self.objects, cur_part, data_dir)
process_arg_checker = []
ring = replicator.load_object_ring(POLICIES[0])
nodes = [node for node in
ring.get_part_nodes(int(cur_part))
if node['ip'] not in _ips()]
for node in nodes:
rsync_mod = '%s::object/sda/objects/%s' % (node['ip'],
cur_part)
process_arg_checker.append(
(0, '', ['rsync', whole_path_from, rsync_mod]))
self.assertTrue(os.access(os.path.join(self.objects,
'1', data_dir, ohash),
os.F_OK))
with _mock_process(process_arg_checker):
replicator.run_once()
self.assertFalse(process_errors)
self.assertFalse(self.i_failed)
finally:
object_replicator.http_connect = was_connector
object_replicator.DiskFileManager._get_hashes = was_get_hashes
tpool.execute = was_execute
def test_run(self):
with _mock_process([(0, '')] * 100):
with mock.patch('swift.obj.replicator.http_connect',
mock_http_connect(200)):
self.replicator.replicate()
def test_run_withlog(self):
with _mock_process([(0, "stuff in log")] * 100):
with mock.patch('swift.obj.replicator.http_connect',
mock_http_connect(200)):
self.replicator.replicate()
def test_sync_just_calls_sync_method(self):
self.replicator.sync_method = mock.MagicMock()
self.replicator.sync('node', 'job', 'suffixes')
self.replicator.sync_method.assert_called_once_with(
'node', 'job', 'suffixes')
    @mock.patch('swift.obj.replicator.tpool_reraise')
    @mock.patch('swift.obj.replicator.http_connect', autospec=True)
    @mock.patch('swift.obj.replicator._do_listdir')
    def test_update(self, mock_do_listdir, mock_http, mock_tpool_reraise):
        """End-to-end checks of ObjectReplicator.update() with http_connect,
        tpool_reraise and _do_listdir mocked out.

        Scenarios, in order: 507 responses escalate through primaries then
        handoffs and finally log a partition sync error; 400 responses log
        one error per primary; 200 responses whose body fails pickle.loads
        log a per-node sync error; a clean 200 with matching hashes syncs
        nothing; a local job with differing hashes syncs suffix 'a83' to
        both primaries; a job whose candidates are in another region syncs
        only once; and REPLICATE requests use replication_ip/port.
        """

        # Reset the per-run counters the assertions below inspect.
        def set_default(self):
            self.replicator.suffix_count = 0
            self.replicator.suffix_sync = 0
            self.replicator.suffix_hash = 0
            self.replicator.replication_count = 0
            self.replicator.partition_times = []

        self.headers = {'Content-Length': '0',
                        'user-agent': 'object-replicator %s' % os.getpid()}
        mock_tpool_reraise.return_value = (0, {})
        all_jobs = self.replicator.collect_jobs()
        jobs = [job for job in all_jobs if not job['delete']]
        mock_http.return_value = answer = mock.MagicMock()
        answer.getresponse.return_value = resp = mock.MagicMock()
        # Check incorrect http_connect with status 507 and
        # count of attempts and call args
        resp.status = 507
        error = '%(replication_ip)s/%(device)s responded as unmounted'
        expect = 'Error syncing partition: '
        expected_listdir_calls = [
            mock.call(int(job['partition']),
                      self.replicator.replication_cycle)
            for job in jobs]
        do_listdir_results = [False, False, True, False, True, False]
        mock_do_listdir.side_effect = do_listdir_results
        expected_tpool_calls = [
            mock.call(self.replicator._diskfile_mgr._get_hashes, job['path'],
                      do_listdir=do_listdir,
                      reclaim_age=self.replicator.reclaim_age)
            for job, do_listdir in zip(jobs, do_listdir_results)
        ]
        for job in jobs:
            set_default(self)
            ring = job['policy'].object_ring
            self.headers['X-Backend-Storage-Policy-Index'] = int(job['policy'])
            self.replicator.update(job)
            error_lines = self.logger.get_lines_for_level('error')
            expected = []
            # ... first the primaries
            for node in job['nodes']:
                expected.append(error % node)
            # ... then it will get handoffs
            for node in job['policy'].object_ring.get_more_nodes(
                    int(job['partition'])):
                expected.append(error % node)
            # ... and finally it will exception out
            expected.append(expect)
            self.assertEqual(expected, error_lines)
            self.assertEqual(len(self.replicator.partition_times), 1)
            self.assertEqual(mock_http.call_count, len(ring._devs) - 1)
            reqs = []
            for node in job['nodes']:
                reqs.append(mock.call(node['ip'], node['port'], node['device'],
                                      job['partition'], 'REPLICATE', '',
                                      headers=self.headers))
            if job['partition'] == '0':
                self.assertEqual(self.replicator.suffix_hash, 0)
            mock_http.assert_has_calls(reqs, any_order=True)
            mock_http.reset_mock()
            self.logger.clear()
        mock_do_listdir.assert_has_calls(expected_listdir_calls)
        mock_tpool_reraise.assert_has_calls(expected_tpool_calls)
        mock_do_listdir.side_effect = None
        mock_do_listdir.return_value = False
        # Check incorrect http_connect with status 400 != HTTP_OK
        resp.status = 400
        error = 'Invalid response %(resp)s from %(ip)s'
        for job in jobs:
            set_default(self)
            self.replicator.update(job)
            # ... only the primaries
            expected = [error % {'resp': 400, 'ip': node['replication_ip']}
                        for node in job['nodes']]
            self.assertEqual(expected,
                             self.logger.get_lines_for_level('error'))
            self.assertEqual(len(self.replicator.partition_times), 1)
            self.logger.clear()
        # Check successful http_connection and exception with
        # incorrect pickle.loads(resp.read())
        resp.status = 200
        expect = 'Error syncing with node: %r: '
        for job in jobs:
            set_default(self)
            self.replicator.update(job)
            # ... only the primaries
            expected = [expect % node for node in job['nodes']]
            error_lines = self.logger.get_lines_for_level('error')
            self.assertEqual(expected, error_lines)
            self.assertEqual(len(self.replicator.partition_times), 1)
            self.logger.clear()
        # Check successful http_connection and correct
        # pickle.loads(resp.read()) for non local node
        resp.status = 200
        local_job = None
        resp.read.return_value = pickle.dumps({})
        for job in jobs:
            set_default(self)
            # limit local job to policy 0 for simplicity
            if job['partition'] == '0' and int(job['policy']) == 0:
                local_job = job.copy()
                continue
            self.replicator.update(job)
            self.assertEqual([], self.logger.get_lines_for_level('error'))
            self.assertEqual(len(self.replicator.partition_times), 1)
            self.assertEqual(self.replicator.suffix_hash, 0)
            self.assertEqual(self.replicator.suffix_sync, 0)
            self.assertEqual(self.replicator.suffix_count, 0)
            self.logger.clear()
        # Check successful http_connect and sync for local node
        # (local and remote hashes for suffix 'a83' differ, so it syncs).
        mock_tpool_reraise.return_value = (1, {'a83': 'ba47fd314242ec8c'
                                                      '7efb91f5d57336e4'})
        resp.read.return_value = pickle.dumps({'a83': 'c130a2c17ed45102a'
                                                      'ada0f4eee69494ff'})
        set_default(self)
        self.replicator.sync = fake_func = \
            mock.MagicMock(return_value=(True, []))
        self.replicator.update(local_job)
        reqs = []
        for node in local_job['nodes']:
            reqs.append(mock.call(node, local_job, ['a83']))
        fake_func.assert_has_calls(reqs, any_order=True)
        self.assertEqual(fake_func.call_count, 2)
        self.assertEqual(self.replicator.replication_count, 1)
        self.assertEqual(self.replicator.suffix_sync, 2)
        self.assertEqual(self.replicator.suffix_hash, 1)
        self.assertEqual(self.replicator.suffix_count, 1)
        # Efficient Replication Case
        set_default(self)
        self.replicator.sync = fake_func = \
            mock.MagicMock(return_value=(True, []))
        all_jobs = self.replicator.collect_jobs()
        job = None
        for tmp in all_jobs:
            if tmp['partition'] == '3':
                job = tmp
                break
        # The candidate nodes to replicate (i.e. dev1 and dev3)
        # belong to another region
        self.replicator.update(job)
        self.assertEqual(fake_func.call_count, 1)
        self.assertEqual(self.replicator.replication_count, 1)
        self.assertEqual(self.replicator.suffix_sync, 1)
        self.assertEqual(self.replicator.suffix_hash, 1)
        self.assertEqual(self.replicator.suffix_count, 1)
        mock_http.reset_mock()
        self.logger.clear()
        # test for replication params on policy 0 only
        repl_job = local_job.copy()
        for node in repl_job['nodes']:
            node['replication_ip'] = '127.0.0.11'
            node['replication_port'] = '6011'
        set_default(self)
        # with only one set of headers make sure we specify index 0 here
        # as otherwise it may be different from earlier tests
        self.headers['X-Backend-Storage-Policy-Index'] = 0
        self.replicator.update(repl_job)
        reqs = []
        for node in repl_job['nodes']:
            reqs.append(mock.call(node['replication_ip'],
                                  node['replication_port'], node['device'],
                                  repl_job['partition'], 'REPLICATE',
                                  '', headers=self.headers))
            reqs.append(mock.call(node['replication_ip'],
                                  node['replication_port'], node['device'],
                                  repl_job['partition'], 'REPLICATE',
                                  '/a83', headers=self.headers))
        mock_http.assert_has_calls(reqs, any_order=True)
    def test_rsync_compress_different_region(self):
        """rsync gets --compress only when rsync_compress is enabled AND the
        destination node is in a different region than the job."""
        self.assertEqual(self.replicator.sync_method, self.replicator.rsync)
        jobs = self.replicator.collect_jobs()
        _m_rsync = mock.Mock(return_value=0)
        _m_os_path_exists = mock.Mock(return_value=True)
        with mock.patch.object(self.replicator, '_rsync', _m_rsync), \
                mock.patch('os.path.exists', _m_os_path_exists):
            for job in jobs:
                self.assertTrue('region' in job)
                for node in job['nodes']:
                    for rsync_compress in (True, False):
                        self.replicator.rsync_compress = rsync_compress
                        ret = self.replicator.sync(node, job,
                                                   ['fake_suffix'])
                        self.assertTrue(ret)
                        if node['region'] != job['region']:
                            if rsync_compress:
                                # --compress arg should be passed to rsync
                                # binary only when rsync_compress option is
                                # enabled AND destination node is in a
                                # different region
                                self.assertTrue('--compress' in
                                                _m_rsync.call_args[0][0])
                            else:
                                self.assertFalse('--compress' in
                                                 _m_rsync.call_args[0][0])
                        else:
                            self.assertFalse('--compress' in
                                             _m_rsync.call_args[0][0])
                        # sync() must have probed both the suffix path and
                        # the job path for existence (in that order).
                        self.assertEqual(
                            _m_os_path_exists.call_args_list[-1][0][0],
                            os.path.join(job['path'], 'fake_suffix'))
                        self.assertEqual(
                            _m_os_path_exists.call_args_list[-2][0][0],
                            os.path.join(job['path']))
def test_do_listdir(self):
# Test if do_listdir is enabled for every 10th partition to rehash
# First number is the number of partitions in the job, list entries
# are the expected partition numbers per run
test_data = {
9: [1, 0, 1, 1, 1, 1, 1, 1, 1, 1],
29: [3, 2, 3, 3, 3, 3, 3, 3, 3, 3],
111: [12, 11, 11, 11, 11, 11, 11, 11, 11, 11]}
for partitions, expected in test_data.items():
seen = []
for phase in range(10):
invalidated = 0
for partition in range(partitions):
if object_replicator._do_listdir(partition, phase):
seen.append(partition)
invalidated += 1
# Every 10th partition is seen after each phase
self.assertEqual(expected[phase], invalidated)
# After 10 cycles every partition is seen exactly once
self.assertEqual(sorted(range(partitions)), sorted(seen))
# Allow running this test module directly, outside a test runner.
if __name__ == '__main__':
    unittest.main()
|
hurricanerix/swift
|
test/unit/obj/test_replicator.py
|
Python
|
apache-2.0
| 89,158
|
#!/usr/bin/env python
import os
import sys
import json
import argparse
import stat
import string
import time
# Maps each supported git hook name to the name of the PyGitHook class
# (under src.Hooks) that implements it; used both to validate the user's
# --hook_type and to fill in the generated hook script template.
hooks_script = {'pre-receive': 'PreReceiveHook',
                'post-receive': 'PostReceiveHook',
                'update': 'UpdateHook',
                'pre-commit': 'PreCommitHook',
                'prepare-commit-msg': 'PrepareCommitMsgHook',
                'pre-push': 'PrePushHook'}
def get_args():
    """Parse the installer's command-line options and return them as a dict."""
    parser = argparse.ArgumentParser(
        description="Installer for PyGitHook, this is a usefull tool but you can do it by hand.")
    parser.add_argument('-v', '--version', action='version', version="0.1")
    option_specs = [
        (('-gd', '--git_directory'),
         "The directory of the git project you want to creat a hook for."),
        (('-pd', '--pygithook_directory'), "The directory of PyGitHook."),
        (('-ht', '--hook_type'), "The type of hook you want to create."),
    ]
    for flags, help_text in option_specs:
        parser.add_argument(*flags, action='store', help=help_text)
    parser.add_argument('-t', '--tasks', nargs='+',
                        help="The tasks you want your hook to execute.")
    return vars(parser.parse_args())
def get_hooks_path(git_directory):
    """Return the hooks directory of *git_directory*, or exit(1) on error.

    Accepts either a working tree (looks in .git/hooks) or a bare ``.git``
    directory (looks in hooks).  Any failure prints a message and exits.
    """
    if not os.path.exists(git_directory):
        print("Invalid git directory, this repertory doesn't exist")
    else:
        subdir = "hooks" if git_directory.endswith('.git') else ".git/hooks"
        git_hook_path = os.path.join(git_directory, subdir)
        if os.path.exists(git_hook_path):
            return git_hook_path
        print("Invalid git directory, cannot find the {0} repertory".format(git_hook_path))
    sys.exit(1)
def make_executable(file_path):
    """Add the owner-executable bit to *file_path*, keeping other mode bits."""
    current_mode = os.stat(file_path).st_mode
    os.chmod(file_path, current_mode | stat.S_IEXEC)
def create_hook(hook_type, git_hook_path, pygithook_path, tasks):
    """Write an executable git hook script of *hook_type* into *git_hook_path*.

    The generated script bootstraps PyGitHook from *pygithook_path* and runs
    the given *tasks*.  A pre-existing hook file is renamed with an
    ``.old<timestamp>`` suffix instead of being overwritten.
    """
    # Fix: the original used the bare ``reduce`` builtin, which only exists
    # in Python 2; build the task import lines with str.join instead
    # (identical output, works on both Python 2 and 3).
    task_imports = "".join("from src.Tasks." + str(task) +
                           " import " + str(task) + "\n" for task in tasks)
    hook_template = ("#!${exec} \n\n" +
                     "import sys\n\n" +
                     "sys.path.append('${PyGitHookPath}')\n\n" +
                     "from src.Hooks.Hook import main\n" +
                     "from src.Hooks.${HookType} import ${HookType}\n" +
                     task_imports +
                     "tasks = [${TasksList}]\n\n" +
                     "if __name__ == '__main__':\n" +
                     "    main( ${HookType}, tasks, '${ConfPath}' )\n")
    hook_template = string.Template(hook_template)
    params = {'exec': sys.executable,
              'PyGitHookPath': pygithook_path,
              'HookType': hooks_script[hook_type],
              'TasksList': ",".join(tasks),
              'ConfPath': os.path.join(pygithook_path, "conf")}
    file_name = os.path.join(git_hook_path, hook_type)
    # Keep a timestamped backup of any existing hook rather than clobber it.
    if os.path.lexists(file_name):
        os.rename(file_name, "{0}.old{1}".format(file_name, time.time()))
    with open(file_name, "w") as hook_file:
        hook_file.write(hook_template.substitute(params))
    make_executable(file_name)
# Script entry point: validate the requested hook type, locate the target
# repository's hooks directory and generate the hook script.
if __name__ == "__main__":
    args = get_args()
    hook_directory = os.path.abspath(args['git_directory'])
    hook_type = args['hook_type']
    pygithook_path = os.path.abspath(args['pygithook_directory'])
    tasks = args['tasks']
    if hook_type not in hooks_script:
        # Fix: .format() was given a single tuple while the template has two
        # placeholders, so the error path raised IndexError instead of
        # printing the intended message.
        print("Invalid hook_type({0}), should be one of:{1}".format(
            hook_type, hooks_script.keys()))
        sys.exit(1)
    hook_path = get_hooks_path(hook_directory)
    create_hook(hook_type, hook_path, pygithook_path, tasks)
|
GaelMagnan/PyGitHook
|
src/PyGitHookDeployment.py
|
Python
|
gpl-2.0
| 3,574
|
# PyTransit: fast and easy exoplanet transit modelling in Python.
# Copyright (C) 2010-2020 Hannu Parviainen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
from matplotlib.pyplot import subplots, setp
from numba import njit
from numpy import inf, repeat, atleast_2d, sqrt, arctan2, squeeze, ndarray, argsort, array, unique, arange
from seaborn import despine
from .loglikelihood.fmcloglikelihood import FrozenMultiCeleriteLogLikelihood
from .lpf import BaseLPF
from .. import UniformModel
from .tesslpf import downsample_time
from ..param import GParameter, PParameter, UniformPrior as UP, NormalPrior as NP
from ..orbits import as_from_rhop, i_from_ba, i_from_baew, eclipse_phase, d_from_pkaiews, epoch
from ..utils.misc import fold
@njit
def set_depths(f, pvp, pbids, lcids, ik2, sfr):
    """Scale each point's deviation from unity flux by the flux ratio of
    its passband, taken from the parameter vector population.

    Returns the (at least 2-D) model array.  ``ik2`` is accepted for
    signature compatibility but not used here.
    """
    fmod = atleast_2d(f)
    pvs = atleast_2d(pvp)
    for ipt in range(fmod.shape[1]):
        # Column of the flux-ratio parameter for this point's passband.
        icol = sfr + pbids[lcids[ipt]]
        fmod[:, ipt] = 1 + (fmod[:, ipt] - 1.) * pvs[:, icol]
    return fmod
class EclipseLPF(BaseLPF):
    """Log posterior function for modelling secondary eclipses.

    Replaces BaseLPF's transit parameterisation with an eclipse setup:
    no limb darkening, a global orbit block, a global area ratio (k2),
    and one planet-to-star flux ratio (fr) per passband, evaluated with
    a uniform-disk eclipse model.
    """

    def _init_p_limb_darkening(self):
        # The uniform eclipse model has no limb darkening to parameterise.
        pass

    def _init_p_orbit(self):
        """Add the global orbit block: tc, p, rho, b, sqrt(e)cos(w), sqrt(e)sin(w)."""
        porbit = [
            GParameter('tc', 'zero epoch', 'd', NP(0.0, 0.1), (-inf, inf)),
            GParameter('p', 'period', 'd', NP(1.0, 1e-5), (0, inf)),
            GParameter('rho', 'stellar density', 'g/cm^3', UP(0.1, 25.0), (0, inf)),
            GParameter('b', 'impact parameter', 'R_s', UP(0.0, 1.0), (0, 1)),
            GParameter('secw', 'sqrt(e) cos(w)', '', UP(-0.5, 0.5), (-1, 1)),
            GParameter('sesw', 'sqrt(e) sin(w)', '', UP(-0.5, 0.5), (-1, 1))]
        self.ps.add_global_block('orbit', porbit)

    def _init_p_planet(self):
        """Planet parameter initialisation.

        Adds a global area-ratio (k2) block and a per-passband flux-ratio
        (fr) block, caching their start indices and slices for later use
        by transit_model / set_depths.
        """
        pk2 = [PParameter('k2', 'area ratio', 'A_s', UP(0.01 ** 2, 0.2 ** 2), (0, inf))]
        self.ps.add_global_block('k2', pk2)
        self._pid_k2 = repeat(self.ps.blocks[-1].start, 1)
        self._start_k2 = self.ps.blocks[-1].start
        self._sl_k2 = self.ps.blocks[-1].slice
        self._ix_k2 = self._start_k2
        pfr = [PParameter(f'fr_{pb}', 'Flux ratio', '', UP(0, 1), (0, 1)) for pb in self.passbands]
        self.ps.add_passband_block('fr', len(self.passbands), 1, pfr)
        self._pid_fr = repeat(self.ps.blocks[-1].start, self.npb)
        self._start_fr = self.ps.blocks[-1].start
        self._sl_fr = self.ps.blocks[-1].slice

    def _init_lnlikelihood(self):
        # Use the frozen multi-band celerite GP likelihood.
        self._add_lnlikelihood_model(FrozenMultiCeleriteLogLikelihood(self))

    def _post_initialisation(self):
        """Create the uniform-disk eclipse model and attach the data to it."""
        super()._post_initialisation()
        self.tm = UniformModel(eclipse=True)
        self.tm.set_data(self.timea - self._tref, self.lcids, self.pbids, self.nsamples, self.exptimes)

    def transit_model(self, pv, copy=True):
        """Evaluate the eclipse model for parameter vector(s) *pv*.

        Maps the sampling parameterisation (rho, b, secw, sesw, k2) to the
        physical orbit, evaluates the uniform eclipse model, and scales the
        eclipse depths by the per-passband flux ratios.
        NOTE(review): the ``copy`` argument is accepted but unused here.
        """
        pv = atleast_2d(pv)
        zero_epoch = pv[:, 0] - self._tref
        period = pv[:, 1]
        smaxis = as_from_rhop(pv[:, 2], period)
        inclination = i_from_ba(pv[:, 3], smaxis)
        radius_ratio = sqrt(pv[:, 6:7])
        eccentricity = pv[:, 4] ** 2 + pv[:, 5] ** 2
        omega = arctan2(pv[:, 5], pv[:, 4])
        fmod = self.tm.evaluate(radius_ratio, zero_epoch, period, smaxis, inclination, eccentricity, omega)
        fmod = set_depths(fmod, pv, self.pbids, self.lcids, self._start_k2, self._start_fr)
        return squeeze(fmod)

    def create_pv_population(self, npop):
        """Draw an initial parameter vector population from the prior."""
        return self.ps.sample_from_prior(npop)

    def plot_light_curves(self, pv=None, figsize=None, remove_baseline: bool = False):
        """Plot every light curve on an (epoch x passband) grid of axes,
        centred on the predicted eclipse time, with the model overplotted
        and the eclipse duration shaded."""
        if pv is None:
            if self.de is not None:
                pv = self.de.minimum_location
            else:
                pv = self.ps.mean_pv
        tc, p, rho, b, secw, sesw, k2 = pv[:7]
        a = as_from_rhop(rho, p)
        e = secw ** 2 + sesw ** 2
        w = arctan2(sesw, secw)
        i = i_from_baew(b, a, e, w)
        # Predicted eclipse centre and total duration.
        ec = tc + eclipse_phase(p, i, e, w)
        t14 = d_from_pkaiews(p, sqrt(k2), a, i, e, w, -1)
        eps = array([epoch(t.mean(), ec, p) for t in self.times])
        uep = unique(eps)
        nep = uep.size
        npb = self.npb
        fig, axs = subplots(nep, npb, sharey='all', sharex='all', figsize=figsize)
        emap = {e: ied for e, ied in zip(uep, arange(nep))}
        # NOTE(review): the model is evaluated at self.de.minimum_location
        # rather than the (possibly different) pv argument - confirm whether
        # pv was intended here.
        fmodel = self.flux_model(self.de.minimum_location)
        bline = self._lnlikelihood_models[0].predict_baseline(pv)
        for ilc in range(self.nlc):
            iep = emap[eps[ilc]]
            ipb = self.pbids[ilc]
            ax = axs[iep, ipb]
            # Hours from the eclipse centre of this light curve's epoch.
            time = 24 * (self.times[ilc] - (ec + eps[ilc] * p))
            ax.plot(time, self.fluxes[ilc])
            if remove_baseline:
                ax.plot(time, fmodel[self.lcslices[ilc]], 'k')
            else:
                ax.plot(time, fmodel[self.lcslices[ilc]] + bline[self.lcslices[ilc]] - 1, 'k')
            ax.axvspan(-24 * 0.5 * t14, 24 * 0.5 * t14, alpha=0.25)
        setp(axs[-1], xlabel='Time - T$_c$ [h]')
        setp(axs[:, 0], ylabel='Normalised flux')
        fig.tight_layout()
        return fig

    def plot_folded_transit(self, solution: str = 'de', pv: ndarray = None, binwidth: float = 1,
                            plot_model: bool = True, plot_unbinned: bool = True, plot_binned: bool = True,
                            xlim: tuple = None, ylim: tuple = None, ax=None, figsize: tuple = None, malpha=0.1):
        """Plot the baseline-corrected flux folded over the orbital period,
        optionally with binned points and the best-fit model."""
        # TODO: Doesn't take the passband information into account yet -> FIX!
        if pv is None:
            if solution.lower() == 'local':
                pv = self._local_minimization.x
            elif solution.lower() in ('de', 'global'):
                pv = self.de.minimum_location
            elif solution.lower() in ('mcmc', 'mc'):
                pv = self.posterior_samples(derived_parameters=False).median().values
            else:
                raise NotImplementedError("'solution' should be either 'local', 'global', or 'mcmc'")
        if ax is None:
            fig, ax = subplots(figsize=figsize)
        else:
            fig, ax = None, ax
        ax.autoscale(enable=True, axis='x', tight=True)
        t = self.timea
        fo = self.ofluxa
        fm = squeeze(self.transit_model(pv))
        bl = squeeze(self.baseline(pv))
        # Phase in hours relative to the zero epoch.
        phase = 24 * pv[1] * (fold(t, pv[1], pv[0], 0.0) - 0.5)
        sids = argsort(phase)
        phase = phase[sids]
        if plot_unbinned:
            ax.plot(phase, (fo / bl)[sids], 'k.', alpha=malpha, ms=2)
        if plot_binned:
            bp, bf, be = downsample_time(phase, (fo / bl)[sids], binwidth / 60)
            ax.errorbar(bp, bf, be, fmt='ko', ms=3)
        if plot_model:
            ax.plot(phase, fm[sids], 'k')
        setp(ax, ylim=ylim, xlim=xlim, xlabel='Time - T$_c$ [h]', ylabel='Normalised flux')
        if fig is not None:
            fig.tight_layout()
        return fig

    def plot_flux_ratio_posteriors(self, figsize=None):
        """Plot a histogram of the posterior samples of each per-passband
        flux ratio, one panel per passband."""
        df = self.posterior_samples(derived_parameters=False)
        frcols = [c for c in df.columns if 'fr' in c]
        fig, axs = subplots(1, len(frcols), sharex='all', sharey='all', figsize=figsize)
        for i, c in enumerate(frcols):
            axs[i].hist(df[c])
            axs[i].set_xlabel(f"{c.strip('_s').split('_')[-1]} flux ratio")
        setp(axs, yticks=[])
        despine(fig, offset=10)
        fig.tight_layout()
        return fig
|
hpparvi/PyTransit
|
pytransit/lpf/eclipselpf.py
|
Python
|
gpl-2.0
| 7,924
|
# -*- coding: utf-8 -*-
from ..utility import generate_file
class ReduceProperty:
    """Code generator for the mesa_pd MPI ReduceProperty header."""

    def generate(self, module):
        """Render the ReduceProperty template into the module's output path."""
        template_context = {'module': module}
        generate_file(module['module_path'], 'mpi/ReduceProperty.templ.h',
                      template_context)
|
lssfau/walberla
|
python/mesa_pd/mpi/ReduceProperty.py
|
Python
|
gpl-3.0
| 230
|
#!/usr/bin/env python
## @package testsLeapMotionAngle
#
# - program of tests : save data for results analyse and validation of software
#
# DEPENDENCIES
# ----------------------------------------------------------------------------------------------------------------------
#
# EXTERNAL PYTHON PACKAGES
# - Leap Motion SDK : <https://developer.leapmotion.com/>
#
# ----------------------------------------------------------------------------------------------------------------------
# Copyright (c) 2015 GBM4900, Polytechnique Montreal
# Authors: Aldo ZAIMI, Vincent GAGNON, Eden ABITBOL, Eddie MAGNIDE
#
# License: See LICENSE file
# ======================================================================================================================
# == IMPORTS ===========================================================================================================
import sys # For using system parameters
import os # For using system parameters
import math # Calculate angle
import csv # Save in format .csv
import time # Check the time
import signal # Exit program at any time when user press enter
import threading # Make multi-threading
import Queue # Make queue data
import datetime # Determinate date
sys.path.insert(0, "../lib") # CHANGED HERE: make the bundled Leap SDK importable
# Check if the correct LeapMotion library is installed
try:
    import Leap
except ImportError:
    print '\n---------------------------- Leap Motion Library not installed --------------------------\n'
    print 'Install library at : <https://developer.leapmotion.com/downloads/skeletal-beta>             '
    print 'For more information : <https://www.youtube.com/watch?v=T9k7rdY625M>                      \n'
    print '---------------------------------------Exit program. ------------------------------------\n'
    sys.exit(2)
# Check that a Python version of at least 2.7.6 is installed
if not sys.version_info[:3] >= (2, 7, 6):
    print '\n--------------------- The correct version of python is not installed --------------------\n'
    print 'You need to install Python 2.7.6                                                           '
    print '1. Delete your version of python                                                           '
    print '2. Install Canopy: <https://store.enthought.com/>                                          '
    print '3. Configure new Canopy python 2.7.6 in PyCharm                                          \n'
    print '---------------------------------------Exit program. ------------------------------------\n'
    sys.exit(2)
# Check if colorama library is installed
try:
    import colorama
except ImportError:
    print '\n------------------------------ Coloram Library not installed ----------------------------\n'
    print 'Install library at : <https://pypi.python.org/pypi/colorama>                             \n'
    print '---------------------------------------Exit program. ------------------------------------\n'
    sys.exit(2)
#=======================================================================================================================
#== FUNCTION: Display background of hands =============================================================================
def display_template():
    """Redraw the static two-hand console template (left hand mirrored with
    [::-1], right hand plain) with the current min/value/max angle strings
    for every joint, using ANSI cursor-positioning escapes via colorama.

    NOTE(review): on the index/ring IPD minimum row the right hand prints
    results[Right][Ring][IPD][Max] where Min would be expected, and on the
    pinky MP row [Min] is likewise printed from the same table - confirm
    whether these are intentional.
    """
    # Clear console before display
    os.system('cls' if os.name == 'nt' else 'clear')
    # For color and cursor movement
    colorama.init()
    # Define print format
    print '\033[0;0H' + ('TEST PROCEDURE-GBM4900-04 ')
    print '\033[1;0H' + ('                 TFEL                       '[::-1] + '                      RIGHT ')
    print '\033[2;0H' + ('                 ----                       '[::-1] + '                      ----- ')
    print '\033[3;0H' + ('                                            '[::-1] + '                            ')
    print '\033[4;0H' + ('               ELDDIM                       '[::-1] + '                     MIDDLE ')
    print '\033[5;0H' + ('      ...............                       '[::-1] + '            ............... ')
    print '\033[6;0H' + ('      .             .                       '[::-1] + '            .             . ')
    print '\033[7;0H' + ('      .             .                       '[::-1] + '            .             . ')
    print '\033[8;0H' + ('      .             .                       '[::-1] + '            .             . ')
    print '\033[9;0H' + ('      .             .                       '[::-1] + '            .             . ')
    print '\033[10;0H' + ('XEDNI .             . GNIR                  '[::-1] + '     INDEX  .             .  RING ')
    print '\033[11;0H' + ('...............     ...............         '[::-1] + ' ...............     ............... ')
    print '\033[12;0H' + ('          .   . ' + results[Left][Middle][IPD][Max][::-1] + '     .   .           ')[::-1] + ('          .   . ' + results[Right][Middle][IPD][Max] + '     .   .           ')
    print '\033[13;0H' + ('          .   . ~ ~ ' + results[Left][Middle][IPD][Value][::-1] + ' ~ ~ .   .           ')[::-1] + ('          .   . ~ ~ ' + results[Right][Middle][IPD][Value] + ' ~ ~ .   .           ')
    print '\033[14;0H' + ('          .   . ' + results[Left][Middle][IPD][Min][::-1] + '     .   .           ')[::-1] + ('          .   . ' + results[Right][Middle][IPD][Min] + '     .   .           ')
    print '\033[15;0H' + ('          .   .             .   .           '[::-1] + '          .   .             .   .           ')
    print '\033[16;0H' + ('  . ' + results[Left][Index][IPD][Max][::-1] + '     .   .         .   . ' + results[Left][Ring][IPD][Max][::-1] + '     .   ')[::-1] + ('  . ' + results[Right][Index][IPD][Max] + '     .   .         .   . ' + results[Right][Ring][IPD][Max] + '     .   ')
    print '\033[17;0H' + ('  . ~ ~ ' + results[Left][Index][IPD][Value][::-1] + ' ~ ~ .   .         .   . ~ ~ ' + results[Left][Ring][IPD][Value][::-1] + ' ~ ~ .   ')[::-1] + ('  . ~ ~ ' + results[Right][Index][IPD][Value] + ' ~ ~ .   .         .   . ~ ~ ' + results[Right][Ring][IPD][Value] + ' ~ ~ .   ')
    print '\033[18;0H' + ('  . ' + results[Left][Index][IPD][Min][::-1] + '     .   .         .   . ' + results[Left][Ring][IPD][Min][::-1] + '     .   ')[::-1] + ('  . ' + results[Right][Index][IPD][Min] + '     .   .         .   . ' + results[Right][Ring][IPD][Max] + '     .   ')
    print '\033[19;0H' + ('  .         .   .         .   .         .   '[::-1] + '  .         .   .         .   .         .   ')
    print '\033[20;0H' + ('  .         .   .         .   .         .   '[::-1] + '  .         .   .         .   .         .   ')
    print '\033[21;0H' + ('  .         .   .         .   .         .   '[::-1] + '  .         .   .         .   .         .   ')
    print '\033[22;0H' + ('  .         .   .         .   .         .   YKNIP  '[::-1] + '  .         .   .         .   .         .   PINKY  ')
    print '\033[23;0H' + ('  .         .   . ' + results[Left][Middle][IPP][Max][::-1] + '     .   .         .   ...............  ')[::-1] + ('  .         .   . ' + results[Right][Middle][IPP][Max] + '     .   .         .   ...............  ')
    print '\033[24;0H' + ('  .         .   . ~ ~ ' + results[Left][Middle][IPP][Value][::-1] + ' ~ ~ .   .         .   .             .  ')[::-1] + ('  .         .   . ~ ~ ' + results[Right][Middle][IPP][Value] + ' ~ ~ .   .         .   .             .  ')
    print '\033[25;0H' + ('  .         .   . ' + results[Left][Middle][IPP][Min][::-1] + '     .   .         .   .             .  ')[::-1] + ('  .         .   . ' + results[Right][Middle][IPP][Min] + '     .   .         .   .             .  ')
    print '\033[26;0H' + ('  . ' + results[Left][Index][IPP][Max][::-1] + '     .   .         .   . ' + results[Left][Ring][IPP][Max][::-1] + '     .   . ' + results[Left][Pinky][IPD][Max][::-1] + '     .  ')[::-1] + ('  . ' + results[Right][Index][IPP][Max] + '     .   .         .   . ' + results[Right][Ring][IPP][Max] + '     .   . ' + results[Right][Pinky][IPD][Max] + '     .  ')
    print '\033[27;0H' + ('  . ~ ~ ' + results[Left][Index][IPP][Value][::-1] + ' ~ ~ .   .         .   . ~ ~ ' + results[Left][Ring][IPP][Value][::-1] + ' ~ ~ .   ~ ~ ' + results[Left][Pinky][IPD][Value][::-1] + ' ~ ~ .  ')[::-1] + ('  . ~ ~ ' + results[Right][Index][IPP][Value] + ' ~ ~ .   .         .   . ~ ~ ' + results[Right][Ring][IPP][Value] + ' ~ ~ .   ~ ~ ' + results[Right][Pinky][IPD][Value] + ' ~ ~ .  ')
    print '\033[28;0H' + ('  . ' + results[Left][Index][IPP][Min][::-1] + '     .   .         .   . ' + results[Left][Ring][IPP][Min][::-1] + '     .   . ' + results[Left][Pinky][IPD][Min][::-1] + '     .  ')[::-1] + ('  . ' + results[Right][Index][IPP][Min] + '     .   .         .   . ' + results[Right][Ring][IPP][Min] + '     .   . ' + results[Right][Pinky][IPD][Min] + '     .  ')
    print '\033[29;0H' + ('  .         .   .         .   .         .   .         .  '[::-1] + '  .         .   .         .   .         .   .         .  ')
    print '\033[30;0H' + ('  .         .   .         .   .         .   .         .  '[::-1] + '  .         .   .         .   .         .   .         .  ')
    print '\033[31;0H' + ('  .         .   .         .   .         .   .         .  '[::-1] + '  .         .   .         .   .         .   .         .  ')
    print '\033[32;0H' + ('  .         .   .         .   .         .   . ' + results[Left][Pinky][IPP][Max][::-1] + '     .  ')[::-1] + ('  .         .   .         .   .         .   . ' + results[Right][Pinky][IPP][Max] + '     .  ')
    print '\033[33;0H' + ('  .         .   .         .   .         .   . ~ ~ ' + results[Left][Pinky][IPP][Value][::-1] + ' ~ ~ .  ')[::-1] + ('  .         .   .         .   .         .   . ~ ~ ' + results[Right][Pinky][IPP][Value] + ' ~ ~ .  ')
    print '\033[34;0H' + ('  .         .   .         .   .         .   . ' + results[Left][Pinky][IPP][Min][::-1] + '     .  ')[::-1] + ('  .         .   .         .   .         .   . ' + results[Right][Pinky][IPP][Min] + '     .  ')
    print '\033[35;0H' + ('  BMUHT   .         .   .         .   .         .   .         .  '[::-1] + '  THUMB   .         .   .         .   .         .   .         .  ')
    print '\033[36;0H' + ('  ................         .   .         .   .         .   .     '[::-1] + '  ................         .   .         .   .         .   .     ')
    print '\033[37;0H' + ('  .  .  ' + results[Left][Index][MP][Max][::-1] + '  .  ' + results[Left][Middle][MP][Max][::-1] + '  .  ' + results[Left][Ring][MP][Max][::-1] + '  .  ' + results[Left][Pinky][MP][Max][::-1] + '     .  ')[::-1] + ('  .  .  ' + results[Right][Index][MP][Max] + '  .  ' + results[Right][Middle][MP][Max] + '  .  ' + results[Right][Ring][MP][Max] + '  .  ' + results[Right][Pinky][MP][Max] + '     .  ')
    print '\033[38;0H' + ('  .  . ~ ~ ' + results[Left][Index][MP][Value][::-1] + ' ~ ~ . ~ ~ ' + results[Left][Middle][MP][Value][::-1] + ' ~ ~ . ~ ~ ' + results[Left][Ring][MP][Value][::-1] + ' ~ ~ . ~ ~ ' + results[Left][Pinky][MP][Value][::-1] + ' ~ ~ .  ')[::-1] + ('  .  . ~ ~ ' + results[Right][Index][MP][Value] + ' ~ ~ . ~ ~ ' + results[Right][Middle][MP][Value] + ' ~ ~ . ~ ~ ' + results[Right][Ring][MP][Value] + ' ~ ~ . ~ ~ ' + results[Right][Pinky][MP][Value] + ' ~ ~ .  ')
    print '\033[39;0H' + ('  .  .  ' + results[Left][Index][MP][Min][::-1] + '  .  ' + results[Left][Middle][MP][Min][::-1] + '  .  ' + results[Left][Ring][MP][Min][::-1] + '  .  ' + results[Left][Pinky][MP][Min][::-1] + '     .  ')[::-1] + ('  .  .  ' + results[Right][Index][MP][Min] + '  .  ' + results[Right][Middle][MP][Min] + '  .  ' + results[Right][Ring][MP][Min] + '  .  ' + results[Right][Pinky][MP][Min] + '     .  ')
    print '\033[40;0H' + ('  .  .       |         |        |       .  '[::-1] + '  .  .       |         |        |       .  ')
    print '\033[41;0H' + ('    .                                   .  '[::-1] + '    .                                   .  ')
    print '\033[42;0H' + ('     . ' + results[Left][Thumb][IPP][Max][::-1] + '   .   |         |        |    .  ')[::-1] + ('     . ' + results[Right][Thumb][IPP][Max] + '   .   |         |        |    .  ')
    print '\033[43;0H' + ('      . ~ ~ ' + results[Left][Thumb][IPP][Value][::-1] + ' ~ ~ .                        .  ')[::-1] + ('      . ~ ~ ' + results[Right][Thumb][IPP][Value] + ' ~ ~ .                        .  ')
    print '\033[44;0H' + ('       . ' + results[Left][Thumb][IPP][Min][::-1] + '    .  |         |        |   .  ')[::-1] + ('       . ' + results[Right][Thumb][IPP][Min] + '    .  |         |        |   .  ')
    print '\033[45;0H' + ('        .         .                      .  '[::-1] + '        .         .                      .  ')
    print '\033[46;0H' + ('         .         .  |         |        | .  '[::-1] + '         .         .  |         |        | .  ')
    print '\033[47;0H' + ('          .         .                      .  '[::-1] + '          .         .                      .  ')
    print '\033[48;0H' + ('           .         . |         |        |.  '[::-1] + '           .         . |         |        |.  ')
    print '\033[49;0H' + ('            .         .                    .  '[::-1] + '            .         .                    .  ')
    print '\033[50;0H' + ('             . ' + results[Left][Thumb][MP][Max][::-1] + '      |         |        . ')[::-1] + ('             . ' + results[Right][Thumb][MP][Max] + '      |         |        . ')
    print '\033[51;0H' + ('              . ' + results[Left][Thumb][MP][Value][::-1] + '                        . ')[::-1] + ('              . ' + results[Right][Thumb][MP][Value] + '                        . ')
    print '\033[52;0H' + ('               . ' + results[Left][Thumb][MP][Min][::-1] + '                       . ')[::-1] + ('               . ' + results[Right][Thumb][MP][Min] + '                       . ')
    print '\033[53;0H' + ('                .                        . '[::-1] + '                .                        . ')
    print '\033[54;0H' + ('                 .                       . '[::-1] + '                 .                       . ')
    print '\033[55;0H' + ('                  .                      . '[::-1] + '                  .                      . ')
    print '\033[56;0H' + ('                   .                     . '[::-1] + '                   .                     . ')
    print '\033[57;0H' + ('                    .                    . '[::-1] + '                    .                    . ')
    print '\033[58;0H' + ('                                           ')
    print '\033[59;0H' + ('                                           ')
    print '\033[60;0H' + ('Press Enter to quit')
#=======================================================================================================================
#== FUNCTION: Display results =========================================================================================
def display(Hands_Angle):
    """Refresh the on-console hand template with the latest joint angles.

    Hands_Angle -- nested [hand][finger][joint][Min/Value/Max] structure
    of angles in degrees (0 means "no frame acquired yet" and is skipped).

    Side effects: updates the global ``results`` (3-char zero-padded
    display strings, red-colored via colorama on hyper-extension) and
    ``results_to_save`` (comma-decimal strings for the CSV), then writes
    each value at a fixed screen position using ANSI ``ESC[row;colH``
    cursor-addressing escape codes.
    """
    # Red color detection of hyper-extension initialized
    color_in_red = False
    # Actualize results printing with user choices selected
    for Hand_select in User_Choices[0]:  # Each hand chosen
        for Finger_select in User_Choices[1]:  # Each finger chosen for each hand
            for Joint_select in Finger_select[1]:  # Each joint chosen for each finger
                # Check if Leap Motion acquire frame
                if Hands_Angle[Hand_select][Finger_select[0]][Joint_select][Value] !=0:
                    # Store temporarily the results
                    temp = Hands_Angle[Hand_select][Finger_select[0]][Joint_select][Value]
                    # Store results for saving in file (',' as decimal separator for the CSV)
                    results_to_save[Hand_select][Finger_select[0]][Joint_select][Value] = str(temp).replace('.',',')
                    # Round angle amplitude
                    temp = round(temp)
                    # Convert angle amplitude float to integer
                    temp = int(temp)
                    # Check if it is hyper-extension (negative angle): display its magnitude in red
                    if temp < 0:
                        color_in_red = True
                        temp = -temp
                    # Stock results in display format (always 3 characters wide)
                    temp = str(temp).zfill(3)
                    results[Hand_select][Finger_select[0]][Joint_select][Value] = temp
                    # Color in red in console if it is hyper-extension
                    if color_in_red:
                        # Initialize color parameter
                        colorama.init()
                        # Apply color ASCII code
                        results[Hand_select][Finger_select[0]][Joint_select][Value] = \
                            colorama.Fore.RED + temp + colorama.Fore.RESET
                        # Initialize hyper-extension marker
                        color_in_red = False
    # Define print format without clear background.
    # Each print positions the cursor at a fixed (row;column) of the
    # pre-drawn template, left hand first, then the mirrored right hand.
    print'\033[14;55H' + results[Left][Middle][IPD][Value]
    print'\033[14;154H' + results[Right][Middle][IPD][Value]
    print'\033[25;55H' + results[Left][Middle][IPP][Value]
    print'\033[25;154H' + results[Right][Middle][IPP][Value]
    print'\033[39;55H' + results[Left][Middle][MP][Value]
    print'\033[39;154H' + results[Right][Middle][MP][Value]
    print'\033[18;41H' + results[Left][Ring][IPD][Value]
    print'\033[18;168H' + results[Right][Ring][IPD][Value]
    print'\033[28;41H' + results[Left][Ring][IPP][Value]
    print'\033[28;168H' + results[Right][Ring][IPP][Value]
    print'\033[39;41H' + results[Left][Ring][MP][Value]
    print'\033[39;168H' + results[Right][Ring][MP][Value]
    print'\033[18;69H' + results[Left][Index][IPD][Value]
    print'\033[18;140H' + results[Right][Index][IPD][Value]
    print'\033[28;69H' + results[Left][Index][IPP][Value]
    print'\033[28;140H' + results[Right][Index][IPP][Value]
    print'\033[39;69H' + results[Left][Index][MP][Value]
    print'\033[39;140H' + results[Right][Index][MP][Value]
    print'\033[28;27H' + results[Left][Pinky][IPD][Value]
    print'\033[28;182H' + results[Right][Pinky][IPD][Value]
    print'\033[34;27H' + results[Left][Pinky][IPP][Value]
    print'\033[34;182H' + results[Right][Pinky][IPP][Value]
    print'\033[39;27H' + results[Left][Pinky][MP][Value]
    print'\033[39;182H' + results[Right][Pinky][MP][Value]
    print'\033[44;84H' + results[Left][Thumb][IPP][Value]
    print'\033[44;125H' + results[Right][Thumb][IPP][Value]
    print'\033[52;83H' + results[Left][Thumb][MP][Value]
    print'\033[52;126H' + results[Right][Thumb][MP][Value]
#=======================================================================================================================
#== FUNCTION: Do dot product: returns dot product of 2 3D unit vectors =================================================
def dot_product(v1, v2):
    """Return the dot product of two 3-component vectors.

    Both operands only need to support integer indexing [0..2]
    (plain tuples/lists as well as Leap vectors, which the callers
    pass and the original code indexed the same way).
    """
    # Multiply matching components and accumulate the scalar result.
    return v1[0] * v2[0] + v1[1] * v2[1] + v1[2] * v2[2]
#=======================================================================================================================
#== FUNCTION: save ; Write angles calculated in files =================================================================
def save(Hands_Angle):
    """Append the current joint angles to the session's raw CSV file.

    Hands_Angle -- nested [hand][finger][joint][Min/Value/Max] structure
    (not read directly here: the row written comes from the global
    ``results_to_save``, which display() keeps in sync).

    Side effects: sets the global ``data_ok`` flag once real data has
    been seen, creates the results file (with headers) via
    save_headers() on first use, and appends one semicolon-separated
    row per call.
    """
    # Define global variable for save only if it's necessary
    global data_ok
    # Only proceed once at least one angle has been acquired: the
    # template value of every cell of results_to_save is '-'.
    if set(sum(sum(sum(results_to_save, []), []), [])) != {'-'}:
        # Permit to save data
        data_ok = True
        # Create files if this is not done and write headers
        save_headers()
        # Re-read the raw file to recover the session start date/time
        # from its header rows (row 5 = date, row 6 = hour).
        bottle_list = []
        with open(path_script+'Results'+'/'+Patient_First_Name+'_'+Patient_Name+'/'+Report_Number
                  +'_'+'raw.csv', 'rb') as b:
            bottles = csv.reader(b)
            bottle_list.extend(bottles)
            # (explicit b.close() removed: the 'with' block closes it)
        # Extract date parameters
        Year, Month, Day = str(bottle_list[5][0][-8:]).split('-')
        Year = int('20'+Year)
        Month = int(Month)
        Day = int(Day)
        # Extract time parameters
        Hour, Minute, Second = str(bottle_list[6][0][-8:]).split(':')
        Hour = int(Hour)
        Minute = int(Minute)
        Second = int(Second)
        # Convert start time in seconds
        start = datetime.datetime(Year, Month, Day, Hour, Minute, Second)
        # Elapsed seconds since the session started, ',' as decimal
        # separator for the CSV.
        # NOTE(review): round(x, 1000) keeps 1000 decimal places, i.e. it
        # is effectively a no-op; a small ndigits value was probably
        # intended -- confirm before changing the output precision.
        t = str(round((time.time()-time.mktime(start.timetuple())), 1000)).replace('.',',')
        # Data to saving in file: timestamp followed by one column per joint
        data = [str(t),
        results_to_save[Left][Thumb][MP][Value],results_to_save[Left][Thumb][IPP][Value],
        results_to_save[Left][Index][MP][Value],results_to_save[Left][Index][IPP][Value],results_to_save[Left][Index][IPD][Value],
        results_to_save[Left][Middle][MP][Value],results_to_save[Left][Middle][IPP][Value],results_to_save[Left][Middle][IPD][Value],
        results_to_save[Left][Ring][MP][Value],results_to_save[Left][Ring][IPP][Value],results_to_save[Left][Ring][IPD][Value],
        results_to_save[Left][Pinky][MP][Value],results_to_save[Left][Pinky][IPP][Value],results_to_save[Left][Pinky][IPD][Value],
        results_to_save[Right][Thumb][MP][Value],results_to_save[Right][Thumb][IPP][Value],
        results_to_save[Right][Index][MP][Value],results_to_save[Right][Index][IPP][Value],results_to_save[Right][Index][IPD][Value],
        results_to_save[Right][Middle][MP][Value],results_to_save[Right][Middle][IPP][Value],results_to_save[Right][Middle][IPD][Value],
        results_to_save[Right][Ring][MP][Value],results_to_save[Right][Ring][IPP][Value],results_to_save[Right][Ring][IPD][Value],
        results_to_save[Right][Pinky][MP][Value],results_to_save[Right][Pinky][IPP][Value],results_to_save[Right][Pinky][IPD][Value]]
        #-------------------------------------------------------------------------------------------------------------------
        # FILE OF RAW DATA
        #-------------------------------------------------------------------------------------------------------------------
        # Write data in file each time if Leap Motion acquire data
        if data_ok:
            # BUGFIX: this path used lowercase 'results' while every other
            # path in the script uses 'Results'; on a case-sensitive
            # filesystem the data rows were appended to a different file
            # than the one save_headers() wrote the headers to.
            with open(path_script+'Results'+'/'+Patient_First_Name+'_'+Patient_Name+'/'+Report_Number+'_' + 'raw.csv', 'ab') as fp:
                # Define format as excel format
                a = csv.writer(fp, delimiter=';')
                # Write all data in file
                a.writerow(data)
                # (explicit fp.close() removed: the 'with' block closes it)
#=======================================================================================================================
#== FUNCTION: save_headers ; Create and write headers in files ========================================================
def save_headers():
    """Create the per-patient results folder and the raw CSV file.

    Side effects: sets the globals ``path_script`` (current working
    directory + '/') and ``Session_Number``; creates ``Results/`` and
    ``Results/<First>_<Name>/`` if missing; writes the patient/session
    header rows into ``<Report_Number>_raw.csv`` if it does not exist
    yet for today's date.
    """
    # Define global variable for session number
    global Session_Number
    # Define path of script repertory as global variable
    global path_script
    # Extract path of script repertory
    path_script = os.getcwd() + '/'
    # Create repertory of all files if it does not exist
    if not os.path.exists(path_script+'Results'):
        os.makedirs(path_script+'Results')
    # Create repertory of patients files if it does not exist
    if not os.path.exists(path_script+'Results'+'/'+Patient_First_Name+'_'+Patient_Name):
        os.makedirs(path_script+'Results'+'/'+Patient_First_Name+'_'+Patient_Name)
    # Determinate number of session and create folder of new session
    while True:
        # Check if old session exist
        if os.path.exists(path_script+'Results'+'/'+Patient_First_Name+'_'+Patient_Name
                          + '/' + Report_Number + '_' + 'raw.csv'):
            # Extract data of file for checking if this acquisition is done the same day
            bottle_list = []
            with open(path_script+'Results'+'/'+Patient_First_Name+'_'+Patient_Name+'/'+Report_Number
                      +'_'+'raw.csv', 'rb') as b:
                bottles = csv.reader(b)
                bottle_list.extend(bottles)
                b.close()
            # Check if this acquisition is done the same day (row 5 holds the date)
            if str(["Date", str(datetime.datetime.fromtimestamp(time.time())).split(' ')[0]])[-10:-2] == bottle_list[5][0][-8:]:
                # Stop searching and overwrite data in the session files
                break
            else:
                # Search the following session file
                # NOTE(review): only Session_Number changes here, but the
                # file name checked above is built from Report_Number, so
                # this branch re-tests the same path forever -- confirm
                # whether the date mismatch case can actually occur.
                Session_Number += 1
        # Create the new session file with new session number
        else:
            with open(path_script+'Results'+'/'+Patient_First_Name+'_'+Patient_Name+'/'+Report_Number
                      +'_'+'raw.csv', 'ab') as fp:
                # Define format as excel format
                a = csv.writer(fp, delimiter=';')
                # Write all headers in file of raw data
                a.writerow(["First Name", Patient_First_Name])
                a.writerow(["Name", Patient_Name])
                a.writerow(["Age", Patient_Age])
                a.writerow(["Report", Report_Number])
                a.writerow(["Session", Session_Number])
                a.writerow(["Date", str(datetime.datetime.fromtimestamp(time.time())).split(' ')[0]])
                a.writerow(["Hour", str(datetime.datetime.fromtimestamp(time.time())).split(' ')[1][:8]])
                # NOTE(review): passing a bare string to writerow() writes
                # one character per CSV column -- confirm this separator
                # row renders as intended.
                a.writerow("---------------------------------------------------------")
                a.writerow(["Time",
                            "THUMB MP LEFT", "THUMB IP LEFT",
                            "INDEX MP LEFT", "INDEX IPP LEFT", "INDEX IPD LEFT",
                            "MIDDLE MP LEFT", "MIDDLE IPP LEFT", "MIDDLE IPD LEFT",
                            "RING MP LEFT", "RING IPP LEFT", "RING IPD LEFT",
                            "PINKY MP LEFT", "PINKY IPP LEFT", "PINKY IPD LEFT",
                            "THUMB MP RIGHT", "THUMB IP RIGHT",
                            "INDEX MP RIGHT", "INDEX IPP RIGHT", "INDEX IPD RIGHT",
                            "MIDDLE MP RIGHT", "MIDDLE IPP RIGHT", "MIDDLE IPD RIGHT",
                            "RING MP RIGHT", "RING IPP RIGHT", "RING IPD RIGHT",
                            "PINKY MP RIGHT", "PINKY IPP RIGHT", "PINKY IPD RIGHT"])
                # Close file
                fp.close()
#=======================================================================================================================
#== CLASS: SampleListener; receives events from controller and executes accordingly ====================================
class SampleListener(Leap.Listener):
    """Leap Motion listener: on each tracking frame, computes the flexion
    angle of every finger joint and stores it in the global
    ``Hands_Angle`` structure as Hands_Angle[hand][finger][joint][Value].

    MP angles come from the angle between adjacent bone basis vectors;
    IPP/IPD angles come from the law of cosines on the bone-length
    triangle. Hyper-extension makes the MP angle negative, while IPP/IPD
    are clamped to a near-zero positive value instead (see below).
    """
    # Function executed for each frame:
    def on_frame(self, controller):
        # Get the most recent frame and report some basic information
        frame = controller.frame()
        #----------------------------------------------------
        # Get hands (for each hand in the current frame...):
        #----------------------------------------------------
        for hand in frame.hands:
            # Find out which hand is in the frame (h = 0 for left and 1 for right)
            h = 0 if hand.is_left else 1
            # Filter data : permit only data for a minimum confidence equal to 0.5
            if hand.confidence >= 0.5:
                #---------------------------------------------------------------------
                # Get fingers (for each finger of each hand in the current frame...):
                #---------------------------------------------------------------------
                for finger in hand.fingers:
                    #--------------------------------------------------------------------------------
                    # Get bones (for each bone of each finger of each hand in the current frame...):
                    #--------------------------------------------------------------------------------
                    # If its a thumb...
                    if finger.type() == 0:
                        # Iterate through Proximal, Intermediate and Distal (no Metacarpal).
                        for b in range(2, 4):
                            # Determinate joint type (thumb has only MP and IPP slots)
                            j = b - 2
                            # Identification of the previous bone
                            bone1 = finger.bone(b - 1)
                            # Identification of the actual bone
                            bone2 = finger.bone(b)
                            # For MP
                            if b == 2:
                                # Store direction unit vector of previous bone in v0
                                v0 = bone1.basis.y_basis
                                # Store direction unit vector of actual bone
                                v1 = bone2.basis.y_basis
                                # Calculate scalar product
                                sp = dot_product(v0, v1)
                                # Verify if scalar product is valid, i.e. it between -1 and 1
                                # (v0 != v1 also guards against acos of a degenerate 0-angle pair)
                                if (sp <= 1) and (sp >= -1) and (v0 != v1):
                                    # Get angle (in radians) between the 2 unit vectors
                                    angle_in_radians = math.acos(sp)
                                    # Get angle in degrees
                                    angle_in_degrees = math.degrees(angle_in_radians)
                                    # Detect hyper-extension with direction bone1 and normal bone2
                                    if dot_product(bone1.direction, -bone2.basis.y_basis) < 0:
                                        # Hyper-extension is reported as a negative MP angle
                                        angle_in_degrees = -angle_in_degrees
                                    # Store actual value of angle
                                    Hands_Angle[h][finger.type()][j][Value] = angle_in_degrees
                            # For others joints
                            else:
                                # Determinate each length in triangle
                                length_a = bone1.length
                                length_b = bone2.length
                                length_c = bone2.next_joint.distance_to(bone1.prev_joint)
                                # Apply cos law
                                sp = (length_c*length_c-length_b*length_b-length_a*length_a)/(2*length_a*length_b)
                                # Verify if scalar product is valid, i.e. it between -1 and 1
                                if (sp <= 1) and (sp >= -1):
                                    # Get angle (in radians) between the 2 unit vectors
                                    angle_in_radians = math.acos(sp)
                                    # Get angle in degrees
                                    angle_in_degrees = math.degrees(angle_in_radians)
                                    # Detect hyper-extension with direction bone1 and normal bone2
                                    if dot_product(bone1.direction, -bone2.basis.y_basis) < 0:
                                        # NOTE(review): hyper-extension is clamped to a
                                        # near-zero positive value here rather than negated
                                        # as in the MP case -- confirm this is intentional.
                                        angle_in_degrees = 0.00001
                                    # Store actual value of angle
                                    Hands_Angle[h][finger.type()][j][Value] = angle_in_degrees
                    # ... otherwise (i.e., if its NOT a thumb)...
                    else:
                        # Iterate through Proximal, Intermediate and Distal (no Metacarpal).
                        for b in range(1, 4):
                            # Determinate joint type
                            j = b - 1
                            # Identification of the previous bone
                            bone1 = finger.bone(b - 1)
                            # Identification of the actual bone
                            bone2 = finger.bone(b)
                            # For MP
                            if b == 1:
                                # Store direction unit vector of previous bone in v0
                                v0 = bone1.basis.y_basis
                                # Store direction unit vector of actual bone
                                v1 = bone2.basis.y_basis
                                # Calculate scalar product
                                sp = dot_product(v0, v1)
                                # Verify if scalar product is valid, i.e. it between -1 and 1
                                if (sp <= 1) and (sp >= -1) and (v0 != v1):
                                    # Get angle (in radians) between the 2 unit vectors
                                    angle_in_radians = math.acos(sp)
                                    # Get angle in degrees
                                    angle_in_degrees = math.degrees(angle_in_radians)
                                    # Detect hyper-extension with direction bone1 and normal bone2
                                    if dot_product(bone1.direction, -bone2.basis.y_basis) < 0:
                                        # Hyper-extension is reported as a negative MP angle
                                        angle_in_degrees = -angle_in_degrees
                                    # Store actual value of angle
                                    Hands_Angle[h][finger.type()][j][Value] = angle_in_degrees
                            # For others joints
                            else:
                                # Determinate each length in triangle
                                length_a = bone1.length
                                length_b = bone2.length
                                length_c = bone2.next_joint.distance_to(bone1.prev_joint)
                                # Apply cos law
                                sp = (length_c*length_c-length_b*length_b-length_a*length_a)/(2*length_a*length_b)
                                # Verify if scalar product is valid, i.e. it between -1 and 1
                                if (sp <= 1) and (sp >= -1):
                                    # Get angle (in radians) between the 2 unit vectors
                                    angle_in_radians = math.acos(sp)
                                    # Get angle in degrees
                                    angle_in_degrees = math.degrees(angle_in_radians)
                                    # Detect hyper-extension with direction bone1 and normal bone2
                                    if dot_product(bone1.direction, -bone2.basis.y_basis) < 0:
                                        # NOTE(review): clamped instead of negated, as above.
                                        angle_in_degrees = 0.00001
                                    # Store actual value of angle
                                    Hands_Angle[h][finger.type()][j][Value] = angle_in_degrees
#=======================================================================================================================
#== FUNCTION: acquisition_angle; calculate angle each time =============================================================
def acquisition_angle():
    """Start Leap Motion acquisition for the current tracking frames.

    Attaches a SampleListener to a new Leap controller so that its
    on_frame() callback updates the global ``Hands_Angle``.
    """
    # Create a sample listener
    listener = SampleListener()
    # Create the controller with the listener attached.
    # (The original code first built a bare Leap.Controller() that was
    # immediately overwritten -- a dead store, removed here.)
    controller = Leap.Controller(listener)
#=======================================================================================================================
#== FUNCTION THREAD #1 : kill_handler ; exit program at any time when user press enter =================================
def kill_handler():
    """Block until the user types a character, then terminate the process.

    Clears the console (Windows ``cls`` or Unix ``clear``) and sends
    SIGINT to the current process.
    """
    # Wait for a single character on stdin (typically Enter).
    sys.stdin.read(1)
    # Wipe the screen with the platform-appropriate command, then stop.
    clear_command = 'cls' if os.name == 'nt' else 'clear'
    os.system(clear_command)
    os.kill(os.getpid(), signal.SIGINT)
#=======================================================================================================================
#== FUNCTION THREAD #2 : consumer_display ; implement display as a consumer in multi-threading==========================
def consumer_display():
    """Consumer thread body: forever pull the latest angle structure off
    ``data_queue`` and render it on the console via display()."""
    # Infinite loop
    while True:
        # Take angles calculated which is in queue
        # NOTE(review): task_done() is never called after get(), so
        # data_queue.join() elsewhere can never complete.
        Angles = data_queue.get()
        # Apply priority of multi-threading
        with lock:
            # Display angles calculated
            display(Angles)
#=======================================================================================================================
#== FUNCTION THREAD #3 : producer_acquisition_angle ; implement acquisition_angle as a producer in multi-threading =====
def producer_acquisition_angle():
    """Producer thread body: forever acquire angles from the Leap Motion
    and push the (shared) ``Hands_Angle`` structure onto ``data_queue``."""
    # Infinite loop
    while True:
        # Apply priority of multi-threading
        with lock:
            # Acquire angles calculated
            # NOTE(review): acquisition_angle() constructs a new Leap
            # controller on every iteration -- confirm this is intended.
            acquisition_angle()
        # Put angles calculated in a queue
        data_queue.put(Hands_Angle)
#=======================================================================================================================
#== FUNCTION THREAD #4 : consumer_save ; implement save as a consumer in multi-threading================================
def consumer_save():
    """Consumer thread body: forever pull the latest angle structure off
    ``data_queue`` and append it to the session CSV via save()."""
    # Infinite loop
    while True:
        # Take angles calculated which is in queue
        # NOTE(review): task_done() is never called after get(), so
        # data_queue.join() elsewhere can never complete.
        Angles = data_queue.get()
        # Apply priority of multi-threading
        with lock:
            # Save angles calculated
            save(Angles)
#=======================================================================================================================
#== FUNCTION MULTI THREADING : multi_threading ; implement multi-threading as producer-consumer pattern ================
# Define data queue for angles calculated each time
# (shared producer/consumer channel between the acquisition thread and
# the display/save threads)
data_queue = Queue.Queue()
# Define priority of each thread of multi-threading
# (single global lock serializing acquisition, display and save work)
lock = threading.Lock()
def multi_threading():
    """Start the four worker threads (kill handler, producer, saver,
    display) and block until the program is terminated.

    All threads are daemons; the process actually exits when
    kill_handler() sends SIGINT.
    """
    #--------------------------------------------------------
    # THREAD #1 : Check if user press enter for quit program
    #--------------------------------------------------------
    kill_thread = threading.Thread(target=kill_handler)  # Define thread for kill_handler
    kill_thread.daemon = True  # Daemon mode (kill thread automatically at program end)
    kill_thread.start()  # Start this thread in first position
    #--------------------------------------------------------
    # THREAD #3 : Acquire angles calculated with Leap Motion
    #--------------------------------------------------------
    p = threading.Thread(target=producer_acquisition_angle)  # Define thread for producer_acquisition_angle
    p.daemon = True  # Daemon mode (kill thread automatically at program end)
    p.start()  # Start this thread in third position
    #--------------------------------------------------------
    # THREAD #4 : Save angles calculated in file .csv
    #--------------------------------------------------------
    cc = threading.Thread(target=consumer_save)  # Define thread for consumer_save
    cc.daemon = True  # Daemon mode (kill thread automatically at program end)
    cc.start()  # Start this thread in fourth position
    #--------------------------------------------------------
    # THREAD #2 : Display angles calculated with acquisition
    #--------------------------------------------------------
    c = threading.Thread(target=consumer_display)  # Define thread for consumer_display
    c.daemon = True  # Daemon mode (kill thread automatically at program end)
    c.start()  # Start this thread in second position
    #--------------------------------------------------------
    # Wait for the producers threads to finish
    # NOTE(review): the producer runs `while True`, so this join() never
    # returns normally; the process ends via kill_handler's SIGINT.
    #--------------------------------------------------------
    p.join()
    #--------------------------------------------------------
    # wait till all the jobs are done in the queue
    # NOTE(review): unreachable in practice (see above), and the consumers
    # never call task_done(), so this join() could not complete anyway.
    #--------------------------------------------------------
    data_queue.join()
#=======================================================================================================================
#== FUNCTION: main; executes program ===================================================================================
def main():
    """Program entry point: draw the static hand template once, then start
    the acquisition/display/save threads (blocks until termination)."""
    # Display only template of hands without angles
    display_template()
    # Apply multi-threading for acquisition, display angles calculated and save results
    multi_threading()
#== START PROGRAM ======================================================================================================
#== START PROGRAM ======================================================================================================
if __name__ == "__main__":
    #-------------------------------------------------------------------------------------------------------------------
    # Global variables
    #-------------------------------------------------------------------------------------------------------------------
    # Index of data, joints, fingers and hands
    Min, Value, Max = 0, 1, 2  # Define type of data
    MP, IPP, IPD = 0, 1, 2  # Define type of each joint
    Thumb, Index, Middle, Ring, Pinky = 0, 1, 2, 3, 4  # Define type of each finger
    Left, Right = 0, 1  # Define type of each hand
    # List of finger extrema, joints, fingers and hands
    # (each innermost triple is [Min, Value, Max]; the thumb has 2 joint
    # slots, the other fingers 3)
    Hands_Angle = [[[[180, 0, -180], [180, 0, -180]],  # [Min, Value, Max]--> [MP][IPP] --> [Thumb]
                    [[180, 0, -180], [180, 0, -180], [180, 0, -180]],  # [Min, Value, Max]--> [[MP][IPP][IPD]] --> [Index]
                    [[180, 0, -180], [180, 0, -180], [180, 0, -180]],  # [Min, Value, Max]--> [[MP][IPP][IPD]] --> [Middle]
                    [[180, 0, -180], [180, 0, -180], [180, 0, -180]],  # [Min, Value, Max]--> [[MP][IPP][IPD]] --> [Ring]
                    [[180, 0, -180], [180, 0, -180], [180, 0, -180]]],  # [Min, Value, Max]--> [[MP][IPP][IPD]] --> [Pinky]
                   [[[180, 0, -180], [180, 0, -180]],  # Same for right hand
                    [[180, 0, -180], [180, 0, -180], [180, 0, -180]],  # Same for right hand
                    [[180, 0, -180], [180, 0, -180], [180, 0, -180]],  # Same for right hand
                    [[180, 0, -180], [180, 0, -180], [180, 0, -180]],  # Same for right hand
                    [[180, 0, -180], [180, 0, -180], [180, 0, -180]]]]  # Same for right hand
    # Initialisation of results printing as global variable
    # ('~|~' is the placeholder shown before any angle is measured)
    results = [[[[' ', '~|~', ' '], [' ', '~|~', ' ']],
                [[' ', '~|~', ' '], [' ', '~|~', ' '], [' ', '~|~', ' ']],
                [[' ', '~|~', ' '], [' ', '~|~', ' '], [' ', '~|~', ' ']],
                [[' ', '~|~', ' '], [' ', '~|~', ' '], [' ', '~|~', ' ']],
                [[' ', '~|~', ' '], [' ', '~|~', ' '], [' ', '~|~', ' ']]],
               [[[' ', '~|~', ' '], [' ', '~|~', ' ']],
                [[' ', '~|~', ' '], [' ', '~|~', ' '], [' ', '~|~', ' ']],
                [[' ', '~|~', ' '], [' ', '~|~', ' '], [' ', '~|~', ' ']],
                [[' ', '~|~', ' '], [' ', '~|~', ' '], [' ', '~|~', ' ']],
                [[' ', '~|~', ' '], [' ', '~|~', ' '], [' ', '~|~', ' ']]]]
    # Initialisation of results saving as global variable
    # ('-' marks "no data yet"; save() only writes once a cell changes)
    results_to_save = [[[['-', '-', '-'], ['-', '-', '-']],
                        [['-', '-', '-'], ['-', '-', '-'], ['-', '-', '-']],
                        [['-', '-', '-'], ['-', '-', '-'], ['-', '-', '-']],
                        [['-', '-', '-'], ['-', '-', '-'], ['-', '-', '-']],
                        [['-', '-', '-'], ['-', '-', '-'], ['-', '-', '-']]],
                       [[['-', '-', '-'], ['-', '-', '-']],
                        [['-', '-', '-'], ['-', '-', '-'], ['-', '-', '-']],
                        [['-', '-', '-'], ['-', '-', '-'], ['-', '-', '-']],
                        [['-', '-', '-'], ['-', '-', '-'], ['-', '-', '-']],
                        [['-', '-', '-'], ['-', '-', '-'], ['-', '-', '-']]]]
    # Define patient data
    Patient_First_Name = "Model 3D"
    Patient_Name = "Hand"
    Patient_Age = "2 ans"
    Session_Number = 1
    # Report number encodes the launch date and time as YYYYMMDD_HHMMSS
    Report_Number = str(datetime.datetime.fromtimestamp(time.time())).split(' ')[0].replace('-','') +'_'+ (str(datetime.datetime.fromtimestamp(time.time())).split(' ')[1][:8]).replace(':','')
    # Initialize markers
    data_ok = False
    #-------------------------------------------------------------------------------------------------------------------
    # Stocking user choices
    #-------------------------------------------------------------------------------------------------------------------
    User_Choices = [[Left, Right],  # List of hand selected by user
                    [[Thumb, [MP, IPP]],  # Finger joints : [finger 1,[list of joints selected]]
                     [Index, [MP, IPP, IPD]],  # Finger joints : [finger 2,[list of joints selected]]
                     [Middle, [MP, IPP, IPD]],  # Finger joints : [finger 3,[list of joints selected]]
                     [Ring, [MP, IPP, IPD]],  # Finger joints : [finger 4,[list of joints selected]]
                     [Pinky, [MP, IPP, IPD]]]]  # Finger joints : [finger 5,[list of joints selected]]
    # Call main function
    main()
#=======================================================================================================================
|
marieandreeo/GBM4900
|
LMPY/src/testsLeapMotionAngle.py
|
Python
|
apache-2.0
| 53,403
|
"""
Classes representing uploaded files.
"""
import os
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from airy.core.conf import settings
from airy.core.files.base import File
from airy.core.files import temp as tempfile
from airy.utils.encoding import smart_str
# Public API of this module (names exported by ``from <module> import *``).
__all__ = ('UploadedFile', 'TemporaryUploadedFile', 'InMemoryUploadedFile',
           'SimpleUploadedFile')
class UploadedFile(File):
    """
    An abstract uploaded file (``TemporaryUploadedFile`` and
    ``InMemoryUploadedFile`` are the built-in concrete subclasses).

    An ``UploadedFile`` object behaves somewhat like a file object and
    represents some file data that the user submitted with a form.
    """
    DEFAULT_CHUNK_SIZE = 64 * 2**10

    def __init__(self, file=None, name=None, content_type=None, size=None, charset=None):
        super(UploadedFile, self).__init__(file, name)
        self.size = size
        self.content_type = content_type
        self.charset = charset

    def __repr__(self):
        return "<%s: %s (%s)>" % (
            self.__class__.__name__, smart_str(self.name), self.content_type)

    def _get_name(self):
        return self._name

    def _set_name(self, name):
        # Sanitize the client-supplied file name before storing it.
        if name is None:
            self._name = None
            return
        # Strip any directory components -- only the base name is safe.
        name = os.path.basename(name)
        # Keep the total length within 255 characters (older OS limit),
        # preserving the extension.
        if len(name) > 255:
            root, ext = os.path.splitext(name)
            name = root[:255 - len(ext)] + ext
        self._name = name

    name = property(_get_name, _set_name)
class TemporaryUploadedFile(UploadedFile):
"""
A file uploaded to a temporary location (i.e. stream-to-disk).
"""
def __init__(self, name, content_type, size, charset):
if settings.FILE_UPLOAD_TEMP_DIR:
file = tempfile.NamedTemporaryFile(suffix='.upload',
dir=settings.FILE_UPLOAD_TEMP_DIR)
else:
file = tempfile.NamedTemporaryFile(suffix='.upload')
super(TemporaryUploadedFile, self).__init__(file, name, content_type, size, charset)
def temporary_file_path(self):
"""
Returns the full path of this file.
"""
return self.file.name
def close(self):
try:
return self.file.close()
except OSError, e:
if e.errno != 2:
# Means the file was moved or deleted before the tempfile
# could unlink it. Still sets self.file.close_called and
# calls self.file.file.close() before the exception
raise
class InMemoryUploadedFile(UploadedFile):
    """
    A file uploaded into memory (i.e. stream-to-memory).
    """
    def __init__(self, file, field_name, name, content_type, size, charset):
        super(InMemoryUploadedFile, self).__init__(file, name, content_type, size, charset)
        self.field_name = field_name

    def open(self, mode=None):
        # "Reopening" just means rewinding the in-memory buffer.
        self.file.seek(0)

    def close(self):
        # Nothing to release for an in-memory buffer.
        pass

    def chunks(self, chunk_size=None):
        # The whole payload already sits in memory, so yield it in one
        # piece; chunk_size is accepted for API compatibility but unused.
        self.file.seek(0)
        yield self.read()

    def multiple_chunks(self, chunk_size=None):
        """Return False: an in-memory file never has multiple chunks."""
        return False
class SimpleUploadedFile(InMemoryUploadedFile):
    """
    A simple representation of a file, which just has content, size, and a name.
    """
    def __init__(self, name, content, content_type='text/plain'):
        # Treat None content as empty so len() and StringIO stay valid.
        content = content or ''
        super(SimpleUploadedFile, self).__init__(
            StringIO(content), None, name, content_type, len(content), None)

    @classmethod
    def from_dict(cls, file_dict):
        """
        Creates a SimpleUploadedFile object from
        a dictionary object with the following keys:
           - filename
           - content-type
           - content
        """
        return cls(file_dict['filename'],
                   file_dict['content'],
                   file_dict.get('content-type', 'text/plain'))
|
letolab/airy
|
airy/core/files/uploadedfile.py
|
Python
|
bsd-2-clause
| 4,222
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2016, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import unittest
import random
from htmresearch.support.register_regions import registerAllResearchRegions
from htmresearch.frameworks.layers.laminar_network import createNetwork
# Single-column L4/L2 network configuration used by the basic create/link
# tests.
networkConfig1 = {
  "networkType": "L4L2Column",
  "externalInputSize": 1024,
  "sensorInputSize": 1024,
  "L4RegionType": "py.ApicalTMPairRegion",
  "L4Params": {
    "columnCount": 1024,
    "cellsPerColumn": 8,
    "learn": True,
    "learnOnOneCell": False,
    "initialPermanence": 0.51,
    "connectedPermanence": 0.6,
    "permanenceIncrement": 0.1,
    "permanenceDecrement": 0.02,
    "minThreshold": 10,
    "basalPredictedSegmentDecrement": 0.004,
    "activationThreshold": 13,
    "sampleSize": 20,
  },
  "L2Params": {
    "inputWidth": 1024 * 8,
    "cellCount": 4096,
    "sdrSize": 40,
    "synPermProximalInc": 0.1,
    "synPermProximalDec": 0.001,
    "initialProximalPermanence": 0.6,
    "minThresholdProximal": 10,
    "sampleSizeProximal": 20,
    "connectedPermanenceProximal": 0.5,
    "synPermDistalInc": 0.1,
    "synPermDistalDec": 0.001,
    "initialDistalPermanence": 0.41,
    "activationThresholdDistal": 13,
    "sampleSizeDistal": 20,
    "connectedPermanenceDistal": 0.5,
    "learningMode": True,
  },
}

# Three-column variant of the same network (adds an explicit L4 seed).
networkConfig2 = {
  "networkType": "MultipleL4L2Columns",
  "numCorticalColumns": 3,
  "externalInputSize": 1024,
  "sensorInputSize": 1024,
  "L4RegionType": "py.ApicalTMPairRegion",
  "L4Params": {
    "columnCount": 1024,
    "cellsPerColumn": 8,
    "learn": True,
    "learnOnOneCell": False,
    "initialPermanence": 0.51,
    "connectedPermanence": 0.6,
    "permanenceIncrement": 0.1,
    "permanenceDecrement": 0.02,
    "minThreshold": 10,
    "basalPredictedSegmentDecrement": 0.004,
    "activationThreshold": 13,
    "sampleSize": 20,
    "seed": 42,
  },
  "L2Params": {
    "inputWidth": 1024 * 8,
    "cellCount": 4096,
    "sdrSize": 40,
    "synPermProximalInc": 0.1,
    "synPermProximalDec": 0.001,
    "initialProximalPermanence": 0.6,
    "minThresholdProximal": 10,
    "sampleSizeProximal": 20,
    "connectedPermanenceProximal": 0.5,
    "synPermDistalInc": 0.1,
    "synPermDistalDec": 0.001,
    "initialDistalPermanence": 0.41,
    "activationThresholdDistal": 13,
    "sampleSizeDistal": 20,
    "connectedPermanenceDistal": 0.5,
    "learningMode": True,
  }
}

# Two-column variant (no L4 seed override).
networkConfig3 = {
  "networkType": "MultipleL4L2Columns",
  "numCorticalColumns": 2,
  "externalInputSize": 1024,
  "sensorInputSize": 1024,
  "L4RegionType": "py.ApicalTMPairRegion",
  "L4Params": {
    "columnCount": 1024,
    "cellsPerColumn": 8,
    "learn": True,
    "learnOnOneCell": False,
    "initialPermanence": 0.51,
    "connectedPermanence": 0.6,
    "permanenceIncrement": 0.1,
    "permanenceDecrement": 0.02,
    "minThreshold": 10,
    "basalPredictedSegmentDecrement": 0.004,
    "activationThreshold": 13,
    "sampleSize": 20,
  },
  "L2Params": {
    "inputWidth": 1024 * 8,
    "cellCount": 4096,
    "sdrSize": 40,
    "synPermProximalInc": 0.1,
    "synPermProximalDec": 0.001,
    "initialProximalPermanence": 0.6,
    "minThresholdProximal": 10,
    "sampleSizeProximal": 20,
    "connectedPermanenceProximal": 0.5,
    "synPermDistalInc": 0.1,
    "synPermDistalDec": 0.001,
    "initialDistalPermanence": 0.41,
    "activationThresholdDistal": 13,
    "sampleSizeDistal": 20,
    "connectedPermanenceDistal": 0.5,
    "learningMode": True,
  }
}

# Five-column topology variant: columns are placed on a grid and only
# columns within maxConnectionDistance of each other are laterally linked.
networkConfig4 = {
  "networkType": "MultipleL4L2ColumnsWithTopology",
  "numCorticalColumns": 5,
  "externalInputSize": 1024,
  "sensorInputSize": 1024,
  "columnPositions": [(0, 0), (1, 0), (2, 0), (2, 1), (2, -1)],
  "maxConnectionDistance": 1,
  "L4RegionType": "py.ApicalTMPairRegion",
  "L4Params": {
    "columnCount": 1024,
    "cellsPerColumn": 8,
    "learn": True,
    "learnOnOneCell": False,
    "initialPermanence": 0.51,
    "connectedPermanence": 0.6,
    "permanenceIncrement": 0.1,
    "permanenceDecrement": 0.02,
    "minThreshold": 10,
    "basalPredictedSegmentDecrement": 0.004,
    "activationThreshold": 13,
    "sampleSize": 20,
    "seed": 42,
  },
  "L2Params": {
    "inputWidth": 1024 * 8,
    "cellCount": 4096,
    "sdrSize": 40,
    "synPermProximalInc": 0.1,
    "synPermProximalDec": 0.001,
    "initialProximalPermanence": 0.6,
    "minThresholdProximal": 10,
    "sampleSizeProximal": 20,
    "connectedPermanenceProximal": 0.5,
    "synPermDistalInc": 0.1,
    "synPermDistalDec": 0.001,
    "initialDistalPermanence": 0.41,
    "activationThresholdDistal": 13,
    "sampleSizeDistal": 20,
    "connectedPermanenceDistal": 0.5,
    "learningMode": True,
  }
}
class LaminarNetworkTest(unittest.TestCase):
""" Super simple test of laminar network factory"""
  @classmethod
  def setUpClass(cls):
    # Seed Python's RNG so any randomized behaviour is reproducible, and
    # register the htmresearch regions with the Network API before any
    # network is built.
    random.seed(42)
    registerAllResearchRegions()
def testL4L2ColumnCreate(self):
  """
  In this simplistic test we just create a network, ensure it has the
  right number of regions and try to run some inputs through it without
  crashing.
  """
  # Create a simple network to test the sensor
  net = createNetwork(networkConfig1)
  # A single-column network consists of 4 regions: external input, sensor
  # input, an L4 column and an L2 column.
  self.assertEqual(len(net.regions.keys()),4,
                   "Incorrect number of regions")

  # Add some input vectors to the queue
  externalInput = net.regions["externalInput_0"].getSelf()
  sensorInput = net.regions["sensorInput_0"].getSelf()

  # Add 3 input vectors
  # NOTE(review): the third argument is 9 for the first external input but
  # 0 everywhere else -- looks accidental; confirm against the sensor
  # region's addDataToQueue signature.
  externalInput.addDataToQueue([2, 42, 1023], 0, 9)
  sensorInput.addDataToQueue([2, 42, 1023], 0, 0)
  externalInput.addDataToQueue([1, 42, 1022], 0, 0)
  sensorInput.addDataToQueue([1, 42, 1022], 0, 0)
  externalInput.addDataToQueue([3, 42, 1021], 0, 0)
  sensorInput.addDataToQueue([3, 42, 1021], 0, 0)

  # Run the network and check outputs are as expected
  net.run(3)
def testL4L2ColumnLinks(self):
  """
  In this simplistic test we create a network and ensure that it has the
  correct links between regions.
  """
  # Build the network whose wiring we want to inspect.
  net = createNetwork(networkConfig1)

  # The complete set of links a single-column L4L2 network must contain.
  desired_links = set([
      "sensorInput_0.dataOut-->L4Column_0.activeColumns",
      "L2Column_0.feedForwardOutput-->L4Column_0.apicalInput",
      "externalInput_0.dataOut-->L4Column_0.basalInput",
      "L4Column_0.predictedActiveCells-->L2Column_0.feedforwardGrowthCandidates",
      "L4Column_0.activeCells-->L2Column_0.feedforwardInput",
      "sensorInput_0.resetOut-->L2Column_0.resetIn",
      "sensorInput_0.resetOut-->L4Column_0.resetIn",
      "externalInput_0.dataOut-->L4Column_0.basalGrowthCandidates",
  ])

  # Textual representations of the links actually present in the network.
  links = set(link.second.getMoniker() for link in net.getLinks())

  # Describe the mismatch in both directions so failures are easy to read.
  error_message = "Error: Links incorrectly formed in simple L2L4 network: \n"
  for missing in desired_links - links:
    error_message += "Failed to find link: {}\n".format(missing)
  for unexpected in links - desired_links:
    error_message += "Found unexpected link: {}\n".format(unexpected)

  self.assertSetEqual(desired_links, links, error_message)
def testMultipleL4L2ColumnsCreate(self):
  """
  In this simplistic test we create a network with 3 L4L2Columns, ensure it
  has the right number of regions and try to run some inputs through it
  without crashing.
  """
  net = createNetwork(networkConfig2)
  # Each cortical column contributes 4 regions.
  self.assertEqual(len(net.regions.keys()), 4*3,
                   "Incorrect number of regions")

  # Queue one identical input pair on every column's sensor regions.
  for i in xrange(3):
    externalInput = net.regions["externalInput_{}".format(i)].getSelf()
    sensorInput = net.regions["sensorInput_{}".format(i)].getSelf()
    externalInput.addDataToQueue([2, 42, 1023], 0, 9)
    sensorInput.addDataToQueue([2, 42, 1023], 0, 0)

  # Run the network and check outputs are as expected
  net.run(1)

  # Spotcheck some of the phases
  self.assertEqual(net.getPhases("externalInput_0"), (0,),
                   "Incorrect phase externalInput_0")
  self.assertEqual(net.getPhases("externalInput_1"), (0,),
                   "Incorrect phase for externalInput_1")
  self.assertEqual(net.getPhases("sensorInput_0"), (0,),
                   "Incorrect phase for sensorInput_0")
  self.assertEqual(net.getPhases("sensorInput_1"), (0,),
                   "Incorrect phase for sensorInput_1")
  self.assertEqual(net.getPhases("L4Column_0"), (2,),
                   "Incorrect phase for L4Column_0")
  self.assertEqual(net.getPhases("L4Column_1"), (2,),
                   "Incorrect phase for L4Column_1")
def testMultipleL4L2ColumnLinks(self):
  """
  In this simplistic test we create a network with 3 L4L2 columns, and
  ensure that it has the correct links between regions.
  """
  # Create a simple network to check its architecture
  net = createNetwork(networkConfig2)

  # Every column i carries the same eight intra-column links.
  desired_links = set()
  for i in xrange(3):
    desired_links.update([
        "sensorInput_{i}.dataOut-->L4Column_{i}.activeColumns".format(i=i),
        "L2Column_{i}.feedForwardOutput-->L4Column_{i}.apicalInput".format(i=i),
        "externalInput_{i}.dataOut-->L4Column_{i}.basalInput".format(i=i),
        "L4Column_{i}.predictedActiveCells-->"
        "L2Column_{i}.feedforwardGrowthCandidates".format(i=i),
        "L4Column_{i}.activeCells-->L2Column_{i}.feedforwardInput".format(i=i),
        "sensorInput_{i}.resetOut-->L2Column_{i}.resetIn".format(i=i),
        "sensorInput_{i}.resetOut-->L4Column_{i}.resetIn".format(i=i),
        "externalInput_{i}.dataOut-->L4Column_{i}.basalGrowthCandidates".format(i=i),
    ])

  # Without topology, every pair of distinct columns is laterally
  # connected in both directions.
  for src in xrange(3):
    for dst in xrange(3):
      if src != dst:
        desired_links.add(
            "L2Column_{}.feedForwardOutput-->L2Column_{}.lateralInput".format(
                src, dst))

  # Textual representations of the links actually present in the network.
  links = set(link.second.getMoniker() for link in net.getLinks())

  # Report mismatches in both directions for easier debugging.
  error_message = "Links incorrectly formed in multicolumn L2L4 network: \n"
  for missing in desired_links - links:
    error_message += "Failed to find link: {}\n".format(missing)
  for unexpected in links - desired_links:
    error_message += "Found unexpected link: {}\n".format(unexpected)

  self.assertSetEqual(desired_links, links, error_message)
def testMultipleL4L2ColumnsWithTopologyCreate(self):
  """
  In this simplistic test we create a network with 5 L4L2Columns and
  topological lateral connections, ensure it has the right number of regions,
  and try to run some inputs through it without crashing.
  """
  net = createNetwork(networkConfig4)
  # 5 columns x 4 regions per column.
  self.assertEqual(len(net.regions.keys()), 20, "Incorrect number of regions")

  # Queue one identical input pair on every column's sensor regions.
  for i in xrange(5):
    externalInput = net.regions["externalInput_{}".format(i)].getSelf()
    sensorInput = net.regions["sensorInput_{}".format(i)].getSelf()
    externalInput.addDataToQueue([2, 42, 1023], 0, 9)
    sensorInput.addDataToQueue([2, 42, 1023], 0, 0)

  # Run the network and check outputs are as expected
  net.run(1)

  # Spotcheck some of the phases
  self.assertEqual(net.getPhases("externalInput_0"), (0,),
                   "Incorrect phase externalInput_0")
  self.assertEqual(net.getPhases("externalInput_1"), (0,),
                   "Incorrect phase for externalInput_1")
  self.assertEqual(net.getPhases("L4Column_0"), (2,),
                   "Incorrect phase for L4Column_0")
  self.assertEqual(net.getPhases("L4Column_1"), (2,),
                   "Incorrect phase for L4Column_1")
def testMultipleL4L2ColumnsWithTopologyLinks(self):
  """
  In this simplistic test we create a network with 5 L4L2Columns and
  topological lateral connections, and ensure that it has the correct links
  between regions. The network is laid out as follows:

            3
            |
    0---1---2
            |
            4
  """
  net = createNetwork(networkConfig4)

  # Every column i carries the same eight intra-column links.
  desired_links = set()
  for i in xrange(5):
    desired_links.update([
        "sensorInput_{i}.dataOut-->L4Column_{i}.activeColumns".format(i=i),
        "L2Column_{i}.feedForwardOutput-->L4Column_{i}.apicalInput".format(i=i),
        "externalInput_{i}.dataOut-->L4Column_{i}.basalInput".format(i=i),
        "L4Column_{i}.predictedActiveCells-->"
        "L2Column_{i}.feedforwardGrowthCandidates".format(i=i),
        "L4Column_{i}.activeCells-->L2Column_{i}.feedforwardInput".format(i=i),
        "sensorInput_{i}.resetOut-->L2Column_{i}.resetIn".format(i=i),
        "sensorInput_{i}.resetOut-->L4Column_{i}.resetIn".format(i=i),
        "externalInput_{i}.dataOut-->L4Column_{i}.basalGrowthCandidates".format(i=i),
    ])

  # With topology, lateral L2 links only exist between physically adjacent
  # columns (distance <= maxConnectionDistance), in both directions.
  for a, b in [(0, 1), (1, 2), (2, 3), (2, 4)]:
    desired_links.add(
        "L2Column_{}.feedForwardOutput-->L2Column_{}.lateralInput".format(a, b))
    desired_links.add(
        "L2Column_{}.feedForwardOutput-->L2Column_{}.lateralInput".format(b, a))

  # Textual representations of the links actually present in the network.
  links = set(link.second.getMoniker() for link in net.getLinks())

  # Report mismatches in both directions for easier debugging.
  error_message = "Links incorrectly formed in multicolumn L2L4 network: \n"
  for missing in desired_links - links:
    error_message += "Failed to find link: {}\n".format(missing)
  for unexpected in links - desired_links:
    error_message += "Found unexpected link: {}\n".format(unexpected)

  self.assertSetEqual(desired_links, links, error_message)
@unittest.skip("Need to implement")
def testMultipleL4L2ColumnsSPCreate(self):
  """
  In this simplistic test we create a network with 3 L4L2Columns, with spatial
  poolers. We ensure it has the right number of regions, that spatial poolers
  are named appropriately, and try to run some inputs through it without
  crashing.
  """
  # TODO: implement once the SP-enabled multi-column network type exists.
  pass
def testCustomParameters(self):
  """
  This test creates a network with custom parameters and tests that the
  network gets correctly constructed.
  """
  # Deliberately non-default values everywhere so that a parameter being
  # silently dropped or defaulted shows up as a mismatch below.
  customConfig = {
    "networkType": "L4L2Column",
    "externalInputSize": 256,
    "sensorInputSize": 512,
    "L4RegionType": "py.ApicalTMPairRegion",
    "L4Params": {
      "columnCount": 512,
      "cellsPerColumn": 16,
      "learn": True,
      "learnOnOneCell": False,
      "initialPermanence": 0.23,
      "connectedPermanence": 0.75,
      "permanenceIncrement": 0.45,
      "permanenceDecrement": 0.1,
      "minThreshold": 15,
      "basalPredictedSegmentDecrement": 0.21,
      "activationThreshold": 16,
      "sampleSize": 24,
    },
    "L2Params": {
      "inputWidth": 512 * 8,
      "cellCount": 2048,
      "sdrSize": 30,
      "synPermProximalInc": 0.12,
      "synPermProximalDec": 0.011,
      "initialProximalPermanence": 0.8,
      "minThresholdProximal": 8,
      "sampleSizeProximal": 17,
      "connectedPermanenceProximal": 0.6,
      "synPermDistalInc": 0.09,
      "synPermDistalDec": 0.002,
      "initialDistalPermanence": 0.52,
      "activationThresholdDistal": 15,
      "sampleSizeDistal": 25,
      "connectedPermanenceDistal": 0.6,
      "learningMode": True,
    },
  }

  net = createNetwork(customConfig)

  self.assertEqual(
    len(net.regions.keys()), 4,
    "Incorrect number of regions"
  )

  # Get various regions
  externalInput = net.regions["externalInput_0"].getSelf()
  sensorInput = net.regions["sensorInput_0"].getSelf()
  L4Column = net.regions["L4Column_0"].getSelf()
  L2Column = net.regions["L2Column_0"].getSelf()

  # we need to do a first compute for the various elements to be constructed
  sensorInput.addDataToQueue([], 0, 0)
  externalInput.addDataToQueue([], 0, 0)
  net.run(1)

  # check that parameters are correct in L4
  # (iteritems: this module targets Python 2)
  for param, value in customConfig["L4Params"].iteritems():
    self.assertEqual(L4Column.getParameter(param), value)

  # check that parameters are correct in L2
  # some parameters are in the tm members
  for param, value in customConfig["L2Params"].iteritems():
    self.assertEqual(L2Column.getParameter(param), value)

  # check that the configured input sizes propagated to the input regions
  self.assertEqual(externalInput.outputWidth,
                   customConfig["externalInputSize"])
  self.assertEqual(sensorInput.outputWidth,
                   customConfig["sensorInputSize"])
def testSingleColumnL4L2DataFlow(self):
  """
  This test trains a network with a few (feature, location) pairs and checks
  the data flows correctly, and that each intermediate representation is
  correct.

  Object A is trained as [(F0, L0), (F1, L1)] and object B as
  [(F0, L2), (F1, L1)].  Inference is then checked for each trained pair,
  for the ambiguous pair (F1, L1) shared by both objects, and for the
  never-trained pair (F1, L2).
  """
  # Create a simple network to test the sensor
  net = createNetwork(networkConfig1)
  self.assertEqual(
    len(net.regions.keys()), 4,
    "Incorrect number of regions"
  )

  # Get various regions
  externalInput = net.regions["externalInput_0"].getSelf()
  sensorInput = net.regions["sensorInput_0"].getSelf()
  L4Column = net.regions["L4Column_0"].getSelf()
  L2Column = net.regions["L2Column_0"].getSelf()

  # create a feature and location pool
  features = [self.generatePattern(1024, 20) for _ in xrange(2)]
  locations = [self.generatePattern(1024, 20) for _ in xrange(3)]

  def queuePair(feature, location):
    # Queue one (feature, location) SDR pair on the two input regions.
    sensorInput.addDataToQueue(features[feature], 0, 0)
    externalInput.addDataToQueue(locations[location], 0, 0)

  def sendReset():
    # Signal an object change to both input regions and process it.
    sensorInput.addResetToQueue(0)
    externalInput.addResetToQueue(0)
    net.run(1)

  # train with following pairs:
  # (F0, L0) (F1, L1) on object A
  # (F0, L2) (F1, L1) on object B

  # Object A
  # start with a (F0, L0) input to get the L2 representation for object A
  queuePair(0, 0)
  net.run(1)

  # get L2 representation for object A
  L2RepresentationA = self.getCurrentL2Representation(L2Column)
  self.assertEqual(len(L2RepresentationA), 40)

  for _ in xrange(4):
    queuePair(0, 0)
    net.run(1)
    # check L2 stays on the object A representation
    self.assertEqual(
      self.getCurrentL2Representation(L2Column),
      L2RepresentationA
    )

  queuePair(1, 1)
  net.run(1)
  # check L2
  self.assertEqual(
    self.getCurrentL2Representation(L2Column),
    L2RepresentationA
  )

  # get L4 representations when they are stable
  queuePair(0, 0)
  net.run(1)
  L4Representation00 = self.getL4PredictedActiveCells(L4Column)
  self.assertEqual(len(L4Representation00), 20)

  # send reset signal
  sendReset()

  # Object B
  # start with a (F0, L2) input to get the L2 representation for object B
  queuePair(0, 2)
  net.run(1)

  # get L2 representation for object B
  L2RepresentationB = self.getCurrentL2Representation(L2Column)
  self.assertEqual(len(L2RepresentationB), 40)
  # check that it is very different from object A
  self.assertLessEqual(len(L2RepresentationA & L2RepresentationB), 5)

  for _ in xrange(4):
    queuePair(0, 2)
    net.run(1)
    # check L2
    self.assertEqual(
      self.getCurrentL2Representation(L2Column),
      L2RepresentationB
    )

  queuePair(1, 1)
  net.run(1)
  # check L2
  self.assertEqual(
    self.getCurrentL2Representation(L2Column),
    L2RepresentationB
  )

  # get L4 representations when they are stable
  queuePair(0, 2)
  net.run(1)
  L4Representation02 = self.getL4PredictedActiveCells(L4Column)
  self.assertEqual(len(L4Representation02), 20)

  queuePair(1, 1)
  net.run(1)
  L4Representation11 = self.getL4PredictedActiveCells(L4Column)
  self.assertEqual(len(L4Representation11), 20)

  # send reset signal
  sendReset()

  # check inference with each (feature, location) pair
  L2Column.setParameter("learningMode", 0, False)
  L4Column.setParameter("learn", 0, False)

  # (F0, L0)
  queuePair(0, 0)
  net.run(1)
  # check L2 representation, L4 representation, no bursting
  self.assertEqual(
    self.getCurrentL2Representation(L2Column),
    L2RepresentationA
  )
  self.assertEqual(
    self.getL4PredictedActiveCells(L4Column),
    L4Representation00
  )
  self.assertEqual(len(self.getL4BurstingCells(L4Column)), 0)

  # send reset signal
  sendReset()

  # (F0, L2)
  queuePair(0, 2)
  net.run(1)
  # check L2 representation, L4 representation, no bursting
  self.assertEqual(
    self.getCurrentL2Representation(L2Column),
    L2RepresentationB
  )
  self.assertEqual(
    self.getL4PredictedActiveCells(L4Column),
    L4Representation02
  )
  self.assertEqual(len(self.getL4BurstingCells(L4Column)), 0)

  # send reset signal
  sendReset()

  # (F1, L1): trained on both objects, so the resulting L2 representation
  # is the union of the two objects' representations
  queuePair(1, 1)
  net.run(1)
  # check L2 representation, L4 representation, no bursting
  self.assertEqual(
    self.getCurrentL2Representation(L2Column),
    L2RepresentationA | L2RepresentationB
  )
  self.assertEqual(
    self.getL4PredictedActiveCells(L4Column),
    L4Representation11
  )
  self.assertEqual(len(self.getL4BurstingCells(L4Column)), 0)

  # (F1, L2) was never trained: nothing is predicted and every active
  # column bursts (20 active columns x 8 cells per column)
  queuePair(1, 2)
  net.run(1)
  # check bursting (representation in L2 should be like in a random SP)
  self.assertEqual(len(self.getL4PredictedActiveCells(L4Column)), 0)
  self.assertEqual(len(self.getL4BurstingCells(L4Column)), 20 * 8)
def testTwoColumnsL4L2DataFlow(self):
  """
  This test trains a network with a few (feature, location) pairs and checks
  the data flows correctly, and that each intermediate representation is
  correct.

  Indices 0 and 1 in variable names refer to cortical column number.
  Object A is trained as [(F0, L0), (F1, L1)] and object B as
  [(F0, L2), (F1, L1)] on both columns simultaneously.
  """
  # Create a simple network to test the sensor
  net = createNetwork(networkConfig3)
  self.assertEqual(
    len(net.regions.keys()), 4 * 2,
    # BUG FIX: the message previously reported 8 * 2 as the expected count
    # while the assertion checks 4 * 2.
    "Incorrect number of regions, expected {} but had {}".format(
      4 * 2, len(net.regions.keys()))
  )

  # Get various regions
  externalInput0 = net.regions["externalInput_0"].getSelf()
  sensorInput0 = net.regions["sensorInput_0"].getSelf()
  L4Column0 = net.regions["L4Column_0"].getSelf()
  L2Column0 = net.regions["L2Column_0"].getSelf()

  externalInput1 = net.regions["externalInput_1"].getSelf()
  sensorInput1 = net.regions["sensorInput_1"].getSelf()
  L4Column1 = net.regions["L4Column_1"].getSelf()
  L2Column1 = net.regions["L2Column_1"].getSelf()

  # create a feature and location pool for column 0
  features0 = [self.generatePattern(1024, 20) for _ in xrange(2)]
  locations0 = [self.generatePattern(1024, 20) for _ in xrange(3)]

  # create a feature and location pool for column 1
  features1 = [self.generatePattern(1024, 20) for _ in xrange(2)]
  locations1 = [self.generatePattern(1024, 20) for _ in xrange(3)]

  def queuePair(feature, location):
    # Queue one (feature, location) SDR pair on both columns' inputs.
    sensorInput0.addDataToQueue(features0[feature], 0, 0)
    externalInput0.addDataToQueue(locations0[location], 0, 0)
    sensorInput1.addDataToQueue(features1[feature], 0, 0)
    externalInput1.addDataToQueue(locations1[location], 0, 0)

  def sendReset():
    # Signal an object change on all input regions and process it.
    sensorInput0.addResetToQueue(0)
    externalInput0.addResetToQueue(0)
    sensorInput1.addResetToQueue(0)
    externalInput1.addResetToQueue(0)
    net.run(1)

  # train with following pairs:
  # (F0, L0) (F1, L1) on object A
  # (F0, L2) (F1, L1) on object B

  # Object A
  # start with an object A input to get L2 representations for object A
  queuePair(0, 0)
  net.run(1)

  # get L2 representations for object A
  L2RepresentationA0 = self.getCurrentL2Representation(L2Column0)
  L2RepresentationA1 = self.getCurrentL2Representation(L2Column1)
  self.assertEqual(len(L2RepresentationA0), 40)
  # BUG FIX: this assertion previously re-checked L2RepresentationA0, so
  # column 1's representation size was never verified.
  self.assertEqual(len(L2RepresentationA1), 40)

  for _ in xrange(3):
    queuePair(0, 0)
    net.run(1)
    # check L2 in both columns
    self.assertEqual(
      self.getCurrentL2Representation(L2Column0),
      L2RepresentationA0
    )
    self.assertEqual(
      self.getCurrentL2Representation(L2Column1),
      L2RepresentationA1
    )

  queuePair(1, 1)
  net.run(1)
  # check L2
  self.assertEqual(
    self.getCurrentL2Representation(L2Column0),
    L2RepresentationA0
  )
  self.assertEqual(
    self.getCurrentL2Representation(L2Column1),
    L2RepresentationA1
  )

  # get L4 representations when they are stable
  queuePair(0, 0)
  net.run(1)
  L4Representation00_0 = self.getL4PredictedActiveCells(L4Column0)
  L4Representation00_1 = self.getL4PredictedActiveCells(L4Column1)
  self.assertEqual(len(L4Representation00_0), 20)
  self.assertEqual(len(L4Representation00_1), 20)

  # send reset signal
  sendReset()

  # Object B
  # start with input to get L2 representations
  queuePair(0, 2)
  net.run(1)

  # get L2 representations for object B
  L2RepresentationB0 = self.getCurrentL2Representation(L2Column0)
  L2RepresentationB1 = self.getCurrentL2Representation(L2Column1)
  self.assertEqual(len(L2RepresentationB0), 40)
  self.assertEqual(len(L2RepresentationB1), 40)
  # check that it is very different from object A
  self.assertLessEqual(len(L2RepresentationA0 & L2RepresentationB0), 5)
  self.assertLessEqual(len(L2RepresentationA1 & L2RepresentationB1), 5)

  for _ in xrange(3):
    queuePair(0, 2)
    net.run(1)
    # check L2 in both columns
    self.assertEqual(
      self.getCurrentL2Representation(L2Column0),
      L2RepresentationB0
    )
    self.assertEqual(
      self.getCurrentL2Representation(L2Column1),
      L2RepresentationB1
    )

  queuePair(1, 1)
  net.run(1)
  # check L2
  self.assertEqual(
    self.getCurrentL2Representation(L2Column0),
    L2RepresentationB0
  )
  self.assertEqual(
    self.getCurrentL2Representation(L2Column1),
    L2RepresentationB1
  )

  # get L4 representations when they are stable
  queuePair(0, 2)
  net.run(1)
  L4Representation02_0 = self.getL4PredictedActiveCells(L4Column0)
  L4Representation02_1 = self.getL4PredictedActiveCells(L4Column1)
  self.assertEqual(len(L4Representation02_0), 20)
  self.assertEqual(len(L4Representation02_1), 20)

  queuePair(1, 1)
  net.run(1)
  L4Representation11_0 = self.getL4PredictedActiveCells(L4Column0)
  L4Representation11_1 = self.getL4PredictedActiveCells(L4Column1)
  self.assertEqual(len(L4Representation11_0), 20)
  self.assertEqual(len(L4Representation11_1), 20)

  # send reset signal
  sendReset()

  # check inference with each (feature, location) pair
  L2Column0.setParameter("learningMode", 0, False)
  L4Column0.setParameter("learn", 0, False)
  L2Column1.setParameter("learningMode", 0, False)
  L4Column1.setParameter("learn", 0, False)

  # (F0, L0)
  queuePair(0, 0)
  net.run(1)
  # check L2 representations, L4 representations, no bursting
  # be a little tolerant on the L2 match
  self.assertLessEqual(
    len(self.getCurrentL2Representation(L2Column0) - L2RepresentationA0),
    5
  )
  self.assertGreaterEqual(
    len(self.getCurrentL2Representation(L2Column0) & L2RepresentationA0),
    35
  )
  self.assertEqual(
    self.getL4PredictedActiveCells(L4Column0),
    L4Representation00_0
  )
  self.assertEqual(len(self.getL4BurstingCells(L4Column0)), 0)

  # be a little tolerant on this test
  self.assertLessEqual(
    len(self.getCurrentL2Representation(L2Column1) - L2RepresentationA1),
    5
  )
  self.assertGreaterEqual(
    len(self.getCurrentL2Representation(L2Column1) & L2RepresentationA1),
    35
  )
  self.assertEqual(
    self.getL4PredictedActiveCells(L4Column1),
    L4Representation00_1
  )
  self.assertEqual(len(self.getL4BurstingCells(L4Column1)), 0)

  # send reset signal
  sendReset()

  # (F0, L2)
  # It is fed twice, for the ambiguous prediction test, because of the
  # one-off error in distal predictions
  # FIXME when this is changed in ColumnPooler
  queuePair(0, 2)
  queuePair(0, 2)
  net.run(2)
  # check L2 representation, L4 representation, no bursting
  self.assertEqual(
    self.getCurrentL2Representation(L2Column0),
    L2RepresentationB0
  )
  self.assertEqual(
    self.getL4PredictedActiveCells(L4Column0),
    L4Representation02_0
  )
  self.assertEqual(len(self.getL4BurstingCells(L4Column0)), 0)

  self.assertEqual(
    self.getCurrentL2Representation(L2Column1),
    L2RepresentationB1
  )
  self.assertEqual(
    self.getL4PredictedActiveCells(L4Column1),
    L4Representation02_1
  )
  self.assertEqual(len(self.getL4BurstingCells(L4Column1)), 0)

  # ambiguous pattern: (F1, L1)
  queuePair(1, 1)
  net.run(1)
  # check L2 representation, L4 representation, no bursting
  # as opposed to the previous test, the representation is not ambiguous
  self.assertEqual(
    self.getCurrentL2Representation(L2Column0),
    L2RepresentationB0
  )
  self.assertEqual(
    self.getL4PredictedActiveCells(L4Column0),
    L4Representation11_0
  )
  self.assertEqual(len(self.getL4BurstingCells(L4Column0)), 0)

  self.assertEqual(
    self.getCurrentL2Representation(L2Column1),
    L2RepresentationB1
  )
  self.assertEqual(
    self.getL4PredictedActiveCells(L4Column1),
    L4Representation11_1
  )
  self.assertEqual(len(self.getL4BurstingCells(L4Column1)), 0)

  # unknown signal: (F1, L2) was never trained
  queuePair(1, 2)
  net.run(1)
  # check bursting (representation in L2 should be like in a random SP)
  self.assertLessEqual(len(self.getL4PredictedActiveCells(L4Column0)), 3)
  self.assertGreaterEqual(len(self.getL4BurstingCells(L4Column0)), 20 * 7)
  self.assertLessEqual(len(self.getL4PredictedActiveCells(L4Column1)), 3)
  self.assertGreaterEqual(len(self.getL4BurstingCells(L4Column1)), 20 * 7)
def generatePattern(self, max, size):
  """Generates a random feedback pattern.

  Returns a list of `size` distinct cell indices drawn uniformly from
  [0, max).  (`max` shadows the builtin of the same name; the name is kept
  for backward compatibility with existing callers.)
  """
  # random.sample picks without replacement in O(size) instead of
  # shuffling the whole population, and also works on a lazy range()
  # (Python 3), where random.shuffle(range(max)) would raise TypeError.
  return random.sample(range(max), size)
def getL4PredictedCells(self, column):
  """
  Returns the cells in L4 that were predicted at the beginning of the last
  call to 'compute'.
  """
  predicted = column._tm.getPredictedCells()
  return set(predicted)
def getL4PredictedActiveCells(self, column):
  """Returns the predicted active cells in L4."""
  tm = column._tm
  active = set(tm.getActiveCells())
  predicted = set(tm.getPredictedCells())
  return active.intersection(predicted)
def getL4BurstingCells(self, column):
  """Returns the bursting cells in L4 (active but not predicted)."""
  tm = column._tm
  active = set(tm.getActiveCells())
  predicted = set(tm.getPredictedCells())
  return active.difference(predicted)
def getCurrentL2Representation(self, column):
  """Returns the current active representation in a given L2 column."""
  active = column._pooler.activeCells
  return set(active)
# Run the test suite when this module is executed directly.
if __name__ == "__main__":
  unittest.main()
|
subutai/htmresearch
|
tests/frameworks/layers/l2l4_network_creation_test.py
|
Python
|
agpl-3.0
| 40,335
|
#!/usr/bin/env python
# This file is part of Openplotter.
# Copyright (C) 2015 by sailoog <https://github.com/sailoog/openplotter>
#
# Openplotter is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# any later version.
# Openplotter is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Openplotter. If not, see <http://www.gnu.org/licenses/>.
import ConfigParser, os
class Conf:
    """Read/write accessor for the global ~/.openplotter/openplotter.conf.

    get() and set() transparently create missing sections/options (with
    empty values) and persist every change to disk immediately, so callers
    never need to handle ConfigParser errors themselves.
    """
    def __init__(self):
        # Resolve the invoking user's home.  Under sudo, expanduser('~')
        # points at root's home, so fall back to the original user's home.
        # NOTE(review): assumes SUDO_USER is set whenever the home path
        # contains 'root' -- confirm behavior for plain root logins.
        self.home = os.path.expanduser('~')
        if 'root' in self.home:
            self.home = '/home/'+os.path.expanduser(os.environ["SUDO_USER"])
        self.conf_folder = self.home+'/.openplotter'
        self.data_conf = ConfigParser.SafeConfigParser()
        self.read()

    def read(self):
        # (Re)load the file into the in-memory parser.  A missing file is
        # silently ignored by ConfigParser.read().
        self.data_conf.read(self.conf_folder+'/openplotter.conf')

    def write(self):
        # Persist the in-memory state back to disk.
        with open(self.conf_folder+'/openplotter.conf', 'wb') as configfile:
            self.data_conf.write(configfile)

    def get(self, section, item):
        """Return the value of *item* in *section*, or '' if absent.

        Side effect: a missing section/option is created empty and the
        file is rewritten.
        """
        result = ''
        try:
            result = self.data_conf.get(section, item)
        except ConfigParser.NoSectionError:
            # Section missing: re-read in case another process added it,
            # then create the section with an empty option and persist.
            self.read()
            try:
                self.data_conf.add_section(section)
            except ConfigParser.DuplicateSectionError: pass
            self.data_conf.set(section, item, '')
            self.write()
        except ConfigParser.NoOptionError:
            # Option missing: create it empty (set() also writes to disk).
            self.set(section, item, '')
        return result

    def set(self, section, item, value):
        """Set *item* in *section* to *value*, creating the section if
        needed, and write the file to disk."""
        self.read()
        try:
            self.data_conf.set(section, item, value)
        except ConfigParser.NoSectionError:
            self.data_conf.add_section(section)
            self.data_conf.set(section, item, value)
        self.write()

    def has_option(self, section, item):
        # Thin wrapper over ConfigParser.has_option.
        return self.data_conf.has_option(section, item)

    def has_section(self, section):
        # Thin wrapper over ConfigParser.has_section.
        return self.data_conf.has_section(section)

    def add_section(self, section):
        # Thin wrapper over ConfigParser.add_section (raises on duplicate).
        return self.data_conf.add_section(section)
class Conf2:
	"""Like Conf, but backed by an arbitrary file inside ~/.openplotter.

	The file is created with an empty [GENERAL] section if it does not exist.
	"""
	def __init__(self, file):
		self.home = os.path.expanduser('~')
		if 'root' in self.home:
			# Running under sudo: use the invoking user's home directory.
			self.home = '/home/'+os.path.expanduser(os.environ["SUDO_USER"])
		self.conf_file_path = self.home+'/.openplotter/'+file
		self.data_conf = ConfigParser.SafeConfigParser()
		if not os.path.isfile(self.conf_file_path):
			# First use: seed the file so ConfigParser has a valid section.
			with open(self.conf_file_path,'w') as f:
				f.write('[GENERAL]')
		self.read()

	def read(self):
		# Re-load the file from disk into the in-memory parser.
		self.data_conf.read(self.conf_file_path)

	def write(self):
		# Persist the in-memory parser back to disk.
		with open(self.conf_file_path, 'wb') as configfile:
			self.data_conf.write(configfile)

	def get(self, section, item):
		"""Return the option's value, creating section/option (as '') if missing."""
		result = ''
		try:
			result = self.data_conf.get(section, item)
		except ConfigParser.NoSectionError:
			self.read()
			try:
				self.data_conf.add_section(section)
			except ConfigParser.DuplicateSectionError: pass
			self.data_conf.set(section, item, '')
			self.write()
		except ConfigParser.NoOptionError:
			self.set(section, item, '')
		return result

	def set(self, section, item, value):
		"""Store the value, creating the section on demand, and save to disk."""
		self.read()
		try:
			self.data_conf.set(section, item, value)
		except ConfigParser.NoSectionError:
			self.data_conf.add_section(section)
			self.data_conf.set(section, item, value)
		self.write()

	def has_option(self, section, item):
		return self.data_conf.has_option(section, item)

	def has_section(self, section):
		return self.data_conf.has_section(section)

	def add_section(self, section):
		return self.data_conf.add_section(section)
|
sailoog/openplotter
|
classes/conf.py
|
Python
|
gpl-2.0
| 3,587
|
import unittest
from django.http import Http404
from django.core.exceptions import ObjectDoesNotExist
# Ugh. Settings for Django.
from django.conf import settings
settings.configure(DEBUG=True)
from restless.dj import DjangoResource
from restless.exceptions import Unauthorized
from restless.preparers import FieldsPreparer
from restless.resources import skip_prepare
from restless.utils import json
from .fakes import FakeHttpRequest, FakeModel
class DjTestResource(DjangoResource):
    """Fake resource backed by an in-memory list, used to exercise DjangoResource."""
    preparer = FieldsPreparer(fields={
        'id': 'id',
        'title': 'title',
        'author': 'username',
        'body': 'content'
    })
    # Class-level "database"; (re)populated by fake_init().
    fake_db = []

    def __init__(self, *args, **kwargs):
        super(DjTestResource, self).__init__(*args, **kwargs)
        # Expose a non-CRUD 'schema' endpoint in addition to the defaults.
        self.http_methods.update({
            'schema': {
                'GET': 'schema',
            }
        })

    def fake_init(self):
        # Just for testing.
        self.__class__.fake_db = [
            FakeModel(
                id=2,
                title='First post',
                username='daniel',
                content='Hello world!'),
            FakeModel(
                id=4,
                title='Another',
                username='daniel',
                content='Stuff here.'),
            FakeModel(id=5, title='Last', username='daniel', content="G'bye!"),
        ]

    def is_authenticated(self):
        # DELETE is rejected on purpose so tests can exercise 401 handling.
        if self.request_method() == 'DELETE':
            return False
        return True

    def list(self):
        """Return every record in the fake DB."""
        return self.fake_db

    def detail(self, pk):
        """Return the record with the given pk or raise ObjectDoesNotExist."""
        for item in self.fake_db:
            if item.id == pk:
                return item
        # If it wasn't found in our fake DB, raise a Django-esque exception.
        raise ObjectDoesNotExist("Model with pk {0} not found.".format(pk))

    def create(self):
        """Append a new record built from the deserialized request body."""
        self.fake_db.append(FakeModel(
            **self.data
        ))

    def update(self, pk):
        """Copy the request payload onto the record with the given pk."""
        for item in self.fake_db:
            if item.id == pk:
                # BUG FIX: iterate key/value pairs. Iterating the dict itself
                # (`for k, v in self.data`) yields only the keys, so the
                # tuple unpacking raised ValueError for any key != 2 chars.
                for k, v in self.data.items():
                    setattr(item, k, v)
                return

    def create_detail(self):
        # Intentionally broken so tests can check 500-error handling.
        raise ValueError("This is a random & crazy exception.")

    @skip_prepare
    def schema(self):
        # A WILD SCHEMA VIEW APPEARS!
        return {
            'fields': {
                'id': {
                    'type': 'integer',
                    'required': True,
                    'help_text': 'The unique id for the post',
                },
                'title': {
                    'type': 'string',
                    'required': True,
                    'help_text': "The post's title",
                },
                'author': {
                    'type': 'string',
                    'required': True,
                    'help_text': 'The username of the author of the post',
                },
                'body': {
                    'type': 'string',
                    'required': False,
                    'default': '',
                    'help_text': 'The content of the post',
                }
            },
            'format': 'application/json',
            'allowed_list_http_methods': ['GET', 'POST'],
            'allowed_detail_http_methods': ['GET', 'PUT', 'DELETE'],
        }
class DjTestResourceHttp404Handling(DjTestResource):
    """Variant of DjTestResource that raises Http404 on a missing record."""

    def detail(self, pk):
        """Return the record with the given pk or raise Http404."""
        match = next((rec for rec in self.fake_db if rec.id == pk), None)
        if match is not None:
            return match
        # Not in the fake DB: raise the Django HTTP-level exception instead
        # of the ORM-level ObjectDoesNotExist used by the parent class.
        raise Http404("Model with pk {0} not found.".format(pk))
class DjangoResourceTestCase(unittest.TestCase):
    """End-to-end tests for DjangoResource using FakeHttpRequest/FakeModel."""

    def setUp(self):
        super(DjangoResourceTestCase, self).setUp()
        self.res = DjTestResource()
        # Just for the fake data.
        self.res.fake_init()

    def test_as_list(self):
        # GET on the list endpoint returns all records, serialized via preparer.
        list_endpoint = DjTestResource.as_list()
        req = FakeHttpRequest('GET')
        resp = list_endpoint(req)
        self.assertEqual(resp['Content-Type'], 'application/json')
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(json.loads(resp.content.decode('utf-8')), {
            'objects': [
                {
                    'author': 'daniel',
                    'body': 'Hello world!',
                    'id': 2,
                    'title': 'First post'
                },
                {
                    'author': 'daniel',
                    'body': 'Stuff here.',
                    'id': 4,
                    'title': 'Another'
                },
                {
                    'author': 'daniel',
                    'body': "G'bye!",
                    'id': 5,
                    'title': 'Last'
                }
            ]
        })

    def test_as_detail(self):
        # GET on the detail endpoint returns the single matching record.
        detail_endpoint = DjTestResource.as_detail()
        req = FakeHttpRequest('GET')
        resp = detail_endpoint(req, 4)
        self.assertEqual(resp['Content-Type'], 'application/json')
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(json.loads(resp.content.decode('utf-8')), {
            'author': 'daniel',
            'body': 'Stuff here.',
            'id': 4,
            'title': 'Another'
        })

    def test_as_view(self):
        # This would be hooked up via the URLconf...
        schema_endpoint = DjTestResource.as_view('schema')
        req = FakeHttpRequest('GET')
        resp = schema_endpoint(req)
        self.assertEqual(resp['Content-Type'], 'application/json')
        self.assertEqual(resp.status_code, 200)
        schema = json.loads(resp.content.decode('utf-8'))
        self.assertEqual(
            sorted(list(schema['fields'].keys())),
            [
                'author',
                'body',
                'id',
                'title',
            ]
        )
        self.assertEqual(schema['fields']['id']['type'], 'integer')
        self.assertEqual(schema['format'], 'application/json')

    def test_handle_not_implemented(self):
        # Unknown HTTP verbs must produce a 501 with an explanatory error.
        self.res.request = FakeHttpRequest('TRACE')
        resp = self.res.handle('list')
        self.assertEqual(resp['Content-Type'], 'application/json')
        self.assertEqual(resp.status_code, 501)
        resp_json = json.loads(resp.content.decode('utf-8'))
        self.assertEqual(
            resp_json['error'], "Unsupported method 'TRACE' for list endpoint.")
        self.assertTrue('traceback' in resp_json)

    def test_handle_not_authenticated(self):
        # Special-cased above for testing.
        self.res.request = FakeHttpRequest('DELETE')
        # First with DEBUG on
        resp = self.res.handle('list')
        self.assertEqual(resp['Content-Type'], 'application/json')
        self.assertEqual(resp.status_code, 401)
        resp_json = json.loads(resp.content.decode('utf-8'))
        self.assertEqual(resp_json['error'], 'Unauthorized.')
        self.assertTrue('traceback' in resp_json)
        # Now with DEBUG off.
        settings.DEBUG = False
        self.addCleanup(setattr, settings, 'DEBUG', True)
        resp = self.res.handle('list')
        self.assertEqual(resp['Content-Type'], 'application/json')
        self.assertEqual(resp.status_code, 401)
        resp_json = json.loads(resp.content.decode('utf-8'))
        self.assertEqual(resp_json, {
            'error': 'Unauthorized.',
        })
        self.assertFalse('traceback' in resp_json)
        # Last, with bubble_exceptions.
        class Bubbly(DjTestResource):
            def bubble_exceptions(self):
                return True
        with self.assertRaises(Unauthorized):
            bubb = Bubbly()
            bubb.request = FakeHttpRequest('DELETE')
            bubb.handle('list')

    def test_handle_build_err(self):
        # Special-cased above for testing.
        self.res.request = FakeHttpRequest('POST')
        settings.DEBUG = False
        self.addCleanup(setattr, settings, 'DEBUG', True)
        resp = self.res.handle('detail')
        self.assertEqual(resp['Content-Type'], 'application/json')
        self.assertEqual(resp.status_code, 500)
        self.assertEqual(json.loads(resp.content.decode('utf-8')), {
            'error': 'This is a random & crazy exception.'
        })

    def test_object_does_not_exist(self):
        # Make sure we get a proper Not Found exception rather than a
        # generic 500, when code raises a ObjectDoesNotExist exception.
        self.res.request = FakeHttpRequest('GET')
        settings.DEBUG = False
        self.addCleanup(setattr, settings, 'DEBUG', True)
        resp = self.res.handle('detail', 1001)
        self.assertEqual(resp['Content-Type'], 'application/json')
        self.assertEqual(resp.status_code, 404)
        self.assertEqual(json.loads(resp.content.decode('utf-8')), {
            'error': 'Model with pk 1001 not found.'
        })

    def test_http404_exception_handling(self):
        # Make sure we get a proper Not Found exception rather than a
        # generic 500, when code raises a Http404 exception.
        res = DjTestResourceHttp404Handling()
        res.request = FakeHttpRequest('GET')
        settings.DEBUG = False
        self.addCleanup(setattr, settings, 'DEBUG', True)
        resp = res.handle('detail', 1001)
        self.assertEqual(resp['Content-Type'], 'application/json')
        self.assertEqual(resp.status_code, 404)
        self.assertEqual(json.loads(resp.content.decode('utf-8')), {
            'error': 'Model with pk 1001 not found.'
        })

    def test_build_url_name(self):
        # Default names derive from the resource class name...
        self.assertEqual(
            DjTestResource.build_url_name('list'),
            'api_djtest_list'
        )
        self.assertEqual(
            DjTestResource.build_url_name('detail'),
            'api_djtest_detail'
        )
        self.assertEqual(
            DjTestResource.build_url_name('schema'),
            'api_djtest_schema'
        )
        # ...but an explicit prefix overrides that derivation entirely.
        self.assertEqual(
            DjTestResource.build_url_name('list', name_prefix='v2_'),
            'v2_list'
        )
        self.assertEqual(
            DjTestResource.build_url_name('detail', name_prefix='v2_'),
            'v2_detail'
        )
        self.assertEqual(
            DjTestResource.build_url_name('schema', name_prefix='v2_'),
            'v2_schema'
        )

    def test_urls(self):
        # urls() emits one list pattern and one detail pattern.
        patterns = DjTestResource.urls()
        self.assertEqual(len(patterns), 2)
        self.assertEqual(patterns[0].name, 'api_djtest_list')
        self.assertEqual(patterns[1].name, 'api_djtest_detail')
        patterns = DjTestResource.urls(name_prefix='v2_tests')
        self.assertEqual(len(patterns), 2)
        self.assertEqual(patterns[0].name, 'v2_tests_list')
        self.assertEqual(patterns[1].name, 'v2_tests_detail')

    def test_create(self):
        # POST body is deserialized into self.data and appended to fake_db.
        self.res.request = FakeHttpRequest(
            'POST',
            body='{"id": 6, "title": "Moved hosts", "author": "daniel"}')
        self.assertEqual(len(self.res.fake_db), 3)
        resp = self.res.handle('list')
        self.assertEqual(resp['Content-Type'], 'application/json')
        self.assertEqual(resp.status_code, 201)
        self.assertEqual(resp.content.decode('utf-8'), '')
        # Check the internal state.
        self.assertEqual(len(self.res.fake_db), 4)
        self.assertEqual(self.res.data, {
            'author': 'daniel',
            'id': 6,
            'title': 'Moved hosts'
        })
|
viniciuscainelli/restless
|
tests/test_dj.py
|
Python
|
bsd-3-clause
| 11,425
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from odoo import fields, osv, models, api
class MercadolibreBanner(models.Model):
    """Reusable HTML banner attached to MercadoLibre listing descriptions."""
    _name = "mercadolibre.banner"
    _description = "Banners for MercadoLibre descriptions"
    name = fields.Char('Name')  # banner label shown in the backend
    description = fields.Html(string='Description')  # HTML body of the banner
|
ctmil/meli_oerp
|
models/banner.py
|
Python
|
agpl-3.0
| 1,268
|
"""mapa URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from rest_framework import routers
from cronotacografo.views import RegistroViewSet
# Register the Registro endpoint under /api/registro/ via a DRF router.
router = routers.SimpleRouter()
router.register(r'registro', RegistroViewSet)
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    # API endpoints
    url(r'^api/', include(router.urls)),
    # API authentication (browsable API login/logout)
    url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
laurybueno/MoniBus
|
mapa/urls.py
|
Python
|
agpl-3.0
| 1,112
|
# Copyright 2011 OpenStack Foundation.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""OpenStack logging handler.
This module adds to logging functionality by adding the option to specify
a context object when calling the various log methods. If the context object
is not specified, default formatting is used. Additionally, an instance uuid
may be passed as part of the log message, which is intended to make it easier
for admins to find messages related to a specific instance.
It also allows setting of formatting information through conf.
"""
import inspect
import itertools
import logging
import logging.config
import logging.handlers
import os
import sys
import traceback
from oslo.config import cfg
import six
from six import moves
from cdn.openstack.common.gettextutils import _
from cdn.openstack.common import importutils
from cdn.openstack.common import jsonutils
from cdn.openstack.common import local
# NOTE(flaper87): Pls, remove when graduating this module
# from the incubator.
from cdn.openstack.common.strutils import mask_password # noqa
_DEFAULT_LOG_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
common_cli_opts = [
cfg.BoolOpt('debug',
short='d',
default=False,
help='Print debugging output (set logging level to '
'DEBUG instead of default WARNING level).'),
cfg.BoolOpt('verbose',
short='v',
default=False,
help='Print more verbose output (set logging level to '
'INFO instead of default WARNING level).'),
]
logging_cli_opts = [
cfg.StrOpt('log-config-append',
metavar='PATH',
deprecated_name='log-config',
help='The name of a logging configuration file. This file '
'is appended to any existing logging configuration '
'files. For details about logging configuration files, '
'see the Python logging module documentation.'),
cfg.StrOpt('log-format',
metavar='FORMAT',
help='DEPRECATED. '
'A logging.Formatter log message format string which may '
'use any of the available logging.LogRecord attributes. '
'This option is deprecated. Please use '
'logging_context_format_string and '
'logging_default_format_string instead.'),
cfg.StrOpt('log-date-format',
default=_DEFAULT_LOG_DATE_FORMAT,
metavar='DATE_FORMAT',
help='Format string for %%(asctime)s in log records. '
'Default: %(default)s .'),
cfg.StrOpt('log-file',
metavar='PATH',
deprecated_name='logfile',
help='(Optional) Name of log file to output to. '
'If no default is set, logging will go to stdout.'),
cfg.StrOpt('log-dir',
deprecated_name='logdir',
help='(Optional) The base directory used for relative '
'--log-file paths.'),
cfg.BoolOpt('use-syslog',
default=False,
help='Use syslog for logging. '
'Existing syslog format is DEPRECATED during I, '
'and will change in J to honor RFC5424.'),
cfg.BoolOpt('use-syslog-rfc-format',
# TODO(bogdando) remove or use True after existing
# syslog format deprecation in J
default=False,
help='(Optional) Enables or disables syslog rfc5424 format '
'for logging. If enabled, prefixes the MSG part of the '
'syslog message with APP-NAME (RFC5424). The '
'format without the APP-NAME is deprecated in I, '
'and will be removed in J.'),
cfg.StrOpt('syslog-log-facility',
default='LOG_USER',
help='Syslog facility to receive log lines.')
]
generic_log_opts = [
cfg.BoolOpt('use_stderr',
default=True,
help='Log output to standard error.')
]
DEFAULT_LOG_LEVELS = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN',
'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO',
'oslo.messaging=INFO', 'iso8601=WARN',
'requests.packages.urllib3.connectionpool=WARN',
'urllib3.connectionpool=WARN', 'websocket=WARN']
log_opts = [
cfg.StrOpt('logging_context_format_string',
default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
'%(name)s [%(request_id)s %(user_identity)s] '
'%(instance)s%(message)s',
help='Format string to use for log messages with context.'),
cfg.StrOpt('logging_default_format_string',
default='%(asctime)s.%(msecs)03d %(process)d %(levelname)s '
'%(name)s [-] %(instance)s%(message)s',
help='Format string to use for log messages without context.'),
cfg.StrOpt('logging_debug_format_suffix',
default='%(funcName)s %(pathname)s:%(lineno)d',
help='Data to append to log format when level is DEBUG.'),
cfg.StrOpt('logging_exception_prefix',
default='%(asctime)s.%(msecs)03d %(process)d TRACE %(name)s '
'%(instance)s',
help='Prefix each line of exception output with this format.'),
cfg.ListOpt('default_log_levels',
default=DEFAULT_LOG_LEVELS,
help='List of logger=LEVEL pairs.'),
cfg.BoolOpt('publish_errors',
default=False,
help='Enables or disables publication of error events.'),
cfg.BoolOpt('fatal_deprecations',
default=False,
help='Enables or disables fatal status of deprecations.'),
# NOTE(mikal): there are two options here because sometimes we are handed
# a full instance (and could include more information), and other times we
# are just handed a UUID for the instance.
cfg.StrOpt('instance_format',
default='[instance: %(uuid)s] ',
help='The format for an instance that is passed with the log '
'message.'),
cfg.StrOpt('instance_uuid_format',
default='[instance: %(uuid)s] ',
help='The format for an instance UUID that is passed with the '
'log message.'),
]
CONF = cfg.CONF
CONF.register_cli_opts(common_cli_opts)
CONF.register_cli_opts(logging_cli_opts)
CONF.register_opts(generic_log_opts)
CONF.register_opts(log_opts)
# our new audit level
# NOTE(jkoelker) Since we synthesized an audit level, make the logging
#                module aware of it so it acts like other levels.
logging.AUDIT = logging.INFO + 1
logging.addLevelName(logging.AUDIT, 'AUDIT')


try:
    NullHandler = logging.NullHandler
except AttributeError:  # NOTE(jkoelker) NullHandler added in Python 2.7
    # Fallback no-op handler for Python 2.6.
    class NullHandler(logging.Handler):
        def handle(self, record):
            pass

        def emit(self, record):
            pass

        def createLock(self):
            # No locking is needed for a handler that does nothing.
            self.lock = None
def _dictify_context(context):
if context is None:
return None
if not isinstance(context, dict) and getattr(context, 'to_dict', None):
context = context.to_dict()
return context
def _get_binary_name():
return os.path.basename(inspect.stack()[-1][1])
def _get_log_file_path(binary=None):
    """Resolve the log destination from CONF.log_file / CONF.log_dir.

    Returns None when neither option is set (logging goes to stdout/stderr).
    """
    logfile = CONF.log_file
    logdir = CONF.log_dir
    if logfile:
        # A directory, if set, anchors a relative log file path.
        return os.path.join(logdir, logfile) if logdir else logfile
    if logdir:
        name = binary or _get_binary_name()
        return '%s.log' % (os.path.join(logdir, name),)
    return None
class BaseLoggerAdapter(logging.LoggerAdapter):
    """LoggerAdapter base that adds an audit() convenience method."""

    def audit(self, msg, *args, **kwargs):
        # Log at the synthesized AUDIT level (INFO + 1, set at module import).
        self.log(logging.AUDIT, msg, *args, **kwargs)
class LazyAdapter(BaseLoggerAdapter):
    """Adapter that defers creating the real logger until first use."""
    def __init__(self, name='unknown', version='unknown'):
        self._logger = None  # created lazily by the `logger` property
        self.extra = {}
        self.name = name
        self.version = version

    @property
    def logger(self):
        # Instantiate the real ContextAdapter on first access only.
        if not self._logger:
            self._logger = getLogger(self.name, self.version)
            if six.PY3:
                # In Python 3, the code fails because the 'manager' attribute
                # cannot be found when using a LoggerAdapter as the
                # underlying logger. Work around this issue.
                self._logger.manager = self._logger.logger.manager
        return self._logger
class ContextAdapter(BaseLoggerAdapter):
    """LoggerAdapter that injects request-context fields into log records."""
    warn = logging.LoggerAdapter.warning  # alias for callers using .warn()

    def __init__(self, logger, project_name, version_string):
        self.logger = logger
        self.project = project_name
        self.version = version_string
        # Tracks which (msg, args) deprecation warnings were already emitted.
        self._deprecated_messages_sent = dict()

    @property
    def handlers(self):
        # Expose the wrapped logger's handlers.
        return self.logger.handlers

    def deprecated(self, msg, *args, **kwargs):
        """Call this method when a deprecated feature is used.
        If the system is configured for fatal deprecations then the message
        is logged at the 'critical' level and :class:`DeprecatedConfig` will
        be raised.
        Otherwise, the message will be logged (once) at the 'warn' level.
        :raises: :class:`DeprecatedConfig` if the system is configured for
                 fatal deprecations.
        """
        stdmsg = _("Deprecated: %s") % msg
        if CONF.fatal_deprecations:
            self.critical(stdmsg, *args, **kwargs)
            raise DeprecatedConfig(msg=stdmsg)
        # Using a list because a tuple with dict can't be stored in a set.
        sent_args = self._deprecated_messages_sent.setdefault(msg, list())
        if args in sent_args:
            # Already logged this message, so don't log it again.
            return
        sent_args.append(args)
        self.warn(stdmsg, *args, **kwargs)

    def process(self, msg, kwargs):
        """Merge context, instance and project info into kwargs['extra']."""
        # NOTE(mrodden): catch any Message/other object and
        #                coerce to unicode before they can get
        #                to the python logging and possibly
        #                cause string encoding trouble
        if not isinstance(msg, six.string_types):
            msg = six.text_type(msg)
        if 'extra' not in kwargs:
            kwargs['extra'] = {}
        extra = kwargs['extra']
        context = kwargs.pop('context', None)
        if not context:
            # Fall back to the context stored in thread-local storage.
            context = getattr(local.store, 'context', None)
        if context:
            extra.update(_dictify_context(context))
        instance = kwargs.pop('instance', None)
        instance_uuid = (extra.get('instance_uuid') or
                         kwargs.pop('instance_uuid', None))
        instance_extra = ''
        if instance:
            instance_extra = CONF.instance_format % instance
        elif instance_uuid:
            instance_extra = (CONF.instance_uuid_format
                              % {'uuid': instance_uuid})
        extra['instance'] = instance_extra
        extra.setdefault('user_identity', kwargs.pop('user_identity', None))
        extra['project'] = self.project
        extra['version'] = self.version
        # Snapshot the whole dict so formatters can access it as record.extra.
        extra['extra'] = extra.copy()
        return msg, kwargs
class JSONFormatter(logging.Formatter):
    """Formatter that emits each log record as a JSON document."""
    def __init__(self, fmt=None, datefmt=None):
        # NOTE(jkoelker) we ignore the fmt argument, but its still there
        #                since logging.config.fileConfig passes it.
        self.datefmt = datefmt

    def formatException(self, ei, strip_newlines=True):
        """Return the traceback as a list of lines (newlines stripped)."""
        lines = traceback.format_exception(*ei)
        if strip_newlines:
            lines = [moves.filter(
                lambda x: x,
                line.rstrip().splitlines()) for line in lines]
            lines = list(itertools.chain(*lines))
        return lines

    def format(self, record):
        """Serialize every LogRecord attribute of interest into JSON."""
        message = {'message': record.getMessage(),
                   'asctime': self.formatTime(record, self.datefmt),
                   'name': record.name,
                   'msg': record.msg,
                   'args': record.args,
                   'levelname': record.levelname,
                   'levelno': record.levelno,
                   'pathname': record.pathname,
                   'filename': record.filename,
                   'module': record.module,
                   'lineno': record.lineno,
                   'funcname': record.funcName,
                   'created': record.created,
                   'msecs': record.msecs,
                   'relative_created': record.relativeCreated,
                   'thread': record.thread,
                   'thread_name': record.threadName,
                   'process_name': record.processName,
                   'process': record.process,
                   'traceback': None}
        if hasattr(record, 'extra'):
            # Set by ContextAdapter.process(); carries context fields.
            message['extra'] = record.extra
        if record.exc_info:
            message['traceback'] = self.formatException(record.exc_info)
        return jsonutils.dumps(message)
def _create_logging_excepthook(product_name):
    """Build a sys.excepthook that logs uncaught exceptions as critical."""
    def logging_excepthook(exc_type, value, tb):
        # Pass the full exc_info through `extra` so handlers can format it.
        extra = {'exc_info': (exc_type, value, tb)}
        getLogger(product_name).critical(
            "".join(traceback.format_exception_only(exc_type, value)),
            **extra)
    return logging_excepthook
class LogConfigError(Exception):
    """Raised when a logging config file cannot be loaded."""

    message = _('Error loading logging config %(log_config)s: %(err_msg)s')

    def __init__(self, log_config, err_msg):
        self.log_config = log_config
        self.err_msg = err_msg

    def __str__(self):
        return self.message % dict(log_config=self.log_config,
                                   err_msg=self.err_msg)
def _load_log_config(log_config_append):
    """Load a fileConfig-style logging config, wrapping parse errors.

    :raises LogConfigError: if the file cannot be parsed.
    """
    try:
        logging.config.fileConfig(log_config_append,
                                  disable_existing_loggers=False)
    except (moves.configparser.Error, KeyError) as exc:
        raise LogConfigError(log_config_append, six.text_type(exc))
def setup(product_name, version='unknown'):
    """Setup logging."""
    if CONF.log_config_append:
        # An explicit config file wins over all individual CONF options.
        _load_log_config(CONF.log_config_append)
    else:
        _setup_logging_from_conf(product_name, version)
    # Route uncaught exceptions through the logging system.
    sys.excepthook = _create_logging_excepthook(product_name)
def set_defaults(logging_context_format_string=None,
                 default_log_levels=None):
    """Override the registered defaults of the logging options."""
    # Just in case the caller is not setting the
    # default_log_level. This is insurance because
    # we introduced the default_log_level parameter
    # later in a backwards in-compatible change
    if default_log_levels is not None:
        cfg.set_defaults(
            log_opts,
            default_log_levels=default_log_levels)
    if logging_context_format_string is not None:
        cfg.set_defaults(
            log_opts,
            logging_context_format_string=logging_context_format_string)
def _find_facility_from_conf():
    """Map CONF.syslog_log_facility to a SysLogHandler facility code.

    Accepts either a SysLogHandler attribute name or an entry from
    SysLogHandler.facility_names.

    :raises TypeError: if the configured facility name is unknown.
    """
    facility_names = logging.handlers.SysLogHandler.facility_names
    facility = getattr(logging.handlers.SysLogHandler,
                       CONF.syslog_log_facility,
                       None)
    if facility is None and CONF.syslog_log_facility in facility_names:
        facility = facility_names.get(CONF.syslog_log_facility)
    if facility is None:
        # BUG FIX: dict.keys() returns a view on Python 3 which has no
        # .extend(); materialize it as a list before appending the LOG_*
        # constants (this module targets both Py2 and Py3 via six).
        valid_facilities = list(facility_names.keys())
        consts = ['LOG_AUTH', 'LOG_AUTHPRIV', 'LOG_CRON', 'LOG_DAEMON',
                  'LOG_FTP', 'LOG_KERN', 'LOG_LPR', 'LOG_MAIL', 'LOG_NEWS',
                  'LOG_AUTH', 'LOG_SYSLOG', 'LOG_USER', 'LOG_UUCP',
                  'LOG_LOCAL0', 'LOG_LOCAL1', 'LOG_LOCAL2', 'LOG_LOCAL3',
                  'LOG_LOCAL4', 'LOG_LOCAL5', 'LOG_LOCAL6', 'LOG_LOCAL7']
        valid_facilities.extend(consts)
        raise TypeError(_('syslog facility must be one of: %s') %
                        ', '.join("'%s'" % fac
                                  for fac in valid_facilities))
    return facility
class RFCSysLogHandler(logging.handlers.SysLogHandler):
    """SysLogHandler that prefixes messages with APP-NAME per RFC 5424."""

    def __init__(self, *args, **kwargs):
        self.binary_name = _get_binary_name()
        # Do not use super() unless type(logging.handlers.SysLogHandler)
        #  is 'type' (Python 2.7).
        # Use old style calls, if the type is 'classobj' (Python 2.6)
        logging.handlers.SysLogHandler.__init__(self, *args, **kwargs)

    def format(self, record):
        # Do not use super() unless type(logging.handlers.SysLogHandler)
        #  is 'type' (Python 2.7).
        # Use old style calls, if the type is 'classobj' (Python 2.6)
        msg = logging.handlers.SysLogHandler.format(self, record)
        # Prepend the process name (RFC 5424 APP-NAME field).
        msg = self.binary_name + ' ' + msg
        return msg
def _setup_logging_from_conf(project, version):
    """Configure the root logger's handlers, formatters and levels from CONF."""
    log_root = getLogger(None).logger
    # Start from a clean slate so repeated setup() calls don't stack handlers.
    for handler in log_root.handlers:
        log_root.removeHandler(handler)
    if CONF.use_syslog:
        facility = _find_facility_from_conf()
        # TODO(bogdando) use the format provided by RFCSysLogHandler
        #                after existing syslog format deprecation in J
        if CONF.use_syslog_rfc_format:
            syslog = RFCSysLogHandler(address='/dev/log',
                                      facility=facility)
        else:
            syslog = logging.handlers.SysLogHandler(address='/dev/log',
                                                    facility=facility)
        log_root.addHandler(syslog)
    logpath = _get_log_file_path()
    if logpath:
        filelog = logging.handlers.WatchedFileHandler(logpath)
        log_root.addHandler(filelog)
    if CONF.use_stderr:
        streamlog = ColorHandler()
        log_root.addHandler(streamlog)
    elif not logpath:
        # pass sys.stdout as a positional argument
        # python2.6 calls the argument strm, in 2.7 it's stream
        streamlog = logging.StreamHandler(sys.stdout)
        log_root.addHandler(streamlog)
    if CONF.publish_errors:
        # Prefer the incubator handler; fall back to oslo.messaging's copy.
        try:
            handler = importutils.import_object(
                "cdn.openstack.common.log_handler.PublishErrorsHandler",
                logging.ERROR)
        except ImportError:
            handler = importutils.import_object(
                "oslo.messaging.notify.log_handler.PublishErrorsHandler",
                logging.ERROR)
        log_root.addHandler(handler)
    datefmt = CONF.log_date_format
    for handler in log_root.handlers:
        # NOTE(alaski): CONF.log_format overrides everything currently. This
        # should be deprecated in favor of context aware formatting.
        if CONF.log_format:
            handler.setFormatter(logging.Formatter(fmt=CONF.log_format,
                                                   datefmt=datefmt))
            log_root.info('Deprecated: log_format is now deprecated and will '
                          'be removed in the next release')
        else:
            handler.setFormatter(ContextFormatter(project=project,
                                                  version=version,
                                                  datefmt=datefmt))
    if CONF.debug:
        log_root.setLevel(logging.DEBUG)
    elif CONF.verbose:
        log_root.setLevel(logging.INFO)
    else:
        log_root.setLevel(logging.WARNING)
    # Apply per-module level overrides like 'amqp=WARN'.
    for pair in CONF.default_log_levels:
        mod, _sep, level_name = pair.partition('=')
        logger = logging.getLogger(mod)
        # NOTE(AAzza) in python2.6 Logger.setLevel doesn't convert string name
        # to integer code.
        if sys.version_info < (2, 7):
            level = logging.getLevelName(level_name)
            logger.setLevel(level)
        else:
            logger.setLevel(level_name)
# Cache of ContextAdapter instances keyed by logger name.
_loggers = {}


def getLogger(name='unknown', version='unknown'):
    """Return a cached ContextAdapter wrapping logging.getLogger(name)."""
    if name not in _loggers:
        _loggers[name] = ContextAdapter(logging.getLogger(name),
                                        name,
                                        version)
    return _loggers[name]
def getLazyLogger(name='unknown', version='unknown'):
    """Returns lazy logger.
    Creates a pass-through logger that does not create the real logger
    until it is really needed and delegates all calls to the real logger
    once it is created.
    """
    return LazyAdapter(name, version)
class WritableLogger(object):
    """A thin wrapper that responds to `write` and logs."""

    def __init__(self, logger, level=logging.INFO):
        self.logger = logger
        self.level = level

    def write(self, msg):
        # Drop the trailing newline that file-style writers append.
        stripped = msg.rstrip()
        self.logger.log(self.level, stripped)
class ContextFormatter(logging.Formatter):
    """A context.RequestContext aware formatter configured through flags.
    The flags used to set format strings are: logging_context_format_string
    and logging_default_format_string. You can also specify
    logging_debug_format_suffix to append extra formatting if the log level is
    debug.
    For information about what variables are available for the formatter see:
    http://docs.python.org/library/logging.html#formatter
    If available, uses the context value stored in TLS - local.store.context
    """

    def __init__(self, *args, **kwargs):
        """Initialize ContextFormatter instance
        Takes additional keyword arguments which can be used in the message
        format string.
        :keyword project: project name
        :type project: string
        :keyword version: project version
        :type version: string
        """
        self.project = kwargs.pop('project', 'unknown')
        self.version = kwargs.pop('version', 'unknown')
        logging.Formatter.__init__(self, *args, **kwargs)

    def format(self, record):
        """Uses contextstring if request_id is set, otherwise default."""
        # store project info
        record.project = self.project
        record.version = self.version
        # store request info
        context = getattr(local.store, 'context', None)
        if context:
            d = _dictify_context(context)
            for k, v in d.items():
                setattr(record, k, v)
        # NOTE(sdague): default the fancier formatting params
        #               to an empty string so we don't throw an exception if
        #               they get used
        for key in ('instance', 'color', 'user_identity'):
            if key not in record.__dict__:
                record.__dict__[key] = ''
        if record.__dict__.get('request_id'):
            fmt = CONF.logging_context_format_string
        else:
            fmt = CONF.logging_default_format_string
        if (record.levelno == logging.DEBUG and
                CONF.logging_debug_format_suffix):
            fmt += " " + CONF.logging_debug_format_suffix
        if sys.version_info < (3, 2):
            self._fmt = fmt
        else:
            # Python >= 3.2 formats via a style object, so rebuild it too.
            self._style = logging.PercentStyle(fmt)
            self._fmt = self._style._fmt
        # Cache this on the record, Logger will respect our formatted copy
        if record.exc_info:
            record.exc_text = self.formatException(record.exc_info, record)
        return logging.Formatter.format(self, record)

    def formatException(self, exc_info, record=None):
        """Format exception output with CONF.logging_exception_prefix."""
        if not record:
            return logging.Formatter.formatException(self, exc_info)
        stringbuffer = moves.StringIO()
        traceback.print_exception(exc_info[0], exc_info[1], exc_info[2],
                                  None, stringbuffer)
        lines = stringbuffer.getvalue().split('\n')
        stringbuffer.close()
        if CONF.logging_exception_prefix.find('%(asctime)') != -1:
            # The prefix references %(asctime)s, so make sure it is set.
            record.asctime = self.formatTime(record, self.datefmt)
        formatted_lines = []
        for line in lines:
            pl = CONF.logging_exception_prefix % record.__dict__
            fl = '%s%s' % (pl, line)
            formatted_lines.append(fl)
        return '\n'.join(formatted_lines)
class ColorHandler(logging.StreamHandler):
    # Stream handler that exposes an ANSI color escape for the record's
    # severity as %(color)s in the format string.
    # NOTE(review): logging.AUDIT is a custom level registered elsewhere in
    # this module, not part of the stdlib logging package.
    LEVEL_COLORS = {
        logging.DEBUG: '\033[00;32m',  # GREEN
        logging.INFO: '\033[00;36m',  # CYAN
        logging.AUDIT: '\033[01;36m',  # BOLD CYAN
        logging.WARN: '\033[01;33m',  # BOLD YELLOW
        logging.ERROR: '\033[01;31m',  # BOLD RED
        logging.CRITICAL: '\033[01;31m',  # BOLD RED
    }

    def format(self, record):
        """Attach the ANSI color for this record's level, then format."""
        record.color = self.LEVEL_COLORS[record.levelno]
        return logging.StreamHandler.format(self, record)
class DeprecatedConfig(Exception):
    """Raised on a fatal use of a deprecated config option."""

    message = _("Fatal call to deprecated config: %(msg)s")

    def __init__(self, msg):
        # Name our own class in super(): the original passed Exception,
        # which starts the MRO lookup *after* Exception (i.e. at
        # BaseException) and silently breaks under further subclassing.
        super(DeprecatedConfig, self).__init__(self.message % dict(msg=msg))
|
obulpathi/cdn1
|
cdn/openstack/common/log.py
|
Python
|
apache-2.0
| 25,481
|
import sys

# Print the contents of every file named on the command line (like "cat").
if len(sys.argv) < 2:
    print("\nUso: e09-09.py arquivo1 arquivo2 arquivo3 arquivoN\n")
    sys.exit()

for nome in sys.argv[1:]:
    # "with" guarantees the file is closed even if reading raises,
    # unlike the original explicit open()/close() pair.
    with open(nome, "r") as arquivo:
        for linha in arquivo:
            print(linha, end="")
|
laenderoliveira/exerclivropy
|
cap09/exercicio-09-09.py
|
Python
|
mit
| 248
|
'''
multi-processing abstraction
This wraps the multiprocessing module, using billiard on MacOS
and multiprocessing on Linux and Windows
The key problem on MacOS is that you can't fork in any process that uses
threading, which covers almost all processes, since so many libraries use
threads. So instead billiard uses an approach that uses fork+exec and re-runs
the script in the child. It is horrible, but it seems to be the only way to
make things work on MacOS
'''
class PipeQueue(object):
    '''simulate a queue using a pipe. This is used to avoid a problem with
    pipes on MacOS, while still keeping similar syntax'''

    def __init__(self):
        self.sender, self.receiver = Pipe()
        self.alive = True
        self.pending = []

    def close(self):
        '''mark the queue dead and close both pipe ends'''
        self.alive = False
        self.sender.close()
        self.receiver.close()

    def put(self, *args):
        '''send a message; a broken pipe silently closes the queue'''
        if not self.alive:
            return
        try:
            self.sender.send(*args)
        except Exception:
            self.close()

    def fill(self):
        '''drain every message currently readable from the pipe into
        the local pending list'''
        if not self.alive:
            return
        while self.receiver.poll():
            try:
                self.pending.append(self.receiver.recv())
            except Exception:
                self.close()
                break

    def get(self):
        '''return the oldest buffered message, or None when nothing is
        available or the queue is closed'''
        if not self.alive:
            return None
        self.fill()
        return self.pending.pop(0) if self.pending else None

    def qsize(self):
        '''number of messages currently buffered locally'''
        self.fill()
        return len(self.pending)

    def empty(self):
        '''True when no message is buffered'''
        return self.qsize() == 0
import platform, os, sys

# we use billiard (and forking disable) on MacOS, and also if USE_BILLIARD environment
# is set. Using USE_BILLIARD allows for debugging of the crazy forking disable approach on
# a saner platform
# As of Python 3.8 the default start method for macOS is spawn and billiard is not required.
if ((platform.system() == 'Darwin' or os.environ.get('USE_BILLIARD',None) is not None)
        and sys.version_info < (3, 8)):
    from billiard import Process, forking_enable, freeze_support, Pipe, Semaphore, Event, Lock
    forking_enable(False)
    # billiard's queues are problematic here, so substitute the
    # PipeQueue shim defined above for Queue.
    Queue = PipeQueue
else:
    # Linux/Windows, or macOS on 3.8+: plain stdlib multiprocessing.
    from multiprocessing import Process, freeze_support, Pipe, Semaphore, Event, Lock, Queue
|
ArduPilot/MAVProxy
|
MAVProxy/modules/lib/multiproc.py
|
Python
|
gpl-3.0
| 2,301
|
from collections import defaultdict
import rest_framework_filters as filters
from rest_framework import viewsets
from rest_framework.response import Response
from rest_framework.status import (HTTP_200_OK,
HTTP_400_BAD_REQUEST,
HTTP_404_NOT_FOUND)
from treeherder.model.derived import JobsModel
from treeherder.model.models import TextLogSummaryLine
from treeherder.webapp.api import (pagination,
serializers)
class TextLogSummaryLineFilter(filters.FilterSet):
    """Query-string filtering for summary lines (``?bug_number=...``)."""

    class Meta(object):
        model = TextLogSummaryLine
        fields = ["bug_number"]
class TextLogSummaryLineViewSet(viewsets.ModelViewSet):
    """CRUD endpoint for text log summary lines plus bulk bug updates."""

    serializer_class = serializers.TextLogSummaryLineSerializer
    queryset = TextLogSummaryLine.objects.all()
    filter_class = TextLogSummaryLineFilter
    pagination_class = pagination.IdPagination

    def _update(self, data, user, many=False):
        """Validate and apply bug_number/verified updates.

        ``data`` is a list of dicts each carrying ``id`` and
        ``bug_number`` (optionally ``verified``).  Returns a
        ``(body, status)`` pair ready for a ``Response``.
        """
        line_ids = []

        for item in data:
            line_id = item.get("id")
            if line_id is None:
                return "No id provided", HTTP_400_BAD_REQUEST
            line_ids.append(int(line_id))
            if "bug_number" not in item:
                return "No bug number provided", HTTP_400_BAD_REQUEST

        rv = []

        objs = TextLogSummaryLine.objects.filter(id__in=line_ids).all()
        lines_by_id = {obj.id: obj for obj in objs}

        # Report ids that don't exist.  The original compared len(objs)
        # with len(lines_by_id), which is always equal (db ids are
        # unique), so missing ids fell through and were misreported as
        # duplicates; it also joined ints, which raises TypeError.
        missing = [item for item in line_ids if item not in lines_by_id]
        if missing:
            body = "Line id(s) %s do not exist" % (
                ",".join(str(item) for item in missing))
            return body, HTTP_400_BAD_REQUEST

        if len(lines_by_id) != len(line_ids):
            return "Got duplicate line ids", HTTP_400_BAD_REQUEST

        by_project = defaultdict(list)

        for line in data:
            line_id = int(line.get("id"))
            obj = lines_by_id[line_id]
            obj.bug_number = line.get("bug_number")
            obj.verified = line.get("verified", False)
            obj.save()
            summary = obj.summary
            by_project[summary.repository.name].append(summary.job_guid)
            rv.append(obj)

        # .items() (not py2-only .iteritems()) so this runs on py2 and py3.
        for project, job_guids in by_project.items():
            with JobsModel(project) as jm:
                jobs = jm.get_job_ids_by_guid(job_guids)
                for job in jobs.values():
                    jm.update_after_verification(job["id"], user)

        if not many:
            rv = rv[0]

        return self.serializer_class(rv, many=many).data, HTTP_200_OK

    def update(self, request, pk=None):
        """Update a single summary line (the id comes from the URL)."""
        data = {"id": pk}
        for k, v in request.data.items():
            if k not in data:
                data[k] = v

        body, status = self._update([data], request.user, many=False)
        return Response(body, status=status)

    def update_many(self, request):
        """Bulk update; invalid ids yield 400 rather than 404."""
        body, status = self._update(request.data, request.user, many=True)

        if status == HTTP_404_NOT_FOUND:
            # 404 doesn't make sense for updating many since the path is always
            # valid, so if we get an invalid id instead return 400
            status = HTTP_400_BAD_REQUEST

        return Response(body, status=status)
|
akhileshpillai/treeherder
|
treeherder/webapp/api/text_log_summary_line.py
|
Python
|
mpl-2.0
| 3,293
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
    'name': ' Adding Survey Form in Refund Payment',
    'version': '1.0',
    'category': 'Generic Modules',
    'author': 'Riza',
    'website': 'http://www.openerp.com',
    # modules that must be installed before this one
    'depends': ['account','base','crm','sale_crm'],
    'init_xml': [ ],
    # view definitions loaded/updated on install or upgrade
    'update_xml': ['refund_survey_view.xml'],
    'demo_xml': [ ],
    'test': [ ],
    'installable': True,
    'active': False,
    'description': """ Adding Survey Form in Refund Payment"""
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
ksrajkumar/openerp-6.1
|
openerp/addons/itara_refund_survey/__openerp__.py
|
Python
|
agpl-3.0
| 1,491
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, erpnext
import frappe.defaults
from frappe import msgprint, _
from frappe.utils import cstr, flt, cint
from erpnext.stock.stock_ledger import update_entries_after
from erpnext.controllers.stock_controller import StockController
from erpnext.accounts.utils import get_company_default
from erpnext.stock.utils import get_stock_balance
# Raised when an opening Stock Reconciliation posts its difference to a
# Profit and Loss account instead of an Asset/Liability one.
class OpeningEntryAccountError(frappe.ValidationError): pass
# Raised when every submitted row already matches current stock, so there
# is nothing to reconcile.
class EmptyStockReconciliationItemsError(frappe.ValidationError): pass
class StockReconciliation(StockController):
	"""Adjust warehouse stock (quantity and/or valuation rate) to match a
	physical count.

	On submit it writes Stock Ledger Entries and GL entries for the
	difference; on cancel it deletes its ledger entries and reposts the
	future ones.
	"""

	def __init__(self, *args, **kwargs):
		super(StockReconciliation, self).__init__(*args, **kwargs)
		# Column headers used by the spreadsheet-style upload format.
		self.head_row = ["Item Code", "Warehouse", "Quantity", "Valuation Rate"]

	def validate(self):
		"""Fill account defaults, drop no-op rows and sanity-check the rest."""
		if not self.expense_account:
			self.expense_account = frappe.get_cached_value('Company', self.company, "stock_adjustment_account")
		if not self.cost_center:
			self.cost_center = frappe.get_cached_value('Company', self.company, "cost_center")
		self.validate_posting_time()
		self.remove_items_with_no_change()
		self.validate_data()
		self.validate_expense_account()
		self.set_total_qty_and_amount()

	def on_submit(self):
		"""Write stock ledger and GL entries for the differences."""
		self.update_stock_ledger()
		self.make_gl_entries()

	def on_cancel(self):
		"""Undo this voucher's ledger entries and reverse its GL impact."""
		self.delete_and_repost_sle()
		self.make_gl_entries_on_cancel()

	def remove_items_with_no_change(self):
		"""Remove items if qty or rate is not changed"""
		self.difference_amount = 0.0
		def _changed(item):
			# Current book qty/rate at this posting datetime.
			qty, rate = get_stock_balance(item.item_code, item.warehouse,
				self.posting_date, self.posting_time, with_valuation_rate=True)
			if (item.qty==None or item.qty==qty) and (item.valuation_rate==None or item.valuation_rate==rate):
				return False
			else:
				# set default as current rates
				if item.qty==None:
					item.qty = qty
				if item.valuation_rate==None:
					item.valuation_rate = rate
				item.current_qty = qty
				item.current_valuation_rate = rate
				# Accumulate (new value - current value) across kept rows.
				self.difference_amount += (flt(item.qty, item.precision("qty")) * \
					flt(item.valuation_rate or rate, item.precision("valuation_rate")) \
					- flt(qty, item.precision("qty")) * flt(rate, item.precision("valuation_rate")))
				return True

		items = list(filter(lambda d: _changed(d), self.items))

		if not items:
			frappe.throw(_("None of the items have any change in quantity or value."),
				EmptyStockReconciliationItemsError)

		elif len(items) != len(self.items):
			self.items = items
			# Re-number the surviving rows so idx stays contiguous.
			for i, item in enumerate(self.items):
				item.idx = i + 1
			frappe.msgprint(_("Removed items with no change in quantity or value."))

	def validate_data(self):
		"""Collect per-row validation problems and raise them all at once."""
		def _get_msg(row_num, msg):
			return _("Row # {0}: ").format(row_num+1) + msg

		self.validation_messages = []
		item_warehouse_combinations = []

		default_currency = frappe.db.get_default("currency")

		for row_num, row in enumerate(self.items):
			# find duplicates
			if [row.item_code, row.warehouse] in item_warehouse_combinations:
				self.validation_messages.append(_get_msg(row_num, _("Duplicate entry")))
			else:
				item_warehouse_combinations.append([row.item_code, row.warehouse])

			self.validate_item(row.item_code, row_num+1)

			# validate warehouse
			if not frappe.db.get_value("Warehouse", row.warehouse):
				self.validation_messages.append(_get_msg(row_num, _("Warehouse not found in the system")))

			# if both not specified
			if row.qty in ["", None] and row.valuation_rate in ["", None]:
				self.validation_messages.append(_get_msg(row_num,
					_("Please specify either Quantity or Valuation Rate or both")))

			# do not allow negative quantity
			if flt(row.qty) < 0:
				self.validation_messages.append(_get_msg(row_num,
					_("Negative Quantity is not allowed")))

			# do not allow negative valuation
			if flt(row.valuation_rate) < 0:
				self.validation_messages.append(_get_msg(row_num,
					_("Negative Valuation Rate is not allowed")))

			if row.qty and row.valuation_rate in ["", None]:
				# Fall back: stock balance -> buying price list -> Item master.
				row.valuation_rate = get_stock_balance(row.item_code, row.warehouse,
							self.posting_date, self.posting_time, with_valuation_rate=True)[1]
				if not row.valuation_rate:
					# try if there is a buying price list in default currency
					buying_rate = frappe.db.get_value("Item Price", {"item_code": row.item_code,
						"buying": 1, "currency": default_currency}, "price_list_rate")
					if buying_rate:
						row.valuation_rate = buying_rate
					else:
						# get valuation rate from Item
						row.valuation_rate = frappe.get_value('Item', row.item_code, 'valuation_rate')

		# throw all validation messages
		if self.validation_messages:
			for msg in self.validation_messages:
				msgprint(msg)

			raise frappe.ValidationError(self.validation_messages)

	def validate_item(self, item_code, row_num):
		"""Append a message to self.validation_messages if *item_code* is
		unusable here (disabled/expired, non-stock, serialized, batched,
		or cancelled)."""
		from erpnext.stock.doctype.item.item import validate_end_of_life, \
			validate_is_stock_item, validate_cancelled_item

		# using try except to catch all validation msgs and display together

		try:
			item = frappe.get_doc("Item", item_code)

			# end of life and stock item
			validate_end_of_life(item_code, item.end_of_life, item.disabled, verbose=0)
			validate_is_stock_item(item_code, item.is_stock_item, verbose=0)

			# item should not be serialized
			if item.has_serial_no == 1:
				raise frappe.ValidationError(_("Serialized Item {0} cannot be updated using Stock Reconciliation, please use Stock Entry").format(item_code))

			# item managed batch-wise not allowed
			if item.has_batch_no == 1:
				raise frappe.ValidationError(_("Batched Item {0} cannot be updated using Stock Reconciliation, instead use Stock Entry").format(item_code))

			# docstatus should be < 2
			validate_cancelled_item(item_code, item.docstatus, verbose=0)

		except Exception as e:
			self.validation_messages.append(_("Row # ") + ("%d: " % (row_num)) + cstr(e))

	def update_stock_ledger(self):
		"""	find difference between current and expected entries
			and create stock ledger entries based on the difference"""
		from erpnext.stock.stock_ledger import get_previous_sle

		for row in self.items:
			previous_sle = get_previous_sle({
				"item_code": row.item_code,
				"warehouse": row.warehouse,
				"posting_date": self.posting_date,
				"posting_time": self.posting_time
			})
			if previous_sle:
				# Missing qty/rate default to the last known ledger values.
				if row.qty in ("", None):
					row.qty = previous_sle.get("qty_after_transaction", 0)
				if row.valuation_rate in ("", None):
					row.valuation_rate = previous_sle.get("valuation_rate", 0)

			if row.qty and not row.valuation_rate:
				frappe.throw(_("Valuation Rate required for Item in row {0}").format(row.idx))

			# Skip rows that exactly match the previous ledger state.
			if ((previous_sle and row.qty == previous_sle.get("qty_after_transaction")
				and row.valuation_rate == previous_sle.get("valuation_rate"))
				or (not previous_sle and not row.qty)):
					continue

			self.insert_entries(row)

	def insert_entries(self, row):
		"""Insert Stock Ledger Entries"""
		args = frappe._dict({
			"doctype": "Stock Ledger Entry",
			"item_code": row.item_code,
			"warehouse": row.warehouse,
			"posting_date": self.posting_date,
			"posting_time": self.posting_time,
			"voucher_type": self.doctype,
			"voucher_no": self.name,
			"company": self.company,
			"stock_uom": frappe.db.get_value("Item", row.item_code, "stock_uom"),
			"is_cancelled": "No",
			"qty_after_transaction": flt(row.qty, row.precision("qty")),
			"valuation_rate": flt(row.valuation_rate, row.precision("valuation_rate"))
		})
		self.make_sl_entries([args])

	def delete_and_repost_sle(self):
		"""	Delete Stock Ledger Entries related to this voucher
			and repost future Stock Ledger Entries"""
		existing_entries = frappe.db.sql("""select distinct item_code, warehouse
			from `tabStock Ledger Entry` where voucher_type=%s and voucher_no=%s""",
			(self.doctype, self.name), as_dict=1)

		# delete entries
		frappe.db.sql("""delete from `tabStock Ledger Entry`
			where voucher_type=%s and voucher_no=%s""", (self.doctype, self.name))

		# repost future entries for selected item_code, warehouse
		for entries in existing_entries:
			update_entries_after({
				"item_code": entries.item_code,
				"warehouse": entries.warehouse,
				"posting_date": self.posting_date,
				"posting_time": self.posting_time
			})

	def get_gl_entries(self, warehouse_account=None):
		"""Build GL entries, posting the difference to expense_account."""
		if not self.cost_center:
			msgprint(_("Please enter Cost Center"), raise_exception=1)

		return super(StockReconciliation, self).get_gl_entries(warehouse_account,
			self.expense_account, self.cost_center)

	def validate_expense_account(self):
		"""Opening reconciliations (no ledger yet) must post to a
		balance-sheet account, not Profit and Loss."""
		if not cint(erpnext.is_perpetual_inventory_enabled(self.company)):
			return

		if not self.expense_account:
			msgprint(_("Please enter Expense Account"), raise_exception=1)
		elif not frappe.db.sql("""select name from `tabStock Ledger Entry` limit 1"""):
			if frappe.db.get_value("Account", self.expense_account, "report_type") == "Profit and Loss":
				frappe.throw(_("Difference Account must be a Asset/Liability type account, since this Stock Reconciliation is an Opening Entry"), OpeningEntryAccountError)

	def set_total_qty_and_amount(self):
		"""Derive per-row amounts and qty/value differences."""
		for d in self.get("items"):
			d.amount = flt(d.qty, d.precision("qty")) * flt(d.valuation_rate, d.precision("valuation_rate"))
			d.current_amount = (flt(d.current_qty,
				d.precision("current_qty")) * flt(d.current_valuation_rate, d.precision("current_valuation_rate")))

			d.quantity_difference = flt(d.qty) - flt(d.current_qty)
			d.amount_difference = flt(d.amount) - flt(d.current_amount)

	def get_items_for(self, warehouse):
		"""Replace the items table with the current stock of *warehouse*."""
		self.items = []
		for item in get_items(warehouse, self.posting_date, self.posting_time, self.company):
			self.append("items", item)

	def submit(self):
		# Large documents are submitted via a queued background job.
		if len(self.items) > 100:
			self.queue_action('submit')
		else:
			self._submit()

	def cancel(self):
		# Large documents are cancelled via a queued background job.
		if len(self.items) > 100:
			self.queue_action('cancel')
		else:
			self._cancel()
@frappe.whitelist()
def get_items(warehouse, posting_date, posting_time, company):
	"""Return one dict per stock item relevant to *warehouse*, carrying its
	qty and valuation rate as of the given posting datetime, in the shape
	expected by the Stock Reconciliation items table."""
	# Items that already have a Bin in this warehouse (some ledger history)...
	items = frappe.db.sql('''select i.name, i.item_name from `tabItem` i, `tabBin` bin where i.name=bin.item_code
		and i.disabled=0 and bin.warehouse=%s''', (warehouse), as_dict=True)

	# ...plus plain (non-serial/batch/variant) stock items whose default
	# warehouse for this company is the requested one.
	items += frappe.db.sql('''select i.name, i.item_name from `tabItem` i, `tabItem Default` id where i.name = id.parent
		and i.is_stock_item=1 and i.has_serial_no=0 and i.has_batch_no=0 and i.has_variants=0 and i.disabled=0
		and id.default_warehouse=%s and id.company=%s group by i.name''', (warehouse, company), as_dict=True)

	res = []
	for item in items:
		qty, rate = get_stock_balance(item.name, warehouse, posting_date, posting_time,
			with_valuation_rate=True)

		res.append({
			"item_code": item.name,
			"warehouse": warehouse,
			"qty": qty,
			"item_name": item.item_name,
			"valuation_rate": rate,
			"current_qty": qty,
			"current_valuation_rate": rate
		})

	return res
@frappe.whitelist()
def get_stock_balance_for(item_code, warehouse, posting_date, posting_time):
	"""Client-callable helper: current qty and valuation rate of
	*item_code* in *warehouse* at the given posting datetime."""
	# Requires write permission on Stock Reconciliation.
	frappe.has_permission("Stock Reconciliation", "write", throw = True)

	qty, rate = get_stock_balance(item_code, warehouse,
		posting_date, posting_time, with_valuation_rate=True)

	return {
		'qty': qty,
		'rate': rate
	}
@frappe.whitelist()
def get_difference_account(purpose, company):
	"""Pick the account that absorbs the stock value difference: the
	company's stock adjustment account for reconciliations, otherwise
	any leaf Temporary account of the company."""
	if purpose == 'Stock Reconciliation':
		return get_company_default(company, "stock_adjustment_account")

	return frappe.db.get_value('Account', {'is_group': 0,
		'company': company, 'account_type': 'Temporary'}, 'name')
|
patilsangram/erpnext
|
erpnext/stock/doctype/stock_reconciliation/stock_reconciliation.py
|
Python
|
gpl-3.0
| 11,573
|
# -*- coding: utf-8 -*-
#!/usr/bin/python
# Copyright Pi-Developers
# @author Mohamed rashad
import sys
import math
import binascii
from math import *
from sys import *
from decimal import *
##########################
def calc(n):
    """Approximate pi using n terms of the Chudnovsky series.

    Accuracy is capped by the active decimal context precision
    (28 significant digits by default); raise getcontext().prec before
    calling for more digits.  Requires n >= 1: with n == 0 the partial
    sum is zero and the final inversion raises decimal.DivisionByZero.
    """
    total = Decimal(0)
    for k in range(n):
        numerator = ((-1) ** k) * factorial(6 * k) * (13591409 + 545140134 * k)
        denominator = factorial(3 * k) * (factorial(k) ** 3) * (640320 ** (3 * k))
        total += Decimal(numerator) / Decimal(denominator)
    # 640320 ** 1.5 as an exact-integer square root at Decimal precision;
    # the original used a float power, silently capping the whole result
    # at double precision (~15 digits) regardless of context precision.
    total = total * Decimal(12) / Decimal(640320 ** 3).sqrt()
    return 1 / total
##########################
def generate_pascal_triangle(rows):
    """Return the first *rows* rows of Pascal's triangle as nested lists.

    rows == 1 yields [[1]]; any other value seeds the result with the
    first two rows and extends from there (matching the original quirk
    for rows < 1, which also returns two rows).
    """
    if rows == 1:
        return [[1]]
    triangle = [[1], [1, 1]]  # pre-populated with the first two rows
    current = [1, 1]
    for _ in range(2, rows):
        # Each interior entry is the sum of the two entries above it.
        current = [1] + [a + b for a, b in zip(current, current[1:])] + [1]
        triangle.append(current)
    return triangle
##########################
while True:
    # Main interactive menu: loops forever; each pass reads a choice and
    # dispatches to the matching calculator.
    print("\n\n ~~~~~~~~~~~~~~~~~~ PMaths Tool (Ultimate Python Maths App) ~~~~~~~~~~~~~~~~~~~")
    print(
        "\n Would you like to do..?\n\t1-check even or odd\n\t2-check prime number or not\n\t3-Check Divisbility\n\t4-Check golden ratio\n\t5-Compute Factorial\n\t6-Compute Fibonacci series\n\t7-Compute Pi (Limit 10 million)\n\t8-Encode Text \n\t9-Decode To text")
    print ("\n Choose your Option:")
    Input = input("> ")
    ##########################
    if Input == "1":
        print ("\n Enter Desired Number Here")
        Input2 = int(input("> "))
        if Input2 % 2 != 0:
            print ("Your input is odd")
        else:
            print ("Your number is even")
    ##########################
    elif Input == "2":
        print("\n Enter Desired number Here")
        Input2 = int(input("> "))
        # Single verdict after testing every candidate divisor; the
        # original printed a verdict inside the loop, once per divisor.
        if Input2 > 1 and all(Input2 % i != 0 for i in range(2, Input2)):
            print("This is a Prime Number")
        else:
            print("This is not a Prime Number")
    ##########################
    elif Input == "3":
        print("\n Enter Desired Number Here")
        Input2 = int(input("> "))
        print("\n Enter number to be divided with")
        Input4 = int(input("> "))
        if Input2 % Input4 != 0:
            print("\n Your input is not divisible by %r" % Input4)
        else:
            print("\n Your input is divisible by %r" % Input4)
    ##########################
    elif Input == "4":
        print("\n insert first number (must be larger than the second)")
        n = float(input("> "))
        print("\n insert second number")
        m = float(input("> "))
        if m > n:
            print("\n insert numbers correctly and try again")
            print("\n\n insert first number (must be larger than the second)")
            n = float(input("> "))
            print("\n insert second number")
            m = float(input("> "))
        else:
            T = n/m
            Q = float("{0:.1f}".format(T))
            if Q > 1.6 and Q < 1.7:
                print("\n Golden Ratio exists between these 2 digits (approximately)")
            else:
                # The original also printed an undefined name R here,
                # which raised NameError before the verdict appeared.
                print(T)
                print("\n Golden Ratio doesn't exist between these 2 digits")
    ##########################
    elif Input == "5":
        print("\n insert a number (integer only)")
        n = int(input("> "))
        print(str(math.factorial(n)))
    ##########################
    elif Input == "6":
        print("\n How many terms to generate?")
        x = int(input("> "))
        a = [0, 1]
        for n in range(1, x):
            a.append(a[n] + a[n - 1])
        print(a[-1])
    elif Input == "7":
        # The original had a bare `print` with the prompt stranded on the
        # next line as a no-op string expression, so no prompt appeared.
        print('\n How many digits of pi to generate ? (limit 10 million)')
        n = int(input("> "))
        print("\n")
        print("\n")
        print("Pi = " + str(calc(n)))
    ##########################
    elif Input == "8":
        print("\n Choose conversion type\n\t1-Binary\n\t2-Octal\n\t3-Hex ")
        n = input("> ")
        if n == "1":
            print("\n Enter Text")
            mm = input("> ")
            # hexlify needs bytes on Python 3, hence the encode()
            print(str(bin(int(binascii.hexlify(mm.encode()), 16))).replace("0b", ""))
        elif n == "3":
            print("\n Enter Text")
            mm = input("> ")
            print(str(":".join("{:02x}".format(ord(c)) for c in mm)))
        elif n == "2":
            print("\n Enter number")
            mm = int(input("> "))
            print(str(oct(mm)))
    ##########################
    elif Input == "9":
        print("\n Choose conversion type\n\t1-Binary\n\t2-Hex ")
        n = input("> ")
        if n == "1":
            print("\n Enter Binary")
            mm = input("> ")
            x = int(mm, 2)
            print(binascii.unhexlify('%x' % x))
        elif n in ("2", "3"):
            # Menu says 2 but the original only accepted 3; take both.
            print("\n Enter Hex")
            mm = input("> ")
            # Strip the ':' separators the encoder emits, then decode;
            # the original called the nonexistent str method mm.input('hex').
            print(binascii.unhexlify(mm.replace(":", "")))
    ##########################
    elif Input == "10":
        # Same stranded-prompt fix as option 7.
        print('\n How many rows of pascal\'s triangle to generate ? (limit 10 million)')
        mm = int(input("> "))
        for row in generate_pascal_triangle(mm):
            print(row)
|
AndroidFire/PMaths
|
PMaths.py
|
Python
|
gpl-2.0
| 5,425
|
import logging
import sys
import time
import urllib.parse
from .trace import FunctionTrace
from .transaction import Transaction
from .wrapper import callable_name, FuncWrapper
_logger = logging.getLogger(__name__)
class WebTransaction(Transaction):
    """Transaction describing one WSGI request, built from its environ."""

    def __init__(self, environ): # flake8: noqa
        # The web transaction can be enabled/disabled by the value of the variable "pamagent.enabled" in the WSGI
        # environ dictionary.
        enabled = True
        self._port = None
        self._request_params = {}
        super(WebTransaction, self).__init__(enabled)
        self._name = "Uri"
        if not self.enabled:
            return
        port = environ.get('SERVER_PORT')
        try:
            self._port = int(port)
        except (TypeError, ValueError):
            # TypeError covers a missing SERVER_PORT (int(None)); the
            # original caught only ValueError and crashed in that case.
            _logger.error("SERVER_PORT is not valid. Found %s expected integer" % port)
        self._request_uri = environ.get('REQUEST_URI', None)
        script_name = environ.get('SCRIPT_NAME', None)
        path_info = environ.get('PATH_INFO', None)
        if self._request_uri is not None:
            # Keep only the path component of the raw request URI.
            self._request_uri = urllib.parse.urlparse(self._request_uri)[2]
        if script_name is not None or path_info is not None:
            # WSGI splits the path across SCRIPT_NAME + PATH_INFO.
            if path_info is None:
                self._path = script_name
            elif script_name is None:
                self._path = path_info
            else:
                self._path = script_name + path_info
            if self._request_uri is None:
                self._request_uri = self._path
        else:
            if self._request_uri is not None:
                self._path = self._request_uri
        self.save_transaction()
        qs = environ.get('QUERY_STRING', None)
        if qs:
            params = urllib.parse.parse_qs(qs, keep_blank_values=True)
            self._request_params.update(params)
        self.url_name = 'unknown'
        self.view_name = 'unknown'

    @property
    def path(self):
        """Path component of the request URI (may be None when neither
        REQUEST_URI nor SCRIPT_NAME/PATH_INFO were present)."""
        return self._request_uri
class _WSGIInputWrapper(object):
    """Wraps wsgi.input so the first read stamps _read_start and every
    read stamps _read_end on the owning transaction."""

    def __init__(self, transaction, input_stream):
        self.__transaction = transaction
        self.__input = input_stream

    def __getattr__(self, name):
        # Anything not overridden is delegated to the wrapped stream.
        return getattr(self.__input, name)

    def close(self):
        if hasattr(self.__input, 'close'):
            self.__input.close()

    def _timed_read(self, reader, args, kwargs):
        """Run a read-style call, recording start/end times on the
        transaction around it."""
        if not self.__transaction._read_start:
            self.__transaction._read_start = time.time()
        try:
            return reader(*args, **kwargs)
        finally:
            self.__transaction._read_end = time.time()

    def read(self, *args, **kwargs):
        return self._timed_read(self.__input.read, args, kwargs)

    def readline(self, *args, **kwargs):
        return self._timed_read(self.__input.readline, args, kwargs)

    def readlines(self, *args, **kwargs):
        return self._timed_read(self.__input.readlines, args, kwargs)
class _WSGIApplicationIterable(object):
    """Wraps the WSGI response iterable so the transaction is exited
    exactly once, however iteration ends."""

    def __init__(self, transaction, generator):
        self.transaction = transaction
        self.generator = generator
        self.closed = False

    def __iter__(self):
        # The original's except clauses only re-raised, so the whole
        # construct reduces to try/finally: always close when iteration
        # stops, errors, or the consumer abandons us (GeneratorExit).
        try:
            for chunk in self.generator:
                yield chunk
        finally:
            self.close()

    def close(self):
        if self.closed:
            return

        try:
            self.generator.close()
        except Exception:
            # Record the failure against the transaction, then propagate.
            self.transaction.__exit__(*sys.exc_info())
            raise
        else:
            self.transaction.__exit__(None, None, None)
        finally:
            self.closed = True
def wsgi_application_wrapper(wrapped, name=None, framework=None):
    """Wrap a WSGI application callable so each request runs inside a
    WebTransaction.

    :param wrapped: the WSGI application to instrument
    :param name: optional explicit transaction name
    :param framework: optional framework tag; a bare value is normalized
        to a ``(name, version)`` tuple
    """
    if framework is not None and not isinstance(framework, tuple):
        framework = (framework, None)

    def _pam_wsgi_application_wrapper_(wrapped_func, _, args, kwargs):
        def _args(environment, start_response, *_, **__):
            # Extract the two positional WSGI arguments however the
            # caller passed them (positionally or by keyword).
            return environment, start_response

        environ, _ = _args(*args, **kwargs)
        transaction = WebTransaction(environ)
        # Naming priority: framework-derived callable name, then the
        # explicit name argument, then WebTransaction's own default.
        if framework is not None:
            transaction._name = callable_name(wrapped_func)
        elif name:
            transaction._name = name
        transaction.__enter__()
        try:
            # Time the request body reads by wrapping wsgi.input.
            if 'wsgi.input' in environ:
                environ['wsgi.input'] = _WSGIInputWrapper(transaction, environ['wsgi.input'])
            with FunctionTrace(transaction.thread_id, name='Application', func_name=callable_name(wrapped_func)):
                result = wrapped_func(*args, **kwargs)
        except Exception:
            transaction.__exit__(*sys.exc_info())
            raise

        # The iterable exits the transaction when the response finishes.
        return _WSGIApplicationIterable(transaction, result)

    return FuncWrapper(wrapped, _pam_wsgi_application_wrapper_)
|
PushAMP/pamagent
|
pamagent/web_transaction.py
|
Python
|
gpl-3.0
| 5,179
|
# -*- coding: utf-8 -*-
# Copyright 2014 Objectif Libre
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Stéphane Albert
#
import warnings
from cloudkitty.rating.hash.db.sqlalchemy import * # noqa
def deprecated():
    """Emit a DeprecationWarning pointing callers at the rating package."""
    message = ("The hashmap db API is deprecated. "
               "Please use rating's one instead.")
    # stacklevel=3 blames the importer of this shim, not the shim itself.
    warnings.warn(message, DeprecationWarning, stacklevel=3)


# Warn as soon as this compatibility shim is imported.
deprecated()
|
muraliselva10/cloudkitty
|
cloudkitty/billing/hash/db/sqlalchemy/__init__.py
|
Python
|
apache-2.0
| 935
|
##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2019 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from typing import Dict
from django.contrib.auth.models import Permission, Group
from django.core.exceptions import ImproperlyConfigured
from django.db.models import Prefetch
from django.db.models.query import QuerySet
from django.utils.translation import gettext_lazy as _
from rules_management.models import FieldReference
class ModelFormMixin:
    def disable_field(self, field_name):
        """Grey out *field_name*: non-editable, not required, with a
        tooltip explaining why."""
        field = self.fields[field_name]
        field.disabled = True
        field.required = False
        field.widget.attrs["title"] = _("You don't have sufficient rights to edit the field.")
class PermissionFieldMixin(ModelFormMixin):
    """
    Mixin to connect to form
    It enables/disables fields according to permissions and the context
    """
    model_permission = FieldReference
    context = ""
    user = None

    def __init__(self, *args, **kwargs):
        # NOTE(review): 'user' is read but NOT popped from kwargs, so it
        # is forwarded to super().__init__ — confirm the base form
        # accepts it (a plain Django ModelForm would not).
        if 'user' in kwargs:
            self.user = kwargs.get('user')

        if not self.user:
            raise ImproperlyConfigured("This form must receive the user to determine his permissions")

        if "context" in kwargs:
            self.context = kwargs.pop("context")

        super().__init__(*args, **kwargs)

        # Superusers bypass field-level permission checks entirely.
        if not self.user.is_superuser:
            self._disable_form_fields()

    def _disable_form_fields(self):
        """Disable every form field the user lacks rights to edit."""
        for field_ref in self.get_permission_field_queryset():
            field_name = field_ref.field_name
            if field_name in self.fields and not self.check_user_permission(field_ref):
                self.disable_field(field_name)

    def check_user_permission(self, field_reference) -> bool:
        """True if the user may edit the field, via group membership or
        any of the reference's permissions."""
        # user_groups was prefetched filtered to the current user's groups
        # (see get_permission_field_queryset), so non-empty means the user
        # belongs to at least one group attached to this field reference.
        if field_reference.user_groups:
            # Check at group level
            return True
        elif self._check_at_permissions_level(field_reference):
            # Check at permission level
            return True
        return False

    def _check_at_permissions_level(self, field_reference) -> bool:
        """True if the user holds any permission attached to the reference."""
        for perm in field_reference.permissions.all():
            app_label = perm.content_type.app_label
            codename = perm.codename
            if self.user.has_perm('{}.{}'.format(app_label, codename)):
                return True
        return False

    def get_permission_field_queryset(self) -> QuerySet:
        """Field references for this form's model/context, with the
        permissions and the user's matching groups prefetched."""
        return self.model_permission.objects.filter(
            **self.get_model_permission_filter_kwargs()
        ).prefetch_related(
            Prefetch('permissions', queryset=Permission.objects.select_related('content_type')),
            Prefetch('groups', queryset=Group.objects.filter(user=self.user), to_attr="user_groups")
        )

    def get_model_permission_filter_kwargs(self) -> Dict:
        """
        Can be override to filter in other way that on model provided in Meta of ModelForm
        """
        return {
            'content_type__app_label': self._meta.model._meta.app_label,
            'content_type__model': self._meta.model._meta.model_name,
            'context': self.get_context()
        }

    def get_context(self) -> str:
        """
        Can be override to use a specific context according to business
        :return: self.context
        """
        return self.context
|
uclouvain/OSIS-Louvain
|
rules_management/mixins.py
|
Python
|
agpl-3.0
| 4,454
|
from __future__ import absolute_import
from typing import Any, Dict, List, Set, Tuple, TypeVar, Text, \
Union, Optional, Sequence, AbstractSet, Pattern, AnyStr
from typing.re import Match
from zerver.lib.str_utils import NonBinaryStr
from django.db import models
from django.db.models.query import QuerySet
from django.db.models import Manager
from django.conf import settings
from django.contrib.auth.models import AbstractBaseUser, UserManager, \
PermissionsMixin
import django.contrib.auth
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator
from django.dispatch import receiver
from zerver.lib.cache import cache_with_key, flush_user_profile, flush_realm, \
user_profile_by_id_cache_key, user_profile_by_email_cache_key, \
generic_bulk_cached_fetch, cache_set, flush_stream, \
display_recipient_cache_key, cache_delete, \
get_stream_cache_key, active_user_dicts_in_realm_cache_key, \
active_bot_dicts_in_realm_cache_key, active_user_dict_fields, \
active_bot_dict_fields, flush_message
from zerver.lib.utils import make_safe_digest, generate_random_token
from zerver.lib.str_utils import ModelReprMixin
from django.db import transaction
from zerver.lib.camo import get_camo_url
from django.utils import timezone
from django.contrib.sessions.models import Session
from zerver.lib.timestamp import datetime_to_timestamp
from django.db.models.signals import pre_save, post_save, post_delete
from django.core.validators import MinLengthValidator, RegexValidator
from django.utils.translation import ugettext_lazy as _
from zerver.lib import cache
from bitfield import BitField
from bitfield.types import BitHandler
from collections import defaultdict
from datetime import timedelta
import pylibmc
import re
import logging
import sre_constants
import time
import datetime
# Hard limits on message topic/body sizes and language-id length.
MAX_SUBJECT_LENGTH = 60
MAX_MESSAGE_LENGTH = 10000
MAX_LANGUAGE_ID_LENGTH = 50 # type: int

# Constrained TypeVar: APIs taking stream names accept a sequence or a set.
STREAM_NAMES = TypeVar('STREAM_NAMES', Sequence[Text], AbstractSet[Text])

# Doing 1000 remote cache requests to get_display_recipient is quite slow,
# so add a local cache as well as the remote cache cache.
per_request_display_recipient_cache = {} # type: Dict[int, List[Dict[str, Any]]]
def get_display_recipient_by_id(recipient_id, recipient_type, recipient_type_id):
    # type: (int, int, int) -> Union[Text, List[Dict[str, Any]]]
    """Return the display form of a recipient, memoized per process/request."""
    try:
        return per_request_display_recipient_cache[recipient_id]
    except KeyError:
        pass
    value = get_display_recipient_remote_cache(recipient_id, recipient_type, recipient_type_id)
    per_request_display_recipient_cache[recipient_id] = value
    return value
def get_display_recipient(recipient):
    # type: (Recipient) -> Union[Text, List[Dict[str, Any]]]
    """Convenience wrapper that unpacks a Recipient row's identifying fields."""
    return get_display_recipient_by_id(recipient.id, recipient.type, recipient.type_id)
def flush_per_request_caches():
    # type: () -> None
    """Reset the per-process display-recipient and realm-filter caches."""
    global per_request_display_recipient_cache, per_request_realm_filters_cache
    per_request_display_recipient_cache = {}
    per_request_realm_filters_cache = {}
@cache_with_key(lambda *args: display_recipient_cache_key(args[0]),
                timeout=3600*24*7)
def get_display_recipient_remote_cache(recipient_id, recipient_type, recipient_type_id):
    # type: (int, int, int) -> Union[Text, List[Dict[str, Any]]]
    """
    returns: an appropriate object describing the recipient. For a
    stream this will be the stream name as a string. For a huddle or
    personal, it will be an array of dicts about each recipient.
    """
    # Stream recipients display as the stream's name.
    if recipient_type == Recipient.STREAM:
        stream = Stream.objects.get(id=recipient_type_id)
        return stream.name

    # Huddle/personal recipients display as a list of user dicts.
    # We don't really care what the ordering is, just that it's deterministic.
    user_profile_list = (UserProfile.objects.filter(subscription__recipient_id=recipient_id)
                         .select_related()
                         .order_by('email'))
    return [{'email': user_profile.email,
             'domain': user_profile.realm.domain,
             'full_name': user_profile.full_name,
             'short_name': user_profile.short_name,
             'id': user_profile.id,
             'is_mirror_dummy': user_profile.is_mirror_dummy} for user_profile in user_profile_list]
def get_realm_emoji_cache_key(realm):
    # type: (Realm) -> Text
    """Remote-cache key for a realm's custom emoji mapping."""
    return u'realm_emoji:{}'.format(realm.id)
class Realm(ModelReprMixin, models.Model):
    """A Zulip organization: its authentication policy, feature flags,
    and administrative settings shared by all of its users and streams."""
    # domain is a domain in the Internet sense. It must be structured like a
    # valid email domain. We use it to restrict access, identify bots, etc.
    domain = models.CharField(max_length=40, db_index=True, unique=True) # type: Text
    # name is the user-visible identifier for the realm. It has no required
    # structure.
    # Flag names for authentication_methods; order defines the bit positions.
    AUTHENTICATION_FLAGS = [u'Google', u'Email', u'GitHub', u'LDAP', u'Dev', u'RemoteUser']
    name = models.CharField(max_length=40, null=True) # type: Optional[Text]
    string_id = models.CharField(max_length=40, unique=True) # type: Text
    restricted_to_domain = models.BooleanField(default=False) # type: bool
    invite_required = models.BooleanField(default=True) # type: bool
    invite_by_admins_only = models.BooleanField(default=False) # type: bool
    create_stream_by_admins_only = models.BooleanField(default=False) # type: bool
    add_emoji_by_admins_only = models.BooleanField(default=False) # type: bool
    mandatory_topics = models.BooleanField(default=False) # type: bool
    show_digest_email = models.BooleanField(default=True) # type: bool
    name_changes_disabled = models.BooleanField(default=False) # type: bool
    allow_message_editing = models.BooleanField(default=True) # type: bool
    DEFAULT_MESSAGE_CONTENT_EDIT_LIMIT_SECONDS = 600 # if changed, also change in admin.js
    message_content_edit_limit_seconds = models.IntegerField(default=DEFAULT_MESSAGE_CONTENT_EDIT_LIMIT_SECONDS) # type: int
    message_retention_days = models.IntegerField(null=True) # type: Optional[int]

    # Valid org_types are {CORPORATE, COMMUNITY}
    CORPORATE = 1
    COMMUNITY = 2
    org_type = models.PositiveSmallIntegerField(default=COMMUNITY) # type: int

    date_created = models.DateTimeField(default=timezone.now) # type: datetime.datetime
    notifications_stream = models.ForeignKey('Stream', related_name='+', null=True, blank=True) # type: Optional[Stream]
    deactivated = models.BooleanField(default=False) # type: bool
    default_language = models.CharField(default=u'en', max_length=MAX_LANGUAGE_ID_LENGTH) # type: Text
    # Default: all known authentication backends enabled (bits 0..30 set).
    authentication_methods = BitField(flags=AUTHENTICATION_FLAGS,
                                      default=2**31 - 1) # type: BitHandler
    waiting_period_threshold = models.PositiveIntegerField(default=0) # type: int
    DEFAULT_NOTIFICATION_STREAM_NAME = u'announce'

    def authentication_methods_dict(self):
        # type: () -> Dict[Text, bool]
        """Returns the a mapping from authentication flags to their status,
        showing only those authentication flags that are supported on
        the current server (i.e. if EmailAuthBackend is not configured
        on the server, this will not return an entry for "Email")."""
        # This mapping needs to be imported from here due to the cyclic
        # dependency.
        from zproject.backends import AUTH_BACKEND_NAME_MAP
        ret = {} # type: Dict[Text, bool]
        supported_backends = {backend.__class__ for backend in django.contrib.auth.get_backends()}
        for k, v in self.authentication_methods.iteritems():
            backend = AUTH_BACKEND_NAME_MAP[k]
            if backend in supported_backends:
                ret[k] = v
        return ret

    def __unicode__(self):
        # type: () -> Text
        return u"<Realm: %s %s>" % (self.domain, self.id)

    @cache_with_key(get_realm_emoji_cache_key, timeout=3600*24*7)
    def get_emoji(self):
        # type: () -> Dict[Text, Dict[str, Text]]
        # Cached for a week; flushed by the RealmEmoji signal handlers.
        return get_realm_emoji_uncached(self)

    @property
    def deployment(self):
        # type: () -> Any # returns a Deployment from zilencer.models
        # see https://github.com/zulip/zulip/issues/1845 before you
        # attempt to add test coverage for this method, as we may
        # be revisiting the deployments model soon
        try:
            return self._deployments.all()[0]
        except IndexError:
            return None

    @deployment.setter # type: ignore # https://github.com/python/mypy/issues/220
    def set_deployments(self, value):
        # type: (Any) -> None
        self._deployments = [value] # type: Any

    def get_admin_users(self):
        # type: () -> Sequence[UserProfile]
        # TODO: Change return type to QuerySet[UserProfile]
        return UserProfile.objects.filter(realm=self, is_realm_admin=True,
                                          is_active=True).select_related()

    def get_active_users(self):
        # type: () -> Sequence[UserProfile]
        # TODO: Change return type to QuerySet[UserProfile]
        return UserProfile.objects.filter(realm=self, is_active=True).select_related()

    @property
    def subdomain(self):
        # type: () -> Optional[Text]
        # Only meaningful when the server is running in subdomains mode.
        if settings.REALMS_HAVE_SUBDOMAINS:
            return self.string_id
        return None

    @property
    def uri(self):
        # type: () -> str
        if settings.REALMS_HAVE_SUBDOMAINS and self.subdomain is not None:
            return '%s%s.%s' % (settings.EXTERNAL_URI_SCHEME,
                                self.subdomain, settings.EXTERNAL_HOST)
        return settings.SERVER_URI

    @property
    def host(self):
        # type: () -> str
        if settings.REALMS_HAVE_SUBDOMAINS and self.subdomain is not None:
            return "%s.%s" % (self.subdomain, settings.EXTERNAL_HOST)
        return settings.EXTERNAL_HOST

    @property
    def is_zephyr_mirror_realm(self):
        # type: () -> bool
        return self.domain == "mit.edu"

    @property
    def webathena_enabled(self):
        # type: () -> bool
        return self.is_zephyr_mirror_realm

    @property
    def presence_disabled(self):
        # type: () -> bool
        return self.is_zephyr_mirror_realm

    class Meta(object):
        permissions = (
            ('administer', "Administer a realm"),
            ('api_super_user', "Can send messages as other users for mirroring"),
        )
post_save.connect(flush_realm, sender=Realm)
def get_realm(string_id):
    # type: (Text) -> Optional[Realm]
    """Return the Realm whose string_id matches, or None when absent/blank."""
    result = None
    if string_id:
        try:
            result = Realm.objects.get(string_id=string_id)
        except Realm.DoesNotExist:
            result = None
    return result
def completely_open(realm):
    # type: (Realm) -> bool
    """True when anyone on the internet may join this realm.

    Open means no invite requirement and no realm-alias domain
    restriction; a missing realm is never open.
    """
    if not realm:
        return False
    return not (realm.invite_required or realm.restricted_to_domain)
def get_unique_open_realm():
    # type: () -> Optional[Realm]
    """We only return a realm if there is a unique non-system-only realm,
    it is completely open, and there are no subdomains."""
    if settings.REALMS_HAVE_SUBDOMAINS:
        return None
    # On production installations, the (usually "zulip.com") system
    # realm is an empty realm just used for system bots, so don't
    # include it in this accounting.
    candidates = Realm.objects.filter(deactivated=False).exclude(
        domain__in=settings.SYSTEM_ONLY_REALMS)
    if len(candidates) != 1:
        return None
    candidate = candidates[0]
    if candidate.invite_required or candidate.restricted_to_domain:
        return None
    return candidate
def name_changes_disabled(realm):
    # type: (Optional[Realm]) -> bool
    """True when full-name changes are blocked server-wide or by the realm."""
    if realm is not None and realm.name_changes_disabled:
        return True
    return settings.NAME_CHANGES_DISABLED
class RealmAlias(models.Model):
    """An email domain whose addresses are associated with a realm."""
    realm = models.ForeignKey(Realm, null=True) # type: Optional[Realm]
    # should always be stored lowercase
    domain = models.CharField(max_length=80, db_index=True) # type: Text
def can_add_alias(domain):
    # type: (Text) -> bool
    """Whether a new RealmAlias may be created for ``domain``."""
    if settings.REALMS_HAVE_SUBDOMAINS:
        # With subdomains enabled, email domains need not be unique.
        return True
    return not RealmAlias.objects.filter(domain=domain).exists()
# These functions should only be used on email addresses that have
# been validated via django.core.validators.validate_email
#
# Note that we need to use some care, since can you have multiple @-signs; e.g.
# "tabbott@test"@zulip.com
# is a valid email address
def email_to_username(email):
    # type: (Text) -> Text
    """Return the lowercased local part (everything before the last '@')."""
    local_part, _, _ = email.rpartition("@")
    return local_part.lower()
# Returns the raw domain portion of the desired email address
def email_to_domain(email):
    # type: (Text) -> Text
    """Return the lowercased domain (everything after the last '@')."""
    return email.rpartition("@")[2].lower()
class GetRealmByDomainException(Exception):
    """Raised when realm lookup by email domain is not meaningful
    (e.g. the server runs in subdomains mode)."""
    pass
def get_realm_by_email_domain(email):
    # type: (Text) -> Optional[Realm]
    """Map an email address to a Realm via the RealmAlias table."""
    if settings.REALMS_HAVE_SUBDOMAINS:
        # Email domains are ambiguous across realms once subdomains exist.
        raise GetRealmByDomainException(
            "Cannot get realm from email domain when settings.REALMS_HAVE_SUBDOMAINS = True")
    try:
        matching_alias = RealmAlias.objects.select_related('realm').get(
            domain=email_to_domain(email))
    except RealmAlias.DoesNotExist:
        return None
    return matching_alias.realm
# Is a user with the given email address allowed to be in the given realm?
# (This function does not check whether the user has been invited to the realm.
# So for invite-only realms, this is the test for whether a user can be invited,
# not whether the user can sign up currently.)
def email_allowed_for_realm(email, realm):
    # type: (Text, Realm) -> bool
    """Check the realm's domain restriction against the email's domain."""
    if not realm.restricted_to_domain:
        return True
    return RealmAlias.objects.filter(
        realm=realm, domain=email_to_domain(email)).exists()
def list_of_domains_for_realm(realm):
    # type: (Realm) -> List[Text]
    """All email domains attached to ``realm`` via RealmAlias rows."""
    domains = RealmAlias.objects.filter(realm=realm).values_list('domain', flat=True)
    return list(domains)
class RealmEmoji(ModelReprMixin, models.Model):
    """A custom emoji uploaded for a realm, unique by (realm, name)."""
    author = models.ForeignKey('UserProfile', blank=True, null=True)
    realm = models.ForeignKey(Realm) # type: Realm
    # Second part of the regex (negative lookbehind) disallows names ending with one of the punctuation characters
    name = models.TextField(validators=[MinLengthValidator(1),
                                        RegexValidator(regex=r'^[0-9a-zA-Z.\-_]+(?<![.\-_])$',
                                                       message=_("Invalid characters in Emoji name"))]) # type: Text

    # URLs start having browser compatibility problem below 2000
    # characters, so 1000 seems like a safe limit.
    img_url = models.URLField(max_length=1000) # type: Text

    class Meta(object):
        unique_together = ("realm", "name")

    def __unicode__(self):
        # type: () -> Text
        return u"<RealmEmoji(%s): %s %s>" % (self.realm.domain, self.name, self.img_url)
def get_realm_emoji_uncached(realm):
    # type: (Realm) -> Dict[Text, Dict[str, Text]]
    """Build the name -> emoji-info mapping for a realm, straight from the DB."""
    emoji_by_name = {}
    for emoji_row in RealmEmoji.objects.filter(realm=realm).select_related('author'):
        author_info = None
        if emoji_row.author:
            author_info = {
                'id': emoji_row.author.id,
                'email': emoji_row.author.email,
                'full_name': emoji_row.author.full_name}
        emoji_by_name[emoji_row.name] = dict(source_url=emoji_row.img_url,
                                             display_url=get_camo_url(emoji_row.img_url),
                                             author=author_info)
    return emoji_by_name
def flush_realm_emoji(sender, **kwargs):
    # type: (Any, **Any) -> None
    """Signal handler: recompute a realm's emoji cache entry after a change."""
    changed_realm = kwargs['instance'].realm
    cache_set(get_realm_emoji_cache_key(changed_realm),
              get_realm_emoji_uncached(changed_realm),
              timeout=3600*24*7)
# Refresh the cached emoji mapping whenever a RealmEmoji row changes.
post_save.connect(flush_realm_emoji, sender=RealmEmoji)
post_delete.connect(flush_realm_emoji, sender=RealmEmoji)
def filter_pattern_validator(value):
    # type: (Text) -> None
    """Validate a realm-filter pattern: optional prefix plus a named group."""
    required_shape = re.compile(r'(?:[\w\-#]*)(\(\?P<\w+>.+\))')
    error_msg = 'Invalid filter pattern, you must use the following format OPTIONAL_PREFIX(?P<id>.+)'
    if not required_shape.match(str(value)):
        raise ValidationError(error_msg)
    try:
        re.compile(value)
    except sre_constants.error:
        # Looked like the right shape, but is not a compilable regex.
        raise ValidationError(error_msg)
def filter_format_validator(value):
    # type: (str) -> None
    """Validate a realm-filter URL format string: exactly one %(name)s slot."""
    allowed_shape = re.compile(r'^[\.\/:a-zA-Z0-9_-]+%\(([a-zA-Z0-9_-]+)\)s[a-zA-Z0-9_-]*$')
    if allowed_shape.match(value) is None:
        raise ValidationError('URL format string must be in the following format: `https://example.com/%(\w+)s`')
class RealmFilter(models.Model):
    """A realm "linkifier": a regex pattern plus the URL template it
    expands to, unique by (realm, pattern)."""
    realm = models.ForeignKey(Realm) # type: Realm
    pattern = models.TextField(validators=[filter_pattern_validator]) # type: Text
    url_format_string = models.TextField(validators=[URLValidator, filter_format_validator]) # type: Text

    class Meta(object):
        unique_together = ("realm", "pattern")

    def __unicode__(self):
        # type: () -> Text
        return u"<RealmFilter(%s): %s %s>" % (self.realm.domain, self.pattern, self.url_format_string)
def get_realm_filters_cache_key(realm_id):
    # type: (int) -> Text
    """Remote-cache key for a realm's linkifier (filter) list."""
    return u'all_realm_filters:{}'.format(realm_id)
# We have a per-process cache to avoid doing 1000 remote cache queries during page load
per_request_realm_filters_cache = {} # type: Dict[int, List[Tuple[Text, Text, int]]]
def realm_in_local_realm_filters_cache(realm_id):
    # type: (int) -> bool
    """Whether this process already cached the realm's filter list."""
    locally_cached = per_request_realm_filters_cache
    return realm_id in locally_cached
def realm_filters_for_realm(realm_id):
    # type: (int) -> List[Tuple[Text, Text, int]]
    """Realm filter list, served from the per-process cache when possible."""
    if realm_in_local_realm_filters_cache(realm_id):
        return per_request_realm_filters_cache[realm_id]
    fetched = realm_filters_for_realm_remote_cache(realm_id)
    per_request_realm_filters_cache[realm_id] = fetched
    return fetched
@cache_with_key(get_realm_filters_cache_key, timeout=3600*24*7)
def realm_filters_for_realm_remote_cache(realm_id):
    # type: (int) -> List[Tuple[Text, Text, int]]
    """Load (pattern, url_format, id) triples for a realm from the database."""
    return [(row.pattern, row.url_format_string, row.id)
            for row in RealmFilter.objects.filter(realm_id=realm_id)]
def all_realm_filters():
    # type: () -> Dict[int, List[Tuple[Text, Text, int]]]
    """Group every realm filter in the database by realm id."""
    by_realm = defaultdict(list) # type: Dict[int, List[Tuple[Text, Text, int]]]
    for row in RealmFilter.objects.all():
        by_realm[row.realm_id].append((row.pattern, row.url_format_string, row.id))
    return by_realm
def flush_realm_filter(sender, **kwargs):
    # type: (Any, **Any) -> None
    """Signal handler: drop both the remote and per-process filter caches."""
    changed_realm_id = kwargs['instance'].realm_id
    cache_delete(get_realm_filters_cache_key(changed_realm_id))
    # pop with a default: the per-process entry may not exist yet.
    per_request_realm_filters_cache.pop(changed_realm_id, None)
# Drop cached filter lists whenever a RealmFilter row changes.
post_save.connect(flush_realm_filter, sender=RealmFilter)
post_delete.connect(flush_realm_filter, sender=RealmFilter)
class UserProfile(ModelReprMixin, AbstractBaseUser, PermissionsMixin):
    """A Zulip user account (human or bot) within a single realm."""
    # Bot types.
    DEFAULT_BOT = 1
    """
    Incoming webhook bots are limited to only sending messages via webhooks.
    Thus, it is less of a security risk to expose their API keys to third-party services,
    since they can't be used to read messages.
    """
    INCOMING_WEBHOOK_BOT = 2

    # Fields from models.AbstractUser minus last_name and first_name,
    # which we don't use; email is modified to make it indexed and unique.
    email = models.EmailField(blank=False, db_index=True, unique=True) # type: Text
    is_staff = models.BooleanField(default=False) # type: bool
    is_active = models.BooleanField(default=True, db_index=True) # type: bool
    is_realm_admin = models.BooleanField(default=False, db_index=True) # type: bool
    is_bot = models.BooleanField(default=False, db_index=True) # type: bool
    bot_type = models.PositiveSmallIntegerField(null=True, db_index=True) # type: Optional[int]
    is_api_super_user = models.BooleanField(default=False, db_index=True) # type: bool
    date_joined = models.DateTimeField(default=timezone.now) # type: datetime.datetime
    is_mirror_dummy = models.BooleanField(default=False) # type: bool
    bot_owner = models.ForeignKey('self', null=True, on_delete=models.SET_NULL) # type: Optional[UserProfile]

    USERNAME_FIELD = 'email'
    MAX_NAME_LENGTH = 100

    # Our custom site-specific fields
    full_name = models.CharField(max_length=MAX_NAME_LENGTH) # type: Text
    short_name = models.CharField(max_length=MAX_NAME_LENGTH) # type: Text
    # pointer points to Message.id, NOT UserMessage.id.
    pointer = models.IntegerField() # type: int
    last_pointer_updater = models.CharField(max_length=64) # type: Text
    realm = models.ForeignKey(Realm) # type: Realm
    api_key = models.CharField(max_length=32) # type: Text
    tos_version = models.CharField(null=True, max_length=10) # type: Optional[Text]

    ### Notifications settings. ###
    # Stream notifications.
    enable_stream_desktop_notifications = models.BooleanField(default=False) # type: bool
    enable_stream_sounds = models.BooleanField(default=False) # type: bool

    # PM + @-mention notifications.
    enable_desktop_notifications = models.BooleanField(default=True) # type: bool
    pm_content_in_desktop_notifications = models.BooleanField(default=True) # type: bool
    enable_sounds = models.BooleanField(default=True) # type: bool
    enable_offline_email_notifications = models.BooleanField(default=True) # type: bool
    enable_offline_push_notifications = models.BooleanField(default=True) # type: bool
    enable_online_push_notifications = models.BooleanField(default=False) # type: bool

    enable_digest_emails = models.BooleanField(default=True) # type: bool

    # Old notification field superseded by existence of stream notification
    # settings.
    default_desktop_notifications = models.BooleanField(default=True) # type: bool

    ###

    last_reminder = models.DateTimeField(default=timezone.now, null=True) # type: Optional[datetime.datetime]
    rate_limits = models.CharField(default=u"", max_length=100) # type: Text # comma-separated list of range:max pairs

    # Default streams
    default_sending_stream = models.ForeignKey('zerver.Stream', null=True, related_name='+') # type: Optional[Stream]
    default_events_register_stream = models.ForeignKey('zerver.Stream', null=True, related_name='+') # type: Optional[Stream]
    default_all_public_streams = models.BooleanField(default=False) # type: bool

    # UI vars
    enter_sends = models.NullBooleanField(default=False) # type: Optional[bool]
    autoscroll_forever = models.BooleanField(default=False) # type: bool
    left_side_userlist = models.BooleanField(default=False) # type: bool

    # display settings
    twenty_four_hour_time = models.BooleanField(default=False) # type: bool
    default_language = models.CharField(default=u'en', max_length=MAX_LANGUAGE_ID_LENGTH) # type: Text

    # Hours to wait before sending another email to a user
    EMAIL_REMINDER_WAITPERIOD = 24
    # Minutes to wait before warning a bot owner that her bot sent a message
    # to a nonexistent stream
    BOT_OWNER_STREAM_ALERT_WAITPERIOD = 1

    AVATAR_FROM_GRAVATAR = u'G'
    AVATAR_FROM_USER = u'U'
    AVATAR_SOURCES = (
        (AVATAR_FROM_GRAVATAR, 'Hosted by Gravatar'),
        (AVATAR_FROM_USER, 'Uploaded by user'),
    )
    avatar_source = models.CharField(default=AVATAR_FROM_GRAVATAR, choices=AVATAR_SOURCES, max_length=1) # type: Text

    TUTORIAL_WAITING = u'W'
    TUTORIAL_STARTED = u'S'
    TUTORIAL_FINISHED = u'F'
    TUTORIAL_STATES = ((TUTORIAL_WAITING, "Waiting"),
                       (TUTORIAL_STARTED, "Started"),
                       (TUTORIAL_FINISHED, "Finished"))

    tutorial_status = models.CharField(default=TUTORIAL_WAITING, choices=TUTORIAL_STATES, max_length=1) # type: Text
    # Contains serialized JSON of the form:
    #    [("step 1", true), ("step 2", false)]
    # where the second element of each tuple is if the step has been
    # completed.
    onboarding_steps = models.TextField(default=u'[]') # type: Text

    invites_granted = models.IntegerField(default=0) # type: int
    invites_used = models.IntegerField(default=0) # type: int

    alert_words = models.TextField(default=u'[]') # type: Text # json-serialized list of strings

    # Contains serialized JSON of the form:
    # [["social", "mit"], ["devel", "ios"]]
    muted_topics = models.TextField(default=u'[]') # type: Text

    objects = UserManager() # type: UserManager

    def can_admin_user(self, target_user):
        # type: (UserProfile) -> bool
        """Returns whether this user has permission to modify target_user"""
        if target_user.bot_owner == self:
            return True
        elif self.is_realm_admin and self.realm == target_user.realm:
            return True
        else:
            return False

    def __unicode__(self):
        # type: () -> Text
        return u"<UserProfile: %s %s>" % (self.email, self.realm)

    @property
    def is_incoming_webhook(self):
        # type: () -> bool
        return self.bot_type == UserProfile.INCOMING_WEBHOOK_BOT

    @staticmethod
    def emails_from_ids(user_ids):
        # type: (Sequence[int]) -> Dict[int, Text]
        # Bulk id -> email lookup without instantiating full model objects.
        rows = UserProfile.objects.filter(id__in=user_ids).values('id', 'email')
        return {row['id']: row['email'] for row in rows}

    def can_create_streams(self):
        # type: () -> bool
        # Admins always can; otherwise honor the realm policy and the
        # waiting-period threshold (days since the account was created).
        diff = (timezone.now() - self.date_joined).days
        if self.is_realm_admin:
            return True
        elif self.realm.create_stream_by_admins_only:
            return False
        if diff >= self.realm.waiting_period_threshold:
            return True
        return False

    def major_tos_version(self):
        # type: () -> int
        # -1 means the user has not agreed to any terms-of-service version.
        if self.tos_version is not None:
            return int(self.tos_version.split('.')[0])
        else:
            return -1
def receives_offline_notifications(user_profile):
    # type: (UserProfile) -> bool
    """Humans (never bots) with email or push offline notifications enabled."""
    if user_profile.is_bot:
        return False
    return (user_profile.enable_offline_email_notifications or
            user_profile.enable_offline_push_notifications)
def receives_online_notifications(user_profile):
    # type: (UserProfile) -> bool
    """Humans (never bots) that asked for push notifications while online."""
    if user_profile.is_bot:
        return False
    return user_profile.enable_online_push_notifications
def remote_user_to_email(remote_user):
    # type: (Text) -> Text
    """Turn an SSO remote username into an email, appending the configured domain."""
    if settings.SSO_APPEND_DOMAIN is None:
        return remote_user
    return remote_user + "@" + settings.SSO_APPEND_DOMAIN
# Make sure we flush the UserProfile object from our remote cache
# whenever we save it.
post_save.connect(flush_user_profile, sender=UserProfile)
class PreregistrationUser(models.Model):
    """A pending invitation/signup: the email invited, who invited it,
    and which streams to subscribe the new user to on confirmation."""
    email = models.EmailField() # type: Text
    referred_by = models.ForeignKey(UserProfile, null=True) # type: Optional[UserProfile]
    streams = models.ManyToManyField('Stream') # type: Manager
    invited_at = models.DateTimeField(auto_now=True) # type: datetime.datetime
    # True when this signup is creating a brand-new realm.
    realm_creation = models.BooleanField(default=False)

    # status: whether an object has been confirmed.
    #   if confirmed, set to confirmation.settings.STATUS_ACTIVE
    status = models.IntegerField(default=0) # type: int

    realm = models.ForeignKey(Realm, null=True) # type: Optional[Realm]
class PushDeviceToken(models.Model):
    """A mobile push registration (APNS or GCM) belonging to one user."""
    APNS = 1
    GCM = 2

    KINDS = (
        (APNS,  'apns'),
        (GCM,   'gcm'),
    )

    kind = models.PositiveSmallIntegerField(choices=KINDS) # type: int

    # The token is a unique device-specific token that is
    # sent to us from each device:
    #   - APNS token if kind == APNS
    #   - GCM registration id if kind == GCM
    token = models.CharField(max_length=4096, unique=True) # type: Text
    last_updated = models.DateTimeField(auto_now=True) # type: datetime.datetime

    # The user whose device this is
    user = models.ForeignKey(UserProfile, db_index=True) # type: UserProfile

    # [optional] Contains the app id of the device if it is an iOS device
    ios_app_id = models.TextField(null=True) # type: Optional[Text]
def generate_email_token_for_stream():
    # type: () -> Text
    """Fresh random token used in a stream's inbound e-mail address."""
    token_length = 32
    return generate_random_token(token_length)
class Stream(ModelReprMixin, models.Model):
    """A channel within a realm, unique by (name, realm)."""
    MAX_NAME_LENGTH = 60
    name = models.CharField(max_length=MAX_NAME_LENGTH, db_index=True) # type: Text
    realm = models.ForeignKey(Realm, db_index=True) # type: Realm
    invite_only = models.NullBooleanField(default=False) # type: Optional[bool]
    # Used by the e-mail forwarder. The e-mail RFC specifies a maximum
    # e-mail length of 254, and our max stream length is 30, so we
    # have plenty of room for the token.
    email_token = models.CharField(
        max_length=32, default=generate_email_token_for_stream) # type: Text
    description = models.CharField(max_length=1024, default=u'') # type: Text

    date_created = models.DateTimeField(default=timezone.now) # type: datetime.datetime
    deactivated = models.BooleanField(default=False) # type: bool

    def __unicode__(self):
        # type: () -> Text
        return u"<Stream: %s>" % (self.name,)

    def is_public(self):
        # type: () -> bool
        # All streams are private in Zephyr mirroring realms.
        return not self.invite_only and not self.realm.is_zephyr_mirror_realm

    class Meta(object):
        unique_together = ("name", "realm")

    def num_subscribers(self):
        # type: () -> int
        # Count only active subscriptions held by active users.
        return Subscription.objects.filter(
            recipient__type=Recipient.STREAM,
            recipient__type_id=self.id,
            user_profile__is_active=True,
            active=True
        ).count()

    # This is stream information that is sent to clients
    def to_dict(self):
        # type: () -> Dict[str, Any]
        return dict(name=self.name,
                    stream_id=self.id,
                    description=self.description,
                    invite_only=self.invite_only)
# Keep per-stream cache entries in sync with the Stream table.
post_save.connect(flush_stream, sender=Stream)
post_delete.connect(flush_stream, sender=Stream)
def valid_stream_name(name):
    # type: (Text) -> bool
    """A stream name is valid as long as it is non-empty."""
    return len(name) > 0
# The Recipient table is used to map Messages to the set of users who
# received the message. It is implemented as a set of triples (id,
# type_id, type). We have 3 types of recipients: Huddles (for group
# private messages), UserProfiles (for 1:1 private messages), and
# Streams. The recipient table maps a globally unique recipient id
# (used by the Message table) to the type-specific unique id (the
# stream id, user_profile id, or huddle id).
class Recipient(ModelReprMixin, models.Model):
    """Maps a globally unique recipient id to a type-specific id
    (stream id, user_profile id, or huddle id); see the comment above."""
    type_id = models.IntegerField(db_index=True) # type: int
    type = models.PositiveSmallIntegerField(db_index=True) # type: int
    # Valid types are {personal, stream, huddle}
    PERSONAL = 1
    STREAM = 2
    HUDDLE = 3

    class Meta(object):
        unique_together = ("type", "type_id")

    # N.B. If we used Django's choice=... we would get this for free (kinda)
    _type_names = {
        PERSONAL: 'personal',
        STREAM: 'stream',
        HUDDLE: 'huddle'}

    def type_name(self):
        # type: () -> str
        # Raises KeyError if invalid
        return self._type_names[self.type]

    def __unicode__(self):
        # type: () -> Text
        display_recipient = get_display_recipient(self)
        return u"<Recipient: %s (%d, %s)>" % (display_recipient, self.type_id, self.type)
class Client(ModelReprMixin, models.Model):
    """A named API client (web, desktop, a bot library, etc.)."""
    name = models.CharField(max_length=30, db_index=True, unique=True) # type: Text

    def __unicode__(self):
        # type: () -> Text
        return u"<Client: %s>" % (self.name,)
get_client_cache = {} # type: Dict[Text, Client]
def get_client(name):
    # type: (Text) -> Client
    """Fetch (or create) a Client row, memoized per process."""
    # Accessing KEY_PREFIX through the module is necessary
    # because we need the updated value of the variable.
    cache_name = cache.KEY_PREFIX + name
    try:
        return get_client_cache[cache_name]
    except KeyError:
        client = get_client_remote_cache(name)
        get_client_cache[cache_name] = client
        return client
def get_client_cache_key(name):
    # type: (Text) -> Text
    """Remote-cache key for a Client lookup (name is digested, not embedded)."""
    return u'get_client:{}'.format(make_safe_digest(name))
@cache_with_key(get_client_cache_key, timeout=3600*24*7)
def get_client_remote_cache(name):
    # type: (Text) -> Client
    """Get-or-create the Client row; cached remotely for a week."""
    client, _created = Client.objects.get_or_create(name=name)
    return client
# NOTE(review): an upstream comment claimed this "takes either a realm id or
# a realm", but the body reads realm.id — confirm callers pass Realm objects.
@cache_with_key(get_stream_cache_key, timeout=3600*24*7)
def get_stream_backend(stream_name, realm):
    # type: (Text, Realm) -> Stream
    """Case-insensitive stream lookup within a realm; cached for a week."""
    normalized_name = stream_name.strip()
    return Stream.objects.select_related("realm").get(
        name__iexact=normalized_name, realm_id=realm.id)
def get_active_streams(realm):
    # type: (Realm) -> QuerySet
    """
    Return all streams (including invite-only streams) that have not been deactivated.
    """
    return Stream.objects.filter(deactivated=False, realm=realm)
def get_stream(stream_name, realm):
    # type: (Text, Realm) -> Optional[Stream]
    """Like get_stream_backend, but returns None instead of raising."""
    stream = None
    try:
        stream = get_stream_backend(stream_name, realm)
    except Stream.DoesNotExist:
        pass
    return stream
def bulk_get_streams(realm, stream_names):
    # type: (Realm, STREAM_NAMES) -> Dict[Text, Any]
    """Fetch many streams by name (case-insensitively) through the generic
    bulk cache, returning a dict keyed by lowercased stream name."""

    def fetch_streams_by_name(stream_names):
        # type: (List[Text]) -> Sequence[Stream]
        #
        # This should be just
        #
        # Stream.objects.select_related("realm").filter(name__iexact__in=stream_names,
        #                                               realm_id=realm_id)
        #
        # But chaining __in and __iexact doesn't work with Django's
        # ORM, so we have the following hack to construct the relevant where clause
        if len(stream_names) == 0:
            return []
        # Build one UPPER(%s) placeholder per name for the raw where clause.
        upper_list = ", ".join(["UPPER(%s)"] * len(stream_names))
        where_clause = "UPPER(zerver_stream.name::text) IN (%s)" % (upper_list,)
        return get_active_streams(realm.id).select_related("realm").extra(
            where=[where_clause],
            params=stream_names)

    return generic_bulk_cached_fetch(lambda stream_name: get_stream_cache_key(stream_name, realm),
                                     fetch_streams_by_name,
                                     [stream_name.lower() for stream_name in stream_names],
                                     id_fetcher=lambda stream: stream.name.lower())
def get_recipient_cache_key(type, type_id):
    # type: (int, int) -> Text
    """Remote-cache key for a (type, type_id) Recipient lookup."""
    return u"get_recipient:{}:{}".format(type, type_id)
@cache_with_key(get_recipient_cache_key, timeout=3600*24*7)
def get_recipient(type, type_id):
    # type: (int, int) -> Recipient
    """Fetch the Recipient row for (type, type_id); cached for a week."""
    return Recipient.objects.get(type=type, type_id=type_id)
def bulk_get_recipients(type, type_ids):
    # type: (int, List[int]) -> Dict[int, Any]
    """Fetch many Recipient rows of one type via the generic bulk cache."""
    def recipient_cache_key(type_id):
        # type: (int) -> Text
        return get_recipient_cache_key(type, type_id)

    def fetch_recipients(ids):
        # type: (List[int]) -> Sequence[Recipient]
        # TODO: Change return type to QuerySet[Recipient]
        return Recipient.objects.filter(type=type, type_id__in=ids)

    return generic_bulk_cached_fetch(recipient_cache_key, fetch_recipients, type_ids,
                                     id_fetcher=lambda recipient: recipient.type_id)
def sew_messages_and_reactions(messages, reactions):
    # type: (List[Dict[str, Any]], List[Dict[str, Any]]) -> List[Dict[str, Any]]
    """Given a iterable of messages and reactions stitch reactions
    into messages.
    """
    # Give every message an empty reaction list, indexing by id as we go
    # so each reaction can find its parent in O(1).
    messages_by_id = {}
    for message in messages:
        message['reactions'] = []
        messages_by_id[message['id']] = message

    for reaction in reactions:
        messages_by_id[reaction['message_id']]['reactions'].append(reaction)

    return list(messages_by_id.values())
class Message(ModelReprMixin, models.Model):
    """A single Zulip message, shared by all of its recipients.

    Per-user state (read/starred/etc.) lives in UserMessage, not here.
    """
    sender = models.ForeignKey(UserProfile) # type: UserProfile
    recipient = models.ForeignKey(Recipient) # type: Recipient
    # "subject" is the topic; prefer topic_name() in new code.
    subject = models.CharField(max_length=MAX_SUBJECT_LENGTH, db_index=True) # type: Text
    content = models.TextField() # type: Text
    # Cached HTML rendering of `content`, with the bugdown version that
    # produced it so stale renderings can be detected.
    rendered_content = models.TextField(null=True) # type: Optional[Text]
    rendered_content_version = models.IntegerField(null=True) # type: Optional[int]
    pub_date = models.DateTimeField('date published', db_index=True) # type: datetime.datetime
    sending_client = models.ForeignKey(Client) # type: Client
    last_edit_time = models.DateTimeField(null=True) # type: Optional[datetime.datetime]
    edit_history = models.TextField(null=True) # type: Optional[Text]
    # Denormalized content properties, recomputed by
    # update_calculated_fields() via the pre_save signal below.
    has_attachment = models.BooleanField(default=False, db_index=True) # type: bool
    has_image = models.BooleanField(default=False, db_index=True) # type: bool
    has_link = models.BooleanField(default=False, db_index=True) # type: bool
    def topic_name(self):
        # type: () -> Text
        """
        Please start using this helper to facilitate an
        eventual switch over to a separate topic table.
        """
        return self.subject
    def __unicode__(self):
        # type: () -> Text
        display_recipient = get_display_recipient(self.recipient)
        return u"<Message: %s / %s / %r>" % (display_recipient, self.subject, self.sender)
    def get_realm(self):
        # type: () -> Realm
        # A message belongs to its sender's realm.
        return self.sender.realm
    def save_rendered_content(self):
        # type: () -> None
        # Persist only the rendering-related columns.
        self.save(update_fields=["rendered_content", "rendered_content_version"])
    @staticmethod
    def need_to_render_content(rendered_content, rendered_content_version, bugdown_version):
        # type: (Optional[Text], int, int) -> bool
        # True when there is no cached rendering or it was produced by an
        # older bugdown than the one currently installed.
        return rendered_content is None or rendered_content_version < bugdown_version
    def to_log_dict(self):
        # type: () -> Dict[str, Any]
        # Flat dict representation used for logging.
        return dict(
            id                = self.id,
            sender_id         = self.sender.id,
            sender_email      = self.sender.email,
            sender_domain     = self.sender.realm.domain,
            sender_full_name  = self.sender.full_name,
            sender_short_name = self.sender.short_name,
            sending_client    = self.sending_client.name,
            type              = self.recipient.type_name(),
            recipient         = get_display_recipient(self.recipient),
            subject           = self.topic_name(),
            content           = self.content,
            timestamp         = datetime_to_timestamp(self.pub_date))
    @staticmethod
    def get_raw_db_rows(needed_ids):
        # type: (List[int]) -> List[Dict[str, Any]]
        # This is a special purpose function optimized for
        # callers like get_old_messages_backend().
        # Returns flat dicts (not model instances), with reactions sewn in.
        fields = [
            'id',
            'subject',
            'pub_date',
            'last_edit_time',
            'edit_history',
            'content',
            'rendered_content',
            'rendered_content_version',
            'recipient_id',
            'recipient__type',
            'recipient__type_id',
            'sender_id',
            'sending_client__name',
            'sender__email',
            'sender__full_name',
            'sender__short_name',
            'sender__realm__id',
            'sender__realm__domain',
            'sender__avatar_source',
            'sender__is_mirror_dummy',
        ]
        messages = Message.objects.filter(id__in=needed_ids).values(*fields)
        """Adding one-many or Many-Many relationship in values results in N X
        results.

        Link: https://docs.djangoproject.com/en/1.8/ref/models/querysets/#values
        """
        reactions = Reaction.get_raw_db_rows(needed_ids)
        return sew_messages_and_reactions(messages, reactions)
    def sent_by_human(self):
        # type: () -> bool
        # Heuristic: treat the official clients and any "desktop app"
        # client string as human senders (as opposed to API/bot clients).
        sending_client = self.sending_client.name.lower()
        return (sending_client in ('zulipandroid', 'zulipios', 'zulipdesktop',
                                   'website', 'ios', 'android')) or (
                                       'desktop app' in sending_client)
    @staticmethod
    def content_has_attachment(content):
        # type: (Text) -> Optional[Match]
        # Returns the regex Match (truthy) if the raw content references an
        # uploaded file, else None.
        return re.search(r'[/\-]user[\-_]uploads[/\.-]', content)
    @staticmethod
    def content_has_image(content):
        # type: (Text) -> bool
        # True when an uploaded file with a known image extension is linked.
        return bool(re.search(r'[/\-]user[\-_]uploads[/\.-]\S+\.(bmp|gif|jpg|jpeg|png|webp)', content, re.IGNORECASE))
    @staticmethod
    def content_has_link(content):
        # type: (Text) -> bool
        return ('http://' in content or
                'https://' in content or
                '/user_uploads' in content or
                (settings.ENABLE_FILE_LINKS and 'file:///' in content))
    @staticmethod
    def is_status_message(content, rendered_content):
        # type: (Text, Text) -> bool
        """
        Returns True if content and rendered_content are from 'me_message'
        """
        if content.startswith('/me ') and '\n' not in content:
            # A single-paragraph rendering means the /me text had no
            # block-level markup.
            if rendered_content.startswith('<p>') and rendered_content.endswith('</p>'):
                return True
        return False
    def update_calculated_fields(self):
        # type: () -> None
        # TODO: rendered_content could also be considered a calculated field
        content = self.content
        self.has_attachment = bool(Message.content_has_attachment(content))
        self.has_image = bool(Message.content_has_image(content))
        self.has_link = bool(Message.content_has_link(content))
@receiver(pre_save, sender=Message)
def pre_save_message(sender, **kwargs):
    # type: (Any, **Any) -> None
    # Recompute has_attachment/has_image/has_link whenever a message's
    # content is (or may be) changing; update_fields=None means a full save.
    if kwargs['update_fields'] is None or "content" in kwargs['update_fields']:
        message = kwargs['instance']
        message.update_calculated_fields()
def get_context_for_message(message):
    # type: (Message) -> Sequence[Message]
    # TODO: Change return type to QuerySet[Message]
    # Up to 10 earlier messages in the same topic (same recipient and
    # subject) from the preceding 15 minutes, newest first.
    return Message.objects.filter(
        recipient_id=message.recipient_id,
        subject=message.subject,
        id__lt=message.id,
        pub_date__gt=message.pub_date - timedelta(minutes=15),
    ).order_by('-id')[:10]
# Run flush_message (defined elsewhere in this module) after every
# Message save.
post_save.connect(flush_message, sender=Message)
class Reaction(ModelReprMixin, models.Model):
    """An emoji reaction by one user on one message."""
    user_profile = models.ForeignKey(UserProfile) # type: UserProfile
    message = models.ForeignKey(Message) # type: Message
    emoji_name = models.TextField() # type: Text
    class Meta(object):
        # A user may react to a given message with a given emoji only once.
        unique_together = ("user_profile", "message", "emoji_name")
    @staticmethod
    def get_raw_db_rows(needed_ids):
        # type: (List[int]) -> List[Dict[str, Any]]
        # Flat dict rows (not model instances) for the reactions on the
        # given message ids; consumed by Message.get_raw_db_rows.
        fields = ['message_id', 'emoji_name', 'user_profile__email',
                  'user_profile__id', 'user_profile__full_name']
        return Reaction.objects.filter(message_id__in=needed_ids).values(*fields)
# Whenever a message is sent, for each user current subscribed to the
# corresponding Recipient object, we add a row to the UserMessage
# table, which has columns (id, user profile id, message id,
# flags) indicating which messages each user has received. This table
# allows us to quickly query any user's last 1000 messages to generate
# the home view.
#
# Additionally, the flags field stores metadata like whether the user
# has read the message, starred the message, collapsed or was
# mentioned the message, etc.
#
# UserMessage is the largest table in a Zulip installation, even
# though each row is only 4 integers.
class UserMessage(ModelReprMixin, models.Model):
    """Per-user receipt and flag state for one message (see comment above)."""
    user_profile = models.ForeignKey(UserProfile) # type: UserProfile
    message = models.ForeignKey(Message) # type: Message
    # We're not using the archived field for now, but create it anyway
    # since this table will be an unpleasant one to do schema changes
    # on later
    ALL_FLAGS = ['read', 'starred', 'collapsed', 'mentioned', 'wildcard_mentioned',
                 'summarize_in_home', 'summarize_in_stream', 'force_expand', 'force_collapse',
                 'has_alert_word', "historical", 'is_me_message']
    # Bitmask over ALL_FLAGS (bit i corresponds to ALL_FLAGS[i]); see
    # parse_usermessage_flags for decoding a raw integer value.
    flags = BitField(flags=ALL_FLAGS, default=0) # type: BitHandler
    class Meta(object):
        unique_together = ("user_profile", "message")
    def __unicode__(self):
        # type: () -> Text
        display_recipient = get_display_recipient(self.message.recipient)
        return u"<UserMessage: %s / %s (%s)>" % (display_recipient, self.user_profile.email, self.flags_list())
    def flags_list(self):
        # type: () -> List[str]
        # Names of the flags currently set on this row.
        return [flag for flag in self.flags.keys() if getattr(self.flags, flag).is_set]
def parse_usermessage_flags(val):
    # type: (int) -> List[str]
    """Decode a raw UserMessage.flags bitmask into a list of flag names."""
    # Bit i of `val` corresponds to UserMessage.ALL_FLAGS[i].
    return [flag for i, flag in enumerate(UserMessage.ALL_FLAGS)
            if val & (1 << i)]
class Attachment(ModelReprMixin, models.Model):
    """An uploaded file, linked to the messages that reference it."""
    file_name = models.TextField(db_index=True) # type: Text
    # path_id is a storage location agnostic representation of the path of the file.
    # If the path of a file is http://localhost:9991/user_uploads/a/b/abc/temp_file.py
    # then its path_id will be a/b/abc/temp_file.py.
    path_id = models.TextField(db_index=True) # type: Text
    owner = models.ForeignKey(UserProfile) # type: UserProfile
    realm = models.ForeignKey(Realm, blank=True, null=True) # type: Realm
    # NOTE(review): presumably True when the file may be served to anyone
    # in the realm rather than just message recipients -- confirm.
    is_realm_public = models.BooleanField(default=False) # type: bool
    messages = models.ManyToManyField(Message) # type: Manager
    create_time = models.DateTimeField(default=timezone.now, db_index=True) # type: datetime.datetime
    def __unicode__(self):
        # type: () -> Text
        return u"<Attachment: %s>" % (self.file_name,)
    def is_claimed(self):
        # type: () -> bool
        # An attachment is "claimed" once at least one message links to it.
        return self.messages.count() > 0
def get_old_unclaimed_attachments(weeks_ago):
    # type: (int) -> Sequence[Attachment]
    # TODO: Change return type to QuerySet[Attachment]
    """Attachments at least `weeks_ago` weeks old that no message references."""
    cutoff = timezone.now() - datetime.timedelta(weeks=weeks_ago)
    return Attachment.objects.filter(messages=None, create_time__lt=cutoff)
class Subscription(ModelReprMixin, models.Model):
    """One user's subscription to one Recipient (stream or huddle),
    with its per-subscription display and notification settings."""
    user_profile = models.ForeignKey(UserProfile) # type: UserProfile
    recipient = models.ForeignKey(Recipient) # type: Recipient
    # Whether the subscription is currently active.
    active = models.BooleanField(default=True) # type: bool
    in_home_view = models.NullBooleanField(default=True) # type: Optional[bool]
    DEFAULT_STREAM_COLOR = u"#c2c2c2"
    color = models.CharField(max_length=10, default=DEFAULT_STREAM_COLOR) # type: Text
    pin_to_top = models.BooleanField(default=False) # type: bool
    desktop_notifications = models.BooleanField(default=True) # type: bool
    audible_notifications = models.BooleanField(default=True) # type: bool
    # Combination desktop + audible notifications superseded by the
    # above.
    notifications = models.BooleanField(default=False) # type: bool
    class Meta(object):
        unique_together = ("user_profile", "recipient")
    def __unicode__(self):
        # type: () -> Text
        return u"<Subscription: %r -> %s>" % (self.user_profile, self.recipient)
@cache_with_key(user_profile_by_id_cache_key, timeout=3600*24*7)
def get_user_profile_by_id(uid):
    # type: (int) -> UserProfile
    """Fetch a UserProfile by primary key, cached for a week."""
    return UserProfile.objects.select_related().get(id=uid)
@cache_with_key(user_profile_by_email_cache_key, timeout=3600*24*7)
def get_user_profile_by_email(email):
    # type: (Text) -> UserProfile
    # Case-insensitive match on a stripped email, cached for a week.
    return UserProfile.objects.select_related().get(email__iexact=email.strip())
@cache_with_key(active_user_dicts_in_realm_cache_key, timeout=3600*24*7)
def get_active_user_dicts_in_realm(realm):
    # type: (Realm) -> List[Dict[str, Any]]
    # Cached flat dicts (active_user_dict_fields) for a realm's active users.
    return UserProfile.objects.filter(realm=realm, is_active=True) \
                              .values(*active_user_dict_fields)
@cache_with_key(active_bot_dicts_in_realm_cache_key, timeout=3600*24*7)
def get_active_bot_dicts_in_realm(realm):
    # type: (Realm) -> List[Dict[str, Any]]
    # Cached flat dicts (active_bot_dict_fields) for a realm's active bots.
    return UserProfile.objects.filter(realm=realm, is_active=True, is_bot=True) \
                              .values(*active_bot_dict_fields)
def get_owned_bot_dicts(user_profile, include_all_realm_bots_if_admin=True):
    # type: (UserProfile, bool) -> List[Dict[str, Any]]
    """Flat dicts describing the bots this user may administer.

    Realm admins see every bot in the realm (unless
    include_all_realm_bots_if_admin=False); other users see only the
    bots they own.
    """
    if user_profile.is_realm_admin and include_all_realm_bots_if_admin:
        result = get_active_bot_dicts_in_realm(user_profile.realm)
    else:
        result = UserProfile.objects.filter(realm=user_profile.realm, is_active=True, is_bot=True,
                                            bot_owner=user_profile).values(*active_bot_dict_fields)
    # TODO: Remove this import cycle
    from zerver.lib.avatar import get_avatar_url
    # Reshape the raw ORM dicts into the API's bot dict format.
    return [{'email': botdict['email'],
             'user_id': botdict['id'],
             'full_name': botdict['full_name'],
             'api_key': botdict['api_key'],
             'default_sending_stream': botdict['default_sending_stream__name'],
             'default_events_register_stream': botdict['default_events_register_stream__name'],
             'default_all_public_streams': botdict['default_all_public_streams'],
             'owner': botdict['bot_owner__email'],
             'avatar_url': get_avatar_url(botdict['avatar_source'], botdict['email']),
             }
            for botdict in result]
def get_prereg_user_by_email(email):
    # type: (Text) -> PreregistrationUser
    # A user can be invited many times, so only return the result of the latest
    # invite.
    # Raises PreregistrationUser.DoesNotExist when no invite matches.
    return PreregistrationUser.objects.filter(email__iexact=email.strip()).latest("invited_at")
def get_cross_realm_emails():
    # type: () -> Set[Text]
    """The bot emails configured as cross-realm (CROSS_REALM_BOT_EMAILS)."""
    return set(settings.CROSS_REALM_BOT_EMAILS)
# The Huddle class represents a group of individuals who have had a
# Group Private Message conversation together. The actual membership
# of the Huddle is stored in the Subscription table just like with
# Streams, and a hash of that list is stored in the huddle_hash field
# below, to support efficiently mapping from a set of users to the
# corresponding Huddle object.
class Huddle(models.Model):
    """Identity row for one group-PM conversation; see the comment above
    for how membership and the hash are derived."""
    # TODO: We should consider whether using
    # CommaSeparatedIntegerField would be better.
    huddle_hash = models.CharField(max_length=40, db_index=True, unique=True) # type: Text
def get_huddle_hash(id_list):
    # type: (List[int]) -> Text
    """Stable digest identifying the given set of user ids.

    Order and duplicates in `id_list` do not affect the result.
    """
    unique_ids = sorted(set(id_list))
    return make_safe_digest(",".join(map(str, unique_ids)))
def huddle_hash_cache_key(huddle_hash):
    # type: (Text) -> Text
    """Memcached key for caching a Huddle looked up by its hash."""
    return u"huddle_by_hash:{}".format(huddle_hash)
def get_huddle(id_list):
    # type: (List[int]) -> Huddle
    """Get or create the Huddle for this set of user ids (cached)."""
    huddle_hash = get_huddle_hash(id_list)
    return get_huddle_backend(huddle_hash, id_list)
@cache_with_key(lambda huddle_hash, id_list: huddle_hash_cache_key(huddle_hash), timeout=3600*24*7)
def get_huddle_backend(huddle_hash, id_list):
    # type: (Text, List[int]) -> Huddle
    # Cached only on huddle_hash; id_list is needed solely for first-time
    # creation, where the Recipient row and a Subscription for every
    # participant are set up atomically.
    (huddle, created) = Huddle.objects.get_or_create(huddle_hash=huddle_hash)
    if created:
        with transaction.atomic():
            recipient = Recipient.objects.create(type_id=huddle.id,
                                                 type=Recipient.HUDDLE)
            subs_to_create = [Subscription(recipient=recipient,
                                           user_profile=get_user_profile_by_id(user_profile_id))
                              for user_profile_id in id_list]
            Subscription.objects.bulk_create(subs_to_create)
    return huddle
def clear_database():
    # type: () -> None
    """Wipe memcached and delete ALL rows from the core tables.

    Destructive helper -- intended for test/development environments;
    never run this against data you care about.
    """
    pylibmc.Client(['127.0.0.1']).flush_all()
    model = None # type: Any
    for model in [Message, Stream, UserProfile, Recipient,
                  Realm, Subscription, Huddle, UserMessage, Client,
                  DefaultStream]:
        model.objects.all().delete()
    Session.objects.all().delete()
class UserActivity(models.Model):
    """Count and last-visit time of each API query per (user, client)."""
    user_profile = models.ForeignKey(UserProfile) # type: UserProfile
    client = models.ForeignKey(Client) # type: Client
    query = models.CharField(max_length=50, db_index=True) # type: Text
    count = models.IntegerField() # type: int
    last_visit = models.DateTimeField('last visit') # type: datetime.datetime
    class Meta(object):
        unique_together = ("user_profile", "client", "query")
class UserActivityInterval(models.Model):
    """Start/end of one recorded activity interval for a user."""
    user_profile = models.ForeignKey(UserProfile) # type: UserProfile
    start = models.DateTimeField('start time', db_index=True) # type: datetime.datetime
    end = models.DateTimeField('end time', db_index=True) # type: datetime.datetime
class UserPresence(models.Model):
    """Last-reported activity state for one (user, client) pair.

    `timestamp` records when the status last changed.
    """
    user_profile = models.ForeignKey(UserProfile) # type: UserProfile
    client = models.ForeignKey(Client) # type: Client
    # Valid statuses
    ACTIVE = 1
    IDLE = 2
    timestamp = models.DateTimeField('presence changed') # type: datetime.datetime
    status = models.PositiveSmallIntegerField(default=ACTIVE) # type: int
    @staticmethod
    def status_to_string(status):
        # type: (int) -> str
        # Note: implicitly returns None for any value other than
        # ACTIVE/IDLE; callers are expected to pass a valid status.
        if status == UserPresence.ACTIVE:
            return 'active'
        elif status == UserPresence.IDLE:
            return 'idle'
    @staticmethod
    def get_status_dict_by_realm(realm_id):
        # type: (int) -> defaultdict[Any, Dict[Any, Any]]
        """Build {email: {client_name: presence dict}} for the realm's
        active, human (non-bot, non-mirror-dummy excluded downstream) users."""
        user_statuses = defaultdict(dict) # type: defaultdict[Any, Dict[Any, Any]]
        query = UserPresence.objects.filter(
            user_profile__realm_id=realm_id,
            user_profile__is_active=True,
            user_profile__is_bot=False
        ).values(
            'client__name',
            'status',
            'timestamp',
            'user_profile__email',
            'user_profile__id',
            'user_profile__enable_offline_push_notifications',
            'user_profile__is_mirror_dummy',
        )
        # Ids of users in this realm with a registered mobile push device.
        # Bug fix: this query previously filtered on the hard-coded realm
        # id 1 instead of the realm_id argument, so push-device state was
        # wrong for every other realm.  A set makes the membership test
        # below O(1) instead of O(n) per presence row.
        mobile_user_ids = set(row['user'] for row in PushDeviceToken.objects.filter(
            user__realm_id=realm_id,
            user__is_active=True,
            user__is_bot=False,
        ).distinct("user").values("user"))
        for row in query:
            info = UserPresence.to_presence_dict(
                client_name=row['client__name'],
                status=row['status'],
                dt=row['timestamp'],
                push_enabled=row['user_profile__enable_offline_push_notifications'],
                has_push_devices=row['user_profile__id'] in mobile_user_ids,
                is_mirror_dummy=row['user_profile__is_mirror_dummy'],
            )
            user_statuses[row['user_profile__email']][row['client__name']] = info
        return user_statuses
    @staticmethod
    def to_presence_dict(client_name=None, status=None, dt=None, push_enabled=None,
                         has_push_devices=None, is_mirror_dummy=None):
        # type: (Optional[Text], Optional[int], Optional[datetime.datetime], Optional[bool], Optional[bool], Optional[bool]) -> Dict[str, Any]
        """Serialize one presence record into the wire format."""
        presence_val = UserPresence.status_to_string(status)
        timestamp = datetime_to_timestamp(dt)
        return dict(
            client=client_name,
            status=presence_val,
            timestamp=timestamp,
            # Only "pushable" if push is enabled AND a device is registered.
            pushable=(push_enabled and has_push_devices),
        )
    def to_dict(self):
        # type: () -> Dict[str, Any]
        return UserPresence.to_presence_dict(
            client_name=self.client.name,
            status=self.status,
            dt=self.timestamp
        )
    @staticmethod
    def status_from_string(status):
        # type: (NonBinaryStr) -> Optional[int]
        """Inverse of status_to_string; returns None for unknown strings."""
        if status == 'active':
            status_val = UserPresence.ACTIVE
        elif status == 'idle':
            status_val = UserPresence.IDLE
        else:
            status_val = None
        return status_val
    class Meta(object):
        unique_together = ("user_profile", "client")
class DefaultStream(models.Model):
    """Marks `stream` as one of `realm`'s default streams."""
    realm = models.ForeignKey(Realm) # type: Realm
    stream = models.ForeignKey(Stream) # type: Stream
    class Meta(object):
        unique_together = ("realm", "stream")
class Referral(models.Model):
    """Records which user referred which email address, and when."""
    user_profile = models.ForeignKey(UserProfile) # type: UserProfile
    email = models.EmailField(blank=False, null=False) # type: Text
    timestamp = models.DateTimeField(auto_now_add=True, null=False) # type: datetime.datetime
# This table only gets used on Zulip Voyager instances
# For reasons of deliverability (and sending from multiple email addresses),
# we will still send from mandrill when we send things from the (staging.)zulip.com install
class ScheduledJob(models.Model):
    """A deferred job (currently only emails) to run at a scheduled time;
    see the deliverability comment above."""
    scheduled_timestamp = models.DateTimeField(auto_now_add=False, null=False) # type: datetime.datetime
    type = models.PositiveSmallIntegerField() # type: int
    # Valid types are {email}
    # for EMAIL, filter_string is recipient_email
    EMAIL = 1
    # JSON representation of the job's data. Be careful, as we are not relying on Django to do validation
    data = models.TextField() # type: Text
    # Kind of like a ForeignKey, but the referenced table is determined by type.
    filter_id = models.IntegerField(null=True) # type: Optional[int]
    filter_string = models.CharField(max_length=100) # type: Text
|
niftynei/zulip
|
zerver/models.py
|
Python
|
apache-2.0
| 57,696
|
from analog.exceptions import UnknownLogKind
from analog.settings import KINDS
class LogEntryKindMap:
    """
    A helper class for transitioning old code.

    Allows looking up log entry kinds by "enumish" name, i.e.
    ``LogEntryKind.OTHER`` would map to the "other" kind's ID.
    """
    def __getattr__(self, kind):
        """
        Get the kind ID for the given kind mnemonic.

        :param kind: Kind mnemonic.
        :type kind: str
        :return: int
        :raises analog.exceptions.UnknownLogKind: if no such kind exists.
        """
        # Underscore-prefixed names (including dunders like __deepcopy__)
        # must go through the regular attribute protocol rather than the
        # KINDS table, or introspection machinery would break.
        if kind.startswith('_'):
            return self.__getattribute__(kind)
        kind = str(kind).lower()
        if kind not in KINDS:
            raise UnknownLogKind(kind)
        return KINDS[kind]
|
andersinno/django-analog
|
analog/util.py
|
Python
|
mit
| 695
|
import json
import cherrypy
import cherrypy_cors
import datetime
from dateutil.parser import parse
from jinja2 import Environment, FileSystemLoader
from pymongo import MongoClient
import pymongo
class server:
    """CherryPy application serving the contextualization interface.

    Exposes the index page plus endpoints querying a local MongoDB
    collection (Twitter.Zika) of tweets.
    """

    def __init__(self):
        # Templates live one directory up from this script.
        self.env = Environment(loader=FileSystemLoader('../'))
        cherrypy_cors.install()

    @staticmethod
    def _day_bounds(date):
        """Parse `date` and return (start, end) datetimes covering that day.

        Using a timedelta fixes the crash the old code hit on the last
        day of a month: datetime(year, month, day + 1) is invalid for
        e.g. Jan 31 -> day 32.
        """
        parsedDate = parse(date)
        start = datetime.datetime(parsedDate.year, parsedDate.month, parsedDate.day)
        return start, start + datetime.timedelta(days=1)

    @cherrypy.expose
    def index(self, data=None):
        # Render the single-page app shell.
        view = self.env.get_template('app/index.html')
        return view.render(title="Contextualization Interface")

    @cherrypy.expose
    def getTweets(self, date, concept):
        """Return the text of one English tweet tagged `concept` on `date`."""
        start, end = self._day_bounds(date)
        # Handle Mongo Query
        client = MongoClient()
        db = client.Twitter
        cursor = db.Zika.find({"lang": "en", concept: "true",
                               "created_at": {"$gte": start, "$lt": end}}).limit(1)
        tweets = [document["text"] for document in cursor]
        if len(tweets) == 0:
            tweets = ["No tweets for this date"]
        return tweets[0]

    @cherrypy.expose
    def getCount(self, date, concept):
        """Return (as a string) the count of matching English tweets;
        concept == "total" counts all tweets for the day."""
        start, end = self._day_bounds(date)
        # print(...) is valid in both Python 2 and 3.
        print(concept)
        print(date)
        # Handle Mongo Query
        client = MongoClient()
        db = client.Twitter
        if concept == "total":
            count = db.Zika.count({"lang": "en",
                                   "created_at": {"$gte": start, "$lt": end}})
        else:
            count = db.Zika.count({"lang": "en", concept: "true",
                                   "created_at": {"$gte": start, "$lt": end}})
        return str(count)
# Start the CherryPy server; runtime configuration is read from server.cfg.
cherrypy.quickstart(server(), "", "server.cfg")
|
kearnsw/Twitt.IR
|
src/server.py
|
Python
|
gpl-3.0
| 2,260
|
"""Commands to query Minecraft service status and user information."""
import re
import plumeria.util.http as http
from plumeria.command import commands, CommandError
from plumeria.message import Response
from plumeria.util.ratelimit import rate_limit
# Third-party JSON endpoint summarizing Minecraft/Mojang service health.
MINECRAFT_STATUS_URL = "http://xpaw.ru/mcstatus/status.json"
# NOTE(review): this CloudFlare cookie is sent with the status request;
# presumably required by xpaw.ru -- confirm it is still needed.
HEADERS = {"Cookie": "__cfduid=d100ef4ed084175f2382f35613cb9c9dc1458598283"}
# Usernames: 1-30 characters of [A-Za-z0-9_].
VALID_NAME = re.compile("^[A-Za-z0-9_]{1,30}$")
# Matches a UUID either as 32 undashed hex digits or with optional dashes.
VALID_UUID = re.compile(
    "(?:[A-Fa-f0-9]{32}|[A-Fa-f0-9]{8}-?[A-Fa-f0-9]{4}-?[A-Fa-f0-9]{4}-?[A-Fa-f0-9]{4}-?[A-Fa-f0-9]{12})")
def validate_name(name):
    """Strip and validate a Minecraft username.

    Raises CommandError when the stripped name is not 1-30 word chars.
    """
    cleaned = name.strip()
    if VALID_NAME.match(cleaned):
        return cleaned
    raise CommandError("Invalid Minecraft name given")
def validate_uuid(uuid):
    """Extract a Minecraft UUID from `uuid` and return it with dashes removed.

    Raises CommandError when no UUID is found.
    """
    match = VALID_UUID.search(uuid)
    if not match:
        raise CommandError("Invalid UUID given")
    return match.group(0).replace("-", "")
async def name_to_uuid(name):
    """Resolve a Minecraft username (or UUID-containing string) to a UUID.

    If `name` already contains a UUID it is returned as matched (dashes
    preserved); otherwise mcapi.ca is queried.  Raises CommandError for
    invalid names or unknown users.
    """
    m = VALID_UUID.search(name)
    if m:
        return m.group(0)
    name = validate_name(name)
    r = await http.get("https://mcapi.ca/uuid/player/" + name)
    results = r.json()
    if len(results):
        return results[0]['uuid_formatted']
    else:
        raise CommandError("user not found")
@commands.create("minecraft status", "mcstatus", category="Games")
@rate_limit()
async def mc_status(message):
    """
    Get the status of Minecraft's services.

    Example::

        /mcstatus

    Response::

        Realms: OK, Login: OK, Skins: OK, Website: OK, Session: OK

    """
    r = await http.get(MINECRAFT_STATUS_URL, headers=HEADERS)
    data = r.json()['report']
    status = []
    # Turn each service's status into "Name: :ok:" or "Name: :warning: <title>".
    for name, v in data.items():
        text = v['title']
        if text == "Online":
            text = ":ok:"
        else:
            text = ":warning: " + text
        status.append("{}: {}".format(name.title(), text))
    return Response(", ".join(status))
@commands.create("minecraft uuid", "mcuuid", category="Games")
@rate_limit()
async def mc_uuid(message):
    """
    Get the UUID of a Minecraft user.
    """
    name = validate_name(message.content)
    try:
        return await name_to_uuid(name)
    except http.BadStatusCodeError as e:
        # 404 from the API means the player does not exist; anything
        # else is an upstream failure.
        if e.http_code == 404:
            raise CommandError("No user found by that name")
        raise CommandError("API returned an error code")
@commands.create("minecraft body", "mcbody", category="Games")
@rate_limit()
async def mc_body(message):
    """
    Get the rendered 3D body for a Minecraft user with his or her skin.
    Example::
    /mcbody sk89q
    """
    # Resolve the name first, then build the Crafatar render URL.
    uuid = await name_to_uuid(message.content)
    return "https://crafatar.com/renders/body/" + uuid
@commands.create("minecraft head", "mchead", category="Games")
@rate_limit()
async def mc_head(message):
    """
    Get the rendered 3D head for a Minecraft user with his or her skin.
    Example::
    /mchead sk89q
    """
    # Resolve the name first, then build the Crafatar render URL.
    uuid = await name_to_uuid(message.content)
    return "https://crafatar.com/renders/head/" + uuid
@commands.create("minecraft face", "mcface", category="Games")
@rate_limit()
async def mc_face(message):
    """
    Get the face for a Minecraft user with his or her skin.
    Example::
    /mcface sk89q
    """
    # Resolve the name first, then build the Crafatar avatar URL.
    uuid = await name_to_uuid(message.content)
    return "https://crafatar.com/avatars/" + uuid
@commands.create("minecraft skin", "mcskin", category="Games")
@rate_limit()
async def mc_skin(message):
    """
    Get the skin for a Minecraft user.
    Example::
    /mcskin sk89q
    """
    # Resolve the name first, then build the Crafatar skin URL.
    uuid = await name_to_uuid(message.content)
    return "https://crafatar.com/skins/" + uuid
@commands.create("minecraft cape", "mccape", category="Games")
@rate_limit()
async def mc_cape(message):
    """
    Get the cape for a Minecraft user.
    Example::
    /mccape sk89q
    """
    # Resolve the name first, then build the Crafatar cape URL.
    uuid = await name_to_uuid(message.content)
    return "https://crafatar.com/capes/" + uuid
def setup():
    """Register every Minecraft command with the global command registry."""
    commands.add(mc_status)
    commands.add(mc_uuid)
    commands.add(mc_body)
    commands.add(mc_head)
    commands.add(mc_face)
    commands.add(mc_skin)
    commands.add(mc_cape)
|
sk89q/Plumeria
|
orchard/minecraft.py
|
Python
|
mit
| 4,077
|
"""Test module for config.py."""
import sys
import os
import shutil
from tempfile import mkdtemp
from unittest import TestCase, main, TestLoader
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '../lib'))) # noqa
from quattordocbuild import config
class ConfigTest(TestCase):
    """Test class for config.py."""

    def setUp(self):
        """Set up temp dir for tests."""
        self.tmpdir = mkdtemp()

    def tearDown(self):
        """Remove temp dir."""
        shutil.rmtree(self.tmpdir)

    def test_check_repository_map(self):
        """Test check repository_map function."""
        # Missing/empty maps and maps missing either required key fail.
        self.assertFalse(config.check_repository_map(None))
        self.assertFalse(config.check_repository_map({}))
        self.assertFalse(config.check_repository_map({"test": {}}))
        self.assertFalse(config.check_repository_map({"test": {"sitesection": "test"}}))
        self.assertFalse(config.check_repository_map({"test": {"targets": ["test"]}}))
        repomap = {'test': {'sitesection': 'components', 'targets': ['/NCM/Component', 'components']}}
        self.assertTrue(config.check_repository_map(repomap))

    def test_build_repository_map(self):
        """Test build_repository_map function."""
        testdir1 = os.path.join(self.tmpdir, "repo")
        testdir2 = os.path.join(self.tmpdir, "repo1")
        os.makedirs(testdir1)
        os.makedirs(testdir2)
        # No config files at all -> failure.
        self.assertFalse(config.build_repository_map(self.tmpdir))
        # An empty config file in only one repo is still a failure.
        open(os.path.join(testdir1, config.cfgfile), 'a').close()
        self.assertFalse(config.build_repository_map(self.tmpdir))

    def test_read_config(self):
        """Test read_config function."""
        # Uses assertEqual: assertEquals is a deprecated alias (removed
        # in Python 3.12).
        testfile = os.path.join(self.tmpdir, config.cfgfile)
        with open(testfile, 'w') as fih:
            fih.write("\n")
        self.assertFalse(config.read_config(testfile))
        with open(testfile, 'a') as fih:
            fih.write("[docbuilder]\nsitesection=test\n")
        self.assertFalse(config.read_config(testfile))
        with open(testfile, 'a') as fih:
            fih.write("targets=test")
        self.assertEqual(config.read_config(testfile), {'sitesection': 'test',
                                                        'subdir': None,
                                                        'targets': ['test']})
        with open(testfile, 'a') as fih:
            fih.write(",test2")
        self.assertEqual(config.read_config(testfile), {'sitesection': 'test',
                                                        'subdir': None,
                                                        'targets': ['test', 'test2']})
        # A trailing comma/blank entry should be ignored.
        with open(testfile, 'a') as fih:
            fih.write(", \n")
        self.assertEqual(config.read_config(testfile), {'sitesection': 'test',
                                                        'subdir': None,
                                                        'targets': ['test', 'test2']})
        with open(testfile, 'a') as fih:
            fih.write("subdir=test3\n")
        self.assertEqual(config.read_config(testfile), {'sitesection': 'test',
                                                        'subdir': 'test3',
                                                        'targets': ['test', 'test2']})

    def suite(self):
        """Return all the testcases in this module."""
        return TestLoader().loadTestsFromTestCase(ConfigTest)
if __name__ == '__main__':
    # Run the tests when this module is executed directly.
    main()
|
jouvin/release
|
src/documentation_builder/test/config.py
|
Python
|
apache-2.0
| 3,456
|
# dispatch.py - command dispatching for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from i18n import _
import os, sys, atexit, signal, pdb, socket, errno, shlex, time, traceback, re
import difflib
import util, commands, hg, fancyopts, extensions, hook, error
import cmdutil, encoding
import ui as uimod
class request(object):
    """Container for the state of a single command invocation."""
    def __init__(self, args, ui=None, repo=None, fin=None, fout=None,
                 ferr=None):
        # Command-line arguments and optional pre-built ui/repo objects.
        self.args = args
        self.ui = ui
        self.repo = repo
        # Replacement standard streams (None means use the defaults).
        self.fin, self.fout, self.ferr = fin, fout, ferr
def run():
    "run the command in sys.argv"
    # Exit with the low byte of dispatch()'s return code (shell convention).
    sys.exit((dispatch(request(sys.argv[1:])) or 0) & 255)
def _getsimilar(symbols, value):
sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio()
# The cutoff for similarity here is pretty arbitrary. It should
# probably be investigated and tweaked.
return [s for s in symbols if sim(s) > 0.6]
def _formatparse(write, inst):
    """Write a user-facing description of a ParseError via `write`.

    Adds "did you mean" suggestions for unknown revset/fileset
    identifiers and flags the common leading-whitespace mistake.
    """
    similar = []
    if isinstance(inst, error.UnknownIdentifier):
        # make sure to check fileset first, as revset can invoke fileset
        similar = _getsimilar(inst.symbols, inst.function)
    if len(inst.args) > 1:
        # args is (message, location).
        write(_("hg: parse error at %s: %s\n") %
              (inst.args[1], inst.args[0]))
        if (inst.args[0][0] == ' '):
            write(_("unexpected leading whitespace\n"))
    else:
        write(_("hg: parse error: %s\n") % inst.args[0])
    if similar:
        if len(similar) == 1:
            write(_("(did you mean %r?)\n") % similar[0])
        else:
            ss = ", ".join(sorted(similar))
            write(_("(did you mean one of %s?)\n") % ss)
def dispatch(req):
    "run the command specified in req.args"
    # Pick the best available stderr so early setup errors can be
    # reported even before a ui object exists.
    if req.ferr:
        ferr = req.ferr
    elif req.ui:
        ferr = req.ui.ferr
    else:
        ferr = sys.stderr
    try:
        if not req.ui:
            req.ui = uimod.ui()
        # Honor --traceback as early as possible so even setup errors
        # produce a traceback.
        if '--traceback' in req.args:
            req.ui.setconfig('ui', 'traceback', 'on', '--traceback')
        # set ui streams from the request
        if req.fin:
            req.ui.fin = req.fin
        if req.fout:
            req.ui.fout = req.fout
        if req.ferr:
            req.ui.ferr = req.ferr
    except util.Abort, inst:
        ferr.write(_("abort: %s\n") % inst)
        if inst.hint:
            ferr.write(_("(%s)\n") % inst.hint)
        return -1
    except error.ParseError, inst:
        _formatparse(ferr.write, inst)
        return -1
    # Log the command line and, on exit, its result and duration.
    msg = ' '.join(' ' in a and repr(a) or a for a in req.args)
    starttime = time.time()
    ret = None
    try:
        ret = _runcatch(req)
        return ret
    finally:
        duration = time.time() - starttime
        req.ui.log("commandfinish", "%s exited %s after %0.2f seconds\n",
                   msg, ret or 0, duration)
def _runcatch(req):
    """Run the request inside a top-level exception net.

    Installs signal handlers, honors --debugger, dispatches the command,
    and converts every known exception type into a user-facing message
    plus a -1 return code (or a specific code where noted).  Unknown
    exceptions are reported -- blaming a likely third-party extension
    when one looks responsible -- and re-raised.
    """
    def catchterm(*args):
        raise error.SignalInterrupt

    ui = req.ui
    try:
        for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM':
            num = getattr(signal, name, None)
            if num:
                signal.signal(num, catchterm)
    except ValueError:
        pass # happens if called in a thread

    try:
        try:
            debugger = 'pdb'
            debugtrace = {
                'pdb' : pdb.set_trace
            }
            debugmortem = {
                'pdb' : pdb.post_mortem
            }

            # read --config before doing anything else
            # (e.g. to change trust settings for reading .hg/hgrc)
            cfgs = _parseconfig(req.ui, _earlygetopt(['--config'], req.args))

            if req.repo:
                # copy configs that were passed on the cmdline (--config) to
                # the repo ui
                for sec, name, val in cfgs:
                    req.repo.ui.setconfig(sec, name, val, source='--config')

            # if we are in HGPLAIN mode, then disable custom debugging
            debugger = ui.config("ui", "debugger")
            debugmod = pdb
            if not debugger or ui.plain():
                debugger = 'pdb'
            elif '--debugger' in req.args:
                # This import can be slow for fancy debuggers, so only
                # do it when absolutely necessary, i.e. when actual
                # debugging has been requested
                try:
                    debugmod = __import__(debugger)
                except ImportError:
                    pass # Leave debugmod = pdb

            debugtrace[debugger] = debugmod.set_trace
            debugmortem[debugger] = debugmod.post_mortem

            # enter the debugger before command execution
            if '--debugger' in req.args:
                ui.warn(_("entering debugger - "
                          "type c to continue starting hg or h for help\n"))

                if (debugger != 'pdb' and
                    debugtrace[debugger] == debugtrace['pdb']):
                    ui.warn(_("%s debugger specified "
                              "but its module was not found\n") % debugger)

                debugtrace[debugger]()
            try:
                return _dispatch(req)
            finally:
                ui.flush()
        except: # re-raises
            # enter the debugger when we hit an exception
            if '--debugger' in req.args:
                traceback.print_exc()
                debugmortem[debugger](sys.exc_info()[2])
            ui.traceback()
            raise

    # Global exception handling, alphabetically
    # Mercurial-specific first, followed by built-in and library exceptions
    except error.AmbiguousCommand, inst:
        ui.warn(_("hg: command '%s' is ambiguous:\n    %s\n") %
                (inst.args[0], " ".join(inst.args[1])))
    except error.ParseError, inst:
        _formatparse(ui.warn, inst)
        return -1
    except error.LockHeld, inst:
        if inst.errno == errno.ETIMEDOUT:
            reason = _('timed out waiting for lock held by %s') % inst.locker
        else:
            reason = _('lock held by %s') % inst.locker
        ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason))
    except error.LockUnavailable, inst:
        ui.warn(_("abort: could not lock %s: %s\n") %
                (inst.desc or inst.filename, inst.strerror))
    except error.CommandError, inst:
        if inst.args[0]:
            ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1]))
            commands.help_(ui, inst.args[0], full=False, command=True)
        else:
            ui.warn(_("hg: %s\n") % inst.args[1])
            commands.help_(ui, 'shortlist')
    except error.OutOfBandError, inst:
        ui.warn(_("abort: remote error:\n"))
        ui.warn(''.join(inst.args))
    except error.RepoError, inst:
        ui.warn(_("abort: %s!\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except error.ResponseError, inst:
        ui.warn(_("abort: %s") % inst.args[0])
        if not isinstance(inst.args[1], basestring):
            ui.warn(" %r\n" % (inst.args[1],))
        elif not inst.args[1]:
            ui.warn(_(" empty string\n"))
        else:
            ui.warn("\n%r\n" % util.ellipsis(inst.args[1]))
    except error.CensoredNodeError, inst:
        ui.warn(_("abort: file censored %s!\n") % inst)
    except error.RevlogError, inst:
        ui.warn(_("abort: %s!\n") % inst)
    except error.SignalInterrupt:
        ui.warn(_("killed!\n"))
    except error.UnknownCommand, inst:
        ui.warn(_("hg: unknown command '%s'\n") % inst.args[0])
        try:
            # check if the command is in a disabled extension
            # (but don't check for extensions themselves)
            commands.help_(ui, inst.args[0], unknowncmd=True)
        except error.UnknownCommand:
            suggested = False
            if len(inst.args) == 2:
                sim = _getsimilar(inst.args[1], inst.args[0])
                if sim:
                    ui.warn(_('(did you mean one of %s?)\n') %
                            ', '.join(sorted(sim)))
                    suggested = True
            if not suggested:
                commands.help_(ui, 'shortlist')
    except error.InterventionRequired, inst:
        ui.warn("%s\n" % inst)
        return 1
    except util.Abort, inst:
        ui.warn(_("abort: %s\n") % inst)
        if inst.hint:
            ui.warn(_("(%s)\n") % inst.hint)
    except ImportError, inst:
        ui.warn(_("abort: %s!\n") % inst)
        # the failing module name is the last word of the message
        m = str(inst).split()[-1]
        if m in "mpatch bdiff".split():
            ui.warn(_("(did you forget to compile extensions?)\n"))
        elif m in "zlib".split():
            ui.warn(_("(is your Python install correct?)\n"))
    except IOError, inst:
        if util.safehasattr(inst, "code"):
            # HTTPError-like: has a response code
            ui.warn(_("abort: %s\n") % inst)
        elif util.safehasattr(inst, "reason"):
            # URLError-like: has a reason attribute
            try: # usually it is in the form (errno, strerror)
                reason = inst.reason.args[1]
            except (AttributeError, IndexError):
                # it might be anything, for example a string
                reason = inst.reason
            if isinstance(reason, unicode):
                # SSLError of Python 2.7.9 contains a unicode
                reason = reason.encode(encoding.encoding, 'replace')
            ui.warn(_("abort: error: %s\n") % reason)
        elif (util.safehasattr(inst, "args")
              and inst.args and inst.args[0] == errno.EPIPE):
            if ui.debugflag:
                ui.warn(_("broken pipe\n"))
        elif getattr(inst, "strerror", None):
            if getattr(inst, "filename", None):
                ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename))
            else:
                ui.warn(_("abort: %s\n") % inst.strerror)
        else:
            raise
    except OSError, inst:
        if getattr(inst, "filename", None) is not None:
            ui.warn(_("abort: %s: '%s'\n") % (inst.strerror, inst.filename))
        else:
            ui.warn(_("abort: %s\n") % inst.strerror)
    except KeyboardInterrupt:
        try:
            ui.warn(_("interrupted!\n"))
        except IOError, inst:
            if inst.errno == errno.EPIPE:
                if ui.debugflag:
                    ui.warn(_("\nbroken pipe\n"))
            else:
                raise
    except MemoryError:
        ui.warn(_("abort: out of memory\n"))
    except SystemExit, inst:
        # Commands shouldn't sys.exit directly, but give a return code.
        # Just in case catch this and and pass exit code to caller.
        return inst.code
    except socket.error, inst:
        ui.warn(_("abort: %s\n") % inst.args[-1])
    except: # re-raises
        myver = util.version()
        # For compatibility checking, we discard the portion of the hg
        # version after the + on the assumption that if a "normal
        # user" is running a build with a + in it the packager
        # probably built from fairly close to a tag and anyone with a
        # 'make local' copy of hg (where the version number can be out
        # of date) will be clueful enough to notice the implausible
        # version number and try updating.
        compare = myver.split('+')[0]
        ct = tuplever(compare)
        worst = None, ct, ''
        for name, mod in extensions.extensions():
            testedwith = getattr(mod, 'testedwith', '')
            report = getattr(mod, 'buglink', _('the extension author.'))
            if not testedwith.strip():
                # We found an untested extension. It's likely the culprit.
                worst = name, 'unknown', report
                break

            # Never blame on extensions bundled with Mercurial.
            if testedwith == 'internal':
                continue

            tested = [tuplever(t) for t in testedwith.split()]
            if ct in tested:
                continue

            lower = [t for t in tested if t < ct]
            nearest = max(lower or tested)
            if worst[0] is None or nearest < worst[1]:
                worst = name, nearest, report
        if worst[0] is not None:
            name, testedwith, report = worst
            if not isinstance(testedwith, str):
                testedwith = '.'.join([str(c) for c in testedwith])
            warning = (_('** Unknown exception encountered with '
                         'possibly-broken third-party extension %s\n'
                         '** which supports versions %s of Mercurial.\n'
                         '** Please disable %s and try your action again.\n'
                         '** If that fixes the bug please report it to %s\n')
                       % (name, testedwith, name, report))
        else:
            warning = (_("** unknown exception encountered, "
                         "please report by visiting\n") +
                       _("** http://mercurial.selenic.com/wiki/BugTracker\n"))
        warning += ((_("** Python %s\n") % sys.version.replace('\n', '')) +
                    (_("** Mercurial Distributed SCM (version %s)\n") % myver) +
                    (_("** Extensions loaded: %s\n") %
                     ", ".join([x[0] for x in extensions.extensions()])))
        ui.log("commandexception", "%s\n%s\n", warning, traceback.format_exc())
        ui.warn(warning)
        raise

    return -1
def tuplever(v):
    """Turn a version string into an int tuple of its first two fields.

    Assertion: tuplever is only used for extension compatibility
    checking. Otherwise, the discarding of extra version fields is
    incorrect.  Unparseable input yields an empty tuple.
    """
    try:
        return tuple(int(part) for part in v.split('.')[:2])
    except ValueError:
        return tuple()
def aliasargs(fn, givenargs):
    """Combine an alias function's stored args with the user-given args.

    $n placeholders in the stored args are replaced by the matching entry
    of *givenargs*; entries consumed by a placeholder are removed from the
    remainder that gets appended.
    """
    args = getattr(fn, 'args', [])
    if args:
        cmd = ' '.join(map(util.shellquote, args))

        nums = []
        def replacer(m):
            # NOTE(review): the regex below also matches '$$' (group == '$'),
            # which would make this int() raise ValueError -- confirm stored
            # alias args never contain '$$'.
            num = int(m.group(1)) - 1
            nums.append(num)
            if num < len(givenargs):
                return givenargs[num]
            raise util.Abort(_('too few arguments for command alias'))
        cmd = re.sub(r'\$(\d+|\$)', replacer, cmd)
        givenargs = [x for i, x in enumerate(givenargs)
                     if i not in nums]
        args = shlex.split(cmd)
    return args + givenargs
def aliasinterpolate(name, args, cmd):
    '''interpolate args into cmd for shell aliases

    This also handles $0, $@ and "$@".
    '''
    # util.interpolate can't deal with "$@" (with quotes) because it's only
    # built to match prefix + patterns.
    replacemap = dict(('$%d' % (i + 1), arg) for i, arg in enumerate(args))
    replacemap['$0'] = name
    replacemap['$$'] = '$'
    replacemap['$@'] = ' '.join(args)
    # Typical Unix shells interpolate "$@" (with quotes) as all the positional
    # parameters, separated out into words. Emulate the same behavior here by
    # quoting the arguments individually. POSIX shells will then typically
    # tokenize each argument into exactly one word.
    replacemap['"$@"'] = ' '.join(util.shellquote(arg) for arg in args)
    # escape '\$' for regex
    # ('$' is the only regex metacharacter appearing in the keys above)
    regex = '|'.join(replacemap.keys()).replace('$', r'\$')
    r = re.compile(regex)
    return r.sub(lambda x: replacemap[x.group()], cmd)
class cmdalias(object):
    """A callable command-table entry built from an [alias] definition.

    Definitions starting with '!' become shell aliases run via ui.system;
    other definitions resolve to an existing command plus stored arguments.
    Invalid definitions are remembered in self.badalias and reported only
    when the alias is actually invoked.
    """

    def __init__(self, name, definition, cmdtable):
        self.name = self.cmd = name
        self.cmdname = ''
        self.definition = definition
        self.fn = None
        self.args = []
        self.opts = []
        self.help = ''
        self.norepo = True
        self.optionalrepo = False
        self.badalias = None
        self.unknowncmd = False

        # detect whether this alias shadows an existing command; if so,
        # remember the shadowed command's canonical table key in self.cmd
        try:
            aliases, entry = cmdutil.findcmd(self.name, cmdtable)
            for alias, e in cmdtable.iteritems():
                if e is entry:
                    self.cmd = alias
                    break
            self.shadows = True
        except error.UnknownCommand:
            self.shadows = False

        if not self.definition:
            self.badalias = _("no definition for alias '%s'") % self.name
            return

        if self.definition.startswith('!'):
            # shell alias: the definition (minus '!') is a shell command line
            self.shell = True
            def fn(ui, *args):
                env = {'HG_ARGS': ' '.join((self.name,) + args)}
                def _checkvar(m):
                    if m.groups()[0] == '$':
                        return m.group()
                    elif int(m.groups()[0]) <= len(args):
                        return m.group()
                    else:
                        ui.debug("No argument found for substitution "
                                 "of %i variable in alias '%s' definition."
                                 % (int(m.groups()[0]), self.name))
                        return ''
                cmd = re.sub(r'\$(\d+|\$)', _checkvar, self.definition[1:])
                cmd = aliasinterpolate(self.name, args, cmd)
                return ui.system(cmd, environ=env)
            self.fn = fn
            return

        try:
            args = shlex.split(self.definition)
        except ValueError, inst:
            self.badalias = (_("error in definition for alias '%s': %s")
                             % (self.name, inst))
            return
        self.cmdname = cmd = args.pop(0)
        args = map(util.expandpath, args)

        # early options would be consumed before alias expansion, so they
        # are not allowed inside alias definitions
        for invalidarg in ("--cwd", "-R", "--repository", "--repo", "--config"):
            if _earlygetopt([invalidarg], args):
                self.badalias = (_("error in definition for alias '%s': %s may "
                                   "only be given on the command line")
                                 % (self.name, invalidarg))
                return

        try:
            tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1]
            if len(tableentry) > 2:
                self.fn, self.opts, self.help = tableentry
            else:
                self.fn, self.opts = tableentry

            self.args = aliasargs(self.fn, args)
            if cmd not in commands.norepo.split(' '):
                self.norepo = False
            if cmd in commands.optionalrepo.split(' '):
                self.optionalrepo = True
            if self.help.startswith("hg " + cmd):
                # drop prefix in old-style help lines so hg shows the alias
                self.help = self.help[4 + len(cmd):]
            self.__doc__ = self.fn.__doc__

        except error.UnknownCommand:
            self.badalias = (_("alias '%s' resolves to unknown command '%s'")
                             % (self.name, cmd))
            self.unknowncmd = True
        except error.AmbiguousCommand:
            self.badalias = (_("alias '%s' resolves to ambiguous command '%s'")
                             % (self.name, cmd))

    def __call__(self, ui, *args, **opts):
        """Run the alias, aborting with the stored message if it is bad."""
        if self.badalias:
            hint = None
            if self.unknowncmd:
                try:
                    # check if the command is in a disabled extension
                    cmd, ext = extensions.disabledcmd(ui, self.cmdname)[:2]
                    hint = _("'%s' is provided by '%s' extension") % (cmd, ext)
                except error.UnknownCommand:
                    pass
            raise util.Abort(self.badalias, hint=hint)
        if self.shadows:
            ui.debug("alias '%s' shadows command '%s'\n" %
                     (self.name, self.cmdname))

        if util.safehasattr(self, 'shell'):
            return self.fn(ui, *args, **opts)
        else:
            try:
                return util.checksignature(self.fn)(ui, *args, **opts)
            except error.SignatureError:
                args = ' '.join([self.cmdname] + self.args)
                ui.debug("alias '%s' expands to '%s'\n" % (self.name, args))
                raise
def addaliases(ui, cmdtable):
    """Add the [alias] definitions from *ui* into *cmdtable* (in place)."""
    # aliases are processed after extensions have been loaded, so they
    # may use extension commands. Aliases can also use other alias definitions,
    # but only if they have been defined prior to the current definition.
    for alias, definition in ui.configitems('alias'):
        aliasdef = cmdalias(alias, definition, cmdtable)

        try:
            # skip re-registering an identical alias definition
            olddef = cmdtable[aliasdef.cmd][0]
            if olddef.definition == aliasdef.definition:
                continue
        except (KeyError, AttributeError):
            # definition might not exist or it might not be a cmdalias
            pass

        cmdtable[aliasdef.name] = (aliasdef, aliasdef.opts, aliasdef.help)
        if aliasdef.norepo:
            commands.norepo += ' %s' % alias
        if aliasdef.optionalrepo:
            commands.optionalrepo += ' %s' % alias
def _parse(ui, args):
    """Parse global options, the command name, and command options.

    Returns (cmd, func, remaining args, global options dict, command
    options dict).  Raises error.CommandError on bad option syntax.
    """
    options = {}
    cmdoptions = {}
    try:
        args = fancyopts.fancyopts(args, commands.globalopts, options)
    except fancyopts.getopt.GetoptError, inst:
        raise error.CommandError(None, inst)

    if args:
        cmd, args = args[0], args[1:]
        aliases, entry = cmdutil.findcmd(cmd, commands.table,
                                         ui.configbool("ui", "strict"))
        cmd = aliases[0]
        args = aliasargs(entry[0], args)
        # [defaults] config prepends default arguments for the command
        defaults = ui.config("defaults", cmd)
        if defaults:
            args = map(util.expandpath, shlex.split(defaults)) + args
        c = list(entry[1])
    else:
        cmd = None
        c = []

    # combine global options into local
    for o in commands.globalopts:
        c.append((o[0], o[1], options[o[1]], o[3]))

    try:
        args = fancyopts.fancyopts(args, c, cmdoptions, True)
    except fancyopts.getopt.GetoptError, inst:
        raise error.CommandError(cmd, inst)

    # separate global options back out
    for o in commands.globalopts:
        n = o[1]
        options[n] = cmdoptions[n]
        del cmdoptions[n]

    return (cmd, cmd and entry[0] or None, args, options, cmdoptions)
def _parseconfig(ui, config):
    """parse the --config options from the command line

    Each entry must look like section.name=value; every parsed triple is
    applied to *ui* and collected into the returned list.
    """
    parsed = []
    for item in config:
        try:
            key, value = item.split('=', 1)
            section, name = key.split('.', 1)
            if not (section and name):
                raise IndexError
            ui.setconfig(section, name, value, '--config')
            parsed.append((section, name, value))
        except (IndexError, ValueError):
            raise util.Abort(_('malformed --config option: %r '
                               '(use --config section.name=value)') % item)
    return parsed
def _earlygetopt(aliases, args):
"""Return list of values for an option (or aliases).
The values are listed in the order they appear in args.
The options and values are removed from args.
>>> args = ['x', '--cwd', 'foo', 'y']
>>> _earlygetopt(['--cwd'], args), args
(['foo'], ['x', 'y'])
>>> args = ['x', '--cwd=bar', 'y']
>>> _earlygetopt(['--cwd'], args), args
(['bar'], ['x', 'y'])
>>> args = ['x', '-R', 'foo', 'y']
>>> _earlygetopt(['-R'], args), args
(['foo'], ['x', 'y'])
>>> args = ['x', '-Rbar', 'y']
>>> _earlygetopt(['-R'], args), args
(['bar'], ['x', 'y'])
"""
try:
argcount = args.index("--")
except ValueError:
argcount = len(args)
shortopts = [opt for opt in aliases if len(opt) == 2]
values = []
pos = 0
while pos < argcount:
fullarg = arg = args[pos]
equals = arg.find('=')
if equals > -1:
arg = arg[:equals]
if arg in aliases:
del args[pos]
if equals > -1:
values.append(fullarg[equals + 1:])
argcount -= 1
else:
if pos + 1 >= argcount:
# ignore and let getopt report an error if there is no value
break
values.append(args.pop(pos))
argcount -= 2
elif arg[:2] in shortopts:
# short option can have no following space, e.g. hg log -Rfoo
values.append(args.pop(pos)[2:])
argcount -= 1
else:
pos += 1
return values
def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions):
    """Run the command thunk *d*, wrapped in the pre-/post-command hooks."""
    # run pre-hook, and abort if it fails
    hook.hook(lui, repo, "pre-%s" % cmd, True, args=" ".join(fullargs),
              pats=cmdpats, opts=cmdoptions)
    ret = _runcommand(ui, options, cmd, d)
    # run post-hook, passing command result
    hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs),
              result=ret, pats=cmdpats, opts=cmdoptions)
    return ret
def _getlocal(ui, rpath):
    """Return (path, local ui object) for the given target path.

    Takes paths in [cwd]/.hg/hgrc into account.
    """
    try:
        wd = os.getcwd()
    except OSError, e:
        raise util.Abort(_("error getting current working directory: %s") %
                         e.strerror)
    path = cmdutil.findrepo(wd) or ""
    if not path:
        lui = ui
    else:
        lui = ui.copy()
        lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)

    if rpath and rpath[-1]:
        # -R/--repository overrides the detected repo; start from a fresh
        # copy of ui so the first repo's hgrc settings do not leak in
        path = lui.expandpath(rpath[-1])
        lui = ui.copy()
        lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)

    return path, lui
def _checkshellalias(lui, ui, args, precheck=True):
    """Return the function to run the shell alias, if it is required

    'precheck' is whether this function is invoked before adding
    aliases or not.
    """
    options = {}

    try:
        args = fancyopts.fancyopts(args, commands.globalopts, options)
    except fancyopts.getopt.GetoptError:
        return

    if not args:
        return

    if precheck:
        # work on a copy of the command table, and remember the module-level
        # norepo/optionalrepo strings so they can be restored if the command
        # turns out not to be a shell alias
        strict = True
        norepo = commands.norepo
        optionalrepo = commands.optionalrepo
        def restorecommands():
            commands.norepo = norepo
            commands.optionalrepo = optionalrepo
        cmdtable = commands.table.copy()
        addaliases(lui, cmdtable)
    else:
        strict = False
        def restorecommands():
            pass
        cmdtable = commands.table

    cmd = args[0]
    try:
        aliases, entry = cmdutil.findcmd(cmd, cmdtable, strict)
    except (error.AmbiguousCommand, error.UnknownCommand):
        restorecommands()
        return

    cmd = aliases[0]
    fn = entry[0]

    # the 'shell' attribute is only set on shell-alias functions
    if cmd and util.safehasattr(fn, 'shell'):
        d = lambda: fn(ui, *args[1:])
        return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d,
                                  [], {})

    restorecommands()
# names of extensions already configured by a previous _dispatch call
_loaded = set()
def _dispatch(req):
    """Resolve and run the command in req.args.

    Handles --cwd/-R, shell aliases, extension loading, option parsing,
    per-option ui configuration, and repository creation before delegating
    to runcommand().
    """
    args = req.args
    ui = req.ui

    # check for cwd
    cwd = _earlygetopt(['--cwd'], args)
    if cwd:
        os.chdir(cwd[-1])

    rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
    path, lui = _getlocal(ui, rpath)

    # Now that we're operating in the right directory/repository with
    # the right config settings, check for shell aliases
    shellaliasfn = _checkshellalias(lui, ui, args)
    if shellaliasfn:
        return shellaliasfn()

    # Configure extensions in phases: uisetup, extsetup, cmdtable, and
    # reposetup. Programs like TortoiseHg will call _dispatch several
    # times so we keep track of configured extensions in _loaded.
    extensions.loadall(lui)
    exts = [ext for ext in extensions.extensions() if ext[0] not in _loaded]
    # Propagate any changes to lui.__class__ by extensions
    ui.__class__ = lui.__class__

    # (uisetup and extsetup are handled in extensions.loadall)

    for name, module in exts:
        cmdtable = getattr(module, 'cmdtable', {})
        overrides = [cmd for cmd in cmdtable if cmd in commands.table]
        if overrides:
            ui.warn(_("extension '%s' overrides commands: %s\n")
                    % (name, " ".join(overrides)))
        commands.table.update(cmdtable)
        _loaded.add(name)

    # (reposetup is handled in hg.repository)

    addaliases(lui, commands.table)

    if not lui.configbool("ui", "strict"):
        # All aliases and commands are completely defined, now.
        # Check abbreviation/ambiguity of shell alias again, because shell
        # alias may cause failure of "_parse" (see issue4355)
        shellaliasfn = _checkshellalias(lui, ui, args, precheck=False)
        if shellaliasfn:
            return shellaliasfn()

    # check for fallback encoding
    fallback = lui.config('ui', 'fallbackencoding')
    if fallback:
        encoding.fallbackencoding = fallback

    fullargs = args
    cmd, func, args, options, cmdoptions = _parse(lui, args)

    # these options must appear un-abbreviated because they were already
    # consumed by _earlygetopt above
    if options["config"]:
        raise util.Abort(_("option --config may not be abbreviated!"))
    if options["cwd"]:
        raise util.Abort(_("option --cwd may not be abbreviated!"))
    if options["repository"]:
        raise util.Abort(_(
            "option -R has to be separated from other options (e.g. not -qR) "
            "and --repository may only be abbreviated as --repo!"))

    if options["encoding"]:
        encoding.encoding = options["encoding"]
    if options["encodingmode"]:
        encoding.encodingmode = options["encodingmode"]
    if options["time"]:
        def get_times():
            t = os.times()
            if t[4] == 0.0: # Windows leaves this as zero, so use time.clock()
                t = (t[0], t[1], t[2], t[3], time.clock())
            return t
        s = get_times()
        def print_time():
            t = get_times()
            ui.warn(_("time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
                    (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
        atexit.register(print_time)

    # apply verbosity/interactivity options to every relevant ui object
    uis = set([ui, lui])

    if req.repo:
        uis.add(req.repo.ui)

    if options['verbose'] or options['debug'] or options['quiet']:
        for opt in ('verbose', 'debug', 'quiet'):
            val = str(bool(options[opt]))
            for ui_ in uis:
                ui_.setconfig('ui', opt, val, '--' + opt)

    if options['traceback']:
        for ui_ in uis:
            ui_.setconfig('ui', 'traceback', 'on', '--traceback')

    if options['noninteractive']:
        for ui_ in uis:
            ui_.setconfig('ui', 'interactive', 'off', '-y')

    if cmdoptions.get('insecure', False):
        for ui_ in uis:
            ui_.setconfig('web', 'cacerts', '!', '--insecure')

    if options['version']:
        return commands.version_(ui)
    if options['help']:
        return commands.help_(ui, cmd, command=True)
    elif not cmd:
        return commands.help_(ui, 'shortlist')

    repo = None
    cmdpats = args[:]
    if cmd not in commands.norepo.split():
        # use the repo from the request only if we don't have -R
        if not rpath and not cwd:
            repo = req.repo

        if repo:
            # set the descriptors of the repo ui to those of ui
            repo.ui.fin = ui.fin
            repo.ui.fout = ui.fout
            repo.ui.ferr = ui.ferr
        else:
            try:
                repo = hg.repository(ui, path=path)
                if not repo.local():
                    raise util.Abort(_("repository '%s' is not local") % path)
                repo.ui.setconfig("bundle", "mainreporoot", repo.root, 'repo')
            except error.RequirementError:
                raise
            except error.RepoError:
                if cmd not in commands.optionalrepo.split():
                    if (cmd in commands.inferrepo.split() and
                        args and not path): # try to infer -R from command args
                        repos = map(cmdutil.findrepo, args)
                        guess = repos[0]
                        if guess and repos.count(guess) == len(repos):
                            req.args = ['--repository', guess] + fullargs
                            return _dispatch(req)
                    if not path:
                        raise error.RepoError(_("no repository found in '%s'"
                                                " (.hg not found)")
                                              % os.getcwd())
                    raise
        if repo:
            ui = repo.ui
            if options['hidden']:
                repo = repo.unfiltered()
        args.insert(0, repo)
    elif rpath:
        ui.warn(_("warning: --repository ignored\n"))

    msg = ' '.join(' ' in a and repr(a) or a for a in fullargs)
    ui.log("command", '%s\n', msg)
    d = lambda: util.checksignature(func)(ui, *args, **cmdoptions)
    try:
        return runcommand(lui, repo, cmd, fullargs, ui, options, d,
                          cmdpats, cmdoptions)
    finally:
        if repo and repo != req.repo:
            # close repos we created ourselves; the request's repo is
            # owned by the caller
            repo.close()
def lsprofile(ui, func, fp):
    """Run *func* under the lsprof profiler, writing a report to *fp*.

    Report format, sort field, and row limits come from the [profiling]
    config section.  Returns func()'s return value.
    """
    format = ui.config('profiling', 'format', default='text')
    field = ui.config('profiling', 'sort', default='inlinetime')
    limit = ui.configint('profiling', 'limit', default=30)
    climit = ui.configint('profiling', 'nested', default=5)

    if format not in ['text', 'kcachegrind']:
        ui.warn(_("unrecognized profiling format '%s'"
                  " - Ignored\n") % format)
        format = 'text'

    try:
        from mercurial import lsprof
    except ImportError:
        raise util.Abort(_(
            'lsprof not available - install from '
            'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
    p = lsprof.Profiler()
    p.enable(subcalls=True)
    try:
        return func()
    finally:
        # the report is emitted in the finally block so it appears even
        # when func() raises
        p.disable()

        if format == 'kcachegrind':
            import lsprofcalltree
            calltree = lsprofcalltree.KCacheGrind(p)
            calltree.output(fp)
        else:
            # format == 'text'
            stats = lsprof.Stats(p.getstats())
            stats.sort(field)
            stats.pprint(limit=limit, file=fp, climit=climit)
def statprofile(ui, func, fp):
    """Run *func* under the statprof sampling profiler, reporting to *fp*.

    The sampling frequency comes from profiling.freq; non-positive values
    are warned about and the profiler's default frequency is used.
    """
    try:
        import statprof
    except ImportError:
        raise util.Abort(_(
            'statprof not available - install using "easy_install statprof"'))

    freq = ui.configint('profiling', 'freq', default=1000)
    if freq > 0:
        statprof.reset(freq)
    else:
        ui.warn(_("invalid sampling frequency '%s' - ignoring\n") % freq)

    statprof.start()
    try:
        return func()
    finally:
        statprof.stop()
        statprof.display(fp)
def _runcommand(ui, options, cmd, cmdfunc):
    """Invoke *cmdfunc*, optionally under a profiler chosen by config.

    SignatureError from the command is rewrapped as a CommandError so the
    user sees "invalid arguments" instead of a traceback.
    """
    def checkargs():
        try:
            return cmdfunc()
        except error.SignatureError:
            raise error.CommandError(cmd, _("invalid arguments"))

    if options['profile']:
        # the HGPROF environment variable overrides the configured type
        profiler = os.getenv('HGPROF')
        if profiler is None:
            profiler = ui.config('profiling', 'type', default='ls')
        if profiler not in ('ls', 'stat'):
            ui.warn(_("unrecognized profiler '%s' - ignored\n") % profiler)
            profiler = 'ls'

        output = ui.config('profiling', 'output')

        if output:
            path = ui.expandpath(output)
            fp = open(path, 'wb')
        else:
            fp = sys.stderr

        try:
            if profiler == 'ls':
                return lsprofile(ui, checkargs, fp)
            else:
                return statprofile(ui, checkargs, fp)
        finally:
            # only close files we opened ourselves, never sys.stderr
            if output:
                fp.close()
    else:
        return checkargs()
|
hekra01/mercurial
|
mercurial/dispatch.py
|
Python
|
gpl-2.0
| 35,318
|
import logging
import pytest
import sdk_hosts
import sdk_install
import sdk_networks
from tests import config
log = logging.getLogger(__name__)
@pytest.fixture(scope="module", autouse=True)
def configure_package(configure_security):
    """Ensure a clean slate: uninstall the service before the module's
    tests run and again afterwards."""
    try:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)

        yield  # let the test session execute
    finally:
        sdk_install.uninstall(config.PACKAGE_NAME, config.SERVICE_NAME)
@pytest.mark.sanity
def test_custom_service_tld():
    """Install the service with a custom service TLD and verify the 'test'
    endpoint advertises ip:port addresses plus DNS names under that TLD."""
    task_count = 1
    custom_tld = sdk_hosts.get_crypto_id_domain()
    sdk_install.install(
        config.PACKAGE_NAME,
        config.SERVICE_NAME,
        task_count,
        additional_options={"service": {"custom_service_tld": custom_tld, "yaml": "custom_tld"}},
    )

    # Verify the endpoint entry is correct
    assert set(["test"]) == set(sdk_networks.get_endpoint_names(config.PACKAGE_NAME, config.SERVICE_NAME))

    test_endpoint = sdk_networks.get_endpoint(config.PACKAGE_NAME, config.SERVICE_NAME, "test")
    assert set(["address", "dns"]) == set(test_endpoint.keys())

    assert len(test_endpoint["address"]) == task_count
    # Expect ip:port:
    for entry in test_endpoint["address"]:
        assert len(entry.split(":")) == 2

    assert len(test_endpoint["dns"]) == task_count
    # Expect custom tld:
    for entry in test_endpoint["dns"]:
        assert custom_tld in entry
|
mesosphere/dcos-commons
|
frameworks/helloworld/tests/test_custom_service_tld.py
|
Python
|
apache-2.0
| 1,407
|
import sys
from numpy.testing import *
from swig_ext import example
class TestExample(TestCase):
    """Sanity checks for the SWIG-generated `example` extension module."""

    def test_fact(self):
        # factorial computed by the C extension: 10! == 3628800
        assert_equal(example.fact(10),3628800)

    def test_cvar(self):
        # module-level C variable exposed through example.cvar;
        # assignments propagate back into the C global
        assert_equal(example.cvar.My_variable,3.0)
        example.cvar.My_variable = 5
        assert_equal(example.cvar.My_variable,5.0)
# allow running this test file directly with python
if __name__ == "__main__":
    run_module_suite()
|
beiko-lab/gengis
|
bin/Lib/site-packages/numpy/distutils/tests/swig_ext/tests/test_example.py
|
Python
|
gpl-3.0
| 403
|
# Send test commands.
import socket
import time
# destination host and UDP ports used by the send* helpers below
ip_gc = "127.0.0.1"
port_data = 10000
port_health = 10001
# NOTE(review): port_uplink is defined but unused in this script -- confirm
# whether an uplink sender is still planned.
port_uplink = 10002
def sendData():
    """Send a fake ADS-B data message to the data port via UDP.

    The payload is the current UNIX timestamp, a frame count of 2, and two
    dummy 28-character frames, one per line.
    """
    adsbMessage = "%s,2\n1234567890123456789012345678\n1234567890123456789012345679\n" % (int(time.time()))
    udpSocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        # encode to bytes: sendto() rejects str on Python 3 (on Python 2
        # an ASCII encode is a no-op, so behavior is unchanged there)
        udpSocket.sendto(adsbMessage.encode("ascii"), (ip_gc, port_data))
    finally:
        # close even if sendto raises, so the socket is never leaked
        udpSocket.close()
def sendTime():
    """Send the current UNIX timestamp (newline-terminated) to the data
    port via UDP."""
    timeMessage = "%s\n" % (int(time.time()))
    udpSocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        # encode to bytes: sendto() rejects str on Python 3 (no-op on 2)
        udpSocket.sendto(timeMessage.encode("ascii"), (ip_gc, port_data))
    finally:
        # close even if sendto raises, so the socket is never leaked
        udpSocket.close()
def sendHealthData():
    """Send a fake comma-separated health telemetry record (timestamp plus
    six sensor values) to the health port via UDP."""
    healthData = "%s,45000,46000,34000,0,0,-12000" % (int(time.time()))
    udpSocket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        # encode to bytes: sendto() rejects str on Python 3 (no-op on 2)
        udpSocket.sendto(healthData.encode("ascii"), (ip_gc, port_health))
    finally:
        # close even if sendto raises, so the socket is never leaked
        udpSocket.close()
# Convert a UNIX timestamp into a readable format with:
# -> time.ctime(int("unix-timestamp"))
sendData()
sendHealthData()
# NOTE(review): sendTime() is defined above but never invoked here --
# confirm whether time messages should also be sent.
|
thasti/arca-gc
|
send_test_cmd.py
|
Python
|
gpl-2.0
| 914
|
"""The tests for the Restore component."""
from datetime import datetime
from asynctest import patch
from homeassistant.const import EVENT_HOMEASSISTANT_START
from homeassistant.core import CoreState, State
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.restore_state import (
DATA_RESTORE_STATE_TASK,
STORAGE_KEY,
RestoreEntity,
RestoreStateData,
StoredState,
)
from homeassistant.util import dt as dt_util
from tests.common import mock_coro
async def test_caching_data(hass):
    """Test that previously stored states are returned for a restored entity."""
    now = dt_util.utcnow()
    stored_states = [
        StoredState(State("input_boolean.b0", "on"), now),
        StoredState(State("input_boolean.b1", "on"), now),
        StoredState(State("input_boolean.b2", "on"), now),
    ]

    data = await RestoreStateData.async_get_instance(hass)
    await data.store.async_save([state.as_dict() for state in stored_states])

    # Emulate a fresh load
    hass.data[DATA_RESTORE_STATE_TASK] = None

    entity = RestoreEntity()
    entity.hass = hass
    entity.entity_id = "input_boolean.b1"

    # Mock that only b1 is present this run
    with patch(
        "homeassistant.helpers.restore_state.Store.async_save"
    ) as mock_write_data:
        state = await entity.async_get_last_state()

    assert state is not None
    assert state.entity_id == "input_boolean.b1"
    assert state.state == "on"

    assert mock_write_data.called
async def test_hass_starting(hass):
    """Test that states are not dumped until Home Assistant has started."""
    hass.state = CoreState.starting

    now = dt_util.utcnow()
    stored_states = [
        StoredState(State("input_boolean.b0", "on"), now),
        StoredState(State("input_boolean.b1", "on"), now),
        StoredState(State("input_boolean.b2", "on"), now),
    ]

    data = await RestoreStateData.async_get_instance(hass)
    await data.store.async_save([state.as_dict() for state in stored_states])

    # Emulate a fresh load
    hass.data[DATA_RESTORE_STATE_TASK] = None

    entity = RestoreEntity()
    entity.hass = hass
    entity.entity_id = "input_boolean.b1"

    # Mock that only b1 is present this run
    states = [State("input_boolean.b1", "on")]
    with patch(
        "homeassistant.helpers.restore_state.Store.async_save"
    ) as mock_write_data, patch.object(hass.states, "async_all", return_value=states):
        state = await entity.async_get_last_state()

    assert state is not None
    assert state.entity_id == "input_boolean.b1"
    assert state.state == "on"

    # Assert that no data was written yet, since hass is still starting.
    assert not mock_write_data.called

    # Finish hass startup
    with patch(
        "homeassistant.helpers.restore_state.Store.async_save"
    ) as mock_write_data:
        hass.bus.async_fire(EVENT_HOMEASSISTANT_START)
        await hass.async_block_till_done()

    # Assert that this session states were written
    assert mock_write_data.called
async def test_dump_data(hass):
    """Test which entity states get dumped to storage."""
    states = [
        State("input_boolean.b0", "on"),
        State("input_boolean.b1", "on"),
        State("input_boolean.b2", "on"),
    ]

    entity = Entity()
    entity.hass = hass
    entity.entity_id = "input_boolean.b0"
    await entity.async_internal_added_to_hass()

    entity = RestoreEntity()
    entity.hass = hass
    entity.entity_id = "input_boolean.b1"
    await entity.async_internal_added_to_hass()

    data = await RestoreStateData.async_get_instance(hass)
    now = dt_util.utcnow()
    data.last_states = {
        "input_boolean.b0": StoredState(State("input_boolean.b0", "off"), now),
        "input_boolean.b1": StoredState(State("input_boolean.b1", "off"), now),
        "input_boolean.b2": StoredState(State("input_boolean.b2", "off"), now),
        "input_boolean.b3": StoredState(State("input_boolean.b3", "off"), now),
        "input_boolean.b4": StoredState(
            State("input_boolean.b4", "off"),
            datetime(1985, 10, 26, 1, 22, tzinfo=dt_util.UTC),
        ),
    }

    with patch(
        "homeassistant.helpers.restore_state.Store.async_save"
    ) as mock_write_data, patch.object(hass.states, "async_all", return_value=states):
        await data.async_dump_states()

    assert mock_write_data.called
    args = mock_write_data.mock_calls[0][1]
    written_states = args[0]

    # b0 should not be written, since it didn't extend RestoreEntity
    # b1 should be written, since it is present in the current run
    # b2 should not be written, since it is not registered with the helper
    # b3 should be written, since it is still not expired
    # b4 should not be written, since it is now expired
    assert len(written_states) == 2
    assert written_states[0]["state"]["entity_id"] == "input_boolean.b1"
    assert written_states[0]["state"]["state"] == "on"
    assert written_states[1]["state"]["entity_id"] == "input_boolean.b3"
    assert written_states[1]["state"]["state"] == "off"

    # Test that removed entities are not persisted
    await entity.async_remove()

    with patch(
        "homeassistant.helpers.restore_state.Store.async_save"
    ) as mock_write_data, patch.object(hass.states, "async_all", return_value=states):
        await data.async_dump_states()

    assert mock_write_data.called
    args = mock_write_data.mock_calls[0][1]
    written_states = args[0]
    assert len(written_states) == 1
    assert written_states[0]["state"]["entity_id"] == "input_boolean.b3"
    assert written_states[0]["state"]["state"] == "off"
async def test_dump_error(hass):
    """Test that a failing store write during a dump is handled without raising."""
    states = [
        State("input_boolean.b0", "on"),
        State("input_boolean.b1", "on"),
        State("input_boolean.b2", "on"),
    ]
    # b0 is a plain Entity, so it never registers with the restore helper.
    entity = Entity()
    entity.hass = hass
    entity.entity_id = "input_boolean.b0"
    await entity.async_internal_added_to_hass()
    # b1 is a RestoreEntity and registers itself with the helper.
    entity = RestoreEntity()
    entity.hass = hass
    entity.entity_id = "input_boolean.b1"
    await entity.async_internal_added_to_hass()
    data = await RestoreStateData.async_get_instance(hass)
    # Make the underlying Store raise; async_dump_states must swallow it.
    with patch(
        "homeassistant.helpers.restore_state.Store.async_save",
        return_value=mock_coro(exception=HomeAssistantError),
    ) as mock_write_data, patch.object(hass.states, "async_all", return_value=states):
        await data.async_dump_states()
    assert mock_write_data.called
async def test_load_error(hass):
    """Test that a storage error while loading yields None instead of raising."""
    entity = RestoreEntity()
    entity.hass = hass
    entity.entity_id = "input_boolean.b1"
    # Simulate the storage backend failing on load.
    with patch(
        "homeassistant.helpers.storage.Store.async_load",
        return_value=mock_coro(exception=HomeAssistantError),
    ):
        state = await entity.async_get_last_state()
    assert state is None
async def test_state_saved_on_remove(hass):
    """Test that we save entity state on removal."""
    entity = RestoreEntity()
    entity.hass = hass
    entity.entity_id = "input_boolean.b0"
    await entity.async_internal_added_to_hass()
    now = dt_util.utcnow()
    # Attributes contain a set holding a datetime to exercise the
    # JSON-friendly serialization applied when states are stored.
    hass.states.async_set(
        "input_boolean.b0", "on", {"complicated": {"value": {1, 2, now}}}
    )
    data = await RestoreStateData.async_get_instance(hass)
    # No last states should currently be saved
    assert not data.last_states
    await entity.async_remove()
    # We should store the input boolean state when it is removed
    state = data.last_states["input_boolean.b0"].state
    assert state.state == "on"
    # The set was converted to a list and the datetime to an ISO string.
    assert isinstance(state.attributes["complicated"]["value"], list)
    assert set(state.attributes["complicated"]["value"]) == {1, 2, now.isoformat()}
async def test_restoring_invalid_entity_id(hass, hass_storage):
    """Test restoring invalid entity IDs."""
    entity = RestoreEntity()
    entity.hass = hass
    entity.entity_id = "test.invalid__entity_id"
    now = dt_util.utcnow().isoformat()
    # Seed storage with a saved state whose entity_id contains a double
    # underscore (invalid); async_get_last_state must refuse to restore it.
    hass_storage[STORAGE_KEY] = {
        "version": 1,
        "key": STORAGE_KEY,
        "data": [
            {
                "state": {
                    "entity_id": "test.invalid__entity_id",
                    "state": "off",
                    "attributes": {},
                    "last_changed": now,
                    "last_updated": now,
                    "context": {
                        "id": "3c2243ff5f30447eb12e7348cfd5b8ff",
                        "user_id": None,
                    },
                },
                "last_seen": dt_util.utcnow().isoformat(),
            }
        ],
    }
    state = await entity.async_get_last_state()
    assert state is None
|
leppa/home-assistant
|
tests/helpers/test_restore_state.py
|
Python
|
apache-2.0
| 8,646
|
# coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from twilio.base import deserialize
from twilio.base import serialize
from twilio.base import values
from twilio.base.instance_context import InstanceContext
from twilio.base.instance_resource import InstanceResource
from twilio.base.list_resource import ListResource
from twilio.base.page import Page
class CompositionList(ListResource):
    """ PLEASE NOTE that this class contains preview products that are subject
    to change. Use them with caution. If you currently do not have developer
    preview access, please contact help@twilio.com. """

    def __init__(self, version):
        """
        Initialize the CompositionList

        :param Version version: Version that contains the resource

        :returns: twilio.rest.video.v1.composition.CompositionList
        :rtype: twilio.rest.video.v1.composition.CompositionList
        """
        super(CompositionList, self).__init__(version)

        # Path Solution
        # Compositions is a top-level resource, so the path needs no parameters.
        self._solution = {}
        self._uri = '/Compositions'.format(**self._solution)

    def stream(self, status=values.unset, date_created_after=values.unset,
               date_created_before=values.unset, room_sid=values.unset, limit=None,
               page_size=None):
        """
        Streams CompositionInstance records from the API as a generator stream.
        This operation lazily loads records as efficiently as possible until the limit
        is reached.
        The results are returned as a generator, so this operation is memory efficient.

        :param CompositionInstance.Status status: Read only Composition resources with this status
        :param datetime date_created_after: Read only Composition resources created on or after this [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) date-time with time zone
        :param datetime date_created_before: Read only Composition resources created before this ISO 8601 date-time with time zone
        :param unicode room_sid: Read only Composition resources with this Room SID
        :param int limit: Upper limit for the number of records to return. stream()
                          guarantees to never return more than limit.  Default is no limit
        :param int page_size: Number of records to fetch per request, when not set will use
                              the default value of 50 records.  If no page_size is defined
                              but a limit is defined, stream() will attempt to read the
                              limit with the most efficient page size, i.e. min(limit, 1000)

        :returns: Generator that will yield up to limit results
        :rtype: list[twilio.rest.video.v1.composition.CompositionInstance]
        """
        # Fetch the first page eagerly; the version object then follows
        # "next" page links lazily as the generator is consumed.
        limits = self._version.read_limits(limit, page_size)

        page = self.page(
            status=status,
            date_created_after=date_created_after,
            date_created_before=date_created_before,
            room_sid=room_sid,
            page_size=limits['page_size'],
        )

        return self._version.stream(page, limits['limit'], limits['page_limit'])

    def list(self, status=values.unset, date_created_after=values.unset,
             date_created_before=values.unset, room_sid=values.unset, limit=None,
             page_size=None):
        """
        Lists CompositionInstance records from the API as a list.
        Unlike stream(), this operation is eager and will load `limit` records into
        memory before returning.

        :param CompositionInstance.Status status: Read only Composition resources with this status
        :param datetime date_created_after: Read only Composition resources created on or after this [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) date-time with time zone
        :param datetime date_created_before: Read only Composition resources created before this ISO 8601 date-time with time zone
        :param unicode room_sid: Read only Composition resources with this Room SID
        :param int limit: Upper limit for the number of records to return. list() guarantees
                          never to return more than limit.  Default is no limit
        :param int page_size: Number of records to fetch per request, when not set will use
                              the default value of 50 records.  If no page_size is defined
                              but a limit is defined, list() will attempt to read the limit
                              with the most efficient page size, i.e. min(limit, 1000)

        :returns: Generator that will yield up to limit results
        :rtype: list[twilio.rest.video.v1.composition.CompositionInstance]
        """
        # Materialize the stream() generator into a list.
        return list(self.stream(
            status=status,
            date_created_after=date_created_after,
            date_created_before=date_created_before,
            room_sid=room_sid,
            limit=limit,
            page_size=page_size,
        ))

    def page(self, status=values.unset, date_created_after=values.unset,
             date_created_before=values.unset, room_sid=values.unset,
             page_token=values.unset, page_number=values.unset,
             page_size=values.unset):
        """
        Retrieve a single page of CompositionInstance records from the API.
        Request is executed immediately

        :param CompositionInstance.Status status: Read only Composition resources with this status
        :param datetime date_created_after: Read only Composition resources created on or after this [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601) date-time with time zone
        :param datetime date_created_before: Read only Composition resources created before this ISO 8601 date-time with time zone
        :param unicode room_sid: Read only Composition resources with this Room SID
        :param str page_token: PageToken provided by the API
        :param int page_number: Page Number, this value is simply for client state
        :param int page_size: Number of records to return, defaults to 50

        :returns: Page of CompositionInstance
        :rtype: twilio.rest.video.v1.composition.CompositionPage
        """
        # Map Python arguments onto the API's query-string parameter names;
        # values.of() drops any parameter left at values.unset.
        params = values.of({
            'Status': status,
            'DateCreatedAfter': serialize.iso8601_datetime(date_created_after),
            'DateCreatedBefore': serialize.iso8601_datetime(date_created_before),
            'RoomSid': room_sid,
            'PageToken': page_token,
            'Page': page_number,
            'PageSize': page_size,
        })

        response = self._version.page(
            'GET',
            self._uri,
            params=params,
        )

        return CompositionPage(self._version, response, self._solution)

    def get_page(self, target_url):
        """
        Retrieve a specific page of CompositionInstance records from the API.
        Request is executed immediately

        :param str target_url: API-generated URL for the requested results page

        :returns: Page of CompositionInstance
        :rtype: twilio.rest.video.v1.composition.CompositionPage
        """
        response = self._version.domain.twilio.request(
            'GET',
            target_url,
        )

        return CompositionPage(self._version, response, self._solution)

    def create(self, room_sid, video_layout=values.unset,
               audio_sources=values.unset, audio_sources_excluded=values.unset,
               resolution=values.unset, format=values.unset,
               status_callback=values.unset, status_callback_method=values.unset,
               trim=values.unset):
        """
        Create a new CompositionInstance

        :param unicode room_sid: The SID of the Group Room with the media tracks to be used as composition sources
        :param dict video_layout: An object that describes the video layout of the composition
        :param unicode audio_sources: An array of track names from the same group room to merge
        :param unicode audio_sources_excluded: An array of track names to exclude
        :param unicode resolution: A string that describes the columns (width) and rows (height) of the generated composed video in pixels
        :param CompositionInstance.Format format: The container format of the composition's media files
        :param unicode status_callback: The URL we should call to send status information to your application
        :param unicode status_callback_method: The HTTP method we should use to call status_callback
        :param bool trim: Whether to clip the intervals where there is no active media in the composition

        :returns: Newly created CompositionInstance
        :rtype: twilio.rest.video.v1.composition.CompositionInstance
        """
        data = values.of({
            'RoomSid': room_sid,
            'VideoLayout': serialize.object(video_layout),
            'AudioSources': serialize.map(audio_sources, lambda e: e),
            'AudioSourcesExcluded': serialize.map(audio_sources_excluded, lambda e: e),
            'Resolution': resolution,
            'Format': format,
            'StatusCallback': status_callback,
            'StatusCallbackMethod': status_callback_method,
            'Trim': trim,
        })

        payload = self._version.create(
            'POST',
            self._uri,
            data=data,
        )

        return CompositionInstance(self._version, payload, )

    def get(self, sid):
        """
        Constructs a CompositionContext

        :param sid: The SID that identifies the resource to fetch

        :returns: twilio.rest.video.v1.composition.CompositionContext
        :rtype: twilio.rest.video.v1.composition.CompositionContext
        """
        return CompositionContext(self._version, sid=sid, )

    def __call__(self, sid):
        """
        Constructs a CompositionContext

        :param sid: The SID that identifies the resource to fetch

        :returns: twilio.rest.video.v1.composition.CompositionContext
        :rtype: twilio.rest.video.v1.composition.CompositionContext
        """
        return CompositionContext(self._version, sid=sid, )

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        return '<Twilio.Video.V1.CompositionList>'
class CompositionPage(Page):
    """A single page of Composition records returned by the list API.

    PLEASE NOTE that this class contains preview products that are subject
    to change. Use them with caution. If you currently do not have developer
    preview access, please contact help@twilio.com.
    """

    def __init__(self, version, response, solution):
        """Wrap one API *response* as a page of Composition records.

        :param Version version: Version that contains the resource
        :param Response response: Response from the API
        :rtype: twilio.rest.video.v1.composition.CompositionPage
        """
        super(CompositionPage, self).__init__(version, response)
        # Keep the path solution so instances can be built consistently.
        self._solution = solution

    def get_instance(self, payload):
        """Build one CompositionInstance from a raw *payload* dict.

        :param dict payload: Payload response from the API
        :rtype: twilio.rest.video.v1.composition.CompositionInstance
        """
        instance = CompositionInstance(self._version, payload)
        return instance

    def __repr__(self):
        """Return a machine friendly representation."""
        return '<Twilio.Video.V1.CompositionPage>'
class CompositionContext(InstanceContext):
    """Context for operating on a single Composition identified by SID.

    PLEASE NOTE that this class contains preview products that are subject
    to change. Use them with caution. If you currently do not have developer
    preview access, please contact help@twilio.com.
    """

    def __init__(self, version, sid):
        """Bind the context to one Composition resource.

        :param Version version: Version that contains the resource
        :param sid: The SID that identifies the resource to fetch
        :rtype: twilio.rest.video.v1.composition.CompositionContext
        """
        super(CompositionContext, self).__init__(version)
        # Path Solution: the SID is the only path parameter.
        self._solution = {'sid': sid}
        self._uri = '/Compositions/{sid}'.format(**self._solution)

    def fetch(self):
        """Fetch this Composition from the API.

        :returns: Fetched CompositionInstance
        :rtype: twilio.rest.video.v1.composition.CompositionInstance
        """
        payload = self._version.fetch('GET', self._uri, params=values.of({}))
        return CompositionInstance(self._version, payload, sid=self._solution['sid'])

    def delete(self):
        """Delete this Composition.

        :returns: True if delete succeeds, False otherwise
        :rtype: bool
        """
        return self._version.delete('delete', self._uri)

    def __repr__(self):
        """Return a machine friendly representation."""
        details = ['{}={}'.format(key, value) for key, value in self._solution.items()]
        return '<Twilio.Video.V1.CompositionContext {}>'.format(' '.join(details))
class CompositionInstance(InstanceResource):
    """ PLEASE NOTE that this class contains preview products that are subject
    to change. Use them with caution. If you currently do not have developer
    preview access, please contact help@twilio.com. """

    class Status(object):
        # Lifecycle states reported by the API for a composition job.
        ENQUEUED = "enqueued"
        PROCESSING = "processing"
        COMPLETED = "completed"
        DELETED = "deleted"
        FAILED = "failed"

    class Format(object):
        # Supported container formats for the composed media file.
        MP4 = "mp4"
        WEBM = "webm"

    def __init__(self, version, payload, sid=None):
        """
        Initialize the CompositionInstance

        :returns: twilio.rest.video.v1.composition.CompositionInstance
        :rtype: twilio.rest.video.v1.composition.CompositionInstance
        """
        super(CompositionInstance, self).__init__(version)

        # Marshaled Properties: copy the raw API payload into plain Python
        # values; dates and integers are deserialized, the rest pass through.
        self._properties = {
            'account_sid': payload.get('account_sid'),
            'status': payload.get('status'),
            'date_created': deserialize.iso8601_datetime(payload.get('date_created')),
            'date_completed': deserialize.iso8601_datetime(payload.get('date_completed')),
            'date_deleted': deserialize.iso8601_datetime(payload.get('date_deleted')),
            'sid': payload.get('sid'),
            'room_sid': payload.get('room_sid'),
            'audio_sources': payload.get('audio_sources'),
            'audio_sources_excluded': payload.get('audio_sources_excluded'),
            'video_layout': payload.get('video_layout'),
            'resolution': payload.get('resolution'),
            'trim': payload.get('trim'),
            'format': payload.get('format'),
            'bitrate': deserialize.integer(payload.get('bitrate')),
            'size': deserialize.integer(payload.get('size')),
            'duration': deserialize.integer(payload.get('duration')),
            'url': payload.get('url'),
            'links': payload.get('links'),
        }

        # Context: built lazily in _proxy; fall back to the payload's own
        # sid when the caller did not pass one explicitly.
        self._context = None
        self._solution = {'sid': sid or self._properties['sid'], }

    @property
    def _proxy(self):
        """
        Generate an instance context for the instance, the context is capable of
        performing various actions.  All instance actions are proxied to the context

        :returns: CompositionContext for this CompositionInstance
        :rtype: twilio.rest.video.v1.composition.CompositionContext
        """
        if self._context is None:
            self._context = CompositionContext(self._version, sid=self._solution['sid'], )
        return self._context

    @property
    def account_sid(self):
        """
        :returns: The SID of the Account that created the resource
        :rtype: unicode
        """
        return self._properties['account_sid']

    @property
    def status(self):
        """
        :returns: The status of the composition
        :rtype: CompositionInstance.Status
        """
        return self._properties['status']

    @property
    def date_created(self):
        """
        :returns: The ISO 8601 date and time in GMT when the resource was created
        :rtype: datetime
        """
        return self._properties['date_created']

    @property
    def date_completed(self):
        """
        :returns: Date when the media processing task finished
        :rtype: datetime
        """
        return self._properties['date_completed']

    @property
    def date_deleted(self):
        """
        :returns: The ISO 8601 date and time in GMT when the composition generated media was deleted
        :rtype: datetime
        """
        return self._properties['date_deleted']

    @property
    def sid(self):
        """
        :returns: The unique string that identifies the resource
        :rtype: unicode
        """
        return self._properties['sid']

    @property
    def room_sid(self):
        """
        :returns: The SID of the Group Room that generated the audio and video tracks used in the composition
        :rtype: unicode
        """
        return self._properties['room_sid']

    @property
    def audio_sources(self):
        """
        :returns: The array of track names to include in the composition
        :rtype: unicode
        """
        return self._properties['audio_sources']

    @property
    def audio_sources_excluded(self):
        """
        :returns: The array of track names to exclude from the composition
        :rtype: unicode
        """
        return self._properties['audio_sources_excluded']

    @property
    def video_layout(self):
        """
        :returns: An object that describes the video layout of the composition
        :rtype: dict
        """
        return self._properties['video_layout']

    @property
    def resolution(self):
        """
        :returns: The dimensions of the video image in pixels expressed as columns (width) and rows (height)
        :rtype: unicode
        """
        return self._properties['resolution']

    @property
    def trim(self):
        """
        :returns: Whether to remove intervals with no media
        :rtype: bool
        """
        return self._properties['trim']

    @property
    def format(self):
        """
        :returns: The container format of the composition's media files as specified in the POST request that created the Composition resource
        :rtype: CompositionInstance.Format
        """
        return self._properties['format']

    @property
    def bitrate(self):
        """
        :returns: The average bit rate of the composition's media
        :rtype: unicode
        """
        return self._properties['bitrate']

    @property
    def size(self):
        """
        :returns: The size of the composed media file in bytes
        :rtype: unicode
        """
        return self._properties['size']

    @property
    def duration(self):
        """
        :returns: The duration of the composition's media file in seconds
        :rtype: unicode
        """
        return self._properties['duration']

    @property
    def url(self):
        """
        :returns: The absolute URL of the resource
        :rtype: unicode
        """
        return self._properties['url']

    @property
    def links(self):
        """
        :returns: The URL of the media file associated with the composition
        :rtype: unicode
        """
        return self._properties['links']

    def fetch(self):
        """
        Fetch a CompositionInstance

        :returns: Fetched CompositionInstance
        :rtype: twilio.rest.video.v1.composition.CompositionInstance
        """
        return self._proxy.fetch()

    def delete(self):
        """
        Deletes the CompositionInstance

        :returns: True if delete succeeds, False otherwise
        :rtype: bool
        """
        return self._proxy.delete()

    def __repr__(self):
        """
        Provide a friendly representation

        :returns: Machine friendly representation
        :rtype: str
        """
        context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items())
        return '<Twilio.Video.V1.CompositionInstance {}>'.format(context)
|
tysonholub/twilio-python
|
twilio/rest/video/v1/composition/__init__.py
|
Python
|
mit
| 20,700
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# file prefs2prefs-lfuns.py
# This file is part of LyX, the document processor.
# Licence details can be found in the file COPYING.
# author Richard Heck
# Full author contact details are available in file CREDITS
# This file houses conversion information for the bind and ui files,
# i.e., for files where we are converting lfuns.
# The converter functions take a line as argument and return a list:
# (Bool, NewLine),
# where the Bool says if we've modified anything and the NewLine is
# the new line, if so, which will be used to replace the old line.
import sys, re
###########################################################
#
# Actual converter functions
#
# These accept a line as argument and should return a list:
# (bool, newline)
# where the bool indicates whether we changed anything. If not,
# one normally returns: (False, []).
# Shared "nothing changed" result returned by every converter below.
no_match = (False, [])


def simple_renaming(line, old, new):
    """Return (True, line with *old* renamed to *new*), or no_match."""
    if old not in line:
        return no_match
    return (True, line.replace(old, new))
def next_inset_modify(line):
    """Rename the dropped lfun "next-inset-modify" to "inset-modify"."""
    if "next-inset-modify" not in line:
        return (False, [])
    return (True, line.replace("next-inset-modify", "inset-modify"))
def next_inset_toggle(line):
    """Rename the dropped lfun "next-inset-toggle" to "inset-toggle"."""
    if "next-inset-toggle" not in line:
        return (False, [])
    return (True, line.replace("next-inset-toggle", "inset-toggle"))
def optional_insert(line):
    """Rename the dropped lfun "optional-insert" to "argument-insert"."""
    if "optional-insert" not in line:
        return (False, [])
    return (True, line.replace("optional-insert", "argument-insert"))
# Captures: (prefix, source note type, target note type, suffix).
re_nm = re.compile(r'^(.*)notes-mutate\s+(\w+)\s+(\w+)(.*)$')


def notes_mutate(line):
    """Rewrite "notes-mutate SRC DST" as inset-forall + inset-modify."""
    match = re_nm.search(line)
    if match is None:
        return (False, [])
    head, source, target, tail = match.groups()
    rewritten = "%sinset-forall Note:%s inset-modify note Note %s%s" % (
        head, source, target, tail)
    return (True, rewritten)
# Captures: (prefix, action, optional target inset, suffix).
re_ait = re.compile(r'^(.*)all-insets-toggle\s+(\w+)(?:\s+(\w+))?(.*)$')


def all_insets_toggle(line):
    """Rewrite "all-insets-toggle ACTION [TARGET]" as inset-forall."""
    match = re_ait.search(line)
    if match is None:
        return (False, [])
    head, action, target, tail = match.groups()
    # Map the old target names onto inset layout names; this mapping
    # cannot be perfect.
    if target == "tabular":
        # There does not seem to be an InsetLayout for tables, so
        # I do not know what to do here.  If anyone does, then please
        # fix this.  For now, we just have to remove this line.
        return (True, "")
    if target is None:
        target = "*"
    elif target == "ert":
        target = "ERT"
    else:
        target = target.capitalize()
    rewritten = "%sinset-forall %s inset-toggle %s%s" % (head, target, action, tail)
    return (True, rewritten)
re_li = re.compile(r'^(.*)\bline-insert\b(.*)$')


def line_insert(line):
    """Replace "line-insert" with an explicit inset-insert of a rule."""
    match = re_li.search(line)
    if match is None:
        return (False, [])
    head, tail = match.groups()
    rule = "inset-insert line rule height 0.25ex width 100col% \\end_inset"
    return (True, head + rule + tail)
def toc_insert(line):
    """Rename the dropped lfun "toc-insert" to "inset-insert toc"."""
    if "toc-insert" not in line:
        return (False, [])
    return (True, line.replace("toc-insert", "inset-insert toc"))
re_ps = re.compile(r'^(.*)paragraph-spacing\s+(default|single|onehalf|double)\b(.*)$')
re_psother = re.compile(r'^(.*)paragraph-spacing\s+other\s+(\d+\.\d?|\d?\.\d+|\d+)(.*)$')


def paragraph_spacing(line):
    """Wrap paragraph-spacing arguments inside paragraph-params."""
    # possible args: default, single, onehalf, double, other FLOAT
    named = re_ps.search(line)
    if named:
        head, arg, tail = named.groups()
        return (True, head + "paragraph-params \\paragraph-spacing " + arg + tail)
    other = re_psother.search(line)
    if other is None:
        return (False, [])
    head, arg, tail = other.groups()
    return (True, head + "paragraph-params \\paragraph-spacing other " + arg + tail)
def tabular_feature(line):
    """Rename the dropped lfun "tabular-feature" to "inset-modify tabular"."""
    if "tabular-feature" not in line:
        return (False, [])
    return (True, line.replace("tabular-feature", "inset-modify tabular"))
# Captures: (bind/unbind keyword, modifier text before "Bar", rest of line).
re_Bar2bar = re.compile(r'^(\\(?:bind|unbind))\s+"([^"]*)Bar"(\s+"[^"]+")')


def Bar2bar(line):
    """Lower-case the "Bar" key name in bind/unbind lines."""
    match = re_Bar2bar.search(line)
    if match is None:
        return (False, [])
    btype, mod, rest = match.groups()
    return (True, '%s "%sbar"%s' % (btype, mod, rest))
def paragraph_break(line):
    """Rename the dropped lfun "break-paragraph" to "paragraph-break"."""
    if "break-paragraph" not in line:
        return (False, [])
    return (True, line.replace("break-paragraph", "paragraph-break"))
def tab_group_close(line):
    """Rename the dropped lfun "close-tab-group" to "tab-group-close"."""
    if "close-tab-group" not in line:
        return (False, [])
    return (True, line.replace("close-tab-group", "tab-group-close"))
def view_split(line):
    """Rename the dropped lfun "split-view" to "view-split"."""
    if "split-view" not in line:
        return (False, [])
    return (True, line.replace("split-view", "view-split"))
#
#
###########################################################
# Conversion chain
# Each entry is [format_number, [converter, ...]]: the converters that must
# run, in order, to bring a bind/ui file up to that format number.
conversions = [
    [ 1, [ # this will be a long list of conversions to format 1, LyX 2.0
        next_inset_toggle,
        next_inset_modify,
        optional_insert,
        notes_mutate,
        all_insets_toggle,
        line_insert,
        toc_insert,
        paragraph_spacing,
        tabular_feature,
        Bar2bar
        ]],
    [ 2, [ # list of conversions to format 2, LyX 2.1
        paragraph_break,
        tab_group_close,
        view_split
        ]],
]
|
mandeepsimak/Lyx
|
lib/scripts/prefs2prefs_lfuns.py
|
Python
|
gpl-2.0
| 4,605
|
"""
Sphinx plugins for Django documentation.
"""
import json
import os
import re
from docutils import nodes
from docutils.parsers.rst import directives
from sphinx import addnodes, __version__ as sphinx_ver
from sphinx.builders.html import StandaloneHTMLBuilder
from sphinx.writers.html import SmartyPantsHTMLTranslator
from sphinx.util.console import bold
from sphinx.util.compat import Directive
from sphinx.util.nodes import set_source_info
# RE for option descriptions without a '--' prefix.
# Matches an option name and its (possibly empty) argument spec, stopping at
# the next ", -"/", --"/", /" separator or at the end of the signature.
simple_option_desc_re = re.compile(
    r'([-_a-zA-Z0-9]+)(\s*.*?)(?=,\s+(?:/|-|--)|$)')
def setup(app):
    """Register Django's custom roles, directives, node and builder with Sphinx."""
    # Cross-reference types: usable as :setting:`X`, :ttag:`X`, :tfilter:`X`,
    # :lookup:`X`, each with an index entry.
    app.add_crossref_type(
        directivename="setting",
        rolename="setting",
        indextemplate="pair: %s; setting",
    )
    app.add_crossref_type(
        directivename="templatetag",
        rolename="ttag",
        indextemplate="pair: %s; template tag"
    )
    app.add_crossref_type(
        directivename="templatefilter",
        rolename="tfilter",
        indextemplate="pair: %s; template filter"
    )
    app.add_crossref_type(
        directivename="fieldlookup",
        rolename="lookup",
        indextemplate="pair: %s; field lookup type",
    )
    # django-admin commands and options get custom signature parsing.
    app.add_description_unit(
        directivename="django-admin",
        rolename="djadmin",
        indextemplate="pair: %s; django-admin command",
        parse_node=parse_django_admin_node,
    )
    app.add_description_unit(
        directivename="django-admin-option",
        rolename="djadminopt",
        indextemplate="pair: %s; django-admin command-line option",
        parse_node=parse_django_adminopt_node,
    )
    app.add_config_value('django_next_version', '0.0', True)
    app.add_directive('versionadded', VersionDirective)
    app.add_directive('versionchanged', VersionDirective)
    app.add_builder(DjangoStandaloneHTMLBuilder)

    # register the snippet directive
    app.add_directive('snippet', SnippetWithFilename)
    # register a node for snippet directive so that the xml parser
    # knows how to handle the enter/exit parsing event
    app.add_node(snippet_with_filename,
                 html=(visit_snippet, depart_snippet_literal),
                 latex=(visit_snippet_latex, depart_snippet_latex),
                 man=(visit_snippet_literal, depart_snippet_literal),
                 text=(visit_snippet_literal, depart_snippet_literal),
                 texinfo=(visit_snippet_literal, depart_snippet_literal))
class snippet_with_filename(nodes.literal_block):
    """
    Subclass the literal_block to override the visit/depart event handlers
    """
    # No behavior of its own: the distinct node class lets setup() register
    # per-format visit/depart handlers for snippets.
    pass
def visit_snippet_literal(self, node):
    """
    default literal block handler
    """
    # Formats without special snippet rendering fall back to a plain
    # literal block.
    self.visit_literal_block(node)
def depart_snippet_literal(self, node):
    """
    default literal block handler
    """
    self.depart_literal_block(node)
def visit_snippet(self, node):
    """
    HTML document generator visit handler

    Renders the snippet as a highlighted block preceded by a
    "snippet-filename" div, then skips default node processing.
    """
    lang = self.highlightlang
    # Enable line numbers once the block is long enough.
    linenos = node.rawsource.count('\n') >= self.highlightlinenothreshold - 1
    fname = node['filename']
    highlight_args = node.get('highlight_args', {})
    if 'language' in node:
        # code-block directives
        lang = node['language']
        highlight_args['force'] = True
    if 'linenos' in node:
        linenos = node['linenos']

    def warner(msg):
        self.builder.warn(msg, (self.builder.current_docname, node.line))

    highlighted = self.highlighter.highlight_block(node.rawsource, lang,
                                                   warn=warner,
                                                   linenos=linenos,
                                                   **highlight_args)
    starttag = self.starttag(node, 'div', suffix='',
                             CLASS='highlight-%s' % lang)
    self.body.append(starttag)
    self.body.append('<div class="snippet-filename">%s</div>\n''' % (fname,))
    self.body.append(highlighted)
    self.body.append('</div>\n')
    # The markup is fully emitted above; skip the default rendering.
    raise nodes.SkipNode
def visit_snippet_latex(self, node):
    """
    Latex document generator visit handler
    """
    # Start collecting the snippet body; depart_snippet_latex consumes it.
    self.verbatim = ''
def depart_snippet_latex(self, node):
    """
    Latex document generator depart handler.

    Emits the filename in a shaded box followed by the highlighted
    Verbatim environment collected in self.verbatim.
    """
    code = self.verbatim.rstrip('\n')
    lang = self.hlsettingstack[-1][0]
    linenos = code.count('\n') >= self.hlsettingstack[-1][1] - 1
    fname = node['filename']
    highlight_args = node.get('highlight_args', {})
    if 'language' in node:
        # code-block directives
        lang = node['language']
        highlight_args['force'] = True
    if 'linenos' in node:
        linenos = node['linenos']

    def warner(msg):
        self.builder.warn(msg, (self.curfilestack[-1], node.line))

    hlcode = self.highlighter.highlight_block(code, lang, warn=warner,
                                              linenos=linenos,
                                              **highlight_args)
    self.body.append('\n{\\colorbox[rgb]{0.9,0.9,0.9}'
                     '{\\makebox[\\textwidth][l]'
                     '{\\small\\texttt{%s}}}}\n' % (fname,))
    if self.table:
        # Inside tables, Verbatim must become OriginalVerbatim.
        hlcode = hlcode.replace('\\begin{Verbatim}',
                                '\\begin{OriginalVerbatim}')
        self.table.has_problematic = True
        self.table.has_verbatim = True
    hlcode = hlcode.rstrip()[:-14]  # strip \end{Verbatim}
    hlcode = hlcode.rstrip() + '\n'
    self.body.append('\n' + hlcode + '\\end{%sVerbatim}\n' %
                     (self.table and 'Original' or ''))
    self.verbatim = None
class SnippetWithFilename(Directive):
    """
    The 'snippet' directive that allows to add the filename (optional)
    of a code snippet in the document. This is modeled after CodeBlock.
    """
    has_content = True
    optional_arguments = 1  # the (optional) language argument
    option_spec = {'filename': directives.unchanged_required}

    def run(self):
        """Build a snippet_with_filename node from the directive content."""
        code = '\n'.join(self.content)
        literal = snippet_with_filename(code, code)
        if self.arguments:
            literal['language'] = self.arguments[0]
        # NOTE(review): ':filename:' looks mandatory here — a missing option
        # raises KeyError; confirm that is intended.
        literal['filename'] = self.options['filename']
        set_source_info(self, literal)
        return [literal]
class VersionDirective(Directive):
    # Handles versionadded/versionchanged; the single argument is the
    # Django version, the optional content describes the change.
    has_content = True
    required_arguments = 1
    optional_arguments = 1
    final_argument_whitespace = True
    option_spec = {}

    def run(self):
        """Emit a versionmodified node and record it with the environment."""
        if len(self.arguments) > 1:
            msg = """Only one argument accepted for directive '{directive_name}::'.
            Comments should be provided as content,
            not as an extra argument.""".format(directive_name=self.name)
            raise self.error(msg)

        env = self.state.document.settings.env
        ret = []
        node = addnodes.versionmodified()
        ret.append(node)
        # The configured "next" version renders as "Development version".
        if self.arguments[0] == env.config.django_next_version:
            node['version'] = "Development version"
        else:
            node['version'] = self.arguments[0]
        node['type'] = self.name
        if self.content:
            self.state.nested_parse(self.content, self.content_offset, node)
        env.note_versionchange(node['type'], node['version'], node, self.lineno)
        return ret
class DjangoHTMLTranslator(SmartyPantsHTMLTranslator):
    """
    Django-specific reST to HTML tweaks.
    """

    # Don't use border=1, which docutils does by default.
    def visit_table(self, node):
        self.context.append(self.compact_p)
        self.compact_p = True
        self._table_row_index = 0  # Needed by Sphinx
        self.body.append(self.starttag(node, 'table', CLASS='docutils'))

    def depart_table(self, node):
        self.compact_p = self.context.pop()
        self.body.append('</table>\n')

    def visit_desc_parameterlist(self, node):
        self.body.append('(')  # by default sphinx puts <big> around the "("
        self.first_param = 1
        self.optional_param_level = 0
        self.param_separator = node.child_text_separator
        self.required_params_left = sum([isinstance(c, addnodes.desc_parameter)
                                         for c in node.children])

    def depart_desc_parameterlist(self, node):
        self.body.append(')')

    # NOTE(review): this is a lexicographic string comparison of version
    # numbers — fragile, but kept for compatibility with old Sphinx.
    if sphinx_ver < '1.0.8':
        #
        # Don't apply smartypants to literal blocks
        #
        def visit_literal_block(self, node):
            self.no_smarty += 1
            SmartyPantsHTMLTranslator.visit_literal_block(self, node)

        def depart_literal_block(self, node):
            SmartyPantsHTMLTranslator.depart_literal_block(self, node)
            self.no_smarty -= 1

    #
    # Turn the "new in version" stuff (versionadded/versionchanged) into a
    # better callout -- the Sphinx default is just a little span,
    # which is a bit less obvious that I'd like.
    #
    # FIXME: these messages are all hardcoded in English. We need to change
    # that to accommodate other language docs, but I can't work out how to make
    # that work.
    #
    version_text = {
        'deprecated': 'Deprecated in Django %s',
        'versionchanged': 'Changed in Django %s',
        'versionadded': 'New in Django %s',
    }

    def visit_versionmodified(self, node):
        self.body.append(
            self.starttag(node, 'div', CLASS=node['type'])
        )
        # Use ":" when the node has descriptive content, "." otherwise.
        title = "%s%s" % (
            self.version_text[node['type']] % node['version'],
            ":" if len(node) else "."
        )
        self.body.append('<span class="title">%s</span> ' % title)

    def depart_versionmodified(self, node):
        self.body.append("</div>\n")

    # Give each section a unique ID -- nice for custom CSS hooks
    def visit_section(self, node):
        old_ids = node.get('ids', [])
        node['ids'] = ['s-' + i for i in old_ids]
        node['ids'].extend(old_ids)
        SmartyPantsHTMLTranslator.visit_section(self, node)
        node['ids'] = old_ids
def parse_django_admin_node(env, sig, signode):
    """Record the current admin command and render its signature node."""
    first_word = sig.split(' ')[0]
    env._django_curr_admin_command = first_word
    rendered = "django-admin.py %s" % sig
    signode += addnodes.desc_name(rendered, rendered)
    return sig
def parse_django_adminopt_node(env, sig, signode):
    """A copy of sphinx.directives.CmdoptionDesc.parse_signature()"""
    from sphinx.domains.std import option_desc_re
    count = 0
    firstname = ''
    # Try the rich option regex first; fall back to the simple one only
    # when nothing matched.  The original duplicated this loop body verbatim
    # for each regex.
    for regex in (option_desc_re, simple_option_desc_re):
        if count:
            break
        for m in regex.finditer(sig):
            optname, args = m.groups()
            if count:
                signode += addnodes.desc_addname(', ', ', ')
            signode += addnodes.desc_name(optname, optname)
            signode += addnodes.desc_addname(args, args)
            if not count:
                # Remember the first option name as the xref target.
                firstname = optname
            count += 1
    if not firstname:
        # Same exception type as before, but now with a useful message.
        raise ValueError('no option name found in signature %r' % sig)
    return firstname
class DjangoStandaloneHTMLBuilder(StandaloneHTMLBuilder):
    """
    Subclass to add some extra things we need.
    """
    name = 'djangohtml'

    def finish(self):
        """Run the normal HTML build, then dump template builtins to JS."""
        super(DjangoStandaloneHTMLBuilder, self).finish()
        self.info(bold("writing templatebuiltins.js..."))
        xrefs = self.env.domaindata["std"]["objects"]

        def _names(kind):
            # Names of objects of the given type that are documented in
            # the templates/builtins reference page.
            return [name for ((objtype, name), (docname, anchor))
                    in xrefs.items()
                    if objtype == kind and docname == "ref/templates/builtins"]

        templatebuiltins = {
            "ttags": _names("templatetag"),
            "tfilters": _names("templatefilter"),
        }
        outfilename = os.path.join(self.outdir, "templatebuiltins.js")
        with open(outfilename, 'w') as fp:
            fp.write('var django_template_builtins = ')
            json.dump(templatebuiltins, fp)
            fp.write(';\n')
|
liavkoren/djangoDev
|
docs/_ext/djangodocs.py
|
Python
|
bsd-3-clause
| 12,017
|
# Version 5
'''This takes a base MineCraft level and adds or edits trees.
Place it in the folder where the save files are (usually .../.minecraft/saves)
Requires mcInterface.py in the same folder.'''
# Here are the variables you can edit.
# This is the name of the map to edit.
# Make a backup if you are experimenting!
LOADNAME = "LevelSave"
# How many trees do you want to add?
TREECOUNT = 12
# Where do you want the new trees?
# X, and Z are the map coordinates
X = 66
Z = -315
# How large an area do you want the trees to be in?
# for example, RADIUS = 10 will place trees randomly in
# a circular area 20 blocks wide.
RADIUS = 80
# NOTE: tree density will be higher in the center than at the edges.
# Which shapes would you like the trees to be?
# these first three are best suited for small heights, from 5 - 10
# "normal" is the normal minecraft shape, it only gets taller and shorter
# "bamboo" a trunk with foliage, it only gets taller and shorter
# "palm" a trunk with a fan at the top, only gets taller and shorter
# "stickly" selects randomly from "normal", "bamboo" and "palm"
# these last five are best suited for very large trees, heights greater than 8
# "round" procedural spherical shaped tree, can scale up to immense size
# "cone" procedural, like a pine tree, also can scale up to immense size
# "procedural" selects randomly from "round" and "conical"
# "rainforest" many slender trees, most at the lower range of the height,
# with a few at the upper end.
# "mangrove" makes mangrove trees (see PLANTON below).
SHAPE = "procedural"
# What height should the trees be?
# Specifies the average height of the tree
# Examples:
# 5 is normal minecraft tree
# 3 is minecraft tree with foliage flush with the ground
# 10 is very tall trees, they will be hard to chop down
# NOTE: for round and conical, this affects the foliage size as well.
# CENTERHEIGHT is the height of the trees at the center of the area
# ie, when radius = 0
CENTERHEIGHT = 55
# EDGEHEIGHT is the height at the trees at the edge of the area.
# ie, when radius = RADIUS
EDGEHEIGHT = 25
# What should the variation in HEIGHT be?
# actual value +- variation
# default is 1
# Example:
# HEIGHT = 8 and HEIGHTVARIATION = 3 will result in
# trunk heights from 5 to 11
# value is clipped to a max of HEIGHT
# for a good rainforest, set this value not more than 1/2 of HEIGHT
HEIGHTVARIATION = 12
# Do you want branches, trunk, and roots?
# True makes all of that
# False does not create the trunk and branches, or the roots (even if they are
# enabled further down)
WOOD = True
# Trunk thickness multiplier
# from zero (super thin trunk) to whatever huge number you can think of.
# Only works if SHAPE is not a "stickly" subtype
# Example:
# 1.0 is the default, it makes decently normal sized trunks
# 0.3 makes very thin trunks
# 4.0 makes a thick trunk (good for HOLLOWTRUNK).
# 10.5 will make a huge thick trunk. Not even kidding. Makes spacious
# hollow trunks though!
TRUNKTHICKNESS = 1.0
# Trunk height, as a fraction of the tree
# Only works on "round" shaped trees
# Sets the height of the crown, where the trunk ends and splits
# Examples:
# 0.7 the default value, a bit more than half of the height
# 0.3 good for a fan-like tree
# 1.0 the trunk will extend to the top of the tree, and there will be no crown
# 2.0 the trunk will extend out the top of the foliage, making the tree appear
# like a cluster of green grapes impaled on a spike.
TRUNKHEIGHT = 0.7
# Do you want the trunk and tree broken off at the top?
# removes about half of the top of the trunk, and any foliage
# and branches that would attach above it.
# Only works if SHAPE is not a "stickly" subtype
# This results in trees that are shorter than the height settings
# True does that stuff
# False makes a normal tree (default)
BROKENTRUNK = False
# Note, this works well with HOLLOWTRUNK (below) turned on as well.
# Do you want the trunk to be hollow (or filled) inside?
# Only works with larger sized trunks.
# Only works if SHAPE is not a "stickly" subtype
# True makes the trunk hollow (or filled with other stuff)
# False makes a solid trunk (default)
HOLLOWTRUNK = False
# Note, this works well with BROKENTRUNK set to true (above)
# Further note, you may want to use a large value for TRUNKTHICKNESS
# How many branches should there be?
# General multiplier for the number of branches
# However, it will not make more branches than foliage clusters
# so to guarantee a branch to every foliage cluster, set it very high, like 10000
# this also affects the number of roots, if they are enabled.
# Examples:
# 1.0 is normal
# 0.5 will make half as many branches
# 2.0 will make twice as many branches
# 10000 will make a branch to every foliage cluster (I'm pretty sure)
BRANCHDENSITY = 1.0
# do you want roots from the bottom of the tree?
# Only works if SHAPE is "round" or "cone" or "procedural"
# "yes" roots will penetrate anything, and may enter underground caves.
# "tostone" roots will be stopped by stone (default see STOPSROOTS below).
# There may be some penetration.
# "hanging" will hang downward in air. Good for "floating" type maps
# (I really miss "floating" terrain as a default option)
# "no" roots will not be generated
ROOTS = "tostone"
# Do you want root buttresses?
# These make the trunk not-round at the base, seen in tropical or old trees.
# This option generally makes the trunk larger.
# Only works if SHAPE is "round" or "cone" or "procedural"
# Options:
# True makes root butresses
# False leaves them out
ROOTBUTTRESSES = True
# Do you want leaves on the trees?
# True there will be leaves
# False there will be no leaves
FOLIAGE = True
# How thick should the foliage be
# General multiplier for the number of foliage clusters
# Examples:
# 1.0 is normal
# 0.3 will make very sparse spotty trees, half as many foliage clusters
# 2.0 will make dense foliage, better for the "rainforests" SHAPE
FOLIAGEDENSITY = 1.0
# Limit the tree height to the top of the map?
# True the trees will not grow any higher than the top of the map
# False the trees may be cut off by the top of the map
MAPHEIGHTLIMIT = True
# add lights in the middle of foliage clusters
# for those huge trees that get so dark underneath
# or for enchanted forests that should glow and stuff
# Only works if SHAPE is "round" or "cone" or "procedural"
# 0 makes just normal trees
# 1 adds one light inside the foliage clusters for a bit of light
# 2 adds two lights around the base of each cluster, for more light
# 4 adds lights all around the base of each cluster for lots of light
LIGHTTREE = 0
# Do you want to only place trees near existing trees?
# True will only plant new trees near existing trees.
# False will not check for existing trees before planting.
# NOTE: the taller the tree, the larger the forest needs to be to qualify
# OTHER NOTE: this feature has not been extensively tested.
# IF YOU HAVE PROBLEMS: SET TO False
ONLYINFORESTS = False
#####################
# Advanced options! #
#####################
# What kind of material should the "wood" be made of?
# defaults to 17
WOODMAT = 17
# What data value should the wood blocks have?
# Some blocks, like wood, leaves, and cloth change
# appearance with different data values
# defaults to 0
WOODDATA = 0
# What kind of material should the "leaves" be made of?
# defaults to 18
LEAFMAT = 18
# What data value should the leaf blocks have?
# Some blocks, like wood, leaves, and cloth change
# appearance with different data values
# defaults to 0
LEAFDATA = 0
# What kind of material should the "lights" be made of?
# defaults to 89 (glowstone)
LIGHTMAT = 89
# What data value should the light blocks have?
# defaults to 0
LIGHTDATA = 0
# What kind of material would you like the "hollow" trunk filled with?
# defaults to 0 (air)
TRUNKFILLMAT = 0
# What data value would you like the "hollow" trunk filled with?
# defaults to 0
TRUNKFILLDATA = 0
# What kind of blocks should the trees be planted on?
# Use the Minecraft index.
# Examples
# 2 is grass (the default)
# 3 is dirt
# 1 is stone (an odd choice)
# 12 is sand (for beach or desert)
# 9 is water (if you want an aquatic forest)
# this is a list, and comma seperated.
# example: [2, 3]
# will plant trees on grass or dirt
PLANTON = [2]
# What kind of blocks should stop the roots?
# a list of block id numbers like PLANTON
# Only works if ROOTS = "tostone"
# default, [1] (stone)
# if you want it to be stopped by other block types, add it to the list
STOPSROOTS = [1]
# What kind of blocks should stop branches?
# same as STOPSROOTS above, but is always turned on
# defaults to stone, cobblestone, and glass
# set it to [] if you want branches to go through everything
STOPSBRANCHES = [1, 4, 20]
# How do you want to interpolate from center to edge?
# "linear" makes a cone-shaped forest
# This is the only option at present
INTERPOLATION = "linear"
# Do a rough recalculation of the lighting?
# Slows it down to do a very rough and incomplete re-light.
# If you want to really fix the lighting, use a separate re-lighting tool.
# True do the rough fix
# False don't bother
LIGHTINGFIX = True
# How many times do you want to try to find a location?
# it will stop planting after MAXTRIES has been exceeded.
# Set to smaller numbers to abort quicker, or larger numbers
# if you want to keep trying for a while.
# NOTE: the number of trees will not exceed this number
# Default: 1000
MAXTRIES = 1000
# Do you want lots of text telling you what is going on?
# True lots of text (default). Good for debugging.
# False no text
VERBOSE = True
##############################################################
# Don't edit below here unless you know what you are doing #
##############################################################
# input filtering
# Validate / clamp each user-set option above, warning (when VERBOSE)
# and substituting a sane default for anything out of range.
TREECOUNT = int(TREECOUNT)
if TREECOUNT < 0:
    TREECOUNT = 0
if SHAPE not in ["normal", "bamboo", "palm", "stickly",
                 "round", "cone", "procedural",
                 "rainforest", "mangrove"]:
    if VERBOSE:
        print("SHAPE not set correctly, using 'procedural'.")
    SHAPE = "procedural"
if CENTERHEIGHT < 1:
    CENTERHEIGHT = 1
if EDGEHEIGHT < 1:
    EDGEHEIGHT = 1
minheight = min(CENTERHEIGHT, EDGEHEIGHT)
if HEIGHTVARIATION > minheight:
    HEIGHTVARIATION = minheight
if INTERPOLATION not in ["linear"]:
    if VERBOSE:
        print("INTERPOLATION not set correctly, using 'linear'.")
    INTERPOLATION = "linear"
if WOOD not in [True, False]:
    if VERBOSE:
        print("WOOD not set correctly, using True")
    WOOD = True
if TRUNKTHICKNESS < 0.0:
    TRUNKTHICKNESS = 0.0
if TRUNKHEIGHT < 0.0:
    TRUNKHEIGHT = 0.0
if ROOTS not in ["yes", "tostone", "hanging", "no"]:
    if VERBOSE:
        print("ROOTS not set correctly, using 'no' and creating no roots")
    ROOTS = "no"
if ROOTBUTTRESSES not in [True, False]:
    if VERBOSE:
        print("ROOTBUTTRESSES not set correctly, using False")
    ROOTBUTTRESSES = False
if FOLIAGE not in [True, False]:
    if VERBOSE:
        print("FOLIAGE not set correctly, using True")
    # BUG FIX: this branch previously reset ROOTBUTTRESSES instead of the
    # invalid FOLIAGE value it had just warned about.
    FOLIAGE = True
if FOLIAGEDENSITY < 0.0:
    FOLIAGEDENSITY = 0.0
if BRANCHDENSITY < 0.0:
    BRANCHDENSITY = 0.0
if MAPHEIGHTLIMIT not in [True, False]:
    if VERBOSE:
        print("MAPHEIGHTLIMIT not set correctly, using False")
    MAPHEIGHTLIMIT = False
if LIGHTTREE not in [0, 1, 2, 4]:
    if VERBOSE:
        print("LIGHTTREE not set correctly, using 0 for no torches")
    LIGHTTREE = 0
# assemble the material dictionaries ({'B': block id, 'D': data value})
WOODINFO = {'B': WOODMAT, 'D': WOODDATA}
LEAFINFO = {'B': LEAFMAT, 'D': LEAFDATA}
LIGHTINFO = {'B': LIGHTMAT, 'D': LIGHTDATA}
TRUNKFILLINFO = {'B': TRUNKFILLMAT, 'D': TRUNKFILLDATA}
# The following is an interface class for .mclevel data for minecraft savefiles.
# The following also includes a useful coordinate to index convertor and several
# other useful functions.
import mcInterface
#some handy functions
def dist_to_mat(cord, vec, matidxlist, mcmap, invert=False, limit=False):
    '''travel from cord along vec and return how far it was to a point of matidx

    the distance is returned in number of iterations. If the edge of the map
    is reached, then return the number of iterations as well.
    if invert == True, search for anything other than those in matidxlist
    '''
    assert isinstance(mcmap, mcInterface.SaveFile)
    block = mcmap.block
    # Start at the center of the block containing cord.
    position = [axis_value + .5 for axis_value in cord]
    steps = 0
    while True:
        info = block(int(position[0]), int(position[1]), int(position[2]))
        if info is None:
            # Walked off the edge of the map; report the distance covered.
            break
        in_list = info['B'] in matidxlist
        if in_list and (invert is False):
            # Found one of the target materials.
            break
        elif (not in_list) and invert:
            # Inverted search: found something outside the list.
            break
        # Advance one step along the direction vector.
        position = [position[axis] + vec[axis] for axis in range(3)]
        steps += 1
        if limit and steps > limit:
            break
    return steps
# This is the end of the MCLevel interface.
# Now, on to the actual code.
from random import random, choice, sample
from math import sqrt, sin, cos, pi
def calc_column_lighting(x, z, mclevel):
    '''Recalculate the sky lighting of the column.

    x, z -- column coordinates; mclevel -- open map interface providing
    block / set_block / retrieve_heightmap / set_heightmap.
    Returns None (also when the column does not exist on the map).
    '''
    # Begin at the top with sky light level 15.
    cur_light = 15
    # traverse the column until cur_light == 0
    # and the existing light values are also zero.
    y = 255
    get_block = mclevel.block
    set_block = mclevel.set_block
    get_height = mclevel.retrieve_heightmap
    set_height = mclevel.set_heightmap
    #get the current heightmap
    cur_height = get_height(x, z)
    # set a flag that the highest point has been updated
    height_updated = False
    # if this doesn't exist, the block doesn't exist either, abort.
    if cur_height is None:
        return None
    # Per-block-id sky light attenuation: 0 (air) and 20 (glass) pass light
    # freely; 18 (leaves) dims by 1; 8 and 79 dim by 2 (presumably water and
    # ice -- confirm against the block-id table). Anything else is opaque.
    light_reduction_lookup = {0: 0, 20: 0, 18: 1, 8: 2, 79: 2}
    while True:
        #get the block sky light and type
        block_info = get_block(x, y, z, 'BS')
        block_light = block_info['S']
        block_type = block_info['B']
        # update the height map if it hasn't been updated yet,
        # and the current block reduces light
        if (not height_updated) and (block_type not in (0, 20)):
            new_height = y + 1
            if new_height == 256:
                new_height = 255
            set_height(x, new_height, z)
            height_updated = True
        #compare block with cur_light, escape if both 0
        if block_light == 0 and cur_light == 0:
            break
        #set the block light if necessary
        if block_light != cur_light:
            set_block(x, y, z, {'S': cur_light})
        #set the new cur_light
        if block_type in light_reduction_lookup:
            # partial light reduction
            light_reduction = light_reduction_lookup[block_type]
        else:
            # full light reduction
            light_reduction = 16
        cur_light += -light_reduction
        if cur_light < 0:
            cur_light = 0
        #increment and check y
        y += -1
        if y < 0:
            break
class ReLight(object):
    '''keep track of which squares need to be relit, and then relight them'''

    def __init__(self):
        # Set of (x, z) column coordinates awaiting a lighting pass.
        self.all_columns = set()
        # SaveFile interface; assigned externally before calc_lighting runs.
        self.save_file = None

    def add(self, x, z):
        """Mark the column at (x, z) as needing to be relit."""
        self.all_columns.add((x, z))

    def calc_lighting(self):
        """Recalculate the sky lighting for every registered column."""
        level = self.save_file
        for x, z in self.all_columns:
            calc_column_lighting(x, z, level)
relight_master = ReLight()
def assign_value(x, y, z, values, save_file):
    '''Assign an index value to a location in mcmap.

    If the index is outside the bounds of the map, return None. If the
    assignment succeeds, return True.

    x, y, z -- block coordinates; values -- block data dict to set
    (e.g. {'B': id, 'D': data}); save_file -- the open map interface.
    '''
    # Reject heights outside the world. The docstring always promised an
    # out-of-bounds None, but only the top was checked; hanging roots can
    # extend below y == 0, so guard the bottom as well.
    if y > 255 or y < 0:
        return None
    result = save_file.set_block(x, y, z, values)
    if LIGHTINGFIX:
        # Remember this column so the rough re-lighting pass revisits it.
        relight_master.add(x, z)
    return result
class Tree(object):
    '''Set up the interface for tree objects. Designed for subclassing.
    '''

    def prepare(self, mcmap):
        '''initialize the internal values for the Tree object.
        '''
        return None

    def maketrunk(self, mcmap):
        '''Generate the trunk and enter it in mcmap.
        '''
        return None

    def makefoliage(self, mcmap):
        """Generate the foliage and enter it in mcmap.

        Note, foliage will disintegrate if there is no log nearby"""
        return None

    def copy(self, other):
        '''Copy the essential values of the other tree object into self.
        '''
        self.pos = other.pos
        self.height = other.height

    def __init__(self, pos=None, height=1):
        '''Accept values for the position and height of a tree.

        Store them in self.
        '''
        # BUG FIX: the default used to be the mutable list [0, 0, 0], which
        # was shared by every Tree created without an explicit position, so
        # mutating one tree's pos silently moved the others.  Use None as a
        # sentinel and build a fresh list per instance instead.
        if pos is None:
            pos = [0, 0, 0]
        self.pos = pos
        self.height = height
class StickTree(Tree):
    '''Set up the trunk for trees with a trunk width of 1 and simple geometry.

    Designed for subclassing. Only makes the trunk.
    '''

    def maketrunk(self, mcmap):
        """Stack single wood blocks from the base up to self.height."""
        base_x, base_y, base_z = self.pos
        for level in range(self.height):
            assign_value(base_x, base_y + level, base_z, WOODINFO, mcmap)
class NormalTree(StickTree):
    '''Set up the foliage for a 'normal' tree.

    This tree will be a single bulb of foliage above a single width trunk.
    This shape is very similar to the default Minecraft tree.
    '''

    def makefoliage(self, mcmap):
        """note, foliage will disintegrate if there is no foliage below, or
        if there is no "log" block within range 2 (square) at the same level or
        one level below"""
        crown_y = self.pos[1] + self.height - 1
        bottom = crown_y - 2
        for y in range(bottom, crown_y + 2):
            # The lower two layers are wide (radius 2); the upper ones narrow.
            rad = 2 if y <= bottom + 1 else 1
            for xoff in range(-rad, rad + 1):
                for zoff in range(-rad, rad + 1):
                    # Randomly clip the square's corners for a rounder look.
                    # (random() is evaluated first to keep the RNG stream
                    # identical to the original implementation.)
                    if (random() > 0.618
                            and abs(xoff) == abs(zoff)
                            and abs(xoff) == rad
                            ):
                        continue
                    assign_value(self.pos[0] + xoff, y,
                                 self.pos[2] + zoff, LEAFINFO, mcmap)
class BambooTree(StickTree):
    '''Set up the foliage for a bamboo tree.

    Make foliage sparse and adjacent to the trunk.
    '''

    def makefoliage(self, mcmap):
        """Attach two random diagonal leaf blocks at every trunk level."""
        base_y = self.pos[1]
        for y in range(base_y, base_y + self.height + 1):
            for _ in range(2):
                leaf_x = self.pos[0] + choice([-1, 1])
                leaf_z = self.pos[2] + choice([-1, 1])
                assign_value(leaf_x, y, leaf_z, LEAFINFO, mcmap)
class PalmTree(StickTree):
    '''Set up the foliage for a palm tree.

    Make foliage stick out in four directions from the top of the trunk.
    '''

    def makefoliage(self, mcmap):
        """Lay a diagonal cross of leaves at the crown of the trunk."""
        crown_y = self.pos[1] + self.height
        for xoff in range(-2, 3):
            for zoff in range(-2, 3):
                # Only the two diagonals of the 5x5 square get leaves.
                if abs(xoff) == abs(zoff):
                    assign_value(self.pos[0] + xoff, crown_y,
                                 self.pos[2] + zoff, LEAFINFO, mcmap)
class ProceduralTree(Tree):
'''Set up the methods for a larger more complicated tree.
This tree type has roots, a trunk, and branches all of varying width,
and many foliage clusters.
MUST BE SUBCLASSED. Specifically, self.foliage_shape must be set.
Subclass 'prepare' and 'shapefunc' to make different shaped trees.
'''
    @staticmethod
    def crossection(center, radius, diraxis, matidx, mcmap):
        '''Create a round section of type matidx in mcmap.

        Passed values:
        center = [x, y, z] for the coordinates of the center block
        radius = <number> as the radius of the section. May be a float or int.
        diraxis: The list index for the axis to make the section
        perpendicular to. 0 indicates the x axis, 1 the y, 2 the z. The
        section will extend along the other two axies.
        matidx = block info dict of the form {'B': <id>, 'D': <data>}
        (e.g. WOODINFO or LEAFINFO), passed through to assign_value.
        mcmap = the SaveFile interface, passed through to assign_value.
        '''
        rad = int(radius + .618)
        if rad <= 0:
            return None
        # The two axes perpendicular to diraxis span the disc.
        secidx1 = (diraxis - 1) % 3
        secidx2 = (1 + diraxis) % 3
        coord = [0, 0, 0]
        for off1 in range(-rad, rad + 1):
            for off2 in range(-rad, rad + 1):
                # Keep only offsets whose block center lies inside the circle.
                thisdist = sqrt((abs(off1) + .5) ** 2 + (abs(off2) + .5) ** 2)
                if thisdist > radius:
                    continue
                pri = center[diraxis]
                sec1 = center[secidx1] + off1
                sec2 = center[secidx2] + off2
                coord[diraxis] = pri
                coord[secidx1] = sec1
                coord[secidx2] = sec2
                assign_value(coord[0], coord[1], coord[2], matidx, mcmap)
def shapefunc(self, y):
'''Take y and return a radius for the location of the foliage cluster.
If no foliage cluster is to be created, return None
Designed for sublcassing. Only makes clusters close to the trunk.
'''
if random() < 100. / (self.height ** 2) and y < self.trunkheight:
return self.height * .12
return None
def foliagecluster(self, center, mcmap):
'''generate a round cluster of foliage at the location center.
The shape of the cluster is defined by the list self.foliage_shape.
This list must be set in a subclass of ProceduralTree.
'''
level_radius = self.foliage_shape
x = center[0]
y = center[1]
z = center[2]
for i in level_radius:
self.crossection([x, y, z], i, 1, LEAFINFO, mcmap)
y += 1
    def taperedcylinder(self, start, end, startsize, endsize, mcmap, blockdata):
        '''Create a tapered cylinder in mcmap.

        start and end are the beginning and ending coordinates of form [x, y, z].
        startsize and endsize are the beginning and ending radius.
        The material is given by blockdata, a block info dict such as
        WOODINFO or TRUNKFILLINFO.
        '''
        # delta is the coordinate vector for the difference between
        # start and end.
        delta = [int(end[i] - start[i]) for i in range(3)]
        # primidx is the index (0, 1, or 2 for x, y, z) for the coordinate
        # which has the largest overall delta.
        maxdist = max(delta, key=abs)
        if maxdist == 0:
            # start and end round to the same block: nothing to draw.
            return None
        primidx = delta.index(maxdist)
        # secidx1 and secidx2 are the remaining indices out of [0, 1, 2].
        secidx1 = (primidx - 1) % 3
        secidx2 = (1 + primidx) % 3
        # primsign is the digit 1 or -1 depending on whether the limb is headed
        # along the positive or negative primidx axis.
        primsign = int(delta[primidx] / abs(delta[primidx]))
        # secdelta1 and ...2 are the amount the associated values change
        # for every step along the prime axis.
        secdelta1 = delta[secidx1]
        secfac1 = float(secdelta1) / delta[primidx]
        secdelta2 = delta[secidx2]
        secfac2 = float(secdelta2) / delta[primidx]
        # Initialize coord. These values could be anything, since
        # they are overwritten.
        coord = [0, 0, 0]
        # Loop through each crossection along the primary axis,
        # from start to end.
        endoffset = delta[primidx] + primsign
        for primoffset in range(0, endoffset, primsign):
            primloc = start[primidx] + primoffset
            secloc1 = int(start[secidx1] + primoffset * secfac1)
            secloc2 = int(start[secidx2] + primoffset * secfac2)
            coord[primidx] = primloc
            coord[secidx1] = secloc1
            coord[secidx2] = secloc2
            primdist = abs(delta[primidx])
            # Linearly interpolate the disc radius from startsize to endsize.
            radius = endsize + (startsize - endsize) * abs(delta[primidx]
                                                           - primoffset) / primdist
            self.crossection(coord, radius, primidx, blockdata, mcmap)
def makefoliage(self, mcmap):
'''Generate the foliage for the tree in mcmap.
'''
"""note, foliage will disintegrate if there is no foliage below, or
if there is no "log" block within range 2 (square) at the same level or
one level below"""
foliage_coords = self.foliage_cords
for coord in foliage_coords:
self.foliagecluster(coord, mcmap)
for cord in foliage_coords:
assign_value(cord[0], cord[1], cord[2], WOODINFO, mcmap)
if LIGHTTREE == 1:
assign_value(cord[0], cord[1] + 1, cord[2], LIGHTINFO, mcmap)
elif LIGHTTREE in [2, 4]:
assign_value(cord[0] + 1, cord[1], cord[2], LIGHTINFO, mcmap)
assign_value(cord[0] - 1, cord[1], cord[2], LIGHTINFO, mcmap)
if LIGHTTREE == 4:
assign_value(cord[0], cord[1], cord[2] + 1, LIGHTINFO, mcmap)
assign_value(cord[0], cord[1], cord[2] - 1, LIGHTINFO, mcmap)
    def makebranches(self, mcmap):
        '''Generate the branches and enter them in mcmap.

        Draws one tapered wood cylinder from near the trunk out to a
        randomly-selected subset of the foliage cluster coordinates.
        '''
        treeposition = self.pos
        height = self.height
        topy = treeposition[1] + int(self.trunkheight + 0.5)
        # endrad is the base radius of the branches at the trunk
        endrad = self.trunkradius * (1 - self.trunkheight / height)
        if endrad < 1.0:
            endrad = 1.0
        for coord in self.foliage_cords:
            # dist is the horizontal distance from the trunk to the cluster.
            dist = (sqrt(float(coord[0] - treeposition[0]) ** 2 +
                         float(coord[2] - treeposition[2]) ** 2))
            ydist = coord[1] - treeposition[1]
            # value is a magic number that weights the probability
            # of generating branches properly so that
            # you get enough on small trees, but not too many
            # on larger trees.
            # Very difficult to get right... do not touch!
            value = (self.branchdensity * 220 * height) / ((ydist + dist) ** 3)
            if value < random():
                continue
            posy = coord[1]
            # Randomize the branch slope a little around the shape default.
            slope = self.branchslope + (0.5 - random()) * .16
            if coord[1] - dist * slope > topy:
                # Another random rejection, for branches between
                # the top of the trunk and the crown of the tree
                threshhold = 1 / float(height)
                if random() < threshhold:
                    continue
                branchy = topy
                basesize = endrad
            else:
                branchy = posy - dist * slope
                basesize = (endrad + (self.trunkradius - endrad) *
                            (topy - branchy) / self.trunkheight)
            startsize = (basesize * (1 + random()) * .618 *
                         (dist / height) ** 0.618)
            # Offset the branch root radially from the trunk center.
            rndr = sqrt(random()) * basesize * 0.618
            rndang = random() * 2 * pi
            rndx = int(rndr * sin(rndang) + 0.5)
            rndz = int(rndr * cos(rndang) + 0.5)
            startcoord = [treeposition[0] + rndx,
                          int(branchy),
                          treeposition[2] + rndz]
            if startsize < 1.0:
                startsize = 1.0
            endsize = 1.0
            self.taperedcylinder(startcoord, coord, startsize, endsize,
                                 mcmap, WOODINFO)
    def makeroots(self, rootbases, mcmap):
        '''generate the roots and enter them in mcmap.

        rootbases = [[x, z, base_radius], ...] and is the list of locations
        the roots can originate from, and the size of that location.
        Root endpoints mirror the foliage cluster coordinates, so bushier
        trees grow more roots.
        '''
        treeposition = self.pos
        height = self.height
        for coord in self.foliage_cords:
            # First, set the threshold for randomly selecting this
            # coordinate for root creation.
            dist = (sqrt(float(coord[0] - treeposition[0]) ** 2 +
                         float(coord[2] - treeposition[2]) ** 2))
            ydist = coord[1] - treeposition[1]
            value = (self.branchdensity * 220 * height) / ((ydist + dist) ** 3)
            # Randomly skip roots, based on the above threshold
            if value < random():
                continue
            # initialize the internal variables from a selection of
            # starting locations.
            rootbase = choice(rootbases)
            rootx = rootbase[0]
            rootz = rootbase[1]
            rootbaseradius = rootbase[2]
            # Offset the root origin location by a random amount
            # (radially) from the starting location.
            rndr = (sqrt(random()) * rootbaseradius * .618)
            rndang = random() * 2 * pi
            rndx = int(rndr * sin(rndang) + 0.5)
            rndz = int(rndr * cos(rndang) + 0.5)
            rndy = int(random() * rootbaseradius * 0.5)
            startcoord = [rootx + rndx, treeposition[1] + rndy, rootz + rndz]
            # offset is the distance from the root base to the root tip.
            offset = [startcoord[i] - coord[i] for i in range(3)]
            # If this is a mangrove tree, make the roots longer.
            if SHAPE == "mangrove":
                offset = [int(val * 1.618 - 1.5) for val in offset]
            endcoord = [startcoord[i] + offset[i] for i in range(3)]
            rootstartsize = (rootbaseradius * 0.618 * abs(offset[1]) /
                             (height * 0.618))
            if rootstartsize < 1.0:
                rootstartsize = 1.0
            endsize = 1.0
            # If ROOTS is set to "tostone" or "hanging" we need to check
            # along the distance for collision with existing materials.
            if ROOTS in ["tostone", "hanging"]:
                offlength = sqrt(float(offset[0]) ** 2 +
                                 float(offset[1]) ** 2 +
                                 float(offset[2]) ** 2)
                if offlength < 1:
                    continue
                rootmid = endsize
                # vec is a unit vector along the direction of the root.
                vec = [offset[i] / offlength for i in range(3)]
                if ROOTS == "tostone":
                    searchindex = STOPSROOTS
                elif ROOTS == "hanging":
                    searchindex = [0]
                # startdist is how many steps to travel before starting to
                # search for the material. It is used to ensure that large
                # roots will go some distance before changing directions
                # or stopping.
                startdist = int(random() * 6 * sqrt(rootstartsize) + 2.8)
                # searchstart is the coordinate where the search should begin
                searchstart = [startcoord[i] + startdist * vec[i]
                               for i in range(3)]
                # dist stores how far the search went (including searchstart)
                # before encountering the expected material.
                dist = startdist + dist_to_mat(searchstart, vec,
                                               searchindex, mcmap, limit=offlength)
                # If the distance to the material is less than the length
                # of the root, change the end point of the root to where
                # the search found the material.
                if dist < offlength:
                    # rootmid is the size of the crossection at endcoord.
                    rootmid += (rootstartsize -
                                endsize) * (1 - dist / offlength)
                    # endcoord is the midpoint for hanging roots,
                    # and the endpoint for roots stopped by stone.
                    endcoord = [startcoord[i] + int(vec[i] * dist)
                                for i in range(3)]
                    if ROOTS == "hanging":
                        # remaining_dist is how far the root had left
                        # to go when it was stopped.
                        remaining_dist = offlength - dist
                        # Initialize bottomcord to the stopping point of
                        # the root, and then hang straight down
                        # a distance of remaining_dist.
                        bottomcord = endcoord[:]
                        bottomcord[1] += -int(remaining_dist)
                        # Make the hanging part of the hanging root.
                        self.taperedcylinder(endcoord, bottomcord,
                                             rootmid, endsize, mcmap, WOODINFO)
                # make the beginning part of hanging or "tostone" roots
                self.taperedcylinder(startcoord, endcoord,
                                     rootstartsize, rootmid, mcmap, WOODINFO)
            # If you aren't searching for stone or air, just make the root.
            else:
                self.taperedcylinder(startcoord, endcoord,
                                     rootstartsize, endsize, mcmap, WOODINFO)
    def maketrunk(self, mcmap):
        '''Generate the trunk, roots, and branches in mcmap.

        The trunk is drawn as two stacked tapered cylinders (base to .382 of
        the trunk height, then up to the top); optionally surrounded by root
        buttresses, hollowed out, and finished with branches and roots.
        '''
        height = self.height
        trunkheight = self.trunkheight
        trunkradius = self.trunkradius
        treeposition = self.pos
        starty = treeposition[1]
        midy = treeposition[1] + int(trunkheight * .382)
        topy = treeposition[1] + int(trunkheight + 0.5)
        # In this method, x and z are the position of the trunk.
        x = treeposition[0]
        z = treeposition[2]
        end_size_factor = trunkheight / height
        midrad = trunkradius * (1 - end_size_factor * .5)
        endrad = trunkradius * (1 - end_size_factor)
        if endrad < 1.0:
            endrad = 1.0
        if midrad < endrad:
            midrad = endrad
        # Make the root buttresses, if indicated
        if ROOTBUTTRESSES or SHAPE == "mangrove":
            # The start radius of the trunk should be a little smaller if we
            # are using root buttresses.
            startrad = trunkradius * .8
            # rootbases is used later in self.makeroots(...) as
            # starting locations for the roots.
            rootbases = [[x, z, startrad]]
            buttress_radius = trunkradius * 0.382
            # posradius is how far the root buttresses should be offset
            # from the trunk.
            posradius = trunkradius
            # In mangroves, the root buttresses are much more extended.
            if SHAPE == "mangrove":
                posradius *= 2.618
            num_of_buttresses = int(sqrt(trunkradius) + 3.5)
            for i in range(num_of_buttresses):
                rndang = random() * 2 * pi
                thisposradius = posradius * (0.9 + random() * .2)
                # thisx and thisz are the x and z position for the base of
                # the root buttress.
                thisx = x + int(thisposradius * sin(rndang))
                thisz = z + int(thisposradius * cos(rndang))
                # thisbuttressradius is the radius of the buttress.
                # Currently, root buttresses do not taper.
                thisbuttressradius = buttress_radius * (0.618 + random())
                if thisbuttressradius < 1.0:
                    thisbuttressradius = 1.0
                # Make the root buttress.
                self.taperedcylinder([thisx, starty, thisz], [x, midy, z],
                                     thisbuttressradius, thisbuttressradius,
                                     mcmap, WOODINFO)
                # Add this root buttress as a possible location at
                # which roots can spawn.
                rootbases += [[thisx, thisz, thisbuttressradius]]
        else:
            # If root buttresses are turned off, set the trunk radius
            # to normal size.
            startrad = trunkradius
            rootbases = [[x, z, startrad]]
        # Make the lower and upper sections of the trunk.
        self.taperedcylinder([x, starty, z], [x, midy, z], startrad, midrad,
                             mcmap, WOODINFO)
        self.taperedcylinder([x, midy, z], [x, topy, z], midrad, endrad,
                             mcmap, WOODINFO)
        # Make the branches
        self.makebranches(mcmap)
        # Make the roots, if indicated.
        if ROOTS in ["yes", "tostone", "hanging"]:
            self.makeroots(rootbases, mcmap)
        # Hollow the trunk, if specified
        # check to make sure that the trunk is large enough to be hollow
        if trunkradius > 2 and HOLLOWTRUNK:
            # wall thickness is actually the double the wall thickness
            # it is a diameter difference, not a radius difference.
            wall_thickness = (1 + trunkradius * 0.1 * random())
            if wall_thickness < 1.3:
                wall_thickness = 1.3
            base_radius = trunkradius - wall_thickness
            if base_radius < 1:
                base_radius = 1.0
            mid_radius = midrad - wall_thickness
            top_radius = endrad - wall_thickness
            # the starting x and y can be offset by up to the wall thickness.
            base_offset = int(wall_thickness)
            x_choices = [i for i in range(x - base_offset,
                                          x + base_offset + 1)]
            start_x = choice(x_choices)
            z_choices = [i for i in range(z - base_offset,
                                          z + base_offset + 1)]
            start_z = choice(z_choices)
            self.taperedcylinder([start_x, starty, start_z], [x, midy, z],
                                 base_radius, mid_radius,
                                 mcmap, TRUNKFILLINFO)
            # Carve the hollow a bit past the trunk top so a broken trunk
            # opens at the break.
            hollow_top_y = int(topy + trunkradius + 1.5)
            self.taperedcylinder([x, midy, z], [x, hollow_top_y, z],
                                 mid_radius, top_radius,
                                 mcmap, TRUNKFILLINFO)
    def prepare(self, mcmap):
        '''Initialize the internal values for the Tree object.

        Primarily, sets up the foliage cluster locations: picks random
        (x, y, z) positions around the trunk, rejects clusters whose
        branch path would be blocked by materials in STOPSBRANCHES, and
        stores the survivors for later foliage/branch generation.
        '''
        treeposition = self.pos
        # Trunk radius scales with the square root of height (golden-ratio
        # fudge factor .618); never thinner than 1 block.
        self.trunkradius = .618 * sqrt(self.height * TRUNKTHICKNESS)
        if self.trunkradius < 1:
            self.trunkradius = 1
        if BROKENTRUNK:
            # A broken trunk stops at 30-70% of the nominal height.
            self.trunkheight = self.height * (.3 + random() * .4)
            yend = int(treeposition[1] + self.trunkheight + .5)
        else:
            self.trunkheight = self.height
            yend = int(treeposition[1] + self.height)
        self.branchdensity = BRANCHDENSITY / FOLIAGEDENSITY
        topy = treeposition[1] + int(self.trunkheight + 0.5)
        foliage_coords = []
        ystart = treeposition[1]
        # Number of candidate clusters per vertical layer grows with
        # foliage density and tree height.
        num_of_clusters_per_y = int(1.5 + (FOLIAGEDENSITY *
                                           self.height / 19.) ** 2)
        if num_of_clusters_per_y < 1:
            num_of_clusters_per_y = 1
        # make sure we don't spend too much time off the top of the map
        if yend > 255:
            yend = 255
        if ystart > 255:
            ystart = 255
        for y in range(yend, ystart, -1):
            for i in range(num_of_clusters_per_y):
                # shapefunc returns the foliage radius at this layer, or
                # None when no foliage belongs at this height.
                shapefac = self.shapefunc(y - ystart)
                if shapefac is None:
                    continue
                # Random point in a disc of radius ~shapefac around the trunk.
                r = (sqrt(random()) + .328) * shapefac
                theta = random() * 2 * pi
                x = int(r * sin(theta)) + treeposition[0]
                z = int(r * cos(theta)) + treeposition[2]
                # if there are values to search in STOPSBRANCHES
                # then check to see if this cluster is blocked
                # by stuff, like dirt or rock, or whatever
                if len(STOPSBRANCHES):
                    dist = (sqrt(float(x - treeposition[0]) ** 2 +
                                 float(z - treeposition[2]) ** 2))
                    slope = self.branchslope
                    if y - dist * slope > topy:
                        # the top of the tree
                        starty = topy
                    else:
                        starty = y - dist * slope
                    # the start position of the search
                    start = [treeposition[0], starty, treeposition[2]]
                    offset = [x - treeposition[0],
                              y - starty,
                              z - treeposition[2]]
                    offlength = sqrt(offset[0] ** 2 + offset[1] ** 2 + offset[2] ** 2)
                    # if the branch is as short as... nothing, don't bother.
                    if offlength < 1:
                        continue
                    # unit vector for the search
                    vec = [offset[i] / offlength for i in range(3)]
                    mat_dist = dist_to_mat(start, vec, STOPSBRANCHES,
                                           mcmap, limit=offlength + 3)
                    # after all that, if you find something, don't add
                    # this coordinate to the list
                    if mat_dist < offlength + 2:
                        continue
                foliage_coords += [[x, y, z]]
        # NOTE(review): attribute name 'foliage_cords' (sic) is presumably
        # what the rest of the file reads -- keep the spelling.
        self.foliage_cords = foliage_coords
class RoundTree(ProceduralTree):
    """A deciduous-looking tree with a roughly spherical foliage canopy."""

    def prepare(self, mcmap):
        # A shallow branch slope gives the broad, rounded silhouette.
        self.branchslope = 0.382
        ProceduralTree.prepare(self, mcmap)
        self.foliage_shape = [2, 3, 3, 2.5, 1.6]
        self.trunkradius *= 0.8
        self.trunkheight *= TRUNKHEIGHT

    def shapefunc(self, y):
        # Occasional low twig clusters come from the base implementation.
        twigs = ProceduralTree.shapefunc(self, y)
        if twigs is not None:
            return twigs
        # No canopy below a randomized fraction of the total height.
        if y < self.height * (.282 + .1 * sqrt(random())):
            return None
        # Canopy radius traces a circle centered at mid-height.
        radius = self.height / 2.
        offset = self.height / 2. - y
        if offset == 0:
            span = radius
        elif abs(offset) >= radius:
            span = 0
        else:
            span = sqrt((radius ** 2) - (offset ** 2))
        return span * .618
class ConeTree(ProceduralTree):
    """A conifer-style tree: foliage radius tapers linearly toward the top."""

    # woodType is the kind of wood the tree has, a data value
    woodType = 1

    def prepare(self, mcmap):
        self.branchslope = 0.15
        ProceduralTree.prepare(self, mcmap)
        self.foliage_shape = [3, 2.6, 2, 1]
        self.trunkradius *= 0.5

    def shapefunc(self, y):
        # Occasional twig clusters come from the base implementation.
        twigs = ProceduralTree.shapefunc(self, y)
        if twigs is not None:
            return twigs
        # Bare trunk below a randomized fraction of the height.
        if y < self.height * (.25 + .05 * sqrt(random())):
            return None
        # Linear taper, clamped so it never goes negative.
        return max((self.height - y) * 0.382, 0)
class RainforestTree(ProceduralTree):
    """A rainforest-style tree: tall bare trunk topped by a wide, flat canopy."""

    def prepare(self, mcmap):
        self.foliage_shape = [3.4, 2.6]
        self.branchslope = 1.0
        ProceduralTree.prepare(self, mcmap)
        self.trunkradius *= 0.382
        self.trunkheight *= .9

    def shapefunc(self, y):
        if y < self.height * 0.8:
            # Below the canopy: only rare twig clusters, and only when this
            # tree rises above the forest edge height.
            if EDGEHEIGHT < self.height:
                twigs = ProceduralTree.shapefunc(self, y)
                if (twigs is not None) and random() < 0.07:
                    return twigs
            return None
        # Top 20% of the tree: a broad, randomized canopy disc.
        width = self.height * .382
        topdist = (self.height - y) / (self.height * 0.2)
        return width * (0.618 + topdist) * (0.618 + random()) * 0.382
class MangroveTree(RoundTree):
    """A mangrove: a round tree with a thinner trunk and a wider canopy."""

    def prepare(self, mcmap):
        # NOTE: RoundTree.prepare resets branchslope to 0.382 below, so
        # this assignment is effectively overwritten.
        self.branchslope = 1.0
        RoundTree.prepare(self, mcmap)
        self.trunkradius *= 0.618

    def shapefunc(self, y):
        # Same profile as a round tree, scaled up by the golden ratio.
        span = RoundTree.shapefunc(self, y)
        if span is not None:
            span *= 1.618
        return span
def planttrees(mcmap, treelist):
    '''Take mcmap and add trees to random locations on the surface to treelist.

    Repeatedly picks random positions inside the configured circle
    (X, Z, RADIUS), keeps those whose surface block type is in PLANTON,
    and appends a Tree (position + interpolated height) to treelist until
    TREECOUNT trees are placed or MAXTRIES attempts are exhausted.
    '''
    assert isinstance(mcmap, mcInterface.SaveFile)
    # keep looping until all the trees are placed
    # calc the radius difference, for interpolation
    in_out_dif = EDGEHEIGHT - CENTERHEIGHT
    if VERBOSE:
        print('Tree Locations: x, y, z, tree height')
    tries = 0
    max_tries = MAXTRIES
    while len(treelist) < TREECOUNT:
        if tries > max_tries:
            if VERBOSE:
                print("Stopping search for tree locations after {0} tries".format(tries))
                print("If you don't have enough trees, check X, Y, RADIUS, and PLANTON")
            break
        tries += 1
        # choose a location
        rad_fraction = random()
        # this is some kind of square interpolation
        # (biases samples toward the outer part of the circle so area
        # coverage is roughly uniform)
        rad_fraction = 1.0 - rad_fraction
        rad_fraction **= 2
        rad_fraction = 1.0 - rad_fraction
        rad = rad_fraction * RADIUS
        ang = random() * pi * 2
        x = X + int(rad * sin(ang) + .5)
        z = Z + int(rad * cos(ang) + .5)
        # check to see if this location is suitable
        y_top = mcmap.surface_block(x, z)
        if y_top is None:
            # this location is off the map!
            continue
        if y_top['B'] in PLANTON:
            # plant the tree on the block above the ground
            # hence the " + 1"
            y = y_top['y'] + 1
        else:
            continue
        # this is linear interpolation also.
        # Height blends between CENTERHEIGHT and EDGEHEIGHT by radius,
        # plus a random variation of up to +/- HEIGHTVARIATION.
        base_height = CENTERHEIGHT + (in_out_dif * rad_fraction)
        height_rand = (random() - .5) * 2 * HEIGHTVARIATION
        height = int(base_height + height_rand)
        # if the option is set, check the surrounding area for trees
        if ONLYINFORESTS:
            '''we are looking for foliage
            it should show up in the "surface_block" search
            check every fifth block in a square pattern,
            offset around the trunk
            and equal to the trees height
            if the area is not at least one third foliage,
            don't build the tree'''
            # spacing is how far apart each sample should be
            spacing = 5
            # search_size is how many blocks to check
            # along each axis
            search_size = 2 + (height // spacing)
            # check at least 3 x 3
            search_size = max([search_size, 3])
            # set up the offset values to offset the starting corner
            offset = ((search_size - 1) * spacing) // 2
            # foliage_count is the total number of foliage blocks found
            foliage_count = 0
            # check each sample location for foliage
            for step_x in range(search_size):
                # search_x is the x location to search this sample
                search_x = x - offset + (step_x * spacing)
                for step_z in range(search_size):
                    # same as for search_x
                    search_z = z - offset + (step_z * spacing)
                    search_block = mcmap.surface_block(search_x, search_z)
                    if search_block is None:
                        continue
                    # block id 18 is leaf foliage in the map format used here
                    if search_block['B'] == 18:
                        # this sample contains foliage!
                        # add it to the total
                        foliage_count += 1
            # now that we have the total count, find the ratio
            total_searched = search_size ** 2
            foliage_ratio = foliage_count / total_searched
            # the acceptable amount is about a third
            acceptable_ratio = .3
            if foliage_ratio < acceptable_ratio:
                # after all that work, there wasn't enough foliage around!
                # try again!
                continue
        # generate the new tree
        newtree = Tree([x, y, z], height)
        if VERBOSE:
            print(x, y, z, height)
        treelist += [newtree]
def processtrees(mcmap, treelist):
    '''Initialize all of the trees in treelist.

    Set all of the trees to the right type, and run prepare. If indicated
    limit the height of the trees to the top of the map.
    '''
    assert isinstance(mcmap, mcInterface.SaveFile)
    # Translate the configured SHAPE into the pool of concrete tree
    # classes to choose from per tree.
    if SHAPE == "stickly":
        shape_choices = ["normal", "bamboo", "palm"]
    elif SHAPE == "procedural":
        shape_choices = ["round", "cone"]
    else:
        shape_choices = [SHAPE]
    # initialize mapheight, just in case
    # NOTE(review): hard-coded to 255; the to-do comments at the end of
    # this file suggest this should come from the map itself.
    mapheight = 255
    for i in range(len(treelist)):
        newshape = choice(shape_choices)
        if newshape == "normal":
            newtree = NormalTree()
        elif newshape == "bamboo":
            newtree = BambooTree()
        elif newshape == "palm":
            newtree = PalmTree()
        elif newshape == "round":
            newtree = RoundTree()
        elif newshape == "cone":
            newtree = ConeTree()
        elif newshape == "rainforest":
            newtree = RainforestTree()
        elif newshape == "mangrove":
            newtree = MangroveTree()
        # Get the height and position of the existing trees in
        # the list.
        newtree.copy(treelist[i])
        # Now check each tree to ensure that it doesn't stick
        # out the top of the map. If it does, shorten it until
        # the top of the foliage just touches the top of the map.
        if MAPHEIGHTLIMIT:
            height = newtree.height
            ybase = newtree.pos[1]
            if SHAPE == "rainforest":
                foliageheight = 2
            else:
                foliageheight = 4
            if ybase + height + foliageheight > mapheight:
                newheight = mapheight - ybase - foliageheight
                newtree.height = newheight
        # Even if it sticks out the top of the map, every tree
        # should be at least one unit tall.
        if newtree.height < 1:
            newtree.height = 1
        newtree.prepare(mcmap)
        # Replace the placeholder Tree with the prepared, typed tree.
        treelist[i] = newtree
def main(the_map):
    '''create the trees

    Plants tree locations, prepares each tree, then renders foliage
    and/or wood onto the map according to the FOLIAGE/WOOD settings.
    '''
    def log(msg):
        # Echo progress only when verbose output is enabled.
        if VERBOSE:
            print(msg)

    treelist = []
    log("Planting new trees")
    planttrees(the_map, treelist)
    log("Processing tree changes")
    processtrees(the_map, treelist)
    if FOLIAGE:
        log("Generating foliage ")
        for tree in treelist:
            tree.makefoliage(the_map)
        log(' completed')
    if WOOD:
        log("Generating trunks, roots, and branches ")
        for tree in treelist:
            tree.maketrunk(the_map)
        log(' completed')
    return None
def standalone():
    '''Load the configured save file, generate trees, and write it back.'''
    def log(msg):
        # Echo progress only when verbose output is enabled.
        if VERBOSE:
            print(msg)

    log("Importing the map")
    try:
        the_map = mcInterface.SaveFile(LOADNAME)
    except IOError:
        log('File name invalid or save file otherwise corrupted. Aborting')
        return None
    main(the_map)
    if LIGHTINGFIX:
        log("Rough re-lighting the map")
        relight_master.save_file = the_map
        relight_master.calc_lighting()
    log("Saving the map, this could be a while")
    the_map.write()
    log("finished")
# Script entry point: run the standalone tree generator when executed
# directly (rather than imported as a filter).
if __name__ == '__main__':
    standalone()
# to do:
# get height limits from map
# set "limit height" or somesuch to respect level height limits
|
DragonQuiz/MCEdit-Unified
|
stock-filters/Forester.py
|
Python
|
isc
| 51,634
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2013, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Storage abstraction layer for WebDeposit."""
import hashlib
import urllib2
import uuid
from fs import opener
from fs import path
from invenio.base.globals import cfg
class UploadError(IOError):
    """Error during upload (file too big, unauthorized or unreachable URL)."""
class ExternalFile(object):
    """Wrapper around a URL to make it behave like a file.

    Allows external files to be passed to the storage layer.
    """

    def __init__(self, url, filename):
        """Initialize external file.

        :param url: URL to fetch the file from.
        :param filename: fallback name used when the server does not
            provide one via Content-Disposition.
        :raises UploadError: on unauthorized/unreachable URLs or when the
            reported size exceeds ``DEPOSIT_MAX_UPLOAD_SIZE``.
        """
        from invenio.legacy.bibdocfile.api import open_url, \
            InvenioBibdocfileUnauthorizedURL
        try:
            self._file = open_url(url, headers={})
            self.filename = None
            info = self._file.info()
            content_disposition = info.getheader('Content-Disposition')
            if content_disposition:
                for item in content_disposition.split(';'):
                    item = item.strip()
                    if item.startswith('filename='):
                        # Bug fix: the previous fixed-offset slicing assumed
                        # the value was always double-quoted and corrupted
                        # unquoted filenames. Strip quotes only if present.
                        self.filename = item[len('filename='):].strip('"')
            if not self.filename:
                self.filename = filename

            # Bug fix: Content-Length may be absent, in which case
            # getheader() returns None and int(None) used to crash.
            # Only enforce the size limit when the server reported one.
            length = info.getheader('Content-length')
            if length is not None and \
                    int(length) > cfg['DEPOSIT_MAX_UPLOAD_SIZE']:
                raise UploadError("File too big")
        except InvenioBibdocfileUnauthorizedURL as e:
            raise UploadError(str(e))
        except urllib2.URLError as e:
            raise UploadError('URL could not be opened: %s' % str(e))

    def close(self):
        """Close the external file."""
        self._file.close()

    def read(self):
        """Read the external file."""
        return self._file.read()
class Storage(object):
    """Default storage backend backed by a pyFilesystem directory."""

    # Lazily-opened pyFilesystem handle (see the ``storage`` property).
    _fsdir = None

    def __init__(self, fs_path):
        """Initialize with file system path."""
        self.fs_path = fs_path

    @property
    def storage(self):
        """Get the pyFilesystem object for the backend path."""
        if self._fsdir is None:
            # Opens a directory, creates it if needed, and ensures
            # it is writeable.
            self._fsdir = opener.fsopendir(
                self.fs_path, writeable=True, create_dir=True
            )
        return self._fsdir

    def unique_filename(self, filename):
        """Generate a unique secure filename (uuid4 prefix + original name)."""
        return str(uuid.uuid4()) + "-" + filename

    def save(self, incoming_file, filename, unique_name=True,
             with_checksum=True, chunksize=65536):
        """Store the incoming file.

        :param incoming_file: file-like object read in ``chunksize`` pieces.
        :param filename: target name (made unique when ``unique_name``).
        :returns: tuple ``(full path, size, checksum, with_checksum)``;
            ``checksum`` is None when ``with_checksum`` is False.
        """
        if unique_name:
            filename = self.unique_filename(filename)

        fs_file = self.storage.open(filename, 'wb')

        m = hashlib.md5()

        f_bytes = incoming_file.read(chunksize)
        while f_bytes:
            fs_file.write(f_bytes)
            if with_checksum:
                m.update(f_bytes)
            f_bytes = incoming_file.read(chunksize)

        fs_file.close()
        # Bug fix: previously the digest of an empty MD5 was returned even
        # when the caller asked for no checksum; report None instead.
        checksum = m.hexdigest() if with_checksum else None

        # Create complete file path and return it
        return (
            path.join(self.fs_path, filename),
            self.storage.getsize(filename),
            checksum,
            with_checksum,
        )

    @staticmethod
    def delete(fs_path):
        """Delete the file on storage."""
        (dirurl, filename) = opener.pathsplit(fs_path)
        fs = opener.fsopendir(dirurl)
        fs.remove(filename)

    @staticmethod
    def is_local(fs_path):
        """Determine if file is a local file."""
        (dirurl, filename) = opener.pathsplit(fs_path)
        fs = opener.fsopendir(dirurl)
        return fs.hassyspath(filename)

    @staticmethod
    def get_url(fs_path):
        """Get a URL for the file."""
        (dirurl, filename) = opener.pathsplit(fs_path)
        fs = opener.fsopendir(dirurl)
        return fs.getpathurl(filename)

    @staticmethod
    def get_syspath(fs_path):
        """Get a local system path to the file."""
        (dirurl, filename) = opener.pathsplit(fs_path)
        fs = opener.fsopendir(dirurl)
        return fs.getsyspath(filename)
class DepositionStorage(Storage):
    """Deposition storage backend.

    Saves files to a folder (<CFG_WEBDEPOSIT_UPLOAD_FOLDER>/<deposition_id>/).
    """

    def __init__(self, deposition_id):
        """Initialize storage."""
        # Each deposition gets its own subdirectory, named after its id,
        # below the configured deposit storage root.
        self.fs_path = path.join(
            cfg['DEPOSIT_STORAGEDIR'],
            str(deposition_id)
        )
class ChunkedDepositionStorage(DepositionStorage):
    """Chunked storage backend.

    Capable of handling storage of a file in multiple chunks. Otherwise
    similar to DepositionStorage.
    """

    def chunk_filename(self, filename, chunks, chunk):
        """Generate chunk file name.

        Encodes both the total chunk count and this chunk's index so the
        pieces can later be located with a wildcard and ordered.
        """
        return "%s_%s_%s" % (
            filename,
            chunks,
            chunk,
        )

    def save(self, incoming_file, filename, chunk=None, chunks=None):
        """Save one chunk of an incoming file.

        Stores the chunk under a temporary name; when the final chunk
        arrives, concatenates all chunks (in numeric order) into one file,
        computing an MD5 checksum, and deletes the chunk files.
        :raises UploadError: when ``chunk``/``chunks`` are not integers.
        """
        try:
            # Generate chunked file name
            chunk = int(chunk)
            chunks = int(chunks)
        except (ValueError, TypeError):
            raise UploadError("Invalid chunk value: %s" % chunk)

        # Store chunk
        chunk_filename = self.chunk_filename(filename, chunks, chunk)
        res = super(ChunkedDepositionStorage, self).save(
            incoming_file, chunk_filename, unique_name=False,
            with_checksum=False,
        )

        # Only merge files on the last chunk (0-based indexing).
        if chunk != chunks-1:
            return res

        # Get the chunks
        file_chunks = self.storage.listdir(
            wildcard=self.chunk_filename(
                filename, chunks, '*'
            )
        )
        # Sort numerically by chunk index; lexicographic order would put
        # e.g. chunk 10 before chunk 2.
        file_chunks.sort(key=lambda x: int(x.split("_")[-1]))

        # Write the chunks into one file
        filename = self.unique_filename(filename)
        fs_file = self.storage.open(filename, 'wb')
        m = hashlib.md5()

        for c in file_chunks:
            fs_c = self.storage.open(c, 'rb')
            f_bytes = fs_c.read(65536)
            while f_bytes:
                fs_file.write(f_bytes)
                m.update(f_bytes)
                f_bytes = fs_c.read(65536)
            fs_c.close()
            # Remove each chunk right after appending to main file, to
            # minimize storage usage.
            self.storage.remove(c)

        fs_file.close()
        checksum = m.hexdigest()

        # Same tuple shape as Storage.save: (path, size, checksum, True).
        return (
            path.join(self.fs_path, filename),
            self.storage.getsize(filename),
            checksum,
            True
        )
|
nharraud/b2share
|
invenio/modules/deposit/storage.py
|
Python
|
gpl-2.0
| 7,376
|
from ctx.toolkit import Widget
__author__ = 'fmca'
class AgendaWidget(Widget):
    """Widget reporting whether the user is currently busy ("Ocupado").

    Compares the current time against the calendar events and stores the
    result (True when some event is in progress) in ``self.status``.
    """

    def __init__(self, *generators):
        super(AgendaWidget, self).__init__("Ocupado", None, *generators)

    def update(self, event):
        """Recompute busy status; *event* (the update trigger) is unused."""
        now = self.get_property("time")
        events = self.get_property("calendar")
        occupied = False
        if events and now:
            # Bug fix: the loop variable used to be named 'event', shadowing
            # the method parameter of the same name.
            occupied = any(
                entry['start'] <= now < entry['end'] for entry in events
            )
        self.status = occupied
|
fmca/ctxpy
|
ctx/widgets/agenda.py
|
Python
|
mit
| 518
|
import os
import re
from prospector2.formatters.base import Formatter
class PylintFormatter(Formatter):
    """
    This formatter outputs messages in the same way as pylint -f parseable,
    which is used by several tools to parse pylint output. It is therefore a
    compatibility shim between tools built on top of pylint and prospector.
    """
    def render(self, summary=True, messages=True, profile=False):
        # This formatter always ignores the summary and the profile.
        lines = []
        previous_path = None
        for message in sorted(self.messages):
            # Emit a module header each time the file changes.
            if message.location.path != previous_path:
                previous_path = message.location.path
                dotted = message.location.path.replace(os.path.sep, '.')
                dotted = re.sub(r'(\.__init__)?\.py$', '', dotted)
                lines.append('************* Module %s' % dotted)
            # Format: {path}:{line}: [{code}({source}), {function}] {msg}
            lines.append(
                '%(path)s:%(line)s: [%(code)s(%(source)s), %(function)s] %(message)s' % {
                    'path': message.location.path,
                    'line': message.location.line,
                    'source': message.source,
                    'code': message.code,
                    'function': message.location.function,
                    'message': message.message.strip(),
                }
            )
        return '\n'.join(lines)
|
landscapeio/prospector
|
prospector2/formatters/pylint.py
|
Python
|
gpl-2.0
| 1,632
|
import random
from numbers import Integral
import numpy as np
class QuantumBitMachine(object):
    """A toy state-vector simulator for a register of qubits.

    The full 2**nqubits complex amplitude vector lives in ``self.state``.
    Every gate builds a fresh array rather than mutating in place, so a
    caller holding a reference to an earlier state is never surprised.
    All gate methods return ``self`` so calls can be chained.
    """

    def __init__(self, nqubits):
        assert isinstance(nqubits, Integral)
        self.nqubits = nqubits
        # Start in the |00...0> basis state.
        self.state = np.zeros([2 ** nqubits], dtype=complex)
        self.state[0] = 1.

    # Representations for ipython notebook
    def _repr_svg_(self):
        from io import BytesIO
        from pyqis.cairo_render import render_state
        buf = BytesIO()
        render_state(self, svg_file=buf)
        return buf.getvalue().decode("utf-8")

    def _repr_png_(self):
        from io import BytesIO
        from pyqis.cairo_render import render_state
        buf = BytesIO()
        render_state(self, png_file=buf)
        return buf.getvalue()

    def X(self, register):
        """apply the NOT gate on a given register"""
        mask = 1 << register
        # Swap the amplitudes of every pair of basis states that differ
        # only in the target bit.
        flipped = np.array([self.state[idx ^ mask]
                            for idx in range(len(self.state))],
                           dtype=complex)
        self.state = flipped
        return self

    def __phase(self, register, phase):
        # Multiply by `phase` every amplitude whose target bit is set.
        mask = 1 << register
        rotated = np.copy(self.state)
        for idx in range(len(rotated)):
            if idx & mask:
                rotated[idx] = rotated[idx] * phase
        self.state = rotated
        return self

    def Z(self, register):
        """apply the PHASE gate on a given register"""
        return self.__phase(register, -1.)

    def T(self, register):
        """apply the pi/8 gate on a given register"""
        return self.__phase(register, (1. + 1j) / np.sqrt(2))

    def Rtheta(self, register, theta):
        """apply an arbitrary phase rotation on a given register"""
        return self.__phase(register, np.exp(1j * theta))

    def H(self, register):
        """apply the Hadamard gate on a given register"""
        mask = 1 << register
        amp = 1 / np.sqrt(2)
        result = np.zeros_like(self.state)
        for idx, old in enumerate(self.state):
            # |b> keeps its amplitude (sign-flipped when the bit is set)
            # and also contributes to the bit-flipped basis state.
            result[idx] += (-amp if idx & mask else amp) * old
            result[idx ^ mask] += amp * old
        self.state = result
        return self

    def CNOT(self, control_register, target_register):
        """Flip the target bit of every basis state whose control bit is set."""
        control = 1 << control_register
        target = 1 << target_register
        result = np.copy(self.state)
        for idx in range(len(result)):
            if idx & control:
                result[idx] = self.state[idx ^ target]
        self.state = result
        return self

    def CCNOT(self, control_register1, control_register2, target_register):
        """Toffoli gate: flip the target bit when both control bits are set."""
        control_a = 1 << control_register1
        control_b = 1 << control_register2
        target = 1 << target_register
        result = np.copy(self.state)
        for idx in range(len(result)):
            if idx & control_a and idx & control_b:
                result[idx] = self.state[idx ^ target]
        self.state = result
        return self

    def observe(self):
        """Measure the register, collapsing the state; return the outcome index."""
        bounds = np.cumsum(np.square(np.abs(self.state)))
        draw = random.uniform(0., bounds[-1])
        for outcome, bound in enumerate(bounds):
            if draw < bound:
                break
        # "collapse" the wavefunction, keeping phase of the resulting amplitude
        phase = self.state[outcome] / np.abs(self.state[outcome])
        collapsed = np.zeros_like(self.state)
        collapsed[outcome] = phase
        self.state = collapsed
        return outcome

    # We will allow users to "cheat" and clone a quantum state, even though the
    # laws of physics do not allow such a thing. See
    # http://en.wikipedia.org/wiki/No-cloning_theorem
    def _clone(self):
        duplicate = QuantumBitMachine(self.nqubits)
        duplicate.state = np.copy(self.state)
        return duplicate
|
garrison/pyqis
|
pyqis/__init__.py
|
Python
|
mit
| 4,319
|
import logging
import logging.handlers
import datetime
# The DHCP lease time for all static addresses. Dynamic lease times are configured on the pool.
static_lease_time = 86400

# Bind address and the standard DHCP client/server UDP ports.
listen_address='0.0.0.0'
client_port=68
server_port=67
# The amount of time we wait before we will process a request of the same type.
# This is to prevent broken clients from breaking us. A better solution might
# be to cache the responses somehow, but that has implications on forcing a
# lease to expire.
between_requests=datetime.timedelta( days=0, minutes=0, seconds=10 )
# File where unhandled exception tracebacks are appended.
traceback_file='/tmp/openipam_dhcpd.tracebacks'
syslog=True
syslog_facility='local0'
# Log everything
syslog_level=logging.DEBUG
syslog_fmt = logging.Formatter("%(name)s[%(process)s]: %(message)s")
# Used to set syslog_connect if host != None
syslog_host=None
syslog_port=514
syslog_connect='/dev/log'
# Module-wide logger singleton; created lazily by get_logger() below.
logger=None
# Site-local overrides: any names defined in openipam_config.dhcp replace
# the defaults above.
from openipam_config.dhcp import *
def get_logger():
    """Return the module-wide 'dhcp' logger, configuring it on first use."""
    global logger
    if logger is not None:
        return logger
    logger = logging.getLogger('dhcp')
    logger.setLevel(syslog_level)
    if syslog:
        # Route records to the syslog daemon using the configured
        # connection, facility, level and format.
        handler = logging.handlers.SysLogHandler(syslog_connect, syslog_facility)
        handler.setLevel(syslog_level)
        handler.setFormatter(syslog_fmt)
        logger.addHandler(handler)
    return logger
|
ehuelsmann/openipam
|
openIPAM/openipam/config/dhcp.py
|
Python
|
gpl-3.0
| 1,419
|
import time
from django import template
from main import models as main_models
from knowall import models as knowall_models
register = template.Library()
LAST_ITEMS_COUNT = 5
@register.inclusion_tag('knowall/top_pages.html', name='top_pages', takes_context=True)
def top_pages(context):
    """Render the most-viewed knowall articles, excluding the current page.

    Looks up the top LAST_ITEMS_COUNT view counters under /knowall and
    passes the matching published articles to the inclusion template.
    """
    # Bug fix: removed leftover debug print() calls that wrote the request
    # path and the queryset to stdout on every render.
    items = main_models.ViewCount.objects.filter(uri__contains="/knowall") \
        .exclude(uri=context['request'].path)\
        .order_by('-counter')[:LAST_ITEMS_COUNT]
    articles = knowall_models.Knowall.published\
        .filter(uri__in=[item.uri for item in items])
    return {'articles': articles}
|
audiua/shkolyar_django
|
knowall/templatetags/top_pages.py
|
Python
|
mit
| 671
|
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
# Run the pyaf artificial-dataset benchmark for one configuration:
# 32 daily points, Lag1Trend trend, cycle length 30, RelativeDifference
# transform, no noise (sigma=0), 20 exogenous series, AR order 0.
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "Lag1Trend", cycle_length = 30, transform = "RelativeDifference", sigma = 0.0, exog_count = 20, ar_order = 0);
|
antoinecarme/pyaf
|
tests/artificial/transf_RelativeDifference/trend_Lag1Trend/cycle_30/ar_/test_artificial_32_RelativeDifference_Lag1Trend_30__20.py
|
Python
|
bsd-3-clause
| 273
|
#! /usr/bin/env python
import matplotlib
if __name__ == '__main__':
matplotlib.use('Agg')
import numpy as np
import pylab as plt
import os
import sys
import tempfile
import datetime
import gc
from functools import reduce
from scipy.ndimage.morphology import binary_dilation
from scipy.ndimage.measurements import label, center_of_mass
import fitsio
from astrometry.util.file import trymakedirs
from astrometry.util.fits import fits_table, merge_tables
from astrometry.util.miscutils import estimate_mode, polygons_intersect, clip_polygon, patch_image
from astrometry.util.util import Tan, Sip, flat_median_f
from astrometry.util.resample import resample_with_wcs, OverlapError
from astrometry.util.run_command import run_command
from astrometry.util.starutil_numpy import degrees_between
from astrometry.util.ttime import Time, MemMeas
from astrometry.libkd.spherematch import match_radec
import logging
logger = None
def info(*args):
    """Log *args*, space-joined, at INFO level on the module logger."""
    logger.info(' '.join(str(a) for a in args))
def debug(*args):
    """Log *args*, space-joined, at DEBUG level on the module logger."""
    import logging
    # Skip the string join entirely when DEBUG output is filtered out.
    if not logger.isEnabledFor(logging.DEBUG):
        return
    logger.debug(' '.join(str(a) for a in args))
from unwise_coadd import get_dir_for_coadd, get_coadd_tile_wcs
def main():
    """Resample existing unWISE coadd tiles onto a custom WCS footprint.

    Parses command-line options, builds a target WCS centered on
    --ra/--dec, reads each unWISE tile touching that footprint from
    --wisedir, resamples the masked (and, unless --masked-only, the
    unmasked) img/invvar/std/n products onto the target grid, and writes
    the combined products to --outdir. Returns 0 on success (or when an
    existing output is found without --force), -1 on bad arguments.
    """
    import argparse
    parser = argparse.ArgumentParser('%prog [options]')
    parser.add_argument('--outdir', '-o', dest='outdir', default='unwise-coadds',
                        help='Output directory: default %(default)s')
    parser.add_argument('--wisedir', help='unWISE coadds input directory')
    parser.add_argument('--size', dest='size', default=2048, type=int,
                        help='Set output image size in pixels; default %(default)s')
    parser.add_argument('--width', dest='width', default=0, type=int,
                        help='Set output image width in pixels; default --size')
    parser.add_argument('--height', dest='height', default=0, type=int,
                        help='Set output image height in pixels; default --size')
    parser.add_argument('--pixscale', dest='pixscale', type=float, default=2.75,
                        help='Set coadd pixel scale, default %(default)s arcsec/pixel')
    parser.add_argument('--force', dest='force', action='store_true',
                        default=False,
                        help='Run even if output file already exists?')
    parser.add_argument('--ra', dest='ra', type=float, default=None,
                        help='Build coadd at given RA center')
    parser.add_argument('--dec', dest='dec', type=float, default=None,
                        help='Build coadd at given Dec center')
    parser.add_argument('--band', type=int, default=None, action='append',
                        help='with --ra,--dec: band(s) to do (1,2,3,4)')
    parser.add_argument('--zoom', type=int, nargs=4,
                        help='Set target image extent (default "0 2048 0 2048")')
    parser.add_argument('--name', default=None,
                        help='Output file name: unwise-NAME-w?-*.fits')
    parser.add_argument('-v', '--verbose', dest='verbose', action='count',
                        default=0, help='Make more verbose')
    parser.add_argument('--masked-only', action='store_true', default=False)
    opt = parser.parse_args()

    if opt.verbose == 0:
        lvl = logging.INFO
    else:
        lvl = logging.DEBUG
    logging.basicConfig(level=lvl, format='%(message)s', stream=sys.stdout)
    global logger
    logger = logging.getLogger('unwise_resample')

    radec = opt.ra is not None and opt.dec is not None
    if not radec:
        print('Must specify --ra,--dec or --tile')
        parser.print_help()
        return -1
    if not opt.wisedir:
        print('Must specify --wisedir')
        parser.print_help()
        return -1

    Time.add_measurement(MemMeas)

    W = H = opt.size
    if opt.width:
        W = opt.width
    if opt.height:
        H = opt.height

    unmasked = True
    if opt.masked_only:
        unmasked = False

    # Synthesize a tile name from the target center, e.g. custom-1234p567.
    dataset = ('custom-%04i%s%03i' %
               (int(opt.ra*10.), 'p' if opt.dec >= 0. else 'm', int(np.abs(opt.dec)*10.)))
    print('Setting custom dataset', dataset)
    # fake tiles table
    tiles = fits_table()
    tiles.coadd_id = np.array([dataset])
    tiles.ra = np.array([opt.ra])
    tiles.dec = np.array([opt.dec])
    tile = tiles[0]

    if opt.name:
        tile.coadd_id = opt.name
    if opt.band is None:
        bands = [1,2]
    else:
        bands = list(opt.band)

    outdir = get_dir_for_coadd(opt.outdir, tile.coadd_id)
    if not os.path.exists(outdir):
        print('Creating output directory', outdir)
        trymakedirs(outdir)

    rtn,version,err = run_command('git describe')
    if rtn:
        # Bug fix: this previously referenced an undefined name 'ver',
        # raising NameError instead of the intended RuntimeError.
        raise RuntimeError('Failed to get version string (git describe):' +
                           version + err)
    version = version.strip()
    debug('"git describe" version info:', version)

    cowcs = get_coadd_tile_wcs(tile.ra, tile.dec, W, H, opt.pixscale)
    if opt.zoom is not None:
        (x0,x1,y0,y1) = opt.zoom
        W = x1-x0
        H = y1-y0
        zoomwcs = cowcs.get_subimage(x0, y0, W, H)
        print('Zooming WCS from', cowcs, 'to', zoomwcs)
        cowcs = zoomwcs

    # All unWISE tiles overlapping the target footprint.
    wtiles = unwise_tiles_touching_wcs(cowcs)

    # Hoisted out of the per-tile loop: previously re-imported each iteration.
    from astrometry.util.resample import resample_with_wcs, ResampleError

    for band in bands:
        print('Doing coadd tile', tile.coadd_id, 'band', band)
        tag = 'unwise-%s-w%i' % (tile.coadd_id, band)
        prefix = os.path.join(outdir, tag)
        ofn = prefix + '-img-m.fits'
        if os.path.exists(ofn):
            print('Output file exists:', ofn)
            if not opt.force:
                # NOTE(review): this skips all remaining bands, not just the
                # current one -- confirm that is intended.
                return 0

        # Accumulators on the target grid for the "masked" products...
        img_m = np.zeros((H,W), np.float32)
        iv_m = np.zeros((H,W), np.float32)
        std_m = np.zeros((H,W), np.float32)
        n_m = np.zeros((H,W), np.int32)
        # ...and, optionally, the "unmasked" ones.
        if unmasked:
            img_u = np.zeros((H,W), np.float32)
            iv_u = np.zeros((H,W), np.float32)
            std_u = np.zeros((H,W), np.float32)
            n_u = np.zeros((H,W), np.int32)

        for wtile in wtiles:
            print('Reading unWISE tile', wtile.coadd_id)
            wtag = os.path.join(get_dir_for_coadd(opt.wisedir, wtile.coadd_id),
                                'unwise-%s-w%i' % (wtile.coadd_id, band))
            # Read the masked image (with header, for its WCS), then the
            # remaining float products, in a fixed order matching the
            # rims[] indices below.
            prod = 'img-m.fits'
            fn = wtag + '-' + prod
            I,whdr = fitsio.read(fn, header=True)
            wwcs = Tan(whdr)
            ims = [I]
            for prod in ['invvar-m.fits.gz', 'std-m.fits.gz']:
                fn = wtag + '-' + prod
                I = fitsio.read(fn)
                ims.append(I)
            if unmasked:
                prod = 'img-u.fits'
                fn = wtag + '-' + prod
                I = fitsio.read(fn)
                ims.append(I)
                for prod in ['invvar-u.fits.gz', 'std-u.fits.gz']:
                    fn = wtag + '-' + prod
                    I = fitsio.read(fn)
                    ims.append(I)
            try:
                Yo,Xo,Yi,Xi,rims = resample_with_wcs(cowcs, wwcs, ims, intType=np.int16)
                img_m[Yo,Xo] = rims[0]
                iv_m [Yo,Xo] = rims[1]
                std_m[Yo,Xo] = rims[2]
                if unmasked:
                    img_u[Yo,Xo] = rims[3]
                    iv_u [Yo,Xo] = rims[4]
                    std_u[Yo,Xo] = rims[5]
                del rims
                del ims
                # The integer coverage maps are copied pixel-for-pixel
                # (nearest neighbor) rather than resampled.
                prods = [('n-m.fits.gz', n_m),]
                if unmasked:
                    prods.extend([('n-u.fits.gz', n_u),])
                for prod,im in prods:
                    fn = wtag + '-' + prod
                    I = fitsio.read(fn)
                    im[Yo,Xo] = I[Yi,Xi]
                    del I
                del Yo,Xo,Yi,Xi
            except ResampleError:
                # This tile does not actually overlap the target WCS; skip.
                pass

        # Plug the WCS header cards into the output coadd files.
        hdr = fitsio.FITSHDR()
        cowcs.add_to_header(hdr)
        # Arbitrarily plug in a number of header values from the *last* tile.
        # NOTE(review): if no tiles were read, 'whdr' is undefined here.
        for r in whdr.records():
            key = r['name']
            if not key in ['MAGZP', 'UNW_SKY', 'UNW_VER', 'UNW_URL', 'UNW_DVER',
                           'UNW_DATE', 'UNW_FR0', 'UNW_FRN', 'UNW_MEDF',
                           'UNW_BGMA', 'REFEREN1', 'REFEREN2', 'EPOCH',
                           'MJDMIN', 'MJDMAX', 'BAND']:
                continue
            hdr.add_record(r)

        if unmasked:
            # "Unmasked" versions
            ofn = prefix + '-img-u.fits'
            fitsio.write(ofn, img_u, header=hdr, clobber=True)
            debug('Wrote', ofn)
            ofn = prefix + '-invvar-u.fits'
            fitsio.write(ofn, iv_u, header=hdr, clobber=True)
            debug('Wrote', ofn)
            ofn = prefix + '-std-u.fits'
            fitsio.write(ofn, std_u, header=hdr, clobber=True)
            debug('Wrote', ofn)
            ofn = prefix + '-n-u.fits'
            fitsio.write(ofn, n_u, header=hdr, clobber=True)
            debug('Wrote', ofn)

        # "Masked" versions
        ofn = prefix + '-img-m.fits'
        fitsio.write(ofn, img_m, header=hdr, clobber=True)
        debug('Wrote', ofn)
        ofn = prefix + '-invvar-m.fits'
        fitsio.write(ofn, iv_m, header=hdr, clobber=True)
        debug('Wrote', ofn)
        ofn = prefix + '-std-m.fits'
        fitsio.write(ofn, std_m, header=hdr, clobber=True)
        debug('Wrote', ofn)
        ofn = prefix + '-n-m.fits'
        fitsio.write(ofn, n_m, header=hdr, clobber=True)
        debug('Wrote', ofn)
###
# This is taken directly from tractor/wise.py, replacing only the filename.
###
def unwise_tiles_touching_wcs(wcs, atlasfn='data/wise-tiles.fits', polygons=True):
    '''
    Return a FITS table (with RA, Dec, coadd_id) of the unWISE tiles
    that overlap the given WCS footprint.

    A cheap radial cut is applied first; when *polygons* is true, an
    exact polygon-intersection test refines the selection.
    '''
    from astrometry.util.miscutils import polygons_intersect
    from astrometry.util.starutil_numpy import degrees_between
    from wise.unwise import unwise_tile_wcs

    tiles = fits_table(atlasfn)
    # Coarse cut: tile centers within target radius + tile half-diagonal
    # (unWISE tiles are 2048 pixels at 2.75 arcsec/pixel).
    tile_radius = np.sqrt(2.) / 2. * 2048 * 2.75 / 3600.
    search_radius = wcs.radius() + tile_radius
    ra0, dec0 = wcs.radec_center()
    cand, = np.nonzero(np.abs(tiles.dec - dec0) < search_radius)
    dists = degrees_between(tiles.ra[cand], tiles.dec[cand], ra0, dec0)
    cand = cand[dists < search_radius]
    if not polygons:
        return tiles[cand]
    # Fine cut: test polygon intersection in target pixel coordinates.
    tw, th = wcs.imagew, wcs.imageh
    target = [(0.5, 0.5), (tw + 0.5, 0.5),
              (tw + 0.5, th + 0.5), (0.5, th + 0.5)]
    cd = wcs.get_cd()
    # Flip winding when the CD matrix determinant is positive so both
    # polygons are oriented consistently for the intersection test.
    if cd[0] * cd[3] - cd[1] * cd[2] > 0:
        target.reverse()
    target = np.array(target)
    keep = []
    for i in cand:
        wwcs = unwise_tile_wcs(tiles.ra[i], tiles.dec[i])
        hh, ww = wwcs.shape
        corners = [(0.5, 0.5), (ww + 0.5, 0.5),
                   (ww + 0.5, hh + 0.5), (0.5, hh + 0.5)]
        poly = []
        for cx, cy in corners:
            ra, dec = wwcs.pixelxy2radec(cx, cy)
            _, px, py = wcs.radec2pixelxy(ra, dec)
            poly.append((px, py))
        wcd = wwcs.get_cd()
        if wcd[0] * wcd[3] - wcd[1] * wcd[2] > 0:
            poly.reverse()
        if polygons_intersect(target, np.array(poly)):
            keep.append(i)
    return tiles[np.array(keep)]
# Script entry point: run main() and use its return value as the
# process exit status.
if __name__ == '__main__':
    sys.exit(main())
|
dstndstn/unwise-coadds
|
unwise_resample.py
|
Python
|
gpl-2.0
| 12,170
|
# Spruce - a tool to help manage system software states.
# Copyright (C) 2017 Matt North
# This file is part of Spruce.
# Spruce is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Spruce is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Spruce. If not, see <http://www.gnu.org/licenses/>.
from django.apps import AppConfig
class SpruceConfig(AppConfig):
    """Django application configuration for the Spruce app."""
    name = 'spruce'
|
Mattsky/spruce
|
spruce/apps.py
|
Python
|
gpl-3.0
| 824
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.service_client import ServiceClient
from msrest import Configuration, Serializer, Deserializer
from .version import VERSION
from .operations.web_operations import WebOperations
from . import models
class WebSearchAPIConfiguration(Configuration):
    """Client configuration for :class:`WebSearchAPI`.

    Every parameter used to create this instance is saved as an
    instance attribute.

    :param credentials: Subscription credentials which uniquely identify
     client subscription.
    :type credentials: None
    :param str base_url: Service URL
    """

    def __init__(
            self, credentials, base_url=None):
        # A credentials object is mandatory.
        if credentials is None:
            raise ValueError("Parameter 'credentials' must not be None.")
        # Fall back to the public Bing endpoint when no URL is supplied.
        super(WebSearchAPIConfiguration, self).__init__(
            base_url or 'https://api.cognitive.microsoft.com/bing/v7.0')

        self.add_user_agent(
            'azure-cognitiveservices-search-websearch/{}'.format(VERSION))

        self.credentials = credentials
class WebSearchAPI(object):
    """The Web Search API lets you send a search query to Bing and get back search results that include links to webpages, images, and more.

    :ivar config: Configuration for client.
    :vartype config: WebSearchAPIConfiguration
    :ivar web: Web operations
    :vartype web: azure.cognitiveservices.search.websearch.operations.WebOperations
    :param credentials: Subscription credentials which uniquely identify
     client subscription.
    :type credentials: None
    :param str base_url: Service URL
    """

    def __init__(
            self, credentials, base_url=None):
        self.config = WebSearchAPIConfiguration(credentials, base_url)
        self._client = ServiceClient(self.config.credentials, self.config)
        self.api_version = '1.0'

        # Collect every model class exported by the generated models
        # module so the (de)serializers can resolve type names.
        client_models = dict(
            (name, obj) for name, obj in models.__dict__.items()
            if isinstance(obj, type))
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)

        self.web = WebOperations(
            self._client, self.config, self._serialize, self._deserialize)
|
lmazuel/azure-sdk-for-python
|
azure-cognitiveservices-search-websearch/azure/cognitiveservices/search/websearch/web_search_api.py
|
Python
|
mit
| 2,615
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""add TI state index
Revision ID: 211e584da130
Revises: 2e82aab8ef20
Create Date: 2016-06-30 10:54:24.323588
"""
# revision identifiers, used by Alembic.
revision = '211e584da130'
down_revision = '2e82aab8ef20'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the ``ti_state`` index on ``task_instance.state``."""
    op.create_index('ti_state', 'task_instance', ['state'], unique=False)
def downgrade():
    """Drop the ``ti_state`` index from ``task_instance``."""
    op.drop_index('ti_state', table_name='task_instance')
|
yiqingj/airflow
|
airflow/migrations/versions/211e584da130_add_ti_state_index.py
|
Python
|
apache-2.0
| 1,036
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-07-18 11:46
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import filer.fields.file
class Migration(migrations.Migration):
    """Initial schema for the teasers plugin app.

    Creates the ``Teaser`` and ``TeaserWrap`` CMS plugin models.
    Auto-generated by Django 1.10 — edit with care.
    """

    initial = True

    dependencies = [
        ('cms', '0016_auto_20160608_1535'),
        ('filer', '0007_auto_20161016_1055'),
    ]

    operations = [
        migrations.CreateModel(
            name='Teaser',
            fields=[
                ('cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, related_name='teasers_teaser', serialize=False, to='cms.CMSPlugin')),
                ('name', models.CharField(default='', max_length=150, verbose_name='Name')),
                ('body', models.TextField(blank=True, default='', verbose_name='Text')),
                ('filer_icon', filer.fields.file.FilerFileField(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='cms_teasers_teaser_filer_icon_set', to='filer.File', verbose_name='Icon')),
            ],
            options={
                'verbose_name': 'Teaser',
                'verbose_name_plural': 'Teasers',
            },
            bases=('cms.cmsplugin',),
        ),
        migrations.CreateModel(
            name='TeaserWrap',
            fields=[
                ('cmsplugin_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, related_name='teasers_teaserwrap', serialize=False, to='cms.CMSPlugin')),
                ('css_class', models.CharField(blank=True, default='', max_length=200, verbose_name='CSS class')),
                ('in_navigation', models.BooleanField(default=False, verbose_name='In navigation')),
                ('is_visible', models.BooleanField(default=True, verbose_name='Visible')),
                ('height', models.CharField(blank=True, default='', max_length=100, verbose_name='Height')),
                ('width', models.CharField(blank=True, default='', max_length=50, verbose_name='Width')),
                ('name', models.CharField(blank=True, default='', max_length=150, verbose_name='Name')),
                ('slug', models.SlugField(blank=True, default='', editable=False, max_length=150, verbose_name='Slug')),
                ('cms_page', models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='cms_teasers_wrap_set', to='cms.Page')),
            ],
            options={
                'verbose_name': 'Teasers Wrap',
                'verbose_name_plural': 'Teasers Wraps',
            },
            bases=('cms.cmsplugin',),
        ),
    ]
|
rouxcode/django-cms-plugins
|
cmsplugins/teasers/migrations/0001_initial.py
|
Python
|
mit
| 2,774
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# bts_tools - Tools to easily manage the bitshares client
# Copyright (c) 2014 Nicolas Wack <wackou@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from . import seed, backbone, prefer_backbone_exclusively, missed,\
online, network_connections, cpu_ram_usage, voted_in, wallet_state,\
fork, free_disk_space, indexing
|
wackou/bts_tools
|
bts_tools/monitoring/__init__.py
|
Python
|
gpl-3.0
| 976
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.