commit stringlengths 40 40 | subject stringlengths 1 1.49k | old_file stringlengths 4 311 | new_file stringlengths 4 311 | new_contents stringlengths 1 29.8k | old_contents stringlengths 0 9.9k | lang stringclasses 3 values | proba float64 0 1 |
|---|---|---|---|---|---|---|---|
2e3341c7e32182cc35f6a658d613c77a72b9b377 | Modify comments | src/marketdata/access/remote/google.py | src/marketdata/access/remote/google.py | import urllib2
import urllib
from marketdata.utils.transform.google.rawquote_intraday import TranformIntradayQuote
def _getUrl(url, urlconditions):
    '''Build the full request URL from a base URL and a dict of query params.'''
    return '%s?%s' % (url, urllib.urlencode(urlconditions))
def _pullQuote(url, urlconditions):
    '''Fetch the quote resource over HTTP and return the response lines.'''
    request = urllib2.Request(_getUrl(url, urlconditions))
    return urllib2.urlopen(request).readlines()
class IntradayMinutes(object):
    '''Extract intraday market data from Google finance.
    URL to access market data:
    http://www.google.com/finance/getprices?q=IBM&x=NYSE&i=60&p=5d&f=d,c,h,l,o,v
    Abbreviations in the URL:
    q = quote symbol
    x = exchange symbol
    i = interval in seconds i.e. 60 = 1 minute
    p = number of past trading days (max has been 15d)
    f = quote format (date, close, high, low, open, volume)
    '''
    def __init__(self, symbol, exchange, minutes=1, days=1):
        '''Constructor.

        Builds the request parameters and immediately fetches and transforms
        the quote data, so network access happens at construction time.

        symbol   -- ticker symbol, e.g. 'IBM'
        exchange -- exchange symbol, e.g. 'NYSE'
        minutes  -- sampling interval in minutes (stored as seconds)
        days     -- number of past trading days to request
        '''
        self.url = 'http://www.google.com/finance/getprices'
        quoteformat = 'd,c,h,l,o,v'
        self.urlconditions = {}
        self.urlconditions['q'] = symbol  # 'IBM', 'JPM', 'GE', 'AMD'
        self.urlconditions['x'] = exchange  # 'NYSE', 'INDEXNASDAQ'
        self.urlconditions['i'] = str(minutes * 60)  # 60 refers to 1 minute interval
        self.urlconditions['p'] = str(days) + 'd'  # 1d refers to 1 day (max 15 days)
        self.urlconditions['f'] = quoteformat  # date, close, high, low, open, volume
        self.quote = self.__extractTransform()
    def __extractRawQuote(self):
        # Network fetch: pulls the raw quote lines from Google finance.
        return _pullQuote(self.url, self.urlconditions)
    def __transformRawQuote(self, raw_quote):
        # NOTE(review): the interval is forwarded as a string (it was stored
        # via str() in __init__) -- confirm TranformIntradayQuote expects that.
        interval = self.urlconditions['i']
        return TranformIntradayQuote(raw_quote, interval)
    def __extractTransform(self):
        # Fetch then transform in one step; called once from __init__.
        raw_quote = self.__extractRawQuote()
        return self.__transformRawQuote(raw_quote)
    def json(self):
        # Presumably JSON with unix timestamps + close/high/low/open/volume
        # (per the method name) -- confirm against TranformIntradayQuote.
        return self.quote.json_uts_chlov()
    def dict_np(self):
        # Presumably a dict of arrays keyed by datetimes -- confirm.
        return self.quote.dts_chlov()
| import urllib2
import urllib
from marketdata.utils.transform.google.rawquote_intraday import TranformIntradayQuote
def _getUrl(url, urlconditions):
url_values = urllib.urlencode(urlconditions)
return url + '?' + url_values
def _pullQuote(url, urlconditions):
req = urllib2.Request(_getUrl(url, urlconditions))
response = urllib2.urlopen(req).readlines()
return response
class IntradayMinutes(object):
'''Extract intraday market data from Google finance.
URL to access market data from Google finance:
http://www.google.com/finance/getprices?q=IBM&x=NYSE&i=60&p=5d&f=d,c,h,l,o,v
Description of abbreviations present in the above URL:
q = quote symbol
x = exchange symbol
i = interval in seconds i.e. 60 = 1 minute
p = number of past trading days (max has been 15d)
f = quote format (date, close, high, low, open, volume)
'''
def __init__(self, symbol, exchange, minutes=1, days=1):
'''Constructor
'''
self.url = 'http://www.google.com/finance/getprices'
quoteformat = 'd,c,h,l,o,v'
self.urlconditions = {}
self.urlconditions['q'] = symbol # 'IBM', 'JPM', 'GE', 'AMD'
self.urlconditions['x'] = exchange # 'NYSE', 'INDEXNASDAQ'
self.urlconditions['i'] = str(minutes * 60) # 60 refers to 1 minute interval
self.urlconditions['p'] = str(days) + 'd' # 1d refers to 1 day (max 15 days)
self.urlconditions['f'] = quoteformat # date, close, high, low, open, volume
self.quote = self.__extractTransform()
def __extractRawQuote(self):
return _pullQuote(self.url, self.urlconditions)
def __transformRawQuote(self, raw_quote):
interval = self.urlconditions['i']
return TranformIntradayQuote(raw_quote, interval)
def __extractTransform(self):
raw_quote = self.__extractRawQuote()
return self.__transformRawQuote(raw_quote)
def json(self):
return self.quote.json_uts_chlov()
def dict_np(self):
return self.quote.dts_chlov()
| Python | 0 |
3577de6383053e0f8e05d531c8a632be12e89ca6 | fix for route parser to handle when path=None | python/marvin/utils/general/decorators.py | python/marvin/utils/general/decorators.py |
from functools import wraps
# General Decorators
def parseRoutePath(f):
    '''Decorator that expands a generic "a=1/b=2" route path into kwargs.

    Each non-empty "name=value" segment of kwargs['path'] becomes an
    individual keyword argument, after which 'path' itself is removed.
    A missing, empty or None path is passed through untouched.
    '''
    @wraps(f)
    def decorated_function(inst, *args, **kwargs):
        path = kwargs.get('path')
        if path:
            for segment in path.split('/'):
                if not segment:
                    continue
                name, value = segment.split('=')
                kwargs[name] = value
            del kwargs['path']
        return f(inst, *args, **kwargs)
    return decorated_function
|
from functools import wraps
# General Decorators
def parseRoutePath(f):
''' Decorator to parse generic route path '''
@wraps(f)
def decorated_function(inst, *args, **kwargs):
for kw in kwargs['path'].split('/'):
if len(kw) == 0:
continue
var, value = kw.split('=')
kwargs[var] = value
kwargs.pop('path')
return f(inst, *args, **kwargs)
return decorated_function
| Python | 0.000106 |
4c31e94752e635c0826dd6b223201fe7ce0d5220 | Fix cache_home to expand path | rplugin/python3/deoplete/sources/jedi.py | rplugin/python3/deoplete/sources/jedi.py | import os
import re
import sys
current_dir = os.path.dirname(os.path.abspath(__file__))
jedi_dir = os.path.join(os.path.dirname(current_dir), 'jedi')
sys.path.insert(0, jedi_dir)
import jedi
from .base import Base
class Source(Base):
    '''Deoplete completion source for Python, backed by the jedi library.'''

    def __init__(self, vim):
        Base.__init__(self, vim)

        self.name = 'jedi'
        self.mark = '[jedi]'
        self.filetypes = ['python']
        # Trigger completion after attribute access, decorators and the
        # various import-statement forms.
        self.input_pattern = (r'[^. \t0-9]\.\w*|^\s*@\w*|' +
                              r'^\s*from\s.+import \w*|' +
                              r'^\s*from \w*|^\s*import \w*')

    def get_complete_position(self, context):
        '''Return the column where the word being completed starts, or -1.'''
        m = re.search(r'\w*$', context['input'])
        return m.start() if m else -1

    def gather_candidates(self, context):
        '''Build the deoplete candidate list from jedi completions.'''
        source = '\n'.join(self.vim.current.buffer)
        try:
            completions = self.get_script(
                source, self.vim.current.window.cursor[1]).completions()
        except Exception:
            # jedi may raise on broken/incomplete sources; offer nothing.
            return []

        out = []
        for c in completions:
            word = c.name

            # TODO(zchee): configurable and refactoring
            # Add '(' bracket
            if c.type == 'function':
                word += '('
            # Add '.' for 'self' and 'class', except inside import statements.
            elif (word == 'self' or
                  c.type == 'class' or
                  c.type == 'module') and (not re.match(
                      # BUG FIX: the first alternative was previously
                      # concatenated to the second without '|', producing a
                      # pattern that could never match; restored the
                      # alternation so it mirrors input_pattern in __init__.
                      r'^\s*from\s.+import \w*|' +
                      r'^\s*from \w*|^\s*import \w*',
                      self.vim.current.line)):
                word += '.'

            # Format c.docstring() for abbr.
            # NOTE(review): c.name is used as a regex pattern here; names
            # containing regex metacharacters may misbehave -- consider
            # re.escape() if that ever surfaces.
            if re.match(c.name, c.docstring()):
                abbr = re.sub('"(|)| ",', '',
                              c.docstring().split("\n\n")[0]
                              .split("->")[0]
                              .replace('\n', ' ')
                              )
            else:
                abbr = c.name

            out.append(dict(word=word,
                            abbr=abbr,
                            kind=re.sub('\n| ', '', c.description),
                            info=c.docstring(),
                            icase=1,
                            dup=1
                            ))
        return out

    def get_script(self, source, column):
        '''Create a jedi.Script for the given source/cursor, configuring
        jedi's global settings first.'''
        # http://jedi.jedidjah.ch/en/latest/docs/settings.html#jedi.settings.add_dot_after_module
        # Adds a dot after a module, because a module that is not accessed
        # this way is definitely not the normal case.
        jedi.settings.add_dot_after_module = True
        # http://jedi.jedidjah.ch/en/latest/docs/settings.html#jedi.settings.add_bracket_after_function
        # Adds an opening bracket after a function, because that's normal
        # behaviour.
        jedi.settings.add_bracket_after_function = True
        # http://jedi.jedidjah.ch/en/latest/docs/settings.html#jedi.settings.additional_dynamic_modules
        # Additional modules in which Jedi checks if statements are to be
        # found. This is practical for IDEs, that want to administrate their
        # modules themselves.
        jedi.settings.additional_dynamic_modules = [
            b.name for b in self.vim.buffers
            if b.name is not None and b.name.endswith('.py')]
        # Honour XDG_CACHE_HOME, falling back to an expanded ~/.cache.
        cache_home = os.getenv('XDG_CACHE_HOME')
        if cache_home is None:
            cache_home = os.path.expanduser('~/.cache')
        jedi.settings.cache_directory = os.path.join(cache_home, 'jedi')
        row = self.vim.current.window.cursor[0]
        buf_path = self.vim.current.buffer.name
        encoding = self.vim.eval('&encoding')
        return jedi.Script(source, row, column, buf_path, encoding)
| import os
import re
import sys
current_dir = os.path.dirname(os.path.abspath(__file__))
jedi_dir = os.path.join(os.path.dirname(current_dir), 'jedi')
sys.path.insert(0, jedi_dir)
import jedi
from .base import Base
class Source(Base):
def __init__(self, vim):
Base.__init__(self, vim)
self.name = 'jedi'
self.mark = '[jedi]'
self.filetypes = ['python']
self.input_pattern = (r'[^. \t0-9]\.\w*|^\s*@\w*|' +
r'^\s*from\s.+import \w*|' +
r'^\s*from \w*|^\s*import \w*')
def get_complete_position(self, context):
m = re.search(r'\w*$', context['input'])
return m.start() if m else -1
def gather_candidates(self, context):
source = '\n'.join(self.vim.current.buffer)
try:
completions = self.get_script(
source, self.vim.current.window.cursor[1]).completions()
except Exception:
return []
out = []
for c in completions:
word = c.name
# TODO(zchee): configurable and refactoring
# Add '(' bracket
if c.type == 'function':
word += '('
# Add '.' for 'self' and 'class'
elif (word == 'self' or
c.type == 'class' or
c.type == 'module') and (not re.match(
r'^\s*from\s.+import \w*' +
r'^\s*from \w*|^\s*import \w*',
self.vim.current.line)):
word += '.'
# Format c.docstring() for abbr
if re.match(c.name, c.docstring()):
abbr = re.sub('"(|)| ",', '',
c.docstring().split("\n\n")[0]
.split("->")[0]
.replace('\n', ' ')
)
else:
abbr = c.name
out.append(dict(word=word,
abbr=abbr,
kind=re.sub('\n| ', '', c.description),
info=c.docstring(),
icase=1,
dup=1
))
return out
def get_script(self, source, column):
# http://jedi.jedidjah.ch/en/latest/docs/settings.html#jedi.settings.add_dot_after_module
# Adds a dot after a module, because a module that is not accessed this
# way is definitely not the normal case. However, in VIM this doesn’t
# work, that’s why it isn’t used at the moment.
jedi.settings.add_dot_after_module = True
# http://jedi.jedidjah.ch/en/latest/docs/settings.html#jedi.settings.add_bracket_after_function
# Adds an opening bracket after a function, because that's normal
# behaviour. Removed it again, because in VIM that is not very
# practical.
jedi.settings.add_bracket_after_function = True
# http://jedi.jedidjah.ch/en/latest/docs/settings.html#jedi.settings.additional_dynamic_modules
# Additional modules in which Jedi checks if statements are to be
# found. This is practical for IDEs, that want to administrate their
# modules themselves.
jedi.settings.additional_dynamic_modules = [
b.name for b in self.vim.buffers
if b.name is not None and b.name.endswith('.py')]
cache_home = os.getenv('XDG_CACHE_HOME')
if cache_home is None:
cache_home = '~/.cache'
jedi.settings.cache_directory = os.path.join(cache_home, 'jedi')
row = self.vim.current.window.cursor[0]
buf_path = self.vim.current.buffer.name
encoding = self.vim.eval('&encoding')
return jedi.Script(source, row, column, buf_path, encoding)
| Python | 0 |
9044018db0a909884ada225af12c7252f85aece8 | Remove dead code | examples/tictactoe_td0.py | examples/tictactoe_td0.py | from capstone.environment import Environment
from capstone.game import TicTacToe
from capstone.mdp import GameMDP
from capstone.player import AlphaBeta, RandPlayer
from capstone.util import ZobristHashing
class TabularTD0(object):
    '''Tabular TD(0) state-value learner over an MDP environment.

    Values are stored in a dict keyed by the Zobrist hash of the game board.
    '''

    def __init__(self, env, policy=RandPlayer(), alpha=0.01, gamma=0.99, n_episodes=1000):
        # NOTE(review): the `policy` argument is ignored (a fresh RandPlayer
        # is always used) and the RandPlayer() default is evaluated once and
        # shared across instances -- confirm both are intended.
        self.env = env
        self.policy = RandPlayer()
        self.alpha = alpha          # learning rate
        self.gamma = gamma          # discount factor
        self.n_episodes = n_episodes
        self.zobrist_hash = ZobristHashing(n_positions=9, n_pieces=2)
        self._table = {}            # state hash -> estimated value
        self._boards = {}           # state hash -> game state (for debugging)

    def learn(self):
        '''Run TD(0) episodes, updating the value table in place.'''
        import random
        for episode in range(self.n_episodes):
            print('Episode {}'.format(episode))
            self.env.reset()
            step = 0
            while not self.env.is_terminal():
                print('Step {}'.format(step))
                cur_state = self.env.cur_state()
                action = random.choice(self.env.actions())
                reward = self.env.do_action(action)
                next_state = self.env.cur_state()
                cur_state_hash = self.zobrist_hash(cur_state.board)
                cur_state_value = self._table.get(cur_state_hash, 0.1)
                next_state_hash = self.zobrist_hash(next_state.board)
                next_state_value = self._table.get(next_state_hash, 0.3)
                # TD(0) update: V(s) <- V(s) + alpha*(r + gamma*V(s') - V(s))
                new_value = cur_state_value + (self.alpha * (reward + (self.gamma * next_state_value) - cur_state_value))
                self._table[cur_state_hash] = new_value
                self._boards[cur_state_hash] = cur_state
                step += 1
            # BUG FIX: this previously read the module-level global `env`
            # instead of self.env, which only worked by accident because the
            # demo script below happens to define a global named env.
            # The `step > 0` guard avoids a NameError on next_state_hash /
            # reward when the very first state is already terminal.
            if step > 0 and self.env.is_terminal():
                # Pin the terminal state's value to its final reward.
                self._table[next_state_hash] = reward
        print('Results:')
        print(self._table)
        print(self._boards)
# Demo: learn state values for a mid-game tic-tac-toe position.
game = TicTacToe(
    'X-O'
    'XO-'
    '-XO'
)  # adjacent string literals concatenate into the 9-cell board
ab = AlphaBeta()
# NOTE(review): the meaning of the trailing argument (1) to GameMDP is not
# visible here -- presumably the player index; confirm against GameMDP.
mdp = GameMDP(game, ab, 1)
env = Environment(mdp)
td0 = TabularTD0(env)
td0.learn()
| from capstone.environment import Environment
from capstone.game import TicTacToe
from capstone.mdp import GameMDP
from capstone.player import AlphaBeta, RandPlayer
from capstone.util import ZobristHashing
# class TabularTD0(object):
# def __init__(self, env, policy, alpha, gamma, n_episodes):
# self.env = env
# self.policy = policy
# self.alpha = alpha
# self.gamma = gamme
# self.n_episodes = n_episodes
# def learn(self):
# for episode in range(self.n_episodes):
# pass
# def step(self):
# action = self.policy.choose_action(self.env)
# cur_state = env.cur_state()
# reward = env.make_action(action)
# next_state = env.cur_state()
# new_value = self.table[
class TabularTD0(object):
def __init__(self, env, policy=RandPlayer(), alpha=0.01, gamma=0.99, n_episodes=1000):
self.env = env
self.policy = RandPlayer()
self.alpha = alpha
self.gamma = gamma
self.n_episodes = n_episodes
self.zobrist_hash = ZobristHashing(n_positions=9, n_pieces=2)
self._table = {}
self._boards = {}
def learn(self):
import random
for episode in range(self.n_episodes):
print('Episode {}'.format(episode))
self.env.reset()
step = 0
while not self.env.is_terminal():
print('Step {}'.format(step))
cur_state = self.env.cur_state()
action = random.choice(self.env.actions())
reward = self.env.do_action(action)
# print('Reward {}'.format(reward))
next_state = self.env.cur_state()
cur_state_hash = self.zobrist_hash(cur_state.board)
# print('cur_state_value_hash: {}'.format(cur_state_hash))
cur_state_value = self._table.get(cur_state_hash, 0.1)
next_state_hash = self.zobrist_hash(next_state.board)
# print('next_state_value_hash: {}'.format(next_state_hash))
next_state_value = self._table.get(next_state_hash, 0.3)
new_value = cur_state_value + (self.alpha * (reward + (self.gamma * next_state_value) - cur_state_value))
# print('new_value {}'.format(new_value))
self._table[cur_state_hash] = new_value
self._boards[cur_state_hash] = cur_state
# print('cur_state_hash' + str(cur_state_hash))
# print(env.cur_state())
step += 1
if env.is_terminal():
self._table[next_state_hash] = reward;
print('Results:')
print(self._table)
print(self._boards)
game = TicTacToe(
'X-O'
'XO-'
'-XO'
)
ab = AlphaBeta()
mdp = GameMDP(game, ab, 1)
env = Environment(mdp)
td0 = TabularTD0(env)
td0.learn()
| Python | 0.001497 |
114793d6abce14ece5fbd537cce38230366db365 | Fix compilation | rsqueakvm/plugins/immutability_plugin.py | rsqueakvm/plugins/immutability_plugin.py | """
RSqueak/VM plugin which provides support for immutable objects.
Immutable objects can be created as copy of existing objects
or from a list of arguments. The package `ImmutableObjects`, located in
`/repository`, needs to be loaded in the image.
"""
from rsqueakvm.error import PrimitiveFailedError
from rsqueakvm.model.variable import W_BytesObject, W_WordsObject
from rsqueakvm.plugins.immutability import patch_w_object
from rsqueakvm.plugins.immutability.bytes import W_Immutable_BytesObject
from rsqueakvm.plugins.immutability.pointers import (
select_immutable_pointers_class)
from rsqueakvm.plugins.immutability.words import W_Immutable_WordsObject
from rsqueakvm.plugins.plugin import Plugin
from rsqueakvm.storage_classes import BYTES, POINTERS, WORDS
# Plugin registry instance used by the expose_primitive decorators below.
ImmutabilityPlugin = Plugin()
# Presumably patches the base W_Object class so every object answers
# is_immutable() (used by the primitives below) -- confirm in
# rsqueakvm.plugins.immutability.
patch_w_object()
@ImmutabilityPlugin.expose_primitive(unwrap_spec=[object])
def primitiveIsImmutable(interp, s_frame, w_recv):
    """
    Tests if `w_recv` is an immutable object.

    :param interp: The interpreter proxy.
    :param s_frame: The stack frame.
    :param w_recv: The receiver object.
    :returns: `w_true` if `w_recv` is immutable object, otherwise `w_false`.
    """
    space = interp.space
    return space.w_true if w_recv.is_immutable() else space.w_false
@ImmutabilityPlugin.expose_primitive(unwrap_spec=[object, object])
def primitiveImmutableFrom(interp, s_frame, w_cls, w_obj):
    """
    Creates an immutable copy of a given Smalltalk object.

    :param interp: The interpreter proxy.
    :param s_frame: The stack frame.
    :param w_cls: The imutable objects target class.
    :param w_obj: The Smalltalk object to produce an immutable copy from.
    :returns: An immutable copy of `w_obj` with class `w_cls`.
    :raises: PrimitiveFailedError
    """
    space = interp.space
    instance_kind = w_cls.as_class_get_shadow(space).get_instance_kind()
    if instance_kind == POINTERS:
        # Snapshot all fields, then pick an immutable pointers class suited
        # to that field list.
        pointers = w_obj.fetch_all(space)
        cls = select_immutable_pointers_class(pointers)
        return cls(space, w_cls, pointers)
    elif instance_kind == BYTES and isinstance(w_obj, W_BytesObject):
        return W_Immutable_BytesObject(space, w_cls, w_obj.bytes)
    elif instance_kind == WORDS and isinstance(w_obj, W_WordsObject):
        return W_Immutable_WordsObject(space, w_cls, w_obj.words)
    # Unsupported instance kind or mismatched receiver type.
    raise PrimitiveFailedError
@ImmutabilityPlugin.expose_primitive(unwrap_spec=None)
def primitiveImmutableFromArgs(interp, s_frame, argcount):
    """
    Returns an immutable instance of the receiver (which is a class) with
    all fields initialized with the arguments given.

    :param interp: The interpreter proxy.
    :param s_frame: The stack frame.
    :param argcount: The number of arguments.
    :returns: An immutable object.
    :raises: PrimitiveFailedError
    """
    if argcount == 0:
        raise PrimitiveFailedError
    # Pop the arguments first, then the receiver class beneath them on the
    # stack.
    w_args = s_frame.pop_and_return_n(argcount)[:]
    w_first_arg = w_args[0]
    w_cls = s_frame.pop()
    space = interp.space
    instance_kind = w_cls.as_class_get_shadow(space).get_instance_kind()
    if instance_kind == POINTERS:
        cls = select_immutable_pointers_class(w_args)
        return cls(space, w_cls, w_args)
    elif (instance_kind == BYTES and
            # Bytes/words accept exactly one argument: the object to copy.
            len(w_args) == 1 and isinstance(w_first_arg, W_BytesObject)):
        return W_Immutable_BytesObject(space, w_cls, w_first_arg.bytes)
    elif (instance_kind == WORDS and
            len(w_args) == 1 and isinstance(w_first_arg, W_WordsObject)):
        return W_Immutable_WordsObject(space, w_cls, w_first_arg.words)
    raise PrimitiveFailedError
| """
RSqueak/VM plugin which provides support for immutable objects.
Immutable objects can be created as copy of existing objects
or from a list of arguments. The package `ImmutableObjects`, located in
`/repository`, needs to be loaded in the image.
"""
from rsqueakvm.error import PrimitiveFailedError
from rsqueakvm.model.variable import W_BytesObject, W_WordsObject
from rsqueakvm.plugins.immutability import patch_w_object
from rsqueakvm.plugins.immutability.bytes import W_Immutable_BytesObject
from rsqueakvm.plugins.immutability.pointers import (
select_immutable_pointers_class)
from rsqueakvm.plugins.immutability.words import W_Immutable_WordsObject
from rsqueakvm.plugins.plugin import Plugin
from rsqueakvm.storage_classes import BYTES, POINTERS, WORDS
ImmutabilityPlugin = Plugin()
patch_w_object()
@ImmutabilityPlugin.expose_primitive(unwrap_spec=[object])
def primitiveIsImmutable(interp, s_frame, w_recv):
"""
Tests if `w_recv` is an immutable object.
:param interp: The interpreter proxy.
:param s_frame: The stack frame.
:param w_recv: The receiver object.
:returns: `w_true` if `w_recv` is immutable object, otherwise `w_false`.
"""
if w_recv.is_immutable():
return interp.space.w_true
return interp.space.w_false
@ImmutabilityPlugin.expose_primitive(unwrap_spec=[object, object])
def primitiveImmutableFrom(interp, s_frame, w_cls, w_obj):
"""
Creates an immutable copy of a given Smalltalk object.
:param interp: The interpreter proxy.
:param s_frame: The stack frame.
:param w_cls: The imutable objects target class.
:param w_obj: The Smalltalk object to produce an immutable copy from.
:returns: An immutable copy of `w_obj` with class `w_cls`.
:raises: PrimitiveFailedError
"""
space = interp.space
instance_kind = w_cls.as_class_get_shadow(space).get_instance_kind()
if instance_kind == POINTERS:
pointers = w_obj.fetch_all(space)
cls = select_immutable_pointers_class(pointers)
return cls(space, w_cls, pointers)
elif instance_kind == BYTES and isinstance(w_obj, W_BytesObject):
return W_Immutable_BytesObject(space, w_cls, w_obj.bytes)
elif instance_kind == WORDS and isinstance(w_obj, W_WordsObject):
return W_Immutable_WordsObject(space, w_cls, w_obj.words)
raise PrimitiveFailedError
@ImmutabilityPlugin.expose_primitive(unwrap_spec=None)
def primitiveImmutableFromArgs(interp, s_frame, argcount):
"""
Returns an immutable instance of the receiver (which is a class) with
all fields initialized with the arguments given.
:param interp: The interpreter proxy.
:param s_frame: The stack frame.
:param argcount: The number of arguments.
:returns: An immutable object.
:raises: PrimitiveFailedError
"""
if argcount == 0:
raise PrimitiveFailedError
w_args = s_frame.pop_and_return_n(argcount)[:]
w_cls = s_frame.pop()
space = interp.space
instance_kind = w_cls.as_class_get_shadow(space).get_instance_kind()
if instance_kind == POINTERS:
cls = select_immutable_pointers_class(w_args)
return cls(space, w_cls, w_args)
elif (instance_kind == BYTES and
len(w_args) == 1 and isinstance(w_args[0], W_BytesObject)):
return W_Immutable_BytesObject(space, w_cls, w_args[0].bytes)
elif (instance_kind == WORDS and
len(w_args) == 1 and isinstance(w_args[0], W_WordsObject)):
return W_Immutable_WordsObject(space, w_cls, w_args[0].words)
raise PrimitiveFailedError
| Python | 0.000001 |
66586d0fa74a7b109305d6330b2448c32a54bd1b | Fix lints | flask_fs/backends/__init__.py | flask_fs/backends/__init__.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import six
from flask_fs import files
__all__ = [i.encode('ascii') for i in ('BaseBackend', 'DEFAULT_BACKEND')]
DEFAULT_BACKEND = 'local'
class BaseBackend(object):
    '''
    Abstract class to implement backend.

    Concrete backends implement the primitive operations (exists, open,
    read, write, delete, copy, get_metadata, serve); the helpers below
    (move, save, metadata, as_binary) are built on top of them.
    '''
    root = None

    DEFAULT_MIME = 'application/octet-stream'

    def __init__(self, name, config):
        self.name = name
        self.config = config

    def exists(self, filename):
        '''Test whether a file exists or not given its filename in the storage'''
        raise NotImplementedError('Existance checking is not implemented')

    def open(self, filename, *args, **kwargs):
        '''Open a file given its filename relative to the storage root'''
        raise NotImplementedError('Open operation is not implemented')

    def read(self, filename):
        '''Read a file content given its filename in the storage'''
        raise NotImplementedError('Read operation is not implemented')

    def write(self, filename, content):
        '''Write content into a file given its filename in the storage'''
        raise NotImplementedError('Write operation is not implemented')

    def delete(self, filename):
        '''Delete a file given its filename in the storage'''
        raise NotImplementedError('Delete operation is not implemented')

    def copy(self, filename, target):
        '''Copy a file given its filename to another path in the storage'''
        raise NotImplementedError('Copy operation is not implemented')

    def move(self, filename, target):
        '''
        Move a file given its filename to another path in the storage

        Default implementation perform a copy then a delete.
        Backends should overwrite it if there is a better way.
        '''
        self.copy(filename, target)
        self.delete(filename)

    def save(self, file_or_wfs, filename, overwrite=False):
        '''
        Save a file-like object or a `werkzeug.FileStorage` with the specified filename.

        :param storage: The file or the storage to be saved.
        :param filename: The destination in the storage.
        :param overwrite: if `False`, raise an exception if file exists in storage

        :raises FileExists: when file exists and overwrite is `False`
        '''
        # NOTE(review): `overwrite` is not enforced here, so FileExists is
        # never raised by this base implementation despite the docstring --
        # confirm whether concrete backends are expected to enforce it.
        self.write(filename, file_or_wfs.read())
        return filename

    def metadata(self, filename):
        '''
        Fetch all available metadata for a given file
        '''
        meta = self.get_metadata(filename)
        # Fix backend mime misdetection
        meta['mime'] = meta.get('mime') or files.mime(filename, self.DEFAULT_MIME)
        return meta

    def get_metadata(self, filename):
        '''
        Backend specific method to retrieve metadata for a given file
        '''
        # BUG FIX: the message previously said 'Copy operation is not
        # implemented', copy-pasted from copy() above.
        raise NotImplementedError('Metadata fetching is not implemented')

    def serve(self, filename):
        '''Serve a file given its filename'''
        raise NotImplementedError('serve operation is not implemented')

    def as_binary(self, content, encoding='utf8'):
        '''Perform content encoding for binary write'''
        if hasattr(content, 'read'):
            return content.read()
        elif isinstance(content, six.text_type):
            return content.encode(encoding)
        else:
            return content
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import six
from flask_fs import files
__all__ = [i.encode('ascii') for i in ('BaseBackend', 'DEFAULT_BACKEND')]
DEFAULT_BACKEND = 'local'
class BaseBackend(object):
'''
Abstract class to implement backend.
'''
root = None
DEFAULT_MIME = 'application/octet-stream'
def __init__(self, name, config):
self.name = name
self.config = config
def exists(self, filename):
'''Test wether a file exists or not given its filename in the storage'''
raise NotImplementedError('Existance checking is not implemented')
def open(self, filename, *args, **kwargs):
'''Open a file given its filename relative to the storage root'''
raise NotImplementedError('Open operation is not implemented')
def read(self, filename):
'''Read a file content given its filename in the storage'''
raise NotImplementedError('Read operation is not implemented')
def write(self, filename, content):
'''Write content into a file given its filename in the storage'''
raise NotImplementedError('Write operation is not implemented')
def delete(self, filename):
'''Delete a file given its filename in the storage'''
raise NotImplementedError('Delete operation is not implemented')
def copy(self, filename, target):
'''Copy a file given its filename to another path in the storage'''
raise NotImplementedError('Copy operation is not implemented')
def move(self, filename, target):
'''
Move a file given its filename to another path in the storage
Default implementation perform a copy then a delete.
Backends should overwrite it if there is a better way.
'''
self.copy(filename, target)
self.delete(filename)
def save(self, file_or_wfs, filename, overwrite=False):
'''
Save a file-like object or a `werkzeug.FileStorage` with the specified filename.
:param storage: The file or the storage to be saved.
:param filename: The destination in the storage.
:param overwrite: if `False`, raise an exception if file exists in storage
:raises FileExists: when file exists and overwrite is `False`
'''
self.write(filename, file_or_wfs.read())
return filename
def metadata(self, filename):
'''
Fetch all available metadata for a given file
'''
meta = self.get_metadata(filename)
# Fix backend mime misdetection
meta['mime'] = meta.get('mime') or files.mime(filename, self.DEFAULT_MIME)
return meta
def get_metadata(self, filename):
'''
Backend specific method to retrieve metadata for a given file
'''
raise NotImplementedError('Copy operation is not implemented')
def serve(self, filename):
'''Serve a file given its filename'''
raise NotImplementedError('serve operation is not implemented')
def as_binary(self, content, encoding='utf8'):
'''Perform content encoding for binary write'''
if hasattr(content, 'read'):
return content.read()
elif isinstance(content, six.text_type):
return content.encode(encoding)
else:
return content
| Python | 0.000006 |
27acea8beae7876159f142add8d3e55b62d61f8f | Add read method to modulators | feder/questionaries/modulator.py | feder/questionaries/modulator.py | from django import forms
from django.utils.translation import ugettext as _
class BaseBlobFormModulator(object):
    '''Base class for modulators that build form fields from a stored blob.

    Subclasses must implement create(), answer() and read().
    '''

    description = None

    def __init__(self, blob=None):
        super(BaseBlobFormModulator, self).__init__()
        self.blob = blob or {}

    def create(self, fields):
        raise NotImplementedError("Provide method 'create'")

    def answer(self, fields):
        raise NotImplementedError("Provide method 'answer'")

    def read(self, cleaned_data):
        raise NotImplementedError("Provide method 'read'")
class BaseSimpleModulator(BaseBlobFormModulator):
    '''Modulator that asks for a single value of a fixed form-field type.'''

    # Form field class used for the answer; set by concrete subclasses.
    output_field_cls = None

    def create(self, fields):
        '''Add the question-definition fields to ``fields`` (in place).'''
        fields['name'] = forms.CharField(label=_("Question"))
        fields['help_text'] = forms.CharField(label=_("Description of question"))
        # BUG FIX: Django's BooleanField is itself required by default, which
        # makes it impossible to submit the form with the box unchecked
        # (i.e. "not required" could never be saved) -- mark the checkbox
        # itself as optional.
        fields['required'] = forms.BooleanField(label=_("This fields is required?"),
                                                required=False)

    def answer(self, fields):
        '''Add the end-user answer field built from the stored blob.'''
        fields['value'] = self.output_field_cls(label=self.blob['name'],
            help_text=self.blob['help_text'], required=self.blob.get('required', True))

    def read(self, cleaned_data):
        '''Extract the answered value from a form's cleaned_data.'''
        return cleaned_data['value']
class CharModulator(BaseSimpleModulator):
    # Free-text question: answers use a plain CharField.
    description = "Char modulator"
    output_field_cls = forms.CharField
class IntegerModulator(BaseSimpleModulator):
    description = "Integer modulator"
    # NOTE(review): despite the name, this uses forms.CharField, so the value
    # is neither validated nor coerced to an integer -- forms.IntegerField
    # looks intended; confirm before changing (stored answers may depend on
    # the current string behaviour).
    output_field_cls = forms.CharField
class EmailModulator(BaseSimpleModulator):
    description = "E-mail modulator"
    # NOTE(review): uses forms.CharField instead of forms.EmailField, so no
    # e-mail validation is applied -- confirm whether intended.
    output_field_cls = forms.CharField
modulators = {'char': CharModulator,
'int': IntegerModulator,
'email': EmailModulator}
| from django import forms
from django.utils.translation import ugettext as _
class BaseBlobFormModulator(object):
description = None
def __init__(self, blob=None):
self.blob = blob or {}
super(BaseBlobFormModulator, self).__init__()
def create(self):
raise NotImplementedError("")
def answer(self):
raise NotImplementedError("")
class BaseSimpleModulator(BaseBlobFormModulator):
output_field_cls = None
def create(self, fields):
fields['name'] = forms.CharField(label=_("Question"))
fields['help_text'] = forms.CharField(label=_("Description of question"))
fields['required'] = forms.BooleanField(label=_("This fields is required?"))
def answer(self, fields):
fields['value'] = self.output_field_cls(label=self.blob['name'],
help_text=self.blob['help_text'], required=self.blob.get('required', True))
class CharModulator(BaseSimpleModulator):
description = "Char modulator"
output_field_cls = forms.CharField
class IntegerModulator(BaseSimpleModulator):
description = "Integer modulator"
output_field_cls = forms.CharField
class EmailModulator(BaseSimpleModulator):
description = "E-mail modulator"
output_field_cls = forms.CharField
modulators = {'char': CharModulator, 'int': IntegerModulator, 'email': EmailModulator}
| Python | 0.000001 |
90e1b254266155abded62bc3155785961acc0ff0 | Split filepath and count in credential module | bin/Credential.py | bin/Credential.py | #!/usr/bin/env python2
# -*-coding:UTF-8 -*
import time
from packages import Paste
from pubsublogger import publisher
from Helper import Process
import re
if __name__ == "__main__":
    publisher.port = 6380
    publisher.channel = "Script"
    config_section = "Credential"
    p = Process(config_section)
    publisher.info("Find credentials")

    # Alert threshold: above this many distinct credentials the paste is
    # escalated from info to warning.
    critical = 8

    # NOTE(review): this pattern keeps JavaScript-style '/.../' delimiters
    # and '^'/'$' anchors, so re.findall will rarely (if ever) match whole
    # URLs inside a paste -- confirm and fix separately.
    regex_web = "/^(https?:\/\/)?([\da-z\.-]+)\.([a-z\.]{2,6})([\/\w \.-]*)*\/?$/"
    # email:password pairs, e.g. "user@example.com:hunter2"
    regex_cred = "[a-zA-Z0-9._-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,6}:[a-zA-Z0-9\_\-]+"

    while True:
        message = p.get_from_set()
        if message is None:
            publisher.debug("Script Credential is Idling 10s")
            print('Sleeping')
            time.sleep(10)
            continue

        filepath, count = message.split()
        # BUG FIX: count arrives as a string; the previous `count < 5`
        # compared str against int, so the filter never triggered.
        if int(count) < 5:
            # Less than 5 matches from the top password list, false positive.
            continue

        paste = Paste.Paste(filepath)
        content = paste.get_p_content()
        creds = set(re.findall(regex_cred, content))
        if len(creds) == 0:
            continue

        sites = set(re.findall(regex_web, content))

        message = '{} credentials found.'.format(len(creds))
        if sites:
            message += ' Related websites: {}'.format(', '.join(sites))

        to_print = 'Credential;{};{};{};{}'.format(paste.p_source, paste.p_date, paste.p_name, message)
        print('\n '.join(creds))

        if len(creds) > critical:
            # BUG FIX: the alert text previously hard-coded "10" while the
            # threshold above is 8; report the actual threshold instead.
            print("========> Found more than {} credentials in this file : {}".format(critical, filepath))
            publisher.warning(to_print)
            if sites:
                print("=======> Probably on : {}".format(', '.join(sites)))
        else:
            publisher.info(to_print)
| #!/usr/bin/env python2
# -*-coding:UTF-8 -*
import time
from packages import Paste
from pubsublogger import publisher
from Helper import Process
import re
if __name__ == "__main__":
publisher.port = 6380
publisher.channel = "Script"
config_section = "Credential"
p = Process(config_section)
publisher.info("Find credentials")
critical = 10
regex_web = "/^(https?:\/\/)?([\da-z\.-]+)\.([a-z\.]{2,6})([\/\w \.-]*)*\/?$/"
regex_cred = "[a-zA-Z0-9._-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,6}:[a-zA-Z0-9\_\-]+"
while True:
filepath = p.get_from_set()
if filepath is None:
publisher.debug("Script Credential is Idling 10s")
print('Sleeping')
time.sleep(10)
continue
paste = Paste.Paste(filepath)
content = paste.get_p_content()
creds = set(re.findall(regex_cred, content))
if len(creds) == 0:
continue
sites = set(re.findall(regex_web, content))
message = '{} credentials found.'.format(len(creds))
if sites:
message += ' Related websites: {}'.format(', '.join(sites))
to_print = 'Credential;{};{};{};{}'.format(paste.p_source, paste.p_date, paste.p_name, message)
print('\n '.join(creds))
if len(creds) > critical:
print("========> Found more than 10 credentials on this file : {}".format(filepath))
publisher.warning(to_print)
if sites:
print("=======> Probably on : {}".format(', '.join(sites)))
else:
publisher.info(to_print)
| Python | 0 |
e18047a3cb3c8303bf64dc9ce5fc230e29b25b56 | Fix fac-gitall.py | bin/fac-gitall.py | bin/fac-gitall.py | #!/usr/bin/env python3
import sys
import os
import lnls
#import git
from termcolor import colored
import subprocess
git_functions = ('pull','push','status','diff','clone')
def run_git_clone():
if not os.path.exists(lnls.folder_code):
print('fac-gitall.py: please create ' + lnls.folder_code + ' folder with correct permissions first!')
return
all_repos = ('collective_effects',
'fieldmaptrack',
'job_manager',
'lnls',
'mathphys',
'MatlabMiddleLayer',
'pyaccel',
'scripts',
'sirius',
'sirius_parameters',
'sirius_wiki',
'tools',
'trackcpp',
'tracy_sirius',
'va',
)
for repo in all_repos:
cmd = 'git clone ssh://git@github.com/lnls-fac/' + repo + '.git'
os.system(cmd)
def run_git(func):
if func == 'clone': return run_git_clone()
fnames = os.listdir(lnls.folder_code)
for fname in fnames:
repo_folder = os.path.join(lnls.folder_code, fname)
if not os.path.exists(os.path.join(repo_folder,'.git')): continue
print('processing ' + func + colored(' <'+fname+'>','yellow')+'...')
cmd = 'cd ' + repo_folder + '; git ' + func
text = subprocess.call([cmd], shell=True, stdout=sys.stdout)
print('...ok')
print()
if __name__ == '__main__':
if len(sys.argv) != 2 or sys.argv[1] not in git_functions:
print('usage: fac-gitall.py [' + '|'.join(git_functions) + ']')
else:
print()
run_git(sys.argv[1])
| #!/usr/bin/env python3
import sys
import os
import lnls
#import git
from termcolor import colored
import subprocess
git_functions = ('pull','push','status','diff','clone')
def run_git_clone():
if not os.path.exists(lnls.folder_code):
print('gitall.py: please create ' + lnls.folder_code + ' folder with correct permissions first!')
return
all_repos = ('collective_effects',
'fieldmaptrack',
'job_manager',
'lnls',
'mathphys',
'MatlabMiddleLayer',
'pyaccel',
'scripts',
'sirius',
'sirius_parameters',
'sirius_wiki',
'tools',
'trackcpp',
'tracy_sirius',
'va',
)
for repo in all_repos:
cmd = 'git clone ssh://git@github.com/lnls-fac/' + repo + '.git'
os.system(cmd)
def run_git(func):
if func == 'clone': return run_git_clone()
fnames = os.listdir(lnls.folder_code)
for fname in fnames:
repo_folder = os.path.join(lnls.folder_code, fname)
if not os.path.exists(os.path.join(repo_folder,'.git')): continue
print('processing ' + func + colored(' <'+fname+'>','yellow')+'...')
cmd = 'cd ' + repo_folder + '; git ' + func
text = subprocess.call([cmd], shell=True, stdout=sys.stdout)
print('...ok')
print()
if __name__ == '__main__':
if len(sys.argv) != 2 or sys.argv[1] not in git_functions:
print('usage: gitall.py [' + '|'.join(git_functions) + ']')
else:
print()
run_git(sys.argv[1])
| Python | 0 |
1b172c592bb5efc1a0dcf8f18d6ea6a1037ec9ff | Clean things up a bit | filebutler_upload/filehandler.py | filebutler_upload/filehandler.py | import requests
class Filemanager:
def __init__(self, url, username, password):
self.headers = {'Accept': 'application/json'}
self.username = username
self.password = password
self.url = url
def list(self):
'''
List all files uploaded by user
'''
data = {
'username': self.username,
'password': self.password
}
response = requests.post(
self.url + 'files',
data=data,
headers=self.headers
)
if response.status_code == 200:
return response.json['message']
else:
return {}
def delete(self, hash):
''' delete specified hash '''
if hash == 'all':
pass
data = {
'username': self.username,
'password': self.password,
}
response = requests.post(
self.url + hash + '/delete',
data=data,
headers=self.headers
)
return response.text
def upload(self, upload_file,
download_password, one_time_download, expire):
files = {'file': upload_file}
data = {
'username': self.username,
'password': self.password,
'download_password': download_password,
'one_time_download': '1' if one_time_download else '0',
'expire': expire
}
response = requests.post(
self.url,
data=data,
files=files,
headers=self.headers
)
return response
| import requests
#import os
#from ConfigParser import RawConfigParser
#from text_table import TextTable
class Filemanager:
def __init__(self, url, username, password):
self.headers = {'Accept': 'application/json'}
self.username = username
self.password = password
self.url = url
def list(self):
'''
List all files uploaded by user
'''
data = {
'username': self.username,
'password': self.password
}
response = requests.post(
self.url + 'files',
data=data,
headers=self.headers
)
if response.status_code == 200:
return response.json['message']
else:
return {}
def delete(self, hash):
''' delete specified hash '''
if hash == 'all':
pass
data = {
'username': self.username,
'password': self.password,
}
response = requests.post(
self.url + hash + '/delete',
data=data,
headers=self.headers
)
return response.text
def upload(self, upload_file,
download_password, one_time_download, expire):
files = {'file': upload_file}
data = {
'username': self.config.get('settings', 'username'),
'password': self.config.get('settings', 'password'),
'download_password': self.options.password,
'one_time_download': '1' if self.options.onetime else '0',
'expire': self.options.lifetime
}
response = requests.post(
self.url,
data=data,
files=files, headers=self.headers
)
return response
# For testing, remove when finished.
#config = RawConfigParser()
#config.read(os.path.expanduser('~/.filebutler-upload.conf'))
#username = config.get('settings', 'username')
#password = config.get('settings', 'password')
#url = config.get('settings', 'upload_url')
#fm = Filemanager(url, username, password)
#t = TextTable((40, 'Download hash'), (35, 'Filename'))
#for hash, filename in fm.list().iteritems():
# t.row(hash, filename)
#print t.draw()
print fm.delete('a13170f4cdbd96743e18126306ddba484785ba6b')
| Python | 0.000008 |
3fea731e62653dfc847e82b8185feb029d844fd8 | Revert "minifiying doctype json's" | frappe/modules/export_file.py | frappe/modules/export_file.py | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe, os, json
import frappe.model
from frappe.modules import scrub, get_module_path, lower_case_files_for, scrub_dt_dn
def export_doc(doc):
export_to_files([[doc.doctype, doc.name]])
def export_to_files(record_list=None, record_module=None, verbose=0, create_init=None):
"""
Export record_list to files. record_list is a list of lists ([doctype],[docname] ) ,
"""
if frappe.flags.in_import:
return
if record_list:
for record in record_list:
write_document_file(frappe.get_doc(record[0], record[1]), record_module, create_init=create_init)
def write_document_file(doc, record_module=None, create_init=None):
newdoc = doc.as_dict(no_nulls=True)
# strip out default fields from children
for df in doc.meta.get_table_fields():
for d in newdoc.get(df.fieldname):
for fieldname in frappe.model.default_fields:
if fieldname in d:
del d[fieldname]
module = record_module or get_module_name(doc)
if create_init is None:
create_init = doc.doctype in lower_case_files_for
# create folder
folder = create_folder(module, doc.doctype, doc.name, create_init)
# write the data file
fname = (doc.doctype in lower_case_files_for and scrub(doc.name)) or doc.name
with open(os.path.join(folder, fname +".json"),'w+') as txtfile:
txtfile.write(frappe.as_json(newdoc))
def get_module_name(doc):
if doc.doctype == 'Module Def':
module = doc.name
elif doc.doctype=="Workflow":
module = frappe.db.get_value("DocType", doc.document_type, "module")
elif hasattr(doc, 'module'):
module = doc.module
else:
module = frappe.db.get_value("DocType", doc.doctype, "module")
return module
def create_folder(module, dt, dn, create_init):
module_path = get_module_path(module)
dt, dn = scrub_dt_dn(dt, dn)
# create folder
folder = os.path.join(module_path, dt, dn)
frappe.create_folder(folder)
# create init_py_files
if create_init:
create_init_py(module_path, dt, dn)
return folder
def create_init_py(module_path, dt, dn):
def create_if_not_exists(path):
initpy = os.path.join(path, '__init__.py')
if not os.path.exists(initpy):
open(initpy, 'w').close()
create_if_not_exists(os.path.join(module_path))
create_if_not_exists(os.path.join(module_path, dt))
create_if_not_exists(os.path.join(module_path, dt, dn))
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe, os, json
import frappe.model
from frappe.modules import scrub, get_module_path, lower_case_files_for, scrub_dt_dn
def export_doc(doc):
export_to_files([[doc.doctype, doc.name]])
def export_to_files(record_list=None, record_module=None, verbose=0, create_init=None):
"""
Export record_list to files. record_list is a list of lists ([doctype],[docname] ) ,
"""
if frappe.flags.in_import:
return
if record_list:
for record in record_list:
write_document_file(frappe.get_doc(record[0], record[1]), record_module, create_init=create_init)
def write_document_file(doc, record_module=None, create_init=None):
newdoc = doc.as_dict(no_nulls=True)
# strip out default fields from children
for df in doc.meta.get_table_fields():
for d in newdoc.get(df.fieldname):
for fieldname in frappe.model.default_fields:
if fieldname in d:
del d[fieldname]
for fieldname in d.keys():
if d[fieldname] == 0 or d[fieldname] == "":
del d[fieldname]
module = record_module or get_module_name(doc)
if create_init is None:
create_init = doc.doctype in lower_case_files_for
# create folder
folder = create_folder(module, doc.doctype, doc.name, create_init)
# write the data file
fname = (doc.doctype in lower_case_files_for and scrub(doc.name)) or doc.name
with open(os.path.join(folder, fname +".json"),'w+') as txtfile:
txtfile.write(frappe.as_json(newdoc))
def get_module_name(doc):
if doc.doctype == 'Module Def':
module = doc.name
elif doc.doctype=="Workflow":
module = frappe.db.get_value("DocType", doc.document_type, "module")
elif hasattr(doc, 'module'):
module = doc.module
else:
module = frappe.db.get_value("DocType", doc.doctype, "module")
return module
def create_folder(module, dt, dn, create_init):
module_path = get_module_path(module)
dt, dn = scrub_dt_dn(dt, dn)
# create folder
folder = os.path.join(module_path, dt, dn)
frappe.create_folder(folder)
# create init_py_files
if create_init:
create_init_py(module_path, dt, dn)
return folder
def create_init_py(module_path, dt, dn):
def create_if_not_exists(path):
initpy = os.path.join(path, '__init__.py')
if not os.path.exists(initpy):
open(initpy, 'w').close()
create_if_not_exists(os.path.join(module_path))
create_if_not_exists(os.path.join(module_path, dt))
create_if_not_exists(os.path.join(module_path, dt, dn))
| Python | 0 |
f03ba99cd7c4db064b2ece3d226b30c8e9ca63bf | Add a test for scipy.integrate.newton_cotes. A more comprehensive set of tests would be better, but it's a start. | scipy/integrate/tests/test_quadrature.py | scipy/integrate/tests/test_quadrature.py |
import numpy
from numpy import cos, sin, pi
from numpy.testing import *
from scipy.integrate import quadrature, romberg, romb, newton_cotes
class TestQuadrature(TestCase):
def quad(self, x, a, b, args):
raise NotImplementedError
def test_quadrature(self):
# Typical function with two extra arguments:
def myfunc(x,n,z): # Bessel function integrand
return cos(n*x-z*sin(x))/pi
val, err = quadrature(myfunc,0,pi,(2,1.8))
table_val = 0.30614353532540296487
assert_almost_equal(val, table_val, decimal=7)
def test_romberg(self):
# Typical function with two extra arguments:
def myfunc(x, n, z): # Bessel function integrand
return cos(n*x-z*sin(x))/pi
val = romberg(myfunc,0,pi, args=(2, 1.8))
table_val = 0.30614353532540296487
assert_almost_equal(val, table_val, decimal=7)
def test_romb(self):
assert_equal(romb(numpy.arange(17)),128)
def test_non_dtype(self):
# Check that we work fine with functions returning float
import math
valmath = romberg(math.sin, 0, 1)
expected_val = 0.45969769413185085
assert_almost_equal(valmath, expected_val, decimal=7)
def test_newton_cotes(self):
"""Test the first few degrees, for evenly spaced points."""
n = 1
wts, errcoff = newton_cotes(n, 1)
assert_equal(wts, n*numpy.array([0.5, 0.5]))
assert_almost_equal(errcoff, -n**3/12.0)
n = 2
wts, errcoff = newton_cotes(n, 1)
assert_almost_equal(wts, n*numpy.array([1.0, 4.0, 1.0])/6.0)
assert_almost_equal(errcoff, -n**5/2880.0)
n = 3
wts, errcoff = newton_cotes(n, 1)
assert_almost_equal(wts, n*numpy.array([1.0, 3.0, 3.0, 1.0])/8.0)
assert_almost_equal(errcoff, -n**5/6480.0)
n = 4
wts, errcoff = newton_cotes(n, 1)
assert_almost_equal(wts, n*numpy.array([7.0, 32.0, 12.0, 32.0, 7.0])/90.0)
assert_almost_equal(errcoff, -n**7/1935360.0)
if __name__ == "__main__":
run_module_suite()
|
import numpy
from numpy import cos, sin, pi
from numpy.testing import *
from scipy.integrate import quadrature, romberg, romb
class TestQuadrature(TestCase):
def quad(self, x, a, b, args):
raise NotImplementedError
def test_quadrature(self):
# Typical function with two extra arguments:
def myfunc(x,n,z): # Bessel function integrand
return cos(n*x-z*sin(x))/pi
val, err = quadrature(myfunc,0,pi,(2,1.8))
table_val = 0.30614353532540296487
assert_almost_equal(val, table_val, decimal=7)
def test_romberg(self):
# Typical function with two extra arguments:
def myfunc(x, n, z): # Bessel function integrand
return cos(n*x-z*sin(x))/pi
val = romberg(myfunc,0,pi, args=(2, 1.8))
table_val = 0.30614353532540296487
assert_almost_equal(val, table_val, decimal=7)
def test_romb(self):
assert_equal(romb(numpy.arange(17)),128)
def test_non_dtype(self):
# Check that we work fine with functions returning float
import math
valmath = romberg(math.sin, 0, 1)
expected_val = 0.45969769413185085
assert_almost_equal(valmath, expected_val, decimal=7)
if __name__ == "__main__":
run_module_suite()
| Python | 0.996962 |
3aa198bc49b32db49abe5653ed82f1ede7081df7 | make fix work for both versions of GeoSteiner | geonet/geosteiner.py | geonet/geosteiner.py | '''
Wrapper for GeoSteiner program
'''
from itertools import dropwhile, takewhile, ifilter
import os
from subprocess import Popen, PIPE
from geonet.network import SteinerTree
# TODO: check if GeoSteiner is available
def geosteiner(pos):
'''Call geosteiner to compute and return'''
def parse_ps(output):
lines = output.splitlines()
no_pre = dropwhile(lambda l:' % fs' not in l, lines)
end_kwds = ['Euclidean SMT', '(Steiner Minimal']
no_post = takewhile(lambda l: all(kw not in l for kw in end_kwds), no_pre)
filter_comments = ifilter(lambda l: ' % fs' not in l, no_post)
arcs = [l.split()[:4] for l in filter_comments]
return arcs
def build_tree(nodes, raw_arcs, pos):
_nodes = list(nodes)
_arcs = []
_steiner_pos = {}
num = 0
for ra in raw_arcs:
if ra[1] == 'T':
tail = nodes[int(ra[0])]
else: # must be Steiner node
coords = '_'.join(ra[0:2])
if coords in _steiner_pos:
tail = _steiner_pos[coords]
else:
node = '_%d' % num
_nodes.append(node)
tail = _steiner_pos.setdefault(coords, node)
num += 1
if ra[3] == 'T':
head = nodes[int(ra[2])]
else: # must be Steiner node
coords = '_'.join(ra[2:4])
if coords in _steiner_pos:
head = _steiner_pos[coords]
else:
node = '_%d' % num
_nodes.append(node)
head = _steiner_pos.setdefault(coords, node)
num += 1
_arcs.append((tail, head))
tree = SteinerTree(_nodes, _arcs, pos)
steiner_pos = {}
for k,v in _steiner_pos.items():
node = v
coords = k.split('_')
steiner_pos[node] = float(coords[0]), float(coords[1])
return tree, steiner_pos
nodes = list(sorted(pos.keys()))
nodeset = ''.join('%4d %4d\n' % pos[n] for n in nodes)
efst = Popen(['efst'], stdin=PIPE, stdout=PIPE)
efst_output, _ = efst.communicate(nodeset)
bb = Popen(['bb'], stdin=PIPE, stdout=PIPE)
output, _ = bb.communicate(efst_output)
raw_arcs = parse_ps(output)
tree, steiner_pos = build_tree(nodes, raw_arcs, pos)
return tree, steiner_pos
| '''
Wrapper for GeoSteiner program
'''
from itertools import dropwhile, takewhile, ifilter
import os
from subprocess import Popen, PIPE
from geonet.network import SteinerTree
# TODO: check if GeoSteiner is available
def geosteiner(pos):
'''Call geosteiner to compute and return'''
def parse_ps(output):
lines = output.splitlines()
no_pre = dropwhile(lambda l:' % fs' not in l, lines)
no_post = takewhile(lambda l:'Euclidean SMT' not in l, no_pre)
filter_comments = ifilter(lambda l: ' % fs' not in l, no_post)
arcs = [l.split()[:4] for l in filter_comments]
return arcs
def build_tree(nodes, raw_arcs, pos):
_nodes = list(nodes)
_arcs = []
_steiner_pos = {}
num = 0
for ra in raw_arcs:
if ra[1] == 'T':
tail = nodes[int(ra[0])]
else: # must be Steiner node
coords = '_'.join(ra[0:2])
if coords in _steiner_pos:
tail = _steiner_pos[coords]
else:
node = '_%d' % num
_nodes.append(node)
tail = _steiner_pos.setdefault(coords, node)
num += 1
if ra[3] == 'T':
head = nodes[int(ra[2])]
else: # must be Steiner node
coords = '_'.join(ra[2:4])
if coords in _steiner_pos:
head = _steiner_pos[coords]
else:
node = '_%d' % num
_nodes.append(node)
head = _steiner_pos.setdefault(coords, node)
num += 1
_arcs.append((tail, head))
tree = SteinerTree(_nodes, _arcs, pos)
steiner_pos = {}
for k,v in _steiner_pos.items():
node = v
coords = k.split('_')
steiner_pos[node] = float(coords[0]), float(coords[1])
return tree, steiner_pos
nodes = list(sorted(pos.keys()))
nodeset = ''.join('%4d %4d\n' % pos[n] for n in nodes)
efst = Popen(['efst'], stdin=PIPE, stdout=PIPE)
efst_output, _ = efst.communicate(nodeset)
bb = Popen(['bb'], stdin=PIPE, stdout=PIPE)
output, _ = bb.communicate(efst_output)
raw_arcs = parse_ps(output)
tree, steiner_pos = build_tree(nodes, raw_arcs, pos)
return tree, steiner_pos
| Python | 0 |
a9b2b6fe868ab564653f40e611ce6a788f396981 | Fix wrong variable replacement | backend/globaleaks/tests/jobs/test_pgp_check_sched.py | backend/globaleaks/tests/jobs/test_pgp_check_sched.py | # -*- coding: utf-8 -*-
from twisted.internet.defer import inlineCallbacks
from globaleaks.tests import helpers
from globaleaks.jobs import pgp_check_sched
class TestPGPCheckSchedule(helpers.TestGLWithPopulatedDB):
encryption_scenario = 'ONE_VALID_ONE_EXPIRED'
@inlineCallbacks
def test_pgp_check_schedule(self):
# FIXME: complete this unit test by performing checks
# on the actions performed by the scheduler.
yield pgp_check_sched.PGPCheckSchedule().operation()
| # -*- coding: utf-8 -*-
from twisted.internet.defer import inlineCallbacks
from globaleaks.tests import helpers
from globaleaks.jobs import secure_file_delete_sched
class TestPGPCheckSchedule(helpers.TestGLWithPopulatedDB):
encryption_scenario = 'ONE_VALID_ONE_EXPIRED'
@inlineCallbacks
def test_pgp_check_schedule(self):
# FIXME: complete this unit test by performing checks
# on the actions performed by the scheduler.
yield pgp_check_sched.PGPCheckSchedule().operation()
| Python | 0.000019 |
6ef76159ab32e454241f7979a1cdf320c463dd9e | add config file option | planetstack/planetstack-backend.py | planetstack/planetstack-backend.py | #!/usr/bin/env python
import os
import argparse
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "planetstack.settings")
from observer.backend import Backend
from planetstack.config import Config
config = Config()
# after http://www.erlenstar.demon.co.uk/unix/faq_2.html
def daemon():
"""Daemonize the current process."""
if os.fork() != 0: os._exit(0)
os.setsid()
if os.fork() != 0: os._exit(0)
os.umask(0)
devnull = os.open(os.devnull, os.O_RDWR)
os.dup2(devnull, 0)
# xxx fixme - this is just to make sure that nothing gets stupidly lost - should use devnull
logdir=os.path.dirname(config.observer_logfile)
# when installed in standalone we might not have httpd installed
if not os.path.isdir(logdir): os.mkdir(logdir)
crashlog = os.open('%s'%config.observer_logfile, os.O_RDWR | os.O_APPEND | os.O_CREAT, 0644)
os.dup2(crashlog, 1)
os.dup2(crashlog, 2)
def main():
# Generate command line parser
parser = argparse.ArgumentParser(usage='%(prog)s [options]')
parser.add_argument('-d', '--daemon', dest='daemon', action='store_true', default=False,
help='Run as daemon.')
# smbaker: util/config.py parses sys.argv[] directly to get config file name; include the option here to avoid
# throwing unrecognized argument exceptions
parser.add_argument('-C', '--config', dest='config_file', action='store', default="/opt/planetstack/plstackapi_config",
help='Name of config file.')
args = parser.parse_args()
if args.daemon: daemon()
backend = Backend()
backend.run()
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import os
import argparse
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "planetstack.settings")
from observer.backend import Backend
from planetstack.config import Config
config = Config()
# after http://www.erlenstar.demon.co.uk/unix/faq_2.html
def daemon():
"""Daemonize the current process."""
if os.fork() != 0: os._exit(0)
os.setsid()
if os.fork() != 0: os._exit(0)
os.umask(0)
devnull = os.open(os.devnull, os.O_RDWR)
os.dup2(devnull, 0)
# xxx fixme - this is just to make sure that nothing gets stupidly lost - should use devnull
logdir=os.path.dirname(config.observer_logfile)
# when installed in standalone we might not have httpd installed
if not os.path.isdir(logdir): os.mkdir(logdir)
crashlog = os.open('%s'%config.observer_logfile, os.O_RDWR | os.O_APPEND | os.O_CREAT, 0644)
os.dup2(crashlog, 1)
os.dup2(crashlog, 2)
def main():
# Generate command line parser
parser = argparse.ArgumentParser(usage='%(prog)s [options]')
parser.add_argument('-d', '--daemon', dest='daemon', action='store_true', default=False,
help='Run as daemon.')
args = parser.parse_args()
if args.daemon: daemon()
backend = Backend()
backend.run()
if __name__ == '__main__':
main()
| Python | 0.000002 |
a5942402fdf8f8013dbe62636ea29582538e33c6 | fix argument name | bin/trait_mapping/create_table_for_manual_curation.py | bin/trait_mapping/create_table_for_manual_curation.py | #!/usr/bin/env python3
import argparse
from eva_cttv_pipeline.trait_mapping.ols import (
get_ontology_label_from_ols, is_current_and_in_efo, is_in_efo,
)
def find_previous_mapping(trait_name, previous_mappings):
if trait_name not in previous_mappings:
return ''
uri = previous_mappings[trait_name]
label = get_ontology_label_from_ols(uri)
uri_is_current_and_in_efo = is_current_and_in_efo(uri)
uri_in_efo = is_in_efo(uri)
if uri_in_efo:
trait_status = 'EFO_CURRENT' if uri_is_current_and_in_efo else 'EFO_OBSOLETE'
else:
trait_status = 'NOT_CONTAINED'
trait_string = '|'.join([uri, label, 'NOT_SPECIFIED', 'previously-used', trait_status])
return trait_string
def find_exact_mapping(trait_name, mappings):
for mapping in mappings:
if mapping.lower().split('|')[1] == trait_name:
return mapping
return ''
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'-t', '--traits-for-curation',
help='Table with traits for which the pipeline failed to make a confident prediction')
parser.add_argument(
'-m', '--previous-mappings',
help='Table with all mappings previously issued by EVA')
parser.add_argument(
'-o', '--output',
help='Output TSV to be loaded in Google Sheets for manual curation')
args = parser.parse_args()
outfile = open(args.output, 'w')
# Load all previous mappings
previous_mappings = dict(l.rstrip().split('\t') for l in open(args.previous_mappings))
# Process all mappings which require manual curation
for line in open(args.traits_for_curation):
fields = line.rstrip().split('\t')
trait_name, trait_freq = fields[:2]
mappings = fields[2:]
previous_mapping = find_previous_mapping(trait_name, previous_mappings)
exact_mapping = find_exact_mapping(trait_name, mappings)
out_line = '\t'.join(
[trait_name, trait_freq,
# Mapping to use, if ready, comment, mapping URI, mapping label, whether exact, in EFO
'', '', '', '', '', '', '',
previous_mapping, exact_mapping] + mappings
) + '\n'
outfile.write(out_line)
| #!/usr/bin/env python3
import argparse
from eva_cttv_pipeline.trait_mapping.ols import (
get_ontology_label_from_ols, is_current_and_in_efo, is_in_efo,
)
def find_previous_mapping(trait_name, previous_mappings):
if trait_name not in previous_mappings:
return ''
uri = previous_mappings[trait_name]
label = get_ontology_label_from_ols(uri)
uri_is_current_and_in_efo = is_current_and_in_efo(uri)
uri_in_efo = is_in_efo(uri)
if uri_in_efo:
trait_status = 'EFO_CURRENT' if uri_is_current_and_in_efo else 'EFO_OBSOLETE'
else:
trait_status = 'NOT_CONTAINED'
trait_string = '|'.join([uri, label, 'NOT_SPECIFIED', 'previously-used', trait_status])
return trait_string
def find_exact_mapping(trait_name, mappings):
for mapping in mappings:
if mapping.lower().split('|')[1] == trait_name:
return mapping
return ''
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'-t', '--traits-for-curation',
help='Table with traits for which the pipeline failed to make a confident prediction')
parser.add_argument(
'-m', '--previous-mappings',
help='Table with all mappings previously issued by EVA')
parser.add_argument(
'-o', '--output',
help='Output TSV to be loaded in Google Sheets for manual curation')
args = parser.parse_args()
outfile = open(args.final_table_for_curation, 'w')
# Load all previous mappings
previous_mappings = dict(l.rstrip().split('\t') for l in open(args.previous_mappings))
# Process all mappings which require manual curation
for line in open(args.traits_for_curation):
fields = line.rstrip().split('\t')
trait_name, trait_freq = fields[:2]
mappings = fields[2:]
previous_mapping = find_previous_mapping(trait_name, previous_mappings)
exact_mapping = find_exact_mapping(trait_name, mappings)
out_line = '\t'.join(
[trait_name, trait_freq,
# Mapping to use, if ready, comment, mapping URI, mapping label, whether exact, in EFO
'', '', '', '', '', '', '',
previous_mapping, exact_mapping] + mappings
) + '\n'
outfile.write(out_line)
| Python | 0.005603 |
662608e6a183810072cb5e9dc7545145c866cf34 | Add missing import | byceps/services/shop/order/action_registry_service.py | byceps/services/shop/order/action_registry_service.py | """
byceps.services.shop.order.action_registry_service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from ...seating.models.category import CategoryID
from ...user_badge.models.badge import BadgeID
from ..article.models.article import ArticleNumber
from .models.payment import PaymentState
from . import action_service
def register_badge_awarding(article_number: ArticleNumber, badge_id: BadgeID
) -> None:
# Award badge to orderer when order is marked as paid.
params = {
'badge_id': str(badge_id),
}
action_service.create_action(article_number, PaymentState.paid,
'create_tickets', params_create)
def register_tickets_creation(article_number: ArticleNumber,
ticket_category_id: CategoryID) -> None:
# Create tickets for order when it is marked as paid.
params_create = {
'category_id': str(ticket_category_id),
}
action_service.create_action(article_number, PaymentState.paid,
'create_tickets', params_create)
# Revoke tickets that have been created for order when it is
# canceled after being marked as paid.
params_revoke = {}
action_service.create_action(article_number, PaymentState.canceled_after_paid,
'revoke_tickets', params_revoke)
| """
byceps.services.shop.order.action_registry_service
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from ...seating.models.category import CategoryID
from ...user_badge.models.badge import BadgeID
from ..article.models.article import ArticleNumber
from .models.payment import PaymentState
def register_badge_awarding(article_number: ArticleNumber, badge_id: BadgeID
) -> None:
# Award badge to orderer when order is marked as paid.
params = {
'badge_id': str(badge_id),
}
action_service.create_action(article_number, PaymentState.paid,
'create_tickets', params_create)
def register_tickets_creation(article_number: ArticleNumber,
ticket_category_id: CategoryID) -> None:
# Create tickets for order when it is marked as paid.
params_create = {
'category_id': str(ticket_category_id),
}
action_service.create_action(article_number, PaymentState.paid,
'create_tickets', params_create)
# Revoke tickets that have been created for order when it is
# canceled after being marked as paid.
params_revoke = {}
action_service.create_action(article_number, PaymentState.canceled_after_paid,
'revoke_tickets', params_revoke)
| Python | 0.000466 |
5cdf89e64ab9dabf277a867a774a88f12e1ece5e | Fix broken exception `BadHeader` | src/pyload/core/network/http/exceptions.py | src/pyload/core/network/http/exceptions.py | # -*- coding: utf-8 -*-
PROPRIETARY_RESPONSES = {
440: "Login Timeout - The client's session has expired and must log in again.",
449: "Retry With - The server cannot honour the request because the user has not provided the required information",
451: "Redirect - Unsupported Redirect Header",
509: "Bandwidth Limit Exceeded",
520: "Unknown Error",
521: "Web Server Is Down - The origin server has refused the connection from CloudFlare",
522: "Connection Timed Out - CloudFlare could not negotiate a TCP handshake with the origin server",
523: "Origin Is Unreachable - CloudFlare could not reach the origin server",
524: "A Timeout Occurred - CloudFlare did not receive a timely HTTP response",
525: "SSL Handshake Failed - CloudFlare could not negotiate a SSL/TLS handshake with the origin server",
526: "Invalid SSL Certificate - CloudFlare could not validate the SSL/TLS certificate that the origin server presented",
527: "Railgun Error - CloudFlare requests timeout or failed after the WAN connection has been established",
530: "Site Is Frozen - Used by the Pantheon web platform to indicate a site that has been frozen due to inactivity",
}
class BadHeader(Exception):
def __init__(self, code, header=b"", content=b""):
code = int(code)
response = PROPRIETARY_RESPONSES.get(code, "unknown error code")
super().__init__(f"Bad server response: {code} {response}")
self.code = code
self.header = header
self.content = content
| # -*- coding: utf-8 -*-
PROPRIETARY_RESPONSES = {
440: "Login Timeout - The client's session has expired and must log in again.",
449: "Retry With - The server cannot honour the request because the user has not provided the required information",
451: "Redirect - Unsupported Redirect Header",
509: "Bandwidth Limit Exceeded",
520: "Unknown Error",
521: "Web Server Is Down - The origin server has refused the connection from CloudFlare",
522: "Connection Timed Out - CloudFlare could not negotiate a TCP handshake with the origin server",
523: "Origin Is Unreachable - CloudFlare could not reach the origin server",
524: "A Timeout Occurred - CloudFlare did not receive a timely HTTP response",
525: "SSL Handshake Failed - CloudFlare could not negotiate a SSL/TLS handshake with the origin server",
526: "Invalid SSL Certificate - CloudFlare could not validate the SSL/TLS certificate that the origin server presented",
527: "Railgun Error - CloudFlare requests timeout or failed after the WAN connection has been established",
530: "Site Is Frozen - Used by the Pantheon web platform to indicate a site that has been frozen due to inactivity",
}
class BadHeader(Exception):
    """Raised when an HTTP download answers with an error status code.
    The message prefers the standard HTTP reason phrase, then falls back to
    the module-level ``PROPRIETARY_RESPONSES`` table, then to a generic
    "unknown error code" marker.
    """
    def __init__(self, code, header=b"", content=b""):
        """Build the error from a status code plus the raw response data.
        :param code: HTTP status code; coerced with ``int()``.
        :param header: raw response header bytes, kept for later inspection.
        :param content: raw response body bytes, kept for later inspection.
        """
        # Bug fix: ``responses`` was referenced without ever being defined
        # or imported, so every raise died with a NameError instead of
        # raising this exception.  The stdlib provides the standard
        # code -> reason-phrase mapping.
        from http.client import responses
        int_code = int(code)
        response = responses.get(
            int_code, PROPRIETARY_RESPONSES.get(int_code, "unknown error code")
        )
        super().__init__(f"Bad server response: {code} {response}")
        self.code = int_code
        self.header = header
        self.content = content
| Python | 0.000001 |
a23c6132792bd6aff420791cf4b78a955cc0dfad | add headless | inscrawler/browser.py | inscrawler/browser.py | import os
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.chrome.options import Options
from time import sleep
class Browser:
    """Thin convenience wrapper around a headless Selenium Chrome driver."""

    def __init__(self):
        here = os.path.dirname(os.path.realpath(__file__))
        options = Options()
        options.add_argument("--headless")
        options.add_argument("--no-sandbox")
        self.driver = webdriver.Chrome(
            executable_path='%s/bin/chromedriver' % here,
            service_args=['--ignore-ssl-errors=true'],
            chrome_options=options)
        self.driver.implicitly_wait(5)

    @property
    def page_height(self):
        """Current height of the rendered document, in pixels."""
        return self.driver.execute_script('return document.body.scrollHeight')

    def get(self, url):
        """Navigate the browser to *url*."""
        self.driver.get(url)

    def find_one(self, css_selector, elem=None):
        """Return the first element matching *css_selector* (searched under
        *elem* when given), or None if nothing matches."""
        scope = elem or self.driver
        try:
            return scope.find_element(By.CSS_SELECTOR, css_selector)
        except NoSuchElementException:
            return None

    def find(self, css_selector, elem=None):
        """Return all elements matching *css_selector*, or None on failure."""
        scope = elem or self.driver
        try:
            return scope.find_elements(By.CSS_SELECTOR, css_selector)
        except NoSuchElementException:
            return None

    def scroll_down(self, wait=0.5):
        """Scroll to the bottom of the page, then pause *wait* seconds."""
        self.driver.execute_script(
            'window.scrollTo(0, document.body.scrollHeight)')
        sleep(wait)

    def scroll_up(self, wait=2):
        """Scroll back to the top of the page, then pause *wait* seconds."""
        self.driver.execute_script(
            'window.scrollTo(0, 0)')
        sleep(wait)

    def js_click(self, elem):
        """Click *elem* via JavaScript (works around obscured elements)."""
        self.driver.execute_script("arguments[0].click();", elem)

    def __del__(self):
        # Best effort: the driver may already be gone at interpreter exit.
        try:
            self.driver.quit()
        except Exception:
            pass
| import os
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.chrome.options import Options
from time import sleep
class Browser:
    """Thin convenience wrapper around a Selenium Chrome driver."""
    def __init__(self):
        dir_path = os.path.dirname(os.path.realpath(__file__))
        service_args = ['--ignore-ssl-errors=true']
        chrome_options = Options()
        # Headless mode is currently disabled; re-enable for server use.
        # chrome_options.add_argument("--headless")
        chrome_options.add_argument("--no-sandbox")
        self.driver = webdriver.Chrome(
            executable_path='%s/bin/chromedriver' % dir_path,
            service_args=service_args,
            chrome_options=chrome_options)
        self.driver.implicitly_wait(5)
    @property
    def page_height(self):
        """Current height of the rendered document, in pixels."""
        return self.driver.execute_script('return document.body.scrollHeight')
    def get(self, url):
        """Navigate the browser to *url*."""
        self.driver.get(url)
    def find_one(self, css_selector, elem=None):
        """Return the first match for *css_selector* (searched under *elem*
        when given), or None if nothing matches."""
        obj = elem or self.driver
        try:
            return obj.find_element(By.CSS_SELECTOR, css_selector)
        except NoSuchElementException:
            return None
    def find(self, css_selector, elem=None):
        """Return all matches for *css_selector*, or None on lookup failure."""
        obj = elem or self.driver
        try:
            return obj.find_elements(By.CSS_SELECTOR, css_selector)
        except NoSuchElementException:
            return None
    def scroll_down(self, wait=0.5):
        """Scroll to the page bottom, then pause *wait* seconds."""
        self.driver.execute_script(
            'window.scrollTo(0, document.body.scrollHeight)')
        sleep(wait)
    def scroll_up(self, wait=2):
        """Scroll to the page top, then pause *wait* seconds."""
        self.driver.execute_script(
            'window.scrollTo(0, 0)')
        sleep(wait)
    def js_click(self, elem):
        """Click *elem* through JavaScript (bypasses overlay issues)."""
        self.driver.execute_script("arguments[0].click();", elem)
    def __del__(self):
        # Best effort: the driver may already be gone at interpreter exit.
        try:
            self.driver.quit()
        except Exception:
            pass
| Python | 0.999995 |
b44a9dfbf26e07b9db6a31119044b8347907a5a5 | disable fid map | examples/fitsdiff2.py | examples/fitsdiff2.py | #! /usr/bin/env python
#
# This routine can diff images from its neighbors. For a series i=1,N
# this can loop over i=2,N to produce N-1 difference images
#
# B_i = A_i - A_i-1
#
from __future__ import print_function
import glob
import sys
import shutil
import os
from astropy.io import fits
import numpy as np
if len(sys.argv) == 3:
    f1 = sys.argv[1]
    f2 = sys.argv[2]
else:
    # Fail early with a usage hint instead of a NameError further down.
    sys.exit("Usage: fitsdiff2.py <file1> <file2>")

print("Using %s %s" % (f1,f2))

hdu1 = fits.open(f1)
hdu2 = fits.open(f2)
h2 = hdu2[0].header

# Promote to float32 so the subtraction below cannot wrap integer data.
d1 = hdu1[0].data.astype(np.float32)
d2 = hdu2[0].data.astype(np.float32)
print(f1,d1.min(),d1.max())
print(f2,d2.min(),d2.max())

diff = d2 - d1
max1 = d1.max()
std1 = diff.std()
# Fidelity: peak signal of the reference image over the noise of the diff.
fidelity = max1 / std1
print("MEAN/STD/FID:",diff.mean(), std1, fidelity)
fits.writeto('diff.fits',diff,h2,overwrite=True)

try:
    import matplotlib.pyplot as plt
    plt.figure(1)
    plt.hist(diff.ravel())
    plt.show()
except Exception:
    # Plotting is optional (matplotlib missing or headless display); a bare
    # except here used to swallow even KeyboardInterrupt/SystemExit.
    print("Failing to plot")
| #! /usr/bin/env python
#
# This routine can diff images from its neighbors. For a series i=1,N
# this can loop over i=2,N to produce N-1 difference images
#
# B_i = A_i - A_i-1
#
from __future__ import print_function
import glob
import sys
import shutil
import os
from astropy.io import fits
import numpy as np
if len(sys.argv) == 3:
    f1 = sys.argv[1]
    f2 = sys.argv[2]
else:
    # Fail early with a usage hint instead of a NameError further down.
    sys.exit("Usage: fitsdiff2.py <file1> <file2>")

print("Using %s %s" % (f1,f2))

hdu1 = fits.open(f1)
hdu2 = fits.open(f2)
h2 = hdu2[0].header

# Promote to float32 so the subtraction below cannot wrap integer data.
d1 = hdu1[0].data.astype(np.float32)
d2 = hdu2[0].data.astype(np.float32)
print(f1,d1.min(),d1.max())
print(f2,d2.min(),d2.max())

diff = d2 - d1
max1 = d1.max()
std1 = diff.std()
# Fidelity: peak signal of the reference image over the noise of the diff.
fidelity = max1 / std1
# Per-pixel fidelity: signal over residual, floored at std1/1.4 to avoid
# dividing by near-zero residuals.  Bug fix: np.max(a, x) treats its second
# argument as an *axis*, not a second operand; np.maximum is the intended
# element-wise maximum of the array and the scalar floor.
fid = np.abs(d2) / np.maximum(np.abs(diff), std1/1.4)
print("MEAN/STD/FID:",diff.mean(), std1, fidelity)
fits.writeto('diff.fits',diff,h2,overwrite=True)
fits.writeto('fidelity.fits',fid,h2,overwrite=True)

try:
    import matplotlib.pyplot as plt
    plt.figure(1)
    plt.hist(diff.ravel())
    plt.show()
except Exception:
    # Plotting is optional (matplotlib missing or headless display); a bare
    # except here used to swallow even KeyboardInterrupt/SystemExit.
    print("Failing to plot")
| Python | 0.000001 |
2c2d2024abf0eaa34b25038d2eb4cd5d8aeb6323 | remove defunct test | fsspec/tests/test_registry.py | fsspec/tests/test_registry.py | import sys
from unittest.mock import create_autospec, patch
import pytest
from fsspec.registry import (
ReadOnlyError,
_registry,
get_filesystem_class,
known_implementations,
register_implementation,
registry,
)
from fsspec.spec import AbstractFileSystem
try:
from importlib.metadata import EntryPoint
except ImportError: # python < 3.8
from importlib_metadata import EntryPoint
@pytest.fixture()
def clear_registry():
    """Run a test, then drop any filesystem implementations it registered."""
    try:
        yield
    finally:
        _registry.clear()
        # The "test" protocol is what these tests register; forget it too.
        known_implementations.pop("test", None)
@pytest.fixture()
def clean_imports():
    """Temporarily remove ``fsspec`` from ``sys.modules`` so the test body
    can observe a fresh import, restoring the real module afterwards."""
    try:
        real_module = sys.modules["fsspec"]
        del sys.modules["fsspec"]
        yield
    finally:
        sys.modules["fsspec"] = real_module
def test_registry_readonly():
    """The public ``registry`` mapping must reject every form of mutation."""
    # Force at least one entry to exist before probing.
    get_filesystem_class("file")
    assert "file" in registry
    assert "file" in list(registry)
    with pytest.raises(ReadOnlyError):
        del registry["file"]
    with pytest.raises(ReadOnlyError):
        registry["file"] = None
    with pytest.raises(ReadOnlyError):
        registry.clear()
def test_register_cls(clear_registry):
    """Registering a class object makes it resolvable by protocol name."""
    with pytest.raises(ValueError):
        get_filesystem_class("test")
    register_implementation("test", AbstractFileSystem)
    cls = get_filesystem_class("test")
    assert cls is AbstractFileSystem
def test_register_str(clear_registry):
    """Registering a dotted-path string resolves lazily: the class is only
    imported (and cached in ``registry``) on first lookup."""
    with pytest.raises(ValueError):
        get_filesystem_class("test")
    register_implementation("test", "fsspec.AbstractFileSystem")
    # Lazy: nothing is cached until the first get_filesystem_class call.
    assert "test" not in registry
    cls = get_filesystem_class("test")
    assert cls is AbstractFileSystem
    assert "test" in registry
def test_register_fail(clear_registry):
    """Clobber rules and error text for registrations that cannot import."""
    register_implementation("test", "doesntexist.AbstractFileSystem")
    with pytest.raises(ImportError):
        get_filesystem_class("test")
    # Re-registering the same broken path succeeds silently by default...
    register_implementation("test", "doesntexist.AbstractFileSystem")
    # ...but is refused when clobbering is explicitly disallowed.
    with pytest.raises(ValueError):
        register_implementation("test", "doesntexist.AbstractFileSystem", clobber=False)
    register_implementation(
        "test", "doesntexist.AbstractFileSystem", errtxt="hiho", clobber=True
    )
    # The custom error text must surface in the ImportError message.
    with pytest.raises(ImportError) as e:
        get_filesystem_class("test")
    assert "hiho" in str(e.value)
    # The same clobber semantics apply when registering class objects.
    register_implementation("test", AbstractFileSystem)
    with pytest.raises(ValueError):
        register_implementation("test", AbstractFileSystem, clobber=False)
    register_implementation("test", AbstractFileSystem, clobber=True)
def test_entry_points_registered_on_import(clear_registry, clean_imports):
    """Entry points advertised under ``fsspec.specs`` become registered
    protocols when ``fsspec`` itself is imported."""
    mock_ep = create_autospec(EntryPoint, module="fsspec.spec.AbstractFileSystem")
    mock_ep.name = "test"  # this can't be set in the constructor...
    # importlib.metadata joined the stdlib in Python 3.8; patch whichever
    # implementation this interpreter actually uses.
    if sys.version_info < (3, 8):
        import_location = "importlib_metadata.entry_points"
    else:
        import_location = "importlib.metadata.entry_points"
    with patch(import_location, return_value={"fsspec.specs": [mock_ep]}):
        assert "test" not in registry
        import fsspec  # noqa
        get_filesystem_class("test")
        assert "test" in registry
| import sys
from unittest.mock import create_autospec, patch
import pytest
from fsspec.registry import (
ReadOnlyError,
_registry,
get_filesystem_class,
known_implementations,
register_implementation,
registry,
)
from fsspec.spec import AbstractFileSystem
try:
from importlib.metadata import EntryPoint
except ImportError: # python < 3.8
from importlib_metadata import EntryPoint
@pytest.fixture()
def clear_registry():
try:
yield
finally:
_registry.clear()
known_implementations.pop("test", None)
@pytest.fixture()
def clean_imports():
try:
real_module = sys.modules["fsspec"]
del sys.modules["fsspec"]
yield
finally:
sys.modules["fsspec"] = real_module
@pytest.mark.parametrize(
    "protocol,module,minversion,oldversion",
    [("s3", "s3fs", "0.3.0", "0.1.0"), ("gs", "gcsfs", "0.3.0", "0.1.0")],
)
def test_minversion_s3fs(protocol, module, minversion, oldversion, monkeypatch):
    """A backend older than its declared minimum version must be rejected
    with a RuntimeError that names the required minimum version."""
    _registry.clear()
    mod = pytest.importorskip(module, minversion)
    # Bug fix: this assertion used to check "s3" unconditionally, so the
    # "gs" parametrization never exercised its own protocol.
    assert get_filesystem_class(protocol) is not None
    _registry.clear()
    monkeypatch.setattr(mod, "__version__", oldversion)
    with pytest.raises(RuntimeError, match=minversion):
        get_filesystem_class(protocol)
def test_registry_readonly():
get_filesystem_class("file")
assert "file" in registry
assert "file" in list(registry)
with pytest.raises(ReadOnlyError):
del registry["file"]
with pytest.raises(ReadOnlyError):
registry["file"] = None
with pytest.raises(ReadOnlyError):
registry.clear()
def test_register_cls(clear_registry):
with pytest.raises(ValueError):
get_filesystem_class("test")
register_implementation("test", AbstractFileSystem)
cls = get_filesystem_class("test")
assert cls is AbstractFileSystem
def test_register_str(clear_registry):
with pytest.raises(ValueError):
get_filesystem_class("test")
register_implementation("test", "fsspec.AbstractFileSystem")
assert "test" not in registry
cls = get_filesystem_class("test")
assert cls is AbstractFileSystem
assert "test" in registry
def test_register_fail(clear_registry):
register_implementation("test", "doesntexist.AbstractFileSystem")
with pytest.raises(ImportError):
get_filesystem_class("test")
register_implementation("test", "doesntexist.AbstractFileSystem")
with pytest.raises(ValueError):
register_implementation("test", "doesntexist.AbstractFileSystem", clobber=False)
register_implementation(
"test", "doesntexist.AbstractFileSystem", errtxt="hiho", clobber=True
)
with pytest.raises(ImportError) as e:
get_filesystem_class("test")
assert "hiho" in str(e.value)
register_implementation("test", AbstractFileSystem)
with pytest.raises(ValueError):
register_implementation("test", AbstractFileSystem, clobber=False)
register_implementation("test", AbstractFileSystem, clobber=True)
def test_entry_points_registered_on_import(clear_registry, clean_imports):
mock_ep = create_autospec(EntryPoint, module="fsspec.spec.AbstractFileSystem")
mock_ep.name = "test" # this can't be set in the constructor...
if sys.version_info < (3, 8):
import_location = "importlib_metadata.entry_points"
else:
import_location = "importlib.metadata.entry_points"
with patch(import_location, return_value={"fsspec.specs": [mock_ep]}):
assert "test" not in registry
import fsspec # noqa
get_filesystem_class("test")
assert "test" in registry
| Python | 0.998747 |
7a39c7a433e909b58ad0fdf8adaaa5c944e91e0e | Fix non-array samples in multinomial estimator. | src/python/cargo/statistics/multinomial.py | src/python/cargo/statistics/multinomial.py | """
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
import numpy
from cargo.log import get_logger
from cargo.statistics.base import (
Estimator,
Distribution,
)
log = get_logger(__name__)
def smooth_multinomial_mixture(mixture, epsilon = 1e-3):
    """
    Smooth every component of a multinomial mixture in place: add a small
    constant ``epsilon`` to each parameter vector, renormalize, and replace
    the component with the smoothed distribution.
    """

    log.info("heuristically smoothing a multinomial mixture")

    for domain in xrange(mixture.ndomains):
        for component in xrange(mixture.ncomponents):
            smoothed = mixture.components[domain, component].beta + epsilon
            mixture.components[domain, component] = \
                Multinomial(smoothed / numpy.sum(smoothed))
class Multinomial(Distribution):
    """
    The multinomial distribution.
    Relevant types:
    - sample: D-shaped uint ndarray
    - sequence: ND-shaped uint ndarray
    """
    def __init__(self, beta, norm = 1):
        """
        Instantiate the distribution.
        @param beta: The distribution parameter vector; presumably already
                     normalized to sum to one -- confirm at call sites.
        @param norm: Number of trials drawn per sample.
        """
        # initialization
        self._beta = numpy.asarray(beta)
        # nan_to_num maps the -inf produced by log(0) onto a large negative
        # finite float, keeping downstream arithmetic NaN-free.
        self._log_beta = numpy.nan_to_num(numpy.log(self._beta))
        self._norm = norm
        # let's not let us be idiots: freeze both arrays so shared state
        # cannot be mutated behind the distribution's back.
        self._beta.flags.writeable = False
        self._log_beta.flags.writeable = False
    def random_variate(self, random = numpy.random):
        """
        Return a sample from this distribution (D-shaped uint ndarray).
        """
        return random.multinomial(self._norm, self._beta).astype(numpy.uint)
    def random_variates(self, size, random = numpy.random):
        """
        Return an array of ``size`` samples from this distribution.
        """
        return random.multinomial(self._norm, self._beta, size).astype(numpy.uint)
    def log_likelihood(self, sample):
        """
        Return the log likelihood of C{sample} under this distribution.
        """
        # deferred import avoids a hard dependency at module load time
        from cargo.statistics._multinomial import multinomial_log_probability
        return multinomial_log_probability(self._log_beta, sample)
    def total_log_likelihood(self, samples):
        """
        Return the log likelihood of C{samples} under this distribution.
        """
        # the per-category totals are a sufficient statistic here
        return self.log_likelihood(numpy.sum(samples, 0))
    @property
    def beta(self):
        """
        Return the multinomial parameter vector (read-only array).
        """
        return self._beta
    @property
    def log_beta(self):
        """
        Return the multinomial log parameter vector (read-only array).
        """
        return self._log_beta
class MultinomialEstimator(Estimator):
    """
    Maximum-likelihood estimator for multinomial distribution parameters.
    """
    def __init__(self, norm = 1):
        """
        Initialize with the trial count ``norm`` handed to fitted distributions.
        """
        self._norm = norm
    def estimate(self, samples, random = numpy.random, weights = None):
        """
        Fit and return the maximum-likelihood Multinomial for ``samples``,
        optionally weighting each sample; any array-like input is accepted.
        """
        samples = numpy.asarray(samples)
        if weights is None:
            weights = numpy.ones(samples.shape[0])
        else:
            weights = numpy.asarray(weights)
        # the weighted per-category totals, normalized, are the ML estimate
        totals = numpy.sum(samples * weights[:, None], 0)
        return Multinomial(totals / numpy.sum(totals), self._norm)
| """
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
import numpy
from cargo.log import get_logger
from cargo.statistics.base import (
Estimator,
Distribution,
)
log = get_logger(__name__)
def smooth_multinomial_mixture(mixture, epsilon = 1e-3):
"""
Apply a smoothing term to the multinomial mixture components.
"""
log.info("heuristically smoothing a multinomial mixture")
for m in xrange(mixture.ndomains):
for k in xrange(mixture.ncomponents):
beta = mixture.components[m, k].beta + epsilon
beta /= numpy.sum(beta)
mixture.components[m, k] = Multinomial(beta)
class Multinomial(Distribution):
"""
The multinomial distribution.
Relevant types:
- sample: D-shaped uint ndarray
- sequence: ND-shaped uint ndarray
"""
def __init__(self, beta, norm = 1):
"""
Instantiate the distribution.
@param beta: The distribution parameter vector.
"""
# initialization
self._beta = numpy.asarray(beta)
self._log_beta = numpy.nan_to_num(numpy.log(self._beta))
self._norm = norm
# let's not let us be idiots
self._beta.flags.writeable = False
self._log_beta.flags.writeable = False
def random_variate(self, random = numpy.random):
"""
Return a sample from this distribution.
"""
return random.multinomial(self._norm, self._beta).astype(numpy.uint)
def random_variates(self, size, random = numpy.random):
"""
Return an array of samples from this distribution.
"""
return random.multinomial(self._norm, self._beta, size).astype(numpy.uint)
def log_likelihood(self, sample):
"""
Return the log likelihood of C{sample} under this distribution.
"""
from cargo.statistics._multinomial import multinomial_log_probability
return multinomial_log_probability(self._log_beta, sample)
def total_log_likelihood(self, samples):
"""
Return the log likelihood of C{samples} under this distribution.
"""
return self.log_likelihood(numpy.sum(samples, 0))
@property
def beta(self):
"""
Return the multinomial parameter vector.
"""
return self._beta
@property
def log_beta(self):
"""
Return the multinomial log parameter vector.
"""
return self._log_beta
class MultinomialEstimator(Estimator):
    """
    Estimate the parameters of a multinomial distribution.
    """
    def __init__(self, norm = 1):
        """
        Initialize.
        @param norm: Trial count attached to the fitted distribution.
        """
        self._norm = norm
    def estimate(self, samples, random = numpy.random, weights = None):
        """
        Return the estimated maximum likelihood distribution.
        @param samples: Sequence of count vectors; any array-like is
                        accepted and coerced to an ndarray.
        @param weights: Optional per-sample weights; defaults to uniform.
        """
        from numpy import newaxis
        # Bug fix: coerce first -- plain Python lists have no .shape, so
        # non-array samples used to raise AttributeError below.
        samples = numpy.asarray(samples)
        if weights is None:
            weights = numpy.ones(samples.shape[0])
        else:
            # coerce weights too so the fancy indexing below is valid
            weights = numpy.asarray(weights)
        mean = numpy.sum(samples * weights[:, newaxis], 0)
        mean /= numpy.sum(mean)
        return Multinomial(mean, self._norm)
| Python | 0.000021 |
2319534cecf4ed475469a8ded468b348a21947ce | Fix shape of array returned from Arnoldi matrix exponential | src/WaveBlocksND/MatrixExponential.py | src/WaveBlocksND/MatrixExponential.py | """The WaveBlocks Project
This file contains several different algorithms to compute the
matrix exponential. Currently we have an exponential based on
Pade approximations and an Arnoldi iteration method.
@author: R. Bourquin
@copyright: Copyright (C) 2007 V. Gradinaru
@copyright: Copyright (C) 2010, 2011, 2012, 2015 R. Bourquin
@license: Modified BSD License
"""
from numpy import zeros, dot, complexfloating, conjugate
from scipy.linalg import norm, expm
def matrix_exp_pade(A, v, factor):
    r"""Propagate :math:`v` under :math:`v' = A v` using a dense matrix
    exponential (scipy's Pade-approximation based ``expm``).

    :param A: The matrix :math:`A` of shape :math:`N \times N`.
    :param v: The vector :math:`v` of length :math:`N`.
    :param factor: An additional scalar factor :math:`\alpha`.
    :return: The (approximate) value of :math:`\exp\left(-i \alpha A\right) v`
    """
    propagator = expm(A * (-1.0j * factor))
    return dot(propagator, v)
def arnoldi(A, v0, k):
    r"""Arnoldi algorithm to compute the Krylov approximation :math:`H` of a matrix :math:`A`.
    :param A: The matrix :math:`A` of shape :math:`N \times N` to approximate.
    :param v0: The initial vector :math:`v_0` of length :math:`N`.
    :param k: The number :math:`k` of Krylov steps performed.
    :return: A tuple :math:`(V, H)` where :math:`V` is the large matrix of shape
             :math:`N \times (k+1)` containing the orthogonal vectors and :math:`H` is the
             small matrix of shape :math:`(k+1) \times k` containing the Krylov approximation
             of :math:`A`.
    """
    r, c = A.shape
    V = zeros((r, k+1), dtype=complexfloating)
    H = zeros((k+1, k), dtype=complexfloating)
    # NOTE(review): assumes norm(v0) != 0; a zero start vector divides by zero.
    V[:,0] = v0.reshape(-1) / norm(v0)
    # Build the Krylov basis one vector at a time (Gram-Schmidt style
    # orthogonalization against all previously accepted basis vectors).
    # NOTE: xrange implies Python 2; this is range under Python 3.
    for i in xrange(1, k+1):
        vi = dot(A, V[:,i-1])
        for j in xrange(i):
            H[j,i-1] = dot(conjugate(V[:,j]), vi)
            vi -= H[j,i-1] * V[:,j]
        H[i,i-1] = norm(vi)
        # NOTE(review): no breakdown check -- if H[i,i-1] == 0 (invariant
        # subspace reached) this divides by zero.
        V[:,i] = vi / H[i,i-1]
    return V, H
def matrix_exp_arnoldi(A, v, factor, k):
    r"""Propagate :math:`v` under :math:`v' = A v` via a Krylov subspace of
    dimension at most :math:`k` built by Arnoldi iteration.

    :param A: The matrix :math:`A` of shape :math:`N \times N`.
    :param v: The vector :math:`v` of length :math:`N`.
    :param factor: An additional scalar factor :math:`\alpha`.
    :param k: The number :math:`k` of Krylov steps performed.
    :return: The (approximate) value of :math:`\exp\left(-i \alpha A\right) v`.
    """
    steps = min(min(A.shape), k)
    V, H = arnoldi(A, v, steps)
    # exponentiate only the square upper part of the Hessenberg matrix
    small_exp = expm(H[:-1,:] * (-1.0j * factor))
    # project back onto the full space; the first column carries the result
    result = norm(v) * dot(V[:,:-1], small_exp[:,0])
    return result.reshape(v.shape)
| """The WaveBlocks Project
This file contains several different algorithms to compute the
matrix exponential. Currently we have an exponential based on
Pade approximations and an Arnoldi iteration method.
@author: R. Bourquin
@copyright: Copyright (C) 2007 V. Gradinaru
@copyright: Copyright (C) 2010, 2011, 2012, 2015 R. Bourquin
@license: Modified BSD License
"""
from numpy import zeros, dot, complexfloating, conjugate
from scipy.linalg import norm, expm
def matrix_exp_pade(A, v, factor):
r"""Compute the solution of :math:`v' = A v` with a full
matrix exponential via Pade approximation.
:param A: The matrix :math:`A` of shape :math:`N \times N`.
:param v: The vector :math:`v` of length :math:`N`.
:param factor: An additional scalar factor :math:`\alpha`.
:return: The (approximate) value of :math:`\exp\left(-i \alpha A\right) v`
"""
return dot(expm(-1.0j*A*factor), v)
def arnoldi(A, v0, k):
r"""Arnoldi algorithm to compute the Krylov approximation :math:`H` of a matrix :math:`A`.
:param A: The matrix :math:`A` of shape :math:`N \times N` to approximate.
:param v0: The initial vector :math:`v_0` of length :math:`N`.
:param k: The number :math:`k` of Krylov steps performed.
:return: A tuple :math:`(V, H)` where :math:`V` is the large matrix of shape
:math:`N \times (k+1)` containing the orthogonal vectors and :math:`H` is the
small matrix of shape :math:`(k+1) \times k` containing the Krylov approximation
of :math:`A`.
"""
r, c = A.shape
V = zeros((r, k+1), dtype=complexfloating)
H = zeros((k+1, k), dtype=complexfloating)
V[:,0] = v0.reshape(-1) / norm(v0)
for i in xrange(1, k+1):
vi = dot(A, V[:,i-1])
for j in xrange(i):
H[j,i-1] = dot(conjugate(V[:,j]), vi)
vi -= H[j,i-1] * V[:,j]
H[i,i-1] = norm(vi)
V[:,i] = vi / H[i,i-1]
return V, H
def matrix_exp_arnoldi(A, v, factor, k):
    r"""Compute the solution of :math:`v' = A v` via :math:`k`
    steps of a the Arnoldi krylov method.

    :param A: The matrix :math:`A` of shape :math:`N \times N`.
    :param v: The vector :math:`v` of length :math:`N`.
    :param factor: An additional scalar factor :math:`\alpha`.
    :param k: The number :math:`k` of Krylov steps performed.
    :return: The (approximate) value of :math:`\exp\left(-i \alpha A\right) v`.
    """
    V, H = arnoldi(A, v, min(min(A.shape), k))
    eH = expm(-1.0j*factor*H[:-1,:])
    r = norm(v) * dot(V[:,:-1], eH[:,0])
    # Bug fix: return the result in the same shape the caller passed in
    # (e.g. a column vector stays a column vector) instead of the flat
    # array produced by the dot product above.
    return r.reshape(v.shape)
| Python | 0.000008 |
304760823382e72efb8f98ab3b5a98147f98c0e8 | Improve userlist liveness guarentees | geventirc/channel.py | geventirc/channel.py |
import gevent
from geventirc.message import Join, Part, Privmsg
from geventirc.replycodes import replies
from geventirc.userlist import UserList
class Channel(object):
    """Object representing an IRC channel.
    This is the reccomended way to do operations like joins, or tracking user lists.
    A channel may be join()ed and part()ed multiple times.
    The user list will be the most recent info available, or None before first join.
    In particular, the user list can be considered up to date iff users_ready is set.
    Can be used in a with statement to join then part.
    """
    # Seconds to wait for the server's NAMES reply before giving up.
    USERS_READY_TIMEOUT = 10
    joined = False
    userlist = None
    def __init__(self, client, name):
        self.client = client
        self.name = name
        # Bug fix: users_ready used to be a class attribute, i.e. a single
        # gevent Event shared by *every* Channel instance -- one channel's
        # join would spuriously mark all channels' user lists as ready.
        # Each channel now gets its own Event.
        self.users_ready = gevent.event.Event()
        self.client.add_handler(self._recv_part, command=Part, channels=lambda value: self.name in value)
        self.client.add_handler(self._recv_end_of_names, command=replies.ENDOFNAMES, params=[None, self.name, None])
    def join(self, block=True):
        """Join the channel if not already joined. If block=True, do not return until name list is received."""
        if self.joined: return
        self.joined = True
        self.users_ready.clear()
        self.userlist = UserList(self.client, self.name)
        self.client.send(Join(self.name))
        if not block: return
        self.users_ready.wait(self.USERS_READY_TIMEOUT)
    def part(self, block=True):
        """Part from the channel if joined. If block=True, do not return until fully parted."""
        if not self.joined: return
        self.joined = False
        @gevent.spawn
        def _part():
            # we delay unregistering until the part is sent.
            self.client.send(Part(self.name), block=True)
            self.userlist.unregister()
        if block: _part.get()
    def msg(self, content, block=False):
        """Send a PRIVMSG to the channel."""
        self.client.msg(self.name, content, block=block)
    def action(self, content, block=False):
        """Send a CTCP ACTION ("/me") to the channel."""
        self.client.send(Privmsg.action(self.name, content), block=block)
    def _recv_end_of_names(self, client, msg):
        # server finished sending the NAMES list; user list is now current
        self.users_ready.set()
    def _recv_part(self, client, msg):
        # we receive a forced PART from the server
        self.joined = False
        self.userlist.unregister()
    def __enter__(self):
        self.join()
    def __exit__(self, *exc_info):
        # if we're cleaning up after an exception, ignore errors in part()
        # as they are most likely a carry-on error or same root cause.
        try:
            self.part()
        except Exception:
            if exc_info == (None, None, None):
                raise
|
import gevent
from geventirc.message import Join, Part, Privmsg
from geventirc.replycodes import replies
from geventirc.userlist import UserList
class Channel(object):
    """Object representing an IRC channel.
    This is the reccomended way to do operations like joins, or tracking user lists.
    A channel may be join()ed and part()ed multiple times.
    The user list will be the most recent info available, or None before first join.
    Can be used in a with statement to join then part.
    """
    joined = False
    userlist = None
    def __init__(self, client, name):
        self.client = client
        self.name = name
        # forced PARTs from the server must update our state too
        self.client.add_handler(self._recv_part, command=Part, channels=lambda value: self.name in value)
    def join(self, block=True):
        """Join the channel if not already joined. If block=True, do not return until name list is received."""
        if self.joined: return
        self.joined = True
        self.userlist = UserList(self.client, self.name)
        self.client.send(Join(self.name))
        if not block: return
        # ENDOFNAMES signals that the complete user list has been received
        self.client.wait_for(command=replies.ENDOFNAMES, params=[None, self.name, None])
    def part(self, block=True):
        """Part from the channel if joined. If block=True, do not return until fully parted."""
        if not self.joined: return
        self.joined = False
        @gevent.spawn
        def _part():
            # we delay unregistering until the part is sent.
            self.client.send(Part(self.name), block=True)
            self.userlist.unregister()
        if block: _part.get()
    def msg(self, content, block=False):
        # plain PRIVMSG to the channel
        self.client.msg(self.name, content, block=block)
    def action(self, content, block=False):
        # CTCP ACTION ("/me") to the channel
        self.client.send(Privmsg.action(self.name, content), block=block)
    def _recv_part(self, client, msg):
        # we receive a forced PART from the server
        self.joined = False
        self.userlist.unregister()
    def __enter__(self):
        self.join()
    def __exit__(self, *exc_info):
        # if we're cleaning up after an exception, ignore errors in part()
        # as they are most likely a carry-on error or same root cause.
        try:
            self.part()
        except Exception:
            if exc_info == (None, None, None):
                raise
| Python | 0 |
cda111aecdd650d1f08b75e2c92774526bf9e06d | Change Misc to Miscellaneous Utilities | bipy/util/misc.py | bipy/util/misc.py | #!/usr/bin/env python
r"""
Miscellaneous Utilities (:mod:`bipy.util.misc`)
============================
.. currentmodule:: bipy.util.misc
This module provides miscellaneous useful utility classes and methods that do
not fit in any specific module.
Functions
---------
.. autosummary::
:toctree: generated/
safe_md5
"""
from __future__ import division
#-----------------------------------------------------------------------------
# Copyright (c) 2013--, bipy development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import hashlib
def safe_md5(open_file, block_size=2**20):
    """Compute the MD5 digest of an open file without loading it into memory.

    Parameters
    ----------
    open_file : file object
        open handle to the file whose checksum is wanted
    block_size : int, optional
        number of bytes read per iteration (default 1 MiB)

    Returns
    -------
    md5 : md5 object from the hashlib module
        digest object updated with the complete file contents

    Notes
    -----
    This method is based on the answers given in:
    http://stackoverflow.com/a/1131255/379593

    Examples
    --------
    >>> from StringIO import StringIO
    >>> from bipy.util.misc import safe_md5
    >>> fd = StringIO("foo bar baz") # open file like object
    >>> x = safe_md5(fd)
    >>> x.hexdigest()
    'ab07acbb1e496801937adfa772424bf7'
    >>> fd.close()
    """
    checksum = hashlib.md5()
    # Pull one block at a time; an empty read means end-of-file.
    while True:
        chunk = open_file.read(block_size)
        if not chunk:
            break
        checksum.update(chunk)
    return checksum
| #!/usr/bin/env python
r"""
Misc (:mod:`bipy.util.misc`)
============================
.. currentmodule:: bipy.util.misc
This module provides miscellaneous useful utility classes and methods that do
not fit in any specific module.
Functions
---------
.. autosummary::
:toctree: generated/
safe_md5
"""
from __future__ import division
#-----------------------------------------------------------------------------
# Copyright (c) 2013--, bipy development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import hashlib
def safe_md5(open_file, block_size=2**20):
"""Computes an md5 sum without loading the file into memory
Parameters
----------
open_file : file object
open file handle to the archive to compute the checksum
block_size : int, optional
size of the block taken per iteration
Returns
-------
md5 : md5 object from the hashlib module
object with the loaded file
Notes
-----
This method is based on the answers given in:
http://stackoverflow.com/a/1131255/379593
Examples
--------
>>> from StringIO import StringIO
>>> from bipy.util.misc import safe_md5
>>> fd = StringIO("foo bar baz") # open file like object
>>> x = safe_md5(fd)
>>> x.hexdigest()
'ab07acbb1e496801937adfa772424bf7'
>>> fd.close()
"""
md5 = hashlib.md5()
data = True
while data:
data = open_file.read(block_size)
if data:
md5.update(data)
return md5
| Python | 0 |
0a0d55a2a9aa07b0841b2a221e8b7bc9b844b976 | update version numbers and project details | butter/__init__.py | butter/__init__.py | #!/usr/bin/env python
"""Butter: library to give python access to linux's more lower level features"""
__author__ = "Da_Blitz"
__version__ = "0.2"
__email__ = "code@pocketnix.org"
__license__ = "BSD (3 Clause)"
__url__ = "http://code.pocketnix.org/butter"
| #!/usr/bin/env python
"""Butter: library to give python access to linux's more lower level features"""
__author__ = "Da_Blitz"
__version__ = "0.1"
__email__ = "code@pocketnix.org"
__license__ = "BSD (3 Clause)"
__url__ = "http://code.pocketnix.org/"
__testsuite__ = "tests.testall"
| Python | 0 |
39d4f9c0df535c13c6f37eaaccaaeabb0b92b8e0 | Bump version number | fabric_colors/_version.py | fabric_colors/_version.py | __version__ = "0.9.42"
| __version__ = "0.9.41"
| Python | 0.000002 |
4e09200b83f986ce333f5b1143e13a4b2d7df2ce | determine site activity on process_view | pykeg/src/pykeg/web/middleware.py | pykeg/src/pykeg/web/middleware.py | # Copyright 2011 Mike Wakerly <opensource@hoho.com>
#
# This file is part of the Pykeg package of the Kegbot project.
# For more information on Pykeg or Kegbot, see http://kegbot.org/
#
# Pykeg is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Pykeg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Pykeg. If not, see <http://www.gnu.org/licenses/>.
from pykeg.core import models
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
class KegbotSiteMiddleware:
    """Resolves the per-request KegbotSite named by the ``kbsite_name`` URL kwarg."""
    def process_view(self, request, view_func, view_args, view_kwargs):
        # Pop the kwarg so it is not forwarded to the view function itself.
        kbsite_name = view_kwargs.pop('kbsite_name', None)
        if kbsite_name is not None:
            if kbsite_name == '':
                # An empty name in the URL selects the implicit default site.
                kbsite_name = 'default'
            # 404 when the named site does not exist.
            request.kbsite = get_object_or_404(models.KegbotSite, name=kbsite_name)
        return None
class SiteActiveMiddleware:
    """Middleware which throws 503s when KegbotSite.is_active is false."""
    # Path prefixes that stay reachable even while the site is inactive.
    ALLOWED_PATHS = (
        '/accounts/login/',
        '/admin/',
        '/site_media/',
    )
    def _path_allowed(self, path):
        """Return True when *path* falls under a whitelisted prefix."""
        # str.startswith accepts a tuple of prefixes directly.
        return path.startswith(self.ALLOWED_PATHS)
    def process_view(self, request, view_func, view_args, view_kwargs):
        """Return a 503 response for inactive sites, or None to proceed."""
        if not hasattr(request, 'kbsite'):
            # No site was resolved for this request; nothing to enforce.
            return None
        permitted = (
            request.kbsite.is_active
            or self._path_allowed(request.path)
            or request.user.is_staff
            or request.user.is_superuser
        )
        if permitted:
            return None
        return HttpResponse('Site temporarily unavailable', status=503)
| # Copyright 2011 Mike Wakerly <opensource@hoho.com>
#
# This file is part of the Pykeg package of the Kegbot project.
# For more information on Pykeg or Kegbot, see http://kegbot.org/
#
# Pykeg is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Pykeg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Pykeg. If not, see <http://www.gnu.org/licenses/>.
from pykeg.core import models
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
class KegbotSiteMiddleware:
def process_view(self, request, view_func, view_args, view_kwargs):
kbsite_name = view_kwargs.pop('kbsite_name', None)
if kbsite_name is not None:
if kbsite_name == '':
kbsite_name = 'default'
request.kbsite = get_object_or_404(models.KegbotSite, name=kbsite_name)
return None
class SiteActiveMiddleware:
"""Middleware which throws 503s when KegbotSite.is_active is false."""
ALLOWED_PATHS = (
'/accounts/login/',
'/admin/',
'/site_media/',
)
def _path_allowed(self, path):
for p in self.ALLOWED_PATHS:
if path.startswith(p):
return True
return False
def process_request(self, request):
kbsite = None
if hasattr(request, 'kbsite'):
kbsite = request.kbsite
# We have a KegbotSite, and that site is active: nothing to do.
if kbsite and kbsite.is_active:
return None
# If the request is for a whitelisted path, allow it.
if self._path_allowed(request.path):
return None
# Allow staff/superusers access if inactive.
if request.user.is_staff or request.user.is_superuser:
return None
else:
return HttpResponse('Site temporarily unavailable', status=503)
| Python | 0.00001 |
7b21270ca893e90790a0a60c8417df12052ea9a0 | Add alternate MDP-ID aleph API if the first fails | falcom/api/reject_list.py | falcom/api/reject_list.py | # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from os import environ
from urllib.request import urlopen
from .uri import URI, APIQuerier
from .marc import get_marc_data_from_xml
from .worldcat import get_worldcat_data_from_json
from .hathi import get_oclc_counts_from_json
from .common import ReadOnlyDataStructure
AlephURI = URI("http://mirlyn-aleph.lib.umich.edu/cgi-bin/bc2meta")
WorldCatURI = URI("http://www.worldcat.org/webservices/catalog"
"/content/libraries/{oclc}")
HathiURI = URI("http://catalog.hathitrust.org/api/volumes/brief"
"/oclc/{oclc}.json")
aleph_api = APIQuerier(AlephURI, url_opener=urlopen)
worldcat_api = APIQuerier(WorldCatURI, url_opener=urlopen)
hathi_api = APIQuerier(HathiURI, url_opener=urlopen)
wc_key = environ.get("MDP_REJECT_WC_KEY", "none")
class VolumeDataFromBarcode:
def __init__ (self, barcode):
self.barcode = barcode
self.marc = get_marc_data_from_xml(aleph_api.get(
id=barcode,
type="bc",
schema="marcxml"))
if not self.marc:
self.marc = get_marc_data_from_xml(aleph_api.get(
id="mdp." + barcode,
schema="marcxml"))
if self.marc.oclc is None:
worldcat, hathi = None, None
else:
worldcat = worldcat_api.get(
oclc=self.marc.oclc,
wskey=wc_key,
format="json",
maximumLibraries="50")
hathi = hathi_api.get(oclc=self.marc.oclc)
self.worldcat = get_worldcat_data_from_json(worldcat)
self.hathi = get_oclc_counts_from_json(hathi, "mdp." + barcode)
| # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from os import environ
from urllib.request import urlopen
from .uri import URI, APIQuerier
from .marc import get_marc_data_from_xml
from .worldcat import get_worldcat_data_from_json
from .hathi import get_oclc_counts_from_json
from .common import ReadOnlyDataStructure
AlephURI = URI("http://mirlyn-aleph.lib.umich.edu/cgi-bin/bc2meta")
WorldCatURI = URI("http://www.worldcat.org/webservices/catalog"
"/content/libraries/{oclc}")
HathiURI = URI("http://catalog.hathitrust.org/api/volumes/brief"
"/oclc/{oclc}.json")
aleph_api = APIQuerier(AlephURI, url_opener=urlopen)
worldcat_api = APIQuerier(WorldCatURI, url_opener=urlopen)
hathi_api = APIQuerier(HathiURI, url_opener=urlopen)
wc_key = environ.get("MDP_REJECT_WC_KEY", "none")
class VolumeDataFromBarcode:
def __init__ (self, barcode):
self.barcode = barcode
self.marc = get_marc_data_from_xml(aleph_api.get(
id=barcode,
type="bc",
schema="marcxml"))
if self.marc.oclc is None:
worldcat, hathi = None, None
else:
worldcat = worldcat_api.get(
oclc=self.marc.oclc,
wskey=wc_key,
format="json",
maximumLibraries="50")
hathi = hathi_api.get(oclc=self.marc.oclc)
self.worldcat = get_worldcat_data_from_json(worldcat)
self.hathi = get_oclc_counts_from_json(hathi, "mdp." + barcode)
| Python | 0.000005 |
fae13bf07e3b336f52911cb23291c6db029922cb | fix timing issues with new test | selfdrive/controls/tests/test_startup.py | selfdrive/controls/tests/test_startup.py | #!/usr/bin/env python3
import time
import unittest
from parameterized import parameterized
from cereal import log, car
import cereal.messaging as messaging
from common.params import Params
from selfdrive.boardd.boardd_api_impl import can_list_to_can_capnp # pylint: disable=no-name-in-module,import-error
from selfdrive.car.fingerprints import _FINGERPRINTS
from selfdrive.car.hyundai.values import CAR as HYUNDAI
from selfdrive.car.mazda.values import CAR as MAZDA
from selfdrive.controls.lib.events import EVENT_NAME
from selfdrive.test.helpers import with_processes
EventName = car.CarEvent.EventName
class TestStartup(unittest.TestCase):
@parameterized.expand([
# TODO: test EventName.startup for release branches
# officially supported car
(EventName.startupMaster, HYUNDAI.SONATA, False),
(EventName.startupMaster, HYUNDAI.SONATA, True),
# community supported car
(EventName.startupMaster, HYUNDAI.KIA_STINGER, True),
(EventName.startupMaster, HYUNDAI.KIA_STINGER, False),
# dashcamOnly car
(EventName.startupMaster, MAZDA.CX5, True),
(EventName.startupMaster, MAZDA.CX5, False),
# unrecognized car
(EventName.startupNoCar, None, True),
(EventName.startupNoCar, None, False),
])
@with_processes(['controlsd'])
def test_startup_alert(self, expected_event, car, toggle_enabled):
# TODO: this should be done without any real sockets
controls_sock = messaging.sub_sock("controlsState")
pm = messaging.PubMaster(['can', 'health'])
Params().put("CommunityFeaturesToggle", b"1" if toggle_enabled else b"0")
time.sleep(2) # wait for controlsd to be ready
health = messaging.new_message('health')
health.health.hwType = log.HealthData.HwType.uno
pm.send('health', health)
# fingerprint
if car is None:
finger = {addr: 1 for addr in range(1, 100)}
else:
finger = _FINGERPRINTS[car][0]
for _ in range(500):
msgs = [[addr, 0, b'\x00'*length, 0] for addr, length in finger.items()]
pm.send('can', can_list_to_can_capnp(msgs))
time.sleep(0.01)
msgs = messaging.drain_sock(controls_sock)
if len(msgs):
event_name = msgs[0].controlsState.alertType.split("/")[0]
self.assertEqual(EVENT_NAME[expected_event], event_name,
f"expected {EVENT_NAME[expected_event]} for '{car}', got {event_name}")
break
else:
self.fail(f"failed to fingerprint {car}")
if __name__ == "__main__":
unittest.main()
| #!/usr/bin/env python3
import time
import unittest
from parameterized import parameterized
from cereal import log, car
import cereal.messaging as messaging
from common.params import Params
from selfdrive.boardd.boardd_api_impl import can_list_to_can_capnp # pylint: disable=no-name-in-module,import-error
from selfdrive.car.fingerprints import _FINGERPRINTS
from selfdrive.car.hyundai.values import CAR as HYUNDAI
from selfdrive.car.mazda.values import CAR as MAZDA
from selfdrive.controls.lib.events import EVENT_NAME
from selfdrive.test.helpers import with_processes
EventName = car.CarEvent.EventName
class TestStartup(unittest.TestCase):
@parameterized.expand([
# TODO: test EventName.startup for release branches
# officially supported car
(EventName.startupMaster, HYUNDAI.SONATA, False),
(EventName.startupMaster, HYUNDAI.SONATA, True),
# community supported car
(EventName.startupMaster, HYUNDAI.KIA_STINGER, True),
(EventName.startupMaster, HYUNDAI.KIA_STINGER, False),
# dashcamOnly car
(EventName.startupMaster, MAZDA.CX5, True),
(EventName.startupMaster, MAZDA.CX5, False),
# unrecognized car
(EventName.startupNoCar, None, True),
(EventName.startupNoCar, None, False),
])
@with_processes(['controlsd'])
def test_startup_alert(self, expected_event, car, toggle_enabled):
# TODO: this should be done without any real sockets
sm = messaging.SubMaster(['controlsState'])
pm = messaging.PubMaster(['can', 'health'])
Params().put("CommunityFeaturesToggle", b"1" if toggle_enabled else b"0")
time.sleep(2) # wait for controlsd to be ready
health = messaging.new_message('health')
health.health.hwType = log.HealthData.HwType.uno
pm.send('health', health)
# fingerprint
if car is None:
finger = {addr: 1 for addr in range(1, 100)}
else:
finger = _FINGERPRINTS[car][0]
for _ in range(500):
msgs = [[addr, 0, b'\x00'*length, 0] for addr, length in finger.items()]
pm.send('can', can_list_to_can_capnp(msgs))
time.sleep(0.05)
sm.update(0)
if sm.updated["controlsState"]:
event_name = sm["controlsState"].alertType.split("/")[0]
self.assertEqual(EVENT_NAME[expected_event], event_name,
f"expected {EVENT_NAME[expected_event]} for '{car}', got {event_name}")
break
else:
self.fail(f"failed to fingerprint {car}")
if __name__ == "__main__":
unittest.main()
| Python | 0.000001 |
c4966e274c885da4e5d252143b9feb260c8f78f5 | Correct config path finding for Linux. | pokemon_go_hunter/watch_twitter.py | pokemon_go_hunter/watch_twitter.py | import logging
import os
import re
import time
import twitter
import yaml
from pushbullet import Pushbullet
def get_config():
config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '../config.yaml')
with open(config_path) as f:
return yaml.load(f)
def get_twitter_api(config):
api_config = config['API']
twitter_api_config = api_config['Twitter']
return twitter.Api(consumer_key=twitter_api_config['consumer key'],
consumer_secret=twitter_api_config['consumer secret'],
access_token_key=twitter_api_config['access token key'],
access_token_secret=twitter_api_config['access token secret'])
def get_pushbullet_api(config):
api_config = config['API']
pushbullet_api_config = api_config['Pushbullet']
return Pushbullet(api_key=pushbullet_api_config['api key'],
encryption_password=pushbullet_api_config['encryption password'])
def get_pushbullet_device(pb, config):
devices = pb.devices
result = None
for d in devices:
if d.nickname == config['API']['Pushbullet']['device name']:
result = d
assert result is not None, "Couldn't find Pushbullet device."
return result
_config = get_config()
_twitter_api = get_twitter_api(_config)
_pb = get_pushbullet_api(_config)
_device = get_pushbullet_device(_pb, _config)
def main(screen_name: str,
pattern,
callback,
period_s: int = 61):
logging.info("Waiting for tweets.")
since_id = None
while True:
statuses = _twitter_api.GetUserTimeline(screen_name=screen_name,
since_id=since_id,
trim_user=True)
for status in statuses:
if since_id is None:
since_id = status.id
else:
since_id = max(since_id, status.id)
text = status.text
m = pattern.search(text)
logging.debug(text)
if m:
callback(status)
time.sleep(period_s)
def notify(status):
text = status.text
for url in status.urls:
text = text.replace(url.url, url.expanded_url)
logging.info("Sending: \"%s\".", text)
_pb.push_sms(_device, 'TODO', text)
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s [%(levelname)s] - %(name)s:%(filename)s:%(funcName)s\n%(message)s',
level=logging.INFO)
# TODO Read from config.
# Example.
main(screen_name='montrealpokemap',
pattern=re.compile(r'\b(Unown)\b', re.IGNORECASE),
callback=notify)
| import logging
import os
import re
import time
import twitter
import yaml
from pushbullet import Pushbullet
def get_config():
config_path = os.path.join(os.path.abspath(__file__), '../../config.yaml')
with open(config_path) as f:
return yaml.load(f)
def get_twitter_api(config):
api_config = config['API']
twitter_api_config = api_config['Twitter']
return twitter.Api(consumer_key=twitter_api_config['consumer key'],
consumer_secret=twitter_api_config['consumer secret'],
access_token_key=twitter_api_config['access token key'],
access_token_secret=twitter_api_config['access token secret'])
def get_pushbullet_api(config):
api_config = config['API']
pushbullet_api_config = api_config['Pushbullet']
return Pushbullet(api_key=pushbullet_api_config['api key'],
encryption_password=pushbullet_api_config['encryption password'])
def get_pushbullet_device(pb, config):
devices = pb.devices
result = None
for d in devices:
if d.nickname == config['API']['Pushbullet']['device name']:
result = d
assert result is not None, "Couldn't find Pushbullet device."
return result
_config = get_config()
_twitter_api = get_twitter_api(_config)
_pb = get_pushbullet_api(_config)
_device = get_pushbullet_device(_pb, _config)
def main(screen_name: str,
pattern,
callback,
period_s: int = 61):
logging.info("Waiting for tweets.")
since_id = None
while True:
statuses = _twitter_api.GetUserTimeline(screen_name=screen_name,
since_id=since_id,
trim_user=True)
for status in statuses:
if since_id is None:
since_id = status.id
else:
since_id = max(since_id, status.id)
text = status.text
m = pattern.search(text)
logging.debug(text)
if m:
callback(status)
time.sleep(period_s)
def notify(status):
text = status.text
for url in status.urls:
text = text.replace(url.url, url.expanded_url)
logging.info("Sending: \"%s\".", text)
_pb.push_sms(_device, 'TODO', text)
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s [%(levelname)s] - %(name)s:%(filename)s:%(funcName)s\n%(message)s',
level=logging.INFO)
# TODO Read from config.
# Example.
main(screen_name='montrealpokemap',
pattern=re.compile(r'\b(Unown)\b', re.IGNORECASE),
callback=notify)
| Python | 0 |
8b78463ac8d8953dffb3c3ecd5e9e1e4396da106 | Make sure set_mpl_backend works if qtpy is not installed | glue/_mpl_backend.py | glue/_mpl_backend.py | class MatplotlibBackendSetter(object):
"""
Import hook to make sure the proper Qt backend is set when importing
Matplotlib.
"""
enabled = True
def find_module(self, mod_name, pth):
if self.enabled and 'matplotlib' in mod_name:
self.enabled = False
set_mpl_backend()
def find_spec(self, name, import_path, target_module=None):
pass
def set_mpl_backend():
try:
from qtpy import PYQT5
except:
# If Qt isn't available, we don't have to worry about
# setting the backend
return
from matplotlib import rcParams, rcdefaults
# standardize mpl setup
rcdefaults()
if PYQT5:
rcParams['backend'] = 'Qt5Agg'
else:
rcParams['backend'] = 'Qt4Agg'
# The following is a workaround for the fact that Matplotlib checks the
# rcParams at import time, not at run-time. I have opened an issue with
# Matplotlib here: https://github.com/matplotlib/matplotlib/issues/5513
from matplotlib import get_backend
from matplotlib import backends
backends.backend = get_backend()
| class MatplotlibBackendSetter(object):
"""
Import hook to make sure the proper Qt backend is set when importing
Matplotlib.
"""
enabled = True
def find_module(self, mod_name, pth):
if self.enabled and 'matplotlib' in mod_name:
self.enabled = False
set_mpl_backend()
def find_spec(self, name, import_path, target_module=None):
pass
def set_mpl_backend():
from matplotlib import rcParams, rcdefaults
# standardize mpl setup
rcdefaults()
from qtpy import PYQT5
if PYQT5:
rcParams['backend'] = 'Qt5Agg'
else:
rcParams['backend'] = 'Qt4Agg'
# The following is a workaround for the fact that Matplotlib checks the
# rcParams at import time, not at run-time. I have opened an issue with
# Matplotlib here: https://github.com/matplotlib/matplotlib/issues/5513
from matplotlib import get_backend
from matplotlib import backends
backends.backend = get_backend()
| Python | 0 |
1bbd84111b142daf9301842f1cb411983fccedef | Comment change. | gnuplot-py/gp_mac.py | gnuplot-py/gp_mac.py | # $Id$
# Copyright (C) 1999 Michael Haggerty <mhagger@alum.mit.edu>
# Thanks to Tony Ingraldi and Noboru Yamamoto for their contributions.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version. This program is distributed in the
# hope that it will be useful, but WITHOUT ANY WARRANTY; without even
# the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details; it is
# available at <http://www.fsf.org/copyleft/gpl.html>, or by writing to
# the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
"""gp_mac -- an interface to gnuplot for the Macintosh.
"""
__cvs_version__ = '$Revision$'
import os, string
import Errors
# ############ Configuration variables: ################################
class GnuplotOpts:
"""The configuration options for gnuplot on the Macintosh.
See gp.py for details about the meaning of these options. Please
let me know if you know better choices for these settings."""
# The '-persist' option is not supported on the Mac:
recognizes_persist = 0
# Apparently the Mac can use binary data:
recognizes_binary_splot = 1
# Apparently the Mac can not use inline data:
prefer_inline_data = 0
# The default choice for the 'set term' command (to display on screen).
# Terminal types are different in Gnuplot 3.7.1c.
# For earlier versions, this was default_term = 'macintosh'
default_term = 'pict'
# I don't know how to print directly to a printer on the Mac:
default_lpr = '| lpr'
# Used the 'enhanced' option of postscript by default? Set to
# None (*not* 0!) if your version of gnuplot doesn't support
# enhanced postscript.
prefer_enhanced_postscript = 1
# ############ End of configuration options ############################
# The Macintosh doesn't support pipes so communication is via
# AppleEvents.
import gnuplot_Suites
import Required_Suite
import aetools
# Mac doesn't recognize persist.
def test_persist():
return 0
class _GNUPLOT(aetools.TalkTo,
Required_Suite.Required_Suite,
gnuplot_Suites.gnuplot_Suite,
gnuplot_Suites.odds_and_ends,
gnuplot_Suites.Standard_Suite,
gnuplot_Suites.Miscellaneous_Events):
"""Start a gnuplot program and emulate a pipe to it."""
def __init__(self):
aetools.TalkTo.__init__(self, '{GP}', start=1)
class GnuplotProcess:
"""Unsophisticated interface to a running gnuplot program.
See gp_unix.GnuplotProcess for usage information.
"""
def __init__(self, persist=0):
"""Start a gnuplot process.
Create a 'GnuplotProcess' object. This starts a gnuplot
program and prepares to write commands to it.
Keyword arguments:
'persist' -- the '-persist' option is not supported on the
Macintosh so this argument must be zero.
"""
if persist:
raise Errors.OptionError(
'-persist is not supported on the Macintosh!')
self.gnuplot = _GNUPLOT()
# forward close method:
self.close = self.gnuplot.quit
def write(self, s):
"""Mac gnuplot apparently requires '\r' to end statements."""
self.gnuplot.gnuexec(string.replace(s, '\n', os.linesep))
def flush(self):
pass
def __call__(self, s):
"""Send a command string to gnuplot, for immediate execution."""
# Apple Script doesn't seem to need the trailing '\n'.
self.write(s)
self.flush()
| # $Id$
# Copyright (C) 1999 Michael Haggerty <mhagger@alum.mit.edu>
# Thanks to Tony Ingraldi and Noboru Yamamoto for their contributions.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version. This program is distributed in the
# hope that it will be useful, but WITHOUT ANY WARRANTY; without even
# the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details; it is
# available at <http://www.fsf.org/copyleft/gpl.html>, or by writing to
# the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
"""gp_mac -- an interface to gnuplot for the Macintosh.
"""
__cvs_version__ = '$Revision$'
import os, string
import Errors
# ############ Configuration variables: ################################
class GnuplotOpts:
"""The configuration options for gnuplot on the Macintosh.
See gp.py for details about the meaning of these options. Please
let me know if you know better choices for these settings."""
# The '-persist' option is not supported on the Mac:
recognizes_persist = 0
# Apparently the Mac can use binary data:
recognizes_binary_splot = 1
# Apparently the Mac can not use inline data:
prefer_inline_data = 0
# The default choice for the 'set term' command (to display on screen):
default_term = 'pict'
# I don't know how to print directly to a printer on the Mac:
default_lpr = '| lpr'
# Used the 'enhanced' option of postscript by default? Set to
# None (*not* 0!) if your version of gnuplot doesn't support
# enhanced postscript.
prefer_enhanced_postscript = 1
# ############ End of configuration options ############################
# The Macintosh doesn't support pipes so communication is via
# AppleEvents.
import gnuplot_Suites
import Required_Suite
import aetools
# Mac doesn't recognize persist.
def test_persist():
return 0
class _GNUPLOT(aetools.TalkTo,
Required_Suite.Required_Suite,
gnuplot_Suites.gnuplot_Suite,
gnuplot_Suites.odds_and_ends,
gnuplot_Suites.Standard_Suite,
gnuplot_Suites.Miscellaneous_Events):
"""Start a gnuplot program and emulate a pipe to it."""
def __init__(self):
aetools.TalkTo.__init__(self, '{GP}', start=1)
class GnuplotProcess:
"""Unsophisticated interface to a running gnuplot program.
See gp_unix.GnuplotProcess for usage information.
"""
def __init__(self, persist=0):
"""Start a gnuplot process.
Create a 'GnuplotProcess' object. This starts a gnuplot
program and prepares to write commands to it.
Keyword arguments:
'persist' -- the '-persist' option is not supported on the
Macintosh so this argument must be zero.
"""
if persist:
raise Errors.OptionError(
'-persist is not supported on the Macintosh!')
self.gnuplot = _GNUPLOT()
# forward close method:
self.close = self.gnuplot.quit
def write(self, s):
"""Mac gnuplot apparently requires '\r' to end statements."""
self.gnuplot.gnuexec(string.replace(s, '\n', os.linesep))
def flush(self):
pass
def __call__(self, s):
"""Send a command string to gnuplot, for immediate execution."""
# Apple Script doesn't seem to need the trailing '\n'.
self.write(s)
self.flush()
| Python | 0.000012 |
247fe732ad71d2db3e664b63636492782a804151 | Support old Selenium | capture/capture.py | capture/capture.py | #!/bin/python3
from __future__ import print_function
"""
Benchmark creator, for Cassius.
Uses Selenium Webdriver to download new benchmarks for Cassius.
Opens a page in Firefox, causes it to execute get_bench.js, and saves the result.
"""
from selenium import webdriver
import os, sys
import warnings
try:
import urllib.parse as parse
except:
import urlparse as parse
import collections
import argparse
def jsfile(name):
return open(os.path.join(os.path.dirname(__file__), name), "rt").read()
def measure_scrollbar(browser):
browser.get("about:blank");
browser.execute_script(jsfile("scrollbar.js") + "; estimate_scrollbar()");
def make_browser():
from selenium.webdriver.firefox.options import Options
options = Options()
options.add_argument("--headless")
profile = webdriver.FirefoxProfile()
profile.set_preference("security.mixed_content.block_active_content", False)
profile.set_preference("security.mixed_content.block_display_content", False)
browser = webdriver.Firefox(firefox_profile=profile, firefox_options=options)
measure_scrollbar(browser)
return browser
def capture(browser, url, id, prerun=None):
browser.get(url)
if prerun: browser.execute_script(prerun)
text = browser.execute_script(jsfile("all.js") + "; return page2text(arguments[0]);", id)
return ";; From {}\n\n{}\n\n".format(url, text)
def main(urls, prerun=None, fd=None):
urls = sorted([url if "://" in url else "file://" + os.path.abspath(url)
for url in urls])
for url in urls:
scheme, _, _, _, _, _ = parse.urlparse(url)
if scheme not in ["http", "https", "file"]:
warnings.warn("Only http and file scheme supported (not {})".format(scheme))
try:
browser = make_browser()
print("Saving layout to {}:".format(fd.name), file=sys.stderr, end=" ")
for i, url in enumerate(urls):
id = str(i+1).rjust(len(str(len(urls))), "0")
try:
fd.write(capture(browser, url, "doc-" + id, prerun=prerun))
print(id, file=sys.stderr, end=" ")
except:
import traceback
traceback.print_exc()
continue
print(file=sys.stderr)
finally:
browser.quit()
if __name__ == "__main__":
p = argparse.ArgumentParser(description="Download a website as Cassius test cases")
p.add_argument("urls", metavar="URLs", type=str, nargs="+", help="URLs to dowload")
p.add_argument("--output", type=argparse.FileType('w'), default=sys.stdout, help="File name under bench/.")
p.add_argument("--prerun", type=argparse.FileType('r'), help="JS file to run before capturing.")
args = p.parse_args()
prerun = args.prerun.read() if args.prerun else None
main(args.urls, prerun=prerun, fd=args.output)
| #!/bin/python3
from __future__ import print_function
"""
Benchmark creator, for Cassius.
Uses Selenium Webdriver to download new benchmarks for Cassius.
Opens a page in Firefox, causes it to execute get_bench.js, and saves the result.
"""
from selenium import webdriver
import os, sys
import warnings
try:
import urllib.parse as parse
except:
import urlparse as parse
import collections
import argparse
def jsfile(name):
return open(os.path.join(os.path.dirname(__file__), name), "rt").read()
def measure_scrollbar(browser):
browser.get("about:blank");
browser.execute_script(jsfile("scrollbar.js") + "; estimate_scrollbar()");
def make_browser():
from selenium.webdriver.firefox.options import Options
options = Options()
options.set_headless(True)
profile = webdriver.FirefoxProfile()
profile.set_preference("security.mixed_content.block_active_content", False)
profile.set_preference("security.mixed_content.block_display_content", False)
browser = webdriver.Firefox(firefox_profile=profile, firefox_options=options)
measure_scrollbar(browser)
return browser
def capture(browser, url, id, prerun=None):
browser.get(url)
if prerun: browser.execute_script(prerun)
text = browser.execute_script(jsfile("all.js") + "; return page2text(arguments[0]);", id)
return ";; From {}\n\n{}\n\n".format(url, text)
def main(urls, prerun=None, fd=None):
urls = sorted([url if "://" in url else "file://" + os.path.abspath(url)
for url in urls])
for url in urls:
scheme, _, _, _, _, _ = parse.urlparse(url)
if scheme not in ["http", "https", "file"]:
warnings.warn("Only http and file scheme supported (not {})".format(scheme))
try:
browser = make_browser()
print("Saving layout to {}:".format(fd.name), file=sys.stderr, end=" ")
for i, url in enumerate(urls):
id = str(i+1).rjust(len(str(len(urls))), "0")
try:
fd.write(capture(browser, url, "doc-" + id, prerun=prerun))
print(id, file=sys.stderr, end=" ")
except:
import traceback
traceback.print_exc()
continue
print(file=sys.stderr)
finally:
browser.quit()
if __name__ == "__main__":
p = argparse.ArgumentParser(description="Download a website as Cassius test cases")
p.add_argument("urls", metavar="URLs", type=str, nargs="+", help="URLs to dowload")
p.add_argument("--output", type=argparse.FileType('w'), default=sys.stdout, help="File name under bench/.")
p.add_argument("--prerun", type=argparse.FileType('r'), help="JS file to run before capturing.")
args = p.parse_args()
prerun = args.prerun.read() if args.prerun else None
main(args.urls, prerun=prerun, fd=args.output)
| Python | 0 |
eb1fdf3419bdfd1d5920d73a877f707162b783b0 | Drop unused and dangerous entrypoint `open_fileindex` | cfgrib/__init__.py | cfgrib/__init__.py | #
# Copyright 2017-2021 European Centre for Medium-Range Weather Forecasts (ECMWF).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "0.9.9.2.dev0"
# cfgrib core API depends on the ECMWF ecCodes C-library only
from .cfmessage import CfMessage
from .dataset import Dataset, DatasetBuildError, open_container, open_file, open_from_index
from .messages import FileStream, Message
# NOTE: xarray is not a hard dependency, but let's provide helpers if it is available.
try:
from .xarray_store import open_dataset, open_datasets
except ImportError:
pass
| #
# Copyright 2017-2021 European Centre for Medium-Range Weather Forecasts (ECMWF).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "0.9.9.2.dev0"
# cfgrib core API depends on the ECMWF ecCodes C-library only
from .cfmessage import CfMessage
from .dataset import (
Dataset,
DatasetBuildError,
open_container,
open_file,
open_fileindex,
open_from_index,
)
from .messages import FileStream, Message
# NOTE: xarray is not a hard dependency, but let's provide helpers if it is available.
try:
from .xarray_store import open_dataset, open_datasets
except ImportError:
pass
| Python | 0 |
4b465ee4873d4a077fc1e37be038459555aacdec | Remove Python 3.5 compatibility from _compat module | cheroot/_compat.py | cheroot/_compat.py | # pylint: disable=unused-import
"""Compatibility code for using Cheroot with various versions of Python."""
import os
import platform
try:
import ssl
IS_ABOVE_OPENSSL10 = ssl.OPENSSL_VERSION_INFO >= (1, 1)
del ssl
except ImportError:
IS_ABOVE_OPENSSL10 = None
IS_CI = bool(os.getenv('CI'))
IS_GITHUB_ACTIONS_WORKFLOW = bool(os.getenv('GITHUB_WORKFLOW'))
IS_PYPY = platform.python_implementation() == 'PyPy'
SYS_PLATFORM = platform.system()
IS_WINDOWS = SYS_PLATFORM == 'Windows'
IS_LINUX = SYS_PLATFORM == 'Linux'
IS_MACOS = SYS_PLATFORM == 'Darwin'
PLATFORM_ARCH = platform.machine()
IS_PPC = PLATFORM_ARCH.startswith('ppc')
def ntob(n, encoding='ISO-8859-1'):
"""Return the native string as bytes in the given encoding."""
assert_native(n)
# In Python 3, the native string type is unicode
return n.encode(encoding)
def ntou(n, encoding='ISO-8859-1'):
    """Return the native string as Unicode with the given encoding.

    The *encoding* argument is accepted only for API symmetry with
    :func:`ntob`/:func:`bton`; it is never used here because Python 3
    native strings are already unicode.
    """
    assert_native(n)
    # In Python 3, the native string type is unicode
    return n
def bton(b, encoding='ISO-8859-1'):
    """Decode the byte string *b* into the native (unicode) string type."""
    native = b.decode(encoding)
    return native
def assert_native(n):
    """Ensure *n* is a native :py:class:`str`.

    Raises:
        TypeError: if *n* is any other type (e.g. ``bytes``)
    """
    if isinstance(n, str):
        return
    raise TypeError('n must be a native str (got %s)' % type(n).__name__)
def extract_bytes(mv):
    r"""Unwrap the given input buffer into plain bytes.

    :param mv: input :py:func:`buffer`
    :type mv: memoryview or bytes

    :return: unwrapped bytes
    :rtype: bytes

    :raises ValueError: if the input is not one of \
                        :py:class:`memoryview`/:py:func:`buffer` \
                        or :py:class:`bytes`
    """
    # bytes and memoryview are disjoint types, so check order is irrelevant.
    if isinstance(mv, bytes):
        return mv
    if isinstance(mv, memoryview):
        return mv.tobytes()
    raise ValueError(
        'extract_bytes() only accepts bytes and memoryview/buffer',
    )
| # pylint: disable=unused-import
"""Compatibility code for using Cheroot with various versions of Python."""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import os
import platform
import re
import six
try:
import selectors # lgtm [py/unused-import]
except ImportError:
import selectors2 as selectors # noqa: F401 # lgtm [py/unused-import]
# Probe for the ssl module: record whether the linked OpenSSL is >= 1.1,
# or None when the interpreter was built without ssl support.
try:
    import ssl
    IS_ABOVE_OPENSSL10 = ssl.OPENSSL_VERSION_INFO >= (1, 1)
    del ssl
except ImportError:
    IS_ABOVE_OPENSSL10 = None
# contextlib.suppress was added in Python 3.4; provide a minimal backport
# with the same observable behavior for older interpreters.
try:
    from contextlib import suppress
except ImportError:
    from contextlib import contextmanager
    @contextmanager
    def suppress(*exceptions):
        """Return a context manager that suppresses the `exceptions`."""
        # Yield once; any listed exception raised inside the managed block
        # is swallowed, everything else propagates.
        try:
            yield
        except exceptions:
            pass
# Environment flags: generic CI detection and GitHub Actions specifically.
IS_CI = bool(os.getenv('CI'))
IS_GITHUB_ACTIONS_WORKFLOW = bool(os.getenv('GITHUB_WORKFLOW'))
# Interpreter-implementation flag.
IS_PYPY = platform.python_implementation() == 'PyPy'
# Operating-system flags derived from platform.system().
SYS_PLATFORM = platform.system()
IS_WINDOWS = SYS_PLATFORM == 'Windows'
IS_LINUX = SYS_PLATFORM == 'Linux'
IS_MACOS = SYS_PLATFORM == 'Darwin'
# CPU-architecture flag (matches any PowerPC variant, e.g. ppc64le).
PLATFORM_ARCH = platform.machine()
IS_PPC = PLATFORM_ARCH.startswith('ppc')
# Native-string helpers: the native ``str`` type is unicode on Python 3 but
# bytes on Python 2, so the appropriate conversion functions are selected
# once at import time based on the running interpreter.
if not six.PY2:
    def ntob(n, encoding='ISO-8859-1'):
        """Return the native string as bytes in the given encoding."""
        assert_native(n)
        # In Python 3, the native string type is unicode
        return n.encode(encoding)
    def ntou(n, encoding='ISO-8859-1'):
        """Return the native string as Unicode with the given encoding."""
        assert_native(n)
        # In Python 3, the native string type is unicode
        return n
    def bton(b, encoding='ISO-8859-1'):
        """Return the byte string as native string in the given encoding."""
        return b.decode(encoding)
else:
    # Python 2
    def ntob(n, encoding='ISO-8859-1'):
        """Return the native string as bytes in the given encoding."""
        assert_native(n)
        # In Python 2, the native string type is bytes. Assume it's already
        # in the given encoding, which for ISO-8859-1 is almost always what
        # was intended.
        return n
    def ntou(n, encoding='ISO-8859-1'):
        """Return the native string as Unicode with the given encoding."""
        assert_native(n)
        # In Python 2, the native string type is bytes.
        # First, check for the special encoding 'escape'. The test suite uses
        # this to signal that it wants to pass a string with embedded \uXXXX
        # escapes, but without having to prefix it with u'' for Python 2,
        # but no prefix for Python 3.
        if encoding == 'escape':
            return re.sub(
                r'\\u([0-9a-zA-Z]{4})',
                lambda m: six.unichr(int(m.group(1), 16)),
                n.decode('ISO-8859-1'),
            )
        # Assume it's already in the given encoding, which for ISO-8859-1
        # is almost always what was intended.
        return n.decode(encoding)
    def bton(b, encoding='ISO-8859-1'):
        """Return the byte string as native string in the given encoding."""
        return b
def assert_native(n):
    """Ensure *n* is a native :py:class:`str`.

    Raises:
        TypeError: if *n* is any other type (e.g. ``bytes``)
    """
    if isinstance(n, str):
        return
    raise TypeError('n must be a native str (got %s)' % type(n).__name__)
# Expose a module-level ``memoryview`` alias so callers always get a type
# with usable buffer semantics on both interpreters.  The bare string
# literals below are no-op pseudo-docstrings kept for readability.
if not six.PY2:
    """Python 3 has :py:class:`memoryview` builtin."""
    # Python 2.7 has it backported, but socket.write() does
    # str(memoryview(b'0' * 100)) -> <memory at 0x7fb6913a5588>
    # instead of accessing it correctly.
    memoryview = memoryview
else:
    """Link :py:class:`memoryview` to buffer under Python 2."""
    memoryview = buffer  # noqa: F821
def extract_bytes(mv):
    r"""Retrieve bytes out of the given input buffer.

    :param mv: input :py:func:`buffer`
    :type mv: memoryview or bytes

    :return: unwrapped bytes
    :rtype: bytes

    :raises ValueError: if the input is not one of \
                        :py:class:`memoryview`/:py:func:`buffer` \
                        or :py:class:`bytes`
    """
    if isinstance(mv, memoryview):
        # Python 2's buffer type has no tobytes(); bytes(mv) copies it out.
        return bytes(mv) if six.PY2 else mv.tobytes()
    if isinstance(mv, bytes):
        return mv
    raise ValueError(
        'extract_bytes() only accepts bytes and memoryview/buffer',
    )
| Python | 0.000016 |
ad8036e5a21fd29885dc7ebf201e599a0ca79563 | add charliecloud 0.9.7 (#10661) | var/spack/repos/builtin/packages/charliecloud/package.py | var/spack/repos/builtin/packages/charliecloud/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Charliecloud(MakefilePackage):
    """Lightweight user-defined software stacks for HPC."""
    homepage = "https://hpc.github.io/charliecloud"
    url = "https://github.com/hpc/charliecloud/archive/v0.2.4.tar.gz"
    # Known releases with their download checksums.  The oldest entry uses
    # the legacy single-argument checksum form (presumably md5 -- confirm
    # against Spack's directive docs before changing).
    version('0.9.7', sha256='ec80a4b9bef3a2161a783e11d99cc58e09a32dfbc8a6234c8f7ce7fa76e2f62d')
    version('0.9.6', sha256='50e20d5e2a3710cd06e7c999db22495b07ef0fb15ffbc0af3bccac5387f0fddb')
    version('0.9.3', sha256='f1bf032377b8845bc9a93b8a4fad6386161e35900223c0acc61d1f3aa3a87bc7')
    version('0.9.2', sha256='8d0e4804d412beef720a66f886a0a78bce42f3269e880ebf11f602581f8047d4')
    version('0.9.1', sha256='8e69150a271285da71ece7a09b48251ef6593f72207c5126741d9976aa737d95')
    version('0.9.0', sha256='7e74cb16e31fd9d502198f7509bab14d1049ec68ba90b15e277e76f805db9458')
    version('0.2.4', 'b112de661c2c360174b42c99022c1967')
    @property
    def install_targets(self):
        """Targets/variables passed to ``make install``: install into the
        Spack-assigned prefix."""
        return ['install', 'PREFIX=%s' % self.prefix]
| # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Charliecloud(MakefilePackage):
    """Lightweight user-defined software stacks for HPC."""
    homepage = "https://hpc.github.io/charliecloud"
    url = "https://github.com/hpc/charliecloud/archive/v0.2.4.tar.gz"
    # Known releases with their download checksums.  The oldest entry uses
    # the legacy single-argument checksum form (presumably md5 -- confirm
    # against Spack's directive docs before changing).
    version('0.9.6', sha256='50e20d5e2a3710cd06e7c999db22495b07ef0fb15ffbc0af3bccac5387f0fddb')
    version('0.9.3', sha256='f1bf032377b8845bc9a93b8a4fad6386161e35900223c0acc61d1f3aa3a87bc7')
    version('0.9.2', sha256='8d0e4804d412beef720a66f886a0a78bce42f3269e880ebf11f602581f8047d4')
    version('0.9.1', sha256='8e69150a271285da71ece7a09b48251ef6593f72207c5126741d9976aa737d95')
    version('0.9.0', sha256='7e74cb16e31fd9d502198f7509bab14d1049ec68ba90b15e277e76f805db9458')
    version('0.2.4', 'b112de661c2c360174b42c99022c1967')
    @property
    def install_targets(self):
        """Targets/variables passed to ``make install``: install into the
        Spack-assigned prefix."""
        return ['install', 'PREFIX=%s' % self.prefix]
| Python | 0.000001 |
a3a19ab3cad0999cc61fdebe9c6fb1ceca873ab6 | make it full screen | boothpy/widget.py | boothpy/widget.py | # Copyright 2017 Christian Menard
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from PyQt5.QtWidgets import QWidget, QLabel, QMessageBox, QApplication
from PyQt5.QtGui import QPixmap
from PyQt5.QtCore import QTimer, Qt
class ErrorMessage(QMessageBox):
    """Modal critical-error dialog with a short summary and a detail line."""
    def __init__(self, error, description):
        """Build the dialog (caller must still invoke ``exec_()`` to show it).

        :param error: short summary shown as the dialog's main text
        :param description: longer explanation shown as informative text
        """
        super().__init__()
        self.setIcon(QMessageBox.Critical)
        self.setWindowTitle('PyBooth Error')
        self.setText(error)
        self.setInformativeText(description)
class BoothPyWidget(QWidget):
    """Full-screen photo-booth window showing a live camera preview.

    A QTimer polls the camera every 50 ms and paints the latest preview
    frame onto a full-screen QLabel.  Pressing Escape closes the window.
    """
    def __init__(self, camera):
        """Store the camera handle and build the UI.

        :param camera: object exposing ``capture_preview()`` returning raw
            image bytes (e.g. a gphoto2 wrapper) -- TODO confirm exact type.
        """
        super().__init__()
        self.camera = camera
        self.init_ui()
    def _capture_preview(self):
        """Grab one preview frame from the camera.

        Returns the raw image bytes, or None after showing an error dialog
        and closing the window if the capture failed.  (Previously this
        logic was duplicated in init_ui() and on_frame_timeout(), and a
        failed capture crashed on loadFromData(None).)
        """
        try:
            return self.camera.capture_preview()
        except BaseException as e:
            # Close first so the error dialog is not hidden behind the
            # full-screen preview window.
            err = ErrorMessage('Error while capturing preview:', str(e))
            self.close()
            err.exec_()
            return None
    def init_ui(self):
        """Create the preview label, start the frame timer, go full screen."""
        self.setGeometry(300, 300, 300, 220)
        self.setWindowTitle('BoothPy')
        preview_data = self._capture_preview()
        if preview_data is None:
            # Capture failed; the window has already been closed.
            return
        self.preview = QLabel(self)
        self.preview.setGeometry(QApplication.desktop().screenGeometry())
        self.preview.setScaledContents(True)
        pixmap = QPixmap()
        pixmap.loadFromData(preview_data)
        self.preview.setPixmap(pixmap)
        self.frame_timer = QTimer()
        self.frame_timer.timeout.connect(self.on_frame_timeout)
        self.frame_timer.setInterval(50)  # ~20 frames per second
        self.frame_timer.start()
        self.showFullScreen()
    def on_frame_timeout(self):
        """Timer callback: fetch the next preview frame and display it."""
        preview_data = self._capture_preview()
        if preview_data is None:
            return
        pixmap = QPixmap()
        pixmap.loadFromData(preview_data)
        self.preview.setPixmap(pixmap)
    def keyPressEvent(self, e):
        """Close the booth when Escape is pressed."""
        if e.key() == Qt.Key_Escape:
            self.close()
| # Copyright 2017 Christian Menard
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from PyQt5.QtWidgets import QWidget, QLabel, QMessageBox
from PyQt5.QtGui import QPixmap
from PyQt5.QtCore import QTimer, Qt
class ErrorMessage(QMessageBox):
    """Modal critical-error dialog with a short summary and a detail line."""
    def __init__(self, error, description):
        """Build the dialog (caller must still invoke ``exec_()`` to show it).

        :param error: short summary shown as the dialog's main text
        :param description: longer explanation shown as informative text
        """
        super().__init__()
        self.setIcon(QMessageBox.Critical)
        self.setWindowTitle('PyBooth Error')
        self.setText(error)
        self.setInformativeText(description)
class BoothPyWidget(QWidget):
    """Photo-booth main window: shows a live camera preview in a window
    sized to the preview frame, refreshed by a 50 ms timer."""
    def __init__(self, camera):
        """Store the camera handle and build the UI.

        :param camera: object exposing ``capture_preview()`` returning raw
            image bytes -- TODO confirm exact camera wrapper type.
        """
        super().__init__()
        self.camera = camera
        self.init_ui()
    def init_ui(self):
        """Capture an initial frame, build the preview label and start the
        refresh timer."""
        self.setGeometry(300, 300, 300, 220)
        self.setWindowTitle('BoothPy')
        preview_data = None
        try:
            preview_data = self.camera.capture_preview()
        except BaseException as e:
            err = ErrorMessage('Error while capturing preview:', str(e))
            err.exec_()
            self.close()
        # NOTE(review): if the capture above failed, preview_data is still
        # None here and loadFromData(None) below will raise -- confirm
        # whether a crash after the error dialog is intended.
        self.preview = QLabel(self)
        pixmap = QPixmap()
        pixmap.loadFromData(preview_data)
        self.preview.setPixmap(pixmap)
        # Size the window to the native preview frame dimensions.
        self.resize(pixmap.width(), pixmap.height())
        self.frame_timer = QTimer()
        self.frame_timer.timeout.connect(self.on_frame_timeout)
        self.frame_timer.setInterval(50)
        self.frame_timer.start()
        self.show()
    def on_frame_timeout(self):
        """Timer callback: fetch the next preview frame and display it."""
        preview_data = None
        try:
            preview_data = self.camera.capture_preview()
        except BaseException as e:
            err = ErrorMessage('Error while capturing preview:', str(e))
            err.exec_()
            self.close()
        pixmap = QPixmap()
        pixmap.loadFromData(preview_data)
        self.preview.setPixmap(pixmap)
    def keyPressEvent(self, e):
        """Close the booth when Escape is pressed."""
        if e.key() == Qt.Key_Escape:
            self.close()
| Python | 0.000233 |
c7322a1ff37c7f2d4c3dfb149c2e36daafae6043 | Bump to version 0.11.3 | ckanny/__init__.py | ckanny/__init__.py | # -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
ckanny
~~~~~~
Miscellaneous CKAN utility scripts
Examples:
literal blocks::
python example_google.py
Attributes:
module_level_variable1 (int): Module level variables may be documented in
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
from manager import Manager
from . import datastorer, filestorer, hdx
__title__ = 'ckanny'
__author__ = 'Reuben Cummings'
__description__ = 'Miscellaneous CKAN utility scripts'
__email__ = 'reubano@gmail.com'
__version__ = '0.11.3'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Reuben Cummings'
# Root command manager; sub-managers are mounted under short namespaces so
# their commands are invoked as e.g. ``ds:*``, ``fs:*`` and ``hdx:*``.
manager = Manager()
manager.merge(datastorer.manager, namespace='ds')
manager.merge(filestorer.manager, namespace='fs')
manager.merge(hdx.manager, namespace='hdx')
@manager.command
def ver():
    """Show ckanny version"""
    # Imported inside the function rather than at module top -- presumably
    # to avoid a circular import with the package __init__; TODO confirm.
    from . import __version__ as version
    print('v%s' % version)
if __name__ == '__main__':
    # Dispatch command-line arguments to the registered commands.
    manager.main()
| # -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
ckanny
~~~~~~
Miscellaneous CKAN utility scripts
Examples:
literal blocks::
python example_google.py
Attributes:
module_level_variable1 (int): Module level variables may be documented in
"""
from __future__ import (
absolute_import, division, print_function, with_statement,
unicode_literals)
from manager import Manager
from . import datastorer, filestorer, hdx
__title__ = 'ckanny'
__author__ = 'Reuben Cummings'
__description__ = 'Miscellaneous CKAN utility scripts'
__email__ = 'reubano@gmail.com'
__version__ = '0.11.2'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Reuben Cummings'
# Root command manager; sub-managers are mounted under short namespaces so
# their commands are invoked as e.g. ``ds:*``, ``fs:*`` and ``hdx:*``.
manager = Manager()
manager.merge(datastorer.manager, namespace='ds')
manager.merge(filestorer.manager, namespace='fs')
manager.merge(hdx.manager, namespace='hdx')
@manager.command
def ver():
    """Show ckanny version"""
    # Imported inside the function rather than at module top -- presumably
    # to avoid a circular import with the package __init__; TODO confirm.
    from . import __version__ as version
    print('v%s' % version)
if __name__ == '__main__':
    # Dispatch command-line arguments to the registered commands.
    manager.main()
| Python | 0 |
07a74375fabddc9b6fa4de0c345949bfadb54504 | Revert silly change | examples/sync_test.py | examples/sync_test.py | """
=============
A-V sync test
=============
This example tests synchronization between the screen and the audio playback.
"""
# Author: Dan McCloy <drmccloy@uw.edu>
#
# License: BSD (3-clause)
print __doc__
import numpy as np
from expyfun import ExperimentController
# Seeded RNG (currently unused by the loop below).
rng = np.random.RandomState(0)
# Small non-fullscreen window; audio sample rate 24414 Hz, noise disabled.
with ExperimentController('SyncTest', screen_num=0, window_size=[300, 300],
                          full_screen=False, stim_db=70, noise_db=-np.inf,
                          stim_fs=24414, participant='s', session='0',
                          output_dir=None) as ec:
    # Click stimulus: a single 0.1 sample followed by silence.
    ec.load_buffer(np.r_[0.1, np.zeros(2000)])
    white = [1, 1, 1]
    black = [-1, -1, -1]
    # Alternate white/black frames forever, printing the achieved flip rate
    # (this file is Python 2 syntax: ``print`` statement below).
    while True:
        ec.draw_background_color(white)
        t1 = ec.flip_and_play()
        ec.draw_background_color(black)
        t2 = ec.flip()  # expyfun
        print 1. / (t2 - t1)
        ec.wait_one_press(0.5)
| """
=============
A-V sync test
=============
This example tests synchronization between the screen and the audio playback.
"""
# Author: Dan McCloy <drmccloy@uw.edu>
#
# License: BSD (3-clause)
print __doc__
import numpy as np
from expyfun import ExperimentController
# Seeded RNG (currently unused by the loop below).
rng = np.random.RandomState(0)
# Small non-fullscreen window; audio sample rate 44100 Hz, noise disabled.
with ExperimentController('SyncTest', screen_num=0, window_size=[300, 300],
                          full_screen=False, stim_db=70, noise_db=-np.inf,
                          stim_fs=44100, participant='s', session='0',
                          output_dir=None) as ec:
    # Click stimulus: a single 0.1 sample followed by silence.
    ec.load_buffer(np.r_[0.1, np.zeros(2000)])
    white = [1, 1, 1]
    black = [-1, -1, -1]
    # Alternate white/black frames forever, printing the achieved flip rate
    # (this file is Python 2 syntax: ``print`` statement below).
    while True:
        ec.draw_background_color(white)
        t1 = ec.flip_and_play()
        ec.draw_background_color(black)
        t2 = ec.flip()  # expyfun
        print 1. / (t2 - t1)
        ec.wait_one_press(0.5)
| Python | 0.000002 |
63ec2f241c219f9a5fea33de63b520d8b0da5fd8 | Fix initial display update. | classes/display.py | classes/display.py | """Display Class"""
import time
class Display:
    """Render a console progress bar with an ETA for a long-running process.

    Attributes:
        start_time (float): Seconds since epoch when progress started.
        elapsed_time (float): Seconds elapsed since progress started.
        last_updated (float): Seconds since epoch when the bar was last
            redrawn; used to throttle redraws.
    """
    def __init__(self):
        self.start_time = None
        self.elapsed_time = None
        self.last_updated = None
    @staticmethod
    def _format_hms(total_seconds):
        """Format a duration in seconds as an ``HH:MM:SS`` string.

        Bug fix vs. the previous inline code: whole hours are subtracted
        before computing minutes, so runs longer than an hour no longer
        print minutes >= 60 and negative seconds.
        """
        hours = int(total_seconds / 3600.0)
        minutes = int((total_seconds - (hours * 3600)) / 60.0)
        seconds = int(total_seconds - (minutes * 60) - (hours * 3600))
        return "{:02d}:{:02d}:{:02d}".format(hours, minutes, seconds)
    def start(self, message=None):
        """Record the start time. Can display a start message.

        Args:
            message (string): Optional start message.

        Returns:
            None
        """
        self.start_time = time.time()
        if message:
            print(message)
    def update_progress_bar(self, step, end):
        """Redraw the progress bar for the current iteration.

        Redraws are throttled to at most one every 17 ms; the first call
        always draws.

        Args:
            step (float): Current iteration of the process.
            end (float): Final iteration of the process.

        Returns:
            None
        """
        percent = float(step) / float(end)
        if percent <= 0:
            # No progress yet: an ETA cannot be estimated (and the division
            # below would raise ZeroDivisionError).
            return
        current_time = time.time()
        if self.last_updated and current_time < self.last_updated + 0.017:
            return
        self.last_updated = current_time
        elapsed_time = current_time - self.start_time
        self.elapsed_time = elapsed_time
        # Remaining time, extrapolated from the average pace so far.
        estimated_time = (elapsed_time / percent) - elapsed_time
        time_remaining = self._format_hms(estimated_time)
        # Full blocks for each completed 1/25th of the bar ...
        progress_bar = "{}".format('\u2588' * int(percent * 25.0))
        remainder = (percent * 25.0) - len(progress_bar)
        # ... plus a fractional block (3/4, 1/2 or 1/4 width) for the rest.
        if remainder >= 0.75:
            progress_bar += '\u258a'
        elif remainder >= 0.5:
            progress_bar += '\u258c'
        elif remainder >= 0.25:
            progress_bar += '\u258e'
        progress_bar += ' ' * (25 - len(progress_bar))
        output = " {:05.2f}% |{}| Time Remaining: {}".format(
            percent * 100.0,
            progress_bar,
            time_remaining
        )
        # Blank the line, then redraw in place (carriage return, no newline).
        print(' ' * 80, end='\r')
        print(output, end='\r')
    def finish(self):
        """Print the final bar with total elapsed time; reset all state.

        Returns:
            None
        """
        elapsed_time = self._format_hms(self.elapsed_time)
        print(" 100.00% |{}| Elapsed Time: {} ".format(
            '\u2588' * 25,
            elapsed_time
        ))
        self.start_time = None
        self.elapsed_time = None
        self.last_updated = None
| """Display Class"""
import time
class Display:
    """Render a console progress bar with an ETA for a long-running process.

    Attributes:
        start_time (float): Seconds since epoch when progress started.
        elapsed_time (float): Seconds elapsed since progress started.
        last_updated (float): Seconds since epoch when the bar was last
            redrawn; used to throttle redraws.
    """
    def __init__(self):
        self.start_time = None
        self.elapsed_time = None
        self.last_updated = None
    @staticmethod
    def _format_hms(total_seconds):
        """Format a duration in seconds as an ``HH:MM:SS`` string.

        Bug fix vs. the previous inline code: whole hours are subtracted
        before computing minutes, so runs longer than an hour no longer
        print minutes >= 60 and negative seconds.
        """
        hours = int(total_seconds / 3600.0)
        minutes = int((total_seconds - (hours * 3600)) / 60.0)
        seconds = int(total_seconds - (minutes * 60) - (hours * 3600))
        return "{:02d}:{:02d}:{:02d}".format(hours, minutes, seconds)
    def start(self, message=None):
        """Record the start time. Can display a start message.

        Args:
            message (string): Optional start message.

        Returns:
            None
        """
        self.start_time = time.time()
        if message:
            print(message)
    def update_progress_bar(self, step, end):
        """Redraw the progress bar for the current iteration.

        Redraws are throttled to at most one every 17 ms.

        Bug fix: the first call previously raised TypeError because
        ``last_updated`` is None before any redraw; it is now guarded.

        Args:
            step (float): Current iteration of the process.
            end (float): Final iteration of the process.

        Returns:
            None
        """
        percent = float(step) / float(end)
        if percent <= 0:
            # No progress yet: an ETA cannot be estimated (and the division
            # below would raise ZeroDivisionError).
            return
        current_time = time.time()
        if self.last_updated and current_time < self.last_updated + 0.017:
            return
        self.last_updated = current_time
        elapsed_time = current_time - self.start_time
        self.elapsed_time = elapsed_time
        # Remaining time, extrapolated from the average pace so far.
        estimated_time = (elapsed_time / percent) - elapsed_time
        time_remaining = self._format_hms(estimated_time)
        # Full blocks for each completed 1/25th of the bar ...
        progress_bar = "{}".format('\u2588' * int(percent * 25.0))
        remainder = (percent * 25.0) - len(progress_bar)
        # ... plus a fractional block (3/4, 1/2 or 1/4 width) for the rest.
        if remainder >= 0.75:
            progress_bar += '\u258a'
        elif remainder >= 0.5:
            progress_bar += '\u258c'
        elif remainder >= 0.25:
            progress_bar += '\u258e'
        progress_bar += ' ' * (25 - len(progress_bar))
        output = " {:05.2f}% |{}| Time Remaining: {}".format(
            percent * 100.0,
            progress_bar,
            time_remaining
        )
        # Blank the line, then redraw in place (carriage return, no newline).
        print(' ' * 72, end='\r')
        print(output, end='\r')
    def finish(self):
        """Print the final bar with total elapsed time; reset all state.

        Returns:
            None
        """
        elapsed_time = self._format_hms(self.elapsed_time)
        print(" 100.00% |{}| Elapsed Time: {} ".format(
            '\u2588' * 25,
            elapsed_time
        ))
        self.start_time = None
        self.elapsed_time = None
        self.last_updated = None
| Python | 0 |
d008b0cec67a1428a2761b32f8b9cd7fee6372ed | Fix hardcoded vsdk branch | generator/src/lib/managers.py | generator/src/lib/managers.py | # -*- coding: utf-8 -*-
import os
import shutil
import threading
from git import Repo, GitCommandError
from printer import Printer
class TaskManager(object):
""" Multi threading manager """
def __init__(self):
""" Initializes a TaskManager
"""
self.threads = list()
def wait_until_exit(self):
""" Wait until all the threads are finished.
"""
[t.join() for t in self.threads]
self.threads = list()
def start_task(self, method, *args, **kwargs):
""" Start a task in a separate thread
Args:
method: the method to start in a separate thread
args: Accept args/kwargs arguments
"""
thread = threading.Thread(target=method, args=args, kwargs=kwargs)
thread.is_daemon = False
thread.start()
self.threads.append(thread)
class GitManager(object):
""" Manager of git repository
"""
def __init__(self, url, branch, directory):
""" Initializes a GitManager
Args:
url: url of the git repository to clone
branch: name of the branch
directory: the directory name
"""
self.url = url
self.directory = directory
self.branch = str(branch)
self.repo = None
self._nb_changes = 0
self.remove_directory()
self.repo = Repo.clone_from(url=self.url, to_path=self.directory)
try:
self.repo.git.checkout(self.branch)
Printer.log('Switching to branch %s' % self.branch)
except GitCommandError:
Printer.log('Branch %s does not exist yet. Creating it...' % self.branch)
branch = self.repo.create_head(self.branch)
self.repo.head.reference = branch
# remote = self.repo.remote()
# remote.push(self.repo.head)
def commit(self, message):
""" Add all modification and add a commit message
Args:
message: the message for the commit
Returns:
Returns the number of diffs affected by the commit
No commit are made if no diffs are found
"""
diffs = self.repo.index.diff(None)
nb_diffs = len(diffs)
nb_untracked_files = len(self.repo.untracked_files)
if nb_diffs:
for diff in diffs:
if diff.b_mode == 0 and diff.b_blob is None:
self.repo.index.remove(items=[diff.a_blob.path])
else:
self.repo.index.add(items=[diff.a_blob.path])
if nb_untracked_files > 0:
self.repo.index.add(items=self.repo.untracked_files)
self._nb_changes = nb_diffs + nb_untracked_files
if self._nb_changes > 0:
self.repo.index.commit(message)
return self._nb_changes
def push(self):
""" Push all modififcation to the repository
"""
if self._nb_changes > 0:
remote = self.repo.remote()
remote.push(self.repo.head)
self._nb_changes = 0
def remove_directory(self):
""" Clean the clone repository
"""
if os.path.exists(self.directory):
shutil.rmtree(self.directory)
| # -*- coding: utf-8 -*-
import os
import shutil
import threading
from git import Repo, GitCommandError
from printer import Printer
class TaskManager(object):
""" Multi threading manager """
def __init__(self):
""" Initializes a TaskManager
"""
self.threads = list()
def wait_until_exit(self):
""" Wait until all the threads are finished.
"""
[t.join() for t in self.threads]
self.threads = list()
def start_task(self, method, *args, **kwargs):
""" Start a task in a separate thread
Args:
method: the method to start in a separate thread
args: Accept args/kwargs arguments
"""
thread = threading.Thread(target=method, args=args, kwargs=kwargs)
thread.is_daemon = False
thread.start()
self.threads.append(thread)
class GitManager(object):
""" Manager of git repository
"""
def __init__(self, url, branch, directory):
""" Initializes a GitManager
Args:
url: url of the git repository to clone
branch: name of the branch
directory: the directory name
"""
self.url = url
self.directory = directory
self.branch = branch
self.repo = None
self._nb_changes = 0
self.remove_directory()
self.repo = Repo.clone_from(url=self.url, to_path=self.directory)
try:
self.repo.git.checkout('3.0')
Printer.log('Switching to branch %s' % self.branch)
except GitCommandError:
Printer.log('Branch %s does not exist yet. Creating it...' % self.branch)
branch = self.repo.create_head(self.branch)
self.repo.head.reference = branch
# remote = self.repo.remote()
# remote.push(self.repo.head)
def commit(self, message):
""" Add all modification and add a commit message
Args:
message: the message for the commit
Returns:
Returns the number of diffs affected by the commit
No commit are made if no diffs are found
"""
diffs = self.repo.index.diff(None)
nb_diffs = len(diffs)
nb_untracked_files = len(self.repo.untracked_files)
if nb_diffs:
for diff in diffs:
if diff.b_mode == 0 and diff.b_blob is None:
self.repo.index.remove(items=[diff.a_blob.path])
else:
self.repo.index.add(items=[diff.a_blob.path])
if nb_untracked_files > 0:
self.repo.index.add(items=self.repo.untracked_files)
self._nb_changes = nb_diffs + nb_untracked_files
if self._nb_changes > 0:
self.repo.index.commit(message)
return self._nb_changes
def push(self):
""" Push all modififcation to the repository
"""
if self._nb_changes > 0:
remote = self.repo.remote()
remote.push(self.repo.head)
self._nb_changes = 0
def remove_directory(self):
""" Clean the clone repository
"""
if os.path.exists(self.directory):
shutil.rmtree(self.directory)
| Python | 0.000335 |
72e71235d0f5e4851b212e4c7fa583eeddce6252 | Fix QueueUtility to read request from view again | src/plone.server/plone/server/async.py | src/plone.server/plone/server/async.py | # -*- coding: utf-8 -*-
from datetime import datetime
from plone.server.browser import ErrorResponse
from plone.server.browser import UnauthorizedResponse
from plone.server.browser import View
from plone.server import _
from plone.server.transactions import sync
from plone.server.transactions import TransactionProxy
from zope.interface import Interface
from zope.security.interfaces import Unauthorized
import asyncio
import logging
logger = logging.getLogger(__name__)
class IAsyncUtility(Interface):
async def initialize(self):
pass
class IQueueUtility(IAsyncUtility):
pass
class QueueUtility(object):
def __init__(self, settings):
self._queue = asyncio.PriorityQueue()
self._exceptions = False
self._total_queued = 0
async def initialize(self, app=None):
# loop
self.app = app
while True:
got_obj = False
try:
priority, view = await self._queue.get()
got_obj = True
txn = view.request.conn.transaction_manager.begin(view.request)
try:
view_result = await view()
if isinstance(view_result, ErrorResponse):
await sync(view.request)(txn.abort)
elif isinstance(view_result, UnauthorizedResponse):
await sync(view.request)(txn.abort)
else:
await sync(view.request)(txn.commit)
except Unauthorized:
await sync(view.request)(txn.abort)
view_result = UnauthorizedResponse(
_('Not authorized to render operation'))
except Exception as e:
logger.error(
"Exception on writing execution",
exc_info=e)
await sync(view.request)(txn.abort)
view_result = ErrorResponse(
'ServiceError',
_('Error on execution of operation')
)
except KeyboardInterrupt or MemoryError or SystemExit or asyncio.CancelledError:
self._exceptions = True
raise
except:
self._exceptions = True
logger.error('Worker call failed')
finally:
if got_obj:
self._queue.task_done()
@property
def exceptions(self):
return self._exceptions
@property
def total_queued(self):
return self._total_queued
async def add(self, view, priority=3):
await self._queue.put((priority, view))
self._total_queued += 1
return self._queue.qsize()
class QueueObject(View):
def __init__(self, context, request):
super(QueueObject, self).__init__(context, TransactionProxy(request))
self.time = datetime.now().timestamp()
def __lt__(self, view):
return self.time < view.time
| # -*- coding: utf-8 -*-
from datetime import datetime
from plone.server.browser import ErrorResponse
from plone.server.browser import UnauthorizedResponse
from plone.server.browser import View
from plone.server import _
from plone.server.transactions import sync
from plone.server.transactions import TransactionProxy
from zope.interface import Interface
from zope.security.interfaces import Unauthorized
import asyncio
import logging
logger = logging.getLogger(__name__)
class IAsyncUtility(Interface):
async def initialize(self):
pass
class IQueueUtility(IAsyncUtility):
pass
class QueueUtility(object):
def __init__(self, settings):
self._queue = asyncio.PriorityQueue()
self._exceptions = False
self._total_queued = 0
async def initialize(self, app=None):
# loop
self.app = app
while True:
got_obj = False
try:
priority, view = await self._queue.get()
got_obj = True
txn = request.conn.transaction_manager.begin(request)
try:
view_result = await view()
if isinstance(view_result, ErrorResponse):
await sync(request)(txn.abort)
elif isinstance(view_result, UnauthorizedResponse):
await sync(request)(txn.abort)
else:
await sync(request)(txn.commit)
except Unauthorized:
await sync(request)(txn.abort)
view_result = UnauthorizedResponse(
_('Not authorized to render operation'))
except Exception as e:
logger.error(
"Exception on writing execution",
exc_info=e)
await sync(request)(txn.abort)
view_result = ErrorResponse(
'ServiceError',
_('Error on execution of operation')
)
except KeyboardInterrupt or MemoryError or SystemExit or asyncio.CancelledError:
self._exceptions = True
raise
except:
self._exceptions = True
logger.error('Worker call failed')
finally:
if got_obj:
self._queue.task_done()
@property
def exceptions(self):
return self._exceptions
@property
def total_queued(self):
return self._total_queued
async def add(self, view, priority=3):
await self._queue.put((priority, view))
self._total_queued += 1
return self._queue.qsize()
class QueueObject(View):
def __init__(self, context, request):
super(QueueObject, self).__init__(context, TransactionProxy(request))
self.time = datetime.now().timestamp()
def __lt__(self, view):
return self.time < view.time
| Python | 0 |
85880dbf68718737fa52535326163d9b40adf7f9 | Add tags to event serializer | src/sentry/api/serializers/models/event.py | src/sentry/api/serializers/models/event.py | from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import Event
@register(Event)
class EventSerializer(Serializer):
def _get_entries(self, event, user):
# XXX(dcramer): These are called entries for future-proofing
interface_list = []
for key, interface in event.interfaces.iteritems():
if key == 'user':
continue
entry = {
'data': interface.to_json(),
'type': interface.get_alias(),
}
interface_list.append((interface, entry))
interface_list.sort(key=lambda x: x[0].get_display_score(), reverse=True)
return [i[1] for i in interface_list]
def get_attrs(self, item_list, user):
Event.objects.bind_nodes(item_list, 'data')
results = {}
for item in item_list:
user_interface = item.interfaces.get('sentry.interfaces.User')
if user_interface:
user_data = user_interface.to_json()
else:
user_data = None
results[item] = {
'entries': self._get_entries(item, user),
'user': user_data,
}
return results
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'eventID': str(obj.event_id),
'entries': attrs['entries'],
'message': obj.message,
'user': attrs['user'],
'tags': obj.get_tags(),
'platform': obj.platform,
'dateCreated': obj.datetime,
'timeSpent': obj.time_spent,
}
return d
| from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import Event
@register(Event)
class EventSerializer(Serializer):
def _get_entries(self, event, user):
# XXX(dcramer): These are called entries for future-proofing
interface_list = []
for key, interface in event.interfaces.iteritems():
if key == 'user':
continue
entry = {
'data': interface.to_json(),
'type': interface.get_alias(),
}
interface_list.append((interface, entry))
interface_list.sort(key=lambda x: x[0].get_display_score(), reverse=True)
return [i[1] for i in interface_list]
def get_attrs(self, item_list, user):
Event.objects.bind_nodes(item_list, 'data')
results = {}
for item in item_list:
user_interface = item.interfaces.get('sentry.interfaces.User')
if user_interface:
user_data = user_interface.to_json()
else:
user_data = None
results[item] = {
'entries': self._get_entries(item, user),
'user': user_data,
}
return results
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'eventID': str(obj.event_id),
'entries': attrs['entries'],
'message': obj.message,
'user': attrs['user'],
'platform': obj.platform,
'dateCreated': obj.datetime,
'timeSpent': obj.time_spent,
}
return d
| Python | 0 |
71636292d089f16485691f242edf74fcbd72ff2b | Enforce PEP8 on readpdf.py | jarviscli/plugins/readpdf.py | jarviscli/plugins/readpdf.py | # importing the modules
import PyPDF2
import pyttsx3
from plugin import plugin
"""
A tool for reading out the pdf files using the jarvis.Uses PyPDF2 and pyttsx3 libraries
"""
@plugin('readpdf')
class readpdfjarvis():
def __init__(self):
self.path = None
def __call__(self, jarvis, s):
self.read_pdf(jarvis)
def read_pdf(self, jarvis):
filename = jarvis.input("Enter your file path with '/' separations:")
pdf = open(filename, 'rb')
pdfRead = PyPDF2.PdfFileReader(pdf)
for i in range(pdfRead.getNumPages()):
page = pdfRead.getPage(i)
jarvis.say("Page No: " + str(1 + pdfRead.getPageNumber(page)))
pageContent = page.extractText()
jarvis.say(pageContent)
speak = pyttsx3.init()
speak.say(pageContent)
speak.runAndWait()
| # importing the modules
import PyPDF2
import pyttsx3
from plugin import plugin
"""
A tool for reading out the pdf files using the jarvis.Uses PyPDF2 and pyttsx3 libraries
"""
@plugin('readpdf')
class readpdfjarvis():
def __init__(self):
self.path = None
def __call__(self, jarvis, s):
self.read_pdf(jarvis)
def read_pdf(self, jarvis):
filename = jarvis.input("Enter your file path with '/' seperations:")
pdf = open(filename, 'rb')
pdfRead = PyPDF2.PdfFileReader(pdf)
for i in range(pdfRead.getNumPages()):
page = pdfRead.getPage(i)
jarvis.say("Page No: "+str(1 + pdfRead.getPageNumber(page)))
pageContent = page.extractText()
jarvis.say(pageContent)
speak = pyttsx3.init()
speak.say(pageContent)
speak.runAndWait()
| Python | 0 |
bb7fa507a31901819dbc7712b13c4223fe6d3585 | Correct p tags on system message output | src/sentry/templatetags/sentry_activity.py | src/sentry/templatetags/sentry_activity.py | """
sentry.templatetags.sentry_activity
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from django import template
from django.utils.html import escape, linebreaks
from django.utils.safestring import mark_safe
from sentry.models import Activity
from sentry.templatetags.sentry_helpers import timesince
register = template.Library()
ACTIVITY_ACTION_STRINGS = {
Activity.COMMENT: 'left a comment',
Activity.SET_RESOLVED: 'marked this event as resolved',
Activity.SET_UNRESOLVED: 'marked this event as unresolved',
Activity.SET_MUTED: 'marked this event as muted',
Activity.SET_PUBLIC: 'made this event public',
Activity.SET_PRIVATE: 'made this event private',
Activity.SET_REGRESSION: 'marked this event as a regression',
}
@register.filter
def render_activity(item):
if not item.group:
# not implemented
return
action_str = ACTIVITY_ACTION_STRINGS[item.type]
output = '<p>'
if item.user:
name = item.user.first_name or item.user.email
output += '<strong>%s</strong> %s' % (escape(name), action_str)
else:
output += 'The system %s' % (action_str,)
output += ' — %s</p>' % (timesince(item.datetime),)
if item.type == Activity.COMMENT:
output += linebreaks(item.data['body'])
return mark_safe(output)
| """
sentry.templatetags.sentry_activity
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from django import template
from django.utils.html import escape, linebreaks
from django.utils.safestring import mark_safe
from sentry.models import Activity
from sentry.templatetags.sentry_helpers import timesince
register = template.Library()
ACTIVITY_ACTION_STRINGS = {
Activity.COMMENT: 'left a comment',
Activity.SET_RESOLVED: 'marked this event as resolved',
Activity.SET_UNRESOLVED: 'marked this event as unresolved',
Activity.SET_MUTED: 'marked this event as muted',
Activity.SET_PUBLIC: 'made this event public',
Activity.SET_PRIVATE: 'made this event private',
Activity.SET_REGRESSION: 'marked this event as a regression',
}
@register.filter
def render_activity(item):
if not item.group:
# not implemented
return
action_str = ACTIVITY_ACTION_STRINGS[item.type]
if item.user:
name = item.user.first_name or item.user.email
output = '<p><strong>%s</strong> %s' % (escape(name), action_str)
else:
output = 'The system %s' % (action_str,)
output += ' — %s</p>' % (timesince(item.datetime),)
if item.type == Activity.COMMENT:
output += linebreaks(item.data['body'])
return mark_safe(output)
| Python | 0.00002 |
736e1f7f4de56a57df3b51058c5b45455e577cf0 | Fix flake8 | busstops/management/commands/import_areas.py | busstops/management/commands/import_areas.py | """
Import administrative areas from the NPTG.
Usage:
import_areas < AdminAreas.csv
"""
from ..import_from_csv import ImportFromCSVCommand
from ...models import AdminArea
class Command(ImportFromCSVCommand):
def handle_row(self, row):
AdminArea.objects.update_or_create(
id=row['AdministrativeAreaCode'],
defaults={
'atco_code': row['AtcoAreaCode'],
'name': row['AreaName'],
'short_name': row['ShortName'],
'country': row['Country'],
'region_id': row['RegionCode'],
}
)
def handle(self, *args, **options):
super(Command, self).handle(*args, **options)
# Move Cumbria to the North West.
# Necessary because of the confusing 'North East and Cumbria' Traveline
# region, but Cumbrian bus *services* are actually in the North West now
AdminArea.objects.filter(name='Cumbria').update(region_id='NW')
| """
Import administrative areas from the NPTG.
Usage:
import_areas < AdminAreas.csv
"""
from ..import_from_csv import ImportFromCSVCommand
from ...models import AdminArea
class Command(ImportFromCSVCommand):
def handle_row(self, row):
AdminArea.objects.update_or_create(
id=row['AdministrativeAreaCode'],
defaults={
'atco_code': row['AtcoAreaCode'],
'name': row['AreaName'],
'short_name': row['ShortName'],
'country': row['Country'],
'region_id': row['RegionCode'],
}
)
def handle(self, *args, **options):
super(Command, self).handle(*args, **options)
# Move Cumbria to the North West.
# There is the legacy of the confusing 'North East and Cumbria' Traveline region,
# but actually Cumbrian bus services are in the North West now
AdminArea.objects.filter(name='Cumbria').update(region_id='NW')
| Python | 0 |
8a870c6faf8aa50ad7f8c58458c4af9ddef7cfdc | Make authbind check graceful. | braid/authbind.py | braid/authbind.py | import os
from fabric.api import sudo, run, abort, quiet
from braid import package, hasSudoCapabilities
def install():
package.install('authbind')
def allow(user, port):
path = os.path.join('/etc/authbind/byport', str(port))
needsUpdate = True
with quiet():
state = run('stat -c %U:%a {}'.format(path))
needsUpdate = state.strip().split(':') != [user, '500']
if needsUpdate:
if not hasSudoCapabilities():
abort('Trying to give {} access to port {} but have insufficient '
'capabilities.'.format(user, port))
sudo('touch {}'.format(path))
sudo('chown {0}:{0} {1}'.format(user, path))
sudo('chmod 0500 {}'.format(path))
| import os
from fabric.api import sudo, run, abort
from braid import package, hasSudoCapabilities
def install():
package.install('authbind')
def allow(user, port):
path = os.path.join('/etc/authbind/byport', str(port))
state = run('stat -c %U:%a {}'.format(path))
if state.strip().split(':') != (user, '500'):
if not hasSudoCapabilities():
abort('Trying to give {} access to port {} but have insufficient '
'capabilities.'.format(user, port))
sudo('touch {}'.format(path))
sudo('chown {0}:{0} {1}'.format(user, path))
sudo('chmod 0500 {}'.format(path))
| Python | 0 |
ad2087daae138d3897fc47f0713c8955352ed6ae | add SecretBallotUserIdMiddleware | secretballot/middleware.py | secretballot/middleware.py | # -*- coding: utf-8 -*-
from hashlib import md5
from django.utils.deprecation import MiddlewareMixin
class SecretBallotMiddleware(MiddlewareMixin):
def process_request(self, request):
request.secretballot_token = self.generate_token(request)
def generate_token(self, request):
raise NotImplementedError
class SecretBallotIpMiddleware(SecretBallotMiddleware):
def generate_token(self, request):
return request.META['REMOTE_ADDR']
class SecretBallotUserIdMiddleware(SecretBallotMiddleware):
"""
As the token is generated based on the user ID, this middleware
should only be used on pages where the user is logged in.
"""
def genereate_token(self, request):
return request.user.id
class SecretBallotIpUseragentMiddleware(SecretBallotMiddleware):
def generate_token(self, request):
s = u"".join((request.META['REMOTE_ADDR'], request.META.get('HTTP_USER_AGENT', '')))
return md5(s.encode('utf-8')).hexdigest()
| # -*- coding: utf-8 -*-
from hashlib import md5
from django.utils.deprecation import MiddlewareMixin
class SecretBallotMiddleware(MiddlewareMixin):
def process_request(self, request):
request.secretballot_token = self.generate_token(request)
def generate_token(self, request):
raise NotImplementedError
class SecretBallotIpMiddleware(SecretBallotMiddleware):
def generate_token(self, request):
return request.META['REMOTE_ADDR']
class SecretBallotIpUseragentMiddleware(SecretBallotMiddleware):
def generate_token(self, request):
s = u"".join((request.META['REMOTE_ADDR'], request.META.get('HTTP_USER_AGENT', '')))
return md5(s.encode('utf-8')).hexdigest()
| Python | 0.000001 |
898e97a38ea0510b743ca79d97444458274426b2 | Add tests for queue predeclaration. | st2common/tests/unit/test_service_setup.py | st2common/tests/unit/test_service_setup.py | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tempfile
import mock
from oslo_config import cfg
from st2common import service_setup
from st2common.transport.bootstrap_utils import register_exchanges
from st2common.transport.bootstrap_utils import QUEUES
from st2tests.base import CleanFilesTestCase
from st2tests import config
__all__ = [
'ServiceSetupTestCase'
]
MOCK_LOGGING_CONFIG_INVALID_LOG_LEVEL = """
[loggers]
keys=root
[handlers]
keys=consoleHandler
[formatters]
keys=simpleConsoleFormatter
[logger_root]
level=invalid_log_level
handlers=consoleHandler
[handler_consoleHandler]
class=StreamHandler
level=DEBUG
formatter=simpleConsoleFormatter
args=(sys.stdout,)
[formatter_simpleConsoleFormatter]
class=st2common.logging.formatters.ConsoleLogFormatter
format=%(asctime)s %(levelname)s [-] %(message)s
datefmt=
""".strip()
class ServiceSetupTestCase(CleanFilesTestCase):
def test_no_logging_config_found(self):
def mock_get_logging_config_path():
return ''
config.get_logging_config_path = mock_get_logging_config_path
expected_msg = "No section: .*"
self.assertRaisesRegexp(Exception, expected_msg,
service_setup.setup, service='api',
config=config,
setup_db=False, register_mq_exchanges=False,
register_signal_handlers=False,
register_internal_trigger_types=False,
run_migrations=False)
def test_invalid_log_level_friendly_error_message(self):
_, mock_logging_config_path = tempfile.mkstemp()
self.to_delete_files.append(mock_logging_config_path)
with open(mock_logging_config_path, 'w') as fp:
fp.write(MOCK_LOGGING_CONFIG_INVALID_LOG_LEVEL)
def mock_get_logging_config_path():
return mock_logging_config_path
config.get_logging_config_path = mock_get_logging_config_path
expected_msg = 'Invalid log level selected. Log level names need to be all uppercase'
self.assertRaisesRegexp(KeyError, expected_msg,
service_setup.setup, service='api',
config=config,
setup_db=False, register_mq_exchanges=False,
register_signal_handlers=False,
register_internal_trigger_types=False,
run_migrations=False)
@mock.patch('kombu.Queue.declare')
def test_register_exchanges_predeclare_queues(self, mock_declare):
# Verify that queues are correctly pre-declared if the corresponding config option is set
# Pre-declaration is disabled
self.assertEqual(mock_declare.call_count, 0)
cfg.CONF.set_override(group='messaging', name='predeclare_queues', override=False)
register_exchanges()
self.assertEqual(mock_declare.call_count, 0)
# Pre-declaration is enabled
self.assertEqual(mock_declare.call_count, 0)
cfg.CONF.set_override(group='messaging', name='predeclare_queues', override=True)
register_exchanges()
self.assertEqual(mock_declare.call_count, len(QUEUES))
| # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import tempfile
from st2common import service_setup
from st2tests.base import CleanFilesTestCase
from st2tests import config
__all__ = [
'ServiceSetupTestCase'
]
MOCK_LOGGING_CONFIG_INVALID_LOG_LEVEL = """
[loggers]
keys=root
[handlers]
keys=consoleHandler
[formatters]
keys=simpleConsoleFormatter
[logger_root]
level=invalid_log_level
handlers=consoleHandler
[handler_consoleHandler]
class=StreamHandler
level=DEBUG
formatter=simpleConsoleFormatter
args=(sys.stdout,)
[formatter_simpleConsoleFormatter]
class=st2common.logging.formatters.ConsoleLogFormatter
format=%(asctime)s %(levelname)s [-] %(message)s
datefmt=
""".strip()
class ServiceSetupTestCase(CleanFilesTestCase):
def test_no_logging_config_found(self):
def mock_get_logging_config_path():
return ''
config.get_logging_config_path = mock_get_logging_config_path
expected_msg = "No section: .*"
self.assertRaisesRegexp(Exception, expected_msg,
service_setup.setup, service='api',
config=config,
setup_db=False, register_mq_exchanges=False,
register_signal_handlers=False,
register_internal_trigger_types=False,
run_migrations=False)
def test_invalid_log_level_friendly_error_message(self):
_, mock_logging_config_path = tempfile.mkstemp()
self.to_delete_files.append(mock_logging_config_path)
with open(mock_logging_config_path, 'w') as fp:
fp.write(MOCK_LOGGING_CONFIG_INVALID_LOG_LEVEL)
def mock_get_logging_config_path():
return mock_logging_config_path
config.get_logging_config_path = mock_get_logging_config_path
expected_msg = 'Invalid log level selected. Log level names need to be all uppercase'
self.assertRaisesRegexp(KeyError, expected_msg,
service_setup.setup, service='api',
config=config,
setup_db=False, register_mq_exchanges=False,
register_signal_handlers=False,
register_internal_trigger_types=False,
run_migrations=False)
| Python | 0 |
ea17679936442d8e5af90dcae72c003f708d7b0c | Fix check_user_support for custom user models | guardian/backends.py | guardian/backends.py | from __future__ import unicode_literals
from django.db import models
from guardian.compat import get_user_model
from guardian.conf import settings
from guardian.exceptions import WrongAppError
from guardian.core import ObjectPermissionChecker
def check_object_support(obj):
"""
Returns ``True`` if given ``obj`` is supported
"""
# Backend checks only object permissions (isinstance implies that obj
# is not None)
# Backend checks only permissions for Django models
return isinstance(obj, models.Model)
def check_user_support(user_obj):
"""
Returns a tuple of checkresult and ``user_obj`` which should be used for
permission checks
Checks if the given user is supported. Anonymous users need explicit
activation via ANONYMOUS_USER_NAME
"""
# This is how we support anonymous users - simply try to retrieve User
# instance and perform checks for that predefined user
if not user_obj.is_authenticated():
# If anonymous user permission is disabled then they are always
# unauthorized
if settings.ANONYMOUS_USER_NAME is None:
return False, user_obj
User = get_user_model()
lookup = {User.USERNAME_FIELD: settings.ANONYMOUS_USER_NAME}
user_obj = User.objects.get(**lookup)
return True, user_obj
def check_support(user_obj, obj):
"""
Combination of ``check_object_support`` and ``check_user_support``
"""
obj_support = check_object_support(obj)
user_support, user_obj = check_user_support(user_obj)
return obj_support and user_support, user_obj
class ObjectPermissionBackend(object):
supports_object_permissions = True
supports_anonymous_user = True
supports_inactive_user = True
def authenticate(self, username, password):
return None
def has_perm(self, user_obj, perm, obj=None):
"""
Returns ``True`` if given ``user_obj`` has ``perm`` for ``obj``. If no
``obj`` is given, ``False`` is returned.
.. note::
Remember, that if user is not *active*, all checks would return
``False``.
Main difference between Django's ``ModelBackend`` is that we can pass
``obj`` instance here and ``perm`` doesn't have to contain
``app_label`` as it can be retrieved from given ``obj``.
**Inactive user support**
If user is authenticated but inactive at the same time, all checks
always returns ``False``.
"""
# check if user_obj and object are supported
support, user_obj = check_support(user_obj, obj)
if not support:
return False
if '.' in perm:
app_label, perm = perm.split('.')
if app_label != obj._meta.app_label:
raise WrongAppError("Passed perm has app label of '%s' and "
"given obj has '%s'" % (app_label, obj._meta.app_label))
check = ObjectPermissionChecker(user_obj)
return check.has_perm(perm, obj)
def get_all_permissions(self, user_obj, obj=None):
"""
Returns a set of permission strings that the given ``user_obj`` has for ``obj``
"""
# check if user_obj and object are supported
support, user_obj = check_support(user_obj, obj)
if not support:
return set()
check = ObjectPermissionChecker(user_obj)
return check.get_perms(obj)
| from __future__ import unicode_literals
from django.db import models
from guardian.compat import get_user_model
from guardian.conf import settings
from guardian.exceptions import WrongAppError
from guardian.core import ObjectPermissionChecker
def check_object_support(obj):
"""
Returns ``True`` if given ``obj`` is supported
"""
# Backend checks only object permissions (isinstance implies that obj
# is not None)
# Backend checks only permissions for Django models
return isinstance(obj, models.Model)
def check_user_support(user_obj):
"""
Returns a tuple of checkresult and ``user_obj`` which should be used for
permission checks
Checks if the given user is supported. Anonymous users need explicit
activation via ANONYMOUS_USER_NAME
"""
# This is how we support anonymous users - simply try to retrieve User
# instance and perform checks for that predefined user
if not user_obj.is_authenticated():
# If anonymous user permission is disabled then they are always
# unauthorized
if settings.ANONYMOUS_USER_NAME is None:
return False, user_obj
user_obj = get_user_model().objects.get(username=settings.ANONYMOUS_USER_NAME)
return True, user_obj
def check_support(user_obj, obj):
"""
Combination of ``check_object_support`` and ``check_user_support``
"""
obj_support = check_object_support(obj)
user_support, user_obj = check_user_support(user_obj)
return obj_support and user_support, user_obj
class ObjectPermissionBackend(object):
supports_object_permissions = True
supports_anonymous_user = True
supports_inactive_user = True
def authenticate(self, username, password):
return None
def has_perm(self, user_obj, perm, obj=None):
"""
Returns ``True`` if given ``user_obj`` has ``perm`` for ``obj``. If no
``obj`` is given, ``False`` is returned.
.. note::
Remember, that if user is not *active*, all checks would return
``False``.
Main difference between Django's ``ModelBackend`` is that we can pass
``obj`` instance here and ``perm`` doesn't have to contain
``app_label`` as it can be retrieved from given ``obj``.
**Inactive user support**
If user is authenticated but inactive at the same time, all checks
always returns ``False``.
"""
# check if user_obj and object are supported
support, user_obj = check_support(user_obj, obj)
if not support:
return False
if '.' in perm:
app_label, perm = perm.split('.')
if app_label != obj._meta.app_label:
raise WrongAppError("Passed perm has app label of '%s' and "
"given obj has '%s'" % (app_label, obj._meta.app_label))
check = ObjectPermissionChecker(user_obj)
return check.has_perm(perm, obj)
def get_all_permissions(self, user_obj, obj=None):
"""
Returns a set of permission strings that the given ``user_obj`` has for ``obj``
"""
# check if user_obj and object are supported
support, user_obj = check_support(user_obj, obj)
if not support:
return set()
check = ObjectPermissionChecker(user_obj)
return check.get_perms(obj)
| Python | 0.000001 |
112cb1eb06034f5afb24f9f1c20052a87d8a6374 | Update pir_test.py | sensor_testing/pir_test.py | sensor_testing/pir_test.py | # parallax_pir_reva.py - write to screen when movement detected
# (c) BotBook.com - Karvinen, Karvinen, Valtokari
# 22.9.2017 modified from original
import time
import botbook_gpio as gpio
learningPeriod = 30
def main():
pirPin = 7
gpio.mode(pirPin,"in")
#Learning period
print ("learning... " + str(learningPeriod) + " seconds")
time.sleep(learningPeriod) # <1>
while (True):
movement = gpio.read(pirPin) # <2>
if(movement == gpio.HIGH):
print ("Movement detected " + time.ctime())
else:
print ("No movement detected " + time.ctime())
time.sleep(0.3)
if __name__ == "__main__":
main()
| # parallax_pir_reva.py - write to screen when movement detected
# (c) BotBook.com - Karvinen, Karvinen, Valtokari
# 22.9.2017 modified by Vesa Valli
import time
import botbook_gpio as gpio
learningPeriod = 30
def main():
pirPin = 7
gpio.mode(pirPin,"in")
#Learning period
print ("learning... " + str(learningPeriod) + " seconds")
time.sleep(learningPeriod) # <1>
while (True):
movement = gpio.read(pirPin) # <2>
if(movement == gpio.HIGH):
print ("Movement detected " + time.ctime())
else:
print ("No movement detected " + time.ctime())
time.sleep(0.3)
if __name__ == "__main__":
main()
| Python | 0.000004 |
6c17a81685f4f1b24cefb4760b26e9a33298742c | Bump to v1.10.0 | client/__init__.py | client/__init__.py | __version__ = 'v1.10.0'
FILE_NAME = 'ok'
import os
import sys
sys.path.insert(0, '')
# Add directory in which the ok.zip is stored to sys.path.
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
| __version__ = 'v1.9.6'
FILE_NAME = 'ok'
import os
import sys
sys.path.insert(0, '')
# Add directory in which the ok.zip is stored to sys.path.
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
| Python | 0.000001 |
86d59bbcad5d33e9a4cbad473a36972d29ddbaf0 | missing comma | src/rwtypes/writers/activemq/ActivemqWriter.py | src/rwtypes/writers/activemq/ActivemqWriter.py | import datetime
import time
import json
from third.stomp import stomp_sender
from __writer import Writer
class ActivemqWriter(Writer):
def write(self, msg):
try:
headers = {'destination' : self.destination,
'eventtype' : self.eventtype,
'timestamp' : int(time.time()*1000)}
for item in msg:
if isinstance(msg[item], datetime.datetime):
msg[item] = msg[item].isoformat()
body = json.dumps(msg)
stomp_sender.send_message_via_stomp([(self.host, self.port)], headers, body)
return True
except Exception, e:
print e
return False
| import datetime
import time
import json
from third.stomp import stomp_sender
from __writer import Writer
class ActivemqWriter(Writer):
def write(self, msg):
try:
headers = {'destination' : self.destination,
'eventtype' : self.eventtype
'timestamp' : int(time.time()*1000)}
for item in msg:
if isinstance(msg[item], datetime.datetime):
msg[item] = msg[item].isoformat()
body = json.dumps(msg)
stomp_sender.send_message_via_stomp([(self.host, self.port)], headers, body)
return True
except Exception, e:
print e
return False
| Python | 0.999885 |
4b54488dd2b40254f6217d98c37690dcb37cf783 | fix false origin on replies | halibot/halmodule.py | halibot/halmodule.py | from .halobject import HalObject
from .message import Message
class HalModule(HalObject):
def reply(self, msg0=None, **kwargs):
# Create the reply message
body = kwargs.get('body', msg0.body)
mtype = kwargs.get('type', msg0.type)
author = kwargs.get('author', msg0.author)
origin = kwargs.get('origin', self.name)
msg = Message(body=body, type=mtype, author=author, origin=origin)
# Synchronous reply?
if msg0.sync:
self.sync_replies[msg0.uuid].append(msg)
else:
self.send_to(msg, [ msg0.origin ])
def hasPermission(self, msg, perm):
return self._hal.auth.hasPermission(msg.origin, msg.identity, perm)
| from .halobject import HalObject
from .message import Message
class HalModule(HalObject):
def reply(self, msg0=None, **kwargs):
# Create the reply message
body = kwargs.get('body', msg0.body)
mtype = kwargs.get('type', msg0.type)
author = kwargs.get('author', msg0.author)
origin = kwargs.get('origin', msg0.origin)
msg = Message(body=body, type=mtype, author=author, origin=origin)
# Synchronous reply?
if msg0.sync:
self.sync_replies[msg0.uuid].append(msg)
else:
self.send_to(msg, [ msg.origin ])
def hasPermission(self, msg, perm):
return self._hal.auth.hasPermission(msg.origin, msg.identity, perm)
| Python | 0.000003 |
8dd9d4bf58e976ca40bcafa7249ed3140b77ea69 | fix cfg parsing | tf2director.py | tf2director.py | #!/usr/bin/env python3
import os
import sys
from argparse import ArgumentParser
from configparser import ConfigParser
import actions
from tf2server import Tf2Server
def main():
"""
Parse command line options, read config and run desired action.
"""
description = 'tf2director is a script that helps managing multiple Team Fortress 2 server instances.'
parser = ArgumentParser(description=description)
parser.add_argument('server', help='server to be used or "all"', metavar='server')
parser.add_argument('action', choices=['start', 'stop', 'restart', 'console', 'update', 'status'],
help='action to do', metavar='action')
args = parser.parse_args()
home = os.path.expanduser('~')
config_file = os.path.join(home, 'tf2director.ini')
if not os.path.isfile(config_file):
print('Config file missing (' + config_file + ')')
sys.exit(1)
config = ConfigParser()
config.read(config_file)
if 'all' in config:
raise ValueError('A server cannot be named \'all\'!')
if args.server not in config and args.server != 'all':
raise ValueError('Server \'{0}\' is not configured'.format(args.server))
servers = []
if args.server == 'all':
for s in config.sections():
c = config[s]
server = Tf2Server(s, os.path.expanduser(c['path']))
server.ip = c['ip']
server.port = c['port']
server.initial_map = c['initial_map']
server.cfg_file = c['server_config']
server.max_players = c['max_players']
servers.append(server)
else:
c = config[args.server]
path = c['path']
server = Tf2Server(args.server, os.path.expanduser(path))
server.ip = c['ip']
server.port = c['port']
server.initial_map = c['initial_map']
server.cfg_file = c['server_config']
server.max_players = c['max_players']
server.tv_port = int(c['tv_port']) if 'tv_port' in c else int(server.port) + 5
servers.append(server)
try:
if args.action == 'start':
actions.start(servers)
elif args.action == 'stop':
actions.stop(servers)
elif args.action == 'restart':
actions.restart(servers)
elif args.action == 'console':
if len(servers) == 1:
server = servers[0]
server.attach()
elif args.action == 'update':
actions.update(servers)
elif args.action == 'status':
actions.status(servers)
except ValueError as error:
print('{0}'.format(error))
if __name__ == '__main__':
main()
| #!/usr/bin/env python3
import os
import sys
from argparse import ArgumentParser
from configparser import ConfigParser
import actions
from tf2server import Tf2Server
def main():
"""
Parse command line options, read config and run desired action.
"""
description = 'tf2director is a script that helps managing multiple Team Fortress 2 server instances.'
parser = ArgumentParser(description=description)
parser.add_argument('server', help='server to be used or "all"', metavar='server')
parser.add_argument('action', choices=['start', 'stop', 'restart', 'console', 'update', 'status'],
help='action to do', metavar='action')
args = parser.parse_args()
home = os.path.expanduser('~')
config_file = os.path.join(home, 'tf2director.ini')
if not os.path.isfile(config_file):
print('Config file missing (' + config_file + ')')
sys.exit(1)
config = ConfigParser()
config.read(config_file)
if 'all' in config:
raise ValueError('A server cannot be named \'all\'!')
if args.server not in config and args.server != 'all':
raise ValueError('Server \'{0}\' is not configured'.format(args.server))
servers = []
if args.server == 'all':
for s in config.sections():
c = config[s]
server = Tf2Server(s, os.path.expanduser(c['path']))
server.ip = c['ip']
server.port = c['port']
server.initial_map = c['initial_map']
server.cfg_file = c['server_config']
server.max_players = c['max_players']
servers.append(server)
else:
c = config[args.server]
path = c['path']
server = Tf2Server(args.server, os.path.expanduser(path))
server.ip = c['ip']
server.port = c['port']
server.initial_map = c['initial_map']
server.cfg_file = c['server_config']
server.max_players = c['max_players']
server.tv_port = int(c['tv_port']) if 'tv_port' in c else server.port + 5
servers.append(server)
try:
if args.action == 'start':
actions.start(servers)
elif args.action == 'stop':
actions.stop(servers)
elif args.action == 'restart':
actions.restart(servers)
elif args.action == 'console':
if len(servers) == 1:
server = servers[0]
server.attach()
elif args.action == 'update':
actions.update(servers)
elif args.action == 'status':
actions.status(servers)
except ValueError as error:
print('{0}'.format(error))
if __name__ == '__main__':
main()
| Python | 0.000007 |
c802426e1c7e45ed456ad92a8b88ab18fba59aa3 | 更新 modules ELOs 中的 management command 'clone_metadata', 新增函式功能宣告註解 | commonrepo/elos/management/commands/clone_metadata.py | commonrepo/elos/management/commands/clone_metadata.py | # -*- coding: utf-8 -*-
#
# Copyright 2016 edX PDR Lab, National Central University, Taiwan.
#
# http://edxpdrlab.ncu.cc/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Created By: yrchen@ATCity.org
# Maintained By: yrchen@ATCity.org
#
'''
Management command ``clone_metadata`` of ELOs in Common Repo projects.
This command will clone the metadata related with specefic ELOs.
'''
from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from commonrepo.elos.models import ELO, ELOMetadata
class Command(BaseCommand):
help = 'Clone Metadata of ELOs'
def add_arguments(self, parser):
parser.add_argument('--source',
type=int,
help='Build Metadata of specific ELOs')
parser.add_argument('--target',
nargs='+',
type=int,
help='Build Metadata of specific ELOs')
def handle(self, *args, **options):
try:
elo_source = ELO.objects.get(id=options['source'])
except ELO.DoesNotExist:
raise CommandError(
'Source ELO "%s" does not exist' %
options['source'])
if not elo_source.metadata:
raise CommandError(
'Source Metadata of ELO "%s" does not exist' %
elo_source.id)
for target in options['target']:
try:
elo_target = ELO.objects.get(id=target)
except ELO.DoesNotExist:
raise CommandError('ELO "%s" does not exist' % target)
# Delete original metadata
if elo_target.metadata:
elo_target.metadata.delete()
metadata = elo_source.metadata
metadata.pk = None
metadata.save()
elo_target.metadata = metadata
elo_target.save()
self.stdout.write(
'Successfully clone Metadata to target ELO "%s"' %
elo_target.id)
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from commonrepo.elos.models import ELO, ELOMetadata
class Command(BaseCommand):
help = 'Clone Metadata of ELOs'
def add_arguments(self, parser):
parser.add_argument('--source',
type=int,
help='Build Metadata of specific ELOs')
parser.add_argument('--target',
nargs='+',
type=int,
help='Build Metadata of specific ELOs')
def handle(self, *args, **options):
try:
elo_source = ELO.objects.get(id=options['source'])
except ELO.DoesNotExist:
raise CommandError('Source ELO "%s" does not exist' % options['source'])
if not elo_source.metadata:
raise CommandError('Source Metadata of ELO "%s" does not exist' % elo_source.id)
for target in options['target']:
try:
elo_target = ELO.objects.get(id=target)
except ELO.DoesNotExist:
raise CommandError('ELO "%s" does not exist' % target)
# Delete original metadata
if elo_target.metadata:
elo_target.metadata.delete()
metadata = elo_source.metadata
metadata.pk = None
metadata.save()
elo_target.metadata = metadata
elo_target.save()
self.stdout.write('Successfully clone Metadata to target ELO "%s"' % elo_target.id)
| Python | 0 |
c01c97583e11bfe1c41dd41e7b39d19be22fbb7c | use the real paths | tools/build.py | tools/build.py | #!/usr/bin/env python
import os
import subprocess
import sys
# TODO: release/debug
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
build_dir = os.path.join(root, 'out')
def build():
if sys.platform != "win32":
cmd = 'make -C %s' % build_dir
else:
cmd = 'tools\win_build.bat'
print cmd
sys.exit(subprocess.call(cmd, shell=True))
def test():
agent = os.path.join(root, 'out', 'Debug', 'monitoring-agent')
cmd = '%s --zip out/Debug/monitoring-test.zip -e tests -c docs/sample.state' % agent
print cmd
rc = subprocess.call(cmd, shell=True)
sys.exit(rc)
commands = {
'build': build,
'test': test,
}
def usage():
print('Usage: build.py [%s]' % ', '.join(commands.keys()))
sys.exit(1)
if len(sys.argv) != 2:
usage()
ins = sys.argv[1]
if not commands.has_key(ins):
print('Invalid command: %s' % ins)
sys.exit(1)
print('Running %s' % ins)
cmd = commands.get(ins)
cmd()
| #!/usr/bin/env python
import os
import subprocess
import sys
# TODO: release/debug
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
build_dir = os.path.join(root, 'out')
def build():
if sys.platform != "win32":
cmd = 'make -C %s' % build_dir
else:
cmd = 'tools\win_build.bat'
print cmd
sys.exit(subprocess.call(cmd, shell=True))
def test():
agent = os.path.join(root, 'monitoring-agent')
cmd = '%s --zip monitoring-test.zip -e tests -c docs/sample.state' % agent
print cmd
rc = subprocess.call(cmd, shell=True)
sys.exit(rc)
commands = {
'build': build,
'test': test,
}
def usage():
print('Usage: build.py [%s]' % ', '.join(commands.keys()))
sys.exit(1)
if len(sys.argv) != 2:
usage()
ins = sys.argv[1]
if not commands.has_key(ins):
print('Invalid command: %s' % ins)
sys.exit(1)
print('Running %s' % ins)
cmd = commands.get(ins)
cmd()
| Python | 0.000017 |
1edac6151b4a730039e0782a5cb9777fe7f4a21d | Implement basic teste | code/web/scisynergy_flask/tests/test_basic.py | code/web/scisynergy_flask/tests/test_basic.py | import os
import unittest
from scisynergy_flask import app
class BasicTests(unittest.TestCase):
def setUp(self):
self.app = app.test_client()
self.app.testing = True
def test_main_page(self):
response = self.app.get('/', follow_redirects=True)
self.assertEqual(response.status_code, 200)
def test_maintenance(self):
response = self.app.get('/maintenance')
self.assertEqual(response.status_code, 200)
if __name__ == "__main__":
unittest.main() | import os
import unittest
from scisynergy import app
class BasicTests(unittest.TestCase):
def test_main_page(self):
response = self.app.get('/', follow_redirects=True)
self.assertEqual(response.status_code, 200)
| Python | 0.02115 |
20d1ad27c85ecc7dcfbfb30abd7a68be10db2a33 | Change showIt=False to pass test on Travis | simpegEM/Tests/test_forward_EMproblem.py | simpegEM/Tests/test_forward_EMproblem.py | import unittest
from SimPEG import *
import simpegEM as EM
from scipy.constants import mu_0
from simpegEM.Utils.Ana import hzAnalyticDipoleT
import matplotlib.pyplot as plt
class TDEM_bTests(unittest.TestCase):
def setUp(self):
cs = 10.
ncx = 15
ncy = 10
npad = 20
hx = Utils.meshTensors(((0,cs), (ncx,cs), (npad,cs)))
hy = Utils.meshTensors(((npad,cs), (ncy,cs), (npad,cs)))
mesh = Mesh.Cyl1DMesh([hx,hy], -hy.sum()/2)
active = mesh.vectorCCz<0.
model = Model.ActiveModel(mesh, active, -8, nC=mesh.nCz)
model = Model.ComboModel(mesh,
[Model.LogModel, Model.Vertical1DModel, model])
opts = {'txLoc':0.,
'txType':'VMD_MVP',
'rxLoc':np.r_[30., 0.],
'rxType':'bz',
'timeCh':np.logspace(-4,-2.5, 21),
}
self.dat = EM.TDEM.SurveyTDEM1D(**opts)
self.prb = EM.TDEM.ProblemTDEM_b(model)
self.prb.setTimes([1e-6, 5e-6, 1e-5, 5e-5, 1e-4, 5e-4], [40, 40, 40, 40, 40, 40])
self.sigma = np.ones(mesh.nCz)*1e-8
self.sigma[mesh.vectorCCz<0] = 1e-3
self.sigma = np.log(self.sigma[active])
self.showIt = False
self.prb.pair(self.dat)
def test_analitic_b(self):
bz_calc = self.dat.dpred(self.sigma)
bz_ana = mu_0*hzAnalyticDipoleT(self.dat.rxLoc[0], self.prb.times, np.exp(self.sigma[0]))
ind = self.prb.times > 1e-5
diff = np.linalg.norm(bz_calc[ind].flatten() - bz_ana[ind].flatten())/np.linalg.norm(bz_ana[ind].flatten())
if self.showIt == True:
plt.loglog(self.prb.times[bz_calc>0], bz_calc[bz_calc>0], 'b', self.prb.times[bz_calc<0], -bz_calc[bz_calc<0], 'b--')
plt.loglog(self.prb.times, abs(bz_ana), 'b*')
plt.xlim(1e-5, 1e-2)
plt.show()
print diff
self.assertTrue(diff < 0.10)
if __name__ == '__main__':
unittest.main()
| import unittest
from SimPEG import *
import simpegEM as EM
from scipy.constants import mu_0
from simpegEM.Utils.Ana import hzAnalyticDipoleT
import matplotlib.pyplot as plt
class TDEM_bTests(unittest.TestCase):
def setUp(self):
cs = 10.
ncx = 15
ncy = 10
npad = 20
hx = Utils.meshTensors(((0,cs), (ncx,cs), (npad,cs)))
hy = Utils.meshTensors(((npad,cs), (ncy,cs), (npad,cs)))
mesh = Mesh.Cyl1DMesh([hx,hy], -hy.sum()/2)
active = mesh.vectorCCz<0.
model = Model.ActiveModel(mesh, active, -8, nC=mesh.nCz)
model = Model.ComboModel(mesh,
[Model.LogModel, Model.Vertical1DModel, model])
opts = {'txLoc':0.,
'txType':'VMD_MVP',
'rxLoc':np.r_[30., 0.],
'rxType':'bz',
'timeCh':np.logspace(-4,-2.5, 21),
}
self.dat = EM.TDEM.SurveyTDEM1D(**opts)
self.prb = EM.TDEM.ProblemTDEM_b(model)
self.prb.setTimes([1e-6, 5e-6, 1e-5, 5e-5, 1e-4, 5e-4], [40, 40, 40, 40, 40, 40])
self.sigma = np.ones(mesh.nCz)*1e-8
self.sigma[mesh.vectorCCz<0] = 1e-3
self.sigma = np.log(self.sigma[active])
self.showIt = True
self.prb.pair(self.dat)
def test_analitic_b(self):
bz_calc = self.dat.dpred(self.sigma)
bz_ana = mu_0*hzAnalyticDipoleT(self.dat.rxLoc[0], self.prb.times, np.exp(self.sigma[0]))
ind = self.prb.times > 1e-5
diff = np.linalg.norm(bz_calc[ind].flatten() - bz_ana[ind].flatten())/np.linalg.norm(bz_ana[ind].flatten())
if self.showIt == True:
plt.loglog(self.prb.times[bz_calc>0], bz_calc[bz_calc>0], 'b', self.prb.times[bz_calc<0], -bz_calc[bz_calc<0], 'b--')
plt.loglog(self.prb.times, abs(bz_ana), 'b*')
plt.xlim(1e-5, 1e-2)
plt.show()
print diff
self.assertTrue(diff < 0.10)
if __name__ == '__main__':
unittest.main()
| Python | 0.000004 |
1e5345a786b24d341cfd99c3334c3122e3e5a91b | Update simulate method in Framework | simulation/MappingSimulationFrameWork.py | simulation/MappingSimulationFrameWork.py | from ResourceGetter import ResouceGetter
from RequestGenerator import TestReqGen
from RequestGenerator import SimpleReqGen
from RequestGenerator import MultiReqGen
from AbstractOrchestrator import *
import sys
#sys.path.append(./RequestGenerator)
#from escape.mapping.simulation import ResourceGetter
#from escape.mapping.simulation import RequestGenerator
class MappingSolutionFramework:
__discrete_simulation = True
__resource_getter = None
__request_generator = None
__orchestrator_adaptor = None
__remaining_request_lifetimes = list()
def __init__(self, simulation_type):
self.__discreate_simulation = simulation_type
def __clean_expired_requests(self,time,service_graph):
# Delete expired SCs
for sc in self.__remaining_request_lifetimes:
if sc.dead_time < time:
# Delete mapping
for nf in sc.SC.nfs:
service_graph.del_node(nf)
# refresh the active SCs list
self.__remaining_request_lifetimes.remove(sc)
def simulate(self,topology_type,request_type,orchestrator_type,sim_end,discrete_sim):
time = 0
mapping_level = 1
#Get resource
resource_getter = ResouceGetter()
resource_graph = resource_getter.GetNFFG(topology_type)
#Simulation cycle
sim_running = True
sim_iter = 0
while sim_running:
#Get request
#TODO: EZT MEG MODOSITANI AZ OSZTALYDIAGRAM SZERINT
if request_type == "test":
request_generator = TestReqGen()
elif request_type == "simple":
request_generator = SimpleReqGen()
elif request_type == "multi":
request_generator = MultiReqGen()
else:
#TODO: create exception
pass
service_graph, life_time = request_generator.get_request(resource_graph,mapping_level)
#Discrete working
if discrete_sim:
time += 1
#Get Orchestrator
if orchestrator_type == "online":
orchestrator_adaptor = OnlineOrchestrator()
elif orchestrator_type == "offline":
orchestrator_adaptor = OfflineOrchestrator()
elif orchestrator_type == "hybrid":
orchestrator_adaptor = HybridOrchestrator()
else:
# TODO: create exception
pass
#Synchronous MAP call
orchestrator_adaptor.MAP(service_graph,resource_graph)
#Adding successfully mapped request to the remaining_request_lifetimes
# TODO: ELLENORIZNI, HOGY MAPPING SIKERES-E
service_life_element = {"dead_time":time+life_time,"SG":service_graph}
self.__remaining_request_lifetimes.append(service_life_element)
#Remove expired service graph requests
self.__clean_expired_requests()
#Indiscrete working
else:
#TODO: Create this simulation type
pass
#Increase simulation iteration
if (sim_iter < sim_end):
sim_iter += 1
else:
sim_running = False
if __name__ == "__main__":
test = MappingSolutionFramework(resource_graph,request) #,orch_adaptor)
| from ResourceGetter import ResouceGetter
from RequestGenerator import TestReqGen
from RequestGenerator import SimpleReqGen
from RequestGenerator import MultiReqGen
from AbstractOrchestrator
import sys
#sys.path.append(./RequestGenerator)
#from escape.mapping.simulation import ResourceGetter
#from escape.mapping.simulation import RequestGenerator
class MappingSolutionFramework:
#__request_generator = None
remaining_request_lifetimes = []
def __init__(self, resource_getter, request_generator): #, orchestrator_adaptor):
self.__resource_getter = resource_getter
self.__request_generator = request_generator
#self.__orchestrator_adaptor = orchestrator_adaptor
def simulate(self,topology_type,request_type,sim_end,discrete_sim):
#Get resource
resource_getter = ResouceGetter()
resource_graph = resource_getter.GetNFFG(topology_type)
#Simulation cycle
sim_running = True
sim_iter = 0
while sim_running:
#Get request
#TODO: EZT MEG MODOSITANI AZ OSZTALYDIAGRAM SZERINT
if request_type == "test":
request_generator = TestReqGen()
elif request_type == "simple":
request_generator = SimpleReqGen()
elif request_type == "multi":
request_generator = MultiReqGen()
else:
#TODO: create exception
pass
service_graph = request_generator.get_request()
#Discrete working
if discrete_sim:
#Indiscrete working
else:
pass
#Increase simulation iteration
if (sim_iter < sim_end):
sim_iter += 1
else:
sim_running = False
if __name__ == "__main__":
#Start simulate:
resource_graph = ResouceGetter()
asd = resource_graph.GetNFFG('pico')
request = RequestGenerator()
# orch_adaptor = OrchestratorAdaptor()
test = MappingSolutionFramework(resource_graph,request) #,orch_adaptor)
| Python | 0 |
492ab05637b92f2decbd8fe60e25783ce63f9733 | remove ignore from staging | server/settings/staging.py | server/settings/staging.py | """ Do not put secrets in this file. This file is public.
For staging environment (Using Dokku)
"""
import os
import sys
import binascii
from server.settings import RAVEN_IGNORE_EXCEPTIONS
default_secret = binascii.hexlify(os.urandom(24))
ENV = 'staging'
PREFERRED_URL_SCHEME = 'https'
SECRET_KEY = os.getenv('SECRET_KEY', default_secret)
CACHE_TYPE = 'simple'
DEBUG = False
ASSETS_DEBUG = False
TESTING_LOGIN = False
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_TRACK_MODIFICATIONS = False
db_url = os.getenv('DATABASE_URL')
if db_url:
db_url = db_url.replace('mysql://', 'mysql+pymysql://')
db_url += "&sql_mode=STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION"
else:
db_url = os.getenv('SQLALCHEMY_URL', 'sqlite:///../oksqlite.db')
SQLALCHEMY_DATABASE_URI = db_url
WTF_CSRF_CHECK_DEFAULT = True
WTF_CSRF_ENABLED = True
try:
os.environ["GOOGLE_ID"]
os.environ["GOOGLE_SECRET"]
except KeyError:
print("Please set the google login variables. source secrets.sh")
sys.exit(1)
GOOGLE = {
'consumer_key': os.environ.get('GOOGLE_ID'),
'consumer_secret': os.environ.get('GOOGLE_SECRET')
}
SENDGRID_AUTH = {
'user': os.environ.get("SENDGRID_USER"),
'key': os.environ.get("SENDGRID_KEY")
}
| """ Do not put secrets in this file. This file is public.
For staging environment (Using Dokku)
"""
import os
import sys
import binascii
from server.settings import RAVEN_IGNORE_EXCEPTIONS
default_secret = binascii.hexlify(os.urandom(24))
ENV = 'staging'
PREFERRED_URL_SCHEME = 'https'
SECRET_KEY = os.getenv('SECRET_KEY', default_secret)
CACHE_TYPE = 'simple'
DEBUG = False
ASSETS_DEBUG = False
TESTING_LOGIN = False
DEBUG_TB_INTERCEPT_REDIRECTS = False
SQLALCHEMY_TRACK_MODIFICATIONS = False
db_url = os.getenv('DATABASE_URL')
if db_url:
db_url = db_url.replace('mysql://', 'mysql+pymysql://')
db_url += "&sql_mode=STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION"
else:
db_url = os.getenv('SQLALCHEMY_URL', 'sqlite:///../oksqlite.db')
SQLALCHEMY_DATABASE_URI = db_url
WTF_CSRF_CHECK_DEFAULT = True
WTF_CSRF_ENABLED = True
RAVEN_IGNORE_EXCEPTIONS =['werkzeug.exceptions.Forbidden', 'werkzeug.exceptions.NotFound',
'werkzeug.exceptions.Unauthorized']
try:
os.environ["GOOGLE_ID"]
os.environ["GOOGLE_SECRET"]
except KeyError:
print("Please set the google login variables. source secrets.sh")
sys.exit(1)
GOOGLE = {
'consumer_key': os.environ.get('GOOGLE_ID'),
'consumer_secret': os.environ.get('GOOGLE_SECRET')
}
SENDGRID_AUTH = {
'user': os.environ.get("SENDGRID_USER"),
'key': os.environ.get("SENDGRID_KEY")
}
| Python | 0.000001 |
1569633e1e73bbfb11a2cc34a1ed5239fdc58b1f | load extent data as json from form | casework/forms.py | casework/forms.py | # -*- coding: utf-8 -*-
from flask_wtf import Form
from wtforms import StringField, RadioField, DecimalField, HiddenField, TextAreaField, FieldList, DateField, FormField
from wtforms.validators import DataRequired, Optional
from casework.validators import validate_postcode, validate_price_paid, validate_extent, format_postcode
import simplejson
class ChargeForm(Form):
"""
Charge Form
"""
charge_date = DateField('Charge date', format='%d-%m-%Y', validators=[DataRequired()])
chargee_name = StringField('Company name', validators=[DataRequired()])
chargee_registration_number = StringField('Company registration number', validators=[DataRequired()])
chargee_address = TextAreaField('Address', validators=[DataRequired()])
class RegistrationForm(Form):
"""
The names of the variables here MUST match the name attribute of the fields
in the index.html for WTForms to work
Nope: you just have to use the form object you pass to the template and use
the form object to do the work for you
"""
title_number = HiddenField('Title Number')
first_name1 = StringField('First name 1', validators=[DataRequired()])
surname1 = StringField('Surname 1', validators=[DataRequired()])
first_name2 = StringField('First name 2')
surname2 = StringField('Surname 2')
house_number = StringField('House number', validators=[DataRequired()])
road = StringField('Road', validators=[DataRequired()])
town = StringField('Town', validators=[DataRequired()])
postcode = StringField('Postcode', validators=[DataRequired(), validate_postcode])
property_tenure = RadioField(
'Property tenure',
choices=[
('Freehold', 'Freehold'),
('Leasehold', 'Leasehold')
]
)
property_class = RadioField(
'Property class',
choices=[
('Absolute', 'Absolute'),
('Good', 'Good'),
('Qualified', 'Qualified'),
('Possessory', 'Possessory')
]
)
price_paid = DecimalField(
'Price paid (£)',
validators=[Optional(), validate_price_paid],
places=2,
rounding=None)
charges = FieldList(FormField(ChargeForm), min_entries=0)
charges_template = FieldList(FormField(ChargeForm), min_entries=1)
extent = TextAreaField('GeoJSON', validators=[DataRequired(), validate_extent])
def validate(self):
old_form_charges_template = self.charges_template
del self.charges_template
form_is_validated = super(RegistrationForm, self).validate()
self.charges_template = old_form_charges_template
return form_is_validated
def to_json(self):
arr = []
for charge in self['charges'].data:
dt = charge.pop('charge_date')
print "xXX", dt
charge['charge_date'] = str(dt)
arr.append(charge)
data = simplejson.dumps({
"title_number": self['title_number'].data,
"proprietors": [
{
"first_name": self['first_name1'].data,
"last_name": self['surname1'].data
},
{
"first_name": self['first_name2'].data,
"last_name": self['surname2'].data
}
],
"property": {
"address": {
"house_number": self['house_number'].data,
"road": self['road'].data,
"town": self['town'].data,
"postcode": format_postcode(self['postcode'].data)
},
"tenure": self['property_tenure'].data,
"class_of_title": self['property_class'].data
},
"payment": {
"price_paid": self['price_paid'].data,
"titles": [
self['title_number'].data
]
},
"charges": arr,
"extent": simplejson.loads(self['extent'].data)
})
return data
| # -*- coding: utf-8 -*-
from flask_wtf import Form
from wtforms import StringField, RadioField, DecimalField, HiddenField, TextAreaField, FieldList, DateField, FormField
from wtforms.validators import DataRequired, Optional
from casework.validators import validate_postcode, validate_price_paid, validate_extent, format_postcode
import simplejson
class ChargeForm(Form):
"""
Charge Form
"""
charge_date = DateField('Charge date', format='%d-%m-%Y', validators=[DataRequired()])
chargee_name = StringField('Company name', validators=[DataRequired()])
chargee_registration_number = StringField('Company registration number', validators=[DataRequired()])
chargee_address = TextAreaField('Address', validators=[DataRequired()])
class RegistrationForm(Form):
"""
The names of the variables here MUST match the name attribute of the fields
in the index.html for WTForms to work
Nope: you just have to use the form object you pass to the template and use
the form object to do the work for you
"""
title_number = HiddenField('Title Number')
first_name1 = StringField('First name 1', validators=[DataRequired()])
surname1 = StringField('Surname 1', validators=[DataRequired()])
first_name2 = StringField('First name 2')
surname2 = StringField('Surname 2')
house_number = StringField('House number', validators=[DataRequired()])
road = StringField('Road', validators=[DataRequired()])
town = StringField('Town', validators=[DataRequired()])
postcode = StringField('Postcode', validators=[DataRequired(), validate_postcode])
property_tenure = RadioField(
'Property tenure',
choices=[
('Freehold', 'Freehold'),
('Leasehold', 'Leasehold')
]
)
property_class = RadioField(
'Property class',
choices=[
('Absolute', 'Absolute'),
('Good', 'Good'),
('Qualified', 'Qualified'),
('Possessory', 'Possessory')
]
)
price_paid = DecimalField(
'Price paid (£)',
validators=[Optional(), validate_price_paid],
places=2,
rounding=None)
charges = FieldList(FormField(ChargeForm), min_entries=0)
charges_template = FieldList(FormField(ChargeForm), min_entries=1)
extent = TextAreaField('GeoJSON', validators=[DataRequired(), validate_extent])
def validate(self):
old_form_charges_template = self.charges_template
del self.charges_template
form_is_validated = super(RegistrationForm, self).validate()
self.charges_template = old_form_charges_template
return form_is_validated
def to_json(self):
arr = []
for charge in self['charges'].data:
dt = charge.pop('charge_date')
print "xXX", dt
charge['charge_date'] = str(dt)
arr.append(charge)
data = simplejson.dumps({
"title_number": self['title_number'].data,
"proprietors": [
{
"first_name": self['first_name1'].data,
"last_name": self['surname1'].data
},
{
"first_name": self['first_name2'].data,
"last_name": self['surname2'].data
}
],
"property": {
"address": {
"house_number": self['house_number'].data,
"road": self['road'].data,
"town": self['town'].data,
"postcode": format_postcode(self['postcode'].data)
},
"tenure": self['property_tenure'].data,
"class_of_title": self['property_class'].data
},
"payment": {
"price_paid": self['price_paid'].data,
"titles": [
self['title_number'].data
]
},
"charges": arr,
"extent": self['extent'].data
})
return data
| Python | 0 |
661fa0d89d66fe012165ee7553c65e1e73356763 | Fix pylint | batchflow/tests/filesindex_test.py | batchflow/tests/filesindex_test.py | """ Tests for FilesIndex class. """
# pylint: disable=missing-docstring
# pylint: disable=protected-access
# pylint: disable=redefined-outer-name
import os
import shutil
from contextlib import ExitStack as does_not_raise
import pytest
import numpy as np
from batchflow import FilesIndex, DatasetIndex
@pytest.fixture(scope='module')
def files_setup(request):
""" Fixture that creates files for tests """
path = 'fi_test_tmp'
folder1 = 'folder'
folder2 = 'other_folder'
folders = [path, os.path.join(path, folder1), os.path.join(path, folder2)]
for folder in folders:
os.mkdir(folder)
for i in range(3):
open(os.path.join(folder, 'file_{}.txt'.format(i)), 'w').close()
def fin():
shutil.rmtree(path)
request.addfinalizer(fin)
return path, folder1, folder2
@pytest.mark.parametrize('path, expectation', [['', does_not_raise()],
[[], pytest.raises(ValueError)],
[['', ''], does_not_raise()]])
def test_build_index_empty(path, expectation):
with expectation:
findex = FilesIndex(path=path)
assert len(findex) == 0
assert isinstance(findex.index, np.ndarray)
@pytest.mark.parametrize('path, error', [(1, TypeError),
([2, 3], TypeError),
([None], TypeError)])
def test_build_index_non_path(path, error):
""" `path` should be string or list of strings """
with pytest.raises(error):
FilesIndex(path=path)
def test_build_no_ext(files_setup):
path, _, _ = files_setup
path = os.path.join(path, '*')
findex = FilesIndex(path=path, no_ext=True)
assert len(findex) == 3
assert os.path.splitext(findex.indices[0])[1] == ''
def test_build_dirs(files_setup):
path, folder1, _ = files_setup
path = os.path.join(path, '*')
findex = FilesIndex(path=path, dirs=True, sort=True)
assert len(findex) == 2
assert findex.indices[0] == os.path.split(folder1)[1]
def test_same_name_in_differen_folders(files_setup):
path, _, _ = files_setup
path = os.path.join(path, '*', '*')
with pytest.raises(ValueError):
FilesIndex(path=path)
def test_build_from_index(files_setup):
path, _, _ = files_setup
files = ['file_{}.txt'.format(i) for i in range(3)]
paths = dict(zip(files, [os.path.join(path, f) for f in files]))
dsindex = DatasetIndex(files)
findex = FilesIndex(index=dsindex, paths=paths, dirs=False)
assert len(dsindex) == len(findex)
def test_get_full_path(files_setup):
path, _, _ = files_setup
findex = FilesIndex(path=os.path.join(path, '*'))
file_name = 'file_1.txt'
full_path = findex.get_fullpath(file_name)
assert os.path.dirname(full_path) == path
assert os.path.basename(full_path) == file_name
@pytest.mark.parametrize('index', [DatasetIndex(['file_1.txt']), ['file_1.txt']])
def test_create_subset(files_setup, index):
path, _, _ = files_setup
findex = FilesIndex(path=os.path.join(path, '*'))
new_findex = findex.create_subset(index)
file_name = 'file_1.txt'
full_path = new_findex.get_fullpath(file_name)
assert len(new_findex) == 1
assert isinstance(new_findex.indices, np.ndarray)
assert os.path.dirname(full_path) == path
assert os.path.basename(full_path) == file_name
| """ Tests for FilesIndex class. """
# pylint: disable=missing-docstring
# pylint: disable=protected-access
# pylint: disable=redefined-outer-name
import os
import shutil
from contextlib import ExitStack as does_not_raise
import pytest
import numpy as np
from batchflow import FilesIndex, DatasetIndex
@pytest.fixture(scope='module')
def files_setup(request):
""" Fixture that creates files for tests """
path = 'fi_test_tmp'
folder1 = 'folder'
folder2 = 'other_folder'
folders = [path, os.path.join(path, folder1), os.path.join(path, folder2)]
for folder in folders:
os.mkdir(folder)
for i in range(3):
open(os.path.join(folder, 'file_{}.txt'.format(i)), 'w').close()
def fin():
shutil.rmtree(path)
request.addfinalizer(fin)
return path, folder1, folder2
@pytest.mark.parametrize('path, expectation', [['', does_not_raise()],
[[], pytest.raises(ValueError)],
[['', ''], does_not_raise()]
])
def test_build_index_empty(path, expectation):
with expectation:
findex = FilesIndex(path=path)
assert len(findex) == 0
assert isinstance(findex.index, np.ndarray)
@pytest.mark.parametrize('path, error', [(1, TypeError),
([2, 3], TypeError),
([None], TypeError)])
def test_build_index_non_path(path, error):
""" `path` should be string or list of strings """
with pytest.raises(error):
FilesIndex(path=path)
def test_build_no_ext(files_setup):
path, _, _ = files_setup
path = os.path.join(path, '*')
findex = FilesIndex(path=path, no_ext=True)
assert len(findex) == 3
assert os.path.splitext(findex.indices[0])[1] == ''
def test_build_dirs(files_setup):
path, folder1, _ = files_setup
path = os.path.join(path, '*')
findex = FilesIndex(path=path, dirs=True, sort=True)
assert len(findex) == 2
assert findex.indices[0] == os.path.split(folder1)[1]
def test_same_name_in_differen_folders(files_setup):
path, _, _ = files_setup
path = os.path.join(path, '*', '*')
with pytest.raises(ValueError):
FilesIndex(path=path)
def test_build_from_index(files_setup):
path, _, _ = files_setup
files = ['file_{}.txt'.format(i) for i in range(3)]
paths = dict(zip(files, [os.path.join(path, f) for f in files]))
dsindex = DatasetIndex(files)
findex = FilesIndex(index=dsindex, paths=paths, dirs=False)
assert len(dsindex) == len(findex)
def test_get_full_path(files_setup):
path, _, _ = files_setup
findex = FilesIndex(path=os.path.join(path, '*'))
file_name = 'file_1.txt'
full_path = findex.get_fullpath(file_name)
assert os.path.dirname(full_path) == path
assert os.path.basename(full_path) == file_name
@pytest.mark.parametrize('index', [DatasetIndex(['file_1.txt']), ['file_1.txt']])
def test_create_subset(files_setup, index):
path, _, _ = files_setup
findex = FilesIndex(path=os.path.join(path, '*'))
new_findex = findex.create_subset(index)
file_name = 'file_1.txt'
full_path = new_findex.get_fullpath(file_name)
assert len(new_findex) == 1
assert isinstance(new_findex.indices, np.ndarray)
assert os.path.dirname(full_path) == path
assert os.path.basename(full_path) == file_name
| Python | 0.000099 |
352583af500746b431d46d7efc3a0d3f931b43a0 | Fix context processors | skcodeonlinetester/context_processors.py | skcodeonlinetester/context_processors.py | """
Extra context processors for the SkCodeOnlineTester app.
"""
from django.utils.translation import ugettext_lazy as _
from django.contrib.sites.shortcuts import get_current_site
def app_constants(request):
"""
Constants context processor.
:param request: the current request.
:return: All constants for the app.
"""
site = get_current_site(request)
return {
'APP': {
'TITLE': _('PySkCode test console'),
'AUTHOR': 'Fabien Batteix',
'COPYRIGHT': _('TamiaLab 2016'),
'DESCRIPTION': _('Test console for the PySkCode project.'),
'GOOGLE_SITE_VERIFICATION_CODE': '',
'TWITTER_USERNAME': 'skywodd',
'TWITTER_ACCOUNT_ID': '250273994',
'FACEBOOK_URL': 'https://www.facebook.com/fabien.batteix',
},
'SITE': {
'NAME': site.name,
'DOMAIN': site.domain,
'PROTO': 'https' if request.is_secure() else 'http'
}
}
| """
Extra context processors for the SkCodeOnlineTester app.
"""
from django.utils.translation import ugettext_lazy as _
from django.contrib.sites.shortcuts import get_current_site
def app_constants(request):
"""
Constants context processor.
:param request: the current request.
:return: All constants for the app.
"""
site = get_current_site(request)
return {
'APP': {
'TITLE': _('Test console for the PySkCode project'),
'TITLE_SHORT': _('PySkCode test console'),
'AUTHOR': 'Fabien Batteix',
'COPYRIGHT': 'TamiaLab 2016',
'DESCRIPTION': _('Test console for the PySkCode project.'),
'GOOGLE_SITE_VERIFICATION_CODE': '',
'TWITTER_USERNAME': 'skywodd',
'TWITTER_ACCOUNT_ID': '250273994',
'FACEBOOK_URL': 'https://www.facebook.com/fabien.batteix',
},
'SITE': {
'NAME': site.name,
'DOMAIN': site.domain,
'PROTO': 'https' if request.is_secure() else 'http'
}
}
| Python | 0.024871 |
75926fe8be6f47287561200a0d6e47cad5c51082 | Update tokenizers.py | cobe/tokenizers.py | cobe/tokenizers.py | # Copyright (C) 2010 Peter Teichman
import re
import Stemmer
import types
class MegaHALTokenizer:
"""A traditional MegaHAL style tokenizer. This considers any of these
to be a token:
* one or more consecutive alpha characters (plus apostrophe)
* one or more consecutive numeric characters
* one or more consecutive punctuation/space characters (not apostrophe)
This tokenizer ignores differences in capitalization."""
def split(self, phrase):
if len(phrase) == 0:
return []
# add ending punctuation if it is missing
if phrase[-1] not in ".!?":
phrase = phrase + "."
words = re.findall("([A-Z']+|[0-9]+|[^A-Z'0-9]+)", phrase.upper(),
re.UNICODE)
return words
def join(self, words):
"""Capitalize the first alpha character in the reply and the
first alpha character that follows one of [.?!] and a
space."""
chars = list(u"".join(words))
start = True
for i in xrange(len(chars)):
char = chars[i]
if char.isalpha():
if start:
chars[i] = char.upper()
else:
chars[i] = char.lower()
start = False
else:
if i > 2 and chars[i - 1] in ".?!" and char.isspace():
start = True
return u"".join(chars)
class CobeTokenizer:
    """Improved MegaHAL-style tokenizer.
    Token classes:
      * runs of Unicode word characters (plus apostrophe and dash)
      * runs of Unicode non-word characters, possibly with internal
        whitespace
      * the whitespace between word or non-word tokens
      * an HTTP url: [word]: followed by a run of non-space characters
    Runs of space characters collapse to a single space token; case is
    preserved, so foo, Foo and FOO are distinct tokens.
    """
    def __init__(self):
        # Hyphen counts as a word character so hyphenated words
        # (hy-phen) stay whole, but it remains valid inside punctuation
        # runs so an emoticon like :-( is still a single token.
        self.regex = re.compile("(\w+:\S+"               # urls
                                "|[\w'-]+"               # words
                                "|[^\w\s][^\w]*[^\w\s]"  # multiple punctuation
                                "|[^\w\s]"               # a single punctuation character
                                "|\s+)",                 # whitespace
                                re.UNICODE)
    def split(self, phrase):
        """Tokenize *phrase*; outer whitespace is dropped and internal
        whitespace runs collapse to one space."""
        # Stripping the ends keeps the brain's root node from growing
        # edges that have has_space set.
        phrase = phrase.strip()
        if not phrase:
            return []
        tokens = self.regex.findall(phrase)
        return [u" " if token[0] == " " and len(token) > 1 else token
                for token in tokens]
    def join(self, words):
        """Concatenate tokens back into a single unicode string."""
        return u"".join(words)
class CobeStemmer:
    """Snowball-based stemmer with special handling for emoticons."""
    def __init__(self, name):
        # PyStemmer's Snowball bindings; *name* selects the language.
        self.stemmer = Stemmer.Stemmer(name)
    def stem(self, token):
        """Return a stem for *token*.
        Word tokens stem lowercase so replies may vary the case of
        input words while still matching; tokens without any word
        character go through stem_nonword().
        """
        if not re.search("\w", token, re.UNICODE):
            return self.stem_nonword(token)
        return self.stemmer.stemWord(token.lower())
    def stem_nonword(self, token):
        # Collapse common smile/frown emoticon variants down to :) and
        # :( (other non-word tokens fall through and return None).
        if re.search(":-?[ \)]*\)", token):
            return ":)"
        if re.search(":-?[' \(]*\(", token):
            return ":("
| # Copyright (C) 2010 Peter Teichman
import re
import Stemmer
import types
class MegaHALTokenizer:
"""A traditional MegaHAL style tokenizer. This considers any of these
to be a token:
* one or more consecutive alpha characters (plus apostrophe)
* one or more consecutive numeric characters
* one or more consecutive punctuation/space characters (not apostrophe)
This tokenizer ignores differences in capitalization."""
def split(self, phrase):
if type(phrase) != types.UnicodeType:
raise TypeError("Input must be Unicode")
if len(phrase) == 0:
return []
# add ending punctuation if it is missing
if phrase[-1] not in ".!?":
phrase = phrase + "."
words = re.findall("([A-Z']+|[0-9]+|[^A-Z'0-9]+)", phrase.upper(),
re.UNICODE)
return words
def join(self, words):
"""Capitalize the first alpha character in the reply and the
first alpha character that follows one of [.?!] and a
space."""
chars = list(u"".join(words))
start = True
for i in xrange(len(chars)):
char = chars[i]
if char.isalpha():
if start:
chars[i] = char.upper()
else:
chars[i] = char.lower()
start = False
else:
if i > 2 and chars[i - 1] in ".?!" and char.isspace():
start = True
return u"".join(chars)
class CobeTokenizer:
"""A tokenizer that is somewhat improved from MegaHAL. These are
considered tokens:
* one or more consecutive Unicode word characters (plus apostrophe and dash)
* one or more consecutive Unicode non-word characters, possibly with
internal whitespace
* the whitespace between word or non-word tokens
* an HTTP url, [word]: followed by any run of non-space characters.
This tokenizer collapses multiple spaces in a whitespace token into a
single space character.
It preserves differences in case. foo, Foo, and FOO are different
tokens."""
def __init__(self):
# Add hyphen to the list of possible word characters, so hyphenated
# words become one token (e.g. hy-phen). But don't remove it from
# the list of non-word characters, so if it's found entirely within
# punctuation it's a normal non-word (e.g. :-( )
self.regex = re.compile("(\w+:\S+" # urls
"|[\w'-]+" # words
"|[^\w\s][^\w]*[^\w\s]" # multiple punctuation
"|[^\w\s]" # a single punctuation character
"|\s+)", # whitespace
re.UNICODE)
def split(self, phrase):
if type(phrase) != types.UnicodeType:
raise TypeError("Input must be Unicode")
# Strip leading and trailing whitespace. This might not be the
# correct choice long-term, but in the brain it prevents edges
# from the root node that have has_space set.
phrase = phrase.strip()
if len(phrase) == 0:
return []
tokens = self.regex.findall(phrase)
# collapse runs of whitespace into a single space
space = u" "
for i, token in enumerate(tokens):
if token[0] == " " and len(token) > 1:
tokens[i] = space
return tokens
def join(self, words):
return u"".join(words)
class CobeStemmer:
def __init__(self, name):
# use the PyStemmer Snowball stemmer bindings
self.stemmer = Stemmer.Stemmer(name)
def stem(self, token):
if not re.search("\w", token, re.UNICODE):
return self.stem_nonword(token)
# Don't preserve case when stemming, i.e. create lowercase stems.
# This will allow us to create replies that switch the case of
# input words, but still generate the reply in context with the
# generated case.
stem = self.stemmer.stemWord(token.lower())
return stem
def stem_nonword(self, token):
# Stem common smile and frown emoticons down to :) and :(
if re.search(":-?[ \)]*\)", token):
return ":)"
if re.search(":-?[' \(]*\(", token):
return ":("
| Python | 0.000001 |
447b0bb977f050b904d36cb44aabe34cb03b87af | fix notation | chainer/functions/array/reshape.py | chainer/functions/array/reshape.py | from chainer import function
from chainer.utils import type_check
def _count_unknown_dims(shape):
cnt = 0
for dim in shape:
cnt += dim < 0
return cnt
class Reshape(function.Function):
    """Reshapes an input array without copy."""
    def __init__(self, shape):
        # At most one target dimension may be negative (-1); that one is
        # inferred from the input's total element count.
        cnt = _count_unknown_dims(shape)
        assert cnt == 0 or cnt == 1
        self.shape = shape
    def check_type_forward(self, in_types):
        # Exactly one input array.
        type_check.expect(
            in_types.size() == 1,
        )
        x_type, = in_types
        cnt = _count_unknown_dims(self.shape)
        if cnt == 0:
            # Fully specified target shape: element counts must match.
            type_check.expect(
                type_check.prod(x_type.shape) == type_check.prod(self.shape))
        else:
            # One unknown dimension: the product of the known dimensions
            # must divide the input's element count evenly.
            known_size = 1
            for s in self.shape:
                if s > 0:
                    known_size *= s
            size_var = type_check.Variable(known_size,
                                           'known_size(=%d)' % known_size)
            type_check.expect(
                type_check.prod(x_type.shape) % size_var == 0)
    def forward(self, x):
        # ndarray.reshape returns a view; no data is copied.
        return x[0].reshape(self.shape),
    def backward(self, x, gy):
        # The gradient of reshape is a reshape back to the input shape.
        return gy[0].reshape(x[0].shape),
def reshape(x, shape):
    """Reshapes an input variable without copy.
    Args:
        x (:class:`~chainer.Variable` or :class:`numpy.ndarray` or \
        :class:`cupy.ndarray`): Input variable.
        shape (:class:`tuple` of :class:`int` s):
            The **size** of shape (**size** means the number of elements) must
            be equal to that of original shape. One shape dimension can be -1.
            In this case, the value is inferred from the length of the array
            and remaining dimensions.
    Returns:
        ~chainer.Variable:
            Variable that holds a reshaped version of the input variable.
    .. seealso:: :func:`numpy.reshape`, :func:`cupy.reshape`
    .. admonition:: Example
        >>> x = np.array([[1, 2, 3, 4], [5, 6, 7, 8]])
        >>> y = F.reshape(x, (8,))
        >>> y.shape
        (8,)
        >>> y.data
        array([1, 2, 3, 4, 5, 6, 7, 8])
        >>> y = F.reshape(x, (4,-1))
        >>> y.shape
        (4, 2)
        >>> y.data
        array([[1, 2],
               [3, 4],
               [5, 6],
               [7, 8]])
    """
    # All work (shape validation and the no-copy view) is done by the
    # Reshape function node defined above.
    return Reshape(shape)(x)
| from chainer import function
from chainer.utils import type_check
def _count_unknown_dims(shape):
cnt = 0
for dim in shape:
cnt += dim < 0
return cnt
class Reshape(function.Function):
"""Reshapes an input array without copy."""
def __init__(self, shape):
cnt = _count_unknown_dims(shape)
assert cnt == 0 or cnt == 1
self.shape = shape
def check_type_forward(self, in_types):
type_check.expect(
in_types.size() == 1,
)
x_type, = in_types
cnt = _count_unknown_dims(self.shape)
if cnt == 0:
type_check.expect(
type_check.prod(x_type.shape) == type_check.prod(self.shape))
else:
known_size = 1
for s in self.shape:
if s > 0:
known_size *= s
size_var = type_check.Variable(known_size,
'known_size(=%d)' % known_size)
type_check.expect(
type_check.prod(x_type.shape) % size_var == 0)
def forward(self, x):
return x[0].reshape(self.shape),
def backward(self, x, gy):
return gy[0].reshape(x[0].shape),
def reshape(x, shape):
"""Reshapes an input variable without copy.
Args:
x (:class:`~chainer.Variable` or :class:`numpy.ndarray` or \
:class:`cupy.ndarray`): Input variable.
shape (:class:`tuple` of :class:`int` s):
The `size` of shape (`size` means the number of elements) must be
equal to that of original shape. One shape dimension can be -1. In
this case, the value is inferred from the length of the array and
remaining dimensions.
Returns:
~chainer.Variable:
Variable that holds a reshaped version of the input variable.
.. seealso:: :func:`numpy.reshape`, :func:`cupy.reshape`
.. admonition:: Example
>>> x = np.array([[1, 2, 3, 4], [5, 6, 7, 8]])
>>> y = F.reshape(x, (8,))
>>> y.shape
(8,)
>>> y.data
array([1, 2, 3, 4, 5, 6, 7, 8])
>>> y = F.reshape(x, (4,-1))
>>> y.shape
(4, 2)
>>> y.data
array([[1, 2],
[3, 4],
[5, 6],
[7, 8]])
"""
return Reshape(shape)(x)
| Python | 0.000026 |
f2edfbbf3a5c4e18a26b8b9479456b91311bd4ea | check that the datum has a module_id | corehq/apps/app_manager/app_schemas/session_schema.py | corehq/apps/app_manager/app_schemas/session_schema.py | from django.utils.text import slugify
from corehq import toggles
from corehq.apps.app_manager.const import USERCASE_TYPE
from corehq.apps.app_manager.templatetags.xforms_extras import clean_trans
from corehq.apps.app_manager.util import is_usercase_in_use
def get_session_schema(form):
    """Get the session schema definition for *form*.
    Returns the ``commcaresession`` instance schema dict, including case
    references for the form's selection datums and, when the user case
    is enabled for the domain, the user case context.
    """
    from corehq.apps.app_manager.suite_xml.sections.entries import EntriesHelper
    app = form.get_app()
    structure = {}
    datums = EntriesHelper(app).get_datums_meta_for_form_generic(form)
    # Keep only datums that represent an actual case selection.
    datums = [
        d for d in datums
        if d.requires_selection and d.case_type and not d.is_new_case_id
    ]
    def _get_structure(datum, data_registry, source=None):
        # *source* (a module name) disambiguates additional case
        # references; it is slugified into the hashtag/subset names.
        id_source = f":{slugify(source)}" if source else ""
        return {
            "reference": {
                "hashtag": f'#registry_case{id_source}' if data_registry else f"#case{id_source}",
                "source": "registry" if data_registry else "casedb",
                "subset": f"case{id_source}",
                "key": "@case_id",
            },
        }
    # Collect modules used as 'unrelated' parents in parent select
    # (relationship is None).
    unrelated_parents = set()
    for datum in datums:
        if datum.module_id:
            module = app.get_module_by_unique_id(datum.module_id)
            parent_select_active = hasattr(module, 'parent_select') and module.parent_select.active
            if parent_select_active and module.parent_select.relationship is None:
                # for child modules that use parent select where the parent is not a 'related' case
                # See toggles.NON_PARENT_MENU_SELECTION
                unrelated_parents.add(module.parent_select.module_id)
    data_structure = {}
    for i, datum in enumerate(reversed(datums)):
        # Datums without a module_id cannot be resolved to a module.
        if not datum.module_id:
            continue
        module = app.get_module_by_unique_id(datum.module_id)
        data_registry = module.search_config.data_registry
        if i == 0:
            # always add the datum for this module
            # (i == 0 is the last datum in selection order, i.e. the
            # form's own module)
            data_structure[datum.datum.id] = _get_structure(datum, data_registry)
        else:
            if datum.module_id and datum.module_id in unrelated_parents:
                source = clean_trans(module.name, app.langs)  # ensure that this structure reference is unique
                data_structure[datum.datum.id] = _get_structure(datum, data_registry, source)
    if data_structure:
        structure["data"] = {
            "merge": True,
            "structure": data_structure,
        }
    if is_usercase_in_use(app.domain):
        # Expose the user case so forms can reference #user properties.
        structure["context"] = {
            "merge": True,
            "structure": {
                "userid": {
                    "reference": {
                        "hashtag": "#user",
                        "source": "casedb",
                        "subset": USERCASE_TYPE,
                        "subset_key": "@case_type",
                        "subset_filter": True,
                        "key": "hq_user_id",
                    },
                },
            },
        }
    return {
        "id": "commcaresession",
        "uri": "jr://instance/session",
        "name": "Session",
        "path": "/session",
        "structure": structure,
    }
| from django.utils.text import slugify
from corehq import toggles
from corehq.apps.app_manager.const import USERCASE_TYPE
from corehq.apps.app_manager.templatetags.xforms_extras import clean_trans
from corehq.apps.app_manager.util import is_usercase_in_use
def get_session_schema(form):
    """Get the session schema definition for *form*.
    Returns the ``commcaresession`` instance schema dict, including case
    references for the form's selection datums and, when the user case
    is enabled for the domain, the user case context.
    """
    from corehq.apps.app_manager.suite_xml.sections.entries import EntriesHelper
    app = form.get_app()
    structure = {}
    datums = EntriesHelper(app).get_datums_meta_for_form_generic(form)
    # Keep only datums that represent an actual case selection.
    datums = [
        d for d in datums
        if d.requires_selection and d.case_type and not d.is_new_case_id
    ]
    def _get_structure(datum, data_registry, source=None):
        # *source* (a module name) disambiguates additional case
        # references; it is slugified into the hashtag/subset names.
        id_source = f":{slugify(source)}" if source else ""
        return {
            "reference": {
                "hashtag": f'#registry_case{id_source}' if data_registry else f"#case{id_source}",
                "source": "registry" if data_registry else "casedb",
                "subset": f"case{id_source}",
                "key": "@case_id",
            },
        }
    # Collect modules used as 'unrelated' parents in parent select
    # (relationship is None -- see toggles.NON_PARENT_MENU_SELECTION).
    unrelated_parents = set()
    for datum in datums:
        if datum.module_id:
            module = app.get_module_by_unique_id(datum.module_id)
            parent_select_active = hasattr(module, 'parent_select') and module.parent_select.active
            if parent_select_active and module.parent_select.relationship is None:
                unrelated_parents.add(module.parent_select.module_id)
    data_structure = {}
    for i, datum in enumerate(reversed(datums)):
        # Bug fix: a datum may have no module_id; previously it was
        # passed straight to get_module_by_unique_id, which fails for a
        # missing id.  Skip such datums, mirroring the guard used in
        # the loop above.
        if not datum.module_id:
            continue
        module = app.get_module_by_unique_id(datum.module_id)
        data_registry = module.search_config.data_registry
        if i == 0:
            # Always add the datum for the form's own module (i == 0 is
            # the last datum in selection order).
            data_structure[datum.datum.id] = _get_structure(datum, data_registry)
        else:
            if datum.module_id in unrelated_parents:
                # Use the module name to keep this reference unique.
                source = clean_trans(module.name, app.langs)
                data_structure[datum.datum.id] = _get_structure(datum, data_registry, source)
    if data_structure:
        structure["data"] = {
            "merge": True,
            "structure": data_structure,
        }
    if is_usercase_in_use(app.domain):
        # Expose the user case so forms can reference #user properties.
        structure["context"] = {
            "merge": True,
            "structure": {
                "userid": {
                    "reference": {
                        "hashtag": "#user",
                        "source": "casedb",
                        "subset": USERCASE_TYPE,
                        "subset_key": "@case_type",
                        "subset_filter": True,
                        "key": "hq_user_id",
                    },
                },
            },
        }
    return {
        "id": "commcaresession",
        "uri": "jr://instance/session",
        "name": "Session",
        "path": "/session",
        "structure": structure,
    }
| Python | 0.000018 |
e3548d62aa67472f291f6d3c0c8beca9813d6032 | Make it possible to step() in a newly created env, rather than throwing AttributeError | gym/envs/toy_text/discrete.py | gym/envs/toy_text/discrete.py | from gym import Env
from gym import spaces
import numpy as np
def categorical_sample(prob_n):
    """Draw an index from the categorical distribution whose class
    probabilities are given by *prob_n*."""
    cumulative = np.cumsum(np.asarray(prob_n))
    # First index whose cumulative probability exceeds a uniform draw.
    return (cumulative > np.random.rand()).argmax()
class DiscreteEnv(Env):
    """
    Has the following members
    - nS: number of states
    - nA: number of actions
    - P: transitions (*)
    - isd: initial state distribution (**)
    (*) dictionary dict of dicts of lists, where
      P[s][a] == [(probability, nextstate, reward, done), ...]
    (**) list or array of length nS
    """
    def __init__(self, nS, nA, P, isd):
        self.action_space = spaces.Discrete(nA)
        self.observation_space = spaces.Discrete(nS)
        self.nA = nA
        self.P = P
        self.isd = isd
        self.lastaction=None # for rendering
        # Sample an initial state right away so _step() works on a
        # freshly constructed env, before any explicit reset.
        self._reset()
    @property
    def nS(self):
        # Number of states, derived from the observation space.
        return self.observation_space.n
    def _reset(self):
        # Draw the starting state from the initial state distribution.
        self.s = categorical_sample(self.isd)
        return self.s
    def _step(self, a):
        # Sample one outgoing transition for action *a*, weighted by the
        # transition probabilities.
        transitions = self.P[self.s][a]
        i = categorical_sample([t[0] for t in transitions])
        p, s, r, d= transitions[i]
        self.s = s
        self.lastaction=a
        return (s, r, d, {"prob" : p})
| from gym import Env
from gym import spaces
import numpy as np
def categorical_sample(prob_n):
"""
Sample from categorical distribution
Each row specifies class probabilities
"""
prob_n = np.asarray(prob_n)
csprob_n = np.cumsum(prob_n)
return (csprob_n > np.random.rand()).argmax()
class DiscreteEnv(Env):
    """
    Has the following members
    - nS: number of states
    - nA: number of actions
    - P: transitions (*)
    - isd: initial state distribution (**)
    (*) dictionary dict of dicts of lists, where
      P[s][a] == [(probability, nextstate, reward, done), ...]
    (**) list or array of length nS
    """
    def __init__(self, nS, nA, P, isd):
        self.action_space = spaces.Discrete(nA)
        self.observation_space = spaces.Discrete(nS)
        self.nA = nA
        self.P = P
        self.isd = isd
        self.lastaction = None  # for rendering
        # Bug fix: sample an initial state immediately so _step() works
        # on a newly created env; previously self.s was unset until the
        # first reset, so stepping raised AttributeError.
        self._reset()
    @property
    def nS(self):
        # Number of states, derived from the observation space.
        return self.observation_space.n
    def _reset(self):
        # Draw the starting state from the initial state distribution.
        self.s = categorical_sample(self.isd)
        return self.s
    def _step(self, a):
        # Sample one outgoing transition for action *a*, weighted by the
        # transition probabilities.
        transitions = self.P[self.s][a]
        i = categorical_sample([t[0] for t in transitions])
        p, s, r, d = transitions[i]
        self.s = s
        self.lastaction = a
        return (s, r, d, {"prob": p})
| Python | 0.000004 |
253a0f786339e90b1b5841b94a22d44e5db3b85c | Add small delay in TemporalInformationRetriever to avoid endless loop | server/src/weblab/user_processing/TemporalInformationRetriever.py | server/src/weblab/user_processing/TemporalInformationRetriever.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2009 University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#
import threading
import time
import voodoo.log as log
import weblab.data.experiments.Usage as Usage
import weblab.data.Command as Command
class TemporalInformationRetriever(threading.Thread):
    """
    This class retrieves continuously the information of initial and finished experiments.
    """
    def __init__(self, initial_store, finished_store, db_manager):
        threading.Thread.__init__(self)
        self.keep_running = True
        self.initial_store = initial_store
        self.finished_store = finished_store
        self.iterations = 0
        self.db_manager = db_manager
        # None means stores use their own blocking/timeout semantics in
        # get() -- TODO confirm the store's behavior for timeout=None.
        self.timeout = None
        # Daemon thread: does not block interpreter shutdown.
        self.setDaemon(True)
    def run(self):
        # Poll both stores until stop() is called.
        while self.keep_running:
            try:
                self.iterations += 1
                self.iterate()
            except:
                # NOTE(review): bare except also swallows SystemExit and
                # KeyboardInterrupt; the loop deliberately survives any
                # error and logs it.
                log.log( TemporalInformationRetriever, log.LogLevel.Critical, "Exception iterating in TemporalInformationRetriever!!!")
                log.log_exc( TemporalInformationRetriever, log.LogLevel.Critical )
    def stop(self):
        # Request the run() loop to finish after the current iteration.
        self.keep_running = False
    def iterate(self):
        self.iterate_over_store(self.initial_store, 'initial')
        if self.keep_running:
            self.iterate_over_store(self.finished_store, 'finish')
    def iterate_over_store(self, store, message):
        # Pull one pending record (or None) and persist it as a
        # "@@@initial@@@" / "@@@finish@@@" command for the reservation.
        information = store.get(timeout=self.timeout)
        if information is not None:
            reservation_id, obj, initial_time, end_time = information
            initial_timestamp = time.mktime(initial_time.timetuple())
            end_timestamp = time.mktime(end_time.timetuple())
            command = Usage.CommandSent(
                    Command.Command("@@@%s@@@" % message), initial_timestamp,
                    Command.Command(str(obj)), end_timestamp
                )
            if not self.keep_running or not self.db_manager.append_command(reservation_id, command):
                # If it could not be added because the experiment id
                # did not exist, put it again in the queue
                store.put(reservation_id, obj, initial_time, end_time)
        # Small delay so this polling loop does not spin the CPU.
        time.sleep(0.01)
| #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2009 University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
#
import threading
import time
import voodoo.log as log
import weblab.data.experiments.Usage as Usage
import weblab.data.Command as Command
class TemporalInformationRetriever(threading.Thread):
    """
    This class retrieves continuously the information of initial and finished experiments.
    """
    def __init__(self, initial_store, finished_store, db_manager):
        threading.Thread.__init__(self)
        self.keep_running = True
        self.initial_store = initial_store
        self.finished_store = finished_store
        self.iterations = 0
        self.db_manager = db_manager
        self.timeout = None
        # Daemon thread: does not block interpreter shutdown.
        self.setDaemon(True)
    def run(self):
        # Poll both stores until stop() is called.
        while self.keep_running:
            try:
                self.iterations += 1
                self.iterate()
            except:
                # Survive any error and log it; the retriever must keep
                # polling.
                log.log( TemporalInformationRetriever, log.LogLevel.Critical, "Exception iterating in TemporalInformationRetriever!!!")
                log.log_exc( TemporalInformationRetriever, log.LogLevel.Critical )
    def stop(self):
        # Request the run() loop to finish after the current iteration.
        self.keep_running = False
    def iterate(self):
        self.iterate_over_store(self.initial_store, 'initial')
        if self.keep_running:
            self.iterate_over_store(self.finished_store, 'finish')
    def iterate_over_store(self, store, message):
        # Pull one pending record (or None) and persist it as a
        # "@@@initial@@@" / "@@@finish@@@" command for the reservation.
        information = store.get(timeout=self.timeout)
        if information is not None:
            reservation_id, obj, initial_time, end_time = information
            initial_timestamp = time.mktime(initial_time.timetuple())
            end_timestamp = time.mktime(end_time.timetuple())
            command = Usage.CommandSent(
                    Command.Command("@@@%s@@@" % message), initial_timestamp,
                    Command.Command(str(obj)), end_timestamp
                )
            if not self.keep_running or not self.db_manager.append_command(reservation_id, command):
                # If it could not be added because the experiment id
                # did not exist, put it again in the queue
                store.put(reservation_id, obj, initial_time, end_time)
        # Bug fix: without a small delay this polling loop spins at full
        # CPU whenever store.get() returns immediately.
        time.sleep(0.01)
| Python | 0 |
92737e3f95ff94129e52e1fab1f40a0f70550d46 | Update the ParticleFilterSetOperations | hoomd/filter/set_.py | hoomd/filter/set_.py | from hoomd.filter.filter_ import ParticleFilter
from hoomd import _hoomd
class ParticleFilterSetOperations(ParticleFilter):
    """Base class for filters that combine two particle filters with a
    set operation.
    Subclasses are expected to define ``_cpp_cls_name`` (the name of
    the backing C++ class in ``_hoomd``) and ``_symmetric`` (whether
    operand order is irrelevant for equality).
    """
    def __init__(self, f, g):
        # Combining a filter with itself is always a degenerate
        # operation, so reject it.
        if f == g:
            raise ValueError("Cannot use same filter for {}"
                             "".format(self.__class__.__name__))
        else:
            self._f = f
            self._g = g
        # Grab the C++ class constructor for the set operation using the class
        # variable _cpp_cls_name
        getattr(_hoomd, self._cpp_cls_name).__init__(self, f, g)
    def __hash__(self):
        # Symmetric by construction (sum of operand hashes), so equal
        # symmetric filters hash equally regardless of operand order.
        return hash(hash(self._f) + hash(self._g))
    def __eq__(self, other):
        # Symmetric operations (union/intersection) compare operands in
        # either order; asymmetric ones (set difference) compare them
        # positionally.
        if self._symmetric:
            return type(self) == type(other) and \
                (self._f == other._f or self._f == other._g) and \
                (self._g == other._g or self._g == other._f)
        else:
            return type(self) == type(other) and \
                self._f == other._f and self._g == other._g
class SetDifference(ParticleFilterSetOperations,
                    _hoomd.ParticleFilterSetDifference):
    """Set difference (f - g) of two particle filters.
    Bug fix: the base class was spelled ``_ParticleFilterSetOperations``
    (leading underscore), a name not defined in this module, which made
    importing it raise NameError.
    """
    _cpp_cls_name = 'ParticleFilterSetDifference'
    # Operand order matters for a difference.
    _symmetric = False
class Union(ParticleFilterSetOperations, _hoomd.ParticleFilterUnion):
    """Union of two particle filters.
    Bug fix: the base class was spelled ``_ParticleFilterSetOperations``
    (leading underscore), a name not defined in this module, which made
    importing it raise NameError.
    """
    _cpp_cls_name = 'ParticleFilterUnion'
    # Operand order is irrelevant for a union.
    _symmetric = True
class Intersection(ParticleFilterSetOperations,
                   _hoomd.ParticleFilterIntersection):
    """Intersection of two particle filters.
    Bug fix: the base class was spelled ``_ParticleFilterSetOperations``
    (leading underscore), a name not defined in this module, which made
    importing it raise NameError.
    """
    _cpp_cls_name = 'ParticleFilterIntersection'
    # Operand order is irrelevant for an intersection.
    _symmetric = True
| from hoomd.filter.filter_ import ParticleFilter
from hoomd import _hoomd
class ParticleFilterSetOperations(ParticleFilter):
def __init__(self, f, g):
if f == g:
raise ValueError("Cannot use same filter for {}"
"".format(self.__class__.__name__))
else:
self._f = f
self._g = g
getattr(_hoomd, self._cpp_cls_name).__init__(self, f, g)
def __hash__(self):
return hash(hash(self._f) + hash(self._g))
def __eq__(self, other):
return type(self) == type(other) and \
self._f == other._f and \
self._g == other._g
class SetDifference(ParticleFilterSetOperations,
_hoomd.ParticleFilterSetDifference):
_cpp_cls_name = 'ParticleFilterSetDifference'
class Union(ParticleFilterSetOperations, _hoomd.ParticleFilterUnion):
_cpp_cls_name = 'ParticleFilterUnion'
class Intersection(ParticleFilterSetOperations,
_hoomd.ParticleFilterIntersection):
_cpp_cls_name = 'ParticleFilterIntersection'
| Python | 0 |
8d98fe5570ce37512128d46853000dc860f798b2 | Update jupyterhub_config.py | jupyterhub/jupyterhub_config.py | jupyterhub/jupyterhub_config.py | # Configuration file for jupyterhub.
from jupyter_client.localinterfaces import public_ips
c = get_config() # noqa
c.JupyterHub.ssl_key = 'test.key'
c.JupyterHub.ssl_cert = 'test.crt'
c.JupyterHub.hub_ip = public_ips()[0]
# Choose between system-user mode and virtual-user mode
setting_mode = ('system_user', 'virtual_user')[1]
if setting_mode == 'virtual_user':
c.JupyterHub.spawner_class = 'remoteappmanager.spawners.VirtualUserSpawner'
# Parent directory in which temporary directory is created for
# each virtual user
# Set this to a drive with well defined capacity quota
# If unset, no workspace would be available
c.Spawner.workspace_dir = '/tmp/remoteapp'
# FIXME: replace me with other authenticator (e.g. GitHub OAuth...)
c.JupyterHub.authenticator_class = (
'remoteappmanager.auth.WorldAuthenticator')
elif setting_mode == 'system_user':
c.JupyterHub.spawner_class = 'remoteappmanager.spawners.SystemUserSpawner'
| # Configuration file for jupyterhub.
from jupyter_client.localinterfaces import public_ips
c = get_config() # noqa
c.JupyterHub.ssl_key = 'test.key'
c.JupyterHub.ssl_cert = 'test.crt'
c.JupyterHub.hub_ip = public_ips()[0]
# Choose between system-user mode and virtual-user mode
setting_mode = ('system_user', 'virtual_user')[1]
if setting_mode == 'virtual_user':
c.JupyterHub.spawner_class = 'remoteappmanager.spawners.VirtualUserSpawner'
# Parent directory in which temporary directory is created for
# each virtual user
# Set this to a drive with well defined capacity quota
# If unset, no workspace would be available
c.Spawner.workspace_dir = '/tmp/remoteapp'
# FIXME: replace me with other authenticator (e.g. GitHub OAuth...)
c.JupyterHub.authenticator_class = (
'remoteappmanager.auth.WorldAuthenticator')
elif setting_mode == 'system_user':
c.JupyterHub.spawner_class = 'remoteappmanager.spawners.Spawner'
| Python | 0.000001 |
fc6716854bc876730f1f3684945061fcf1d48072 | Fix default optional prefix | hashid_field/rest.py | hashid_field/rest.py | from django.apps import apps
from django.core import exceptions
from hashids import Hashids
from rest_framework import fields, serializers
from hashid_field.conf import settings
from hashid_field.hashid import Hashid
class UnconfiguredHashidSerialField(fields.Field):
    # Placeholder field that fails loudly at bind time: it forces the
    # developer to declare an explicit serializer field instead of
    # relying on ModelSerializer auto-generation.
    def bind(self, field_name, parent):
        super().bind(field_name, parent)
        raise exceptions.ImproperlyConfigured(
            "The field '{field_name}' on {parent} must be explicitly declared when used with a ModelSerializer".format(
                field_name=field_name, parent=parent.__class__.__name__))
class HashidSerializerMixin(object):
    """Mixin that configures hashid parameters (salt, min_length,
    alphabet, prefix) for DRF serializer fields, either from explicit
    kwargs or from a source HashidField/HashidAutoField."""
    usage_text = "Must pass a HashidField, HashidAutoField or 'app_label.model.field'"
    def __init__(self, **kwargs):
        self.hashid_salt = kwargs.pop('salt', settings.HASHID_FIELD_SALT)
        self.hashid_min_length = kwargs.pop('min_length', 7)
        self.hashid_alphabet = kwargs.pop('alphabet', Hashids.ALPHABET)
        # Bug fix: this previously popped 'alphabet' (already consumed
        # above), so the prefix was always '' and a caller-supplied
        # prefix= kwarg leaked through to the parent field constructor.
        self.prefix = kwargs.pop('prefix', "")
        source_field = kwargs.pop('source_field', None)
        if source_field:
            from hashid_field import HashidField, HashidAutoField
            if isinstance(source_field, str):
                # Accept an 'app_label.model.field' path and resolve the
                # actual model field from it.
                try:
                    app_label, model_name, field_name = source_field.split(".")
                except ValueError:
                    raise ValueError(self.usage_text)
                model = apps.get_model(app_label, model_name)
                source_field = model._meta.get_field(field_name)
            elif not isinstance(source_field, (HashidField, HashidAutoField)):
                raise TypeError(self.usage_text)
            # The source field's settings override any explicit kwargs.
            self.hashid_salt, self.hashid_min_length, self.hashid_alphabet, self.prefix = \
                source_field.salt, source_field.min_length, source_field.alphabet, source_field.prefix
        self._hashids = Hashids(salt=self.hashid_salt, min_length=self.hashid_min_length, alphabet=self.hashid_alphabet)
        super().__init__(**kwargs)
    def to_internal_value(self, data):
        """Parse *data* (an int or a hashid string) into a Hashid."""
        try:
            value = super().to_internal_value(data)
            return Hashid(value, hashids=self._hashids, prefix=self.prefix)
        except ValueError:
            raise serializers.ValidationError("Invalid int or Hashid string")
class HashidSerializerCharField(HashidSerializerMixin, fields.CharField):
    # Serializes the Hashid in its string (obfuscated hashid) form.
    def to_representation(self, value):
        return str(value)
class HashidSerializerIntegerField(HashidSerializerMixin, fields.IntegerField):
    # Serializes the Hashid in its underlying integer form.
    def to_representation(self, value):
        return int(value)
| from django.apps import apps
from django.core import exceptions
from hashids import Hashids
from rest_framework import fields, serializers
from hashid_field.conf import settings
from hashid_field.hashid import Hashid
class UnconfiguredHashidSerialField(fields.Field):
def bind(self, field_name, parent):
super().bind(field_name, parent)
raise exceptions.ImproperlyConfigured(
"The field '{field_name}' on {parent} must be explicitly declared when used with a ModelSerializer".format(
field_name=field_name, parent=parent.__class__.__name__))
class HashidSerializerMixin(object):
usage_text = "Must pass a HashidField, HashidAutoField or 'app_label.model.field'"
def __init__(self, **kwargs):
self.hashid_salt = kwargs.pop('salt', settings.HASHID_FIELD_SALT)
self.hashid_min_length = kwargs.pop('min_length', 7)
self.hashid_alphabet = kwargs.pop('alphabet', Hashids.ALPHABET)
self.prefix = None
source_field = kwargs.pop('source_field', None)
if source_field:
from hashid_field import HashidField, HashidAutoField
if isinstance(source_field, str):
try:
app_label, model_name, field_name = source_field.split(".")
except ValueError:
raise ValueError(self.usage_text)
model = apps.get_model(app_label, model_name)
source_field = model._meta.get_field(field_name)
elif not isinstance(source_field, (HashidField, HashidAutoField)):
raise TypeError(self.usage_text)
self.hashid_salt, self.hashid_min_length, self.hashid_alphabet, self.prefix = \
source_field.salt, source_field.min_length, source_field.alphabet, source_field.prefix
self._hashids = Hashids(salt=self.hashid_salt, min_length=self.hashid_min_length, alphabet=self.hashid_alphabet)
super().__init__(**kwargs)
def to_internal_value(self, data):
try:
value = super().to_internal_value(data)
return Hashid(value, hashids=self._hashids, prefix=self.prefix)
except ValueError:
raise serializers.ValidationError("Invalid int or Hashid string")
class HashidSerializerCharField(HashidSerializerMixin, fields.CharField):
def to_representation(self, value):
return str(value)
class HashidSerializerIntegerField(HashidSerializerMixin, fields.IntegerField):
def to_representation(self, value):
return int(value)
| Python | 0.000002 |
bca7f7f6ae870a0a307566ee1735e899596d3f99 | Simplify the brightness calculation, in preparation for multi-LED drips | hardware/mote/mote_icicles.py | hardware/mote/mote_icicles.py | import time
from random import randint
from mote import Mote
mote = Mote()
mote.configure_channel(1, 16, False)
mote.configure_channel(2, 16, False)
mote.configure_channel(3, 16, False)
mote.configure_channel(4, 16, False)
full_brightness = 40
class Icicle:
    """One 16-pixel Mote channel animated as a dripping icicle."""
    def __init__(self, channel):
        self.channel = channel
        self.current_pixel = 0
        self.start_random_wait_for_next_drip()
    def step(self):
        """Advance the animation by one frame."""
        # Blank whichever pixel was lit on the previous frame.
        mote.set_pixel(self.channel, self.previous_pixel(), 0, 0, 0)
        # Still pausing between drips?
        if self.frames_to_wait > 0:
            self.frames_to_wait -= 1
            return
        # Light the current pixel (white, full brightness).
        level = full_brightness
        mote.set_pixel(self.channel, self.current_pixel, level, level, level)
        # Move the drip along; wrapping back to pixel 0 means the drip
        # finished, so schedule a random pause before the next one.
        self.current_pixel = self.next_pixel()
        if self.current_pixel == 0:
            self.start_random_wait_for_next_drip()
    def next_pixel(self, delta=1):
        """Pixel index *delta* steps ahead, wrapping 15 -> 0."""
        candidate = self.current_pixel + delta
        return candidate if self.valid_pixel(candidate) else candidate - 16
    def previous_pixel(self, delta=1):
        """Pixel index *delta* steps behind, wrapping 0 -> 15."""
        candidate = self.current_pixel - delta
        return candidate if self.valid_pixel(candidate) else candidate + 16
    def valid_pixel(self, pixel):
        """True when *pixel* falls on the 16-pixel strip."""
        return 0 <= pixel <= 15
    def start_random_wait_for_next_drip(self):
        """Pause the animation for a random 15-30 frame interval."""
        self.frames_to_wait = randint(15, 30)
if __name__ == "__main__":
    mote.clear()
    # One icicle per configured Mote channel.
    icicles = [
        Icicle(1),
        Icicle(2),
        Icicle(3),
        Icicle(4)
    ]
    # Animation loop: advance every icicle, push the frame, ~5 fps.
    while True:
        for icicle in icicles:
            icicle.step()
        mote.show()
        time.sleep(0.2)
| import time
from random import randint
from mote import Mote
mote = Mote()
mote.configure_channel(1, 16, False)
mote.configure_channel(2, 16, False)
mote.configure_channel(3, 16, False)
mote.configure_channel(4, 16, False)
max_brightness = 40
class Icicle:
    """One dripping-light animation running on a single Mote channel."""

    def __init__(self, channel):
        self.channel = channel
        self.current_pixel = 0
        self.start_random_wait_for_next_drip()

    def step(self):
        # Extinguish whatever pixel was lit on the previous frame.
        mote.set_pixel(self.channel, self.previous_pixel(), 0, 0, 0)
        # While pausing between drips, just count the wait down.
        if self.frames_to_wait > 0:
            self.frames_to_wait -= 1
            return
        # Light the current pixel; the drip fades as it travels along.
        level = max_brightness - (2 * self.current_pixel)
        mote.set_pixel(self.channel, self.current_pixel, level, level, level)
        # Move on, and schedule a random pause once a full run completes.
        self.current_pixel = self.next_pixel()
        if self.current_pixel == 0:
            self.start_random_wait_for_next_drip()

    def next_pixel(self, delta = 1):
        # Index `delta` steps ahead, wrapped onto the 16-pixel strip.
        candidate = self.current_pixel + delta
        return candidate if self.valid_pixel(candidate) else candidate - 16

    def previous_pixel(self, delta = 1):
        # Index `delta` steps back, wrapped onto the 16-pixel strip.
        candidate = self.current_pixel - delta
        return candidate if self.valid_pixel(candidate) else candidate + 16

    def valid_pixel(self, pixel):
        return 0 <= pixel <= 15

    def start_random_wait_for_next_drip(self):
        # Random inter-drip pause so the four channels desynchronise.
        self.frames_to_wait = randint(15, 30)
if __name__ == "__main__":
mote.clear()
icicles = [
Icicle(1),
Icicle(2),
Icicle(3),
Icicle(4)
]
while True:
for icicle in icicles:
icicle.step()
mote.show()
time.sleep(0.2)
| Python | 0 |
9718e6c216b8d5205a19f095593ec099004785a6 | add app | src/studio/launch/commands/app_commands.py | src/studio/launch/commands/app_commands.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import print_function
import os
import sys
import json
import importlib
from sh import pip
from termcolor import colored
from studio.frame.config import common as common_config
from studio.launch.base import manager
app_manager = manager.subcommand('app')
VASSALS = common_config['UWSGI_EMPEROR']
def _get_app(appname):
    """Import *appname* and return the first module attribute that exposes
    a ``config`` attribute (taken to be the app's entry point).

    Returns None and prints a colored diagnostic to stderr when the module
    cannot be imported or no suitable attribute is found.
    """
    try:
        module = importlib.import_module(appname)
    except ImportError:
        print(colored('Can\'t import app %s.' % appname,
                      'yellow', attrs=['bold']),
              file=sys.stderr)
        return None
    # for/else: the else branch runs only when the loop finishes without
    # returning, i.e. no attribute carrying a `config` was found.
    for name in dir(module):
        app = getattr(module, name)
        if hasattr(app, 'config'):
            return app
    else:
        print(colored('Can\'t find app %s\'s entry' % appname,
                      'yellow', attrs=['bold']),
              file=sys.stderr)
        return None
def _iter_all():
    """Yield the names of installed packages called 'microsite'.

    Iterates ``pip freeze`` output.  Lines without an ``==`` pin (for
    example editable ``-e`` installs) are now tolerated; the previous
    two-tuple unpacking of ``split('==')`` raised ValueError on them.
    """
    for pkg in pip.freeze():
        # partition() never raises, regardless of how many '==' appear.
        appname, _, _ = pkg.partition('==')
        if appname == 'microsite':
            yield appname
def _get_pkgs():
    """Return the names of all installed packages (``pip freeze`` lines)."""
    # Each line looks like "name==1.0"; keep just the name part.
    # NOTE(review): str() presumably normalises the unicode line objects
    # that sh's pip wrapper yields -- confirm before removing it.
    return [str(pkg.split('==')[0]) for pkg in pip.freeze()]
def _get_appnames():
    """Return installed app names, i.e. packages named ``qsapp-<name>``."""
    prefix = 'qsapp-'
    return [pkg[len(prefix):] for pkg in _get_pkgs() if pkg.startswith(prefix)]
def _mk_uwsgi_config(config):
config_d = {}
for k, v in config.items():
if k.startswith('UWSGI_'):
k = k[6:].replace('_', '-')
config_d[k] = v
return config_d
def _register(appname, **config_d):
    """Write a uwsgi vassal JSON file for *appname* into the emperor dir.

    *config_d* holds extra uwsgi options merged on top of the defaults.
    """
    vassals_dir = VASSALS
    try:
        os.makedirs(vassals_dir)
    except OSError:
        # Directory already exists (or cannot be created) -- best effort.
        pass
    uwsgi_cfg = {}
    uwsgi_cfg.setdefault('env', []).extend([
        # 'STUDIO_ENVIRON=%s' % common_config['ENVIRON'],
        'STUDIO_APPNAME=%s' % appname])
    uwsgi_cfg.update(config_d)
    print('Registering app %s:' % appname, end=' ')
    with open(os.path.join(vassals_dir,
                           '%s.json' % appname), 'wb') as fp:
        json.dump({'uwsgi': uwsgi_cfg}, fp)
    print(colored('ok', 'green', attrs=['bold']) + '.')
@app_manager.command
def add(*appnames):
    """CLI command: register each known installed app as a uwsgi vassal."""
    _names = _get_appnames()
    for appname in appnames:
        if appname in _names:
            app = _get_app(appname)
            # NOTE(review): _get_app may return None, which would make
            # app.config raise AttributeError -- confirm intended.
            config_d = _mk_uwsgi_config(app.config)
            _register(appname, **config_d)
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import print_function
import os
import sys
import importlib
from sh import pip
from termcolor import colored
from studio.frame.config import common as common_config
from studio.launch.base import manager
app_manager = manager.subcommand('app')
VASSAL = common_config['UWSGI_EMPEROR']
def _get_app(appname):
try:
module = importlib.import_module(appname)
except ImportError:
print(colored('Can\'t import app %s.' % appname,
'yellow', attrs=['bold']),
file=sys.stderr)
return None
for name in dir(module):
app = getattr(module, name)
if hasattr(app, 'config'):
return app
else:
print(colored('Can\'t find app %s\'s entry' % appname,
'yellow', attrs=['bold']),
file=sys.stderr)
return None
def _iter_all():
for pkg in pip.freeze():
appname, _ = pkg.split('==')
if 'microsite' == appname:
yield appname
def _get_pkgs():
return [str(pkg.split('==')[0]) for pkg in pip.freeze()]
def _get_appnames():
pkgs = _get_pkgs()
return [pkg[6:] for pkg in pkgs if pkg.startswith('qsapp-')]
def _mk_uwsgi_config(config):
conifg_d = {}
for k, v in config.items():
if k.startswith('UWSGI_'):
k = k[6:].replace('_', '-')
conifg_d[k] = v
print(VASSAL)
@app_manager.command
def add(*appnames):
_names = _get_appnames()
for appname in appnames:
if appname in _names:
app = _get_app(appname)
_mk_uwsgi_config(app.config)
| Python | 0.000003 |
6380aabe25e38d198b6c4e10d126d6fd97860c85 | remove Simple.validate function | flask_pam/token/simple.py | flask_pam/token/simple.py | # -*- coding: utf-8 -*-
from hashlib import sha256
from token import Token
class Simple(Token):
    """Simple token implementation. It's not safe. Only for testing purposes!"""
    def generate(self):
        # Token is just the SHA-256 hex digest of the username.
        # NOTE(review): hashlib.sha256 requires bytes on Python 3; passing
        # self.username directly suggests Python 2 or a bytes username --
        # confirm before porting.
        return sha256(self.username).hexdigest()
| # -*- coding: utf-8 -*-
from hashlib import sha256
from token import Token
class Simple(Token):
    """Simple token implementation. It's not safe. Only for testing purposes!"""
    def generate(self):
        # Token is the SHA-256 hex digest of the username.
        return sha256(self.username).hexdigest()
    def validate(self, token):
        # Reuse generate() so the two methods can never drift apart
        # (the previous version repeated the hash expression inline).
        # NOTE(review): plain == leaks timing; use hmac.compare_digest
        # if these tokens ever guard anything real.
        return self.generate() == token
| Python | 0.000126 |
d37d99dedfb7cc2c86a9f01a75213fcc430af13d | fix inheritance for `SmAttr`s | hashstore/utils/file_types.py | hashstore/utils/file_types.py | import mimetypes
from typing import List
from os.path import join, dirname
from hashstore.utils import load_json_file
from hashstore.utils.smattr import SmAttr
class FileType(SmAttr):
mime:str
ext:List[str]
def read_file_types(json_file):
    """Load *json_file* and wrap each entry in a FileType record."""
    return {name: FileType(spec)
            for name, spec in load_json_file(json_file).items()}
file_types = read_file_types(join(dirname(__file__), 'file_types.json'))
my_mime_dict = dict(
(ext,ft.mime)
for ft in file_types.values()
for ext in ft.ext)
my_name_dict = dict(
(ext,k)
for k, ft in file_types.items()
for ext in ft.ext )
WDF = 'WDF'
HSB = 'HSB'
def guess_name(filename):
    '''
    >>> guess_name('abc.txt')
    'TXT'
    >>> guess_name('abc.log')
    'LOG'
    >>> guess_name('abc.wdf')
    'WDF'
    >>> guess_name('abc.hsb')
    'HSB'
    >>> guess_name('.wdf')
    'BINARY'
    >>> guess_name('abc.html')
    'HTML'
    >>> guess_name('abc.exe')
    'BINARY'
    :param filename: file path
    :return: name from `file_types`
    '''
    # Direct lookup with a 'BINARY' fallback.  This replaces a bare
    # ``except`` that silently swallowed every error, not just the
    # expected KeyError for an unknown extension.
    extension = extract_extension(filename)
    if extension:
        return my_name_dict.get(extension, 'BINARY')
    return 'BINARY'
def guess_type(filename):
    '''
    guess MIME type
    >>> guess_type('abc.txt')
    'text/plain'
    >>> guess_type('abc.log')
    'text/plain'
    >>> guess_type('abc.wdf')
    'text/wdf'
    >>> guess_type('abc.hsb')
    'text/hsb'
    >>> guess_type('.wdf')
    >>> guess_type('abc.html')
    'text/html'
    >>> guess_type('abc.exe')
    'application/x-msdos-program'
    :param filename: file path
    :return: mime type
    '''
    # Prefer the project's own extension map; fall back to the stdlib
    # mimetypes table when the extension is missing or unknown.  Replaces
    # a bare ``except`` that hid every error, not just the expected
    # KeyError.  NOTE: the .exe doctest value is platform dependent
    # (mimetypes tables differ between systems).
    extension = extract_extension(filename)
    if extension and extension in my_mime_dict:
        return my_mime_dict[extension]
    return mimetypes.guess_type(filename)[0]
def extract_extension(filename):
    '''
    >>> extract_extension('.txt')
    >>> extract_extension(None)
    >>> extract_extension('abc.txt')
    'txt'
    >>> extract_extension('a.html')
    'html'
    :param filename: file path
    :return: extension
    '''
    # Explicit checks instead of the previous bare ``except``: non-strings
    # (including None), names with no dot, and names whose only dot is the
    # leading one (hidden files like '.txt') all yield None.
    if not isinstance(filename, str):
        return None
    dot_p = filename.rfind('.')
    if dot_p > 0:
        return filename[dot_p + 1:]
    return None
| import mimetypes
from typing import List
from os.path import join, dirname
from hashstore.utils import load_json_file
from hashstore.utils.smattr import SmAttr
class FileType(SmAttr):
mime:str
ext:List[str]
def read_file_types(json_file):
load_json = load_json_file(json_file)
return {n: FileType(v) for n,v in load_json.items()}
file_types = read_file_types(join(dirname(__file__), 'file_types.json'))
my_mime_dict = dict(
(ext,ft.mime)
for ft in file_types.values()
for ext in ft.ext)
my_name_dict = dict(
(ext,k)
for k, ft in file_types.items()
for ext in ft.ext )
WDF = 'WDF'
HSB = 'HSB'
def guess_name(filename):
'''
>>> guess_name('abc.txt')
'TXT'
>>> guess_name('abc.log')
'LOG'
>>> guess_name('abc.wdf')
'WDF'
>>> guess_name('abc.hsb')
'HSB'
>>> guess_name('.wdf')
'BINARY'
>>> guess_name('abc.html')
'HTML'
>>> guess_name('abc.exe')
'BINARY'
:param filename: file path
:return: name from `file_types`
'''
try:
extension = extract_extension(filename)
if extension:
return my_name_dict[extension]
except:
pass
return 'BINARY'
def guess_type(filename):
'''
guess MIME type
>>> guess_type('abc.txt')
'text/plain'
>>> guess_type('abc.log')
'text/plain'
>>> guess_type('abc.wdf')
'text/wdf'
>>> guess_type('abc.hsb')
'text/hsb'
>>> guess_type('.wdf')
>>> guess_type('abc.html')
'text/html'
>>> guess_type('abc.exe')
'application/x-msdownload'
:param filename: file path
:return: mime type
'''
try:
extension = extract_extension(filename)
if extension:
return my_mime_dict[extension]
except:
pass
return mimetypes.guess_type(filename)[0]
def extract_extension(filename):
    '''
    Return the extension of *filename*, or None when there is none.
    >>> extract_extension('.txt')
    >>> extract_extension(None)
    >>> extract_extension('abc.txt')
    'txt'
    >>> extract_extension('a.html')
    'html'
    :param filename: file path
    :return: extension
    '''
    # Anything unexpected (None, no dot at all) lands in the except and
    # yields None; a leading-dot-only name ('.txt') also yields None.
    try:
        dot_p = filename.rindex('.')
    except:
        return None
    return filename[dot_p + 1:] if dot_p > 0 else None
| Python | 0.000021 |
beccef4eccda11e32ba30022008de44450f69fa2 | Check if block exists before DAG edits. | src/api.py | src/api.py |
def check_blocks(dag, block_ids):
    """Return True when every id in *block_ids* exists in *dag*."""
    missing = set(block_ids) - set(dag.block_ids())
    return not missing
def execute_blocks(dag_fpathname, block_ids, all=False):
    """Execute the given blocks of the DAG stored at *dag_fpathname*.

    With all=True every block in the DAG is run; otherwise the requested
    ids are validated first and execution is skipped (with a message) when
    any of them is unknown.  NOTE: ``all`` shadows the builtin but is kept
    for interface compatibility.
    """
    import dag
    import dagexecutor
    d = dag.DAG.from_file(dag_fpathname)
    # The previous version had a dangling ``elif`` (a SyntaxError, the
    # statements were out of order); this restores the intended logic.
    nonexistent = set(block_ids) - set(d.block_ids())
    if all:
        block_ids = d.block_ids()
    elif nonexistent:
        print('Block(s) {} have not been found.'.format(nonexistent))
        return
    dex = dagexecutor.DAGExecutor(d, dag_fpathname)
    dex.execute_blocks(block_ids)
def open_notebook(nbfile):
    """Launch a Jupyter notebook server for *nbfile* (blocks until exit)."""
    # NOTE(review): ConsoleExecutor is imported but unused here -- confirm
    # the import has no intended side effect before removing it.
    from utils import ConsoleExecutor
    import subprocess
    print('Running {}'.format(nbfile))
    command = 'jupyter notebook ' + nbfile
    subprocess.check_output(command.split())
def create_flow(dag_fpathname, block_ids, flow_name, run=False):
    """Export *block_ids* of the DAG into a notebook named *flow_name*.

    Appends the .ipynb suffix if missing and optionally opens the
    resulting notebook when run=True.
    """
    import dag
    import flowmanager
    import os
    d = dag.DAG.from_file(dag_fpathname)
    # FlowManager is given the DAG file's directory here.
    flow = flowmanager.FlowManager(d, os.path.dirname(dag_fpathname))
    flow_fname = flow_name
    if not flow_fname.endswith('.ipynb'): flow_fname += '.ipynb'
    flow.flow_to_file(block_ids, flow_fname)
    if run: open_notebook(flow_fname)
def update_from_flow(dag_fpathname, flow_fpathname):
    """Apply the changes made in a flow notebook back to the DAG file."""
    import dag
    import flowmanager
    d = dag.DAG.from_file(dag_fpathname)
    # NOTE(review): FlowManager receives the DAG *file path* here, while
    # create_flow passes its *directory* -- confirm which is correct.
    flow = flowmanager.FlowManager(d, dag_fpathname)
    flow.apply_flow_changes(flow_fpathname)
    d.to_file(dag_fpathname)
def new_project(project_name, run=False):
    """Create an empty DAG file <project_name>.dagpy.

    With run=True, also creates and opens an initial flow notebook.
    """
    import dag
    # NOTE(review): ConsoleExecutor is imported but unused -- confirm the
    # import has no intended side effect before removing it.
    from utils import ConsoleExecutor
    d = dag.DAG.empty(project_name)
    dag_fpathname = project_name+'.dagpy'
    d.to_file(dag_fpathname)
    if run:
        flowname = project_name + '_initialflow.ipynb'
        create_flow(dag_fpathname, [], flowname, run)
def display_dag(dag_fpathname, flow = None):
    """Draw the DAG, highlighting the dependencies of *flow* if given."""
    import dag
    import utils
    d = dag.DAG.from_file(dag_fpathname)
    to_color = []
    if flow is not None:
        # Highlight every block the flow transitively depends on.
        to_color = utils.all_dependencies(d, flow)
    utils.dag_draw(d, to_color=to_color)
def add_or_update_block(dag_fpathname, block_id, block):
    """Upsert *block* into the DAG file; persist an empty block file when new."""
    import dag
    import blockio
    d = dag.DAG.from_file(dag_fpathname)
    is_new = d.add_or_update_block(block_id, block)
    d.to_file(dag_fpathname)
    # Only freshly created blocks get an (empty) on-disk block file.
    if is_new: blockio.save_block(block_id, [], d)
def add_block(dag_fpathname, block_id, block):
    """Add *block* to the DAG file and create its empty block file."""
    import dag
    import blockio
    d = dag.DAG.from_file(dag_fpathname)
    d.add_block(block_id, block)
    d.to_file(dag_fpathname)
    blockio.save_block(block_id, [], d)
def update_block(dag_fpathname, block_id, block):
    """Update an existing block; print a message when *block_id* is unknown."""
    import dag
    import blockio
    block['block_id'] = block_id
    d = dag.DAG.from_file(dag_fpathname)
    # Guard against silent no-ops on unknown ids.
    if block_id not in d.block_ids():
        print('Block {} was not found.'.format(block_id))
        return
    d.update_block(block)
    # NOTE(review): unlike add_block, the DAG itself is not re-saved here
    # (no d.to_file call) -- confirm whether that is intentional.
    blockio.save_block(block_id, [], d)
def remove_block(dag_fpathname, block_id):
    """Remove a block from the DAG file; print a message when unknown."""
    import dag
    # NOTE(review): blockio is imported but unused here -- the on-disk
    # block file is not deleted.  Confirm whether it should be.
    import blockio
    d = dag.DAG.from_file(dag_fpathname)
    if block_id not in d.block_ids():
        print('Block {} was not found.'.format(block_id))
        return
    d.remove_block(block_id)
    d.to_file(dag_fpathname)
|
def execute_blocks(dag_fpathname, block_ids, all=False):
import dag
import dagexecutor
d = dag.DAG.from_file(dag_fpathname)
if all: block_ids = d.block_ids()
dex = dagexecutor.DAGExecutor(d, dag_fpathname)
dex.execute_blocks(block_ids)
def open_notebook(nbfile):
from utils import ConsoleExecutor
import subprocess
print('Running {}'.format(nbfile))
command = 'jupyter notebook ' + nbfile
subprocess.check_output(command.split())
def create_flow(dag_fpathname, block_ids, flow_name, run=False):
import dag
import flowmanager
import os
d = dag.DAG.from_file(dag_fpathname)
flow = flowmanager.FlowManager(d, os.path.dirname(dag_fpathname))
flow_fname = flow_name
if not flow_fname.endswith('.ipynb'): flow_fname += '.ipynb'
flow.flow_to_file(block_ids, flow_fname)
if run: open_notebook(flow_fname)
def update_from_flow(dag_fpathname, flow_fpathname):
import dag
import flowmanager
d = dag.DAG.from_file(dag_fpathname)
flow = flowmanager.FlowManager(d, dag_fpathname)
flow.apply_flow_changes(flow_fpathname)
d.to_file(dag_fpathname)
def new_project(project_name, run=False):
import dag
from utils import ConsoleExecutor
d = dag.DAG.empty(project_name)
dag_fpathname = project_name+'.dagpy'
d.to_file(dag_fpathname)
if run:
flowname = project_name + '_initialflow.ipynb'
create_flow(dag_fpathname, [], flowname, run)
def display_dag(dag_fpathname, flow = None):
import dag
import utils
d = dag.DAG.from_file(dag_fpathname)
to_color = []
if flow is not None:
to_color = utils.all_dependencies(d, flow)
utils.dag_draw(d, to_color=to_color)
def add_or_update_block(dag_fpathname, block_id, block):
import dag
import blockio
d = dag.DAG.from_file(dag_fpathname)
is_new = d.add_or_update_block(block_id, block)
d.to_file(dag_fpathname)
if is_new: blockio.save_block(block_id, [], d)
def add_block(dag_fpathname, block_id, block):
import dag
import blockio
d = dag.DAG.from_file(dag_fpathname)
d.add_block(block_id, block)
d.to_file(dag_fpathname)
blockio.save_block(block_id, [], d)
def update_block(dag_fpathname, block_id, block):
import dag
import blockio
block['block_id'] = block_id
d = dag.DAG.from_file(dag_fpathname)
d.update_block(block)
blockio.save_block(block_id, [], d)
def remove_block(dag_fpathname, block_id):
import dag
import blockio
d = dag.DAG.from_file(dag_fpathname)
d.remove_block(block_id)
d.to_file(dag_fpathname)
| Python | 0 |
eb57a07277f86fc90b7845dc48fb5cde1778c8d4 | Test cut_by_number with words and normal chunk numbers | test/unit_test/test_cut_number.py | test/unit_test/test_cut_number.py | from lexos.processors.prepare.cutter import split_keep_whitespace, \
count_words, cut_by_number
class TestCutByNumbers:
    # Unit tests for the cutter helpers used by cut-by-number chunking.
    def test_split_keep_whitespace(self):
        assert split_keep_whitespace("Test string") == ["Test", " ", "string"]
        assert split_keep_whitespace("Test") == ["Test"]
        assert split_keep_whitespace(" ") == ["", " ", ""]  # intended?
        assert split_keep_whitespace("") == [""]
    def test_count_words(self):
        # Whitespace-only and empty tokens must not be counted as words.
        assert count_words(["word", "word", " ", "not", "word"]) == 4
        assert count_words(['\n', '\t', ' ', '', '\u3000', "word"]) == 1
        assert count_words([""]) == 0
    def test_cut_by_number_normal(self):
        # Chunks keep their trailing whitespace, except the final chunk.
        assert cut_by_number("Text", 1) == ["Text"]
        assert cut_by_number("This text has five words", 5) == \
            ["This ", "text ", "has ", "five ", "words"]
        assert cut_by_number("Hanging space ", 2) == ["Hanging ", "space "]
| from lexos.processors.prepare.cutter import split_keep_whitespace, \
count_words, cut_by_number
class TestCutByNumbers:
def test_split_keep_whitespace(self):
assert split_keep_whitespace("Test string") == ["Test", " ", "string"]
assert split_keep_whitespace("Test") == ["Test"]
assert split_keep_whitespace(" ") == ["", " ", ""] # intended?
assert split_keep_whitespace("") == [""]
def test_count_words(self):
assert count_words(["word", "word", " ", "not", "word"]) == 4
assert count_words(['\n', '\t', ' ', '', '\u3000', "word"]) == 1
assert count_words([""]) == 0
| Python | 0.000003 |
c05d0f2dd77678133af1bbf49915aeaf24efbedc | simplify line counting method | httplang/httplang.py | httplang/httplang.py | import parse
import sys
import utils
import repl
def main():
    """Run a source file when one is given on the command line,
    otherwise start the interactive REPL and exit."""
    if len(sys.argv) < 2:
        repl.enterREPL()
        sys.exit()
    inputFile = sys.argv[1]
    run(inputFile)
def run(file_):
    """Parse *file_* and return the interpreter's variable table."""
    with open(file_, 'rb') as source:
        # Enumerate from 1 so reported line numbers match editor lines.
        parse.preParse(enumerate(source, 1))
    return utils.baseVariables
if __name__ == "__main__":
main()
| import parse
import sys
import utils
import repl
def main():
if len(sys.argv) < 2:
repl.enterREPL()
sys.exit()
inputFile = sys.argv[1]
run(inputFile)
def run(file_):
with open(file_, 'rb') as file:
#pass enumerated file so we can get line numbers
parse.preParse(enumerate(file))
return utils.baseVariables
if __name__ == "__main__":
main()
| Python | 0.03329 |
c806a3702c95812dd57aca4106a782a854268993 | Comment out configuration of real systems | server/systems/__init__.py | server/systems/__init__.py | import logging
from django.core.exceptions import ObjectDoesNotExist
from base import BaseEnvironment
from producers import CogenerationUnit, PeakLoadBoiler
from storages import HeatStorage, PowerMeter
from consumers import ThermalConsumer, ElectricalConsumer
from server.models import Device, Configuration, DeviceConfiguration
logger = logging.getLogger('simulation')
def get_initialized_scenario():
    """Instantiate one system object per configured Device and return them.

    The device_type string is mapped to a class name via
    Device.DEVICE_TYPES and resolved from this module's globals.
    """
    devices = list(Device.objects.all())
    system_list = []
    env = BaseEnvironment()
    for device in devices:
        for device_type, class_name in Device.DEVICE_TYPES:
            if device.device_type == device_type:
                system_class = globals()[class_name]
                system_list.append(system_class(device.id, env))
    # Applying stored DeviceConfiguration values is currently disabled
    # (left commented out for reference).
    # configurations = DeviceConfiguration.objects.all()
    # for device in system_list:
    #     # configure systems
    #     for configuration in configurations:
    #         if configuration.device_id == device.id:
    #             value = parse_value(configuration)
    #             if configuration.key in device.config:
    #                 device.config[configuration.key] = value
    return system_list
def get_user_function(systems, code=None):
    """Build and return a callable wrapping the user's scripting code.

    The generated function takes one ``device_<id>`` parameter per system.
    When *code* is None it is read from server/user_code.py.
    SECURITY NOTE: the code is executed verbatim via exec -- only trusted
    sources must be able to write user_code.py.
    NOTE: ``exec ... in ...`` is Python 2 statement syntax.
    """
    local_names = ['device_%s' % system.id for system in systems]
    if code is None:
        with open('server/user_code.py', "r") as code_file:
            code = code_file.read()
    lines = []
    lines.append("def user_function(%s):" %
                 (",".join(local_names)))
    # Indent every user line one tab so it becomes the function body.
    for line in code.split("\n"):
        lines.append("\t" + line)
    lines.append("\tpass")  # make sure function is not empty
    source = "\n".join(lines)
    namespace = {}
    exec source in namespace  # execute code in namespace
    return namespace['user_function']
def perform_configuration(data):
    """Persist a list of configuration entries.

    Each entry needs the keys device, key, value, type and unit.
    device == '0' targets a global Configuration row; anything else
    targets a DeviceConfiguration for that device, but only when the
    key exists in the corresponding system class's config.  Existing
    rows are updated in place; new ones are bulk-created at the end.
    Invalid entries are logged and skipped.
    """
    configurations = []
    device_configurations = []
    for config in data:
        if all(x in config for x in ['device', 'key', 'value', 'type', 'unit']):
            if config['device'] == '0':
                # Global (device-independent) configuration value.
                try:
                    existing_config = Configuration.objects.get(
                        key=config['key'])
                    existing_config.value = config['value']
                    existing_config.value_type = int(
                        config['type'])
                    existing_config.unit = config['unit']
                    existing_config.save()
                except Configuration.DoesNotExist:
                    configurations.append(
                        Configuration(key=config['key'], value=config['value'], value_type=int(config['type']), unit=config['unit']))
            else:
                try:
                    device = Device.objects.get(id=config['device'])
                    for device_type, class_name in Device.DEVICE_TYPES:
                        if device.device_type == device_type:
                            system_class = globals()[class_name]
                            # Make sure that key is present in corresponding system
                            # class
                            if config['key'] in system_class(0, BaseEnvironment()).config:
                                try:
                                    existing_config = DeviceConfiguration.objects.get(
                                        device=device, key=config['key'])
                                    existing_config.device = device
                                    existing_config.value = config['value']
                                    existing_config.value_type = int(
                                        config['type'])
                                    existing_config.unit = config['unit']
                                    existing_config.save()
                                except DeviceConfiguration.DoesNotExist:
                                    device_configurations.append(
                                        DeviceConfiguration(device=device, key=config['key'], value=config['value'], value_type=int(config['type']), unit=config['unit']))
                except ObjectDoesNotExist:
                    logger.error("Unknown device %s" % config['device'])
                except ValueError:
                    logger.error(
                        "ValueError value_type '%s' not an int" % config['type'])
        else:
            logger.error("Incomplete config data: %s" % config)
    if len(configurations) > 0:
        Configuration.objects.bulk_create(configurations)
    if len(device_configurations) > 0:
        DeviceConfiguration.objects.bulk_create(device_configurations)
DeviceConfiguration.objects.bulk_create(device_configurations) | import logging
from base import BaseEnvironment
from producers import CogenerationUnit, PeakLoadBoiler
from storages import HeatStorage, PowerMeter
from consumers import ThermalConsumer, ElectricalConsumer
from server.models import Device, Configuration, DeviceConfiguration
from django.core.exceptions import ObjectDoesNotExist
logger = logging.getLogger('simulation')
def get_initialized_scenario():
    """Instantiate one system object per configured Device, apply stored
    DeviceConfiguration values to it, and return the list."""
    devices = list(Device.objects.all())
    system_list = []
    env = BaseEnvironment()
    for device in devices:
        for device_type, class_name in Device.DEVICE_TYPES:
            if device.device_type == device_type:
                system_class = globals()[class_name]
                system_list.append(system_class(device.id, env))
    configurations = DeviceConfiguration.objects.all()
    for device in system_list:
        # configure systems
        for configuration in configurations:
            if configuration.device_id == device.id:
                # NOTE(review): parse_value is not among this module's
                # visible imports -- confirm it is defined elsewhere,
                # otherwise this raises NameError at runtime.
                value = parse_value(configuration)
                if configuration.key in device.config:
                    device.config[configuration.key] = value
    return system_list
def get_user_function(systems, code=None):
local_names = ['device_%s' % system.id for system in systems]
if code is None:
with open('server/user_code.py', "r") as code_file:
code = code_file.read()
lines = []
lines.append("def user_function(%s):" %
(",".join(local_names)))
for line in code.split("\n"):
lines.append("\t" + line)
lines.append("\tpass") # make sure function is not empty
source = "\n".join(lines)
namespace = {}
exec source in namespace # execute code in namespace
return namespace['user_function']
def perform_configuration(data):
configurations = []
device_configurations = []
for config in data:
if all(x in config for x in ['device', 'key', 'value', 'type', 'unit']):
if config['device'] == '0':
try:
existing_config = Configuration.objects.get(
key=config['key'])
existing_config.value = config['value']
existing_config.value_type = int(
config['type'])
existing_config.unit = config['unit']
existing_config.save()
except Configuration.DoesNotExist:
configurations.append(
Configuration(key=config['key'], value=config['value'], value_type=int(config['type']), unit=config['unit']))
else:
try:
device = Device.objects.get(id=config['device'])
for device_type, class_name in Device.DEVICE_TYPES:
if device.device_type == device_type:
system_class = globals()[class_name]
# Make sure that key is present in corresponding system
# class
if config['key'] in system_class(0, BaseEnvironment()).config:
try:
existing_config = DeviceConfiguration.objects.get(
device=device, key=config['key'])
existing_config.device = device
existing_config.value = config['value']
existing_config.value_type = int(
config['type'])
existing_config.unit = config['unit']
existing_config.save()
except DeviceConfiguration.DoesNotExist:
device_configurations.append(
DeviceConfiguration(device=device, key=config['key'], value=config['value'], value_type=int(config['type']), unit=config['unit']))
except ObjectDoesNotExist:
logger.error("Unknown device %s" % config['device'])
except ValueError:
logger.error(
"ValueError value_type '%s' not an int" % config['type'])
else:
logger.error("Incomplete config data: %s" % config)
if len(configurations) > 0:
Configuration.objects.bulk_create(configurations)
if len(device_configurations) > 0:
DeviceConfiguration.objects.bulk_create(device_configurations) | Python | 0 |
cf84dfda73032a276b2d6f63f2c70f69e61f89fe | Check validity of the config to avoid silent errors. | keras_retinanet/utils/config.py | keras_retinanet/utils/config.py | """
Copyright 2017-2018 Fizyr (https://fizyr.com)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import configparser
import numpy as np
import keras
from ..utils.anchors import AnchorParameters
def read_config_file(config_path):
    """Read an INI config file and validate its anchor_parameters section.

    Raises AssertionError when the file is missing or malformed.
    """
    # Validate existence before parsing: ConfigParser.read() silently
    # skips unreadable paths, so checking afterwards (as the previous
    # version did) worked only by accident of assertion ordering.
    assert os.path.isfile(config_path), "Could not find {}.".format(config_path)
    config = configparser.ConfigParser()
    config.read(config_path)
    assert 'anchor_parameters' in config, \
        "Malformed config file. Verify that it contains the anchor_parameters section."
    assert {'sizes', 'strides', 'ratios', 'scales'} <= set(config['anchor_parameters']), \
        "Malformed config file. Verify that it contains the following keys: sizes, strides, ratios and scales."
    return config
def parse_anchor_parameters(config):
    """Build an AnchorParameters object from the anchor_parameters section."""
    section = config['anchor_parameters']
    floatx = keras.backend.floatx()
    ratios = np.array([float(v) for v in section['ratios'].split(' ')], floatx)
    scales = np.array([float(v) for v in section['scales'].split(' ')], floatx)
    sizes = [int(v) for v in section['sizes'].split(' ')]
    strides = [int(v) for v in section['strides'].split(' ')]
    return AnchorParameters(sizes, strides, ratios, scales)
| """
Copyright 2017-2018 Fizyr (https://fizyr.com)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import configparser
import numpy as np
import keras
from ..utils.anchors import AnchorParameters
def read_config_file(config_path):
config = configparser.ConfigParser()
config.read(config_path)
return config
def parse_anchor_parameters(config):
ratios = np.array(list(map(float, config['anchor_parameters']['ratios'].split(' '))), keras.backend.floatx())
scales = np.array(list(map(float, config['anchor_parameters']['scales'].split(' '))), keras.backend.floatx())
sizes = list(map(int, config['anchor_parameters']['sizes'].split(' ')))
strides = list(map(int, config['anchor_parameters']['strides'].split(' ')))
return AnchorParameters(sizes, strides, ratios, scales)
| Python | 0 |
91e916cb67867db9ce835be28b31904e6efda832 | Add comment to new test | spacy/tests/regression/test_issue1727.py | spacy/tests/regression/test_issue1727.py | '''Test that models with no pretrained vectors can be deserialized correctly
after vectors are added.'''
from __future__ import unicode_literals
import numpy
from ...pipeline import Tagger
from ...vectors import Vectors
from ...vocab import Vocab
from ..util import make_tempdir
def test_issue1727():
    """Regression test: a tagger trained without pretrained vectors must
    still deserialize with pretrained_dims == 0 after vectors are added."""
    data = numpy.ones((3, 300), dtype='f')
    keys = [u'I', u'am', u'Matt']
    vectors = Vectors(data=data, keys=keys)
    tagger = Tagger(Vocab())
    tagger.add_label('PRP')
    tagger.begin_training()
    assert tagger.cfg.get('pretrained_dims', 0) == 0
    # Attach vectors only after training, then round-trip through disk.
    tagger.vocab.vectors = vectors
    with make_tempdir() as path:
        tagger.to_disk(path)
        tagger = Tagger(Vocab()).from_disk(path)
        assert tagger.cfg.get('pretrained_dims', 0) == 0
| from __future__ import unicode_literals
import numpy
from ...pipeline import Tagger
from ...vectors import Vectors
from ...vocab import Vocab
from ..util import make_tempdir
def test_issue1727():
data = numpy.ones((3, 300), dtype='f')
keys = [u'I', u'am', u'Matt']
vectors = Vectors(data=data, keys=keys)
tagger = Tagger(Vocab())
tagger.add_label('PRP')
tagger.begin_training()
assert tagger.cfg.get('pretrained_dims', 0) == 0
tagger.vocab.vectors = vectors
with make_tempdir() as path:
tagger.to_disk(path)
tagger = Tagger(Vocab()).from_disk(path)
assert tagger.cfg.get('pretrained_dims', 0) == 0
| Python | 0 |
3826140004b0686f9f262756da20c5163fc5b80d | update icinga_simple format string handling | py3status/modules/icinga_simple.py | py3status/modules/icinga_simple.py | # -*- coding: utf-8 -*-
"""
Display Icinga2 service status information
Configuration Parameters:
- cache_timeout: how often the data should be updated
- base_url: the base url to the icinga-web2 services list
- disable_acknowledge: enable or disable counting of acknowledged service problems
- user: username to authenticate against the icinga-web2 interface
- password: password to authenticate against the icinga-web2 interface
- format: define a format string like "CRITICAL: %d"
- color: define a color for the output
- status: set the status you want to optain (0=OK,1=WARNING,2=CRITICAL,3=UNKNOWN)
@author Ben Oswald <ben.oswald@root-space.de>
@license MIT License <https://opensource.org/licenses/MIT>
@source https://github.com/nazco/i3status-modules
"""
from time import time
import requests
class Py3status:
    """i3status module showing how many Icinga2 services are in a state."""
    # Human readable names for the Icinga2 service states.
    STATUS_NAMES = {
        0: 'OK',
        1: 'WARNING',
        2: 'CRITICAL',
        3: 'UNKNOWN'
    }
    # available configuration parameters
    cache_timeout = 60
    base_url = ''
    disable_acknowledge = False
    url_parameters = "?service_state={service_state}&format=json"
    user = ''
    password = ''
    ca = True
    format = '{status_name}: {count}'
    color = '#ffffff'
    status = 0

    def get_status(self, i3s_output_list, i3s_config):
        """Return the py3status response dict for the configured state."""
        response = {
            'color': self.color,
            'cached_until': time() + self.cache_timeout,
            'full_text': self.format.format(
                status_name=self.STATUS_NAMES.get(self.status),
                count=self._query_service_count(self.status)
            )
        }
        return response

    def _query_service_count(self, state):
        """Return how many services are currently in *state*."""
        # Build the query string locally: the previous version appended
        # "&service_handled=0" to self.url_parameters itself, so every
        # refresh accumulated another copy of the parameter.
        url_parameters = self.url_parameters
        if self.disable_acknowledge:
            url_parameters += "&service_handled=0"
        result = requests.get(
            self.base_url + url_parameters.format(service_state=state),
            auth=(self.user, self.password), verify=self.ca)
        return len(result.json())
if __name__ == "__main__":
pass
| # -*- coding: utf-8 -*-
"""
Display Icinga2 service status information
Configuration Parameters:
- cache_timeout: how often the data should be updated
- base_url: the base url to the icinga-web2 services list
- disable_acknowledge: enable or disable counting of acknowledged service problems
- user: username to authenticate against the icinga-web2 interface
- password: password to authenticate against the icinga-web2 interface
- format: define a format string like "CRITICAL: %d"
- color: define a color for the output
- status: set the status you want to optain (0=OK,1=WARNING,2=CRITICAL,3=UNKNOWN)
@author Ben Oswald <ben.oswald@root-space.de>
@license MIT License <https://opensource.org/licenses/MIT>
@source https://github.com/nazco/i3status-modules
"""
from time import time
import requests
class Py3status:
    """i3status module showing how many Icinga2 services are in a state.

    ``format`` must be configured to a %-style string with one integer
    slot (e.g. "CRITICAL: %d"); the default empty string will raise when
    a count is interpolated.
    """
    # available configuration parameters
    cache_timeout = 60
    base_url = ''
    disable_acknowledge = False
    url_parameters = "?service_state={service_state}&format=json"
    user = ''
    password = ''
    ca = True
    format = ''
    color = '#ffffff'
    status = 0

    def get_status(self, i3s_output_list, i3s_config):
        """Return the py3status response dict for the configured state."""
        response = {
            'color': self.color,
            'cached_until': time() + self.cache_timeout,
            'full_text': self.format % self._query_service_count(self.status)
        }
        return response

    def _query_service_count(self, state):
        """Return how many services are currently in *state*."""
        # Build the query string locally: the previous version appended
        # "&service_handled=0" to self.url_parameters itself, so every
        # refresh accumulated another copy of the parameter.
        url_parameters = self.url_parameters
        if self.disable_acknowledge:
            url_parameters += "&service_handled=0"
        result = requests.get(
            self.base_url + url_parameters.format(service_state=state),
            auth=(self.user, self.password), verify=self.ca)
        return len(result.json())
if __name__ == "__main__":
pass
| Python | 0 |
30e984a0517e6443835f113c3a479aa8302ef14f | Update profile url on amazon tests | social_core/tests/backends/test_amazon.py | social_core/tests/backends/test_amazon.py | import json
from .oauth import OAuth2Test
class AmazonOAuth2Test(OAuth2Test):
    # Amazon OAuth2 backend test against the api.amazon.com profile URL,
    # using canned token and profile responses.
    backend_path = 'social_core.backends.amazon.AmazonOAuth2'
    user_data_url = 'https://api.amazon.com/user/profile'
    expected_username = 'FooBar'
    access_token_body = json.dumps({
        'access_token': 'foobar',
        'token_type': 'bearer'
    })
    user_data_body = json.dumps({
        'user_id': 'amzn1.account.ABCDE1234',
        'email': 'foo@bar.com',
        'name': 'Foo Bar'
    })
    def test_login(self):
        self.do_login()
    def test_partial_pipeline(self):
        self.do_partial_pipeline()
class AmazonOAuth2BrokenServerResponseTest(OAuth2Test):
    """Same login flow, but the server replies with the legacy wrapped
    format ('Request-Id' / 'Profile' envelope) from the old endpoint;
    the backend must still extract the user data.
    """
    backend_path = 'social_core.backends.amazon.AmazonOAuth2'
    # Legacy profile endpoint used by the wrapped-response variant.
    user_data_url = 'https://www.amazon.com/ap/user/profile'
    expected_username = 'FooBar'
    access_token_body = json.dumps({
        'access_token': 'foobar',
        'token_type': 'bearer'
    })
    # Profile payload wrapped in the envelope the old endpoint returned.
    user_data_body = json.dumps({
        'Request-Id': '02GGTU7CWMNFTV3KH3J6',
        'Profile': {
            'Name': 'Foo Bar',
            'CustomerId': 'amzn1.account.ABCDE1234',
            'PrimaryEmail': 'foo@bar.com'
        }
    })
    def test_login(self):
        self.do_login()
    def test_partial_pipeline(self):
        self.do_partial_pipeline()
| import json
from .oauth import OAuth2Test
class AmazonOAuth2Test(OAuth2Test):
backend_path = 'social_core.backends.amazon.AmazonOAuth2'
user_data_url = 'https://www.amazon.com/ap/user/profile'
expected_username = 'FooBar'
access_token_body = json.dumps({
'access_token': 'foobar',
'token_type': 'bearer'
})
user_data_body = json.dumps({
'user_id': 'amzn1.account.ABCDE1234',
'email': 'foo@bar.com',
'name': 'Foo Bar'
})
def test_login(self):
self.do_login()
def test_partial_pipeline(self):
self.do_partial_pipeline()
class AmazonOAuth2BrokenServerResponseTest(OAuth2Test):
backend_path = 'social_core.backends.amazon.AmazonOAuth2'
user_data_url = 'https://www.amazon.com/ap/user/profile'
expected_username = 'FooBar'
access_token_body = json.dumps({
'access_token': 'foobar',
'token_type': 'bearer'
})
user_data_body = json.dumps({
'Request-Id': '02GGTU7CWMNFTV3KH3J6',
'Profile': {
'Name': 'Foo Bar',
'CustomerId': 'amzn1.account.ABCDE1234',
'PrimaryEmail': 'foo@bar.com'
}
})
def test_login(self):
self.do_login()
def test_partial_pipeline(self):
self.do_partial_pipeline()
| Python | 0 |
08834335285b292fe0337525eb2052a38c35d881 | Test a random element. | OWR/oh/tests.py | OWR/oh/tests.py | from __future__ import absolute_import
import json
import random
from unittest import TestCase
from django.test.client import RequestFactory
from django.core.urlresolvers import reverse
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import SuspiciousOperation, PermissionDenied
from OWR.users.factory import UserFactory
from .models import OpenHardwareLike
from .views import like_set
from .factory import OpenHardwareFactory
class OpenHardwareViewTests(TestCase):
    """Exercise the ``like_set`` view: adding and removing OpenHardware
    likes under different authentication and payload conditions.
    """

    # number of OpenHardware fixtures created for each test
    BATCH_NUMBER = 10

    def setUp(self):
        self.factory = RequestFactory()
        self.user = UserFactory()
        self.admin = UserFactory(is_superuser=True, is_staff=True)
        self.ohs = OpenHardwareFactory.create_batch(self.BATCH_NUMBER)

    def test_like_add_not_logged(self):
        """An anonymous user must not be able to add a like."""
        count_start = OpenHardwareLike.objects.count()
        exception = False
        # this request is not authenticated
        request = self.factory.post(reverse('oh:like_set'), data=json.dumps({'id': 1}), content_type='text/javascript')
        request.user = AnonymousUser()
        try:
            like_set(request)
        except PermissionDenied:
            exception = True
        # TODO: test error parameters
        count_final = OpenHardwareLike.objects.count()
        self.assertEqual(exception, True, 'An exception must raise if the user is not logged')
        self.assertEqual(count_start, count_final)

    def test_like_add_logged(self):
        """A logged-in user can like a randomly chosen element."""
        idx = random.randrange(0, self.BATCH_NUMBER)
        count_start = OpenHardwareLike.objects.count()
        # this request is authenticated
        request = self.factory.post(reverse('oh:like_set'), data=json.dumps({'id': self.ohs[idx].pk, 'action': 'set'}), content_type='text/javascript')
        request.user = self.user
        response = like_set(request)
        count_final = OpenHardwareLike.objects.count()
        self.assertEqual(response.status_code, 200)
        self.assertEqual(count_start + 1, count_final)

    def test_like_with_malformed_json(self):
        """An unknown ``action`` value must be rejected as suspicious."""
        count_start = OpenHardwareLike.objects.count()
        exception = False
        # this request is authenticated
        request = self.factory.post(reverse('oh:like_set'), data=json.dumps({'id': 1, 'action': 'foobar'}), content_type='text/javascript')
        request.user = self.user
        try:
            like_set(request)
        except SuspiciousOperation:
            exception = True
        # TODO: test error parameters
        count_final = OpenHardwareLike.objects.count()
        self.assertEqual(exception, True)
        self.assertEqual(count_start, count_final)

    def test_remove_like(self):
        """Unsetting an existing like removes the row and reports success."""
        deal_to_test = self.ohs[0]
        OpenHardwareLike.objects.create(oh=deal_to_test, user=self.user)
        count_start = OpenHardwareLike.objects.count()
        # this request is authenticated
        request = self.factory.post(reverse('oh:like_set'), data=json.dumps({'id': deal_to_test.pk, 'action': 'unset'}), content_type='text/javascript')
        request.user = self.user
        response = like_set(request)
        count_final = OpenHardwareLike.objects.count()
        self.assertEqual(response.status_code, 200)
        self.assertEqual(count_start - 1, count_final)
        self.assertEqual(json.loads(response.content), {'action': 'unset', 'status': 'ok'})
| from __future__ import absolute_import
import json
from unittest import TestCase
from django.test.client import RequestFactory
from django.core.urlresolvers import reverse
from django.contrib.auth.models import AnonymousUser
from django.core.exceptions import SuspiciousOperation, PermissionDenied
from OWR.users.factory import UserFactory
from .models import OpenHardwareLike
from .views import like_set
from .factory import OpenHardwareFactory
class OpenHardwareViewTests(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.user = UserFactory()
self.admin = UserFactory(is_superuser=True, is_staff=True)
self.ohs = OpenHardwareFactory.create_batch(10)
def test_like_add_not_logged(self):
count_start = OpenHardwareLike.objects.count()
exception = False
response = None
# this is not logged
request = self.factory.post(reverse('oh:like_set'), data=json.dumps({'id': 1}), content_type='text/javascript')
request.user = AnonymousUser()
try:
response = like_set(request)
except PermissionDenied as e:
exception = True
# TODO: test error parameters
#js = json.loads(response.body)
count_final = OpenHardwareLike.objects.count()
self.assertEqual(exception, True, 'An exception must raise if the user is not logged')
self.assertEqual(count_start, count_final)
def test_like_add_logged(self):
count_start = OpenHardwareLike.objects.count()
# this is logged
request = self.factory.post(reverse('oh:like_set'), data=json.dumps({'id': 1, 'action': 'set'}), content_type='text/javascript')
request.user = self.user
response = like_set(request)
count_final = OpenHardwareLike.objects.count()
self.assertEqual(response.status_code, 200)
self.assertEqual(count_start + 1, count_final)
def test_like_with_malformed_json(self):
count_start = OpenHardwareLike.objects.count()
exception = False
# this is logged
request = self.factory.post(reverse('oh:like_set'), data=json.dumps({'id': 1, 'action': 'foobar'}), content_type='text/javascript')
request.user = self.user
try:
response = like_set(request)
except SuspiciousOperation as e:
exception = True
# TODO: test error parameters
#js = json.loads(response.body)
count_final = OpenHardwareLike.objects.count()
self.assertEqual(exception, True)
self.assertEqual(count_start, count_final)
def test_remove_like(self):
deal_to_test = self.ohs[0]
OpenHardwareLike.objects.create(oh=deal_to_test, user=self.user)
count_start = OpenHardwareLike.objects.count()
# this is logged
request = self.factory.post(reverse('oh:like_set'), data=json.dumps({'id': deal_to_test.pk, 'action': 'unset'}), content_type='text/javascript')
request.user = self.user
response = like_set(request)
count_final = OpenHardwareLike.objects.count()
self.assertEqual(response.status_code, 200)
self.assertEqual(count_start - 1, count_final)
self.assertEqual(json.loads(response.content), {'action': 'unset', 'status': 'ok'})
| Python | 0 |
a85019e7c5e117467d0ce3bf30b9a7589cd17958 | Update create_test_cutout | src/tasks/python/create_test_cutout.py | src/tasks/python/create_test_cutout.py | from cloudvolume import CloudVolume
image_in = 'gs://neuroglancer/pinky100_v0/image_single_slices'
image_out = 'gs://neuroglancer/pinky100_v0/test_image'
image_mip = 0
roi_in = 'gs://neuroglancer/pinky100_v0/image_single_slices/roicc'
roi_out = 'gs://neuroglancer/pinky100_v0/test_image/roicc'
roi_mip = 6
cfsplit_in = 'gs://neuroglancer/pinky100_v0/image_single_slices/cfsplit'
cfsplit_out = 'gs://neuroglancer/pinky100_v0/test_image/cfsplit'
cfsplit_mip = 2
cfmanual_in = 'gs://neuroglancer/pinky100_v0/image_single_slices/cfmanual'
cfmanual_out = 'gs://neuroglancer/pinky100_v0/test_image/cfmanual'
cfmanual_mip = 5
match_in = 'gs://neuroglancer/pinky100_v0/image_single_slices/nccnet'
match_out = 'gs://neuroglancer/pinky100_v0/test_image/nccnet'
match_mip = 2
dst_in = 'gs://neuroglancer/pinky100_v0/aligned_test_v5'
dst_mip = 0
src_dst = [(cfmanual_in, cfmanual_out, cfmanual_mip)]
z_slice = slice(199, 208)
src_mip = 0
def scale_slice(s, src_mip, dst_mip):
    """Rescale one slice from src_mip coordinates to dst_mip coordinates."""
    factor = 2 ** (dst_mip - src_mip)
    return slice(int(s.start / factor), int(s.stop / factor))
def scale_slices(x_slice, y_slice, z_slice, src_mip, dst_mip):
    """Rescale the x, y and z slices of a bounding box between mip levels."""
    axes = (x_slice, y_slice, z_slice)
    return tuple(scale_slice(axis, src_mip, dst_mip) for axis in axes)
def get_cloudvolume(path, mip):
    # Thin wrapper so call sites read uniformly; CloudVolume comes from the
    # cloudvolume package imported at module level.
    return CloudVolume(path, mip=mip)
def update_info_mips(cv, no_of_mips=6):
    """Add downsample scales for mips 1..no_of_mips to the volume's info,
    halving x/y resolution per mip (z factor stays 1).
    """
    print("updating info mips")
    for mip in range(1,no_of_mips+1):
        factor = (2**mip, 2**mip, 1)
        cv.add_scale(factor)
        # NOTE(review): commit_info() is called once per mip; committing
        # once after the loop would likely suffice -- confirm before changing.
        cv.commit_info()
def get_xy_slice(cv):
    """Return (x, y) slices covering the full extent of a CloudVolume."""
    offset = cv.voxel_offset
    extent = cv.shape
    x_slice = slice(offset[0], offset[0] + extent[0])
    y_slice = slice(offset[1], offset[1] + extent[1])
    return x_slice, y_slice
for (src_path, dst_path, mip) in src_dst:
    print(src_path)
    print(dst_path)
    print(mip)
    # Make sure the destination layer's info advertises mips 1-6 before
    # reopening it at the requested mip.
    cv = get_cloudvolume(dst_path, 0)
    update_info_mips(cv, 6)
    dst_cv = get_cloudvolume(dst_path, mip)
    src_cv = get_cloudvolume(src_path, mip)
    # Copy the destination's full xy extent over the fixed z range.
    # NOTE(review): z_slice is defined once at module level -- assumes z is
    # not downsampled across mips (z factor is 1 in update_info_mips).
    sl = get_xy_slice(dst_cv) + (z_slice,)
    print(sl)
    dst_cv[sl] = src_cv[sl]
dst_cv[sl] = src_cv[sl] | from cloudvolume import CloudVolume
image_in = 'gs://neuroglancer/pinky100_v0/image_single_slices'
image_out = 'gs://neuroglancer/pinky100_v0/test_image'
image_mip = 0
roi_in = 'gs://neuroglancer/pinky100_v0/image_single_slices/roicc'
roi_out = 'gs://neuroglancer/pinky100_v0/test_image/roicc'
roi_mip = 6
cfsplit_in = 'gs://neuroglancer/pinky100_v0/image_single_slices/cfsplit'
cfsplit_out = 'gs://neuroglancer/pinky100_v0/test_image/cfsplit'
cfsplit_mip = 2
match_in = 'gs://neuroglancer/pinky100_v0/image_single_slices/nccnet'
match_out = 'gs://neuroglancer/pinky100_v0/test_image/nccnet'
match_mip = 2
dst_in = 'gs://neuroglancer/pinky100_v0/aligned_test_v5'
dst_mip = 0
src_dst = [(cfsplit_in, cfsplit_out, cfsplit_mip),
(match_in, match_out, match_mip)]
z_slice = slice(199, 208)
src_mip = 0
def scale_slice(s, src_mip, dst_mip):
scale = 1/2**(dst_mip - src_mip)
return slice(int(s.start*scale), int(s.stop*scale))
def scale_slices(x_slice, y_slice, z_slice, src_mip, dst_mip):
return (scale_slice(x_slice, src_mip, dst_mip),
scale_slice(y_slice, src_mip, dst_mip),
scale_slice(z_slice, src_mip, dst_mip))
def get_cloudvolume(path, mip):
return CloudVolume(path, mip=mip)
def update_info_mips(cv, no_of_mips=6):
print("updating info mips")
for mip in range(1,no_of_mips+1):
factor = (2**mip, 2**mip, 1)
cv.add_scale(factor)
cv.commit_info()
def get_xy_slice(cv):
o = cv.voxel_offset
s = cv.shape
return slice(o[0], o[0]+s[0]), slice(o[1], o[1]+s[1])
for (src_path, dst_path, mip) in src_dst:
print(src_path)
print(dst_path)
print(mip)
cv = get_cloudvolume(dst_path, 0)
update_info_mips(cv, 6)
dst_cv = get_cloudvolume(dst_path, mip)
src_cv = get_cloudvolume(src_path, mip)
sl = get_xy_slice(dst_cv) + (z_slice,)
print(sl)
dst_cv[sl] = src_cv[sl] | Python | 0.000001 |
20f14f6c86607d0f1d084ee35c8f2645fde2dacb | Replace 1s and 2s with Xs and Os | capstone/util/tic2pdf.py | capstone/util/tic2pdf.py | from __future__ import division, unicode_literals
import subprocess
import tempfile
BG_COLOR = '1.0 1.0 1.0'
COLORS = {
'X': '0.85 0.12 0.15',
'O': '0.21 0.60 0.83',
' ': '0.83 0.60 0.32'
}
X_OFFSET = 17.0
ROWS = 3
COLS = 3
CELL_SIZE = 20
OFFSET = 10
class Tic2PDF(object):
    '''
    Generates a PDF of the given Tic-Tac-Toe board by emitting PostScript
    into a temp file, converting it with ps2pdf and cropping with pdfcrop
    (both must be on PATH).

    Example:

    board = [[' ', ' ', 'X'],
             [' ', ' ', 'O'],
             [' ', ' ', 'X']]
    filename = '/Users/drobles/Desktop/c4.pdf'
    Tic2PDF(board, filename).create()
    '''

    def __init__(self, board, filename):
        '''Store the board (rows of 'X'/'O'/' ') and the output PDF path.
        '''
        self.board = board
        self.filename = filename

    def create(self):
        # NOTE(review): NamedTemporaryFile defaults to binary mode; the
        # str writes below imply this code targets Python 2 -- confirm
        # before running under Python 3.
        self._tf_ps = tempfile.NamedTemporaryFile()
        self._draw_lines()
        self._draw_pieces()
        self._create_pdf()

    def _draw_lines(self):
        # Emit the four PostScript lines forming the 3x3 grid.
        f = self._tf_ps
        f.write('newpath\n')
        # horizontal
        f.write('10 %f moveto\n' % (CELL_SIZE + 10))
        f.write('60 0 rlineto\n')
        f.write('10 50 moveto\n')
        f.write('60 0 rlineto\n')
        # vertical
        f.write('30 10 moveto\n')
        f.write('0 60 rlineto\n')
        f.write('50 10 moveto\n')
        f.write('0 60 rlineto\n')
        f.write('closepath\n')
        # stroke
        f.write('0 setgray\n')
        f.write('1 setlinewidth\n')
        f.write('stroke\n')

    def _draw_pieces(self):
        # Draw an X as two crossing strokes, an O as a circle; blanks are
        # skipped.  Rows are reversed because PostScript y grows upward.
        f = self._tf_ps
        offset = (CELL_SIZE // 2) + OFFSET
        for ri, row in enumerate(reversed(self.board)):
            for ci, col in enumerate(row):
                f.write('2 setlinewidth\n')
                if col == 'X':
                    # /
                    f.write('newpath\n')
                    f.write('%f %f moveto\n' % ((ci * CELL_SIZE) + 10 + 4, (ri * CELL_SIZE) + 10 + 4))
                    f.write('12 12 rlineto\n')
                    f.write('closepath\n')
                    f.write('%s setrgbcolor\n' % COLORS[col])
                    f.write('stroke\n')
                    # \
                    f.write('newpath\n')
                    f.write('%f %f moveto\n' % ((ci * CELL_SIZE) + 10 + 16, (ri * CELL_SIZE) + 10 + 4))
                    f.write('-12 12 rlineto\n')
                    f.write('closepath\n')
                    f.write('%s setrgbcolor\n' % COLORS[col])
                    f.write('stroke\n')
                elif col == 'O':
                    f.write('%s setrgbcolor\n' % COLORS[col])
                    arc = (ci * CELL_SIZE + offset, ri * CELL_SIZE + offset, CELL_SIZE * 0.38)
                    f.write('%d %d %d 0 360 arc stroke\n' % arc)

    def _create_pdf(self):
        # Finish the PostScript page, convert to PDF, then crop margins.
        self._tf_ps.write('showpage')
        self._tf_ps.flush()
        self.tf_updf = tempfile.NamedTemporaryFile()
        subprocess.call(['ps2pdf', self._tf_ps.name, self.tf_updf.name])
        self._tf_ps.close()
        subprocess.call(["pdfcrop", self.tf_updf.name, self.filename])
        self.tf_updf.close()
def tic2pdf(board, filename):
    # Convenience wrapper; note Tic2PDF.create() has no explicit return,
    # so this returns None.
    return Tic2PDF(board, filename).create()
| from __future__ import division
import subprocess
import tempfile
BG_COLOR = '1.0 1.0 1.0'
COLORS = {
'1': '0.85 0.12 0.15',
'2': '0.00 0.00 1.00',
' ': '0.90 0.90 0.90'
}
X_OFFSET = 17.0
ROWS = 3
COLS = 3
CELL_SIZE = 20
OFFSET = 10
class Tic2PDF(object):
'''
Generates a PDF of the given Tic-Tac-Toe board.
Example:
board = [[' ', ' ', '1'],
[' ', ' ', '2'],
[' ', ' ', '2']]
filename = '/Users/drobles/Desktop/c4.pdf'
Tic2PDF(board, filename).create()
'''
def __init__(self, board, filename):
self.board = board
self.filename = filename
def create(self):
self._tf_ps = tempfile.NamedTemporaryFile()
self._draw_lines()
self._draw_pieces()
self._create_pdf()
def _draw_lines(self):
f = self._tf_ps
f.write('newpath\n')
# horizontal
f.write('10 %f moveto\n' % (CELL_SIZE + 10))
f.write('60 0 rlineto\n')
f.write('10 50 moveto\n')
f.write('60 0 rlineto\n')
# vertical
f.write('30 10 moveto\n')
f.write('0 60 rlineto\n')
f.write('50 10 moveto\n')
f.write('0 60 rlineto\n')
f.write('closepath\n')
# stroke
f.write('0 setgray\n')
f.write('1 setlinewidth\n')
f.write('stroke\n')
def _draw_pieces(self):
f = self._tf_ps
offset = (CELL_SIZE // 2) + OFFSET
for ri, row in enumerate(reversed(self.board)):
for ci, col in enumerate(row):
f.write('2 setlinewidth\n')
if col == '1':
# /
f.write('newpath\n')
f.write('%f %f moveto\n' % ((ci * CELL_SIZE) + 10 + 4, (ri * CELL_SIZE) + 10 + 4))
f.write('12 12 rlineto\n')
f.write('closepath\n')
f.write('%s setrgbcolor\n' % COLORS[col])
f.write('stroke\n')
# \
f.write('newpath\n')
f.write('%f %f moveto\n' % ((ci * CELL_SIZE) + 10 + 16, (ri * CELL_SIZE) + 10 + 4))
f.write('-12 12 rlineto\n')
f.write('closepath\n')
f.write('%s setrgbcolor\n' % COLORS[col])
f.write('stroke\n')
elif col == '2':
f.write('%s setrgbcolor\n' % COLORS[col])
arc = (ci * CELL_SIZE + offset, ri * CELL_SIZE + offset, CELL_SIZE * 0.38)
f.write('%d %d %d 0 360 arc stroke\n' % arc)
def _create_pdf(self):
self._tf_ps.write('showpage')
self._tf_ps.flush()
self.tf_updf = tempfile.NamedTemporaryFile()
subprocess.call(['ps2pdf', self._tf_ps.name, self.tf_updf.name])
self._tf_ps.close()
subprocess.call(["pdfcrop", self.tf_updf.name, self.filename])
self.tf_updf.close()
def tic2pdf(board, filename):
return Tic2PDF(board, filename).create()
| Python | 0.010829 |
6ae4f3a71a80d7fe5bb1abe6925a05c4fe811f3c | bump version | forms_builder/__init__.py | forms_builder/__init__.py | __version__ = "9.7.16"
| __version__ = "0.12.2"
| Python | 0 |
3fcd816255116273d6c94558777d82ae089428f0 | Refactor to add subject info | graph_char-path_orth_subjects.py | graph_char-path_orth_subjects.py | import bct
import numpy as np
import pandas as pd
from my_settings import (source_folder, results_path)
subjects = [
    "0008", "0009", "0010", "0012", "0013", "0014", "0015", "0016",
    "0019", "0020", "0021", "0022"
]

ge_data_all = pd.DataFrame()
lambda_data_all = pd.DataFrame()
dia_data_all = pd.DataFrame()

conditions = ['classic', "plan"]
tois = ["pln", "pre-press", "post-press"]

for subject in subjects:
    print("Working on subject: %s" % subject)
    for toi in tois:
        for condition in conditions:
            # Orthogonalised correlation matrices for this
            # subject / condition / time-of-interest.
            data = np.load(source_folder +
                           "graph_data/%s_%s_corr_%s_orth.npy" %
                           (subject, condition, toi))
            graph_data = [bct.charpath(g) for g in data]

            # The script treats the charpath result as:
            # index 0 -> characteristic path length (lambda),
            # index 1 -> global efficiency, index 4 -> graph diameter.
            data_ge = np.asarray([g[1] for g in graph_data])
            data_lambda = np.asarray([g[0] for g in graph_data])
            data_dia = np.asarray([g[4] for g in graph_data])

            ge_data = pd.DataFrame()
            lambda_data = pd.DataFrame()
            dia_data = pd.DataFrame()

            # Bug fix: the "ge" column previously had the (empty) frame
            # itself assigned instead of the computed efficiency values.
            ge_data["ge"] = data_ge
            ge_data["measure"] = "ge"
            # NOTE: column name "tio" (sic) is kept as-is so downstream
            # readers of the CSV files keep working.
            ge_data["tio"] = toi
            ge_data["condition"] = condition
            ge_data["subject"] = subject

            lambda_data["lambda"] = data_lambda
            lambda_data["measure"] = "lambda"
            lambda_data["tio"] = toi
            lambda_data["condition"] = condition
            lambda_data["subject"] = subject

            dia_data["dia"] = data_dia
            dia_data["measure"] = "dia"
            dia_data["tio"] = toi
            dia_data["condition"] = condition
            dia_data["subject"] = subject

            ge_data_all = ge_data_all.append(ge_data)
            lambda_data_all = lambda_data_all.append(lambda_data)
            dia_data_all = dia_data_all.append(dia_data)

ge_data_all.to_csv(results_path + "ge_data_all-tois.csv", index=False)
lambda_data_all.to_csv(results_path + "lambda_data_all-tois.csv", index=False)
dia_data_all.to_csv(results_path + "diameter_data_all-tois.csv", index=False)
| import bct
import numpy as np
import pandas as pd
from my_settings import (source_folder, results_path)
subjects = [
"0008", "0009", "0010", "0012", "0013", "0014", "0015", "0016",
"0019", "0020", "0021", "0022"
]
ge_data_all = pd.DataFrame()
lambda_data_all = pd.DataFrame()
dia_data_all = pd.DataFrame()
conditions =['cls', 'cls', 'cls',
'cls',
'cls',
'cls',
'cls',
'cls',
'cls',
'cls',
'cls',
'cls',
'pln',
'pln',
'pln',
'pln',
'pln',
'pln',
'pln',
'pln',
'pln',
'pln',
'pln',
'pln']
tois = ["pln", "pre-press", "post-press"]
for subject in subjects:
for toi in tois:
cls_all = []
pln_all = []
for subject in subjects:
cls = np.load(source_folder + "graph_data/%s_classic_corr_%s_orth.npy" %
(subject, toi))
pln = np.load(source_folder + "graph_data/%s_plan_corr_%s_orth.npy" %
(subject, toi))
data_cls = [bct.charpath(g) for g in cls]
data_pln = [bct.charpath(g) for g in pln]
# calc global efficiency
cls_ge = np.asarray([g[1] for g in data_cls])
pln_ge = np.asarray([g[1] for g in data_pln])
# calc lambda
cls_lambda = np.asarray([g[0] for g in data_cls])
pln_lambda = np.asarray([g[0] for g in data_pln])
# calc the diameter of the graph
cls_dia = np.asarray([g[4] for g in data_cls])
pln_dia = np.asarray([g[4] for g in data_pln])
ge_data = pd.DataFrame()
lambda_data = pd.DataFrame()
dia_data = pd.DataFrame()
ge_data["ge"] = np.concatenate((cls_ge, pln_ge))
ge_data["measure"] = "ge"
ge_data["tio"] = toi
ge_data["condition"] = conditions
ge_data["subject"] = subject
lambda_data["lambda"] = np.concatenate((cls_lambda, pln_lambda))
lambda_data["measure"] = "lambda"
lambda_data["tio"] = toi
lambda_data["condition"] = conditions
lambda_data["subject"] = subject
dia_data["dia"] = np.concatenate((cls_dia, pln_dia))
dia_data["measure"] = "dia"
dia_data["tio"] = toi
dia_data["condition"] = conditions
dia_data["subject"] = subject
ge_data_all = ge_data_all.append(ge_data)
lambda_data_all = lambda_data_all.append(lambda_data)
dia_data_all = dia_data_all.append(dia_data)
ge_data_all.to_csv(results_path + "ge_data_all-tois.csv", index=False)
lambda_data_all.to_csv(results_path + "lambda_data_all-tois.csv", index=False)
dia_data_all.to_csv(results_path + "diameter_data_all-tois.csv", index=False)
| Python | 0 |
a58646ee72fc894a2f2b885b242cc283a0addd7c | remove args | src/app.py | src/app.py | import argparse
import os
from actions import server, client
# the main entry point for the application
# for simplicity, let's decide that the user decides at runtime to listen
# and the server decides to serve
# location from which files should be served
app_directory = '/home/chris/blaster'
def main():
    """Parse the command line and dispatch to the requested role."""
    arg_parser = argparse.ArgumentParser(description="Exchange files!")
    arg_parser.add_argument(
        'action',
        help="To be the server, type serve; to be the client, type listen")
    parsed = arg_parser.parse_args()
    app_runner(parsed.action)
def app_runner(how):
    """Dispatch to the server or client role.

    Arguments:
        how -- "serve" to run the file server from app_directory,
               "listen" to run the client.

    Returns None on success, or an error message string when the action
    is not recognised.
    """
    if how == "serve":
        # Idiomatic existence test (was `== False`); create the serving
        # directory on first run.
        if not os.path.exists(app_directory):
            os.mkdir(app_directory)
        server.main(app_directory)
    elif how == "listen":
        client.main()
    else:
        return u'Please specify either listen or serve'

if __name__ == '__main__':
    main()
| import argparse
import os
from actions import server, client
# the main entry point for the application
# for simplicity, let's decide that the user decides at runtime to listen
# and the server decides to serve
# location from which files should be served
app_directory = '/home/chris/blaster'
def main():
# get the arguments
parser = argparse.ArgumentParser(description="Exchange files!")
parser.add_argument('action',
help="To be the server, type serve; to be the client, type listen",
)
# parser.add_argument('directory',
# help="The top level directory from which to serve files, e.g. '~/Downloads'",
# )
args = parser.parse_args()
app_runner(args.action) #, args.directory)
def app_runner(how):
if how == "serve":
if os.path.exists(app_directory) == False:
os.mkdir(app_directory)
server.main(app_directory)
elif how == "listen":
client.main()
else:
return u'Please specify either listen or serve'
if __name__ == '__main__':
main()
| Python | 0.999811 |
0f216b43f42ebabedda701fafefe271a223798cb | Fix mcscf example | examples/mcscf/41-mcscf_with_given_densityfit_ints.py | examples/mcscf/41-mcscf_with_given_densityfit_ints.py | #!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
import tempfile
import h5py
from pyscf import gto, df, scf, mcscf
'''
Input Cholesky decomposed integrals for CASSCF
'''
mol = gto.M(atom='H 0 0 0; F 0 0 1', basis='ccpvdz')

#
# Integrals in memory. The size of the integral array is (M,N*(N+1)/2), where
# the last two AO indices are compressed due to the symmetry
#
int3c = df.incore.cholesky_eri(mol, auxbasis='ccpvdz-fit')

mf = scf.density_fit(scf.RHF(mol))
mf.with_df._cderi = int3c
mf.kernel()

# 3-center DF or Cholesky decomposed integrals need to be initialized once in
# mf.with_df._cderi. The DFCASSCF method automatically uses the approximate
# integrals
mc = mcscf.DFCASSCF(mf, 8, 8)
mc.kernel()

#
# Integrals on disk
#
ftmp = tempfile.NamedTemporaryFile()
df.outcore.cholesky_eri(mol, ftmp.name, auxbasis='ccpvdz-fit')
with h5py.File(ftmp.name, 'r') as file1:
    mf = scf.density_fit(scf.RHF(mol))
    # Note: the integral object file1['eri_mo'] is not loaded in memory.
    # It is still the HDF5 array object held on disk. The HDF5 array can be
    # used the same way as a regular numpy ndarray stored in memory.
    mf.with_df._cderi = file1['eri_mo']
    mf.kernel()

    # Note the mc object must be put inside the "with" statement block because
    # it still needs access to the HDF5 integral array on disk
    mc = mcscf.DFCASSCF(mf, 8, 8)
    mc.kernel()
| #!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
import tempfile
import h5py
from pyscf import gto, df, scf, mcscf
'''
Input Cholesky decomposed integrals for CASSCF
'''
mol = gto.M(atom='H 0 0 0; F 0 0 1', basis='ccpvdz')
#
# Integrals in memory. The size of the integral array is (M,N*(N+1)/2), where
# the last two AO indices are compressed due to the symmetry
#
int3c = df.incore.cholesky_eri(mol, auxbasis='ccpvdz-fit')
mf = scf.density_fit(scf.RHF(mol))
mf._cderi = int3c
mf.kernel()
# 3-cetner DF or Cholesky decomposed integrals need to be initialized once in
# mf._cderi. DFCASSCF method automatically use the approximate integrals
mc = mcscf.DFCASSCF(mf, 8, 8)
mc.kernel()
#
# Integrals on disk
#
ftmp = tempfile.NamedTemporaryFile()
df.outcore.cholesky_eri(mol, ftmp.name, auxbasis='ccpvdz-fit')
with h5py.File(ftmp.name, 'r') as file1:
mf = scf.density_fit(scf.RHF(mol))
# Note, here the integral object file1['eri_mo'] are not loaded in memory.
# It is still the HDF5 array object held on disk. The HDF5 array can be used
# the same way as the regular numpy ndarray stored in memory.
mf._cderi = file1['eri_mo']
mf.kernel()
# Note the mc object must be put inside the "with" statement block because it
# still needs access the HDF5 integral array on disk
mc = mcscf.DFCASSCF(mf, 8, 8)
mc.kernel()
| Python | 0.000001 |
d7b260005a30cfd848eefe62f021cb4bf7a59087 | Use tempfile for default upload directory | pyfarm/master/api/agent_updates.py | pyfarm/master/api/agent_updates.py | # No shebang line, this module is meant to be imported
#
# Copyright 2014 Ambient Entertainment Gmbh & Co. KG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Agent Updates
-------------
The API allows access to agent update packages, possibly through redirects
"""
import re
import tempfile
from os import makedirs
from os.path import join, exists
try:
from httplib import BAD_REQUEST, CREATED
except ImportError: # pragma: no cover
from http.client import BAD_REQUEST, CREATED
from werkzeug.utils import secure_filename
from flask.views import MethodView
from flask import request, g
from pyfarm.core.config import read_env
from pyfarm.core.logger import getLogger
from pyfarm.master.utility import jsonify
logger = getLogger("api.agents")
class AgentUpdatesAPI(MethodView):
    def put(self, version):
        """
        A ``PUT`` to this endpoint will upload a new version of pyfarm-agent to
        be used for agent auto-updates.  The update must be a zip file.

        .. http:put:: /api/v1/agents/updates/<string:version> HTTP/1.1

            **Request**

            .. sourcecode:: http

                PUT /api/v1/agents/updates/1.2.3 HTTP/1.1
                Content-Type: application/zip

                <binary data>

            **Response**

            .. sourcecode:: http

                HTTP/1.1 200 OK
                Content-Type: application/json

        :statuscode 200: The update was put in place
        :statuscode 400: there was something wrong with the request (such as an
                         invalid version number specified or the mime type not
                         being application/zip)
        """
        if request.mimetype != "application/zip":
            return (jsonify(error="Data for agent updates must be "
                                  "application/zip"), BAD_REQUEST)

        # Raw strings keep the regex readable and avoid invalid-escape
        # warnings.  The anchored, fully-validated match also makes
        # `version` safe to embed in the filename below (digits, dots and
        # a fixed set of pre-release suffixes only).
        if not re.match(r"\d+(\.\d+(\.\d+)?)?((-pre\d?)|(-dev\d?)|(-rc?\d?)|"
                        r"(-alpha\d?)|(-beta\d?))?$", version):
            return (jsonify(error="Version is not an acceptable version number"),
                    BAD_REQUEST)

        updates_dir = read_env("PYFARM_AGENT_UPDATES_DIR",
                               join(tempfile.gettempdir(), "pyfarm-updates"))
        if not exists(updates_dir):
            makedirs(updates_dir)

        path = join(updates_dir, "pyfarm-agent-%s.zip" % version)
        # `update_file` avoids shadowing the Python 2 builtin `file`.
        with open(path, "wb+") as update_file:
            update_file.write(request.data)

        return "", CREATED
| # No shebang line, this module is meant to be imported
#
# Copyright 2014 Ambient Entertainment Gmbh & Co. KG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Agent Updates
-------------
The API allows access to agent update packages, possibly through redirects
"""
import re
from os import makedirs
from os.path import join, exists
try:
from httplib import BAD_REQUEST, CREATED
except ImportError: # pragma: no cover
from http.client import BAD_REQUEST, CREATED
from werkzeug.utils import secure_filename
from flask.views import MethodView
from flask import request, g
from pyfarm.core.config import read_env
from pyfarm.core.logger import getLogger
from pyfarm.master.utility import jsonify
logger = getLogger("api.agents")
class AgentUpdatesAPI(MethodView):
def put(self, version):
"""
A ``PUT`` to this endpoint will upload a new version of pyfarm-agent to
be used for agent auto-updates. The update must be a zip file.
.. http:put:: /api/v1/agents/updates/<string:version> HTTP/1.1
**Request**
.. sourcecode:: http
PUT /api/v1/agents/updates/1.2.3 HTTP/1.1
Content-Type: application/zip
<binary data>
**Response**
.. sourcecode:: http
HTTP/1.1 200 OK
Content-Type: application/json
:statuscode 200: The update was put in place
:statuscode 400: there was something wrong with the request (such as an
invalid version number specified or the mime type not
being application/zip)
"""
if request.mimetype != "application/zip":
return (jsonify(error="Data for agent updates must be "
"application/zip"), BAD_REQUEST)
if not re.match("\d+(\.\d+(\.\d+)?)?((-pre\d?)|(-dev\d?)|(-rc?\d?)|"
"(-alpha\d?)|(-beta\d?))?$", version):
return (jsonify(error="Version is not an acceptable version number"),
BAD_REQUEST)
updates_dir = read_env("PYFARM_AGENT_UPDATES_DIR", "/tmp/pyfarm-updates")
if not exists(updates_dir):
makedirs(updates_dir)
path = join(updates_dir, "pyfarm-agent-%s.zip" % version)
with open(path, "wb+") as file:
file.write(request.data)
return "", CREATED
| Python | 0 |
cd828f76511d439af3baa0d209d6e23a19776142 | Check if minValue/maxValue is not none before setting default uiMin/uiMax | Python/kraken/core/objects/attributes/number_attribute.py | Python/kraken/core/objects/attributes/number_attribute.py | """Kraken - objects.Attributes.NumberAttribute module.
Classes:
NumberAttribute - Base Attribute.
"""
from attribute import Attribute
class NumberAttribute(Attribute):
"""Number Attributee. Base class for number attribute types"""
    def __init__(self, name, value=0, minValue=None, maxValue=None):
        """Initializes the number attribute.

        Arguments:
        name -- string, name of the attribute.
        value -- int / float, initial value (default: 0).
        minValue -- int / float, optional hard minimum for the value.
        maxValue -- int / float, optional hard maximum for the value.

        """

        super(NumberAttribute, self).__init__(name, value)
        self._min = None
        self._max = None
        self._uiMin = None
        self._uiMax = None

        # Hard limits are applied first so the UI slider defaults set below
        # can be validated against them by setUIMin / setUIMax.
        if minValue is not None:
            self.setMin(minValue)

        if maxValue is not None:
            self.setMax(maxValue)

        # By default the UI slider range mirrors the hard min / max.
        if minValue is not None:
            self.setUIMin(minValue)

        if maxValue is not None:
            self.setUIMax(maxValue)
# ==================
# Min / Max Methods
# ==================
    def getMin(self):
        """Gets the hard minimum value for this attribute.

        Return:
        Float / Integer / None - minimum value, or None if no minimum is set.

        """

        return self._min
def setMin(self, minimum):
"""Sets the minimum value for the attribute.
Note: Only works on float or integer attributes.
Arguments:
minimum -- float / integer, minimum value the attribute can have.
Return:
True if successful.
"""
assert type(minimum) in (int, float), "'minimum' is not of type 'int' or 'float'."
self._min = minimum
return True
    def getMax(self):
        """Gets the hard maximum value for this attribute.

        Return:
        Float / Integer / None - maximum value, or None if no maximum is set.

        """

        return self._max
def setMax(self, maximum):
"""Sets the maximum value for the attribute.
Note: Only works on float or integer attributes.
Arguments:
maximum -- float / integer, maximum value the attribute can have.
Return:
True if successful.
"""
assert type(maximum) in (int, float), "'maximum' is not of type 'int' or 'float'."
self._max = maximum
return True
def getUIMin(self):
"""Gets the default minimum ui slider value for this attribute.
Return:
Float / Integer - default minimum ui slider value.
"""
return self._uiMin
def setUIMin(self, minimum):
"""Sets the default minimum ui slider value for the attribute.
Note: Only works on float or integer attributes.
Arguments:
minimum -- float / integer, default minimum ui slider value.
Return:
True if successful.
"""
attrType = self.__class__.__name__
if attrType is 'IntegerAttribute':
if type(minimum) is not int:
raise TypeError("UiMin value is not of type 'int'.")
if attrType is 'FloatAttribute':
if type(minimum) not in (int, float):
raise TypeError("UiMin value is not of type 'int' or 'float'.")
if self._uiMax is not None:
if minimum > self._uiMax:
raise ValueError('UiMin value is greater than attribute uiMax')
if minimum > self._max:
raise ValueError('UiMin value is greater than attribute maximum')
if minimum < self._min:
raise ValueError('UiMin value is less than attribute minimum')
self._uiMin = minimum
return True
def getUIMax(self):
"""Gets the default maximum ui slider value for this attribute.
Return:
Float / Integer - default maximum ui slider value.
"""
return self._uiMax
def setUIMax(self, maximum):
"""Sets the default maximum ui slider value for the attribute.
Note: Only works on float or integer attributes.
Arguments:
maximum -- float / integer, default maximum ui slider value.
Return:
True if successful.
"""
attrType = self.__class__.__name__
if attrType is 'IntegerAttribute':
if type(maximum) is not int:
raise TypeError("UiMax value is not of type 'int'.")
if attrType is 'FloatAttribute':
if type(maximum) not in (int, float):
raise TypeError("UiMax value is not of type 'int' or 'float'.")
if self._uiMin is not None:
if maximum < self._uiMin:
raise ValueError('UiMax value is less than attribute uiMin')
if maximum < self._min:
raise ValueError('UiMax value is less than attribute minimum')
if maximum > self._max:
raise ValueError('UiMax value is greater than attribute maximum')
self._uiMax = maximum
return True
| """Kraken - objects.Attributes.NumberAttribute module.
Classes:
NumberAttribute - Base Attribute.
"""
from attribute import Attribute
class NumberAttribute(Attribute):
"""Number Attributee. Base class for number attribute types"""
def __init__(self, name, value=0, minValue=None, maxValue=None):
super(NumberAttribute, self).__init__(name, value)
self._min = None
self._max = None
self._uiMin = None
self._uiMax = None
if minValue is not None:
self.setMin(minValue)
if maxValue is not None:
self.setMax(maxValue)
self.setUIMin(minValue)
self.setUIMax(maxValue)
# ==================
# Min / Max Methods
# ==================
def getMin(self):
"""Gets the minimum value for this attribute.
Return:
Float / Integer - minimum value.
"""
return self._min
def setMin(self, minimum):
"""Sets the minimum value for the attribute.
Note: Only works on float or integer attributes.
Arguments:
minimum -- float / integer, minimum value the attribute can have.
Return:
True if successful.
"""
assert type(minimum) in (int, float), "'minimum' is not of type 'int' or 'float'."
self._min = minimum
return True
def getMax(self):
"""Gets the maximum value for this attribute.
Return:
Float / Integer - maximum value.
"""
return self._max
def setMax(self, maximum):
"""Sets the maximum value for the attribute.
Note: Only works on float or integer attributes.
Arguments:
maximum -- float / integer, maximum value the attribute can have.
Return:
True if successful.
"""
assert type(maximum) in (int, float), "'maximum' is not of type 'int' or 'float'."
self._max = maximum
return True
def getUIMin(self):
"""Gets the default minimum ui slider value for this attribute.
Return:
Float / Integer - default minimum ui slider value.
"""
return self._uiMin
def setUIMin(self, minimum):
"""Sets the default minimum ui slider value for the attribute.
Note: Only works on float or integer attributes.
Arguments:
minimum -- float / integer, default minimum ui slider value.
Return:
True if successful.
"""
attrType = self.__class__.__name__
if attrType is 'IntegerAttribute':
if type(minimum) is not int:
raise TypeError("UiMin value is not of type 'int'.")
if attrType is 'FloatAttribute':
if type(minimum) not in (int, float):
raise TypeError("UiMin value is not of type 'int' or 'float'.")
if self._uiMax is not None:
if minimum > self._uiMax:
raise ValueError('UiMin value is greater than attribute uiMax')
if minimum > self._max:
raise ValueError('UiMin value is greater than attribute maximum')
if minimum < self._min:
raise ValueError('UiMin value is less than attribute minimum')
self._uiMin = minimum
return True
def getUIMax(self):
"""Gets the default maximum ui slider value for this attribute.
Return:
Float / Integer - default maximum ui slider value.
"""
return self._uiMax
def setUIMax(self, maximum):
"""Sets the default maximum ui slider value for the attribute.
Note: Only works on float or integer attributes.
Arguments:
maximum -- float / integer, default maximum ui slider value.
Return:
True if successful.
"""
attrType = self.__class__.__name__
if attrType is 'IntegerAttribute':
if type(maximum) is not int:
raise TypeError("UiMax value is not of type 'int'.")
if attrType is 'FloatAttribute':
if type(maximum) not in (int, float):
raise TypeError("UiMax value is not of type 'int' or 'float'.")
if self._uiMin is not None:
if maximum < self._uiMin:
raise ValueError('UiMax value is less than attribute uiMin')
if maximum < self._min:
raise ValueError('UiMax value is less than attribute minimum')
if maximum > self._max:
raise ValueError('UiMax value is greater than attribute maximum')
self._uiMax = maximum
return True
| Python | 0.000001 |
9a4f1da48e72627aa0ff358a3dafe8bb5639482a | refresh access token on each verification | componentsdb/ui.py | componentsdb/ui.py | """
Traditional Web UI.
"""
from functools import wraps
from flask import (
Blueprint, redirect, url_for, render_template, request, session, g
)
from werkzeug.exceptions import BadRequest, Unauthorized
from componentsdb.app import set_current_user_with_token
from componentsdb.auth import user_for_google_id_token
ui = Blueprint(
'ui', __name__, template_folder='ui/templates', static_folder='ui/static',
static_url_path='/ui_static',
)
AUTH_TOKEN_SESSION_KEY = 'componentsdb_auth'
def try_verify_session():
    """Return True if the current session is authorised, False otherwise.

    Thin wrapper around verify_session() that converts its Unauthorized
    exception into a boolean result.
    """
    try:
        verify_session()
        return True
    except Unauthorized:
        return False
def verify_session():
    """Verify the authorisation in the current session.

    Raises Unauthorized if no token is present in the session. On success,
    sets the current user (via set_current_user_with_token) and writes a
    fresh token back into the session.
    """
    token = session.get(AUTH_TOKEN_SESSION_KEY)
    if token is None:
        raise Unauthorized('no user token provided')
    set_current_user_with_token(token)

    # Re-issue the token on every verified request so the user always has
    # a good long expiry window.
    session[AUTH_TOKEN_SESSION_KEY] = g.current_user.token
def auth_or_signin(f):
    """Decorator that redirects to the sign-in page when the session is
    not authorised.

    The sign-in page receives the current URL as the ``target`` query
    parameter so the user can be sent back here afterwards.
    """
    @wraps(f)
    def view(*args, **kwargs):
        if try_verify_session():
            return f(*args, **kwargs)
        return redirect(url_for('ui.signin', target=request.url))
    return view
@ui.route('/')
@auth_or_signin
def index():
return render_template('index.html')
@ui.route('/auth/signin')
def signin():
redir_url = request.args.get('target', url_for('ui.index'))
# Already signed in?
if try_verify_session():
return redirect(redir_url)
# Have we been given a token?
token = request.args.get('token', None)
if token is not None:
set_current_user_with_token(token)
return redirect(redir_url)
# Show sign in
return render_template('signin.html')
@ui.route('/auth/google')
def signin_with_google_token():
redir_url = request.args.get('target', url_for('ui.index'))
token = request.args.get('token', None)
if token is None:
raise BadRequest('no token given')
# Get auth token and add to session
user = user_for_google_id_token(request.args['token'])
session[AUTH_TOKEN_SESSION_KEY] = user.token
return redirect(redir_url)
@ui.route('/auth/signout')
def signout():
redir_url = request.args.get('target', url_for('ui.index'))
# Clear token from user session
del session[AUTH_TOKEN_SESSION_KEY]
return redirect(redir_url)
| """
Traditional Web UI.
"""
from functools import wraps
from flask import (
Blueprint, redirect, url_for, render_template, request, session
)
from werkzeug.exceptions import BadRequest, Unauthorized
from componentsdb.app import set_current_user_with_token
from componentsdb.auth import user_for_google_id_token
ui = Blueprint(
'ui', __name__, template_folder='ui/templates', static_folder='ui/static',
static_url_path='/ui_static',
)
AUTH_TOKEN_SESSION_KEY = 'componentsdb_auth'
def try_verify_session():
"""Like verify_session but return a boolean indicating success rather than
raising an exception."""
try:
verify_session()
except Unauthorized:
return False
return True
def verify_session():
"""Verify the authorisation in the current session. Raises Unauthorized if
the session is not authorised. Sets current_user if the session is
authorised.
"""
t = session.get(AUTH_TOKEN_SESSION_KEY)
if t is None:
raise Unauthorized('no user token provided')
set_current_user_with_token(t)
def auth_or_signin(f):
"""Decorator for a view which re-directs to the sign in page if there is no
current user. The sign in page is given a query string which requests the
current URL as the redirect."""
@wraps(f)
def view(*args, **kwargs):
if not try_verify_session():
return redirect(url_for('ui.signin', target=request.url))
return f(*args, **kwargs)
return view
@ui.route('/')
@auth_or_signin
def index():
return render_template('index.html')
@ui.route('/auth/signin')
def signin():
redir_url = request.args.get('target', url_for('ui.index'))
# Already signed in?
if try_verify_session():
return redirect(redir_url)
# Have we been given a token?
token = request.args.get('token', None)
if token is not None:
set_current_user_with_token(token)
return redirect(redir_url)
# Show sign in
return render_template('signin.html')
@ui.route('/auth/google')
def signin_with_google_token():
redir_url = request.args.get('target', url_for('ui.index'))
token = request.args.get('token', None)
if token is None:
raise BadRequest('no token given')
# Get auth token and add to session
user = user_for_google_id_token(request.args['token'])
session[AUTH_TOKEN_SESSION_KEY] = user.token
return redirect(redir_url)
@ui.route('/auth/signout')
def signout():
redir_url = request.args.get('target', url_for('ui.index'))
# Clear token from user session
del session[AUTH_TOKEN_SESSION_KEY]
return redirect(redir_url)
| Python | 0 |
becef09e0680786343c581d984e7de5dcb961d16 | Fix for handle failed html parse | frappe/utils/xlsxutils.py | frappe/utils/xlsxutils.py | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import openpyxl
import re
from openpyxl.styles import Font
from openpyxl import load_workbook
from six import BytesIO, string_types
ILLEGAL_CHARACTERS_RE = re.compile(r'[\000-\010]|[\013-\014]|[\016-\037]')
# return xlsx file object
# return xlsx file object
def make_xlsx(data, sheet_name, wb=None):
    """Build an xlsx workbook from rows of data and return it as a BytesIO."""
    if wb is None:
        wb = openpyxl.Workbook(write_only=True)

    ws = wb.create_sheet(sheet_name, 0)

    # Make the header row bold.
    header_row = ws.row_dimensions[1]
    header_row.font = Font(name='Calibri', bold=True)

    # HTML is stripped from string cells except on raw import/export sheets.
    strip_html = sheet_name not in ['Data Import Template', 'Data Export']

    for row in data:
        clean_row = []
        for item in row:
            is_text = isinstance(item, string_types)
            value = handle_html(item) if (is_text and strip_html) else item

            if is_text and next(ILLEGAL_CHARACTERS_RE.finditer(value), None):
                # Remove control characters openpyxl refuses to serialise.
                value = ILLEGAL_CHARACTERS_RE.sub('', value)

            clean_row.append(value)
        ws.append(clean_row)

    xlsx_file = BytesIO()
    wb.save(xlsx_file)
    return xlsx_file
def handle_html(data):
    """Convert an HTML fragment to a flattened plain-text string.

    Values without any tag-like markup are returned untouched; if
    html2text fails to parse the markup, the raw (unicode) input is
    returned as-is.
    """
    data = frappe.as_unicode(data)

    # Fast path: nothing that looks like a tag, skip html2text entirely.
    if '<' not in data or '>' not in data:
        return data

    from html2text import HTML2Text

    unescaper = HTML2Text()
    unescaper.unicode_snob = True
    unescaped = unescaper.unescape(data or "")

    converter = HTML2Text()
    converter.ignore_links = True
    converter.body_width = 0

    try:
        text = converter.handle(unescaped)
    except Exception:
        # Unable to parse html, send it raw.
        return data

    # Collapse the markdown-ish output onto a single line.
    text = ", ".join(text.split(' \n'))
    text = " ".join(text.split('\n'))
    text = ", ".join(text.split('# '))

    return text
def read_xlsx_file_from_attached_file(file_id=None, fcontent=None, filepath=None):
    """Return the rows of an xlsx file as a list of lists of cell values.

    The file may be identified by an attached File id, by raw content
    bytes, or by a filesystem path; returns None when none is given.
    """
    if file_id:
        from frappe.utils.file_manager import get_file_path
        filename = get_file_path(file_id)
    elif fcontent:
        from io import BytesIO
        filename = BytesIO(fcontent)
    elif filepath:
        filename = filepath
    else:
        return

    # read_only + data_only: stream values without formulas or styles.
    workbook = load_workbook(filename=filename, read_only=True, data_only=True)
    sheet = workbook.active

    return [[cell.value for cell in row] for row in sheet.iter_rows()]
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import openpyxl
import re
from openpyxl.styles import Font
from openpyxl import load_workbook
from six import BytesIO, string_types
ILLEGAL_CHARACTERS_RE = re.compile(r'[\000-\010]|[\013-\014]|[\016-\037]')
# return xlsx file object
def make_xlsx(data, sheet_name, wb=None):
if wb is None:
wb = openpyxl.Workbook(write_only=True)
ws = wb.create_sheet(sheet_name, 0)
row1 = ws.row_dimensions[1]
row1.font = Font(name='Calibri',bold=True)
for row in data:
clean_row = []
for item in row:
if isinstance(item, string_types) and (sheet_name not in ['Data Import Template', 'Data Export']):
value = handle_html(item)
else:
value = item
if isinstance(item, string_types) and next(ILLEGAL_CHARACTERS_RE.finditer(value), None):
# Remove illegal characters from the string
value = re.sub(ILLEGAL_CHARACTERS_RE, '', value)
clean_row.append(value)
ws.append(clean_row)
xlsx_file = BytesIO()
wb.save(xlsx_file)
return xlsx_file
def handle_html(data):
    """Convert an HTML fragment to a flattened plain-text string.

    Returns the input unchanged when it contains no HTML markup, and
    returns the raw (unicode) input when html2text fails to parse it.
    """
    # return if no html tags found
    data = frappe.as_unicode(data)

    if '<' not in data:
        return data
    if '>' not in data:
        return data

    from html2text import HTML2Text

    h = HTML2Text()
    h.unicode_snob = True
    h = h.unescape(data or "")

    obj = HTML2Text()
    obj.ignore_links = True
    obj.body_width = 0

    try:
        value = obj.handle(h)
    except Exception:
        # BUGFIX: 'value' is unbound when obj.handle() raises, so the
        # original 'return value' turned any parse failure into a
        # NameError. Fall back to the raw input instead.
        return data

    value = ", ".join(value.split(' \n'))
    value = " ".join(value.split('\n'))
    value = ", ".join(value.split('# '))

    return value
def read_xlsx_file_from_attached_file(file_id=None, fcontent=None, filepath=None):
if file_id:
from frappe.utils.file_manager import get_file_path
filename = get_file_path(file_id)
elif fcontent:
from io import BytesIO
filename = BytesIO(fcontent)
elif filepath:
filename = filepath
else:
return
rows = []
wb1 = load_workbook(filename=filename, read_only=True, data_only=True)
ws1 = wb1.active
for row in ws1.iter_rows():
tmp_list = []
for cell in row:
tmp_list.append(cell.value)
rows.append(tmp_list)
return rows
| Python | 0.000002 |
49d8bd1dbec1fa5927a1e487e7f0799de2e2ee11 | Remove unused import | tests/unit/states/archive_test.py | tests/unit/states/archive_test.py | # -*- coding: utf-8 -*-
'''
unit tests for the archive state
'''
# Import Python Libs
import os
import tempfile
# Import Salt Libs
from salt.states import archive
# Import Salt Testing Libs
from salttesting import skipIf, TestCase
from salttesting.helpers import ensure_in_syspath
from salttesting.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch
)
ensure_in_syspath('../../')
archive.__opts__ = {}
archive.__salt__ = {}
archive.__env__ = 'test'
@skipIf(NO_MOCK, NO_MOCK_REASON)
class ArchiveTest(TestCase):
'''
Validate the archive state
'''
def test_extracted_tar(self):
'''
archive.extracted tar options
'''
source = 'file.tar.gz'
tmp_dir = os.path.join(tempfile.gettempdir(), 'test_archive', '')
test_tar_opts = [
'--no-anchored foo',
'v -p --opt',
'-v -p',
'--long-opt -z',
'z -v -weird-long-opt arg',
]
ret_tar_opts = [
['tar', 'x', '--no-anchored', 'foo', '-f'],
['tar', 'xv', '-p', '--opt', '-f'],
['tar', 'x', '-v', '-p', '-f'],
['tar', 'x', '--long-opt', '-z', '-f'],
['tar', 'xz', '-v', '-weird-long-opt', 'arg', '-f'],
]
mock_true = MagicMock(return_value=True)
mock_false = MagicMock(return_value=False)
ret = {'stdout': ['saltines', 'cheese'], 'stderr': 'biscuits', 'retcode': '31337', 'pid': '1337'}
mock_run = MagicMock(return_value=ret)
with patch('os.path.exists', mock_true):
with patch.dict(archive.__opts__, {'test': False,
'cachedir': tmp_dir}):
with patch.dict(archive.__salt__, {'file.directory_exists': mock_false,
'file.file_exists': mock_false,
'file.makedirs': mock_true,
'cmd.run_all': mock_run}):
filename = os.path.join(
tmp_dir,
'files/test/_tmp_test_archive_.tar'
)
for test_opts, ret_opts in zip(test_tar_opts, ret_tar_opts):
ret = archive.extracted(tmp_dir,
source,
'tar',
tar_options=test_opts)
ret_opts.append(filename)
mock_run.assert_called_with(ret_opts, cwd=tmp_dir, python_shell=False)
if __name__ == '__main__':
from integration import run_tests
run_tests(ArchiveTest)
| # -*- coding: utf-8 -*-
'''
unit tests for the archive state
'''
# Import Python Libs
import os
import tempfile
try:
import pwd
HAS_PWD = True
except ImportError:
HAS_PWD = False
# Import Salt Libs
from salt.states import archive
# Import Salt Testing Libs
from salttesting import skipIf, TestCase
from salttesting.helpers import ensure_in_syspath
from salttesting.mock import (
NO_MOCK,
NO_MOCK_REASON,
MagicMock,
patch
)
ensure_in_syspath('../../')
archive.__opts__ = {}
archive.__salt__ = {}
archive.__env__ = 'test'
@skipIf(NO_MOCK, NO_MOCK_REASON)
class ArchiveTest(TestCase):
'''
Validate the archive state
'''
def test_extracted_tar(self):
'''
archive.extracted tar options
'''
source = 'file.tar.gz'
tmp_dir = os.path.join(tempfile.gettempdir(), 'test_archive', '')
test_tar_opts = [
'--no-anchored foo',
'v -p --opt',
'-v -p',
'--long-opt -z',
'z -v -weird-long-opt arg',
]
ret_tar_opts = [
['tar', 'x', '--no-anchored', 'foo', '-f'],
['tar', 'xv', '-p', '--opt', '-f'],
['tar', 'x', '-v', '-p', '-f'],
['tar', 'x', '--long-opt', '-z', '-f'],
['tar', 'xz', '-v', '-weird-long-opt', 'arg', '-f'],
]
mock_true = MagicMock(return_value=True)
mock_false = MagicMock(return_value=False)
ret = {'stdout': ['saltines', 'cheese'], 'stderr': 'biscuits', 'retcode': '31337', 'pid': '1337'}
mock_run = MagicMock(return_value=ret)
with patch('os.path.exists', mock_true):
with patch.dict(archive.__opts__, {'test': False,
'cachedir': tmp_dir}):
with patch.dict(archive.__salt__, {'file.directory_exists': mock_false,
'file.file_exists': mock_false,
'file.makedirs': mock_true,
'cmd.run_all': mock_run}):
filename = os.path.join(
tmp_dir,
'files/test/_tmp_test_archive_.tar'
)
for test_opts, ret_opts in zip(test_tar_opts, ret_tar_opts):
ret = archive.extracted(tmp_dir,
source,
'tar',
tar_options=test_opts)
ret_opts.append(filename)
mock_run.assert_called_with(ret_opts, cwd=tmp_dir, python_shell=False)
if __name__ == '__main__':
from integration import run_tests
run_tests(ArchiveTest)
| Python | 0.000001 |
f5a1e7f8e350a5f1b29c0e60caf178208946a2b1 | Add more samples. | learning-python/ch02/Looping.py | learning-python/ch02/Looping.py | for i in [1, 2, 3, 4]:
print(i)
for i in range(5):
print(i)
colors = ["red", "green", "blue"]
for i in range(len(colors)):
print(i, colors[i])
for color in colors:
print(color)
for idx, color in enumerate(colors):
print(idx, color)
people = ["Scott", "John", "Mike"]
ages = [50, 30, 25]
for person, age in zip(people, ages):
print(person, age)
for data in zip(people, ages):
print(data)
arr = [1, 2, 3, 4, 5, 6]
print(arr)
arr = arr[::-1]
print(arr)
empid = 2
class NoResourceFoundException(Exception):
pass
emps = {1: "Scott", 2: "John", 3: "Tiger"}
for emp in emps.items():
if emp.__contains__(empid):
print("Found")
break
else:
raise NoResourceFoundException("Not found")
from itertools import count
for n in count(5, 3):
if n > 20:
break
print(n, end=", ")
print()
from itertools import compress
ret = compress("abcdefg", (1, 0, 1, 1))
for x in ret:
print(x)
data = range(10)
even = [1, 0] * 10
odd = [0, 1] * 10
evenNumbers = compress(data, even)
oddNumbers = compress(data, odd)
print(list(data))
print(list(evenNumbers))
print(list(oddNumbers))
from itertools import permutations, combinations
print(list(permutations("ABC")))
print(list(combinations("ABC", 2)))
| for i in [1, 2, 3, 4]:
print(i)
for i in range(5):
print(i)
colors = ["red", "green", "blue"]
for i in range(len(colors)):
print(i, colors[i])
for color in colors:
print(color)
for idx, color in enumerate(colors):
print(idx, color)
people = ["Scott", "John", "Mike"]
ages = [50, 30, 25]
for person, age in zip(people, ages):
print(person, age)
for data in zip(people, ages):
print(data)
| Python | 0 |
8e35a5f5e7da38105961178478c33e92c81caf62 | Use new homely._ui.system() instead of subprocess | homely/pipinstall.py | homely/pipinstall.py | from homely._engine2 import Helper, Cleaner, getengine
from homely._utils import haveexecutable
from homely._ui import isinteractive, system
def pipinstall(packagename, which, user=True):
    """Queue installation of *packagename* for each requested python version.

    *which* is an iterable of major python versions (2 and/or 3); each one
    gets its own PIPInstall helper run through the engine.
    """
    engine = getengine()
    for version in which:
        assert version in (2, 3)
        engine.run(PIPInstall(packagename, version, user))
_known_pips = set()
def _haspkg(pipcmd, name):
    """Return True if `<pipcmd> list` shows a package called *name*."""
    output = system([pipcmd, 'list', '--disable-pip-version-check'],
                    stdout=True)[1]
    prefix = '%s ' % name
    # `pip list` prints one package entry per line, name first.
    return any(line.startswith(prefix)
               for line in output.decode('utf-8').split("\n"))
class PIPInstall(Helper):
    """Helper that installs a single package with pip2 or pip3."""

    _name = None
    _version = None
    _user = False

    def __init__(self, name, version, user):
        super(PIPInstall, self).__init__()
        self._name = name
        self._version = version
        self._user = user
        self._pipcmd = {2: "pip2", 3: "pip3"}[version]

        # Only check each pip executable once per process.
        if self._pipcmd in _known_pips:
            return
        if not haveexecutable(self._pipcmd):
            # FIXME: what type of helpful error should we be raising here?
            raise Exception("%s executable not found" % self._pipcmd)
        _known_pips.add(self._pipcmd)

    def getcleaner(self):
        return PIPCleaner(self._name, self._pipcmd)

    def pathsownable(self):
        return {}

    def getclaims(self):
        yield "%s:%s" % (self._pipcmd, self._name)

    def isdone(self):
        return _haspkg(self._pipcmd, self._name)

    @property
    def description(self):
        suffix = ' --user' if self._user else ''
        return "%s install %s%s" % (self._pipcmd, self._name, suffix)

    def makechanges(self):
        cmd = [
            self._pipcmd,
            'install',
            self._name,
            '--disable-pip-version-check',
        ]
        if self._user:
            cmd.append('--user')
        system(cmd)
        # Record the install so the matching cleaner knows it may remove
        # this package later.
        self._setfact('pipinstall:%s:%s' % (self._pipcmd, self._name), True)

    def affectspath(self, path):
        return False
class PIPCleaner(Cleaner):
    """Cleaner that uninstalls a package previously installed by PIPInstall."""

    def __init__(self, name, pipcmd):
        super(PIPCleaner, self).__init__()
        self._name = name
        assert pipcmd in ('pip2', 'pip3')
        self._pipcmd = pipcmd

    def asdict(self):
        return dict(name=self._name, pipcmd=self._pipcmd)

    @classmethod
    def fromdict(class_, data):
        return class_(data["name"], data["pipcmd"])

    def __eq__(self, other):
        return self._name == other._name and self._pipcmd == other._pipcmd

    def isneeded(self):
        # Only needed when we recorded installing the package AND it is
        # still present.
        factname = 'pipinstall:%s:%s' % (self._pipcmd, self._name)
        return self._getfact(factname, False) and _haspkg(self._pipcmd, self._name)

    @property
    def description(self):
        return "%s uninstall %s" % (self._pipcmd, self._name)

    def makechanges(self):
        cmd = [
            self._pipcmd,
            'uninstall',
            self._name,
            '--disable-pip-version-check',
        ]
        if not isinteractive():
            cmd.append('--yes')

        factname = 'pipinstall:%s:%s' % (self._pipcmd, self._name)
        try:
            system(cmd)
        finally:
            # Clear the fact even if the uninstall command fails.
            self._clearfact(factname)
        return []

    def needsclaims(self):
        yield "%s:%s" % (self._pipcmd, self._name)

    def wantspath(self, path):
        return False
| from subprocess import check_output, check_call
from homely._engine2 import Helper, Cleaner, getengine
from homely._utils import haveexecutable
from homely._ui import isinteractive
def pipinstall(packagename, which, user=True):
engine = getengine()
for version in which:
assert version in (2, 3)
helper = PIPInstall(packagename, version, user)
engine.run(helper)
_known_pips = set()
def _haspkg(pipcmd, name):
output = check_output([pipcmd, 'list', '--disable-pip-version-check'])
find = '%s ' % name
for line in output.decode('utf-8').split("\n"):
if line.startswith(find):
return True
return False
class PIPInstall(Helper):
_name = None
_version = None
_user = False
def __init__(self, name, version, user):
super(PIPInstall, self).__init__()
self._name = name
self._version = version
self._user = user
self._pipcmd = {2: "pip2", 3: "pip3"}[version]
if self._pipcmd not in _known_pips:
if not haveexecutable(self._pipcmd):
# FIXME: what type of helpful error should we be raising here?
raise Exception("%s executable not found" % self._pipcmd)
_known_pips.add(self._pipcmd)
def getcleaner(self):
return PIPCleaner(self._name, self._pipcmd)
def pathsownable(self):
return {}
def getclaims(self):
yield "%s:%s" % (self._pipcmd, self._name)
def isdone(self):
return _haspkg(self._pipcmd, self._name)
@property
def description(self):
user = ' --user' if self._user else ''
return "%s install %s%s" % (self._pipcmd, self._name, user)
def makechanges(self):
cmd = [
self._pipcmd,
'install',
self._name,
'--disable-pip-version-check',
]
if self._user:
cmd.append('--user')
check_call(cmd)
factname = 'pipinstall:%s:%s' % (self._pipcmd, self._name)
self._setfact(factname, True)
def affectspath(self, path):
return False
class PIPCleaner(Cleaner):
def __init__(self, name, pipcmd):
super(PIPCleaner, self).__init__()
self._name = name
assert pipcmd in ('pip2', 'pip3')
self._pipcmd = pipcmd
def asdict(self):
return dict(name=self._name, pipcmd=self._pipcmd)
@classmethod
def fromdict(class_, data):
return class_(data["name"], data["pipcmd"])
def __eq__(self, other):
return self._name == other._name and self._pipcmd == other._pipcmd
def isneeded(self):
factname = 'pipinstall:%s:%s' % (self._pipcmd, self._name)
hasfact = self._getfact(factname, False)
return hasfact and _haspkg(self._pipcmd, self._name)
@property
def description(self):
return "%s uninstall %s" % (self._pipcmd, self._name)
def makechanges(self):
cmd = [
self._pipcmd,
'uninstall',
self._name,
'--disable-pip-version-check',
]
if not isinteractive():
cmd.append('--yes')
factname = 'pipinstall:%s:%s' % (self._pipcmd, self._name)
try:
check_call(cmd)
finally:
self._clearfact(factname)
return []
def needsclaims(self):
yield "%s:%s" % (self._pipcmd, self._name)
def wantspath(self, path):
return False
| Python | 0.000001 |
52eebb215f52ae73a881e3d4e9a695139c260d3b | Empty names should be called @ | lexicon/providers/transip.py | lexicon/providers/transip.py | from __future__ import absolute_import
from .base import Provider as BaseProvider
from transip.client import DomainClient
def ProviderParser(subparser):
subparser.add_argument("--auth-username", help="specify username used to authenticate")
subparser.add_argument("--auth-api-key", help="specify API private key to authenticate")
subparser.add_argument("--auth-ca-bundle", help="specify CA bundle to use to verify API SSL certificate")
class Provider(BaseProvider):
def __init__(self, options):
super(Provider, self).__init__(options)
self.provider_name = 'transip'
self.domain_id = None
username = self.options.get('auth_username')
key_file = self.options.get('auth_api_key')
if not username or not key_file:
raise StandardError("No username and/or keyfile was specified")
self.client = DomainClient(
username=username,
key_file=key_file,
mode="readonly",
cacert=self.options.get('auth_ca_bundle', False)
)
# Authenticate against provider,
# Make any requests required to get the domain's id for this provider, so it can be used in subsequent calls.
# Should throw an error if authentication fails for any reason, of if the domain does not exist.
def authenticate(self):
## This request will fail when the domain does not exist,
## allowing us to check for existence
self.client.getInfo(self.options.get('domain'))
# Create record. If record already exists with the same content, do nothing'
def create_record(self, type, name, content):
raise NotImplementedError("Providers should implement this!")
# List all records. Return an empty list if no records found
# type, name and content are used to filter records.
# If possible filter during the query, otherwise filter after response is received.
def list_records(self, type=None, name=None, content=None):
records = self._filter_records(
records=self.client.getInfo(self.options.get('domain')).dnsEntries,
type=type,
name=name,
content=content
)
print 'list_records: {0}'.format(records)
return records
# Update a record. Identifier must be specified.
def update_record(self, identifier, type=None, name=None, content=None):
raise NotImplementedError("Providers should implement this!")
# Delete an existing record.
# If record does not exist, do nothing.
# If an identifier is specified, use it, otherwise do a lookup using type, name and content.
def delete_record(self, identifier=None, type=None, name=None, content=None):
raise NotImplementedError("Providers should implement this!")
def _relative_name(self, record_name):
name = super(Provider, self)._relative_name(record_name)
if not name:
name = "@"
return name
def _filter_records(self, records, type=None, name=None, content=None):
_records = []
for record in records:
if (not type or record.type == type) and \
(not name or record.name == self._relative_name(name)) and \
(not content or record.content == content):
_records.append({
"name": record.name,
"type": record.type,
"content": record.content,
"ttl": record.expire
})
return _records
| from __future__ import absolute_import
from .base import Provider as BaseProvider
from transip.client import DomainClient
def ProviderParser(subparser):
subparser.add_argument("--auth-username", help="specify username used to authenticate")
subparser.add_argument("--auth-api-key", help="specify API private key to authenticate")
subparser.add_argument("--auth-ca-bundle", help="specify CA bundle to use to verify API SSL certificate")
class Provider(BaseProvider):
def __init__(self, options):
super(Provider, self).__init__(options)
self.provider_name = 'transip'
self.domain_id = None
username = self.options.get('auth_username')
key_file = self.options.get('auth_api_key')
if not username or not key_file:
raise StandardError("No username and/or keyfile was specified")
self.client = DomainClient(
username=username,
key_file=key_file,
mode="readonly",
cacert=self.options.get('auth_ca_bundle', False)
)
# Authenticate against provider,
# Make any requests required to get the domain's id for this provider, so it can be used in subsequent calls.
# Should throw an error if authentication fails for any reason, of if the domain does not exist.
def authenticate(self):
## This request will fail when the domain does not exist,
## allowing us to check for existence
self.client.getInfo(self.options.get('domain'))
# Create record. If record already exists with the same content, do nothing'
def create_record(self, type, name, content):
raise NotImplementedError("Providers should implement this!")
# List all records. Return an empty list if no records found
# type, name and content are used to filter records.
# If possible filter during the query, otherwise filter after response is received.
def list_records(self, type=None, name=None, content=None):
records = self._filter_records(
records=self.client.getInfo(self.options.get('domain')).dnsEntries,
type=type,
name=name,
content=content
)
print 'list_records: {0}'.format(records)
return records
# Update a record. Identifier must be specified.
def update_record(self, identifier, type=None, name=None, content=None):
raise NotImplementedError("Providers should implement this!")
# Delete an existing record.
# If record does not exist, do nothing.
# If an identifier is specified, use it, otherwise do a lookup using type, name and content.
def delete_record(self, identifier=None, type=None, name=None, content=None):
raise NotImplementedError("Providers should implement this!")
def _filter_records(self, records, type=None, name=None, content=None):
_records = []
for record in records:
if (not type or record.type == type) and \
(not name or record.name == self._relative_name(name)) and \
(not content or record.content == content):
_records.append({
"name": record.name,
"type": record.type,
"content": record.content,
"ttl": record.expire
})
return _records
| Python | 0.998725 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.